Sampling: Add ability to send JSON schemas
TabbyAPI supports sending a JSON schema with the prompt, in addition to the EBNF strings supported by outlines. Add an extra input box shown for TabbyAPI only.

Signed-off-by: kingbri <bdashore3@proton.me>
This commit is contained in:
parent 99005d6396
commit 4f0322351e
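For orientation, the sketch below (not part of the commit) shows the kind of object a user might paste into the new schema box; the property names in the schema are purely illustrative. As the hunks below show, the parsed object is stored in settings.json_schema and, for the Tabby backend only, forwarded as the 'json_schema' field of the generation payload.

// Illustrative only: any valid JSON Schema document can go into #tabby_json_schema.
// These property names are made up for the example.
const exampleSchema = {
    type: 'object',
    properties: {
        name: { type: 'string' },
        age: { type: 'integer' },
    },
    required: ['name', 'age'],
};

// After this commit, the equivalent parsed object lives in settings.json_schema
// and is sent to TabbyAPI as the 'json_schema' field of the request.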
@@ -1516,6 +1516,17 @@
             </div>
         </div>
     </div>
+    <div data-newbie-hidden id="json_schema_block" data-tg-type="tabby" class="wide100p">
+        <hr class="wide100p">
+        <h4 class="wide100p textAlignCenter"><span data-i18n="JSON Schema">JSON Schema</span>
+            <a href="https://json-schema.org/learn/getting-started-step-by-step" target="_blank">
+                <small>
+                    <div class="fa-solid fa-circle-question note-link-span"></div>
+                </small>
+            </a>
+        </h4>
+        <textarea id="tabby_json_schema" rows="4" class="text_pole textarea_compact monospace" data-i18n="[placeholder]Type in the desired JSON schema" placeholder="Type in the desired JSON schema"></textarea>
+    </div>
     <div data-newbie-hidden id="grammar_block_ooba" class="wide100p">
         <hr class="wide100p">
         <h4 class="wide100p textAlignCenter">
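The new block carries data-tg-type="tabby", which the settings UI uses to show it only when the Tabby backend is selected; the toggle logic itself is outside this diff. The snippet below is only a hypothetical illustration of how such an attribute could drive visibility, not the project's actual implementation.

// Hypothetical sketch, not from the codebase: show or hide settings blocks
// whose data-tg-type attribute matches the currently selected backend type.
function toggleTypeSpecificBlocks(selectedType) {
    $('[data-tg-type]').each(function () {
        const types = String($(this).data('tgType')).split(',').map(t => t.trim());
        $(this).toggle(types.includes(selectedType));
    });
}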
@@ -128,6 +128,7 @@ const settings = {
     guidance_scale: 1,
     negative_prompt: '',
     grammar_string: '',
+    json_schema: {},
     banned_tokens: '',
     sampler_priority: OOBA_DEFAULT_ORDER,
     samplers: LLAMACPP_DEFAULT_ORDER,
@@ -201,6 +202,7 @@ const setting_names = [
     'guidance_scale',
     'negative_prompt',
     'grammar_string',
+    'json_schema',
     'banned_tokens',
     'legacy_api',
     //'n_aphrodite',
@@ -562,6 +564,16 @@ jQuery(function () {
        },
    });

+    $('#tabby_json_schema').on('input', function () {
+        const json_schema_string = $(this).val();
+
+        // Ignore errors from here
+        try {
+            settings.json_schema = JSON.parse(json_schema_string ?? "{}");
+        } catch {}
+        saveSettingsDebounced();
+    });
+
    $('#textgenerationwebui_default_order').on('click', function () {
        sortOobaItemsByOrder(OOBA_DEFAULT_ORDER);
        settings.sampler_priority = OOBA_DEFAULT_ORDER;
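The handler above re-parses the textarea on every keystroke and silently ignores parse errors, so settings.json_schema keeps the last successfully parsed object while the user is mid-edit. A hypothetical helper (not in the commit) that surfaces errors instead could look like this:

// Hypothetical alternative, not part of the commit: parse without swallowing
// the error so a caller could display feedback for invalid JSON.
function tryParseJsonSchema(raw) {
    try {
        return { ok: true, value: JSON.parse(raw || '{}') };
    } catch (error) {
        return { ok: false, error };
    }
}

// Possible usage:
// const result = tryParseJsonSchema($('#tabby_json_schema').val());
// if (result.ok) { settings.json_schema = result.value; saveSettingsDebounced(); }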
@@ -757,6 +769,12 @@ function setSettingByName(setting, value, trigger) {
        return;
    }

+    if ('json_schema' === setting) {
+        settings.json_schema = value ?? {};
+        $('#tabby_json_schema').text(JSON.stringify(settings.json_schema, null, 2));
+        return;
+    }
+
    const isCheckbox = $(`#${setting}_textgenerationwebui`).attr('type') == 'checkbox';
    const isText = $(`#${setting}_textgenerationwebui`).attr('type') == 'text' || $(`#${setting}_textgenerationwebui`).is('textarea');
    if (isCheckbox) {
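On the restore path above, the stored object is pretty-printed back into the textarea with two-space indentation. A standalone illustration with made-up values:

// JSON.stringify(obj, null, 2) produces the multi-line text that ends up in
// #tabby_json_schema when settings are loaded.
const stored = { type: 'object', properties: { name: { type: 'string' } } };
console.log(JSON.stringify(stored, null, 2));
// {
//   "type": "object",
//   "properties": {
//     "name": {
//       "type": "string"
//     }
//   }
// }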
@@ -1027,6 +1045,7 @@ export function getTextGenGenerationData(finalPrompt, maxTokens, isImpersonate,
        'guidance_scale': cfgValues?.guidanceScale?.value ?? settings.guidance_scale ?? 1,
        'negative_prompt': cfgValues?.negativePrompt ?? substituteParams(settings.negative_prompt) ?? '',
        'grammar_string': settings.grammar_string,
+        'json_schema': settings.type === TABBY ? settings.json_schema : undefined,
        // llama.cpp aliases. In case someone wants to use LM Studio as Text Completion API
        'repeat_penalty': settings.rep_pen,
        'tfs_z': settings.tfs,
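For every backend other than Tabby the field is set to undefined, and undefined-valued keys are dropped when the payload is serialized with JSON.stringify (assuming, as is typical, that the request body is sent that way), so other backends never receive a json_schema key. A quick illustration:

// undefined-valued keys disappear during JSON serialization.
console.log(JSON.stringify({ grammar_string: '', json_schema: undefined }));
// => {"grammar_string":""}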