Specify LLM prompt in case JSON schema is not supported

This commit is contained in:
Cohee
2024-04-14 17:13:54 +03:00
parent b02394008c
commit 3e60919289
3 changed files with 80 additions and 14 deletions

View File

@@ -3,6 +3,7 @@ import {
event_types,
getRequestHeaders,
getStoppingStrings,
main_api,
max_context,
saveSettingsDebounced,
setGenerationParamsFromPreset,
@@ -978,6 +979,10 @@ function getModel() {
return undefined;
}
/**
 * Checks whether the active backend can accept a JSON schema for constrained
 * generation. Only the TabbyAPI backend type qualifies, and only while the
 * Text Generation WebUI API is the currently selected main API.
 * @returns {boolean} True when a JSON schema can be sent to the backend.
 */
export function isJsonSchemaSupported() {
    const usingTextGen = main_api === 'textgenerationwebui';
    return usingTextGen && settings.type === TABBY;
}
export function getTextGenGenerationData(finalPrompt, maxTokens, isImpersonate, isContinue, cfgValues, type) {
const canMultiSwipe = !isContinue && !isImpersonate && type !== 'quiet';
let params = {