Adjust number of VLLM logprobs

Cohee 2024-06-09 00:59:40 +03:00
parent 60b7164c28
commit 1dd21caa66
1 changed file with 9 additions and 1 deletion


@@ -1019,6 +1019,14 @@ export function isJsonSchemaSupported() {
     return [TABBY, LLAMACPP].includes(settings.type) && main_api === 'textgenerationwebui';
 }
 
+function getLogprobsNumber() {
+    if (settings.type === VLLM) {
+        return 5;
+    }
+
+    return 10;
+}
+
 export function getTextGenGenerationData(finalPrompt, maxTokens, isImpersonate, isContinue, cfgValues, type) {
     const canMultiSwipe = !isContinue && !isImpersonate && type !== 'quiet';
     const { banned_tokens, banned_strings } = getCustomTokenBans();
@@ -1028,7 +1036,7 @@ export function getTextGenGenerationData(finalPrompt, maxTokens, isImpersonate,
         'model': getTextGenModel(),
         'max_new_tokens': maxTokens,
         'max_tokens': maxTokens,
-        'logprobs': power_user.request_token_probabilities ? 10 : undefined,
+        'logprobs': power_user.request_token_probabilities ? getLogprobsNumber() : undefined,
         'temperature': settings.dynatemp ? (settings.min_temp + settings.max_temp) / 2 : settings.temp,
         'top_p': settings.top_p,
         'typical_p': settings.typical_p,
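
Likely rationale for the cap (the commit message doesn't state it): vLLM's OpenAI-compatible server rejects requests that ask for more top logprobs than its configured maximum allows (the max_logprobs engine argument, which has defaulted to values as low as 5), so requesting 10 could fail outright on a vLLM backend. Below is a minimal standalone sketch of the changed behavior; settings, power_user, and the VLLM constant are stand-ins for SillyTavern's real module state, not the actual imports.

// Stand-ins for SillyTavern module state (hypothetical values for illustration).
const VLLM = 'vllm';
const settings = { type: VLLM };
const power_user = { request_token_probabilities: true };

// Mirrors the helper added in this commit: vLLM backends get 5 logprobs,
// every other backend keeps the previous value of 10.
function getLogprobsNumber() {
    if (settings.type === VLLM) {
        return 5;
    }

    return 10;
}

// The generation payload then picks up the backend-appropriate value
// instead of the old hard-coded 10.
const payload = {
    logprobs: power_user.request_token_probabilities ? getLogprobsNumber() : undefined,
};

console.log(payload.logprobs); // 5 with a vLLM backend, 10 otherwise

Routing the number through a helper rather than hard-coding the ternary also leaves one obvious place to adjust the limit for other backends later.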