#1589 Add 'cache_prompt' for l.cpp
commit 47cb017a45
parent 352b00caca
@@ -769,6 +769,7 @@ export function getTextGenGenerationData(finalPrompt, maxTokens, isImpersonate,
             'logit_bias': logitBiasArray,
             // Conflicts with ooba's grammar_string
             'grammar': settings.grammar_string,
+            'cache_prompt': true,
         };
         params = Object.assign(params, llamaCppParams);
     }
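For context, a minimal sketch (not part of this commit) of how the cache_prompt flag is used when talking to a llama.cpp server's /completion endpoint directly. The server URL, prompt text, and n_predict value below are illustrative assumptions, not values taken from this change.

// Minimal sketch: POST a completion request to a llama.cpp server with
// prompt caching enabled. URL and payload values are illustrative only.
async function requestCompletion() {
    const response = await fetch('http://127.0.0.1:8080/completion', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({
            prompt: 'Once upon a time',
            n_predict: 128,
            // Reuse the server-side KV cache from the previous request when the
            // prompt shares a common prefix, so only new tokens are re-evaluated.
            cache_prompt: true,
        }),
    });
    const data = await response.json();
    console.log(data.content);
}

requestCompletion();

With cache_prompt enabled, repeated requests that share a long common prefix (as chat prompts typically do) avoid re-evaluating that prefix on every generation, which is why the commit sets it unconditionally for llama.cpp requests.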