Fix logprobs parser on NovelAI non-streaming
parent 58086d26ba
commit b4646da187
@@ -374,6 +374,11 @@ function withVirtualWhitespace(text, span) {
 * @param {string | null} continueFrom - for 'continue' generations, the prompt
 */
export function saveLogprobsForActiveMessage(logprobs, continueFrom) {
    if (!logprobs) {
        // non-streaming APIs could return null data
        return;
    }

    convertTokenIdLogprobsToText(logprobs);

    const msgId = chat.length - 1;
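The guard added here returns early when a non-streaming API (such as NovelAI's) reports no logprob data at all; otherwise the candidates are converted from token IDs to text before being attached to the latest chat message. As a rough illustration of that conversion step, the sketch below maps numeric token-id keys to decoded strings. It assumes a simplified entry shape and a hypothetical decodeTokenIds helper, and is not the project's actual convertTokenIdLogprobsToText implementation.

// Minimal sketch, assuming each entry looks like
// { token: <id>, candidates: [[<id>, <logprob>], ...] } and that
// decodeTokenIds(ids) returns a decoded string for each numeric token ID.
function convertTokenIdLogprobsToTextSketch(entries, decodeTokenIds) {
    for (const entry of entries) {
        // Replace the chosen token's numeric ID with its decoded text.
        entry.token = decodeTokenIds([entry.token])[0];
        // Do the same for every alternative candidate, keeping its logprob.
        entry.candidates = entry.candidates.map(([id, logprob]) =>
            [decodeTokenIds([id])[0], logprob]);
    }
}

// Toy usage with a stand-in decoder:
const toyDecode = (ids) => ids.map((id) => `<tok_${id}>`);
const sample = [{ token: 42, candidates: [[42, -0.1], [7, -2.3]] }];
convertTokenIdLogprobsToTextSketch(sample, toyDecode);
console.log(sample[0].token); // "<tok_42>"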