Parse non-streaming Tabby logprobs

This commit is contained in:
Cohee 2024-02-24 20:10:53 +02:00
parent 3cedf64f66
commit d140b8d5be
2 changed files with 27 additions and 4 deletions

View File

@ -19,6 +19,7 @@ import {
getTextGenServer,
validateTextGenUrl,
parseTextgenLogprobs,
parseTabbyLogprobs,
} from './scripts/textgen-settings.js';
const { MANCER, TOGETHERAI, OOBA, APHRODITE, OLLAMA, INFERMATICAI } = textgen_types;
@ -4484,10 +4485,14 @@ function parseAndSaveLogprobs(data, continueFrom) {
// the text of the generated message, logprobs are not included.
return;
case 'textgenerationwebui':
if (textgen_settings.type === textgen_types.LLAMACPP) {
logprobs = data?.completion_probabilities?.map(x => parseTextgenLogprobs(x.content, [x])) || null;
}
break;
switch (textgen_settings.type) {
case textgen_types.LLAMACPP: {
logprobs = data?.completion_probabilities?.map(x => parseTextgenLogprobs(x.content, [x])) || null;
} break;
case textgen_types.TABBY: {
logprobs = parseTabbyLogprobs(data) || null;
} break;
} break;
default:
return;
}

View File

@ -849,6 +849,24 @@ export function parseTextgenLogprobs(token, logprobs) {
}
}
/**
 * Parses logprobs from a non-streaming TabbyAPI completion response.
 * @param {object} data - Completion response body from the Tabby endpoint.
 * @returns {object[] | null} Per-token logprob objects (via parseTextgenLogprobs),
 * or null when the response carries no text or no `logprobs.text_offset` list.
 */
export function parseTabbyLogprobs(data) {
    const text = data?.choices?.[0]?.text;
    const offsets = data?.choices?.[0]?.logprobs?.text_offset;

    if (!text || !offsets) {
        return null;
    }

    // Convert string offsets list to tokens: each token spans from its offset
    // to the next token's offset (or the end of the text for the last token).
    // `??` (not `||`) so a hypothetical 0 offset doesn't fall back to text.length.
    const tokens = offsets.map((offset, index) => {
        const nextOffset = offsets[index + 1] ?? text.length;
        return text.substring(offset, nextOffset);
    });

    // top_logprobs may be absent even when text_offset is present; fall back to
    // an empty list so the map below doesn't throw on `undefined[index]`.
    const topLogprobs = data?.choices?.[0]?.logprobs?.top_logprobs?.map(x => ({ top_logprobs: [x] })) ?? [];

    return tokens.map((token, index) => parseTextgenLogprobs(token, topLogprobs[index] ?? null));
}
/**
* Parses errors in streaming responses and displays them in toastr.
* @param {Response} response - Response from the server.