Fix for non-streaming
parent 445cbda02f
commit 9287ff18de
@@ -18,6 +18,7 @@ import {
     textgen_types,
     getTextGenServer,
     validateTextGenUrl,
+    parseTextgenLogprobs,
 } from './scripts/textgen-settings.js';

 const { MANCER, TOGETHERAI, OOBA, APHRODITE, OLLAMA } = textgen_types;
@@ -4478,6 +4479,11 @@ function parseAndSaveLogprobs(data, continueFrom) {
             // `sendOpenAIRequest`. `data` for these APIs is just a string with
             // the text of the generated message, logprobs are not included.
             return;
+        case 'textgenerationwebui':
+            if (textgen_settings.type === textgen_types.LLAMACPP) {
+                logprobs = data?.completion_probabilities?.map(x => parseTextgenLogprobs(x.content, [x])) || null;
+            }
+            break;
         default:
             return;
     }
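For context, the new 'textgenerationwebui' case covers a non-streaming llama.cpp completion, where token probabilities arrive in a completion_probabilities array alongside the generated text. A minimal sketch of the mapping, with the payload field names (content, probs, tok_str, prob) assumed from the llama.cpp server response rather than shown in this diff:

    // Assumed shape of a non-streaming llama.cpp response (illustrative values, not from this commit):
    const data = {
        content: 'Hello there',
        completion_probabilities: [
            { content: 'Hello', probs: [{ tok_str: 'Hello', prob: 0.91 }, { tok_str: 'Hi', prob: 0.06 }] },
            { content: ' there', probs: [{ tok_str: ' there', prob: 0.88 }] },
        ],
    };

    // Same expression as the added branch: one converted entry per generated token,
    // or null when the server did not return probabilities.
    const logprobs = data?.completion_probabilities?.map(x => parseTextgenLogprobs(x.content, [x])) || null;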
@@ -764,7 +764,7 @@ async function generateTextGenWithStreaming(generate_data, signal) {
  * @param {Object} logprobs - logprobs object returned from the API
  * @returns {import('logprobs.js').TokenLogprobs | null} - converted logprobs
  */
-function parseTextgenLogprobs(token, logprobs) {
+export function parseTextgenLogprobs(token, logprobs) {
     if (!logprobs) {
         return null;
     }
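Exporting parseTextgenLogprobs is what makes the import added in the first hunk possible, so the converter can be called outside textgen-settings.js. A small usage sketch, assuming the same entry shape as above (only the null guard is actually visible in this diff):

    import { parseTextgenLogprobs } from './scripts/textgen-settings.js';

    const entry = { content: 'Hello', probs: [{ tok_str: 'Hello', prob: 0.91 }] };
    const tokenLogprobs = parseTextgenLogprobs(entry.content, [entry]);

    // The guard shown above returns null when no logprobs object is provided.
    console.log(parseTextgenLogprobs('Hello', null)); // null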