Implement Token Probabilities UI using logprobs

This commit is contained in:
khanon
2024-01-22 23:00:31 -06:00
parent 9b42be2334
commit 60044c18a4
16 changed files with 921 additions and 35 deletions

View File

@@ -705,12 +705,21 @@ router.post('/generate', jsonParser, function (request, response) {
let apiKey;
let headers;
let bodyParams;
const isTextCompletion = Boolean(request.body.model && TEXT_COMPLETION_MODELS.includes(request.body.model)) || typeof request.body.messages === 'string';
if (request.body.chat_completion_source === CHAT_COMPLETION_SOURCES.OPENAI) {
apiUrl = new URL(request.body.reverse_proxy || API_OPENAI).toString();
apiKey = request.body.reverse_proxy ? request.body.proxy_password : readSecret(SECRET_KEYS.OPENAI);
headers = {};
bodyParams = {};
bodyParams = {
logprobs: request.body.logprobs,
};
// Adjust logprobs params for Chat Completions API, which expects { top_logprobs: number; logprobs: boolean; }
if (!isTextCompletion && bodyParams.logprobs > 0) {
bodyParams.top_logprobs = bodyParams.logprobs;
bodyParams.logprobs = true
}
if (getConfigValue('openai.randomizeUserId', false)) {
bodyParams['user'] = uuidv4();
@@ -759,7 +768,6 @@ router.post('/generate', jsonParser, function (request, response) {
bodyParams['stop'] = request.body.stop;
}
const isTextCompletion = Boolean(request.body.model && TEXT_COMPLETION_MODELS.includes(request.body.model)) || typeof request.body.messages === 'string';
const textPrompt = isTextCompletion ? convertTextCompletionPrompt(request.body.messages) : '';
const endpointUrl = isTextCompletion && request.body.chat_completion_source !== CHAT_COMPLETION_SOURCES.OPENROUTER ?
`${apiUrl}/completions` :

View File

@@ -172,6 +172,7 @@ router.post('/generate', jsonParser, async function (req, res) {
'return_full_text': req.body.return_full_text,
'prefix': req.body.prefix,
'order': req.body.order,
'num_logprobs': req.body.num_logprobs,
},
};
@@ -215,7 +216,7 @@ router.post('/generate', jsonParser, async function (req, res) {
}
const data = await response.json();
console.log(data);
console.log("NovelAI Output", data?.output);
return res.send(data);
}
} catch (error) {

View File

@@ -298,11 +298,13 @@ function createSentencepieceDecodingHandler(tokenizer) {
const ids = request.body.ids || [];
const instance = await tokenizer?.get();
const text = await instance?.decodeIds(ids);
return response.send({ text });
const ops = ids.map(id => instance.decodeIds([id]));
const chunks = await Promise.all(ops);
const text = chunks.join('');
return response.send({ text, chunks });
} catch (error) {
console.log(error);
return response.send({ text: '' });
return response.send({ text: '', chunks: [] });
}
};
}