If a token probability is a logarithm, it'll be < 0

No need to read settings to find out if the llama.cpp backend is in use...
Deciare 2024-02-24 00:13:00 -05:00
parent 936fbac6c5
commit 445cbda02f
1 changed file with 1 addition and 2 deletions


@@ -12,7 +12,6 @@ import {
 import { debounce, delay, getStringHash } from './utils.js';
 import { decodeTextTokens, getTokenizerBestMatch } from './tokenizers.js';
 import { power_user } from './power-user.js';
-import { textgenerationwebui_settings, textgen_types } from './textgen-settings.js';
 
 const TINTS = 4;
 const MAX_MESSAGE_LOGPROBS = 100;
@@ -140,7 +139,7 @@ function renderTopLogprobs() {
     const candidates = topLogprobs
         .sort(([, logA], [, logB]) => logB - logA)
         .map(([text, log]) => {
-            if (textgenerationwebui_settings.type !== textgen_types.LLAMACPP) {
+            if (log < 0) {
                 const probability = Math.exp(log);
                 sum += probability;
                 return [text, probability, log];
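
As a sanity check on the new condition: the log-probability of any probability below 1 is strictly negative, so testing the sign of the value distinguishes a logprob from a raw probability without consulting the backend settings. The sketch below illustrates that reasoning; normalizeCandidate is a hypothetical helper written for this note, not part of the patched file, and the premise that llama.cpp reports raw probabilities while other backends report log-probabilities is taken from the condition being replaced.

// Hypothetical helper illustrating the sign test used in the change above.
// Assumption (from the replaced condition): llama.cpp reports raw
// probabilities in [0, 1]; other backends report log-probabilities.
function normalizeCandidate(text, value) {
    if (value < 0) {
        // A negative value can only be a log-probability;
        // exponentiate it to recover the probability.
        return [text, Math.exp(value), value];
    }
    // A non-negative value is treated as a probability already.
    return [text, value, Math.log(value)];
}

console.log(normalizeCandidate('the', Math.log(0.9))); // logprob in  -> p ≈ 0.9
console.log(normalizeCandidate('the', 0.9));           // probability -> p = 0.9

One boundary case worth noting: a token with probability 1 has a logprob of exactly 0, which the log < 0 test classifies as a raw probability; since the two readings coincide there, the result is the same either way.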