API Tokenizer: Add support for TabbyAPI

Use Tabby's /v1/token endpoints.

Signed-off-by: kingbri <bdashore3@proton.me>
parent f31b996cb5
commit 4cfa267b1b
@@ -4,7 +4,7 @@ import { chat_completion_sources, model_list, oai_settings } from "./openai.js";
 import { groups, selected_group } from "./group-chats.js";
 import { getStringHash } from "./utils.js";
 import { kai_flags } from "./kai-settings.js";
-import { isMancer, textgenerationwebui_settings } from "./textgen-settings.js";
+import { isMancer, isTabby, textgenerationwebui_settings } from "./textgen-settings.js";

 export const CHARACTERS_PER_TOKEN_RATIO = 3.35;
 const TOKENIZER_WARNING_KEY = 'tokenizationWarningShown';
@@ -369,6 +369,7 @@ function getRemoteTokenizationParams(str) {
         api: main_api,
         url: getAPIServerUrl(),
         legacy_api: main_api === 'textgenerationwebui' && textgenerationwebui_settings.legacy_api && !isMancer(),
+        use_tabby: isTabby()
     };
 }
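For reference, a minimal sketch of the tokenization payload the client would now send when a Tabby backend is selected. Only the field names visible in the hunk above come from the commit; the concrete values, and any fields defined above the shown context, are illustrative assumptions.

// Illustrative payload from getRemoteTokenizationParams() with a Tabby backend selected.
// Values here are assumptions; only the field names are taken from the hunk above.
const params = {
    api: 'textgenerationwebui',   // main_api
    url: 'http://127.0.0.1:5000', // getAPIServerUrl(), assumed local default
    legacy_api: false,            // legacy mode applies only to older text-gen-webui builds
    use_tabby: true,              // isTabby(): new flag telling the server to use Tabby routes
};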
@@ -3424,7 +3424,7 @@ app.post("/tokenize_via_api", jsonParser, async function (request, response) {
             url += '/v1/token-count';
             args.body = JSON.stringify({ "prompt": text });
         } else {
-            url += '/v1/internal/encode';
+            url += request.body.use_tabby ? '/v1/token/encode' : '/v1/internal/encode';
             args.body = JSON.stringify({ "text": text });
         }
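A minimal sketch of the server-side branching shown above. Everything other than the endpoint selection and the two request bodies (the base URL, the request args, the condition name, and the response handling) is an assumption, since the diff only shows the changed lines.

// Sketch of the /tokenize_via_api endpoint selection; names outside the diff are assumed.
let url = apiServerUrl; // assumed: the server URL forwarded from the client
const args = {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
};

if (useLegacyApi) { // assumed condition name for the legacy text-gen-webui path
    url += '/v1/token-count';
    args.body = JSON.stringify({ "prompt": text });
} else {
    // TabbyAPI exposes /v1/token/encode; stock text-generation-webui uses /v1/internal/encode
    url += request.body.use_tabby ? '/v1/token/encode' : '/v1/internal/encode';
    args.body = JSON.stringify({ "text": text });
}

const result = await fetch(url, args); // response parsing depends on the backend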