From 10d78f9a2589ed6145e02e2656a1ddec535fd9dc Mon Sep 17 00:00:00 2001
From: Cohee <18619528+Cohee1207@users.noreply.github.com>
Date: Fri, 2 Feb 2024 01:09:03 +0200
Subject: [PATCH] Aphrodite tokenizer

---
 public/scripts/tokenizers.js | 2 +-
 src/endpoints/tokenizers.js  | 4 ++++
 2 files changed, 5 insertions(+), 1 deletion(-)

diff --git a/public/scripts/tokenizers.js b/public/scripts/tokenizers.js
index 0fb82b6a..ac05be8b 100644
--- a/public/scripts/tokenizers.js
+++ b/public/scripts/tokenizers.js
@@ -35,7 +35,7 @@ export const SENTENCEPIECE_TOKENIZERS = [
     //tokenizers.NERD2,
 ];
 
-export const TEXTGEN_TOKENIZERS = [OOBA, TABBY, KOBOLDCPP, LLAMACPP];
+export const TEXTGEN_TOKENIZERS = [OOBA, TABBY, KOBOLDCPP, LLAMACPP, APHRODITE];
 
 const TOKENIZER_URLS = {
     [tokenizers.GPT2]: {
diff --git a/src/endpoints/tokenizers.js b/src/endpoints/tokenizers.js
index d86cb139..ac577a4e 100644
--- a/src/endpoints/tokenizers.js
+++ b/src/endpoints/tokenizers.js
@@ -628,6 +628,10 @@ router.post('/remote/textgenerationwebui/encode', jsonParser, async function (re
             url += '/tokenize';
             args.body = JSON.stringify({ 'content': text });
             break;
+        case TEXTGEN_TYPES.APHRODITE:
+            url += '/v1/tokenize';
+            args.body = JSON.stringify({ 'prompt': text });
+            break;
         default:
             url += '/v1/internal/encode';
             args.body = JSON.stringify({ 'text': text });
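
For reference, a minimal sketch of exercising the new Aphrodite branch directly: it sends the same { 'prompt': text } body the added case builds. The function name, the baseUrl parameter, and the `tokens` field in the response are illustrative assumptions, not part of this patch.

    // Sketch only, not part of the patch: POST to Aphrodite's /v1/tokenize route.
    // The `tokens` array in the response is an assumption, not confirmed here.
    async function countAphroditeTokens(baseUrl, text) {
        const response = await fetch(`${baseUrl}/v1/tokenize`, {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' },
            // Aphrodite expects 'prompt', unlike llama.cpp's 'content' or ooba's 'text'.
            body: JSON.stringify({ 'prompt': text }),
        });
        const data = await response.json();
        return Array.isArray(data.tokens) ? data.tokens.length : 0;
    }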