diff --git a/public/scripts/tokenizers.js b/public/scripts/tokenizers.js
index 0fb82b6a..ac05be8b 100644
--- a/public/scripts/tokenizers.js
+++ b/public/scripts/tokenizers.js
@@ -35,7 +35,7 @@ export const SENTENCEPIECE_TOKENIZERS = [
     //tokenizers.NERD2,
 ];
 
-export const TEXTGEN_TOKENIZERS = [OOBA, TABBY, KOBOLDCPP, LLAMACPP];
+export const TEXTGEN_TOKENIZERS = [OOBA, TABBY, KOBOLDCPP, LLAMACPP, APHRODITE];
 
 const TOKENIZER_URLS = {
     [tokenizers.GPT2]: {
diff --git a/src/endpoints/tokenizers.js b/src/endpoints/tokenizers.js
index d86cb139..ac577a4e 100644
--- a/src/endpoints/tokenizers.js
+++ b/src/endpoints/tokenizers.js
@@ -628,6 +628,10 @@ router.post('/remote/textgenerationwebui/encode', jsonParser, async function (re
             url += '/tokenize';
             args.body = JSON.stringify({ 'content': text });
             break;
+        case TEXTGEN_TYPES.APHRODITE:
+            url += '/v1/tokenize';
+            args.body = JSON.stringify({ 'prompt': text });
+            break;
         default:
             url += '/v1/internal/encode';
             args.body = JSON.stringify({ 'text': text });
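
For reference, a minimal sketch of the request the new `TEXTGEN_TYPES.APHRODITE` branch ends up sending. Only the `/v1/tokenize` path and the `'prompt'` field are taken from the diff above; the base URL and the response handling are assumptions, not part of this change.

```js
// Minimal sketch, not part of the patch: a standalone tokenize request
// against an Aphrodite backend, using the same path and body shape as the
// new case added in src/endpoints/tokenizers.js.
const baseUrl = 'http://127.0.0.1:2242'; // placeholder Aphrodite server URL

async function tokenizeWithAphrodite(text) {
    const response = await fetch(`${baseUrl}/v1/tokenize`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ 'prompt': text }),
    });

    if (!response.ok) {
        throw new Error(`Tokenize request failed with status ${response.status}`);
    }

    // The response is assumed to carry a token list and/or count;
    // adjust to whatever schema the backend actually returns.
    return response.json();
}

tokenizeWithAphrodite('Hello, world!').then(console.log).catch(console.error);
```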