Suppress tokenization errors when not using Ooba

Closes #2849
Cohee 2024-09-22 17:44:19 +03:00
parent 8c5b02d2ea
commit 93bf87b035
3 changed files with 9 additions and 3 deletions
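
In short: the Ooba /status handler now adds an x-supports-tokenization response header when it can read the model info, the frontend caches that capability in sessionStorage under TOKENIZER_SUPPORTED_KEY, and getTokenizerBestMatch() only picks the remote API tokenizer for Ooba when that flag is present, so other text-completion backends no longer raise tokenization error toasts. Below is a minimal sketch of the client-side half, assuming a browser context with sessionStorage; rememberTokenizationSupport() and canUseRemoteTokenizer() are illustrative helpers, not functions from the codebase.

// Illustrative only: constants mirror the names introduced in the diff below.
const TOKENIZER_SUPPORTED_KEY = 'tokenizationSupported';
const OOBA = 'ooba'; // assumed value of textgen_types.OOBA

// Called with the fetch Response of the /status request.
function rememberTokenizationSupport(statusResponse) {
    const supported = statusResponse.headers.get('x-supports-tokenization') === 'true';
    supported
        ? sessionStorage.setItem(TOKENIZER_SUPPORTED_KEY, 'true')
        : sessionStorage.removeItem(TOKENIZER_SUPPORTED_KEY);
}

// Mirrors the new isTokenizerSupported condition: Ooba additionally needs
// the endpoint to have advertised tokenization support this session.
function canUseRemoteTokenizer(backendType, supportedTypes) {
    const hasValidEndpoint = sessionStorage.getItem(TOKENIZER_SUPPORTED_KEY);
    return supportedTypes.includes(backendType)
        && (backendType !== OOBA || Boolean(hasValidEndpoint));
}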


@@ -211,7 +211,7 @@ import {
     selectContextPreset,
 } from './scripts/instruct-mode.js';
 import { initLocales, t, translate } from './scripts/i18n.js';
-import { getFriendlyTokenizerName, getTokenCount, getTokenCountAsync, getTokenizerModel, initTokenizers, saveTokenCache } from './scripts/tokenizers.js';
+import { getFriendlyTokenizerName, getTokenCount, getTokenCountAsync, getTokenizerModel, initTokenizers, saveTokenCache, TOKENIZER_SUPPORTED_KEY } from './scripts/tokenizers.js';
 import {
     user_avatar,
     getUserAvatars,
@@ -1216,6 +1216,9 @@ async function getStatusTextgen() {
     // Determine instruct mode preset
     autoSelectInstructPreset(online_status);
 
+    const supportsTokenization = response.headers.get('x-supports-tokenization') === 'true';
+    supportsTokenization ? sessionStorage.setItem(TOKENIZER_SUPPORTED_KEY, 'true') : sessionStorage.removeItem(TOKENIZER_SUPPORTED_KEY);
+
     // We didn't get a 200 status code, but the endpoint has an explanation. Which means it DID connect, but I digress.
     if (online_status === 'no_connection' && data.response) {
         toastr.error(data.response, 'API Error', { timeOut: 5000, preventDuplicates: true });


@@ -10,7 +10,8 @@ import { getCurrentDreamGenModelTokenizer, getCurrentOpenRouterModelTokenizer, o
 const { OOBA, TABBY, KOBOLDCPP, VLLM, APHRODITE, LLAMACPP, OPENROUTER, DREAMGEN } = textgen_types;
 
 export const CHARACTERS_PER_TOKEN_RATIO = 3.35;
-const TOKENIZER_WARNING_KEY = 'tokenizationWarningShown';
+export const TOKENIZER_WARNING_KEY = 'tokenizationWarningShown';
+export const TOKENIZER_SUPPORTED_KEY = 'tokenizationSupported';
 
 export const tokenizers = {
     NONE: 0,
@@ -280,8 +281,9 @@ export function getTokenizerBestMatch(forApi) {
     // - Kobold must pass a version check
     // - Tokenizer haven't reported an error previously
     const hasTokenizerError = sessionStorage.getItem(TOKENIZER_WARNING_KEY);
+    const hasValidEndpoint = sessionStorage.getItem(TOKENIZER_SUPPORTED_KEY);
     const isConnected = online_status !== 'no_connection';
-    const isTokenizerSupported = TEXTGEN_TOKENIZERS.includes(textgen_settings.type);
+    const isTokenizerSupported = TEXTGEN_TOKENIZERS.includes(textgen_settings.type) && (textgen_settings.type !== OOBA || hasValidEndpoint);
 
     if (!hasTokenizerError && isConnected) {
         if (forApi === 'kobold' && kai_flags.can_use_tokenization) {


@@ -186,6 +186,7 @@ router.post('/status', jsonParser, async function (request, response) {
 
             const modelName = modelInfo?.model_name;
             result = modelName || result;
+            response.setHeader('x-supports-tokenization', 'true');
         }
     } catch (error) {
         console.error(`Failed to get Ooba model info: ${error}`);
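
For context, a condensed sketch of the server side, assuming an Express-style route as in the hunk above; fetchOobaModelInfo() is a hypothetical stand-in for the actual model-info request, and the response shape is simplified. The point is that the header is only attached when the Ooba model info call succeeds.

router.post('/status', jsonParser, async function (request, response) {
    let result = 'unknown';
    try {
        // Only reached when the Ooba backend answers its model info endpoint.
        const modelInfo = await fetchOobaModelInfo(request.body.api_server); // hypothetical helper
        result = modelInfo?.model_name || result;
        // Advertise tokenization support; the client caches it in sessionStorage.
        response.setHeader('x-supports-tokenization', 'true');
    } catch (error) {
        console.error(`Failed to get Ooba model info: ${error}`);
    }
    return response.send({ result });
});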