Mirror of https://github.com/SillyTavern/SillyTavern.git (synced 2025-02-09 08:38:53 +01:00)

TextGenWebUI: remove legacy API mode

parent 8986cb0039
commit 24884b3206
@@ -2573,10 +2573,6 @@
                     <div data-tg-type="openrouter" class="menu_button menu_button_icon openrouter_authorize" title="Get your OpenRouter API token using OAuth flow. You will be redirected to openrouter.ai" data-i18n="Authorize;[title]Get your OpenRouter API token using OAuth flow. You will be redirected to openrouter.ai">Authorize</div>
                     <div class="api_loading menu_button menu_button_icon" data-i18n="Cancel">Cancel</div>
                 </div>
-                <label data-tg-type="ooba" class="checkbox_label margin-bot-10px" for="legacy_api_textgenerationwebui">
-                    <input type="checkbox" id="legacy_api_textgenerationwebui" />
-                    <span data-i18n="Legacy API (pre-OAI, no streaming)">Legacy API (pre-OAI, no streaming)</span>
-                </label>
                 <label data-tg-type="ooba" class="checkbox_label margin-bot-10px" for="bypass_status_check_textgenerationwebui">
                     <input type="checkbox" id="bypass_status_check_textgenerationwebui" />
                     <span data-i18n="Bypass status check">Bypass status check</span>
@@ -337,7 +337,6 @@
     "Example: 127.0.0.1:5001": "مثال: 127.0.0.1:5001",
     "Authorize": "تفويض",
     "Get your OpenRouter API token using OAuth flow. You will be redirected to openrouter.ai": "احصل على رمز واجهة برمجة التطبيقات الخاص بك لموزع الاتصالات باستخدام تدفق OAuth. سيتم توجيهك إلى openrouter.ai",
-    "Legacy API (pre-OAI, no streaming)": "واجهة برمجة التطبيقات القديمة (قبل OAI، بدون بث)",
     "Bypass status check": "تجاوز فحص الحالة",
     "Chat Completion Source": "مصدر استكمال الدردشة",
     "Reverse Proxy": "الوكيل العكسي",
@@ -337,7 +337,6 @@
     "Example: 127.0.0.1:5001": "Beispiel: 127.0.0.1:5001",
     "Authorize": "Autorisieren",
     "Get your OpenRouter API token using OAuth flow. You will be redirected to openrouter.ai": "Hole dein OpenRouter-API-Token mit OAuth-Fluss. Du wirst zu openrouter.ai weitergeleitet",
-    "Legacy API (pre-OAI, no streaming)": "Legacy-API (vor OAI, kein Streaming)",
     "Bypass status check": "Umgehe Statusüberprüfung",
     "Chat Completion Source": "Quelle für Chat-Vervollständigung",
     "Reverse Proxy": "Reverse-Proxy",
@@ -337,7 +337,6 @@
     "Example: 127.0.0.1:5001": "Ejemplo: 127.0.0.1:5001",
     "Authorize": "Autorizar",
     "Get your OpenRouter API token using OAuth flow. You will be redirected to openrouter.ai": "Obtenga su token de API de OpenRouter utilizando el flujo OAuth. Será redirigido a openrouter.ai",
-    "Legacy API (pre-OAI, no streaming)": "API heredada (pre-OAI, sin streaming)",
     "Bypass status check": "Saltar la verificación del estado",
     "Chat Completion Source": "Fuente de Completado de Chat",
     "Reverse Proxy": "Proxy inverso",
@@ -337,7 +337,6 @@
     "Example: 127.0.0.1:5001": "Exemple : 127.0.0.1:5001",
     "Authorize": "Autoriser",
     "Get your OpenRouter API token using OAuth flow. You will be redirected to openrouter.ai": "Obtenez votre jeton API OpenRouter en utilisant le flux OAuth. Vous serez redirigé vers openrouter.ai",
-    "Legacy API (pre-OAI, no streaming)": "API héritée (pré-OAI, pas de streaming)",
     "Bypass status check": "Contourner la vérification de l'état",
     "Chat Completion Source": "Source de complétion de la conversation",
     "Reverse Proxy": "Proxy inverse",
@@ -337,7 +337,6 @@
     "Example: 127.0.0.1:5001": "Dæmi: 127.0.0.1:5001",
     "Authorize": "Heimild",
     "Get your OpenRouter API token using OAuth flow. You will be redirected to openrouter.ai": "Fáðu API lykilinn þinn fyrir OpenRouter með því að nota OAuth strauminn. Þú verður endurvísað(ð/ur) á openrouter.ai",
-    "Legacy API (pre-OAI, no streaming)": "Eldri API (fyrir OAI, engin flæði)",
     "Bypass status check": "Hlaupa framhjá stöðutík",
     "Chat Completion Source": "Heimild að fullvirkni spjalls",
     "Reverse Proxy": "Reverse proxy",
@@ -337,7 +337,6 @@
     "Example: 127.0.0.1:5001": "Esempio: 127.0.0.1:5001",
     "Authorize": "Autorizzare",
     "Get your OpenRouter API token using OAuth flow. You will be redirected to openrouter.ai": "Ottieni il tuo token API di OpenRouter utilizzando il flusso OAuth. Sarai reindirizzato su openrouter.ai",
-    "Legacy API (pre-OAI, no streaming)": "API legacy (prima di OAI, senza streaming)",
     "Bypass status check": "Ignora controllo stato",
     "Chat Completion Source": "Fonte di Completamento della Chat",
     "Reverse Proxy": "Proxy inverso",
@@ -337,7 +337,6 @@
     "Example: 127.0.0.1:5001": "例: 127.0.0.1:5001",
     "Authorize": "承認",
     "Get your OpenRouter API token using OAuth flow. You will be redirected to openrouter.ai": "OAuthフローを使用してOpenRouter APIトークンを取得します。 openrouter.aiにリダイレクトされます",
-    "Legacy API (pre-OAI, no streaming)": "レガシーAPI(OAI以前のもの、ストリーミングなし)",
     "Bypass status check": "ステータスのチェックをバイパスする",
     "Chat Completion Source": "チャット補完ソース",
     "Reverse Proxy": "リバースプロキシ",
@@ -337,7 +337,6 @@
     "Example: 127.0.0.1:5001": "예: 127.0.0.1:5001",
     "Authorize": "승인하다",
     "Get your OpenRouter API token using OAuth flow. You will be redirected to openrouter.ai": "OAuth 플로우를 사용하여 OpenRouter API 토큰을 가져옵니다. openrouter.ai로 리디렉션됩니다.",
-    "Legacy API (pre-OAI, no streaming)": "레거시 API (OAI 이전, 스트리밍 없음)",
     "Bypass status check": "상태 확인 우회",
     "Chat Completion Source": "채팅 완성 소스",
     "Reverse Proxy": "역방향 프록시",
@@ -337,7 +337,6 @@
     "Example: 127.0.0.1:5001": "Voorbeeld: 127.0.0.1:5001",
     "Authorize": "Toestemming geven",
     "Get your OpenRouter API token using OAuth flow. You will be redirected to openrouter.ai": "Haal uw OpenRouter API-token op met behulp van OAuth-flow. U wordt doorgestuurd naar openrouter.ai",
-    "Legacy API (pre-OAI, no streaming)": "Legacy API (vóór OAI, geen streaming)",
     "Bypass status check": "Omzeil statuscontrole",
     "Chat Completion Source": "Bron voor Chatvoltooiing",
     "Reverse Proxy": "Omgekeerde proxy",
@@ -337,7 +337,6 @@
     "Example: 127.0.0.1:5001": "Exemplo: 127.0.0.1:5001",
     "Authorize": "Autorizar",
     "Get your OpenRouter API token using OAuth flow. You will be redirected to openrouter.ai": "Obtenha seu token da API do OpenRouter usando o fluxo OAuth. Você será redirecionado para openrouter.ai",
-    "Legacy API (pre-OAI, no streaming)": "API legada (pré-OAI, sem streaming)",
     "Bypass status check": "Ignorar verificação de status",
     "Chat Completion Source": "Fonte de conclusão de chat",
     "Reverse Proxy": "Proxy reverso",
@@ -137,7 +137,6 @@
     "Custom model (optional)": "Пользовательская модель (необязательно)",
     "Bypass API status check": "Обход проверки статуса API",
     "Example: 127.0.0.1:5000": "Пример: http://127.0.0.1:5000",
-    "Legacy API (pre-OAI, no streaming)": "Устаревший API (до OAI, без стриминга)",
     "Bypass status check": "Обход проверки статуса",
     "Mancer API key": "Ключ от Mancer API",
     "to get your OpenAI API key.": "для получения ключа от API OpenAI",
@@ -1808,7 +1807,6 @@
     "Your chat is still saving...": "Чат всё ещё сохраняется...",
     "Character ${0} not found in the list": "Персонаж ${0} не найден в списке",
     "Streaming is enabled, but the version of Kobold used does not support token streaming.": "Включён стриминг текста, но ваша версия Kobold не поддерживает стриминг токенов.",
-    "Streaming is not supported for the Legacy API. Update Ooba and use new API to enable streaming.": "Для устаревшего API стриминг недоступен. Обновите oobaboga и используйте новый API, чтобы включить стриминг.",
     "Verify that the server is running and accessible.": "Убедитесь, что сервер запущен и доступен по сети.",
     "ST Server cannot be reached": "Не удалось соединиться с сервером ST",
     "You must first select a character to duplicate!": "Вы не выбрали персонажа, которого хотите клонировать!",
@@ -337,7 +337,6 @@
     "Example: 127.0.0.1:5001": "Приклад: 127.0.0.1:5001",
     "Authorize": "Авторизувати",
     "Get your OpenRouter API token using OAuth flow. You will be redirected to openrouter.ai": "Отримайте свій токен API OpenRouter за допомогою OAuth. Вас буде перенаправлено на openrouter.ai",
-    "Legacy API (pre-OAI, no streaming)": "Застарілий API (до OAI, без потокової передачі)",
     "Bypass status check": "Обійти перевірку статусу",
     "Chat Completion Source": "Джерело Chat Completion",
     "Reverse Proxy": "Зворотний проксі",
@@ -337,7 +337,6 @@
     "Example: 127.0.0.1:5001": "Ví dụ: 127.0.0.1:5001",
     "Authorize": "Ủy quyền",
     "Get your OpenRouter API token using OAuth flow. You will be redirected to openrouter.ai": "Nhận mã thông báo API OpenRouter của bạn bằng cách sử dụng luồng OAuth. Bạn sẽ được chuyển hướng đến openrouter.ai",
-    "Legacy API (pre-OAI, no streaming)": "API cũ (trước OAI, không có streaming)",
     "Bypass status check": "Bỏ qua check trạng thái",
     "Chat Completion Source": "Nguồn cho Chat Completion",
     "Reverse Proxy": "Proxy",
@@ -351,7 +351,6 @@
     "Example: 127.0.0.1:5001": "示例:127.0.0.1:5001",
     "Authorize": "授权",
     "Get your OpenRouter API token using OAuth flow. You will be redirected to openrouter.ai": "使用OAuth流程获取您的OpenRouter API令牌。您将被重定向到openrouter.ai",
-    "Legacy API (pre-OAI, no streaming)": "旧版API(OAI之前,无流式传输)",
     "Bypass status check": "绕过状态检查",
     "Chat Completion Source": "聊天补全来源",
     "Custom (OpenAI-compatible)": "自定义(兼容 OpenAI)",
@@ -338,7 +338,6 @@
     "Example: 127.0.0.1:5001": "範例:127.0.0.1:5001",
     "Authorize": "授權",
     "Get your OpenRouter API token using OAuth flow. You will be redirected to openrouter.ai": "使用 OAuth 流程取得您的 OpenRouter API 符記。您將被重新導向到 openrouter.ai",
-    "Legacy API (pre-OAI, no streaming)": "傳統 API(OAI之前,無串流)",
     "Bypass status check": "繞過狀態檢查",
     "Chat Completion Source": "聊天補充來源",
     "Reverse Proxy": "反向代理伺服器",
@@ -1184,7 +1184,6 @@ async function getStatusTextgen() {
             body: JSON.stringify({
                 api_server: endpoint,
                 api_type: textgen_settings.type,
-                legacy_api: textgen_settings.legacy_api && textgen_settings.type === OOBA,
             }),
             signal: abortStatusCheck.signal,
         });
@@ -3447,15 +3446,6 @@ export async function Generate(type, { automatic_trigger, force_name2, quiet_pro
         return Promise.resolve();
     }
 
-    if (main_api === 'textgenerationwebui' &&
-        textgen_settings.streaming &&
-        textgen_settings.legacy_api &&
-        textgen_settings.type === OOBA) {
-        toastr.error(t`Streaming is not supported for the Legacy API. Update Ooba and use new API to enable streaming.`, undefined, { timeOut: 10000, preventDuplicates: true });
-        unblockGeneration(type);
-        return Promise.resolve();
-    }
-
     if (isHordeGenerationNotAllowed()) {
         unblockGeneration(type);
         return Promise.resolve();
@@ -303,9 +303,6 @@ let movingUIPresets = [];
 export let context_presets = [];
 
 const storage_keys = {
-    auto_connect_legacy: 'AutoConnectEnabled',
-    auto_load_chat_legacy: 'AutoLoadChatEnabled',
-
     storyStringValidationCache: 'StoryStringValidationCache',
 };
 
@@ -1429,20 +1426,6 @@ async function loadPowerUserSettings(settings, data) {
         context_presets = data.context;
     }
 
-    // These are still local storage. Delete in 1.12.7
-    const autoLoadChat = localStorage.getItem(storage_keys.auto_load_chat_legacy);
-    const autoConnect = localStorage.getItem(storage_keys.auto_connect_legacy);
-
-    if (autoLoadChat) {
-        power_user.auto_load_chat = autoLoadChat === 'true';
-        localStorage.removeItem(storage_keys.auto_load_chat_legacy);
-    }
-
-    if (autoConnect) {
-        power_user.auto_connect = autoConnect === 'true';
-        localStorage.removeItem(storage_keys.auto_connect_legacy);
-    }
-
     if (power_user.chat_display === '') {
         power_user.chat_display = chat_styles.DEFAULT;
     }
@@ -183,7 +183,6 @@ const settings = {
     aphrodite_model: '',
     dreamgen_model: 'opus-v1-xl/text',
     tabby_model: '',
-    legacy_api: false,
     sampler_order: KOBOLDCPP_ORDER,
     logit_bias: [],
     n: 1,
@@ -252,7 +251,6 @@ export const setting_names = [
    'grammar_string',
    'json_schema',
    'banned_tokens',
-   'legacy_api',
    'ignore_eos_token',
    'spaces_between_special_tokens',
    'speculative_ngram',
@@ -329,22 +327,11 @@ async function selectPreset(name) {
 function formatTextGenURL(value) {
     try {
         const noFormatTypes = [MANCER, TOGETHERAI, INFERMATICAI, DREAMGEN, OPENROUTER];
-        const legacyApiTypes = [OOBA];
         if (noFormatTypes.includes(settings.type)) {
             return value;
         }
 
         const url = new URL(value);
-        if (legacyApiTypes.includes(settings.type)) {
-            if (url.pathname === '/api' && !settings.legacy_api) {
-                toastr.info('Enable Legacy API or start Ooba with the OpenAI extension enabled.', 'Legacy API URL detected. Generation may fail.', { preventDuplicates: true, timeOut: 10000, extendedTimeOut: 20000 });
-                url.pathname = '';
-            }
-
-            if (!power_user.relaxed_api_urls && settings.legacy_api) {
-                url.pathname = '/api';
-            }
-        }
         return url.toString();
     } catch {
         // Just using URL as a validation check
@@ -1185,7 +1172,6 @@ export function getTextGenGenerationData(finalPrompt, maxTokens, isImpersonate,
         'banned_strings': banned_strings,
         'api_type': settings.type,
         'api_server': getTextGenServer(),
-        'legacy_api': settings.legacy_api && settings.type === OOBA,
         'sampler_order': settings.type === textgen_types.KOBOLDCPP ? settings.sampler_order : undefined,
         'xtc_threshold': settings.xtc_threshold,
         'xtc_probability': settings.xtc_probability,
@@ -889,7 +889,6 @@ function getTextgenAPITokenizationParams(str) {
         text: str,
         api_type: textgen_settings.type,
         url: getTextGenServer(),
-        legacy_api: textgen_settings.legacy_api && textgen_settings.type === OOBA,
         vllm_model: textgen_settings.vllm_model,
         aphrodite_model: textgen_settings.aphrodite_model,
     };
@@ -112,41 +112,38 @@ router.post('/status', jsonParser, async function (request, response) {
         let url = baseUrl;
         let result = '';
 
-        if (request.body.legacy_api) {
-            url += '/v1/model';
-        } else {
-            switch (apiType) {
-                case TEXTGEN_TYPES.OOBA:
-                case TEXTGEN_TYPES.VLLM:
-                case TEXTGEN_TYPES.APHRODITE:
-                case TEXTGEN_TYPES.KOBOLDCPP:
-                case TEXTGEN_TYPES.LLAMACPP:
-                case TEXTGEN_TYPES.INFERMATICAI:
-                case TEXTGEN_TYPES.OPENROUTER:
-                    url += '/v1/models';
-                    break;
-                case TEXTGEN_TYPES.DREAMGEN:
-                    url += '/api/openai/v1/models';
-                    break;
-                case TEXTGEN_TYPES.MANCER:
-                    url += '/oai/v1/models';
-                    break;
-                case TEXTGEN_TYPES.TABBY:
-                    url += '/v1/model/list';
-                    break;
-                case TEXTGEN_TYPES.TOGETHERAI:
-                    url += '/api/models?&info';
-                    break;
-                case TEXTGEN_TYPES.OLLAMA:
-                    url += '/api/tags';
-                    break;
-                case TEXTGEN_TYPES.FEATHERLESS:
-                    url += '/v1/models';
-                    break;
-                case TEXTGEN_TYPES.HUGGINGFACE:
-                    url += '/info';
-                    break;
-            }
-        }
+        switch (apiType) {
+            case TEXTGEN_TYPES.OOBA:
+            case TEXTGEN_TYPES.VLLM:
+            case TEXTGEN_TYPES.APHRODITE:
+            case TEXTGEN_TYPES.KOBOLDCPP:
+            case TEXTGEN_TYPES.LLAMACPP:
+            case TEXTGEN_TYPES.INFERMATICAI:
+            case TEXTGEN_TYPES.OPENROUTER:
+                url += '/v1/models';
+                break;
+            case TEXTGEN_TYPES.DREAMGEN:
+                url += '/api/openai/v1/models';
+                break;
+            case TEXTGEN_TYPES.MANCER:
+                url += '/oai/v1/models';
+                break;
+            case TEXTGEN_TYPES.TABBY:
+                url += '/v1/model/list';
+                break;
+            case TEXTGEN_TYPES.TOGETHERAI:
+                url += '/api/models?&info';
+                break;
+            case TEXTGEN_TYPES.OLLAMA:
+                url += '/api/tags';
+                break;
+            case TEXTGEN_TYPES.FEATHERLESS:
+                url += '/v1/models';
+                break;
+            case TEXTGEN_TYPES.HUGGINGFACE:
+                url += '/info';
+                break;
+        }
 
         const modelsReply = await fetch(url, args);
@@ -160,11 +157,6 @@ router.post('/status', jsonParser, async function (request, response) {
         /** @type {any} */
         let data = await modelsReply.json();
 
-        if (request.body.legacy_api) {
-            console.log('Legacy API response:', data);
-            return response.send({ result: data?.result });
-        }
-
         // Rewrap to OAI-like response
         if (apiType === TEXTGEN_TYPES.TOGETHERAI && Array.isArray(data)) {
             data = { data: data.map(x => ({ id: x.name, ...x })) };
@@ -259,37 +251,33 @@ router.post('/generate', jsonParser, async function (request, response) {
 
         let url = trimV1(baseUrl);
 
-        if (request.body.legacy_api) {
-            url += '/v1/generate';
-        } else {
-            switch (request.body.api_type) {
-                case TEXTGEN_TYPES.VLLM:
-                case TEXTGEN_TYPES.FEATHERLESS:
-                case TEXTGEN_TYPES.APHRODITE:
-                case TEXTGEN_TYPES.OOBA:
-                case TEXTGEN_TYPES.TABBY:
-                case TEXTGEN_TYPES.KOBOLDCPP:
-                case TEXTGEN_TYPES.TOGETHERAI:
-                case TEXTGEN_TYPES.INFERMATICAI:
-                case TEXTGEN_TYPES.HUGGINGFACE:
-                    url += '/v1/completions';
-                    break;
-                case TEXTGEN_TYPES.DREAMGEN:
-                    url += '/api/openai/v1/completions';
-                    break;
-                case TEXTGEN_TYPES.MANCER:
-                    url += '/oai/v1/completions';
-                    break;
-                case TEXTGEN_TYPES.LLAMACPP:
-                    url += '/completion';
-                    break;
-                case TEXTGEN_TYPES.OLLAMA:
-                    url += '/api/generate';
-                    break;
-                case TEXTGEN_TYPES.OPENROUTER:
-                    url += '/v1/chat/completions';
-                    break;
-            }
-        }
+        switch (request.body.api_type) {
+            case TEXTGEN_TYPES.VLLM:
+            case TEXTGEN_TYPES.FEATHERLESS:
+            case TEXTGEN_TYPES.APHRODITE:
+            case TEXTGEN_TYPES.OOBA:
+            case TEXTGEN_TYPES.TABBY:
+            case TEXTGEN_TYPES.KOBOLDCPP:
+            case TEXTGEN_TYPES.TOGETHERAI:
+            case TEXTGEN_TYPES.INFERMATICAI:
+            case TEXTGEN_TYPES.HUGGINGFACE:
+                url += '/v1/completions';
+                break;
+            case TEXTGEN_TYPES.DREAMGEN:
+                url += '/api/openai/v1/completions';
+                break;
+            case TEXTGEN_TYPES.MANCER:
+                url += '/oai/v1/completions';
+                break;
+            case TEXTGEN_TYPES.LLAMACPP:
+                url += '/completion';
+                break;
+            case TEXTGEN_TYPES.OLLAMA:
+                url += '/api/generate';
+                break;
+            case TEXTGEN_TYPES.OPENROUTER:
+                url += '/v1/chat/completions';
+                break;
+        }
 
         const args = {
@@ -370,12 +358,6 @@ router.post('/generate', jsonParser, async function (request, response) {
             const data = await completionsReply.json();
             console.log('Endpoint response:', data);
 
-            // Wrap legacy response to OAI completions format
-            if (request.body.legacy_api) {
-                const text = data?.results[0]?.text;
-                data['choices'] = [{ text }];
-            }
-
             // Map InfermaticAI response to OAI completions format
             if (apiType === TEXTGEN_TYPES.INFERMATICAI) {
                 data['choices'] = (data?.choices || []).map(choice => ({ text: choice?.message?.content || choice.text, logprobs: choice?.logprobs, index: choice?.index }));
@@ -942,7 +942,6 @@ router.post('/remote/textgenerationwebui/encode', jsonParser, async function (re
     }
     const text = String(request.body.text) || '';
     const baseUrl = String(request.body.url);
-    const legacyApi = Boolean(request.body.legacy_api);
     const vllmModel = String(request.body.vllm_model) || '';
     const aphroditeModel = String(request.body.aphrodite_model) || '';
 
@@ -957,36 +956,31 @@ router.post('/remote/textgenerationwebui/encode', jsonParser, async function (re
         // Convert to string + remove trailing slash + /v1 suffix
         let url = String(baseUrl).replace(/\/$/, '').replace(/\/v1$/, '');
 
-        if (legacyApi) {
-            url += '/v1/token-count';
-            args.body = JSON.stringify({ 'prompt': text });
-        } else {
-            switch (request.body.api_type) {
-                case TEXTGEN_TYPES.TABBY:
-                    url += '/v1/token/encode';
-                    args.body = JSON.stringify({ 'text': text });
-                    break;
-                case TEXTGEN_TYPES.KOBOLDCPP:
-                    url += '/api/extra/tokencount';
-                    args.body = JSON.stringify({ 'prompt': text });
-                    break;
-                case TEXTGEN_TYPES.LLAMACPP:
-                    url += '/tokenize';
-                    args.body = JSON.stringify({ 'content': text });
-                    break;
-                case TEXTGEN_TYPES.VLLM:
-                    url += '/tokenize';
-                    args.body = JSON.stringify({ 'model': vllmModel, 'prompt': text });
-                    break;
-                case TEXTGEN_TYPES.APHRODITE:
-                    url += '/v1/tokenize';
-                    args.body = JSON.stringify({ 'model': aphroditeModel, 'prompt': text });
-                    break;
-                default:
-                    url += '/v1/internal/encode';
-                    args.body = JSON.stringify({ 'text': text });
-                    break;
-            }
-        }
+        switch (request.body.api_type) {
+            case TEXTGEN_TYPES.TABBY:
+                url += '/v1/token/encode';
+                args.body = JSON.stringify({ 'text': text });
+                break;
+            case TEXTGEN_TYPES.KOBOLDCPP:
+                url += '/api/extra/tokencount';
+                args.body = JSON.stringify({ 'prompt': text });
+                break;
+            case TEXTGEN_TYPES.LLAMACPP:
+                url += '/tokenize';
+                args.body = JSON.stringify({ 'content': text });
+                break;
+            case TEXTGEN_TYPES.VLLM:
+                url += '/tokenize';
+                args.body = JSON.stringify({ 'model': vllmModel, 'prompt': text });
+                break;
+            case TEXTGEN_TYPES.APHRODITE:
+                url += '/v1/tokenize';
+                args.body = JSON.stringify({ 'model': aphroditeModel, 'prompt': text });
+                break;
+            default:
+                url += '/v1/internal/encode';
+                args.body = JSON.stringify({ 'text': text });
+                break;
+        }
 
         const result = await fetch(url, args);
@@ -997,8 +991,8 @@ router.post('/remote/textgenerationwebui/encode', jsonParser, async function (re
         }
 
         const data = await result.json();
-        const count = legacyApi ? data?.results[0]?.tokens : (data?.length ?? data?.count ?? data?.value ?? data?.tokens?.length);
-        const ids = legacyApi ? [] : (data?.tokens ?? data?.ids ?? []);
+        const count = (data?.length ?? data?.count ?? data?.value ?? data?.tokens?.length);
+        const ids = (data?.tokens ?? data?.ids ?? []);
 
         return response.send({ count, ids });
     } catch (error) {