Compare commits
9 Commits
7de9a0e0cb ... 3fb54e0a38
Author | SHA1 | Date |
---|---|---|
Cohee | 3fb54e0a38 | |
Cohee | be7eb8b2b5 | |
Cohee | 3b6372431a | |
sirius422 | 389ee7917f | |
Cohee | 212e61d2a1 | |
Cohee | 1b60e4a013 | |
Aisu Wata | 93cd93ada3 | |
based | 2e50d61590 | |
based | 8430212474 | |
```diff
@@ -2380,6 +2380,15 @@
<input id="openai_proxy_password" type="password" class="text_pole flex1" placeholder="" maxlength="5000" form="openai_form" autocomplete="off" />
<div id="openai_proxy_password_show" title="Peek a password" class="menu_button fa-solid fa-eye-slash fa-fw"></div>
</div>
<div class="">
<div class="range-block-title justifyLeft" id="proxy_current_source" data-i18n="Current source: ">
Current source:
</div>
<label for="openai_show_external_models" class="checkbox_label">
<input id="proxy_save_completion_source" type="checkbox" />
<span data-i18n="Save Model/Completion Source?">Save Model/Completion Source?</span>
</label>
</div>
</div>
</div>
<form id="openai_form" data-source="openai" action="javascript:void(null);" method="post" enctype="multipart/form-data">
```
```diff
@@ -354,6 +354,9 @@ export let proxies = [
name: 'None',
url: '',
password: '',
save_source: false,
model: '',
source: '',
},
];
export let selected_proxy = proxies[0];
```
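The default proxy preset gains three fields. A minimal sketch of the extended shape, using only the fields visible in the hunk above:

```js
// Sketch of the extended proxy preset shape (illustrative, not part of the diff).
const exampleProxyPreset = {
    name: 'None',        // preset display name
    url: '',             // reverse proxy URL
    password: '',        // proxy password stored per preset
    save_source: false,  // restore model/completion source together with the preset?
    model: '',           // model to restore when save_source is true
    source: '',          // chat completion source to restore when save_source is true
};
```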
```diff
@@ -2692,7 +2695,6 @@ function loadOpenAISettings(data, settings) {
oai_settings.chat_completion_source = settings.chat_completion_source ?? default_settings.chat_completion_source;
oai_settings.api_url_scale = settings.api_url_scale ?? default_settings.api_url_scale;
oai_settings.show_external_models = settings.show_external_models ?? default_settings.show_external_models;
oai_settings.proxy_password = settings.proxy_password ?? default_settings.proxy_password;
oai_settings.assistant_prefill = settings.assistant_prefill ?? default_settings.assistant_prefill;
oai_settings.human_sysprompt_message = settings.human_sysprompt_message ?? default_settings.human_sysprompt_message;
oai_settings.image_inlining = settings.image_inlining ?? default_settings.image_inlining;
```
```diff
@@ -2727,7 +2729,6 @@ function loadOpenAISettings(data, settings) {
$('#stream_toggle').prop('checked', oai_settings.stream_openai);
$('#websearch_toggle').prop('checked', oai_settings.websearch_cohere);
$('#api_url_scale').val(oai_settings.api_url_scale);
$('#openai_proxy_password').val(oai_settings.proxy_password);
$('#claude_assistant_prefill').val(oai_settings.assistant_prefill);
$('#claude_human_sysprompt_textarea').val(oai_settings.human_sysprompt_message);
$('#openai_image_inlining').prop('checked', oai_settings.image_inlining);
```
```diff
@@ -2813,11 +2814,6 @@ function loadOpenAISettings(data, settings) {
$('#seed_openai').val(oai_settings.seed);
$('#n_openai').val(oai_settings.n);

if (settings.reverse_proxy !== undefined) oai_settings.reverse_proxy = settings.reverse_proxy;
$('#openai_reverse_proxy').val(oai_settings.reverse_proxy);

$('.reverse_proxy_warning').toggle(oai_settings.reverse_proxy !== '');

$('#openai_logit_bias_preset').empty();
for (const preset of Object.keys(oai_settings.bias_presets)) {
const option = document.createElement('option');
```
```diff
@@ -3019,8 +3015,6 @@ async function saveOpenAIPreset(name, settings, triggerUi = true) {
new_example_chat_prompt: settings.new_example_chat_prompt,
continue_nudge_prompt: settings.continue_nudge_prompt,
bias_preset_selected: settings.bias_preset_selected,
reverse_proxy: settings.reverse_proxy,
proxy_password: settings.proxy_password,
max_context_unlocked: settings.max_context_unlocked,
wi_format: settings.wi_format,
scenario_format: settings.scenario_format,
```
```diff
@@ -3250,7 +3244,8 @@ async function onExportPresetClick() {
delete preset.proxy_password;

const presetJsonString = JSON.stringify(preset, null, 4);
download(presetJsonString, oai_settings.preset_settings_openai, 'application/json');
const presetFileName = `${oai_settings.preset_settings_openai}.json`;
download(presetJsonString, presetFileName, 'application/json');
}

async function onLogitBiasPresetImportFileChange(e) {
```
```diff
@@ -3298,7 +3293,8 @@ function onLogitBiasPresetExportClick() {
}

const presetJsonString = JSON.stringify(oai_settings.bias_presets[oai_settings.bias_preset_selected], null, 4);
download(presetJsonString, oai_settings.bias_preset_selected, 'application/json');
const presetFileName = `${oai_settings.bias_preset_selected}.json`;
download(presetJsonString, presetFileName, 'application/json');
}

async function onDeletePresetClick() {
```
```diff
@@ -3401,7 +3397,6 @@ function onSettingsPresetChange() {
new_example_chat_prompt: ['#newexamplechat_prompt_textarea', 'new_example_chat_prompt', false],
continue_nudge_prompt: ['#continue_nudge_prompt_textarea', 'continue_nudge_prompt', false],
bias_preset_selected: ['#openai_logit_bias_preset', 'bias_preset_selected', false],
reverse_proxy: ['#openai_reverse_proxy', 'reverse_proxy', false],
wi_format: ['#wi_format_textarea', 'wi_format', false],
scenario_format: ['#scenario_format_textarea', 'scenario_format', false],
personality_format: ['#personality_format_textarea', 'personality_format', false],
```
```diff
@@ -3412,7 +3407,6 @@ function onSettingsPresetChange() {
prompt_order: ['', 'prompt_order', false],
api_url_scale: ['#api_url_scale', 'api_url_scale', false],
show_external_models: ['#openai_show_external_models', 'show_external_models', true],
proxy_password: ['#openai_proxy_password', 'proxy_password', false],
assistant_prefill: ['#claude_assistant_prefill', 'assistant_prefill', false],
human_sysprompt_message: ['#claude_human_sysprompt_textarea', 'human_sysprompt_message', false],
use_ai21_tokenizer: ['#use_ai21_tokenizer', 'use_ai21_tokenizer', true],
```
```diff
@@ -3821,6 +3815,7 @@ async function onModelChange() {

saveSettingsDebounced();
eventSource.emit(event_types.CHATCOMPLETION_MODEL_CHANGED, value);
$('#proxy_current_source').text(`Current source: ${oai_settings.chat_completion_source}/${retrieveModel()}`);
}

async function onOpenrouterModelSortChange() {
```
```diff
@@ -4180,7 +4175,8 @@ export function loadProxyPresets(settings) {
} else {
proxies = proxyPresets;
}

//sort alphabetically
proxyPresets.sort((a, b) => a.name.localeCompare(b.name));
$('#openai_proxy_preset').empty();

for (const preset of proxyPresets) {
```
```diff
@@ -4191,17 +4187,91 @@ export function loadProxyPresets(settings) {
$('#openai_proxy_preset').append(option);
}
$('#openai_proxy_preset').val(selected_proxy.name);
setProxyPreset(selected_proxy.name, selected_proxy.url, selected_proxy.password);
setProxyPreset(selected_proxy.name, selected_proxy.url, selected_proxy.password, selected_proxy.save_source, selected_proxy.model, selected_proxy.source);
}

function setProxyPreset(name, url, password) {
// two of the functions of all time
function retrieveModel() {
let model;
switch (oai_settings.chat_completion_source) {
case chat_completion_sources.OPENAI:
model = oai_settings.openai_model;
break;
case chat_completion_sources.CLAUDE:
model = oai_settings.claude_model;
break;
case chat_completion_sources.WINDOWAI:
model = oai_settings.windowai_model;
break;
case chat_completion_sources.OPENROUTER:
model = oai_settings.openrouter_model;
break;
case chat_completion_sources.AI21:
model = oai_settings.ai21_model;
break;
case chat_completion_sources.MISTRALAI:
model = oai_settings.mistralai_model;
break;
case chat_completion_sources.CUSTOM:
model = oai_settings.custom_model;
break;
default:
model = '';
}
return model;
}

function setModel(model) {
switch (oai_settings.chat_completion_source) {
case chat_completion_sources.OPENAI:
oai_settings.openai_model = model;
$('#model_openai_select').val(model);
break;
case chat_completion_sources.CLAUDE:
oai_settings.claude_model = model;
$('#model_claude_select').val(model);
break;
case chat_completion_sources.WINDOWAI:
oai_settings.windowai_model = model;
$('#model_windowai_select').val(model);
break;
case chat_completion_sources.MAKERSUITE:
oai_settings.google_model = model;
$('#model_google_select').val(model);
break;
case chat_completion_sources.OPENROUTER:
oai_settings.openrouter_model = model;
$('#model_openrouter_select').val(model);
break;
case chat_completion_sources.AI21:
oai_settings.ai21_model = model;
$('#model_ai21_select').val(model);
break;
case chat_completion_sources.MISTRALAI:
oai_settings.mistralai_model = model;
$('#model_mistralai_select').val(model);
break;
default:
console.log('Invalid chat completion source');
}
toggleChatCompletionForms();
}

function setProxyPreset(name, url, password, save_source, update) {
name = name.trim();
const preset = proxies.find(p => p.name === name);

const model = save_source ? (update ? retrieveModel() : preset.model) : '';
const source = save_source ? (update ? oai_settings.chat_completion_source : preset.source) : '';
if (preset) {
preset.url = url;
preset.password = password;
preset.save_source = save_source;
preset.model = model;
preset.source = source;
selected_proxy = preset;
} else {
let new_proxy = { name, url, password };
let new_proxy = { name, url, password, save_source, model, source };
proxies.push(new_proxy);
selected_proxy = new_proxy;
}
```
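To make the new save_source/update handling concrete, here is a small standalone sketch of the decision logic in setProxyPreset above, with simplified names (illustrative, not the actual module code):

```js
// Sketch of the save_source/update logic from setProxyPreset (illustrative only).
function resolveSavedSource(preset, saveSource, update, currentSource, currentModel) {
    // Saving from the UI (update === true) captures the live source/model;
    // re-selecting an existing preset restores whatever was stored on it.
    const model = saveSource ? (update ? currentModel : preset.model) : '';
    const source = saveSource ? (update ? currentSource : preset.source) : '';
    return { model, source };
}

// Example: saving a preset while Claude is the active completion source.
console.log(resolveSavedSource({ model: '', source: '' }, true, true, 'claude', 'claude-2.1'));
// -> { model: 'claude-2.1', source: 'claude' }
```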
```diff
@@ -4211,7 +4281,15 @@ function setProxyPreset(name, url, password) {
$('#openai_reverse_proxy').val(oai_settings.reverse_proxy);
oai_settings.proxy_password = password;
$('#openai_proxy_password').val(oai_settings.proxy_password);
$('#proxy_save_completion_source').prop('checked', save_source === undefined ? false : save_source);
if (save_source && model && source) {
oai_settings.chat_completion_source = source;
$('#chat_completion_source').val(oai_settings.chat_completion_source).trigger('change');
setModel(model);
}

reconnectOpenAi();
$('.reverse_proxy_warning').toggle(oai_settings.reverse_proxy !== '');
}

function onProxyPresetChange() {
```
```diff
@@ -4219,7 +4297,7 @@ function onProxyPresetChange() {
const selectedPreset = proxies.find(preset => preset.name === value);

if (selectedPreset) {
setProxyPreset(selectedPreset.name, selectedPreset.url, selectedPreset.password);
setProxyPreset(selectedPreset.name, selectedPreset.url, selectedPreset.password, selectedPreset.save_source);
} else {
console.error(`Proxy preset "${value}" not found in proxies array.`);
}
```
```diff
@@ -4227,11 +4305,12 @@ function onProxyPresetChange() {
}

$('#save_proxy').on('click', async function () {
const presetName = $('#openai_reverse_proxy_name').val();
const reverseProxy = $('#openai_reverse_proxy').val();
const proxyPassword = $('#openai_proxy_password').val();
const presetName = $('#openai_reverse_proxy_name').val().trim();
const reverseProxy = $('#openai_reverse_proxy').val().trim();
const proxyPassword = $('#openai_proxy_password').val().trim();
const saveSource = $('#proxy_save_completion_source').is(':checked');

setProxyPreset(presetName, reverseProxy, proxyPassword);
setProxyPreset(presetName, reverseProxy, proxyPassword, saveSource, saveSource);
saveSettingsDebounced();
toastr.success('Proxy Saved');
if ($('#openai_proxy_preset').val() !== presetName) {
```
```diff
@@ -4245,7 +4324,7 @@ $('#save_proxy').on('click', async function () {
});

$('#delete_proxy').on('click', async function () {
const presetName = $('#openai_reverse_proxy_name').val();
const presetName = $('#openai_reverse_proxy_name').val().trim();
const index = proxies.findIndex(preset => preset.name === presetName);

if (index !== -1) {
```
```diff
@@ -4256,7 +4335,7 @@ $('#delete_proxy').on('click', async function () {
const newIndex = Math.max(0, index - 1);
selected_proxy = proxies[newIndex];
} else {
selected_proxy = { name: 'None', url: '', password: '' };
selected_proxy = { name: 'None', url: '', password: '', save_source: false, model: '', source: '' };
}

$('#openai_reverse_proxy_name').val(selected_proxy.name);
```
```diff
@@ -4264,7 +4343,11 @@ $('#delete_proxy').on('click', async function () {
$('#openai_reverse_proxy').val(selected_proxy.url);
oai_settings.proxy_password = selected_proxy.password;
$('#openai_proxy_password').val(selected_proxy.password);

if (selected_proxy.save_source) {
oai_settings.chat_completion_source = selected_proxy.source;
$('#chat_completion_source').val(oai_settings.chat_completion_source).trigger('change');
setModel(selected_proxy.model);
}
saveSettingsDebounced();
$('#openai_proxy_preset').val(selected_proxy.name);
toastr.success('Proxy Deleted');
```
```diff
@@ -1000,6 +1000,7 @@ body .panelControlBar {
padding-left: 10px;
width: 100%;
overflow-x: hidden;
overflow-y: clip;
}

.mes_text {
```
server.js (61 changed lines)
```diff
@@ -45,7 +45,6 @@ const {
forwardFetchResponse,
} = require('./src/util');
const { ensureThumbnailCache } = require('./src/endpoints/thumbnails');
const { loadTokenizers } = require('./src/endpoints/tokenizers');

// Work around a node v20.0.0, v20.1.0, and v20.2.0 bug. The issue was fixed in v20.3.0.
// https://github.com/nodejs/node/issues/47822#issuecomment-1564708870
```
```diff
@@ -543,22 +542,12 @@ const setupTasks = async function () {
}
console.log();

// TODO: do endpoint init functions depend on certain directories existing or not existing? They should be callable
// in any order for encapsulation reasons, but right now it's unknown if that would break anything.
await userModule.initUserStorage(dataRoot);

if (listen && !basicAuthMode && enableAccounts) {
await userModule.checkAccountsProtection();
}

await settingsEndpoint.init();
const directories = await userModule.ensurePublicDirectoriesExist();
await userModule.migrateUserData();
const directories = await userModule.getUserDirectoriesList();
await contentManager.checkForNewContent(directories);
await ensureThumbnailCache();
cleanUploads();

await loadTokenizers();
await settingsEndpoint.init();
await statsEndpoint.init();

const cleanupPlugins = await loadPlugins();
```
```diff
@@ -581,7 +570,6 @@ const setupTasks = async function () {
exitProcess();
});


console.log('Launching...');

if (autorun) open(autorunUrl.toString());
```
```diff
@@ -601,6 +589,9 @@ const setupTasks = async function () {
}
}

if (listen && !basicAuthMode && enableAccounts) {
await userModule.checkAccountsProtection();
}
};

/**
```
```diff
@@ -642,21 +633,27 @@ function setWindowTitle(title) {
}
}

if (cliArguments.ssl) {
https.createServer(
{
cert: fs.readFileSync(cliArguments.certPath),
key: fs.readFileSync(cliArguments.keyPath),
}, app)
.listen(
Number(tavernUrl.port) || 443,
tavernUrl.hostname,
setupTasks,
);
} else {
http.createServer(app).listen(
Number(tavernUrl.port) || 80,
tavernUrl.hostname,
setupTasks,
);
}
// User storage module needs to be initialized before starting the server
userModule.initUserStorage(dataRoot)
.then(userModule.ensurePublicDirectoriesExist)
.then(userModule.migrateUserData)
.finally(() => {
if (cliArguments.ssl) {
https.createServer(
{
cert: fs.readFileSync(cliArguments.certPath),
key: fs.readFileSync(cliArguments.keyPath),
}, app)
.listen(
Number(tavernUrl.port) || 443,
tavernUrl.hostname,
setupTasks,
);
} else {
http.createServer(app).listen(
Number(tavernUrl.port) || 80,
tavernUrl.hostname,
setupTasks,
);
}
});
```
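The server.js hunk above defers binding the HTTP/HTTPS listener until user storage has been initialized, chaining the setup steps as promises and starting the server in `.finally()`. A minimal standalone sketch of that pattern, under assumed names (not the actual module code):

```js
const http = require('http');

// Sketch: run async init steps in order, then start listening.
// .finally() mirrors the diff above: the server still comes up even if a step rejects.
function startServer(requestHandler, port, initSteps) {
    return initSteps
        .reduce((chain, step) => chain.then(step), Promise.resolve())
        .finally(() => {
            http.createServer(requestHandler).listen(port, () => {
                console.log(`Listening on port ${port}`);
            });
        });
}

// Usage sketch with hypothetical init steps:
// startServer(app, 8000, [initUserStorage, ensurePublicDirectoriesExist, migrateUserData]);
```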
```diff
@@ -10,6 +10,10 @@ const { TEXTGEN_TYPES } = require('../constants');
const { jsonParser } = require('../express-common');
const { setAdditionalHeaders } = require('../additional-headers');

/**
 * @typedef { (req: import('express').Request, res: import('express').Response) => Promise<any> } TokenizationHandler
 */

/**
 * @type {{[key: string]: import("@dqbd/tiktoken").Tiktoken}} Tokenizers cache
 */
```
```diff
@@ -48,16 +52,30 @@ const TEXT_COMPLETION_MODELS = [

const CHARS_PER_TOKEN = 3.35;

/**
 * Sentencepiece tokenizer for tokenizing text.
 */
class SentencePieceTokenizer {
/**
 * @type {import('@agnai/sentencepiece-js').SentencePieceProcessor} Sentencepiece tokenizer instance
 */
#instance;
/**
 * @type {string} Path to the tokenizer model
 */
#model;

/**
 * Creates a new Sentencepiece tokenizer.
 * @param {string} model Path to the tokenizer model
 */
constructor(model) {
this.#model = model;
}

/**
 * Gets the Sentencepiece tokenizer instance.
 * @returns {Promise<import('@agnai/sentencepiece-js').SentencePieceProcessor|null>} Sentencepiece tokenizer instance
 */
async get() {
if (this.#instance) {
```
```diff
@@ -76,18 +94,61 @@ class SentencePieceTokenizer {
}
}

const spp_llama = new SentencePieceTokenizer('src/sentencepiece/llama.model');
const spp_nerd = new SentencePieceTokenizer('src/sentencepiece/nerdstash.model');
const spp_nerd_v2 = new SentencePieceTokenizer('src/sentencepiece/nerdstash_v2.model');
const spp_mistral = new SentencePieceTokenizer('src/sentencepiece/mistral.model');
const spp_yi = new SentencePieceTokenizer('src/sentencepiece/yi.model');
let claude_tokenizer;
/**
 * Web tokenizer for tokenizing text.
 */
class WebTokenizer {
/**
 * @type {Tokenizer} Web tokenizer instance
 */
#instance;
/**
 * @type {string} Path to the tokenizer model
 */
#model;

/**
 * Creates a new Web tokenizer.
 * @param {string} model Path to the tokenizer model
 */
constructor(model) {
this.#model = model;
}

/**
 * Gets the Web tokenizer instance.
 * @returns {Promise<Tokenizer|null>} Web tokenizer instance
 */
async get() {
if (this.#instance) {
return this.#instance;
}

try {
const arrayBuffer = fs.readFileSync(this.#model).buffer;
this.#instance = await Tokenizer.fromJSON(arrayBuffer);
console.log('Instantiated the tokenizer for', path.parse(this.#model).name);
return this.#instance;
} catch (error) {
console.error('Web tokenizer failed to load: ' + this.#model, error);
return null;
}
}
}

const spp_llama = new SentencePieceTokenizer('src/tokenizers/llama.model');
const spp_nerd = new SentencePieceTokenizer('src/tokenizers/nerdstash.model');
const spp_nerd_v2 = new SentencePieceTokenizer('src/tokenizers/nerdstash_v2.model');
const spp_mistral = new SentencePieceTokenizer('src/tokenizers/mistral.model');
const spp_yi = new SentencePieceTokenizer('src/tokenizers/yi.model');
const claude_tokenizer = new WebTokenizer('src/tokenizers/claude.json');

const sentencepieceTokenizers = [
'llama',
'nerdstash',
'nerdstash_v2',
'mistral',
'yi',
];

/**
```
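Both new classes follow the same lazy-initialization pattern: the model path is stored in the constructor, and the heavy model file is only loaded the first time `get()` is awaited. A standalone sketch of that pattern with an assumed loader function (not the actual module code):

```js
// Sketch of the lazy-initialization pattern used by SentencePieceTokenizer and WebTokenizer.
class LazyTokenizer {
    #instance;
    #load;

    constructor(loadFn) {
        this.#load = loadFn; // async factory, only invoked on first use
    }

    async get() {
        if (this.#instance) {
            return this.#instance;
        }
        try {
            this.#instance = await this.#load();
            return this.#instance;
        } catch (error) {
            console.error('Tokenizer failed to load:', error);
            return null;
        }
    }
}

// Usage sketch: loadSentencePieceModel is a hypothetical loader; the model file
// is only read when a request first needs it, not at module load time.
// const llama = new LazyTokenizer(() => loadSentencePieceModel('src/tokenizers/llama.model'));
// const instance = await llama.get();
```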
```diff
@@ -112,6 +173,10 @@ function getSentencepiceTokenizer(model) {
return spp_nerd_v2;
}

if (model.includes('yi')) {
return spp_yi;
}

return null;
}
```
```diff
@@ -168,13 +233,23 @@ async function getTiktokenChunks(tokenizer, ids) {
return chunks;
}

async function getWebTokenizersChunks(tokenizer, ids) {
/**
 * Gets the token chunks for the given token IDs using the Web tokenizer.
 * @param {Tokenizer} tokenizer Web tokenizer instance
 * @param {number[]} ids Token IDs
 * @returns {string[]} Token chunks
 */
function getWebTokenizersChunks(tokenizer, ids) {
const chunks = [];

for (let i = 0; i < ids.length; i++) {
const id = ids[i];
const chunkText = await tokenizer.decode(new Uint32Array([id]));
for (let i = 0, lastProcessed = 0; i < ids.length; i++) {
const chunkIds = ids.slice(lastProcessed, i + 1);
const chunkText = tokenizer.decode(new Int32Array(chunkIds));
if (chunkText === '�') {
continue;
}
chunks.push(chunkText);
lastProcessed = i + 1;
}

return chunks;
```
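The rewritten getWebTokenizersChunks decodes a growing slice of IDs and only emits a chunk once it decodes to something other than the replacement character, so tokens that only form a valid character together stay grouped. A toy sketch of that idea with a stand-in decode function (not the real tokenizer API):

```js
// Sketch of the incremental chunking logic from getWebTokenizersChunks above.
function chunkIds(ids, decode) {
    const chunks = [];
    for (let i = 0, lastProcessed = 0; i < ids.length; i++) {
        // Decode everything since the last emitted chunk, so multi-token
        // byte sequences are grouped instead of producing '�' fragments.
        const text = decode(ids.slice(lastProcessed, i + 1));
        if (text === '\uFFFD') {
            continue; // still incomplete, keep accumulating IDs
        }
        chunks.push(text);
        lastProcessed = i + 1;
    }
    return chunks;
}
```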
```diff
@@ -237,17 +312,12 @@ function getTiktokenTokenizer(model) {
return tokenizer;
}

async function loadClaudeTokenizer(modelPath) {
try {
const arrayBuffer = fs.readFileSync(modelPath).buffer;
const instance = await Tokenizer.fromJSON(arrayBuffer);
return instance;
} catch (error) {
console.error('Claude tokenizer failed to load: ' + modelPath, error);
return null;
}
}

/**
 * Counts the tokens for the given messages using the Claude tokenizer.
 * @param {Tokenizer} tokenizer Web tokenizer
 * @param {object[]} messages Array of messages
 * @returns {number} Number of tokens
 */
function countClaudeTokens(tokenizer, messages) {
// Should be fine if we use the old conversion method instead of the messages API one i think?
const convertedPrompt = convertClaudePrompt(messages, false, '', false, false, '', false);
```
```diff
@@ -264,9 +334,14 @@ function countClaudeTokens(tokenizer, messages) {
/**
 * Creates an API handler for encoding Sentencepiece tokens.
 * @param {SentencePieceTokenizer} tokenizer Sentencepiece tokenizer
 * @returns {any} Handler function
 * @returns {TokenizationHandler} Handler function
 */
function createSentencepieceEncodingHandler(tokenizer) {
/**
 * Request handler for encoding Sentencepiece tokens.
 * @param {import('express').Request} request
 * @param {import('express').Response} response
 */
return async function (request, response) {
try {
if (!request.body) {
```
```diff
@@ -276,7 +351,7 @@ function createSentencepieceEncodingHandler(tokenizer) {
const text = request.body.text || '';
const instance = await tokenizer?.get();
const { ids, count } = await countSentencepieceTokens(tokenizer, text);
const chunks = await instance?.encodePieces(text);
const chunks = instance?.encodePieces(text);
return response.send({ ids, count, chunks });
} catch (error) {
console.log(error);
```
```diff
@@ -288,9 +363,14 @@ function createSentencepieceEncodingHandler(tokenizer) {
/**
 * Creates an API handler for decoding Sentencepiece tokens.
 * @param {SentencePieceTokenizer} tokenizer Sentencepiece tokenizer
 * @returns {any} Handler function
 * @returns {TokenizationHandler} Handler function
 */
function createSentencepieceDecodingHandler(tokenizer) {
/**
 * Request handler for decoding Sentencepiece tokens.
 * @param {import('express').Request} request
 * @param {import('express').Response} response
 */
return async function (request, response) {
try {
if (!request.body) {
```
```diff
@@ -299,6 +379,7 @@ function createSentencepieceDecodingHandler(tokenizer) {

const ids = request.body.ids || [];
const instance = await tokenizer?.get();
if (!instance) throw new Error('Failed to load the Sentencepiece tokenizer');
const ops = ids.map(id => instance.decodeIds([id]));
const chunks = await Promise.all(ops);
const text = chunks.join('');
```
```diff
@@ -313,9 +394,14 @@ function createSentencepieceDecodingHandler(tokenizer) {
/**
 * Creates an API handler for encoding Tiktoken tokens.
 * @param {string} modelId Tiktoken model ID
 * @returns {any} Handler function
 * @returns {TokenizationHandler} Handler function
 */
function createTiktokenEncodingHandler(modelId) {
/**
 * Request handler for encoding Tiktoken tokens.
 * @param {import('express').Request} request
 * @param {import('express').Response} response
 */
return async function (request, response) {
try {
if (!request.body) {
```
```diff
@@ -337,9 +423,14 @@ function createTiktokenEncodingHandler(modelId) {
/**
 * Creates an API handler for decoding Tiktoken tokens.
 * @param {string} modelId Tiktoken model ID
 * @returns {any} Handler function
 * @returns {TokenizationHandler} Handler function
 */
function createTiktokenDecodingHandler(modelId) {
/**
 * Request handler for decoding Tiktoken tokens.
 * @param {import('express').Request} request
 * @param {import('express').Response} response
 */
return async function (request, response) {
try {
if (!request.body) {
```
```diff
@@ -358,14 +449,6 @@ function createTiktokenDecodingHandler(modelId) {
};
}

/**
 * Loads the model tokenizers.
 * @returns {Promise<void>} Promise that resolves when the tokenizers are loaded
 */
async function loadTokenizers() {
claude_tokenizer = await loadClaudeTokenizer('src/claude.json');
}

const router = express.Router();

router.post('/ai21/count', jsonParser, async function (req, res) {
```
```diff
@@ -446,8 +529,10 @@ router.post('/openai/encode', jsonParser, async function (req, res) {

if (queryModel.includes('claude')) {
const text = req.body.text || '';
const tokens = Object.values(claude_tokenizer.encode(text));
const chunks = await getWebTokenizersChunks(claude_tokenizer, tokens);
const instance = await claude_tokenizer.get();
if (!instance) throw new Error('Failed to load the Claude tokenizer');
const tokens = Object.values(instance.encode(text));
const chunks = getWebTokenizersChunks(instance, tokens);
return res.send({ ids: tokens, count: tokens.length, chunks });
}
```
```diff
@@ -481,7 +566,9 @@ router.post('/openai/decode', jsonParser, async function (req, res) {

if (queryModel.includes('claude')) {
const ids = req.body.ids || [];
const chunkText = await claude_tokenizer.decode(new Uint32Array(ids));
const instance = await claude_tokenizer.get();
if (!instance) throw new Error('Failed to load the Claude tokenizer');
const chunkText = instance.decode(new Int32Array(ids));
return res.send({ text: chunkText });
}
```
```diff
@@ -503,7 +590,9 @@ router.post('/openai/count', jsonParser, async function (req, res) {
const model = getTokenizerModel(queryModel);

if (model === 'claude') {
num_tokens = countClaudeTokens(claude_tokenizer, req.body);
const instance = await claude_tokenizer.get();
if (!instance) throw new Error('Failed to load the Claude tokenizer');
num_tokens = countClaudeTokens(instance, req.body);
return res.send({ 'token_count': num_tokens });
}
```
```diff
@@ -665,7 +754,6 @@ module.exports = {
getTokenizerModel,
getTiktokenTokenizer,
countClaudeTokens,
loadTokenizers,
getSentencepiceTokenizer,
sentencepieceTokenizers,
router,
```
src/users.js (11 changed lines)
```diff
@@ -112,6 +112,16 @@ async function ensurePublicDirectoriesExist() {
return directoriesList;
}

/**
 * Gets a list of all user directories.
 * @returns {Promise<import('./users').UserDirectoryList[]>} - The list of user directories
 */
async function getUserDirectoriesList() {
const userHandles = await getAllUserHandles();
const directoriesList = userHandles.map(handle => getUserDirectories(handle));
return directoriesList;
}

/**
 * Perform migration from the old user data format to the new one.
 */
```
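The new getUserDirectoriesList() simply maps every user handle to its directory record. A tiny standalone sketch of the same idea with the collaborators passed in (illustrative, not the module code):

```js
// Sketch of what getUserDirectoriesList() does, with its collaborators injected.
async function listUserDirectories(getAllUserHandles, getUserDirectories) {
    const handles = await getAllUserHandles();                  // e.g. ['default-user', 'alice']
    return handles.map(handle => getUserDirectories(handle));   // one directory record per user
}

// Usage sketch with stub collaborators:
// listUserDirectories(async () => ['default-user'], handle => ({ root: `data/${handle}` }))
//     .then(console.log); // -> [{ root: 'data/default-user' }]
```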
```diff
@@ -707,6 +717,7 @@ module.exports = {
toAvatarKey,
initUserStorage,
ensurePublicDirectoriesExist,
getUserDirectoriesList,
getAllUserHandles,
getUserDirectories,
setUserDataMiddleware,
```