Compare commits

...

9 Commits

Author SHA1 Message Date
Azariel Del Carmen 43fbd08e4b Merge 770f3e5da3 into be7eb8b2b5 2024-04-26 15:36:48 +00:00
Cohee be7eb8b2b5 Merge pull request #2143 from aisu-wata0/style_mes_block_overflow_y (style: `.mes_block { overflow-y: clip; }`) 2024-04-26 18:36:17 +03:00
Cohee 3b6372431a Merge pull request #2144 from sirius422/fix-json-export-extension (Add json extension to exported oai and LogitBias presets) 2024-04-26 18:30:55 +03:00
sirius422 389ee7917f Add json extension to exported oai and LogitBias presets 2024-04-26 23:07:25 +08:00
Cohee 212e61d2a1 Lazy initialization of Claude tokenizer. Add JSDoc for tokenizer handlers 2024-04-26 15:17:02 +03:00
Cohee 1b60e4a013 Init user storage module before server listening 2024-04-26 14:09:40 +03:00
Aisu Wata 93cd93ada3 style: `.mes_block { overflow-y: clip; }` 2024-04-25 21:49:12 -03:00
Bronya-Rand 770f3e5da3 chore: apply align-items center and img sample for img only scraper icons 2024-04-22 19:12:02 +01:00
Bronya-Rand 0f0895f345 feat: implement miHoYo scraper 2024-04-22 19:11:00 +01:00
16 changed files with 367 additions and 76 deletions

public/img/mihoyo.svg Normal file
View File

@@ -0,0 +1,3 @@
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="19.998" height="19.998" viewBox="0 0 99 99">
<image width="99" height="99" xlink:href="data:img/png;base64,iVBORw0KGgoAAAANSUhEUgAAABQAAAAUCAYAAACNiR0NAAADbUlEQVQ4jVzLsQkAIADEwFPBAdx/ScH+bQTFQKqQkqS65PjSMNCxML9+f2wAAAD//2LBY5gRAwODLwMDgxUDA4MM1MBPDAwM5xgYGLYyMDBshKr/BzeUgYEBAAAA//9iALnw////IJeC2CAs8P///77///9/+48f7Pj//78ukj6m////MwEAAAD//wIbhAQUGRgYFjEwMNhAxR4zMDBchNIgoMDAwKALdTEI/GFgYAhiYGDYzMDAwMjAwMAIAAAA//9M0CEVwCAAQMHTcyj8KiBogiQehgBYSiwKAjeDwP934t9gwEDGQkfDh32aBwkFEe95XDHhBwAA//9CNnAyAwNDDgMDw3soezYDA8MTtAiAAZAr/RgYGB4yMDBwQ126kIGB4Q8AAAD//3TQsQ0AEBRAwRhDIrGBTmPDX9vXaUg0ijfAu/vfsQ7ZRHlsfg0EGjIq0gYAAP//gsVOKDQM7kBjD+YykBhIDQjD2CAaBI4xMDC8YGBgKGBgYDBkYGDgYWBg+A8AAAD//wIpAIWLNVTRaQYGhutIXoNpRo45cOBD2asYGBjYGRgYYsERysDACgAAAP//AqVDMQYGBhGoggcMDAw/0MIL2TD0RA9K5O8YGBg+MDAwsDAwMMgCAAAA//+CeQFm418ckYALgPSBDAPhHwwMDMwAAAAA//8CGfgaGrMgIAlyNpqLYBYiWwwDIFeB1ED0MTC8AgAAAP//AhkIyp+gsAMBU2jixQeQsygoW4IiA5R0lBkYGD4CAAAA//+CxfJaKK3DwMDggxSmsLyKjGEAVGDYQmP6BgMDw1MGBgYGAAAAAP//ghl4kIGBYT00WYBSPSjWQBpwAZCLMhgYGPQYGBg+MzAwPGNgYNjBwMDAAAAAAP//TNKxCcAgFEBBBwg6RMBenMslnNSQKlOcEH5h8dpX3Yn2xhu4n0DbkVGiCw0THxYG6v8gbQAAAP//AmU9UEAjh8lyBgYGOSj/JAMDw2VopIGyFx8DA4MBUroFZc9GmHcZGBgYAQAAAP//ApmKXHSBsPr///83Eyi6QGDJ////BZH0Mf7//58RAAAA//+CuRA94YLEQJk/GFr6SEDFQRFwnIGBYR0DA8MmJLWQ5MXAwAAAAAD//0I3ENlQGADFOKioAoFX0HQLA6h6GRj+AwAAAP//AwA0SINHgVxAugAAAABJRU5ErkJggg=="/>
</svg>


View File

@@ -840,7 +840,13 @@ async function openAttachmentManager() {
         }

         const buttonTemplate = template.find('.actionButtonTemplate .actionButton').clone();
-        buttonTemplate.find('.actionButtonIcon').addClass(scraper.iconClass);
+        if (scraper.iconAvailable) {
+            buttonTemplate.find('.actionButtonIcon').addClass(scraper.iconClass);
+            buttonTemplate.find('.actionButtonImg').remove();
+        } else {
+            buttonTemplate.find('.actionButtonImg').attr('src', scraper.iconClass);
+            buttonTemplate.find('.actionButtonIcon').remove();
+        }
         buttonTemplate.find('.actionButtonText').text(scraper.name);
         buttonTemplate.attr('title', scraper.description);
         buttonTemplate.on('click', () => {
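The new branch keys off the `iconAvailable` flag: when a Font Awesome glyph exists, the unused `<img>` placeholder is removed; otherwise `iconClass` is reused as an image URL (as the miHoYo scraper does with `img/mihoyo.svg`). A standalone sketch of the same fallback; both scraper objects are made-up examples, not part of the commit:

```js
// Sketch: choose between a Font Awesome icon and an <img> fallback.
// Both scraper objects are invented for illustration.
const scrapers = [
    { name: 'Fandom', iconClass: 'fa-solid fa-fire', iconAvailable: true },
    { name: 'miHoYo', iconClass: 'img/mihoyo.svg', iconAvailable: false },
];

for (const scraper of scrapers) {
    const button = $('.actionButtonTemplate .actionButton').clone();
    if (scraper.iconAvailable) {
        // iconClass is a Font Awesome class list, e.g. 'fa-solid fa-fire'
        button.find('.actionButtonIcon').addClass(scraper.iconClass);
        button.find('.actionButtonImg').remove();
    } else {
        // iconClass doubles as an image path when no glyph exists
        button.find('.actionButtonImg').attr('src', scraper.iconClass);
        button.find('.actionButtonIcon').remove();
    }
    button.find('.actionButtonText').text(scraper.name);
}
```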

View File

@@ -115,8 +115,9 @@
     </div>
     <div class="actionButtonTemplate">
-        <div class="actionButton list-group-item flex-container flexGap5" title="">
+        <div class="actionButton list-group-item flex-container flexGap5" style="align-items: center;" title="">
             <i class="actionButtonIcon"></i>
+            <img class="actionButtonImg"/>
             <span class="actionButtonText"></span>
         </div>
     </div>

View File

@@ -0,0 +1,27 @@
<div class="flexFlowColumn flex-container">
<div class="range-block-title">
<h3 data-i18n="miHoYo/HoYoverse HoYoLAB Scraper">miHoYo/HoYoverse HoYoLAB Scraper</h3>
</div>
<h4 data-i18n="Select a Wiki to parse through.">Select a Wiki to parse through.</h4>
<div class="range-block-range wide100p">
<select id="mihoyoScrapeWikiDropdown" name="mihoyoScrapeWikiDropdown" class="wide100p">
<option value="">--- None ---</option>
<option value="hsr" data-i18n="Honkai: Star Rail (H:SR)">Honkai: Star Rail (H:SR)</option>
<option value="genshin" data-i18n="Genshin Impact (GI)">Genshin Impact (GI)</option>
</select>
</div>
<div class="range-block-title">
<h4>
<span data-i18n="Enter the Wiki Page ID.">Enter the Wiki Page ID.</span>
</h4>
</div>
<div class="range-block-counter justifyCenter flex-container flexFlowColumn margin-bot-10px">
<span data-i18n="This is the last digit in the HoYoLAB URL i.e.">This is the last digit in the HoYoLAB URL i.e.</span>
<code>https://wiki.hoyolab.com/pc/hsr/entry/X</code>
<small>
<span data-i18n="Example:">Example:</span>
<code>14</code>
</small>
</div>
<input type="text" id="mihoyoScrapeWikiID" name="mihoyoScrapeWikiID" class="text_pole" placeholder="14">
</div>

View File

@@ -3250,7 +3250,8 @@ async function onExportPresetClick() {
     delete preset.proxy_password;

     const presetJsonString = JSON.stringify(preset, null, 4);
-    download(presetJsonString, oai_settings.preset_settings_openai, 'application/json');
+    const presetFileName = `${oai_settings.preset_settings_openai}.json`;
+    download(presetJsonString, presetFileName, 'application/json');
 }

 async function onLogitBiasPresetImportFileChange(e) {
@@ -3298,7 +3299,8 @@ function onLogitBiasPresetExportClick() {
     }

     const presetJsonString = JSON.stringify(oai_settings.bias_presets[oai_settings.bias_preset_selected], null, 4);
-    download(presetJsonString, oai_settings.bias_preset_selected, 'application/json');
+    const presetFileName = `${oai_settings.bias_preset_selected}.json`;
+    download(presetJsonString, presetFileName, 'application/json');
 }

 async function onDeletePresetClick() {
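Both hunks make the same fix: the bare preset name was passed as the download file name, so browsers saved an extensionless file. A sketch of the fix in isolation; this `download` is a stand-in written for illustration, assuming the real client helper saves a Blob under the given name:

```js
// Stand-in for the client's download helper (assumption: the real helper
// saves a Blob under the given file name via a temporary anchor element).
function download(content, fileName, contentType) {
    const file = new Blob([content], { type: contentType });
    const a = document.createElement('a');
    a.href = URL.createObjectURL(file);
    a.download = fileName;
    a.click();
    URL.revokeObjectURL(a.href);
}

// Before the fix the second argument was the bare preset name ('Default'
// here is an example), so the saved file had no .json extension.
const presetFileName = 'Default.json';
download(JSON.stringify({ temperature: 1 }, null, 4), presetFileName, 'application/json');
```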

View File

@@ -9,6 +9,7 @@ import { isValidUrl } from './utils.js';
  * @property {string} name
  * @property {string} description
  * @property {string} iconClass
+ * @property {boolean} iconAvailable
  * @property {() => Promise<boolean>} isAvailable
  * @property {() => Promise<File[]>} scrape
  */
@@ -19,6 +20,7 @@ import { isValidUrl } from './utils.js';
  * @property {string} name
  * @property {string} description
  * @property {string} iconClass
+ * @property {boolean} iconAvailable
  */

 export class ScraperManager {
@@ -45,7 +47,7 @@ export class ScraperManager {
      * @returns {ScraperInfo[]} List of scrapers available for the Data Bank
      */
     static getDataBankScrapers() {
-        return ScraperManager.#scrapers.map(s => ({ id: s.id, name: s.name, description: s.description, iconClass: s.iconClass }));
+        return ScraperManager.#scrapers.map(s => ({ id: s.id, name: s.name, description: s.description, iconClass: s.iconClass, iconAvailable: s.iconAvailable }));
     }

     /**
@@ -87,6 +89,7 @@ class Notepad {
         this.name = 'Notepad';
         this.description = 'Create a text file from scratch.';
         this.iconClass = 'fa-solid fa-note-sticky';
+        this.iconAvailable = true;
     }

     /**
@@ -133,6 +136,7 @@ class WebScraper {
         this.name = 'Web';
         this.description = 'Download a page from the web.';
         this.iconClass = 'fa-solid fa-globe';
+        this.iconAvailable = true;
     }

     /**
@@ -207,6 +211,7 @@ class FileScraper {
         this.name = 'File';
         this.description = 'Upload a file from your computer.';
         this.iconClass = 'fa-solid fa-upload';
+        this.iconAvailable = true;
     }

     /**
@@ -243,6 +248,7 @@ class FandomScraper {
         this.name = 'Fandom';
         this.description = 'Download a page from the Fandom wiki.';
         this.iconClass = 'fa-solid fa-fire';
+        this.iconAvailable = true;
     }

     /**
@@ -339,6 +345,153 @@ class FandomScraper {
    }
}

/**
 * Scrapes data from the miHoYo/HoYoverse HoYoLAB wiki.
 * @implements {Scraper}
 */
class miHoYoScraper {
    constructor() {
        this.id = 'mihoyo';
        this.name = 'miHoYo';
        this.description = 'Scrapes a page from the miHoYo/HoYoverse HoYoLAB wiki.';
        this.iconClass = 'img/mihoyo.svg';
        this.iconAvailable = false; // There is no miHoYo icon in Font Awesome
    }

    /**
     * Check if the scraper is available.
     * @returns {Promise<boolean>}
     */
    async isAvailable() {
        try {
            const result = await fetch('/api/plugins/hoyoverse/probe', {
                method: 'POST',
                headers: getRequestHeaders(),
            });

            return result.ok;
        } catch (error) {
            console.debug('Could not probe miHoYo plugin', error);
            return false;
        }
    }

    /**
     * Outputs Data Information in a human-readable format.
     * @param {Object} m Data to be parsed
     * @returns {string} Human-readable format of the data
     */
    parseOutput(m) {
        let temp = '';
        for (const d in m) {
            if (m[d].key === '') {
                temp += `- ${m[d].value}\n`;
                continue;
            }
            temp += `- ${m[d].key}: ${m[d].value}\n`;
        }
        return temp;
    }

    /**
     * Scrape data from the miHoYo/HoYoverse HoYoLAB wiki.
     * @returns {Promise<File[]>} File attachments scraped from the wiki.
     */
    async scrape() {
        let miHoYoWiki = '';
        let miHoYoWikiID = '';

        const template = $(await renderExtensionTemplateAsync('attachments', 'mihoyo-scrape', {}));

        template.find('select[name="mihoyoScrapeWikiDropdown"]').on('change', function () {
            miHoYoWiki = String($(this).val());
        });
        template.find('input[name="mihoyoScrapeWikiID"]').on('input', function () {
            miHoYoWikiID = String($(this).val());
        });

        const confirm = await callGenericPopup(template, POPUP_TYPE.CONFIRM, '', { wide: false, large: false });

        if (confirm !== POPUP_RESULT.AFFIRMATIVE) {
            return;
        }

        if (!miHoYoWiki) {
            toastr.error('A specific HoYoLab wiki is required');
            return;
        }

        if (!miHoYoWikiID) {
            toastr.error('A specific HoYoLab wiki ID is required');
            return;
        }

        if (miHoYoWiki === 'genshin') {
            toastr.error('The Genshin Impact parser has not been implemented *yet*');
            return;
        }

        let toast;
        if (miHoYoWiki === 'hsr') {
            toast = toastr.info(`Scraping the Honkai: Star Rail HoYoLAB wiki for Wiki Entry ID: ${miHoYoWikiID}`);
        } else {
            toast = toastr.info(`Scraping the Genshin Impact wiki for Wiki Entry ID: ${miHoYoWikiID}`);
        }

        let result;
        if (miHoYoWiki === 'hsr') {
            result = await fetch('/api/plugins/hoyoverse/silver-wolf', {
                method: 'POST',
                headers: getRequestHeaders(),
                body: JSON.stringify({ miHoYoWiki, miHoYoWikiID }),
            });
        } else if (miHoYoWiki === 'genshin') {
            result = await fetch('/api/plugins/hoyoverse/furina', {
                method: 'POST',
                headers: getRequestHeaders(),
                body: JSON.stringify({ miHoYoWiki, miHoYoWikiID }),
            });
        } else {
            throw new Error('Unknown wiki name identifier');
        }

        if (!result.ok) {
            const error = await result.text();
            throw new Error(error);
        }

        const data = await result.json();
        toastr.clear(toast);

        const fileName = data[0].name;
        const dataContent = data[0].content;

        // Parse the data into one long string
        let combinedContent = '';
        combinedContent += `Name: ${data[0].name}\n`;

        if (dataContent.description !== '') {
            combinedContent += `Description: ${dataContent.description}\n\n`;
        }

        if (dataContent.modules.length > 0) {
            for (const m in dataContent.modules) {
                if (dataContent.modules[m].data.length === 0) {
                    continue;
                }
                combinedContent += dataContent.modules[m].name + '\n';
                combinedContent += this.parseOutput(dataContent.modules[m].data);
                combinedContent += '\n';
            }
        }

        const file = new File([combinedContent], `${fileName}.txt`, { type: 'text/plain' });

        return [file];
    }
}

/**
 * Scrape transcript from a YouTube video.
 * @implements {Scraper}
@@ -349,6 +502,7 @@ class YouTubeScraper {
         this.name = 'YouTube';
         this.description = 'Download a transcript from a YouTube video.';
         this.iconClass = 'fa-solid fa-closed-captioning';
+        this.iconAvailable = true;
     }

     /**
@@ -413,4 +567,5 @@ ScraperManager.registerDataBankScraper(new FileScraper());
 ScraperManager.registerDataBankScraper(new Notepad());
 ScraperManager.registerDataBankScraper(new WebScraper());
 ScraperManager.registerDataBankScraper(new FandomScraper());
+ScraperManager.registerDataBankScraper(new miHoYoScraper());
 ScraperManager.registerDataBankScraper(new YouTubeScraper());
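Registration is the only integration point a new source needs; everything else follows from the `Scraper` shape documented at the top of the file (`id`, `name`, `description`, `iconClass`, `iconAvailable`, `isAvailable`, `scrape`). A hypothetical scraper showing the minimal contract; `ExampleScraper` and its contents are invented for illustration:

```js
// Hypothetical scraper following the same contract as the ones above.
class ExampleScraper {
    constructor() {
        this.id = 'example';
        this.name = 'Example';
        this.description = 'Scrapes a made-up example source.';
        this.iconClass = 'fa-solid fa-flask'; // a glyph exists, so no <img> fallback
        this.iconAvailable = true;
    }

    async isAvailable() {
        // A real scraper would probe its backing plugin or API here.
        return true;
    }

    async scrape() {
        const content = 'Example content';
        return [new File([content], 'example.txt', { type: 'text/plain' })];
    }
}

ScraperManager.registerDataBankScraper(new ExampleScraper());
```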

View File

@@ -1000,6 +1000,7 @@ body .panelControlBar {
     padding-left: 10px;
     width: 100%;
     overflow-x: hidden;
+    overflow-y: clip;
 }

 .mes_text {

View File

@@ -45,7 +45,6 @@ const {
     forwardFetchResponse,
 } = require('./src/util');
 const { ensureThumbnailCache } = require('./src/endpoints/thumbnails');
-const { loadTokenizers } = require('./src/endpoints/tokenizers');

 // Work around a node v20.0.0, v20.1.0, and v20.2.0 bug. The issue was fixed in v20.3.0.
 // https://github.com/nodejs/node/issues/47822#issuecomment-1564708870
@@ -543,22 +542,12 @@ const setupTasks = async function () {
     }

     console.log();

-    // TODO: do endpoint init functions depend on certain directories existing or not existing? They should be callable
-    // in any order for encapsulation reasons, but right now it's unknown if that would break anything.
-    await userModule.initUserStorage(dataRoot);
-
-    if (listen && !basicAuthMode && enableAccounts) {
-        await userModule.checkAccountsProtection();
-    }
-
-    await settingsEndpoint.init();
-    const directories = await userModule.ensurePublicDirectoriesExist();
-    await userModule.migrateUserData();
+    const directories = await userModule.getUserDirectoriesList();
     await contentManager.checkForNewContent(directories);
     await ensureThumbnailCache();
     cleanUploads();

-    await loadTokenizers();
+    await settingsEndpoint.init();
     await statsEndpoint.init();

     const cleanupPlugins = await loadPlugins();
@@ -581,7 +570,6 @@ const setupTasks = async function () {
         exitProcess();
     });

     console.log('Launching...');

     if (autorun) open(autorunUrl.toString());
@@ -601,6 +589,9 @@ const setupTasks = async function () {
         }
     }

+    if (listen && !basicAuthMode && enableAccounts) {
+        await userModule.checkAccountsProtection();
+    }
 };
/**
@@ -642,21 +633,27 @@ function setWindowTitle(title) {
     }
 }

-if (cliArguments.ssl) {
-    https.createServer(
-        {
-            cert: fs.readFileSync(cliArguments.certPath),
-            key: fs.readFileSync(cliArguments.keyPath),
-        }, app)
-        .listen(
-            Number(tavernUrl.port) || 443,
-            tavernUrl.hostname,
-            setupTasks,
-        );
-} else {
-    http.createServer(app).listen(
-        Number(tavernUrl.port) || 80,
-        tavernUrl.hostname,
-        setupTasks,
-    );
-}
+// User storage module needs to be initialized before starting the server
+userModule.initUserStorage(dataRoot)
+    .then(userModule.ensurePublicDirectoriesExist)
+    .then(userModule.migrateUserData)
+    .finally(() => {
+        if (cliArguments.ssl) {
+            https.createServer(
+                {
+                    cert: fs.readFileSync(cliArguments.certPath),
+                    key: fs.readFileSync(cliArguments.keyPath),
+                }, app)
+                .listen(
+                    Number(tavernUrl.port) || 443,
+                    tavernUrl.hostname,
+                    setupTasks,
+                );
+        } else {
+            http.createServer(app).listen(
+                Number(tavernUrl.port) || 80,
+                tavernUrl.hostname,
+                setupTasks,
+            );
+        }
+    });
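This reordering guarantees that user storage is initialized, public directories exist, and data migration has run before the server can accept its first request, because `listen` is only called from the `.finally` callback. A minimal sketch of the init-before-listen pattern; the initializer names here are placeholders, not SillyTavern's real modules:

```js
const http = require('http');

// Placeholder initializers standing in for userModule.initUserStorage() etc.
async function initStorage() { /* open databases, run migrations */ }
async function setupTasks() { /* per-endpoint init that may touch storage */ }

// Initialize storage first; only then bind the port and let requests in.
initStorage()
    .finally(() => {
        http.createServer((req, res) => res.end('ok'))
            .listen(8000, 'localhost', setupTasks);
    });
```

Note that `.finally`, as used in the commit, still starts the server if initialization rejects; `.then` would instead refuse to start on failure.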

View File

@@ -10,6 +10,10 @@ const { TEXTGEN_TYPES } = require('../constants');
 const { jsonParser } = require('../express-common');
 const { setAdditionalHeaders } = require('../additional-headers');

+/**
+ * @typedef { (req: import('express').Request, res: import('express').Response) => Promise<any> } TokenizationHandler
+ */
+
 /**
  * @type {{[key: string]: import("@dqbd/tiktoken").Tiktoken}} Tokenizers cache
  */
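The typedef replaces the `@returns {any}` annotations on the handler factories further down, so JSDoc-aware tooling can type-check the request/response pair. A minimal sketch of the pattern; `createDemoHandler` is invented for illustration:

```js
/**
 * @typedef { (req: import('express').Request, res: import('express').Response) => Promise<any> } TokenizationHandler
 */

/**
 * Creates a demo handler.
 * @returns {TokenizationHandler} Handler function
 */
function createDemoHandler() {
    return async function (request, response) {
        // Checkers now know the types of request and response.
        return response.send({ ok: true });
    };
}
```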
@@ -48,16 +52,30 @@ const TEXT_COMPLETION_MODELS = [
const CHARS_PER_TOKEN = 3.35;

/**
 * Sentencepiece tokenizer for tokenizing text.
 */
class SentencePieceTokenizer {
    /**
     * @type {import('@agnai/sentencepiece-js').SentencePieceProcessor} Sentencepiece tokenizer instance
     */
    #instance;

    /**
     * @type {string} Path to the tokenizer model
     */
    #model;

    /**
     * Creates a new Sentencepiece tokenizer.
     * @param {string} model Path to the tokenizer model
     */
    constructor(model) {
        this.#model = model;
    }

    /**
     * Gets the Sentencepiece tokenizer instance.
     * @returns {Promise<import('@agnai/sentencepiece-js').SentencePieceProcessor|null>} Sentencepiece tokenizer instance
     */
    async get() {
        if (this.#instance) {
@@ -76,18 +94,61 @@ class SentencePieceTokenizer {
     }
 }

-const spp_llama = new SentencePieceTokenizer('src/sentencepiece/llama.model');
-const spp_nerd = new SentencePieceTokenizer('src/sentencepiece/nerdstash.model');
-const spp_nerd_v2 = new SentencePieceTokenizer('src/sentencepiece/nerdstash_v2.model');
-const spp_mistral = new SentencePieceTokenizer('src/sentencepiece/mistral.model');
-const spp_yi = new SentencePieceTokenizer('src/sentencepiece/yi.model');
-let claude_tokenizer;
+/**
+ * Web tokenizer for tokenizing text.
+ */
+class WebTokenizer {
+    /**
+     * @type {Tokenizer} Web tokenizer instance
+     */
+    #instance;
+
+    /**
+     * @type {string} Path to the tokenizer model
+     */
+    #model;
+
+    /**
+     * Creates a new Web tokenizer.
+     * @param {string} model Path to the tokenizer model
+     */
+    constructor(model) {
+        this.#model = model;
+    }
+
+    /**
+     * Gets the Web tokenizer instance.
+     * @returns {Promise<Tokenizer|null>} Web tokenizer instance
+     */
+    async get() {
+        if (this.#instance) {
+            return this.#instance;
+        }
+
+        try {
+            const arrayBuffer = fs.readFileSync(this.#model).buffer;
+            this.#instance = await Tokenizer.fromJSON(arrayBuffer);
+            console.log('Instantiated the tokenizer for', path.parse(this.#model).name);
+            return this.#instance;
+        } catch (error) {
+            console.error('Web tokenizer failed to load: ' + this.#model, error);
+            return null;
+        }
+    }
+}
+
+const spp_llama = new SentencePieceTokenizer('src/tokenizers/llama.model');
+const spp_nerd = new SentencePieceTokenizer('src/tokenizers/nerdstash.model');
+const spp_nerd_v2 = new SentencePieceTokenizer('src/tokenizers/nerdstash_v2.model');
+const spp_mistral = new SentencePieceTokenizer('src/tokenizers/mistral.model');
+const spp_yi = new SentencePieceTokenizer('src/tokenizers/yi.model');
+const claude_tokenizer = new WebTokenizer('src/tokenizers/claude.json');

 const sentencepieceTokenizers = [
     'llama',
     'nerdstash',
     'nerdstash_v2',
     'mistral',
     'yi',
 ];
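Both tokenizer classes implement the lazy pattern this compare range extends to Claude: store only the model path at construction, perform the expensive load on the first `get()`, cache the instance, and degrade to `null` on failure. A condensed sketch; `LazyResource` and `expensiveLoad` are illustrative names, not part of the commit:

```js
// Hypothetical loader standing in for fs.readFileSync + Tokenizer.fromJSON.
async function expensiveLoad(path) {
    return { path }; // pretend this parsed a model file
}

// The lazy-loading shape shared by SentencePieceTokenizer and WebTokenizer.
class LazyResource {
    #instance;
    #path;

    constructor(path) {
        this.#path = path;
    }

    async get() {
        if (this.#instance) {
            return this.#instance; // already loaded: reuse the cached instance
        }
        try {
            this.#instance = await expensiveLoad(this.#path);
            return this.#instance;
        } catch (error) {
            console.error('Failed to load:', this.#path, error);
            return null; // callers must handle a null instance
        }
    }
}
```

Callers then guard on the result, as the `/openai/*` routes below do with `if (!instance) throw new Error(...)`.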
/**
@@ -112,6 +173,10 @@ function getSentencepiceTokenizer(model) {
         return spp_nerd_v2;
     }

+    if (model.includes('yi')) {
+        return spp_yi;
+    }
+
     return null;
 }
@@ -168,13 +233,23 @@ async function getTiktokenChunks(tokenizer, ids) {
     return chunks;
 }

-async function getWebTokenizersChunks(tokenizer, ids) {
+/**
+ * Gets the token chunks for the given token IDs using the Web tokenizer.
+ * @param {Tokenizer} tokenizer Web tokenizer instance
+ * @param {number[]} ids Token IDs
+ * @returns {string[]} Token chunks
+ */
+function getWebTokenizersChunks(tokenizer, ids) {
     const chunks = [];

-    for (let i = 0; i < ids.length; i++) {
-        const id = ids[i];
-        const chunkText = await tokenizer.decode(new Uint32Array([id]));
+    for (let i = 0, lastProcessed = 0; i < ids.length; i++) {
+        const chunkIds = ids.slice(lastProcessed, i + 1);
+        const chunkText = tokenizer.decode(new Int32Array(chunkIds));
+        if (chunkText === '�') {
+            continue;
+        }
         chunks.push(chunkText);
+        lastProcessed = i + 1;
     }

     return chunks;
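The rewrite fixes per-token decoding of multi-byte characters: decoding a single ID that covers only part of a UTF-8 sequence yields the replacement character U+FFFD ('�'), so the loop now grows the slice of IDs until it decodes cleanly. A worked example with made-up token IDs and a fake decoder, using the function above:

```js
// Assume a decoder where [5] is half of a multi-byte character and
// [5, 9] decodes to the full character 'é'. Both IDs are invented.
const fakeTokenizer = {
    decode: (ids) => (ids.length === 2 ? 'é' : '�'),
};

// i = 0: slice [5]    -> '�' -> skipped, lastProcessed stays 0
// i = 1: slice [5, 9] -> 'é' -> pushed,  lastProcessed = 2
console.log(getWebTokenizersChunks(fakeTokenizer, [5, 9])); // ['é']
```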
@@ -237,17 +312,12 @@ function getTiktokenTokenizer(model) {
     return tokenizer;
 }

-async function loadClaudeTokenizer(modelPath) {
-    try {
-        const arrayBuffer = fs.readFileSync(modelPath).buffer;
-        const instance = await Tokenizer.fromJSON(arrayBuffer);
-        return instance;
-    } catch (error) {
-        console.error('Claude tokenizer failed to load: ' + modelPath, error);
-        return null;
-    }
-}
-
+/**
+ * Counts the tokens for the given messages using the Claude tokenizer.
+ * @param {Tokenizer} tokenizer Web tokenizer
+ * @param {object[]} messages Array of messages
+ * @returns {number} Number of tokens
+ */
 function countClaudeTokens(tokenizer, messages) {
     // Should be fine if we use the old conversion method instead of the messages API one i think?
     const convertedPrompt = convertClaudePrompt(messages, false, '', false, false, '', false);
@@ -264,9 +334,14 @@
 /**
  * Creates an API handler for encoding Sentencepiece tokens.
  * @param {SentencePieceTokenizer} tokenizer Sentencepiece tokenizer
- * @returns {any} Handler function
+ * @returns {TokenizationHandler} Handler function
  */
 function createSentencepieceEncodingHandler(tokenizer) {
+    /**
+     * Request handler for encoding Sentencepiece tokens.
+     * @param {import('express').Request} request
+     * @param {import('express').Response} response
+     */
     return async function (request, response) {
         try {
             if (!request.body) {
@@ -276,7 +351,7 @@ function createSentencepieceEncodingHandler(tokenizer) {
             const text = request.body.text || '';
             const instance = await tokenizer?.get();
             const { ids, count } = await countSentencepieceTokens(tokenizer, text);
-            const chunks = await instance?.encodePieces(text);
+            const chunks = instance?.encodePieces(text);
             return response.send({ ids, count, chunks });
         } catch (error) {
             console.log(error);
@@ -288,9 +363,14 @@ function createSentencepieceEncodingHandler(tokenizer) {
 /**
  * Creates an API handler for decoding Sentencepiece tokens.
  * @param {SentencePieceTokenizer} tokenizer Sentencepiece tokenizer
- * @returns {any} Handler function
+ * @returns {TokenizationHandler} Handler function
  */
 function createSentencepieceDecodingHandler(tokenizer) {
+    /**
+     * Request handler for decoding Sentencepiece tokens.
+     * @param {import('express').Request} request
+     * @param {import('express').Response} response
+     */
     return async function (request, response) {
         try {
             if (!request.body) {
@@ -299,6 +379,7 @@ function createSentencepieceDecodingHandler(tokenizer) {
             const ids = request.body.ids || [];
             const instance = await tokenizer?.get();
+            if (!instance) throw new Error('Failed to load the Sentencepiece tokenizer');

             const ops = ids.map(id => instance.decodeIds([id]));
             const chunks = await Promise.all(ops);
             const text = chunks.join('');
@@ -313,9 +394,14 @@
 /**
  * Creates an API handler for encoding Tiktoken tokens.
  * @param {string} modelId Tiktoken model ID
- * @returns {any} Handler function
+ * @returns {TokenizationHandler} Handler function
  */
 function createTiktokenEncodingHandler(modelId) {
+    /**
+     * Request handler for encoding Tiktoken tokens.
+     * @param {import('express').Request} request
+     * @param {import('express').Response} response
+     */
     return async function (request, response) {
         try {
             if (!request.body) {
@@ -337,9 +423,14 @@
 /**
  * Creates an API handler for decoding Tiktoken tokens.
  * @param {string} modelId Tiktoken model ID
- * @returns {any} Handler function
+ * @returns {TokenizationHandler} Handler function
  */
 function createTiktokenDecodingHandler(modelId) {
+    /**
+     * Request handler for decoding Tiktoken tokens.
+     * @param {import('express').Request} request
+     * @param {import('express').Response} response
+     */
     return async function (request, response) {
         try {
             if (!request.body) {
@@ -358,14 +449,6 @@ function createTiktokenDecodingHandler(modelId) {
     };
 }

-/**
- * Loads the model tokenizers.
- * @returns {Promise<void>} Promise that resolves when the tokenizers are loaded
- */
-async function loadTokenizers() {
-    claude_tokenizer = await loadClaudeTokenizer('src/claude.json');
-}
-
 const router = express.Router();
router.post('/ai21/count', jsonParser, async function (req, res) {
@@ -446,8 +529,10 @@ router.post('/openai/encode', jsonParser, async function (req, res) {
         if (queryModel.includes('claude')) {
             const text = req.body.text || '';
-            const tokens = Object.values(claude_tokenizer.encode(text));
-            const chunks = await getWebTokenizersChunks(claude_tokenizer, tokens);
+            const instance = await claude_tokenizer.get();
+            if (!instance) throw new Error('Failed to load the Claude tokenizer');
+            const tokens = Object.values(instance.encode(text));
+            const chunks = getWebTokenizersChunks(instance, tokens);
             return res.send({ ids: tokens, count: tokens.length, chunks });
         }
@@ -481,7 +566,9 @@ router.post('/openai/decode', jsonParser, async function (req, res) {
         if (queryModel.includes('claude')) {
             const ids = req.body.ids || [];
-            const chunkText = await claude_tokenizer.decode(new Uint32Array(ids));
+            const instance = await claude_tokenizer.get();
+            if (!instance) throw new Error('Failed to load the Claude tokenizer');
+            const chunkText = instance.decode(new Int32Array(ids));
             return res.send({ text: chunkText });
         }
@@ -503,7 +590,9 @@ router.post('/openai/count', jsonParser, async function (req, res) {
         const model = getTokenizerModel(queryModel);

         if (model === 'claude') {
-            num_tokens = countClaudeTokens(claude_tokenizer, req.body);
+            const instance = await claude_tokenizer.get();
+            if (!instance) throw new Error('Failed to load the Claude tokenizer');
+            num_tokens = countClaudeTokens(instance, req.body);
             return res.send({ 'token_count': num_tokens });
         }
@@ -665,7 +754,6 @@ module.exports = {
     getTokenizerModel,
     getTiktokenTokenizer,
     countClaudeTokens,
-    loadTokenizers,
     getSentencepiceTokenizer,
     sentencepieceTokenizers,
     router,

View File

@@ -112,6 +112,16 @@ async function ensurePublicDirectoriesExist() {
     return directoriesList;
 }

+/**
+ * Gets a list of all user directories.
+ * @returns {Promise<import('./users').UserDirectoryList[]>} - The list of user directories
+ */
+async function getUserDirectoriesList() {
+    const userHandles = await getAllUserHandles();
+    const directoriesList = userHandles.map(handle => getUserDirectories(handle));
+    return directoriesList;
+}
+
 /**
  * Perform migration from the old user data format to the new one.
  */
@@ -707,6 +717,7 @@ module.exports = {
     toAvatarKey,
     initUserStorage,
     ensurePublicDirectoriesExist,
+    getUserDirectoriesList,
     getAllUserHandles,
     getUserDirectories,
     setUserDataMiddleware,
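Unlike `ensurePublicDirectoriesExist` above it, the new helper is read-only: it maps each known user handle to its directory descriptor without creating anything on disk. That is what lets server.js move the create-and-migrate work to startup and call only the cheap listing from `setupTasks`. A usage sketch with hypothetical handles:

```js
// Hypothetical: with handles ['default-user', 'alice'] registered,
// getUserDirectoriesList() resolves to one directory descriptor per handle.
const directories = await getUserDirectoriesList();
console.log(directories.length); // 2
```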