Mirror of https://github.com/SillyTavern/SillyTavern.git (synced 2025-06-05 21:59:27 +02:00)
Merge branch 'staging' of https://github.com/city-unit/SillyTavern into feature/exorcism
@@ -445,7 +445,7 @@ export function dragElement(elmnt) {
    var pos1 = 0, pos2 = 0, pos3 = 0, pos4 = 0;
    var height, width, top, left, right, bottom,
        maxX, maxY, winHeight, winWidth,
-        topbar, topbarWidth, topBarFirstX, topBarLastX, sheldWidth;
+        topbar, topbarWidth, topBarFirstX, topBarLastX, topBarLastY, sheldWidth;

    var elmntName = elmnt.attr('id');

@@ -493,8 +493,9 @@ export function dragElement(elmnt) {
    topbar = document.getElementById("top-bar")
    const topbarstyle = getComputedStyle(topbar)
    topBarFirstX = parseInt(topbarstyle.marginInline)
-    topbarWidth = parseInt(topbarstyle.width)
+    topbarWidth = parseInt(topbarstyle.width);
    topBarLastX = topBarFirstX + topbarWidth;
+    topBarLastY = parseInt(topbarstyle.height);

    /*console.log(`
    winWidth: ${winWidth}, winHeight: ${winHeight}
@@ -540,7 +541,7 @@ export function dragElement(elmnt) {
    }

    //prevent resizing from top left into the top bar
-    if (top < 35 && maxX >= topBarFirstX && left <= topBarFirstX
+    if (top < topBarLastY && maxX >= topBarFirstX && left <= topBarFirstX
    ) {
        console.debug('prevent topbar underlap resize')
        elmnt.css('width', width - 1 + "px");
@@ -575,7 +576,7 @@ export function dragElement(elmnt) {
    }

    //prevent underlap with topbar div
-    if (top < 35
+    if (top < topBarLastY
        && (maxX >= topBarFirstX && left <= topBarFirstX //elmnt is hitting topbar from left side
            || left <= topBarLastX && maxX >= topBarLastX //elmnt is hitting topbar from right side
            || left >= topBarFirstX && maxX <= topBarLastX) //elmnt hitting topbar in the middle
public/scripts/extensions/hypebot/index.js (new file, 195 lines)
@@ -0,0 +1,195 @@
import { eventSource, event_types, getRequestHeaders, is_send_press, saveSettingsDebounced } from "../../../script.js";
import { extension_settings, getContext, renderExtensionTemplate } from "../../extensions.js";
import { SECRET_KEYS, secret_state } from "../../secrets.js";
import { collapseNewlines } from "../../power-user.js";
import { bufferToBase64, debounce } from "../../utils.js";
import { decodeTextTokens, getTextTokens, tokenizers } from "../../tokenizers.js";

const MODULE_NAME = 'hypebot';
const MAX_PROMPT = 1024;
const MAX_LENGTH = 50;
const MAX_STRING_LENGTH = MAX_PROMPT * 4;

const settings = {
    enabled: false,
    name: 'Goose',
};

/**
 * Returns a random waiting verb
 * @returns {string} Random waiting verb
 */
function getWaitingVerb() {
    const waitingVerbs = ['thinking', 'typing', 'brainstorming', 'cooking', 'conjuring'];
    return waitingVerbs[Math.floor(Math.random() * waitingVerbs.length)];
}

/**
 * Returns a random verb based on the text
 * @param {string} text Text to generate a verb for
 * @returns {string} Random verb
 */
function getVerb(text) {
    let verbList = ['says', 'notes', 'states', 'whispers', 'murmurs', 'mumbles'];

    if (text.endsWith('!')) {
        verbList = ['proclaims', 'declares', 'salutes', 'exclaims', 'cheers'];
    }

    if (text.endsWith('?')) {
        verbList = ['asks', 'suggests', 'ponders', 'wonders', 'inquires', 'questions'];
    }

    return verbList[Math.floor(Math.random() * verbList.length)];
}

/**
 * Formats the HypeBot reply text
 * @param {string} text HypeBot output text
 * @returns {string} Formatted HTML text
 */
function formatReply(text) {
    const verb = getVerb(text);
    return DOMPurify.sanitize(`<span class="hypebot_name">${settings.name} ${verb}:</span> <span class="hypebot_text">${text}</span>`);
}

let hypeBotBar;
let abortController;

const generateDebounced = debounce(() => generateHypeBot(), 500);

/**
 * Called when a chat event occurs to generate a HypeBot reply.
 * @param {boolean} clear Clear the hypebot bar.
 */
function onChatEvent(clear) {
    if (clear) {
        hypeBotBar.text('');
    }

    abortController?.abort();
    generateDebounced();
};

/**
 * Generates a HypeBot reply.
 */
async function generateHypeBot() {
    if (!settings.enabled || is_send_press) {
        return;
    }

    if (!secret_state[SECRET_KEYS.NOVEL]) {
        hypeBotBar.html('<span class="hypebot_nokey">No API key found. Please enter your API key in the NovelAI API Settings</span>');
        return;
    }

    console.debug('Generating HypeBot reply');
    hypeBotBar.html(DOMPurify.sanitize(`<span class="hypebot_name">${settings.name}</span> is ${getWaitingVerb()}...`));

    const context = getContext();
    const chat = context.chat.slice();
    let prompt = '';

    for (let index = chat.length - 1; index >= 0; index--) {
        const message = chat[index];

        if (message.is_system || !message.mes) {
            continue;
        }

        prompt = `\n${message.mes}\n${prompt}`;

        if (prompt.length >= MAX_STRING_LENGTH) {
            break;
        }
    }

    prompt = collapseNewlines(prompt.replaceAll(/[\*\[\]\{\}]/g, ''));

    if (!prompt) {
        return;
    }

    const sliceLength = MAX_PROMPT - MAX_LENGTH;
    const encoded = getTextTokens(tokenizers.GPT2, prompt).slice(-sliceLength);

    // Add a stop string token to the end of the prompt
    encoded.push(49527);

    const base64String = await bufferToBase64(new Uint16Array(encoded).buffer);

    const parameters = {
        input: base64String,
        model: "hypebot",
        streaming: false,
        temperature: 1,
        max_length: MAX_LENGTH,
        min_length: 1,
        top_k: 0,
        top_p: 1,
        tail_free_sampling: 0.95,
        repetition_penalty: 1,
        repetition_penalty_range: 2048,
        repetition_penalty_slope: 0.18,
        repetition_penalty_frequency: 0,
        repetition_penalty_presence: 0,
        phrase_rep_pen: "off",
        bad_words_ids: [],
        stop_sequences: [[48585]],
        generate_until_sentence: true,
        use_cache: false,
        use_string: false,
        return_full_text: false,
        prefix: "vanilla",
        logit_bias_exp: [],
        order: [0, 1, 2, 3],
    };

    abortController = new AbortController();

    const response = await fetch('/generate_novelai', {
        headers: getRequestHeaders(),
        body: JSON.stringify(parameters),
        method: 'POST',
        signal: abortController.signal,
    });

    if (response.ok) {
        const data = await response.json();
        const ids = Array.from(new Uint16Array(Uint8Array.from(atob(data.output), c => c.charCodeAt(0)).buffer));
        const output = decodeTextTokens(tokenizers.GPT2, ids).replace(/�/g, '').trim();

        hypeBotBar.html(formatReply(output));
    }
}

jQuery(() => {
    if (!extension_settings.hypebot) {
        extension_settings.hypebot = settings;
    }

    Object.assign(settings, extension_settings.hypebot);
    $('#extensions_settings2').append(renderExtensionTemplate(MODULE_NAME, 'settings'));
    hypeBotBar = $(`<div id="hypeBotBar"></div>`).toggle(settings.enabled);
    $('#send_form').append(hypeBotBar);

    $('#hypebot_enabled').prop('checked', settings.enabled).on('input', () => {
        settings.enabled = $('#hypebot_enabled').prop('checked');
        hypeBotBar.toggle(settings.enabled);
        abortController?.abort();
        saveSettingsDebounced();
    });

    $('#hypebot_name').val(settings.name).on('input', () => {
        settings.name = String($('#hypebot_name').val());
        saveSettingsDebounced();
    });

    eventSource.on(event_types.CHAT_CHANGED, () => onChatEvent(true));
    eventSource.on(event_types.MESSAGE_DELETED, () => onChatEvent(true));
    eventSource.on(event_types.MESSAGE_EDITED, () => onChatEvent(true));
    eventSource.on(event_types.MESSAGE_SENT, () => onChatEvent(false));
    eventSource.on(event_types.MESSAGE_RECEIVED, () => onChatEvent(false));
    eventSource.on(event_types.MESSAGE_SWIPED, () => onChatEvent(false));
});
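The request and response above exchange NovelAI token ids as a base64-encoded Uint16Array buffer (bufferToBase64(new Uint16Array(encoded).buffer) on the way out, atob plus Uint16Array on the way back). A minimal standalone sketch of that round trip, with illustrative helper names (idsToBase64 / base64ToIds) that are not part of the commit:

// Illustrative helpers (not from the extension): token ids <-> base64-encoded Uint16Array buffer.
function idsToBase64(ids) {
    const bytes = new Uint8Array(new Uint16Array(ids).buffer); // each id becomes two little-endian bytes
    let binary = '';
    for (const b of bytes) binary += String.fromCharCode(b);   // bytes -> binary string
    return btoa(binary);                                       // binary string -> base64
}

function base64ToIds(base64) {
    const bytes = Uint8Array.from(atob(base64), c => c.charCodeAt(0));
    return Array.from(new Uint16Array(bytes.buffer));          // bytes -> 16-bit token ids
}

// Round trip check (browser or Node 18+):
console.log(base64ToIds(idsToBase64([464, 3290, 49527])));     // [464, 3290, 49527]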
public/scripts/extensions/hypebot/manifest.json (new file, 11 lines)
@@ -0,0 +1,11 @@
{
    "display_name": "HypeBot",
    "loading_order": 1000,
    "requires": [],
    "optional": [],
    "js": "index.js",
    "css": "style.css",
    "author": "Cohee#1207",
    "version": "1.0.0",
    "homePage": "https://github.com/SillyTavern/SillyTavern"
}
public/scripts/extensions/hypebot/settings.html (new file, 18 lines)
@@ -0,0 +1,18 @@
<div class="hypebot_settings">
    <div class="inline-drawer">
        <div class="inline-drawer-toggle inline-drawer-header">
            <b>HypeBot</b>
            <div class="inline-drawer-icon fa-solid fa-circle-chevron-down down"></div>
        </div>
        <div class="inline-drawer-content">
            <div>Show personalized suggestions based on your recent chats using the NovelAI's HypeBot engine.</div>
            <small><i>Hint: Save an API key in the NovelAI API settings to use it here.</i></small>
            <label class="checkbox_label" for="hypebot_enabled">
                <input id="hypebot_enabled" type="checkbox" class="checkbox">
                Enabled
            </label>
            <label>Name:</label>
            <input id="hypebot_name" type="text" class="text_pole" placeholder="Goose">
        </div>
    </div>
</div>
public/scripts/extensions/hypebot/style.css (new file, 17 lines)
@@ -0,0 +1,17 @@
#hypeBotBar {
    width: 100%;
    max-width: 100%;
    padding: 0.5em;
    white-space: normal;
    font-size: calc(var(--mainFontSize) * 0.85);
    order: 20;
}

.hypebot_nokey {
    text-align: center;
    font-style: italic;
}

.hypebot_name {
    font-weight: 600;
}
@@ -12,6 +12,7 @@
    display: none;
    max-width: 100%;
    overflow-x: auto;
+    order: 10;
}

#quickReplies {
@@ -117,12 +117,18 @@ async function generateHorde(prompt, params, signal) {
    });

    if (!response.ok) {
-        const error = await response.json();
-        callPopup(error.message, 'text');
-        throw new Error('Horde generation failed: ' + error.message);
+        toastr.error(response.statusText, 'Horde generation failed');
+        throw new Error(`Horde generation failed: ${response.statusText}`);
    }

    const responseJson = await response.json();
+
+    if (responseJson.error) {
+        const reason = responseJson.error?.message || 'Unknown error';
+        toastr.error(reason, 'Horde generation failed');
+        throw new Error(`Horde generation failed: ${reason}`);
+    }
+
    const task_id = responseJson.id;
    let queue_position_first = null;
    console.log(`Horde task id = ${task_id}`);
@@ -453,7 +453,7 @@ export function getNovelGenerationData(finalPrompt, this_settings, this_amount_g
}

// Check if the prefix needs to be overriden to use instruct mode
-function selectPrefix(selected_prefix, finalPromt) {
+function selectPrefix(selected_prefix, finalPrompt) {
    let useInstruct = false;
    const clio = nai_settings.model_novel.includes('clio');
    const kayra = nai_settings.model_novel.includes('kayra');
@@ -461,7 +461,7 @@ function selectPrefix(selected_prefix, finalPromt) {

    if (isNewModel) {
        // NovelAI claims they scan backwards 1000 characters (not tokens!) to look for instruct brackets. That's really short.
-        const tail = finalPromt.slice(-1500);
+        const tail = finalPrompt.slice(-1500);
        useInstruct = tail.includes("}");
        return useInstruct ? "special_instruct" : selected_prefix;
    }
@@ -558,6 +558,10 @@ function createLogitBiasListItem(entry) {
    $('.novelai_logit_bias_list').prepend(template);
}

+/**
+ * Calculates logit bias for Novel AI
+ * @returns {object[]} Array of logit bias objects
+ */
function calculateLogitBias() {
    const bias_preset = nai_settings.logit_bias;

@@ -569,12 +573,62 @@ function calculateLogitBias() {
    const kayra = nai_settings.model_novel.includes('kayra');
    const tokenizerType = kayra ? tokenizers.NERD2 : (clio ? tokenizers.NERD : tokenizers.NONE);

-    return bias_preset.filter(b => b.text?.length > 0).map(bias => ({
-        bias: bias.value,
-        ensure_sequence_finish: false,
-        generate_once: false,
-        sequence: getTextTokens(tokenizerType, bias.text)
-    }));
+    /**
+     * Creates a bias object for Novel AI
+     * @param {number} bias Bias value
+     * @param {number[]} sequence Sequence of token ids
+     */
+    function getBiasObject(bias, sequence) {
+        return {
+            bias: bias,
+            ensure_sequence_finish: false,
+            generate_once: false,
+            sequence: sequence
+        };
+    }
+
+    const result = [];
+
+    for (const entry of bias_preset) {
+        if (entry.text?.length > 0) {
+            const text = entry.text.trim();
+
+            // Skip empty lines
+            if (text.length === 0) {
+                continue;
+            }
+
+            // Verbatim text
+            if (text.startsWith('{') && text.endsWith('}')) {
+                const tokens = getTextTokens(tokenizerType, text.slice(1, -1));
+                result.push(getBiasObject(entry.value, tokens));
+            }
+
+            // Raw token ids, JSON serialized
+            else if (text.startsWith('[') && text.endsWith(']')) {
+                try {
+                    const tokens = JSON.parse(text);
+
+                    if (Array.isArray(tokens) && tokens.every(t => Number.isInteger(t))) {
+                        result.push(getBiasObject(entry.value, tokens));
+                    } else {
+                        throw new Error('Not an array of integers');
+                    }
+                } catch (err) {
+                    console.log(`Failed to parse logit bias token list: ${text}`, err);
+                }
+            }

+            // Text with a leading space
+            else {
+                const biasText = ` ${text}`;
+                const tokens = getTextTokens(tokenizerType, biasText);
+                result.push(getBiasObject(entry.value, tokens));
+            }
+        }
+    }
+
+    return result;
}

/**
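For reference, the rewritten parser above distinguishes three entry formats. A hypothetical preset illustrating each (the texts and values are invented for the example, not taken from the commit):

// Hypothetical logit bias entries and how the new loop treats them:
const example_preset = [
    { text: '{Hello}', value: -2 },  // verbatim: tokenizes "Hello" with no leading space
    { text: '[1, 2, 3]', value: 5 }, // raw token ids, parsed with JSON.parse and validated as integers
    { text: 'world', value: 1 },     // plain text: tokenized as " world" with a leading space added
];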
@@ -176,7 +176,7 @@ const default_settings = {
    ai21_model: 'j2-ultra',
    windowai_model: '',
    openrouter_model: openrouter_website_model,
-    openrouter_use_fallback: true,
+    openrouter_use_fallback: false,
    jailbreak_system: false,
    reverse_proxy: '',
    legacy_streaming: false,
@@ -220,7 +220,7 @@ const oai_settings = {
    ai21_model: 'j2-ultra',
    windowai_model: '',
    openrouter_model: openrouter_website_model,
-    openrouter_use_fallback: true,
+    openrouter_use_fallback: false,
    jailbreak_system: false,
    reverse_proxy: '',
    legacy_streaming: false,
@@ -1,7 +1,6 @@
import { characters, main_api, nai_settings, online_status, this_chid } from "../script.js";
import { power_user } from "./power-user.js";
import { encode } from "../lib/gpt-2-3-tokenizer/mod.js";
-import { GPT3BrowserTokenizer } from "../lib/gpt-3-tokenizer/gpt3-tokenizer.js";
import { chat_completion_sources, oai_settings } from "./openai.js";
import { groups, selected_group } from "./group-chats.js";
import { getStringHash } from "./utils.js";
@@ -12,7 +11,7 @@ const TOKENIZER_WARNING_KEY = 'tokenizationWarningShown';

export const tokenizers = {
    NONE: 0,
-    GPT3: 1,
+    GPT2: 1,
    CLASSIC: 2,
    LLAMA: 3,
    NERD: 4,
@@ -22,7 +21,6 @@ export const tokenizers = {
};

const objectStore = new localforage.createInstance({ name: "SillyTavern_ChatCompletions" });
-const gpt3 = new GPT3BrowserTokenizer({ type: 'gpt3' });

let tokenCache = {};

@@ -93,6 +91,35 @@ function getTokenizerBestMatch() {
    return tokenizers.NONE;
}

+/**
+ * Calls the underlying tokenizer model to the token count for a string.
+ * @param {number} type Tokenizer type.
+ * @param {string} str String to tokenize.
+ * @param {number} padding Number of padding tokens.
+ * @returns {number} Token count.
+ */
+function callTokenizer(type, str, padding) {
+    switch (type) {
+        case tokenizers.NONE:
+            return guesstimate(str) + padding;
+        case tokenizers.GPT2:
+            return countTokensRemote('/tokenize_gpt2', str, padding);
+        case tokenizers.CLASSIC:
+            return encode(str).length + padding;
+        case tokenizers.LLAMA:
+            return countTokensRemote('/tokenize_llama', str, padding);
+        case tokenizers.NERD:
+            return countTokensRemote('/tokenize_nerdstash', str, padding);
+        case tokenizers.NERD2:
+            return countTokensRemote('/tokenize_nerdstash_v2', str, padding);
+        case tokenizers.API:
+            return countTokensRemote('/tokenize_via_api', str, padding);
+        default:
+            console.warn("Unknown tokenizer type", type);
+            return callTokenizer(tokenizers.NONE, str, padding);
+    }
+}
+

/**
 * Gets the token count for a string using the current model tokenizer.
 * @param {string} str String to tokenize
@@ -100,33 +127,6 @@ function getTokenizerBestMatch() {
 * @returns {number} Token count.
 */
export function getTokenCount(str, padding = undefined) {
-    /**
-     * Calculates the token count for a string.
-     * @param {number} [type] Tokenizer type.
-     * @returns {number} Token count.
-     */
-    function calculate(type) {
-        switch (type) {
-            case tokenizers.NONE:
-                return guesstimate(str) + padding;
-            case tokenizers.GPT3:
-                return gpt3.encode(str).bpe.length + padding;
-            case tokenizers.CLASSIC:
-                return encode(str).length + padding;
-            case tokenizers.LLAMA:
-                return countTokensRemote('/tokenize_llama', str, padding);
-            case tokenizers.NERD:
-                return countTokensRemote('/tokenize_nerdstash', str, padding);
-            case tokenizers.NERD2:
-                return countTokensRemote('/tokenize_nerdstash_v2', str, padding);
-            case tokenizers.API:
-                return countTokensRemote('/tokenize_via_api', str, padding);
-            default:
-                console.warn("Unknown tokenizer type", type);
-                return calculate(tokenizers.NONE);
-        }
-    }
-
    if (typeof str !== 'string' || !str?.length) {
        return 0;
    }
@@ -159,7 +159,7 @@ export function getTokenCount(str, padding = undefined) {
        return cacheObject[cacheKey];
    }

-    const result = calculate(tokenizerType);
+    const result = callTokenizer(tokenizerType, str, padding);

    if (isNaN(result)) {
        console.warn("Token count calculation returned NaN");
@@ -350,6 +350,12 @@ function countTokensRemote(endpoint, str, padding) {
    return tokenCount + padding;
}

+/**
+ * Calls the underlying tokenizer model to encode a string to tokens.
+ * @param {string} endpoint API endpoint.
+ * @param {string} str String to tokenize.
+ * @returns {number[]} Array of token ids.
+ */
function getTextTokensRemote(endpoint, str) {
    let ids = [];
    jQuery.ajax({
@@ -366,8 +372,37 @@ function getTextTokensRemote(endpoint, str) {
    return ids;
}

+/**
+ * Calls the underlying tokenizer model to decode token ids to text.
+ * @param {string} endpoint API endpoint.
+ * @param {number[]} ids Array of token ids
+ */
+function decodeTextTokensRemote(endpoint, ids) {
+    let text = '';
+    jQuery.ajax({
+        async: false,
+        type: 'POST',
+        url: endpoint,
+        data: JSON.stringify({ ids: ids }),
+        dataType: "json",
+        contentType: "application/json",
+        success: function (data) {
+            text = data.text;
+        }
+    });
+    return text;
+}
+
+/**
+ * Encodes a string to tokens using the remote server API.
+ * @param {number} tokenizerType Tokenizer type.
+ * @param {string} str String to tokenize.
+ * @returns {number[]} Array of token ids.
+ */
export function getTextTokens(tokenizerType, str) {
    switch (tokenizerType) {
+        case tokenizers.GPT2:
+            return getTextTokensRemote('/tokenize_gpt2', str);
        case tokenizers.LLAMA:
            return getTextTokensRemote('/tokenize_llama', str);
        case tokenizers.NERD:
@@ -380,6 +415,27 @@ export function getTextTokens(tokenizerType, str) {
    }
}

+/**
+ * Decodes token ids to text using the remote server API.
+ * @param {any} tokenizerType Tokenizer type.
+ * @param {number[]} ids Array of token ids
+ */
+export function decodeTextTokens(tokenizerType, ids) {
+    switch (tokenizerType) {
+        case tokenizers.GPT2:
+            return decodeTextTokensRemote('/decode_gpt2', ids);
+        case tokenizers.LLAMA:
+            return decodeTextTokensRemote('/decode_llama', ids);
+        case tokenizers.NERD:
+            return decodeTextTokensRemote('/decode_nerdstash', ids);
+        case tokenizers.NERD2:
+            return decodeTextTokensRemote('/decode_nerdstash_v2', ids);
+        default:
+            console.warn("Calling decodeTextTokens with unsupported tokenizer type", tokenizerType);
+            return '';
+    }
+}
+
jQuery(async () => {
    await loadTokenCache();
});
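Taken together, the two new exports round-trip text through the server-side tokenizer endpoints, which is how the HypeBot extension uses them. A minimal usage sketch (the relative import path is illustrative and depends on the caller's location):

// Illustrative usage of the new exports; assumes a caller that sits next to tokenizers.js.
import { getTextTokens, decodeTextTokens, tokenizers } from "./tokenizers.js";

const ids = getTextTokens(tokenizers.GPT2, "Hello world");  // string -> GPT-2 token ids via /tokenize_gpt2
const text = decodeTextTokens(tokenizers.GPT2, ids);        // token ids -> string via /decode_gpt2
console.log(text);                                          // "Hello world", assuming the server round-trips cleanly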
@@ -45,6 +45,17 @@ export function getSortableDelay() {
    return isMobile() ? 750 : 50;
}

+export async function bufferToBase64(buffer) {
+    // use a FileReader to generate a base64 data URI:
+    const base64url = await new Promise(resolve => {
+        const reader = new FileReader()
+        reader.onload = () => resolve(reader.result)
+        reader.readAsDataURL(new Blob([buffer]))
+    });
+    // remove the `data:...;base64,` part from the start
+    return base64url.slice(base64url.indexOf(',') + 1);
+}
+
/**
 * Rearranges an array in a random order.
 * @param {any[]} array The array to shuffle.