Require single quotes

valadaptive
2023-12-02 13:04:51 -05:00
parent a06f1e8ad6
commit a37f874e38
76 changed files with 4135 additions and 4134 deletions
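The commit message and the near one-to-one addition/deletion counts point to a mechanical, linter-driven rewrite of string quoting across the codebase. The project's actual lint configuration is not shown in this excerpt, so the following is only a minimal sketch of how such a change is typically enforced with ESLint's quotes rule and auto-fixer; the .eslintrc.js file name and option values are assumptions.

// .eslintrc.js (hypothetical excerpt; file name and surrounding options assumed)
module.exports = {
    rules: {
        // Flag double-quoted string literals; `eslint --fix` rewrites them to
        // single quotes and escapes embedded apostrophes, as in the converted
        // toastr.warning() message in the diff below.
        'quotes': ['error', 'single'],
    },
};

Running something like npx eslint --fix . against such a rule converts every double-quoted literal in one pass, which is consistent with a 76-file commit whose additions and deletions mirror each other almost exactly.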


@@ -1,10 +1,10 @@
-import { characters, getAPIServerUrl, main_api, nai_settings, online_status, this_chid } from "../script.js";
-import { power_user, registerDebugFunction } from "./power-user.js";
-import { chat_completion_sources, model_list, oai_settings } from "./openai.js";
-import { groups, selected_group } from "./group-chats.js";
-import { getStringHash } from "./utils.js";
-import { kai_flags } from "./kai-settings.js";
-import { isKoboldCpp, isMancer, isOoba, isTabby, textgenerationwebui_settings } from "./textgen-settings.js";
+import { characters, getAPIServerUrl, main_api, nai_settings, online_status, this_chid } from '../script.js';
+import { power_user, registerDebugFunction } from './power-user.js';
+import { chat_completion_sources, model_list, oai_settings } from './openai.js';
+import { groups, selected_group } from './group-chats.js';
+import { getStringHash } from './utils.js';
+import { kai_flags } from './kai-settings.js';
+import { isKoboldCpp, isMancer, isOoba, isTabby, textgenerationwebui_settings } from './textgen-settings.js';
 export const CHARACTERS_PER_TOKEN_RATIO = 3.35;
 const TOKENIZER_WARNING_KEY = 'tokenizationWarningShown';
@@ -31,7 +31,7 @@ export const SENTENCEPIECE_TOKENIZERS = [
 //tokenizers.NERD2,
 ];
-const objectStore = new localforage.createInstance({ name: "SillyTavern_ChatCompletions" });
+const objectStore = new localforage.createInstance({ name: 'SillyTavern_ChatCompletions' });
 let tokenCache = {};
@@ -84,7 +84,7 @@ export function getFriendlyTokenizerName(forApi) {
 forApi = main_api;
 }
-const tokenizerOption = $("#tokenizer").find(':selected');
+const tokenizerOption = $('#tokenizer').find(':selected');
 let tokenizerId = Number(tokenizerOption.val());
 let tokenizerName = tokenizerOption.text();
@@ -173,7 +173,7 @@ function callTokenizer(type, str, padding) {
 case tokenizers.API:
 return countTokensRemote('/tokenize_via_api', str, padding);
 default:
-console.warn("Unknown tokenizer type", type);
+console.warn('Unknown tokenizer type', type);
 return callTokenizer(tokenizers.NONE, str, padding);
 }
 }
@@ -220,7 +220,7 @@ export function getTokenCount(str, padding = undefined) {
 const result = callTokenizer(tokenizerType, str, padding);
 if (isNaN(result)) {
-console.warn("Token count calculation returned NaN");
+console.warn('Token count calculation returned NaN');
 return 0;
 }
@@ -349,8 +349,8 @@ export function countTokensOpenAI(messages, full = false) {
 type: 'POST', //
 url: shouldTokenizeAI21 ? '/api/tokenize/ai21' : `/api/tokenize/openai?model=${model}`,
 data: JSON.stringify([message]),
-dataType: "json",
-contentType: "application/json",
+dataType: 'json',
+contentType: 'application/json',
 success: function (data) {
 token_count += Number(data.token_count);
 cacheObject[cacheKey] = Number(data.token_count);
@@ -415,19 +415,19 @@ function countTokensRemote(endpoint, str, padding) {
 type: 'POST',
 url: endpoint,
 data: JSON.stringify(getRemoteTokenizationParams(str)),
-dataType: "json",
-contentType: "application/json",
+dataType: 'json',
+contentType: 'application/json',
 success: function (data) {
 if (typeof data.count === 'number') {
 tokenCount = data.count;
 } else {
 tokenCount = guesstimate(str);
-console.error("Error counting tokens");
+console.error('Error counting tokens');
 if (!sessionStorage.getItem(TOKENIZER_WARNING_KEY)) {
 toastr.warning(
-"Your selected API doesn't support the tokenization endpoint. Using estimated counts.",
-"Error counting tokens",
+'Your selected API doesn\'t support the tokenization endpoint. Using estimated counts.',
+'Error counting tokens',
 { timeOut: 10000, preventDuplicates: true },
 );
@@ -458,8 +458,8 @@ function getTextTokensRemote(endpoint, str, model = '') {
 type: 'POST',
 url: endpoint,
 data: JSON.stringify(getRemoteTokenizationParams(str)),
-dataType: "json",
-contentType: "application/json",
+dataType: 'json',
+contentType: 'application/json',
 success: function (data) {
 ids = data.ids;
@@ -488,8 +488,8 @@ function decodeTextTokensRemote(endpoint, ids, model = '') {
 type: 'POST',
 url: endpoint,
 data: JSON.stringify({ ids: ids }),
-dataType: "json",
-contentType: "application/json",
+dataType: 'json',
+contentType: 'application/json',
 success: function (data) {
 text = data.text;
 }
@@ -524,7 +524,7 @@ export function getTextTokens(tokenizerType, str) {
 case tokenizers.API:
 return getTextTokensRemote('/tokenize_via_api', str);
 default:
-console.warn("Calling getTextTokens with unsupported tokenizer type", tokenizerType);
+console.warn('Calling getTextTokens with unsupported tokenizer type', tokenizerType);
 return [];
 }
 }
@@ -553,7 +553,7 @@ export function decodeTextTokens(tokenizerType, ids) {
 return decodeTextTokensRemote('/api/decode/openai', ids, model);
 }
 default:
-console.warn("Calling decodeTextTokens with unsupported tokenizer type", tokenizerType);
+console.warn('Calling decodeTextTokens with unsupported tokenizer type', tokenizerType);
 return '';
 }
 }