Enable no-case-declarations lint
parent
a665a4897d
commit
27e63a7a77
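
For reference, no-case-declarations flags let/const/function/class declarations placed directly inside a case clause: the binding is scoped to the entire switch block, so it is visible to (and can collide with) the other cases. The fix applied throughout this commit is to wrap each affected case body in its own braces. A minimal sketch of the before/after pattern (the function and variable names are illustrative only, not taken from the changed files):

// Before: ESLint reports no-case-declarations, because `label` is hoisted to
// the whole switch block rather than being scoped to its case.
function describeBefore(value) {
    switch (value) {
        case 'png':
            const label = 'image'; // lint error here
            return label;
        default:
            return 'unknown';
    }
}

// After: braces give the declaration its own block scope; this is the pattern
// applied to every case touched in the hunks below.
function describeAfter(value) {
    switch (value) {
        case 'png': {
            const label = 'image'; // scoped to this case only
            return label;
        }
        default:
            return 'unknown';
    }
}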
@@ -61,7 +61,6 @@ module.exports = {
         'no-prototype-builtins': 'off',
         'no-extra-boolean-cast': 'off',
         'require-yield': 'off',
-        'no-case-declarations': 'off',
         'no-constant-condition': ['error', {checkLoops: false}]
     }
 };

@@ -2755,10 +2755,11 @@ export async function generateRaw(prompt, api, instructOverride) {
             generateData = getKoboldGenerationData(prompt, koboldSettings, amount_gen, max_context, isHorde, 'quiet');
             }
             break;
-        case 'novel':
+        case 'novel': {
             const novelSettings = novelai_settings[novelai_setting_names[nai_settings.preset_settings_novel]];
             generateData = getNovelGenerationData(prompt, novelSettings, amount_gen, false, false, null);
             break;
+        }
         case 'textgenerationwebui':
             generateData = getTextGenGenerationData(prompt, amount_gen, false, false, null);
             break;

@@ -236,14 +236,16 @@ class PresetManager {
                 return nai_settings;
             case "textgenerationwebui":
                 return textgenerationwebui_settings;
-            case "context":
+            case "context": {
                 const context_preset = getContextSettings();
                 context_preset['name'] = name || power_user.context.preset;
                 return context_preset;
-            case "instruct":
+            }
+            case "instruct": {
                 const instruct_preset = structuredClone(power_user.instruct);
                 instruct_preset['name'] = name || power_user.instruct.preset;
                 return instruct_preset;
+            }
             default:
                 console.warn(`Unknown API ID ${apiId}`);
                 return {};

@@ -507,9 +507,10 @@ export function getTextTokens(tokenizerType, str) {
             return getTextTokensRemote('/api/tokenize/mistral', str);
         case tokenizers.YI:
             return getTextTokensRemote('/api/tokenize/yi', str);
-        case tokenizers.OPENAI:
+        case tokenizers.OPENAI: {
             const model = getTokenizerModel();
             return getTextTokensRemote('/api/tokenize/openai-encode', str, model);
+        }
         case tokenizers.API:
             return getTextTokensRemote('/tokenize_via_api', str);
         default:

@@ -537,9 +538,10 @@ export function decodeTextTokens(tokenizerType, ids) {
             return decodeTextTokensRemote('/api/decode/mistral', ids);
         case tokenizers.YI:
             return decodeTextTokensRemote('/api/decode/yi', ids);
-        case tokenizers.OPENAI:
+        case tokenizers.OPENAI: {
             const model = getTokenizerModel();
             return decodeTextTokensRemote('/api/decode/openai', ids, model);
+        }
         default:
             console.warn("Calling decodeTextTokens with unsupported tokenizer type", tokenizerType);
             return '';

@@ -1898,7 +1898,7 @@ async function checkWorldInfo(chat, maxContext) {
             case world_info_position.ANBottom:
                 ANBottomEntries.unshift(entry.content);
                 break;
-            case world_info_position.atDepth:
+            case world_info_position.atDepth: {
                 const existingDepthIndex = WIDepthEntries.findIndex((e) => e.depth === entry.depth ?? DEFAULT_DEPTH);
                 if (existingDepthIndex !== -1) {
                     WIDepthEntries[existingDepthIndex].entries.unshift(entry.content);

@@ -1909,6 +1909,7 @@ async function checkWorldInfo(chat, maxContext) {
                     });
                 }
                 break;
+            }
             default:
                 break;
         }

@@ -7,7 +7,7 @@ const parse = async (cardUrl, format) => {
     let fileFormat = format === undefined ? 'png' : format;

     switch (fileFormat) {
-        case 'png':
+        case 'png': {
             const buffer = fs.readFileSync(cardUrl);
             const chunks = extract(buffer);

@@ -23,6 +23,7 @@ const parse = async (cardUrl, format) => {
             }

             return Buffer.from(textChunks[0].text, 'base64').toString('utf8');
+        }
         default:
             break;
     }