First draft of the macro lexer

This commit is contained in:
Wolfsblvt
2024-07-16 01:24:03 +02:00
parent 7a36901bfc
commit f63b875b76
4 changed files with 158 additions and 0 deletions

View File

@@ -242,6 +242,8 @@ import { INTERACTABLE_CONTROL_CLASS, initKeyboard } from './scripts/keyboard.js'
import { initDynamicStyles } from './scripts/dynamic-styles.js';
import { SlashCommandEnumValue, enumTypes } from './scripts/slash-commands/SlashCommandEnumValue.js';
import { enumIcons } from './scripts/slash-commands/SlashCommandCommonEnumsProvider.js';
import { MacroLexer } from './scripts/macros/MacroLexer.js';
import { MacroEngine } from './scripts/macros/MacroEngine.js';
//exporting functions and vars for mods
export {
@@ -7910,6 +7912,11 @@ window['SillyTavern'].getContext = function () {
Popup: Popup,
POPUP_TYPE: POPUP_TYPE,
POPUP_RESULT: POPUP_RESULT,
macros: {
MacroLexer,
MacrosParser,
MacroEngine,
},
};
};

View File

@@ -0,0 +1,39 @@
import { MacroLexer } from './MacroLexer.js';
import { MacroParser } from './MacroParser.js';
/**
 * Orchestrates macro processing: lexes input via MacroLexer and feeds the
 * resulting tokens to the shared MacroParser instance.
 */
class MacroEngine {
    /** @type {MacroEngine} */ static #instance;

    /**
     * Lazily-created singleton, mirroring the accessor pattern of
     * MacroLexer/MacroParser so nothing is instantiated at module-evaluation
     * time before first use.
     * @type {MacroEngine}
     */
    static get instance() { return MacroEngine.#instance ?? (MacroEngine.#instance = new MacroEngine()); }

    constructor() {
        // Shared parser singleton; its token stream is replaced on each call below.
        this.parser = MacroParser.instance;
    }

    /**
     * Tokenizes a whole document and primes the parser with the tokens.
     * TODO: CST construction is not wired up yet; currently returns undefined.
     * @param {string} input - Raw document text.
     */
    parseDocument(input) {
        const lexingResult = MacroLexer.tokenize(input);
        this.parser.input = lexingResult.tokens;
        // TODO: const cst = this.parser.document();
        // TODO: return cst;
    }

    /**
     * Tokenizes a single macro expression and primes the parser.
     * TODO: parsing and execution are not wired up yet; currently returns undefined.
     * @param {string} input - Raw macro text.
     */
    evaluate(input) {
        const lexingResult = MacroLexer.tokenize(input);
        this.parser.input = lexingResult.tokens;
        // TODO: const cst = this.parser.macro();
        // TODO: if (this.parser.errors.length > 0) {
        // TODO:     throw new Error('Parsing errors detected');
        // TODO: }
        // TODO: return this.execute(cst);
    }

    /**
     * Executes a parsed macro CST.
     * @param {object} cstNode - The CST node to execute.
     * @returns {string} Placeholder result until execution logic is implemented.
     */
    execute(cstNode) {
        // Implement execution logic here, traversing the CST and replacing macros with their values
        // For now, we'll just return a placeholder result
        return 'Executed Macro';
    }
}

const macroEngineInstance = MacroEngine.instance;
export { MacroEngine, macroEngineInstance };

View File

@@ -0,0 +1,86 @@
import { createToken, Lexer } from '../../lib/chevrotain.min.mjs';
/**
 * Lexer mode names for the multi-mode macro lexer.
 * Frozen so the shared enum cannot be mutated at runtime.
 * @enum {string}
 * @readonly
 */
const MODES = Object.freeze({
    macro: 'macro_mode',
    text: 'text_mode',
});
/** @readonly */
const tokens = {
    // General capture-all plaintext without macros.
    // NOTE: [\s\S] is used instead of '.' because '.' never matches '\n';
    // with a '.'-based pattern any input containing a newline could not be
    // lexed at all, even though line_breaks: true was declared.
    Plaintext: createToken({ name: 'Plaintext', pattern: /([\s\S]+?)(?=\{\{)|([\s\S]+)/, line_breaks: true }), // Match everything up till opening brackets. Or to the end.
    // The relevant blocks to start/end a macro
    MacroStart: createToken({ name: 'MacroStart', pattern: /\{\{/, push_mode: MODES.macro }),
    MacroEnd: createToken({ name: 'MacroEnd', pattern: /\}\}/, pop_mode: true }),
    // All tokens that can be captured inside a macro
    DoubleColon: createToken({ name: 'DoubleColon', pattern: /::/ }),
    Colon: createToken({ name: 'Colon', pattern: /:/ }),
    Equals: createToken({ name: 'Equals', pattern: /=/ }),
    Quote: createToken({ name: 'Quote', pattern: /"/ }),
    Identifier: createToken({ name: 'Identifier', pattern: /[a-zA-Z_]\w*/ }),
    WhiteSpace: createToken({
        name: 'WhiteSpace',
        pattern: /\s+/,
        group: Lexer.SKIPPED,
    }),
    // TODO: Capture-all rest for now, that is not the macro end or opening of a new macro. Might be replaced later down the line.
    // Lazy '+?' so the match stops at the FIRST '}}' or '{{'; a greedy '+'
    // before the lookahead could run past a '}}' to a later '{{' and swallow
    // the macro end. [\s\S] again so newlines inside a macro lex cleanly.
    Text: createToken({ name: 'Text', pattern: /[\s\S]+?(?=\}\}|\{\{)/, line_breaks: true }),
};
/**
 * The singleton instance of the MacroLexer.
 *
 * @type {MacroLexer}
 */
let instance;
// Live ES-module binding: `instance` is assigned at the bottom of this file,
// after the class definition, so importers observe the constructed singleton.
export { instance as MacroLexer };
/**
 * Multi-mode lexer for {{macro}} syntax, built on chevrotain's Lexer.
 * Starts in text mode; '{{' pushes macro mode and '}}' pops back (see `def`).
 */
class MacroLexer extends Lexer {
    /** @type {MacroLexer} */ static #instance;
    // Lazily-created singleton; use this getter rather than the private constructor.
    /** @type {MacroLexer} */ static get instance() { return MacroLexer.#instance ?? (MacroLexer.#instance = new MacroLexer()); }

    // Define the tokens
    /** @readonly */ static tokens = tokens;

    // Lexer definition: one token array per mode. NOTE(review): chevrotain
    // tries tokens in array order, so keep the capture-all Plaintext/Text
    // tokens last in their modes — reordering changes lexing behavior.
    /** @readonly */ static def = {
        modes: {
            [MODES.text]: [
                tokens.MacroStart,
                tokens.Plaintext,
            ],
            [MODES.macro]: [
                tokens.MacroStart,
                tokens.MacroEnd,
                tokens.DoubleColon,
                tokens.Colon,
                tokens.Equals,
                tokens.Quote,
                tokens.Identifier,
                tokens.WhiteSpace,
                tokens.Text,
            ],
        },
        defaultMode: MODES.text,
    };

    // Instance-level aliases so consumers holding the singleton can reach the
    // token and mode definitions without referencing the class itself.
    /** @readonly */ tokens = tokens;
    /** @readonly */ def = MacroLexer.def;

    /** @private */
    constructor() {
        super(MacroLexer.def);
    }

    /**
     * Debug helper: tokenizes `input` and returns the lexer errors, groups,
     * and tokens, with a human-readable `type` name added to each token.
     * @param {string} input - Text to tokenize.
     * @returns {{errors: object[], groups: object, tokens: object[]}}
     */
    test(input) {
        const result = this.tokenize(input);
        return {
            errors: result.errors,
            groups: result.groups,
            tokens: result.tokens.map(({ tokenType, ...rest }) => ({ type: tokenType.name, ...rest, tokenType: tokenType })),
        };
    }
}
// Populate the exported live binding (see the `export` near the top of the file).
instance = MacroLexer.instance;

View File

@@ -0,0 +1,26 @@
import { CstParser } from './lib.js';
import { MacroLexer } from './MacroLexer.js';
/**
 * The singleton instance of the MacroParser.
 *
 * @type {MacroParser}
 */
let instance;
// Live ES-module binding: `instance` is assigned after the class definition
// below, so importers observe the constructed singleton.
export { instance as MacroParser };

/**
 * CST parser for the macro grammar. No grammar rules are defined yet.
 * NOTE(review): CstParser is imported from './lib.js' while MacroLexer pulls
 * chevrotain from '../../lib/chevrotain.min.mjs' — confirm both resolve to
 * the same chevrotain build.
 */
class MacroParser extends CstParser {
    /** @type {MacroParser} */ static #instance;
    // Lazily-created singleton; use this getter rather than the private constructor.
    /** @type {MacroParser} */ static get instance() { return MacroParser.#instance ?? (MacroParser.#instance = new MacroParser()); }

    /** @private */
    constructor() {
        // MacroLexer.def is a multi-mode lexer definition; presumably chevrotain
        // accepts it as the parser's token vocabulary — TODO confirm against docs.
        super(MacroLexer.def);
        // const $ = this;
        // Must be called once, after all grammar rules are defined (none yet).
        this.performSelfAnalysis();
    }
}
// Populate the exported live binding declared above.
instance = MacroParser.instance;