Add lexing for output modifiers
@@ -8,6 +8,8 @@ const modes = {
    macro_def: 'macro_def_mode',
    macro_identifier_end: 'macro_identifier_end',
    macro_args: 'macro_args_mode',
    macro_filter_modifer: 'macro_filter_modifer_mode',
    macro_filter_modifier_end: 'macro_filter_modifier_end',
};

/** @readonly */
@@ -36,6 +38,13 @@ const Tokens = {
        Quote: createToken({ name: 'Quote', pattern: /"/ }),
    },

    Filter: {
        Pipe: createToken({ name: 'Pipe', pattern: /(?<!\\)\|/ }),
        Identifier: createToken({ name: 'FilterIdentifier', pattern: /[a-zA-Z][\w-]*/ }),
        // At the end of an identifier there has to be whitespace, or it must be directly followed by a colon/double-colon separator, an output modifier, or the closing braces
        EndOfIdentifier: createToken({ name: 'FilterEndOfIdentifier', pattern: /(?:\s+|(?=:{1,2})|(?=[|}]))/, group: Lexer.SKIPPED }),
    },

    // All tokens that can be captured inside a macro
    Identifier: createToken({ name: 'Identifier', pattern: /[a-zA-Z][\w-]*/ }),
    WhiteSpace: createToken({ name: 'WhiteSpace', pattern: /\s+/, group: Lexer.SKIPPED }),
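A quick illustration of the two Filter patterns above, using plain JavaScript regexes (illustrative only; the lexer applies them anchored at the current scan position):

const pipe = /(?<!\\)\|/;
pipe.test('a | b');    // true  - an unescaped pipe starts an output modifier
pipe.test('a \\| b');  // false - an escaped pipe is not treated as a modifier separator

const endOfIdentifier = /(?:\s+|(?=:{1,2})|(?=[|}]))/;
'upper }}'.match(endOfIdentifier)[0]; // ' ' - trailing whitespace is consumed (and then skipped via Lexer.SKIPPED)
'::arg'.match(endOfIdentifier)[0];    // ''  - zero-width match right before a colon separator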
@@ -74,16 +83,17 @@ const Def = {
            enter(Tokens.Macro.Identifier, modes.macro_identifier_end),
        ],
        [modes.macro_identifier_end]: [
            // Valid options after a macro identifier: whitespace, colon/double-colon (captured), macro end braces, or an output modifier pipe.
            exits(Tokens.Macro.BeforeEnd, modes.macro_identifier_end),

            // After a macro identifier, there are only a few valid options. We check those before we try to find optional macro args.
            // It must either be followed by whitespace or a colon/double-colon, which get captured, or by the macro end braces or an output modifier pipe.
            enter(Tokens.Macro.EndOfIdentifier, modes.macro_args, { andExits: modes.macro_identifier_end }),
        ],
        [modes.macro_args]: [
            // Macro args allow nested macros
            enter(Tokens.Macro.Start, modes.macro_def),

            // If a pipe appears anywhere while lexing args, we lex it as an output modifier and then continue by lexing its args
            enter(Tokens.Filter.Pipe, modes.macro_filter_modifer),

            using(Tokens.Args.DoubleColon),
            using(Tokens.Args.Colon),
            using(Tokens.Args.Equals),
@@ -98,6 +108,16 @@ const Def = {
            // Args are optional and we don't know how long they are, so exit the mode to be able to capture the actual macro end
            exits(Tokens.ModePopper, modes.macro_args),
        ],
        [modes.macro_filter_modifer]: [
            using(Tokens.WhiteSpace),

            enter(Tokens.Filter.Identifier, modes.macro_filter_modifier_end, { andExits: modes.macro_filter_modifer }),
        ],
        [modes.macro_filter_modifier_end]: [
            // Valid options after a filter identifier: whitespace, colon/double-colon (captured), macro end braces, or an output modifier pipe.
            exits(Tokens.Macro.BeforeEnd, modes.macro_identifier_end),
            exits(Tokens.Filter.EndOfIdentifier, modes.macro_filter_modifer),
        ],
    },
    defaultMode: modes.plaintext,
};
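The enter / exits / using helpers and the { andExits } option are project-specific wrappers whose implementations are not part of this diff. As a rough mental model only, the added filter mode can be pictured as plain Chevrotain multi-mode lexing with push_mode / pop_mode; the mode names and token sets in this sketch are simplified assumptions, not the project's actual wiring:

import { createToken, Lexer } from 'chevrotain';

// Simplified stand-ins for the tokens shown in the diff above.
const MacroStart = createToken({ name: 'MacroStart', pattern: /\{\{/, push_mode: 'macro_mode' });
const MacroEnd = createToken({ name: 'MacroEnd', pattern: /\}\}/, pop_mode: true });
const Pipe = createToken({ name: 'Pipe', pattern: /(?<!\\)\|/, push_mode: 'filter_mode' });
const MacroIdentifier = createToken({ name: 'MacroIdentifier', pattern: /[a-zA-Z][\w-]*/ });
const FilterIdentifier = createToken({ name: 'FilterIdentifier', pattern: /[a-zA-Z][\w-]*/ });
const WhiteSpace = createToken({ name: 'WhiteSpace', pattern: /\s+/, group: Lexer.SKIPPED });
const Plaintext = createToken({ name: 'Plaintext', pattern: /[^{]+/ });

const lexer = new Lexer({
    modes: {
        plaintext: [MacroStart, Plaintext],
        macro_mode: [WhiteSpace, MacroEnd, Pipe, MacroIdentifier],
        filter_mode: [WhiteSpace, MacroEnd, Pipe, FilterIdentifier],
    },
    defaultMode: 'plaintext',
});

const { tokens } = lexer.tokenize('{{macro | outputModifier}}');
console.log(tokens.map(t => `${t.tokenType.name}:${t.image}`));
// -> [ 'MacroStart:{{', 'MacroIdentifier:macro', 'Pipe:|', 'FilterIdentifier:outputModifier', 'MacroEnd:}}' ]

In this sketch a second pipe inside filter_mode simply pushes filter_mode again, which mirrors how the chained-modifier cases in the tests below remain lexable.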
@@ -659,6 +659,279 @@ describe('MacroLexer', () => {
        });
    });

    describe('Macro Output Modifiers', () => {
        // {{macro | outputModifier}}
        it('should support output modifier without arguments', async () => {
            const input = '{{macro | outputModifier}}';
            const tokens = await runLexerGetTokens(input);

            const expectedTokens = [
                { type: 'MacroStart', text: '{{' },
                { type: 'MacroIdentifier', text: 'macro' },
                { type: 'Pipe', text: '|' },
                { type: 'FilterIdentifier', text: 'outputModifier' },
                { type: 'MacroEnd', text: '}}' },
            ];

            expect(tokens).toEqual(expectedTokens);
        });
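The runLexerGetTokens and runLexerGetTokensAndErrors helpers are not part of this diff. For reading the expectations it is enough to know that each expected entry corresponds to a Chevrotain token mapped to { type: token.tokenType.name, text: token.image }. A hypothetical minimal helper (the import path and the lexer export are assumptions) could look like:

// Hypothetical sketch - the real helper lives elsewhere in the test utilities.
async function runLexerGetTokens(input) {
    const { MacroLexer } = await import('../src/macros/MacroLexer.js'); // assumed path and export
    const { tokens } = MacroLexer.tokenize(input); // assumes MacroLexer is a Chevrotain Lexer instance
    return tokens.map(t => ({ type: t.tokenType.name, text: t.image }));
}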

        // {{macro | outputModifier arg1=val1 arg2=val2}}
        it('should support output modifier with named arguments', async () => {
            const input = '{{macro | outputModifier arg1=val1 arg2=val2}}';
            const tokens = await runLexerGetTokens(input);

            const expectedTokens = [
                { type: 'MacroStart', text: '{{' },
                { type: 'MacroIdentifier', text: 'macro' },
                { type: 'Pipe', text: '|' },
                { type: 'FilterIdentifier', text: 'outputModifier' },
                { type: 'Identifier', text: 'arg1' },
                { type: 'Equals', text: '=' },
                { type: 'Identifier', text: 'val1' },
                { type: 'Identifier', text: 'arg2' },
                { type: 'Equals', text: '=' },
                { type: 'Identifier', text: 'val2' },
                { type: 'MacroEnd', text: '}}' },
            ];

            expect(tokens).toEqual(expectedTokens);
        });

        // {{macro | outputModifier "unnamed1" "unnamed2"}}
        it('should support output modifier with unnamed arguments', async () => {
            const input = '{{macro | outputModifier "unnamed1" "unnamed2"}}';
            const tokens = await runLexerGetTokens(input);

            const expectedTokens = [
                { type: 'MacroStart', text: '{{' },
                { type: 'MacroIdentifier', text: 'macro' },
                { type: 'Pipe', text: '|' },
                { type: 'FilterIdentifier', text: 'outputModifier' },
                { type: 'Quote', text: '"' },
                { type: 'Identifier', text: 'unnamed1' },
                { type: 'Quote', text: '"' },
                { type: 'Quote', text: '"' },
                { type: 'Identifier', text: 'unnamed2' },
                { type: 'Quote', text: '"' },
                { type: 'MacroEnd', text: '}}' },
            ];

            expect(tokens).toEqual(expectedTokens);
        });

        // {{macro arg1=val1 | outputModifier arg2=val2 "unnamed1"}}
        it('should support macro arguments before output modifier', async () => {
            const input = '{{macro arg1=val1 | outputModifier arg2=val2 "unnamed1"}}';
            const tokens = await runLexerGetTokens(input);

            const expectedTokens = [
                { type: 'MacroStart', text: '{{' },
                { type: 'MacroIdentifier', text: 'macro' },
                { type: 'Identifier', text: 'arg1' },
                { type: 'Equals', text: '=' },
                { type: 'Identifier', text: 'val1' },
                { type: 'Pipe', text: '|' },
                { type: 'FilterIdentifier', text: 'outputModifier' },
                { type: 'Identifier', text: 'arg2' },
                { type: 'Equals', text: '=' },
                { type: 'Identifier', text: 'val2' },
                { type: 'Quote', text: '"' },
                { type: 'Identifier', text: 'unnamed1' },
                { type: 'Quote', text: '"' },
                { type: 'MacroEnd', text: '}}' },
            ];

            expect(tokens).toEqual(expectedTokens);
        });

        // {{macro | outputModifier1 | outputModifier2}}
        it('should support chaining multiple output modifiers', async () => {
            const input = '{{macro | outputModifier1 | outputModifier2}}';
            const tokens = await runLexerGetTokens(input);

            const expectedTokens = [
                { type: 'MacroStart', text: '{{' },
                { type: 'MacroIdentifier', text: 'macro' },
                { type: 'Pipe', text: '|' },
                { type: 'FilterIdentifier', text: 'outputModifier1' },
                { type: 'Pipe', text: '|' },
                { type: 'FilterIdentifier', text: 'outputModifier2' },
                { type: 'MacroEnd', text: '}}' },
            ];

            expect(tokens).toEqual(expectedTokens);
        });

        // {{macro | outputModifier1 arg1=val1 | outputModifier2 arg2=val2}}
        it('should support chaining multiple output modifiers with arguments', async () => {
            const input = '{{macro | outputModifier1 arg1=val1 | outputModifier2 arg2=val2}}';
            const tokens = await runLexerGetTokens(input);

            const expectedTokens = [
                { type: 'MacroStart', text: '{{' },
                { type: 'MacroIdentifier', text: 'macro' },
                { type: 'Pipe', text: '|' },
                { type: 'FilterIdentifier', text: 'outputModifier1' },
                { type: 'Identifier', text: 'arg1' },
                { type: 'Equals', text: '=' },
                { type: 'Identifier', text: 'val1' },
                { type: 'Pipe', text: '|' },
                { type: 'FilterIdentifier', text: 'outputModifier2' },
                { type: 'Identifier', text: 'arg2' },
                { type: 'Equals', text: '=' },
                { type: 'Identifier', text: 'val2' },
                { type: 'MacroEnd', text: '}}' },
            ];

            expect(tokens).toEqual(expectedTokens);
        });

        // {{macro|outputModifier}}
        it('should support output modifiers without whitespace', async () => {
            const input = '{{macro|outputModifier}}';
            const tokens = await runLexerGetTokens(input);

            const expectedTokens = [
                { type: 'MacroStart', text: '{{' },
                { type: 'MacroIdentifier', text: 'macro' },
                { type: 'Pipe', text: '|' },
                { type: 'FilterIdentifier', text: 'outputModifier' },
                { type: 'MacroEnd', text: '}}' },
            ];

            expect(tokens).toEqual(expectedTokens);
        });

        // {{ macro test escaped \| pipe }}
        it('should support escaped pipes, not treating them as output modifiers', async () => {
            const input = '{{ macro test escaped \\| pipe }}';
            const tokens = await runLexerGetTokens(input);

            const expectedTokens = [
                { type: 'MacroStart', text: '{{' },
                { type: 'MacroIdentifier', text: 'macro' },
                { type: 'Identifier', text: 'test' },
                { type: 'Identifier', text: 'escaped' },
                { type: 'Unknown', text: '\\' },
                { type: 'Unknown', text: '|' },
                { type: 'Identifier', text: 'pipe' },
                { type: 'MacroEnd', text: '}}' },
            ];

            expect(tokens).toEqual(expectedTokens);
        });

        // {{|macro}}
        it('[Error] should not capture when starting the macro with a pipe', async () => {
            const input = '{{|macro}}';
            const { tokens, errors } = await runLexerGetTokensAndErrors(input);

            const expectedErrors = [
                { message: 'unexpected character: ->|<- at offset: 2, skipped 1 characters.' },
            ];

            expect(errors).toMatchObject(expectedErrors);

            const expectedTokens = [
                { type: 'MacroStart', text: '{{' },
                { type: 'MacroIdentifier', text: 'macro' },
                { type: 'MacroEnd', text: '}}' },
            ];

            expect(tokens).toEqual(expectedTokens);
        });

        // {{macro | Iam$peci@l}}
        it('[Error] do not allow special characters inside output modifier identifier', async () => {
            const input = '{{macro | Iam$peci@l}}';
            const { tokens, errors } = await runLexerGetTokensAndErrors(input);

            const expectedErrors = [
                { message: 'unexpected character: ->$<- at offset: 13, skipped 7 characters.' },
            ];

            expect(errors).toMatchObject(expectedErrors);

            const expectedTokens = [
                { type: 'MacroStart', text: '{{' },
                { type: 'MacroIdentifier', text: 'macro' },
                { type: 'Pipe', text: '|' },
                { type: 'FilterIdentifier', text: 'Iam' },
                { type: 'MacroEnd', text: '}}' },
            ];

            expect(tokens).toEqual(expectedTokens);
        });

        // {{macro | !cannotBeImportant }}
        it('[Error] do not allow output modifiers to have execution modifiers', async () => {
            const input = '{{macro | !cannotBeImportant }}';
            const { tokens, errors } = await runLexerGetTokensAndErrors(input);

            const expectedErrors = [
                { message: 'unexpected character: ->!<- at offset: 10, skipped 1 characters.' },
            ];

            expect(errors).toMatchObject(expectedErrors);

            const expectedTokens = [
                { type: 'MacroStart', text: '{{' },
                { type: 'MacroIdentifier', text: 'macro' },
                { type: 'Pipe', text: '|' },
                { type: 'FilterIdentifier', text: 'cannotBeImportant' },
                { type: 'MacroEnd', text: '}}' },
            ];

            expect(tokens).toEqual(expectedTokens);
        });

        // {{macro | 2invalidIdentifier}}
        it('[Error] should throw an error for an invalid identifier starting with a number', async () => {
            const input = '{{macro | 2invalidIdentifier}}';
            const { tokens, errors } = await runLexerGetTokensAndErrors(input);

            const expectedErrors = [
                { message: 'unexpected character: ->2<- at offset: 10, skipped 1 characters.' },
            ];

            expect(errors).toMatchObject(expectedErrors);

            const expectedTokens = [
                { type: 'MacroStart', text: '{{' },
                { type: 'MacroIdentifier', text: 'macro' },
                { type: 'Pipe', text: '|' },
                { type: 'FilterIdentifier', text: 'invalidIdentifier' },
                { type: 'MacroEnd', text: '}}' },
            ];

            expect(tokens).toEqual(expectedTokens);
        });

        // {{macro || outputModifier}}
        it('[Error] should throw an error when double pipe is used without an identifier', async () => {
            const input = '{{macro || outputModifier}}';
            const { tokens, errors } = await runLexerGetTokensAndErrors(input);

            const expectedErrors = [
                { message: 'unexpected character: ->|<- at offset: 9, skipped 1 characters.' },
            ];

            expect(errors).toMatchObject(expectedErrors);

            const expectedTokens = [
                { type: 'MacroStart', text: '{{' },
                { type: 'MacroIdentifier', text: 'macro' },
                { type: 'Pipe', text: '|' },
                { type: 'FilterIdentifier', text: 'outputModifier' },
                { type: 'MacroEnd', text: '}}' },
            ];

            expect(tokens).toEqual(expectedTokens);
        });
    });
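The error assertions in the cases above match only on message. Chevrotain lexing errors also carry offset, length, line, and column fields, so a stricter check is possible; a small sketch (Jest-style matchers assumed), using values consistent with the double-pipe case above:

// Sketch only - would run inside the double-pipe test above.
const { errors } = await runLexerGetTokensAndErrors('{{macro || outputModifier}}');
expect(errors).toMatchObject([
    {
        message: expect.stringContaining('unexpected character'),
        offset: 9,  // position of the second '|'
        length: 1,  // one character skipped during error recovery
    },
]);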

    describe('Macro While Typing..', () => {
        // {{unclosed_macro word and more. Done.
        it('lexer allows unclosed macros, but tries to parse it as a macro', async () => {