Reorder tests
@@ -1,39 +1,14 @@
/** @typedef {import('../../public/lib/chevrotain.js').ILexingResult} ILexingResult */
/** @typedef {{type: string, text: string}} TestableToken */

describe("MacroLexer Tests", () => {
describe("MacroLexer", () => {
beforeAll(async () => {
await page.goto(global.ST_URL);
await page.waitForFunction('document.getElementById("preloader") === null', { timeout: 0 });
});

it("basic macro tokenization", async () => {
const input = "Hello, {{user}}!";
const tokens = await runLexerGetTokens(input);

const expectedTokens = [
{ type: 'Plaintext', text: 'Hello, ' },
{ type: 'MacroStart', text: '{{' },
{ type: 'MacroIdentifier', text: 'user' },
{ type: 'MacroEnd', text: '}}' },
{ type: 'Plaintext', text: '!' },
];

// Compare the actual result with expected tokens
expect(tokens).toEqual(expectedTokens);
});

it("should tokenize plaintext only", async () => {
const input = "Just some text here.";
const tokens = await runLexerGetTokens(input);

const expectedTokens = [
{ type: 'Plaintext', text: 'Just some text here.' },
];

expect(tokens).toEqual(expectedTokens);
});

describe("General Macro", () => {
// {{user}}
it("should handle macro only", async () => {
const input = "{{user}}";
const tokens = await runLexerGetTokens(input);
@@ -46,7 +21,7 @@ describe("MacroLexer Tests", () => {

expect(tokens).toEqual(expectedTokens);
});

// {{}}
it("should handle empty macro", async () => {
const input = "{{}}";
const tokens = await runLexerGetTokens(input);
@@ -58,7 +33,7 @@ describe("MacroLexer Tests", () => {

expect(tokens).toEqual(expectedTokens);
});

// {{ user }}
it("should handle macro with leading and trailing whitespace inside", async () => {
const input = "{{ user }}";
const tokens = await runLexerGetTokens(input);
@@ -71,7 +46,62 @@ describe("MacroLexer Tests", () => {

expect(tokens).toEqual(expectedTokens);
});
// {{macro1}}{{macro2}}
it("should handle multiple sequential macros", async () => {
const input = "{{macro1}}{{macro2}}";
const tokens = await runLexerGetTokens(input);

const expectedTokens = [
{ type: 'MacroStart', text: '{{' },
{ type: 'MacroIdentifier', text: 'macro1' },
{ type: 'MacroEnd', text: '}}' },
{ type: 'MacroStart', text: '{{' },
{ type: 'MacroIdentifier', text: 'macro2' },
{ type: 'MacroEnd', text: '}}' },
];

expect(tokens).toEqual(expectedTokens);
});
// {{macro!@#%}}
it("do not lex special characters as part of the macro identifier", async () => {
const input = "{{macro!@#%}}";
const tokens = await runLexerGetTokens(input);

const expectedTokens = [
{ type: 'MacroStart', text: '{{' },
{ type: 'MacroIdentifier', text: 'macro' },
{ type: 'Unknown', text: '!' },
{ type: 'Unknown', text: '@' },
{ type: 'Unknown', text: '#' },
{ type: 'Unknown', text: '%' },
{ type: 'MacroEnd', text: '}}' }
];

expect(tokens).toEqual(expectedTokens);
});

// {{ma!@#%ro}}
it("invalid chars in macro identifier are not parsed as valid macro identifier", async () => {
const input = "{{ma!@#%ro}}";
const tokens = await runLexerGetTokens(input);

const expectedTokens = [
{ type: 'MacroStart', text: '{{' },
{ type: 'MacroIdentifier', text: 'ma' },
{ type: 'Unknown', text: '!' },
{ type: 'Unknown', text: '@' },
{ type: 'Unknown', text: '#' },
{ type: 'Unknown', text: '%' },
{ type: 'Identifier', text: 'ro' },
{ type: 'MacroEnd', text: '}}' }
];

expect(tokens).toEqual(expectedTokens);
});
});

describe("Macro Nesting", () => {
// {{outerMacro {{innerMacro}}}}
it("should handle nested macros", async () => {
const input = "{{outerMacro {{innerMacro}}}}";
const tokens = await runLexerGetTokens(input);
@@ -87,7 +117,7 @@ describe("MacroLexer Tests", () => {

expect(tokens).toEqual(expectedTokens);
});

// {{doStuff "inner {{nested}} string"}}
it("should handle macros with nested quotation marks", async () => {
const input = '{{doStuff "inner {{nested}} string"}}';
const tokens = await runLexerGetTokens(input);
@@ -107,8 +137,10 @@ describe("MacroLexer Tests", () => {

expect(tokens).toEqual(expectedTokens);
});
});


describe("Macro Arguments", () => {
// {{setvar::myVar::This is Sparta!}}
it("should tokenize macros with double colons arguments correctly", async () => {
const input = "{{setvar::myVar::This is Sparta!}}";
const tokens = await runLexerGetTokens(input);
@@ -128,7 +160,7 @@ describe("MacroLexer Tests", () => {

expect(tokens).toEqual(expectedTokens);
});

// {{doStuff key=MyValue another=AnotherValue}}
it("should handle named arguments with key=value syntax", async () => {
const input = "{{doStuff key=MyValue another=AnotherValue}}";
const tokens = await runLexerGetTokens(input);
@@ -147,7 +179,7 @@ describe("MacroLexer Tests", () => {

expect(tokens).toEqual(expectedTokens);
});

// {{getvar key="My variable"}}
it("should handle named arguments with quotation marks", async () => {
const input = '{{getvar key="My variable"}}';
const tokens = await runLexerGetTokens(input);
@@ -166,7 +198,7 @@ describe("MacroLexer Tests", () => {

expect(tokens).toEqual(expectedTokens);
});

// {{random "this" "and that" "and some more"}}
it("should handle multiple unnamed arguments in quotation marks", async () => {
const input = '{{random "this" "and that" "and some more"}}';
const tokens = await runLexerGetTokens(input);
@@ -191,23 +223,7 @@ describe("MacroLexer Tests", () => {

expect(tokens).toEqual(expectedTokens);
});

it("should handle multiple sequential macros", async () => {
const input = "{{macro1}}{{macro2}}";
const tokens = await runLexerGetTokens(input);

const expectedTokens = [
{ type: 'MacroStart', text: '{{' },
{ type: 'MacroIdentifier', text: 'macro1' },
{ type: 'MacroEnd', text: '}}' },
{ type: 'MacroStart', text: '{{' },
{ type: 'MacroIdentifier', text: 'macro2' },
{ type: 'MacroEnd', text: '}}' },
];

expect(tokens).toEqual(expectedTokens);
});

// {{doStuff key="My Spaced Value" otherKey=SingleKey}}
it("should handle named arguments with mixed style", async () => {
const input = '{{doStuff key="My Spaced Value" otherKey=SingleKey}}';
const tokens = await runLexerGetTokens(input);
@@ -230,7 +246,7 @@ describe("MacroLexer Tests", () => {

expect(tokens).toEqual(expectedTokens);
});

// {{doStuff key=}}
it("should handle macros with empty named arguments", async () => {
const input = "{{doStuff key=}}";
const tokens = await runLexerGetTokens(input);
@@ -245,7 +261,22 @@ describe("MacroLexer Tests", () => {

expect(tokens).toEqual(expectedTokens);
});
// {{random "" ""}}
it("should handle empty unnamed arguments if quoted", async () => {
const input = '{{random "" ""}}';
const tokens = await runLexerGetTokens(input);

const expectedTokens = [
{ type: 'MacroStart', text: '{{' },
{ type: 'MacroIdentifier', text: 'random' },
{ type: 'Quote', text: '"' },
{ type: 'Quote', text: '"' },
{ type: 'Quote', text: '"' },
{ type: 'Quote', text: '"' },
{ type: 'MacroEnd', text: '}}' },
];

expect(tokens).toEqual(expectedTokens);
});
// {{doStuff special chars #!@&*()}}
it("should handle macros with special characters in arguments", async () => {
const input = '{{doStuff special chars #!@&*()}}';
const tokens = await runLexerGetTokens(input);
@@ -267,7 +298,7 @@ describe("MacroLexer Tests", () => {

expect(tokens).toEqual(expectedTokens);
});

// {{longMacro arg1="value1" arg2="value2" arg3="value3"}}
it("should handle long macros with multiple arguments", async () => {
const input = '{{longMacro arg1="value1" arg2="value2" arg3="value3"}}';
const tokens = await runLexerGetTokens(input);
@@ -295,8 +326,7 @@ describe("MacroLexer Tests", () => {

expect(tokens).toEqual(expectedTokens);
});


// {{complexMacro "text with {{nested}} content" key=val}}
it("should handle macros with complex argument patterns", async () => {
const input = '{{complexMacro "text with {{nested}} content" key=val}}';
const tokens = await runLexerGetTokens(input);
@@ -320,24 +350,10 @@ describe("MacroLexer Tests", () => {

expect(tokens).toEqual(expectedTokens);
});

it("do not lex special characters as part of the macro identifier", async () => {
const input = "{{macro!@#%}}";
const tokens = await runLexerGetTokens(input);

const expectedTokens = [
{ type: 'MacroStart', text: '{{' },
{ type: 'MacroIdentifier', text: 'macro' },
{ type: 'Unknown', text: '!' },
{ type: 'Unknown', text: '@' },
{ type: 'Unknown', text: '#' },
{ type: 'Unknown', text: '%' },
{ type: 'MacroEnd', text: '}}' }
];

expect(tokens).toEqual(expectedTokens);
});

describe("Macro While Typing..", () => {
// {{unclosed_macro word and more. Done.
it("lexer allows unclosed macros, but tries to parse it as a macro", async () => {
const input = "{{unclosed_macro word and more. Done.";
const tokens = await runLexerGetTokens(input);
@@ -355,7 +371,40 @@ describe("MacroLexer Tests", () => {

expect(tokens).toEqual(expectedTokens);
});
});

describe("Macro and Plaintext", () => {
// Hello, {{user}}!
it("basic macro tokenization", async () => {
const input = "Hello, {{user}}!";
const tokens = await runLexerGetTokens(input);

const expectedTokens = [
{ type: 'Plaintext', text: 'Hello, ' },
{ type: 'MacroStart', text: '{{' },
{ type: 'MacroIdentifier', text: 'user' },
{ type: 'MacroEnd', text: '}}' },
{ type: 'Plaintext', text: '!' },
];

// Compare the actual result with expected tokens
expect(tokens).toEqual(expectedTokens);
});
// Just some text here.
it("should tokenize plaintext only", async () => {
const input = "Just some text here.";
const tokens = await runLexerGetTokens(input);

const expectedTokens = [
{ type: 'Plaintext', text: 'Just some text here.' },
];

expect(tokens).toEqual(expectedTokens);
});
});

describe("Error Cases in Macro Lexing", () => {
// this is an unopened_macro}} and will be done
it("lexer treats unopened macors as simple plaintext", async () => {
|
||||
const input = "this is an unopened_macro}} and will be done";
|
||||
const tokens = await runLexerGetTokens(input);
|
||||
@@ -366,23 +415,6 @@ describe("MacroLexer Tests", () => {
|
||||
|
||||
expect(tokens).toEqual(expectedTokens);
|
||||
});
|
||||
|
||||
it("invalid chars in macro identifier are not parsed as valid macro identifier", async () => {
|
||||
const input = "{{ma!@#%ro}}";
|
||||
const tokens = await runLexerGetTokens(input);
|
||||
|
||||
const expectedTokens = [
|
||||
{ type: 'MacroStart', text: '{{' },
|
||||
{ type: 'MacroIdentifier', text: 'ma' },
|
||||
{ type: 'Unknown', text: '!' },
|
||||
{ type: 'Unknown', text: '@' },
|
||||
{ type: 'Unknown', text: '#' },
|
||||
{ type: 'Unknown', text: '%' },
|
||||
{ type: 'Identifier', text: 'ro' },
|
||||
{ type: 'MacroEnd', text: '}}' }
|
||||
];
|
||||
|
||||
expect(tokens).toEqual(expectedTokens);
|
||||
});
|
||||
});
|
||||
|
||||
|
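
Every test in this diff calls a runLexerGetTokens() helper whose definition lies outside the changed hunks, so it is not shown here. For reference, a minimal sketch of what such a helper could look like, assuming the usual Jest + Puppeteer globals (page) and assuming the lexer is reachable in the browser as a global MacroLexer object with a Chevrotain-style tokenize() method; both of those names are assumptions, not taken from this diff:

/**
 * Sketch only: runs the macro lexer inside the browser page and flattens the
 * Chevrotain lexing result into the { type, text } objects the tests compare.
 * The global `MacroLexer` name and its `tokenize()` method are assumptions
 * about code that lives outside this diff.
 * @param {string} input Text to tokenize
 * @returns {Promise<TestableToken[]>} Simplified tokens
 */
async function runLexerGetTokens(input) {
    return await page.evaluate((text) => {
        /** @type {ILexingResult} */
        const result = window.MacroLexer.tokenize(text); // assumed entry point
        return result.tokens.map((token) => ({
            type: token.tokenType.name, // e.g. 'MacroStart', 'Plaintext'
            text: token.image,          // the raw matched text
        }));
    }, input);
}

Mapping the tokens inside page.evaluate keeps the returned value serializable; full Chevrotain token objects carry references (token types, patterns) that would not survive the round trip out of the browser context intact.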