Fix WI keys regex tokenization breaking falsely

- Change regex tokenization to detect whether a comma sits inside a regex by tracking the opening and closing delimiter (see the sketch below).
- Fixes #2375
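
To illustrate the approach: below is a minimal sketch of the same delimiter-tracking idea as a standalone splitter. It is not the actual customTokenizer (which appears to be wired into a select2 tokenizer callback); splitOnTopLevelCommas is an illustrative name, not part of the codebase.

// A minimal sketch, assuming the same delimiter-tracking idea as the commit below.
function splitOnTopLevelCommas(input) {
    const tokens = [];
    let insideRegex = false, regexClosed = false;
    let start = 0;
    for (let i = 0; i < input.length; i++) {
        const char = input[i];
        // An unescaped slash either opens a regex or, if one is already open, closes it
        if (char === '/' && (i === 0 || input[i - 1] !== '\\')) {
            if (!insideRegex) insideRegex = true;
            else if (!regexClosed) regexClosed = true;
        }
        // Split on commas, but only while we are not between an opening and a closing delimiter
        if (char === ',' && !(insideRegex && !regexClosed)) {
            tokens.push(input.slice(start, i).trim());
            start = i + 1;
            insideRegex = false;
            regexClosed = false;
        }
    }
    // Whatever is left after the last comma is the final token
    tokens.push(input.slice(start).trim());
    return tokens.filter(t => t.length > 0);
}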
Wolfsblvt 2024-06-14 01:02:59 +02:00
parent aa4bdec79c
commit 64698ac073
1 changed file with 14 additions and 6 deletions


@@ -1402,22 +1402,29 @@ function splitKeywordsAndRegexes(input) {
 function customTokenizer(input, _selection, callback) {
     let current = input.term;
+    let insideRegex = false, regexClosed = false;
     // Go over the input and check the current state, if we can get a token
     for (let i = 0; i < current.length; i++) {
         let char = current[i];
+        // If we find an unescaped slash, set the current regex state
+        if (char === '/' && (i === 0 || current[i - 1] !== '\\')) {
+            if (!insideRegex) insideRegex = true;
+            else if (!regexClosed) regexClosed = true;
+        }
         // If a comma is typed, we tokenize the input,
         // unless we are inside a possible regex, which would allow commas inside
         if (char === ',') {
             // We take everything up till now and consider this a token
             const token = current.slice(0, i).trim();
-            // Now how we test if this is a valid regex? And not a finished one, but a half-finished one?
-            // Easy, if someone typed a comma it can't be a delimiter escape.
-            // So we just check if this opening with a slash, and if so, we "close" the regex and try to parse it.
-            // So if we are inside a valid regex, we can't take the token now, we continue processing until the regex is closed,
-            // or this is not a valid regex anymore
-            if (token.startsWith('/') && isValidRegex(token + '/')) {
+            // Now how do we test if this is a regex? And not a finished one, but a half-finished one?
+            // We use the state remembered from above to check whether the delimiter was opened but not closed yet.
+            // We don't check validity here if we are inside a regex, because it might only become valid after it's finished (closing brackets, etc.).
+            // Validity will finally be checked when the next comma is typed.
+            if (insideRegex && !regexClosed) {
                 continue;
             }
@@ -1437,6 +1444,7 @@ function customTokenizer(input, _selection, callback) {
             // Now remove the token from the current input, and the comma too
             current = current.slice(i + 1);
+            insideRegex = false, regexClosed = false;
             i = 0;
         }
     }
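
For context on the "breaking falsely" in the title: the removed check tried to validate the half-typed token as a regex, which fails for anything that only becomes a valid pattern once it is closed (an unterminated group, bracket, etc.), so the tokenizer split inside the regex. A hedged illustration using the sketch above; isValidRegexSketch is a rough stand-in for the removed isValidRegex() helper, not its actual implementation:

// Rough stand-in for the removed isValidRegex() helper (an assumption, not the real one):
// strip the delimiters and flags, then see whether the body compiles.
function isValidRegexSketch(str) {
    const match = str.match(/^\/(.*)\/([a-z]*)$/);
    if (!match) return false;
    try { new RegExp(match[1], match[2]); return true; } catch { return false; }
}

// While typing "/(red,green)/i, apple", the comma after "red" makes the token "/(red".
// Old behavior: "/(red" + "/" does not compile (unterminated group), so the input was
// split at that comma, tearing the regex apart.
console.log(isValidRegexSketch('/(red/')); // false
// New behavior: the opening "/" was seen and not yet closed, so the comma is skipped
// and the regex stays in one piece.
console.log(splitOnTopLevelCommas('/(red,green)/i, apple')); // [ '/(red,green)/i', 'apple' ]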