diff --git a/src/core/tokenizer.ts b/src/core/tokenizer.ts
index 6b034124cb..fe37c7e3f2 100644
--- a/src/core/tokenizer.ts
+++ b/src/core/tokenizer.ts
@@ -35,15 +35,15 @@ const REPEATABLE_SYMBOLS_REGEX = REPEATABLE_SYMBOLS.map(escapeRegExp)
   .map((s) => `${s}+`)
   .join("|");
 const FIXED_TOKENS_REGEX = FIXED_TOKENS.map(escapeRegExp).join("|");
-const IDENTIFIERS_REGEX = "[a-zA-Z_]+[a-zA-Z_0-9]*";
+const IDENTIFIERS_REGEX = "[a-zA-Z_0-9]+";
 const SINGLE_SYMBOLS_REGEX = "[^\\s\\w]";
-const NUMBERS_REGEX = "(?<=[^.\\d]|^)\\d+\\.\\d+(?=[^.\\d]|$)|\\d+"; // (not-dot/digit digits dot digits not-dot/digit) OR digits
+const NUMBERS_REGEX = "(?<=[^.\\d]|^)\\d+\\.\\d+(?=[^.\\d]|$)"; // (not-dot/digit digits dot digits not-dot/digit)
 
 const REGEX = [
-  IDENTIFIERS_REGEX,
   FIXED_TOKENS_REGEX,
   REPEATABLE_SYMBOLS_REGEX,
   NUMBERS_REGEX,
+  IDENTIFIERS_REGEX,
   SINGLE_SYMBOLS_REGEX,
 ].join("|");
diff --git a/src/test/suite/tokenizer.test.ts b/src/test/suite/tokenizer.test.ts
index 780e432270..d0da8c621c 100644
--- a/src/test/suite/tokenizer.test.ts
+++ b/src/test/suite/tokenizer.test.ts
@@ -40,6 +40,12 @@ const tests: TestCase[] = [
   ["\\r\\n\\t", ["\\r", "\\n", "\\t"]],
   // Comments
   ["// Hello world", ["//", "Hello", "world"]],
+  // Hex colors
+  ["#aaaaaa", ["#", "aaaaaa"]],
+  ["#11aaaa", ["#", "11aaaa"]],
+  ["#aa11aa", ["#", "aa11aa"]],
+  ["#aaaa11", ["#", "aaaa11"]],
+  ["#111111", ["#", "111111"]],
 ];
 
 suite("tokenizer", () => {
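
A rough illustration of why the alternation order matters, as a minimal standalone sketch: it uses only the regex constants shown in the diff, omits FIXED_TOKENS_REGEX and REPEATABLE_SYMBOLS_REGEX, and the `tokenize` helper below is hypothetical rather than the extension's actual tokenizer.

```ts
// Alternation order is priority order: at a given position, earlier
// alternatives win. Since IDENTIFIERS_REGEX now also matches leading digits,
// NUMBERS_REGEX must come before it so floats stay a single token.
const IDENTIFIERS_REGEX = "[a-zA-Z_0-9]+";
const SINGLE_SYMBOLS_REGEX = "[^\\s\\w]";
const NUMBERS_REGEX = "(?<=[^.\\d]|^)\\d+\\.\\d+(?=[^.\\d]|$)";

// FIXED_TOKENS_REGEX and REPEATABLE_SYMBOLS_REGEX omitted for brevity.
const REGEX = [NUMBERS_REGEX, IDENTIFIERS_REGEX, SINGLE_SYMBOLS_REGEX].join("|");

// Hypothetical helper, only for demonstrating the combined regex.
function tokenize(text: string): string[] {
  return [...text.matchAll(new RegExp(REGEX, "g"))].map((m) => m[0]);
}

console.log(tokenize("#aaaa11")); // ["#", "aaaa11"]  hex color: symbol + identifier
console.log(tokenize("3.14"));    // ["3.14"]         float still wins over identifier
console.log(tokenize("123"));     // ["123"]          bare digits now match via the identifier branch
```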