diff --git a/src/parser/tokenizer.ts b/src/parser/tokenizer.ts
index b81bb2c..95bb8b1 100644
--- a/src/parser/tokenizer.ts
+++ b/src/parser/tokenizer.ts
@@ -150,11 +150,12 @@ const tokenizeLabeledLine = (line: string, row: number): Token[] | undefined =>
   throw new ParseError(`Unknown label "${label}:"`, { row, col: 0 });
 };
 
+const tokenizeText = (line: string, row: number): TextToken[] => {
+  return [{ type: "text", value: line.trim(), loc: { row, col: 0 } }];
+};
+
 const tokenizeRule = (line: string, row: number): Token[] => {
-  return (
-    tokenizeLabeledLine(line, row) ||
-    tokenizeComment(line, row) || [{ type: "text", value: line.trim(), loc: { row, col: 0 } }]
-  );
+  return tokenizeLabeledLine(line, row) || tokenizeComment(line, row) || tokenizeText(line, row);
 };
 
 export const tokenize = (file: string): Token[] => {
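
For reference, a minimal, self-contained sketch of the fallback chain this patch refactors. The Token shapes and the tokenizeComment stub below are assumptions made for illustration only; tokenizeText mirrors the helper added above, and tokenizeLabeledLine is omitted to keep the sketch short.

```ts
type Loc = { row: number; col: number };
type TextToken = { type: "text"; value: string; loc: Loc };
type CommentToken = { type: "comment"; value: string; loc: Loc };
type Token = TextToken | CommentToken;

// Hypothetical stand-in for the real tokenizeComment: only recognizes "#" lines.
const tokenizeComment = (line: string, row: number): Token[] | undefined =>
  line.trimStart().startsWith("#")
    ? [{ type: "comment", value: line.trim().slice(1).trim(), loc: { row, col: 0 } }]
    : undefined;

// Mirrors the tokenizeText helper introduced in the diff: the final catch-all.
const tokenizeText = (line: string, row: number): TextToken[] => [
  { type: "text", value: line.trim(), loc: { row, col: 0 } },
];

// Simplified tokenizeRule: try the more specific tokenizer first, then fall
// back to a plain text token, as in the refactored return expression.
const tokenizeRule = (line: string, row: number): Token[] =>
  tokenizeComment(line, row) || tokenizeText(line, row);

console.log(tokenizeRule("# a comment", 0));   // -> [{ type: "comment", ... }]
console.log(tokenizeRule("  some text  ", 1)); // -> [{ type: "text", value: "some text", ... }]
```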