Split up main labelTokenizing routine

Rene Saarsoo 2020-09-22 22:01:30 +03:00
parent 955c74be42
commit 0a6bf2fba4
1 changed file with 34 additions and 26 deletions


@@ -103,13 +103,7 @@ const tokenizeComment = (line: string, row: number): Token[] | undefined => {
   ];
 };
-const tokenizeLabelToken = (line: string, row: number): Token[] | undefined => {
-  const [, label, separator, paramString] = line.match(/^(\w+)(:\s*)(.*?)\s*$/) || [];
-  if (!label) {
-    return undefined;
-  }
-  if (isHeaderType(label)) {
+const tokenizeHeader = (label: HeaderType, separator: string, paramString: string, row: number): Token[] => {
   const token: HeaderToken = {
     type: "header",
     value: label,
@@ -124,9 +118,9 @@ const tokenizeLabelToken = (line: string, row: number): Token[] | undefined => {
     },
   };
   return [token, param];
-  }
-  if (isIntervalType(label)) {
+};
+const tokenizeInterval = (label: IntervalType, separator: string, paramString: string, row: number): Token[] => {
   const token: IntervalToken = {
     type: "interval",
     value: label,
@@ -137,6 +131,20 @@ const tokenizeLabelToken = (line: string, row: number): Token[] | undefined => {
     col: label.length + separator.length,
   });
   return [token, ...params];
+};
+const tokenizeLabeledLine = (line: string, row: number): Token[] | undefined => {
+  const [, label, separator, paramString] = line.match(/^(\w+)(:\s*)(.*?)\s*$/) || [];
+  if (!label) {
+    return undefined;
+  }
+  if (isHeaderType(label)) {
+    return tokenizeHeader(label, separator, paramString, row);
+  }
+  if (isIntervalType(label)) {
+    return tokenizeInterval(label, separator, paramString, row);
   }
   throw new ParseError(`Unknown label "${label}:"`, { row, col: 0 });
@@ -144,7 +152,7 @@ const tokenizeLabelToken = (line: string, row: number): Token[] | undefined => {
 const tokenizeRule = (line: string, row: number): Token[] => {
   return (
-    tokenizeLabelToken(line, row) ||
+    tokenizeLabeledLine(line, row) ||
     tokenizeComment(line, row) || [{ type: "text", value: line.trim(), loc: { row, col: 0 } }]
   );
 };
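Reassembled from the hunks above, the new control flow is: tokenizeLabeledLine parses the "label: params" shape once and dispatches to tokenizeHeader or tokenizeInterval, while tokenizeRule still falls back to tokenizeComment or a plain text token. Below is a minimal, self-contained TypeScript sketch of that dispatch pattern; the label lists, token shapes, and sample input are hypothetical stand-ins, and only the structure mirrors the commit.

// Sketch only: simplified types and made-up labels, not the project's real definitions.
type Loc = { row: number; col: number };
type Token = { type: string; value: string; loc: Loc };

const headerLabels = ["name", "author"] as const;      // hypothetical header labels
const intervalLabels = ["work", "rest"] as const;      // hypothetical interval labels
type HeaderType = (typeof headerLabels)[number];
type IntervalType = (typeof intervalLabels)[number];

const isHeaderType = (s: string): s is HeaderType =>
  (headerLabels as readonly string[]).includes(s);
const isIntervalType = (s: string): s is IntervalType =>
  (intervalLabels as readonly string[]).includes(s);

// Each extracted routine receives the already-parsed pieces of the line.
const tokenizeHeader = (label: HeaderType, separator: string, paramString: string, row: number): Token[] => [
  { type: "header", value: label, loc: { row, col: 0 } },
  { type: "text", value: paramString, loc: { row, col: label.length + separator.length } },
];

const tokenizeInterval = (label: IntervalType, separator: string, paramString: string, row: number): Token[] => [
  { type: "interval", value: label, loc: { row, col: 0 } },
  { type: "text", value: paramString, loc: { row, col: label.length + separator.length } },
];

// The slimmed-down entry point: parse the "label: params" shape once, then dispatch.
const tokenizeLabeledLine = (line: string, row: number): Token[] | undefined => {
  const [, label, separator, paramString] = line.match(/^(\w+)(:\s*)(.*?)\s*$/) || [];
  if (!label) {
    return undefined; // not a labeled line; the caller falls through to other tokenizers
  }
  if (isHeaderType(label)) {
    return tokenizeHeader(label, separator, paramString, row);
  }
  if (isIntervalType(label)) {
    return tokenizeInterval(label, separator, paramString, row);
  }
  throw new Error(`Unknown label "${label}:"`); // the real code throws ParseError with { row, col: 0 }
};

console.log(tokenizeLabeledLine("work: 4x30s", 0));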