From ca38f963140d05e5ca9b3cd6c2431f90263ebdc9 Mon Sep 17 00:00:00 2001
From: Rene Saarsoo
Date: Fri, 18 Sep 2020 10:49:31 +0300
Subject: [PATCH] Refactor to simpler token types

---
 src/index.ts     |   7 +---
 src/tokenizer.ts | 104 ++++++++++++++++++++---------------------------
 2 files changed, 45 insertions(+), 66 deletions(-)

diff --git a/src/index.ts b/src/index.ts
index 1f8ec9b..9954615 100644
--- a/src/index.ts
+++ b/src/index.ts
@@ -7,9 +7,6 @@ console.log(`Parsing: ${filename}`);
 
 const file = fs.readFileSync(filename, "utf8");
 
-tokenizeFile(file).forEach((rule) => {
-  console.log(rule.type);
-  rule.params.forEach((p) => {
-    console.log(`  ${p.type}: ${p.value}`);
-  });
+tokenizeFile(file).forEach((token) => {
+  console.log(token.type, token.value);
 });
diff --git a/src/tokenizer.ts b/src/tokenizer.ts
index b8d0f24..2d7f75f 100644
--- a/src/tokenizer.ts
+++ b/src/tokenizer.ts
@@ -1,4 +1,4 @@
-export enum RuleType {
+enum LabelTokenValue {
   Name = "Name",
   Author = "Author",
   Description = "Description",
@@ -7,35 +7,23 @@ export enum RuleType {
   Interval = "Interval",
   Cooldown = "Cooldown",
 }
-
-export type Rule = {
-  type: RuleType;
-  params: Param[];
+type LabelToken = {
+  type: "label";
+  value: LabelTokenValue;
 };
-
-export enum ParamType {
-  Text = "Text",
-  Power = "Power",
-  PowerRange = "PowerRange",
-  Cadence = "Cadence",
-  Duration = "Duration",
-}
-
-export type TextParam = { type: ParamType.Text; value: string };
-export type PowerParam = { type: ParamType.Power; value: number };
-export type PowerRangeParam = {
-  type: ParamType.PowerRange;
+type TextToken = {
+  type: "text";
+  value: string;
+};
+type NumberToken = {
+  type: "power" | "cadence" | "duration";
+  value: number;
+};
+type PowerRangeToken = {
+  type: "power-range";
   value: [number, number];
 };
-export type CadenceParam = { type: ParamType.Cadence; value: number };
-export type DurationParam = { type: ParamType.Duration; value: number };
-
-export type Param =
-  | TextParam
-  | PowerParam
-  | PowerRangeParam
-  | CadenceParam
-  | DurationParam;
+type Token = LabelToken | TextToken | NumberToken | PowerRangeToken;
 
 const toInteger = (str: string): number => {
   return parseInt(str.replace(/[^0-9]/, ""), 10);
@@ -46,67 +34,61 @@ const toSeconds = (str: string): number => {
   return seconds + minutes * 60 + (hours || 0) * 60 * 60;
 };
 
-const tokenizeValueParam = (text: string): Param => {
+const tokenizeValueParam = (text: string): Token => {
   if (/^[0-9:]+$/.test(text)) {
-    return { type: ParamType.Duration, value: toSeconds(text) };
+    return { type: "duration", value: toSeconds(text) };
   }
   if (/^[0-9]+rpm$/.test(text)) {
-    return { type: ParamType.Cadence, value: toInteger(text) };
+    return { type: "cadence", value: toInteger(text) };
   }
   if (/^[0-9]+%..[0-9]+%$/.test(text)) {
     const [from, to] = text.split("..").map(toInteger);
-    return { type: ParamType.PowerRange, value: [from, to] };
+    return { type: "power-range", value: [from, to] };
   }
   if (/^[0-9]+%$/.test(text)) {
-    return { type: ParamType.Power, value: toInteger(text) };
+    return { type: "power", value: toInteger(text) };
   }
   throw new Error(`Unrecognized parameter "${text}"`);
 };
 
-const tokenizeParams = (type: RuleType, text: string): Param[] => {
+const tokenizeParams = (type: LabelTokenValue, text: string): Token[] => {
   switch (type) {
-    case RuleType.Name:
-    case RuleType.Author:
-    case RuleType.Description: {
-      return [{ type: ParamType.Text, value: text }];
+    case LabelTokenValue.Name:
+    case LabelTokenValue.Author:
+    case LabelTokenValue.Description: {
+      return [{ type: "text", value: text }];
     }
-    case RuleType.Warmup:
-    case RuleType.Rest:
-    case RuleType.Interval:
-    case RuleType.Cooldown:
+    case LabelTokenValue.Warmup:
+    case LabelTokenValue.Rest:
+    case LabelTokenValue.Interval:
+    case LabelTokenValue.Cooldown:
       return text.split(" ").map(tokenizeValueParam);
   }
 };
 
-const tokenizeRule = (line: string): Rule | undefined => {
+const tokenizeRule = (line: string): Token[] => {
   const matches = line.match(/^(\w+):(.*)$/);
   if (!matches) {
-    return undefined;
+    return [{ type: "text", value: line.trim() }];
   }
-  if (!Object.keys(RuleType).includes(matches[1])) {
-    return undefined;
+  if (!Object.keys(LabelTokenValue).includes(matches[1])) {
+    return [{ type: "text", value: line.trim() }];
   }
-  const type: RuleType = matches[1] as RuleType;
-  return {
-    type,
-    params: tokenizeParams(type, matches[2].trim()),
+  const labelToken: LabelToken = {
+    type: "label",
+    value: matches[1] as LabelTokenValue,
   };
+  const params = tokenizeParams(labelToken.value, matches[2].trim());
+
+  return [labelToken, ...params];
 };
 
-export const tokenizeFile = (file: string): Rule[] => {
-  const tokens: Rule[] = [];
+export const tokenizeFile = (file: string): Token[] => {
+  const tokens: Token[] = [];
 
-  file.split("\n").forEach((line) => {
-    const rule = tokenizeRule(line);
-    if (rule) {
-      tokens.push(rule);
-      return;
-    }
-    const lastToken = tokens[tokens.length - 1];
-    if (lastToken && lastToken.type === RuleType.Description) {
-      lastToken.params.push({ type: ParamType.Text, value: line.trim() });
-    }
+  file.split("\n").map((line) => {
+    tokens.push(...tokenizeRule(line));
   });
 
   return tokens;