Refactor to simpler token types

parent de9d9feeeb
commit ca38f96314
@@ -7,9 +7,6 @@ console.log(`Parsing: ${filename}`);
 const file = fs.readFileSync(filename, "utf8");
 
-tokenizeFile(file).forEach((rule) => {
-  console.log(rule.type);
-  rule.params.forEach((p) => {
-    console.log(` ${p.type}: ${p.value}`);
-  });
+tokenizeFile(file).forEach((token) => {
+  console.log(token.type, token.value);
 });
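The nested rule/params loop collapses into a single pass over the flat token stream. As a rough sketch, a hypothetical input line `Interval: 2:00 90rpm` (the label and value formats come from src/tokenizer.ts below, and assuming `toSeconds` parses "2:00" as 120 seconds) would now log:

    label Interval
    duration 120
    cadence 90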
src/tokenizer.ts (104 changed lines)
@ -1,4 +1,4 @@
|
||||||
export enum RuleType {
|
enum LabelTokenValue {
|
||||||
Name = "Name",
|
Name = "Name",
|
||||||
Author = "Author",
|
Author = "Author",
|
||||||
Description = "Description",
|
Description = "Description",
|
||||||
|
|
@ -7,35 +7,23 @@ export enum RuleType {
|
||||||
Interval = "Interval",
|
Interval = "Interval",
|
||||||
Cooldown = "Cooldown",
|
Cooldown = "Cooldown",
|
||||||
}
|
}
|
||||||
|
type LabelToken = {
|
||||||
export type Rule = {
|
type: "label";
|
||||||
type: RuleType;
|
value: LabelTokenValue;
|
||||||
params: Param[];
|
|
||||||
};
|
};
|
||||||
|
type TextToken = {
|
||||||
export enum ParamType {
|
type: "text";
|
||||||
Text = "Text",
|
value: string;
|
||||||
Power = "Power",
|
};
|
||||||
PowerRange = "PowerRange",
|
type NumberToken = {
|
||||||
Cadence = "Cadence",
|
type: "power" | "cadence" | "duration";
|
||||||
Duration = "Duration",
|
value: number;
|
||||||
}
|
};
|
||||||
|
type PowerRangeToken = {
|
||||||
export type TextParam = { type: ParamType.Text; value: string };
|
type: "power-range";
|
||||||
export type PowerParam = { type: ParamType.Power; value: number };
|
|
||||||
export type PowerRangeParam = {
|
|
||||||
type: ParamType.PowerRange;
|
|
||||||
value: [number, number];
|
value: [number, number];
|
||||||
};
|
};
|
||||||
export type CadenceParam = { type: ParamType.Cadence; value: number };
|
type Token = LabelToken | TextToken | NumberToken | PowerRangeToken;
|
||||||
export type DurationParam = { type: ParamType.Duration; value: number };
|
|
||||||
|
|
||||||
export type Param =
|
|
||||||
| TextParam
|
|
||||||
| PowerParam
|
|
||||||
| PowerRangeParam
|
|
||||||
| CadenceParam
|
|
||||||
| DurationParam;
|
|
||||||
|
|
||||||
const toInteger = (str: string): number => {
|
const toInteger = (str: string): number => {
|
||||||
return parseInt(str.replace(/[^0-9]/, ""), 10);
|
return parseInt(str.replace(/[^0-9]/, ""), 10);
|
||||||
|
|
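The literal `type` strings make the new Token union self-discriminating: consumers can switch on `token.type` and let TypeScript narrow `value` per arm, where the old code needed the ParamType enum. A minimal consumer-side sketch (the `describe` helper is hypothetical, not part of this commit):

    // Hypothetical helper: each case narrows token.value via the literal type field.
    const describe = (token: Token): string => {
      switch (token.type) {
        case "label":
          return `label: ${token.value}`; // value is a LabelTokenValue
        case "text":
          return `text: ${token.value}`; // value is a string
        case "power-range":
          return `power: ${token.value[0]}%..${token.value[1]}%`; // [number, number]
        default:
          return `${token.type}: ${token.value}`; // power | cadence | duration (number)
      }
    };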
@@ -46,67 +34,61 @@ const toSeconds = (str: string): number => {
   return seconds + minutes * 60 + (hours || 0) * 60 * 60;
 };
 
-const tokenizeValueParam = (text: string): Param => {
+const tokenizeValueParam = (text: string): Token => {
   if (/^[0-9:]+$/.test(text)) {
-    return { type: ParamType.Duration, value: toSeconds(text) };
+    return { type: "duration", value: toSeconds(text) };
   }
   if (/^[0-9]+rpm$/.test(text)) {
-    return { type: ParamType.Cadence, value: toInteger(text) };
+    return { type: "cadence", value: toInteger(text) };
   }
   if (/^[0-9]+%..[0-9]+%$/.test(text)) {
     const [from, to] = text.split("..").map(toInteger);
-    return { type: ParamType.PowerRange, value: [from, to] };
+    return { type: "power-range", value: [from, to] };
   }
   if (/^[0-9]+%$/.test(text)) {
-    return { type: ParamType.Power, value: toInteger(text) };
+    return { type: "power", value: toInteger(text) };
   }
   throw new Error(`Unrecognized parameter "${text}"`);
 };
 
-const tokenizeParams = (type: RuleType, text: string): Param[] => {
+const tokenizeParams = (type: LabelTokenValue, text: string): Token[] => {
   switch (type) {
-    case RuleType.Name:
-    case RuleType.Author:
-    case RuleType.Description: {
-      return [{ type: ParamType.Text, value: text }];
+    case LabelTokenValue.Name:
+    case LabelTokenValue.Author:
+    case LabelTokenValue.Description: {
+      return [{ type: "text", value: text }];
     }
-    case RuleType.Warmup:
-    case RuleType.Rest:
-    case RuleType.Interval:
-    case RuleType.Cooldown:
+    case LabelTokenValue.Warmup:
+    case LabelTokenValue.Rest:
+    case LabelTokenValue.Interval:
+    case LabelTokenValue.Cooldown:
       return text.split(" ").map(tokenizeValueParam);
   }
 };
 
-const tokenizeRule = (line: string): Rule | undefined => {
+const tokenizeRule = (line: string): Token[] => {
   const matches = line.match(/^(\w+):(.*)$/);
   if (!matches) {
-    return undefined;
+    return [{ type: "text", value: line.trim() }];
   }
-  if (!Object.keys(RuleType).includes(matches[1])) {
-    return undefined;
+  if (!Object.keys(LabelTokenValue).includes(matches[1])) {
+    return [{ type: "text", value: line.trim() }];
   }
-  const type: RuleType = matches[1] as RuleType;
-
-  return {
-    type,
-    params: tokenizeParams(type, matches[2].trim()),
-  };
+  const labelToken: LabelToken = {
+    type: "label",
+    value: matches[1] as LabelTokenValue,
+  };
+  const params = tokenizeParams(labelToken.value, matches[2].trim());
+
+  return [labelToken, ...params];
 };
 
-export const tokenizeFile = (file: string): Rule[] => {
-  const tokens: Rule[] = [];
+export const tokenizeFile = (file: string): Token[] => {
+  const tokens: Token[] = [];
 
-  file.split("\n").forEach((line) => {
-    const rule = tokenizeRule(line);
-    if (rule) {
-      tokens.push(rule);
-      return;
-    }
-    const lastToken = tokens[tokens.length - 1];
-    if (lastToken && lastToken.type === RuleType.Description) {
-      lastToken.params.push({ type: ParamType.Text, value: line.trim() });
-    }
+  file.split("\n").map((line) => {
+    tokens.push(...tokenizeRule(line));
   });
 
   return tokens;
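Because tokenizeFile now returns a flat Token[] instead of nested Rules, callers that need structure must regroup tokens around the "label" markers themselves. A minimal sketch of such a consumer (the Step shape and groupTokens helper are hypothetical; note the commit leaves Token unexported, so the sketch recovers it from the function signature):

    import { tokenizeFile } from "./tokenizer";

    // Recover the token type from the exported function, since Token
    // itself is module-private after this commit.
    type Token = ReturnType<typeof tokenizeFile>[number];

    // Hypothetical regrouping: each "label" token starts a new step and
    // collects the value tokens that follow it.
    type Step = { label: Token; values: Token[] };

    const groupTokens = (tokens: Token[]): Step[] => {
      const steps: Step[] = [];
      for (const token of tokens) {
        if (token.type === "label") {
          steps.push({ label: token, values: [] });
        } else if (steps.length > 0) {
          steps[steps.length - 1].values.push(token);
        }
      }
      return steps;
    };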