Rename parser to tokenizer

This commit is contained in:
Rene Saarsoo 2020-09-18 10:03:16 +03:00
parent 62f0bfbf25
commit 87f6bc34c9
2 changed files with 9 additions and 9 deletions

View File

@ -1,5 +1,5 @@
import * as fs from "fs"; import * as fs from "fs";
import { parseFile } from "./parser"; import { tokenizeFile } from "./tokenizer";
const filename = process.argv[2]; const filename = process.argv[2];
@ -7,7 +7,7 @@ console.log(`Parsing: ${filename}`);
const file = fs.readFileSync(filename, "utf8"); const file = fs.readFileSync(filename, "utf8");
parseFile(file).forEach((rule) => { tokenizeFile(file).forEach((rule) => {
console.log(rule.type); console.log(rule.type);
rule.params.forEach((p) => { rule.params.forEach((p) => {
console.log(` ${p.type}: ${p.value}`); console.log(` ${p.type}: ${p.value}`);

View File

@ -9,7 +9,7 @@ const toSeconds = (str: string): number => {
return seconds + minutes * 60 + (hours || 0) * 60 * 60; return seconds + minutes * 60 + (hours || 0) * 60 * 60;
}; };
const parseValueParam = (text: string): Param => { const tokenizeValueParam = (text: string): Param => {
if (/^[0-9:]+$/.test(text)) { if (/^[0-9:]+$/.test(text)) {
return { type: ParamType.Duration, value: toSeconds(text) }; return { type: ParamType.Duration, value: toSeconds(text) };
} }
@ -26,7 +26,7 @@ const parseValueParam = (text: string): Param => {
throw new Error(`Unrecognized parameter "${text}"`); throw new Error(`Unrecognized parameter "${text}"`);
}; };
const parseParams = (type: RuleType, text: string): Param[] => { const tokenizeParams = (type: RuleType, text: string): Param[] => {
switch (type) { switch (type) {
case RuleType.Name: case RuleType.Name:
case RuleType.Author: case RuleType.Author:
@ -37,11 +37,11 @@ const parseParams = (type: RuleType, text: string): Param[] => {
case RuleType.Rest: case RuleType.Rest:
case RuleType.Interval: case RuleType.Interval:
case RuleType.Cooldown: case RuleType.Cooldown:
return text.split(" ").map(parseValueParam); return text.split(" ").map(tokenizeValueParam);
} }
}; };
const parseRule = (line: string): Rule | undefined => { const tokenizeRule = (line: string): Rule | undefined => {
const matches = line.match(/^(\w+):(.*)$/); const matches = line.match(/^(\w+):(.*)$/);
if (!matches) { if (!matches) {
return undefined; return undefined;
@ -53,15 +53,15 @@ const parseRule = (line: string): Rule | undefined => {
return { return {
type, type,
params: parseParams(type, matches[2].trim()), params: tokenizeParams(type, matches[2].trim()),
}; };
}; };
export const parseFile = (file: string): Rule[] => { export const tokenizeFile = (file: string): Rule[] => {
const rules: Rule[] = []; const rules: Rule[] = [];
file.split("\n").forEach((line) => { file.split("\n").forEach((line) => {
const rule = parseRule(line); const rule = tokenizeRule(line);
if (rule) { if (rule) {
rules.push(rule); rules.push(rule);
return; return;