Parsing of comments

parent fa8f0c70ad
commit cec481da4e
@@ -250,4 +250,82 @@ Cooldown: 5:30 70%..45%
       `"Unexpected token [text CustomInterval: 15:00 10%] at line 3 char 1"`,
     );
   });
+
+  it("parses intervals with comments", () => {
+    expect(
+      parse(`
+Name: My Workout
+Interval: 10:00 90%
+# 0:00 Find your rhythm.
+# 1:00 Try to settle in for the effort
+
+# 5:00 Half way through
+
+# 9:00 Almost there
+# 9:30 Final push. YOU GOT IT!
+
+Rest: 5:00 50%
+# 0:00 Great effort!
+# 0:30 Cool down well after all of this.
+`),
+    ).toMatchInlineSnapshot(`
+      Object {
+        "author": "",
+        "description": "",
+        "intervals": Array [
+          Object {
+            "cadence": undefined,
+            "comments": Array [
+              Object {
+                "offset": 0,
+                "text": "Find your rhythm.",
+              },
+              Object {
+                "offset": 60,
+                "text": "Try to settle in for the effort",
+              },
+              Object {
+                "offset": 300,
+                "text": "Half way through",
+              },
+              Object {
+                "offset": 540,
+                "text": "Almost there",
+              },
+              Object {
+                "offset": 570,
+                "text": "Final push. YOU GOT IT!",
+              },
+            ],
+            "duration": 600,
+            "intensity": Object {
+              "from": 0.9,
+              "to": 0.9,
+            },
+            "type": "Interval",
+          },
+          Object {
+            "cadence": undefined,
+            "comments": Array [
+              Object {
+                "offset": 0,
+                "text": "Great effort!",
+              },
+              Object {
+                "offset": 30,
+                "text": "Cool down well after all of this.",
+              },
+            ],
+            "duration": 300,
+            "intensity": Object {
+              "from": 0.5,
+              "to": 0.5,
+            },
+            "type": "Rest",
+          },
+        ],
+        "name": "My Workout",
+      }
+    `);
+  });
 });
@@ -1,9 +1,13 @@
-import { Interval, Workout } from "../ast";
+import { Interval, Workout, Comment } from "../ast";
 import { ParseError } from "./ParseError";
 import { isIntervalLabelTokenValue, SourceLocation, Token } from "./tokenizer";

 type Header = Partial<Omit<Workout, "intervals">>;

+const tokenToString = (token: Token | undefined): string => {
+  return token ? `[${token.type} ${token.value}]` : "EOF";
+};
+
 const extractText = (tokens: Token[]): [string, Token[]] => {
   let text;
   while (tokens[0] && tokens[0].type === "text") {
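Side note (not part of the commit): the new import pulls a Comment type from ../ast, but its definition is outside this diff. Judging from the objects parseIntervalComments pushes below, it presumably looks like this sketch:

// Presumed shape of the Comment AST node, inferred from usage in this diff;
// the real definition lives in ../ast and may differ.
export type Comment = {
  offset: number; // seconds from the start of the enclosing interval
  text: string;
};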
@@ -49,6 +53,35 @@ const parseHeader = (tokens: Token[]): [Header, Token[]] => {
   return [header, tokens];
 };

+const parseIntervalComments = (tokens: Token[]): [Comment[], Token[]] => {
+  const comments: Comment[] = [];
+  while (tokens[0]) {
+    const [start, offset, text, ...rest] = tokens;
+    if (start.type === "comment-start") {
+      if (!offset || offset.type !== "duration") {
+        throw new ParseError(
+          `Expected [comment offset] instead got ${tokenToString(offset)}`,
+          offset?.loc || start.loc,
+        );
+      }
+      if (!text || text.type !== "text") {
+        throw new ParseError(`Expected [comment text] instead got ${tokenToString(text)}`, text?.loc || offset.loc);
+      }
+      comments.push({
+        offset: offset.value,
+        text: text.value,
+      });
+      tokens = rest;
+    } else if (start.type === "text" && start.value === "") {
+      // skip empty lines
+      tokens.shift();
+    } else {
+      break;
+    }
+  }
+  return [comments, tokens];
+};
+
 type IntervalData = Omit<Interval, "type">;

 const parseIntervalParams = (tokens: Token[], loc: SourceLocation): [IntervalData, Token[]] => {
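Side note (illustration, not code from the commit): a minimal sketch of the token stream parseIntervalComments consumes, using simplified local stand-ins for the real Token shapes. Each "# <offset> <text>" line tokenizes into a comment-start/duration/text triple, and the loop walks triples until it hits a token that does not open a comment:

// Simplified local token types (loc fields omitted for brevity).
type Tok =
  | { type: "comment-start" }
  | { type: "duration"; value: number }
  | { type: "text"; value: string };

// Token stream produced for the two Rest comments in the test above.
const stream: Tok[] = [
  { type: "comment-start" },
  { type: "duration", value: 0 },
  { type: "text", value: "Great effort!" },
  { type: "comment-start" },
  { type: "duration", value: 30 },
  { type: "text", value: "Cool down well after all of this." },
];

// parseIntervalComments(stream) would return the comments plus leftover tokens:
// [[{ offset: 0, text: "Great effort!" },
//   { offset: 30, text: "Cool down well after all of this." }], []]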
@@ -80,7 +113,10 @@ const parseIntervalParams = (tokens: Token[], loc: SourceLocation): [IntervalDat
     throw new ParseError("Power not specified", loc);
   }

-  return [data as IntervalData, tokens];
+  const [comments, rest] = parseIntervalComments(tokens);
+  data.comments = comments;
+
+  return [data as IntervalData, rest];
 };

 const parseIntervals = (tokens: Token[]): Interval[] => {
@@ -89,19 +125,19 @@ const parseIntervals = (tokens: Token[]): Interval[] => {
   while (tokens[0]) {
     const token = tokens.shift() as Token;
     if (token.type === "label" && isIntervalLabelTokenValue(token.value)) {
-      const [{ duration, intensity, cadence }, rest] = parseIntervalParams(tokens, token.loc);
+      const [{ duration, intensity, cadence, comments }, rest] = parseIntervalParams(tokens, token.loc);
       intervals.push({
         type: token.value,
         duration,
         intensity,
         cadence,
-        comments: [],
+        comments,
       });
       tokens = rest;
     } else if (token.type === "text" && token.value === "") {
       // Ignore empty lines
     } else {
-      throw new ParseError(`Unexpected token [${token.type} ${token.value}]`, token.loc);
+      throw new ParseError(`Unexpected token ${tokenToString(token)}`, token.loc);
     }
   }

@@ -40,7 +40,12 @@ export type IntensityRangeToken = {
   value: [number, number];
   loc: SourceLocation;
 };
-export type Token = LabelToken | TextToken | NumberToken | IntensityRangeToken;
+export type CommentStartToken = {
+  type: "comment-start";
+  value?: undefined;
+  loc: SourceLocation;
+};
+export type Token = LabelToken | TextToken | NumberToken | IntensityRangeToken | CommentStartToken;

 const toInteger = (str: string): number => {
   return parseInt(str.replace(/[^0-9]/, ""), 10);
@@ -53,8 +58,10 @@ const toSeconds = (str: string): number => {

 const toFraction = (percentage: number): number => percentage / 100;

+const DURATION_REGEX = /^([0-9]{1,2}:)?[0-9]{1,2}:[0-9]{1,2}$/;
+
 const tokenizeValueParam = (text: string, loc: SourceLocation): Token => {
-  if (/^([0-9]{1,2}:)?[0-9]{1,2}:[0-9]{1,2}$/.test(text)) {
+  if (DURATION_REGEX.test(text)) {
     return { type: "duration", value: toSeconds(text), loc };
   }
   if (/^[0-9]+rpm$/.test(text)) {
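Side note (illustration, not code from the commit): the extracted DURATION_REGEX accepts mm:ss and hh:mm:ss forms only. A quick check with illustrative inputs (regex copied from the diff above):

const DURATION_REGEX = /^([0-9]{1,2}:)?[0-9]{1,2}:[0-9]{1,2}$/;
console.log(DURATION_REGEX.test("5:30"));    // true  (mm:ss)
console.log(DURATION_REGEX.test("1:05:00")); // true  (hh:mm:ss)
console.log(DURATION_REGEX.test("90"));      // false (no colon)
console.log(DURATION_REGEX.test("1:2:3:4")); // false (too many fields)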
@@ -70,7 +77,17 @@ const tokenizeValueParam = (text: string, loc: SourceLocation): Token => {
   throw new ParseError(`Unrecognized interval parameter "${text}"`, loc);
 };

-const tokenizeParams = (type: LabelTokenValue, text: string, loc: SourceLocation): Token[] => {
+const tokenizeParams = (text: string, loc: SourceLocation): Token[] => {
+  return text.split(/\s+/).map((rawParam) => {
+    return tokenizeValueParam(rawParam, {
+      row: loc.row,
+      // Not fully accurate, but should do for start
+      col: loc.col + text.indexOf(rawParam),
+    });
+  });
+};
+
+const tokenizeLabelTokenParams = (type: LabelTokenValue, text: string, loc: SourceLocation): Token[] => {
   switch (type) {
     case "Name":
     case "Author":
@@ -81,17 +98,31 @@ const tokenizeParams = (type: LabelTokenValue, text: string, loc: SourceLocation
     case "Rest":
     case "Interval":
     case "Cooldown":
-      return text.split(/\s+/).map((rawParam) => {
-        return tokenizeValueParam(rawParam, {
-          row: loc.row,
-          // Not fully accurate, but should do for start
-          col: loc.col + text.indexOf(rawParam),
-        });
-      });
+      return tokenizeParams(text, loc);
   }
 };

+const tokenizeComment = (line: string, row: number): Token[] | undefined => {
+  const [, commentHead, offset, commentText] = line.match(/^(\s*#\s*)([0-9:]+)(.*?)$/) || [];
+  if (!commentHead) {
+    return undefined;
+  }
+  if (!DURATION_REGEX.test(offset)) {
+    throw new ParseError("Invalid comment offset", { row, col: commentHead.length });
+  }
+  return [
+    { type: "comment-start", loc: { row, col: line.indexOf("#") } },
+    { type: "duration", value: toSeconds(offset), loc: { row, col: commentHead.length } },
+    { type: "text", value: commentText.trim(), loc: { row, col: commentHead.length + offset.length } },
+  ];
+};
+
 const tokenizeRule = (line: string, row: number): Token[] => {
+  const commentTokens = tokenizeComment(line, row);
+  if (commentTokens) {
+    return commentTokens;
+  }
+
   const matches = line.match(/^(\w+)(:\s*)(.*?)\s*$/);
   if (!matches) {
     return [{ type: "text", value: line.trim(), loc: { row, col: 0 } }];
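Side note (illustration, not code from the commit): how the comment regex splits a line. The sample line is hypothetical; the regex and the destructuring mirror tokenizeComment above:

const line = "  # 1:30 Keep it steady";
const [, commentHead, offset, commentText] =
  line.match(/^(\s*#\s*)([0-9:]+)(.*?)$/) || [];
// commentHead === "  # "            -> its length (4) becomes the offset column
// offset      === "1:30"            -> toSeconds gives 90 for the duration token
// commentText === " Keep it steady" -> trimmed to "Keep it steady"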
@@ -107,7 +138,7 @@ const tokenizeRule = (line: string, row: number): Token[] => {
     value: label as LabelTokenValue,
     loc: { row, col: 0 },
   };
-  const params = tokenizeParams(labelToken.value, paramString, {
+  const params = tokenizeLabelTokenParams(labelToken.value, paramString, {
     row,
     col: label.length + separator.length,
   });