test_runner: tap parser
add node parallel tests for lexer
manekinekko committed Jun 21, 2022

1 parent c26836f commit 180d2fb
Showing 4 changed files with 494 additions and 452 deletions.
81 changes: 41 additions & 40 deletions lib/internal/test_runner/tap_lexer.js
@@ -24,8 +24,8 @@ const TokenKind = {
 };
 
 class Token {
-  constructor({ type, value, stream }) {
-    this.type = type;
+  constructor({ kind, value, stream }) {
+    this.kind = kind;
     this.value = value;
     this.location = {
       line: stream.line,
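
Note: the change is mechanical; every Token now carries a kind field instead of type. A minimal sketch of the resulting shape (standalone; the stream stub here is hypothetical, the real lexer passes its own source stream):

  const stream = { line: 1, column: 1 }; // hypothetical stand-in for the lexer's source
  const token = new Token({ kind: 'Numeric', value: '14', stream });
  token.kind;  // 'Numeric' (was token.type before this commit)
  token.value; // '14'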
@@ -150,7 +150,7 @@ class TapLexer {
     this.escapeStack = [];
 
     this.lastScannedToken = new Token({
-      type: TokenKind.EOL,
+      kind: TokenKind.EOL,
       value: TokenKind.EOL,
       stream: this.source,
     });
@@ -161,7 +161,7 @@ class TapLexer {
       let token = this.scanToken();
 
       // remember the last scanned token (except for whitespace)
-      if (token.type !== TokenKind.WHITESPACE) {
+      if (token.kind !== TokenKind.WHITESPACE) {
         this.lastScannedToken = token;
       }
       yield token;
@@ -223,27 +223,28 @@ class TapLexer {
     // escape chars from the stack and start fresh for the next line
     this.escapeStack = [];
     return new Token({
-      type: TokenKind.EOL,
+      kind: TokenKind.EOL,
       value: char,
       stream: this.source,
     });
   }
 
   scanEOF() {
     return new Token({
-      type: TokenKind.EOF,
+      kind: TokenKind.EOF,
       value: TokenKind.EOF,
       stream: this.source,
     });
   }
 
   scanEscapeSymbol(char) {
-    // if the escape symbol has been escaped, then it is not an escape symbol
-    // consume it as a literal.
-    if (this.hasTheCurrentCharacterBeenEscaped(char)) {
+    // if the escape symbol has been escaped (by previous symbol),
+    // or if the next symbol is a whitespace symbol,
+    // then consume it as a literal.
+    if (this.hasTheCurrentCharacterBeenEscaped() || this.source.peek(1) === TokenKind.WHITESPACE) {
       this.escapeStack.pop();
       return new Token({
-        type: TokenKind.LITERAL,
+        kind: TokenKind.LITERAL,
        value: char,
        stream: this.source,
      });
@@ -254,15 +255,15 @@ class TapLexer {
     // and consume the next character as a literal (done in the next turn)
     this.escapeStack.push(char);
     return new Token({
-      type: TokenKind.ESCAPE,
+      kind: TokenKind.ESCAPE,
       value: char,
       stream: this.source,
     });
   }
 
   scanWhitespace(char) {
     return new Token({
-      type: TokenKind.WHITESPACE,
+      kind: TokenKind.WHITESPACE,
       value: char,
       stream: this.source,
     });
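
The escape handling is a two-step protocol: scanEscapeSymbol pushes the backslash onto escapeStack and emits an ESCAPE token, and when the next character is scanned, hasTheCurrentCharacterBeenEscaped() sees the non-empty stack, the stack is popped, and that character is consumed as a LITERAL. The new peek(1) check additionally downgrades a backslash followed by whitespace to a plain literal. A standalone sketch of the stack technique (a simplification, not the real TapLexer API):

  function lexEscapes(input) {
    const escapeStack = [];
    const tokens = [];
    for (const char of input) {
      if (char === '\\' && escapeStack.length === 0) {
        escapeStack.push(char); // arm the escape for the next character
        tokens.push({ kind: 'Escape', value: char });
      } else {
        escapeStack.pop(); // no-op when nothing was armed
        tokens.push({ kind: 'Literal', value: char });
      }
    }
    return tokens;
  }
  // lexEscapes('\\#') -> [{ kind: 'Escape', value: '\\' }, { kind: 'Literal', value: '#' }]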
@@ -279,15 +280,15 @@ class TapLexer {
     }
 
     return new Token({
-      type: TokenKind.DASH,
+      kind: TokenKind.DASH,
       value: char,
       stream: this.source,
     });
   }
 
   scanPlus(char) {
     return new Token({
-      type: TokenKind.PLUS,
+      kind: TokenKind.PLUS,
       value: char,
       stream: this.source,
     });
@@ -296,25 +297,25 @@ class TapLexer {
   scanHash(char) {
     // if last token is whitespace or EOL, we consume it as a comment
     if (
-      this.lastScannedToken.type === TokenKind.WHITESPACE ||
-      this.lastScannedToken.type === TokenKind.EOL
+      this.lastScannedToken.kind === TokenKind.WHITESPACE ||
+      this.lastScannedToken.kind === TokenKind.EOL
     ) {
       this.isComment = true;
       return new Token({
-        type: TokenKind.COMMENT,
+        kind: TokenKind.COMMENT,
         value: char,
         stream: this.source,
       });
     }
 
-    const charHasBeenEscaped = this.hasTheCurrentCharacterBeenEscaped(char);
+    const charHasBeenEscaped = this.hasTheCurrentCharacterBeenEscaped();
     if (this.isComment || charHasBeenEscaped) {
       if (charHasBeenEscaped) {
         this.escapeStack.pop();
       }
 
       return new Token({
-        type: TokenKind.LITERAL,
+        kind: TokenKind.LITERAL,
         value: char,
         stream: this.source,
       });
@@ -323,7 +324,7 @@ class TapLexer {
     // when a hash is found, we assume the rest of the line is a comment
     this.isComment = true;
     return new Token({
-      type: TokenKind.HASH,
+      kind: TokenKind.HASH,
       value: char,
       stream: this.source,
     });
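
Taken together, the two hash paths give three behaviors: a hash scanned at the start of a line (the last remembered non-whitespace token is an EOL) opens a COMMENT, a hash that is inside a comment or was escaped is a plain LITERAL, and any other hash becomes a HASH directive marker and flips isComment so the rest of the line lexes as literals. The new tests pin this down:

  // '# comment'      -> COMMENT('#') WHITESPACE LITERAL('comment')
  // 'ok 1 # todo'    -> ... HASH('#') WHITESPACE LITERAL('todo')
  // 'ok 1 # ##'      -> ... HASH('#') WHITESPACE LITERAL('#') LITERAL('#')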
@@ -349,7 +350,7 @@ class TapLexer {
       this.error(
         `Expected YAML end block: ...`,
         new Token({
-          type: TokenKind.EOF,
+          kind: TokenKind.EOF,
           value: TokenKind.EOF,
           stream: this.source,
         })
@@ -358,7 +359,7 @@ class TapLexer {
     }
 
     return new Token({
-      type: TokenKind.TAP_YAML,
+      kind: TokenKind.TAP_YAML,
       value: yaml, // don't trim on purpose!
       stream: this.source,
     });
@@ -377,7 +378,7 @@ class TapLexer {
     comment = comment.replace(/^# /, '');
 
     return new Token({
-      type: TokenKind.COMMENT,
+      kind: TokenKind.COMMENT,
       value: comment,
       stream: this.source,
     });
@@ -397,7 +398,7 @@ class TapLexer {
     description = description.replace(/^- /, '');
 
     return new Token({
-      type: TokenKind.COMMENT,
+      kind: TokenKind.COMMENT,
       value: description.trim(),
       stream: this.source,
     });
@@ -444,60 +445,60 @@ class TapLexer {
     }
 
     return new Token({
-      type: TokenKind.LITERAL,
+      kind: TokenKind.LITERAL,
       value: word,
       stream: this.source,
     });
   }
 
   scanTAPkeyword(word) {
-    if (word === 'TAP' && this.lastScannedToken.type === TokenKind.EOL) {
+    if (word === 'TAP' && this.lastScannedToken.kind === TokenKind.EOL) {
       return new Token({
-        type: TokenKind.TAP,
+        kind: TokenKind.TAP,
         value: word,
         stream: this.source,
       });
     }
 
-    if (word === 'version' && this.lastScannedToken.type === TokenKind.TAP) {
+    if (word === 'version' && this.lastScannedToken.kind === TokenKind.TAP) {
       return new Token({
-        type: TokenKind.TAP_VERSION,
+        kind: TokenKind.TAP_VERSION,
         value: word,
         stream: this.source,
       });
     }
 
-    if (word === '..' && this.lastScannedToken.type === TokenKind.NUMERIC) {
+    if (word === '..' && this.lastScannedToken.kind === TokenKind.NUMERIC) {
       return new Token({
-        type: TokenKind.TAP_PLAN,
+        kind: TokenKind.TAP_PLAN,
         value: word,
         stream: this.source,
       });
     }
 
-    if (word === 'not' && this.lastScannedToken.type === TokenKind.EOL) {
+    if (word === 'not' && this.lastScannedToken.kind === TokenKind.EOL) {
       return new Token({
-        type: TokenKind.TAP_TEST_NOTOK,
+        kind: TokenKind.TAP_TEST_NOTOK,
         value: word,
         stream: this.source,
       });
     }
 
     if (
       word === 'ok' &&
-      (this.lastScannedToken.type === TokenKind.TAP_TEST_NOTOK ||
-        this.lastScannedToken.type === TokenKind.EOL)
+      (this.lastScannedToken.kind === TokenKind.TAP_TEST_NOTOK ||
+        this.lastScannedToken.kind === TokenKind.EOL)
     ) {
       return new Token({
-        type: TokenKind.TAP_TEST_OK,
+        kind: TokenKind.TAP_TEST_OK,
         value: word,
         stream: this.source,
       });
     }
 
-    if (word === 'pragma' && this.lastScannedToken.type === TokenKind.EOL) {
+    if (word === 'pragma' && this.lastScannedToken.kind === TokenKind.EOL) {
       return new Token({
-        type: TokenKind.TAP_PRAGMA,
+        kind: TokenKind.TAP_PRAGMA,
         value: word,
         stream: this.source,
       });
@@ -518,13 +519,13 @@ class TapLexer {
       }
     }
     return new Token({
-      type: TokenKind.NUMERIC,
+      kind: TokenKind.NUMERIC,
       value: number,
       stream: this.source,
     });
   }
 
-  hasTheCurrentCharacterBeenEscaped(char) {
+  hasTheCurrentCharacterBeenEscaped() {
     // use the escapeStack to keep track of the escape characters
     return this.escapeStack.length > 0;
   }
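
For context, this is how the new test file exercises the lexer (internal module, so node must run with --expose-internals):

  const { TapLexer, TokenKind } = require('internal/test_runner/tap_lexer');
  const tokens = [...new TapLexer('TAP version 14').scanAll()];
  // tokens[0].kind === TokenKind.TAP, tokens[0].value === 'TAP', and so on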
62 changes: 31 additions & 31 deletions lib/internal/test_runner/tap_parser.js
@@ -6,7 +6,7 @@ const util = require('util');
 See https://testanything.org/tap-version-14-specification.html
 
-Note that the following is intended as a rough “pseudocode” guidance.
+Note that the following grammar is intended as a rough “pseudocode” guidance.
 It is not strict EBNF:
 
 TAPDocument := Version Plan Body | Version Body Plan
@@ -27,7 +27,7 @@ Empty := [\s\t]* "\n"
 Anything := [^\n]+ "\n"
 */
-const { TapLexer, TokenKind } = require('./tap_lexer');
+const { TapLexer, TokenKind } = require('internal/test_runner/tap_lexer');
 
 /**
  * An LL(1) parser for TAP14.
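
For reference, a minimal document matching the grammar above (Version, then Plan, then Body):

  TAP version 14
  1..2
  ok 1 - first test
  not ok 2 - second test # TODO not implemented yet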
@@ -87,7 +87,7 @@ class TapParser {
   // skip the provided tokens in the current chunk
   skip(...tokensToSkip) {
     let token = this.tokens[this.currentTokenChunk][this.currentTokenIndex];
-    while (token && tokensToSkip.includes(token.type)) {
+    while (token && tokensToSkip.includes(token.kind)) {
       // pre-increment to skip current tokens but make sure we don't advance index on the last iteration
       token = this.tokens[this.currentTokenChunk][++this.currentTokenIndex];
     }
@@ -106,12 +106,12 @@ class TapParser {
         TokenKind.DASH,
         TokenKind.WHITESPACE,
         TokenKind.ESCAPE,
-      ].includes(nextToken.type)
+      ].includes(nextToken.kind)
     ) {
       const word = this.next(false).value;
 
       // don't output escaped characters
-      if (nextToken.type !== TokenKind.ESCAPE) {
+      if (nextToken.kind !== TokenKind.ESCAPE) {
         literals.push(word);
       }

@@ -127,7 +127,7 @@ class TapParser {
     return [...tokens]
       .reduce(
         (acc, token) => {
-          if (token.type === TokenKind.EOL) {
+          if (token.kind === TokenKind.EOL) {
             acc.push([]);
           } else {
             acc[acc.length - 1].push(token);
@@ -136,7 +136,7 @@ class TapParser {
         },
         [[]]
       )
-      .filter((chunk) => chunk.length > 0 && chunk[0].type !== TokenKind.EOF);
+      .filter((chunk) => chunk.length > 0 && chunk[0].kind !== TokenKind.EOF);
   }
 
   generateNextTestId() {
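
The reduce/filter pipeline above splits the flat token stream into per-line chunks: each EOL token starts a new chunk, and the filter drops empty chunks plus the trailing chunk holding only EOF. A standalone sketch with string kinds (simplified):

  const tokens = [
    { kind: 'TAPTestOk', value: 'ok' },
    { kind: 'EOL', value: '\n' },
    { kind: 'TAPTestNotOk', value: 'not' },
    { kind: 'EOL', value: '\n' },
    { kind: 'EOF', value: 'EOF' },
  ];
  const chunks = tokens
    .reduce((acc, token) => {
      if (token.kind === 'EOL') acc.push([]);
      else acc[acc.length - 1].push(token);
      return acc;
    }, [[]])
    .filter((chunk) => chunk.length > 0 && chunk[0].kind !== 'EOF');
  // chunks -> [ [ok token], [not token] ]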
@@ -147,7 +147,7 @@ class TapParser {
     // count the number of whitespace tokens in the chunk, starting from the first token
     let whitespaceCount = 0;
     for (let i = 0; i < chunk.length; i++) {
-      if (chunk[i].type === TokenKind.WHITESPACE) {
+      if (chunk[i].kind === TokenKind.WHITESPACE) {
         whitespaceCount++;
       } else {
         break;
@@ -323,9 +323,9 @@ class TapParser {
   parseTapBlocks(chunk) {
     const chunkAsString = chunk.map((token) => token.value).join('');
     const firstToken = chunk[0];
-    const { type } = firstToken;
+    const { kind } = firstToken;
 
-    switch (type) {
+    switch (kind) {
       case TokenKind.TAP:
         return this.Version();
       case TokenKind.NUMERIC:
@@ -356,17 +356,17 @@ class TapParser {
   Version() {
     const tapToken = this.next();
 
-    if (tapToken.type !== TokenKind.TAP) {
+    if (tapToken.kind !== TokenKind.TAP) {
       this.lexer.error(`Expected "TAP"`, tapToken);
     }
 
     const versionToken = this.next();
-    if (versionToken.type !== TokenKind.TAP_VERSION) {
+    if (versionToken.kind !== TokenKind.TAP_VERSION) {
       this.lexer.error(`Expected "version"`, versionToken);
     }
 
     const numberToken = this.next();
-    if (numberToken.type !== TokenKind.NUMERIC) {
+    if (numberToken.kind !== TokenKind.NUMERIC) {
       this.lexer.error(`Expected Numeric`, numberToken);
     }

@@ -379,17 +379,17 @@ class TapParser {
     // even if specs mention plan starts at 1, we need to make sure we read the plan start value in case of a missing or invalid plan start value
     const planStart = this.next();
 
-    if (planStart.type !== TokenKind.NUMERIC) {
+    if (planStart.kind !== TokenKind.NUMERIC) {
       this.lexer.error(`Expected a Numeric`, planStart);
     }
 
     const planToken = this.next();
-    if (planToken.type !== TokenKind.TAP_PLAN) {
+    if (planToken.kind !== TokenKind.TAP_PLAN) {
       this.lexer.error(`Expected ".."`, planToken);
     }
 
     const planEnd = this.next();
-    if (planEnd.type !== TokenKind.NUMERIC) {
+    if (planEnd.kind !== TokenKind.NUMERIC) {
       this.lexer.error(`Expected a Numeric`, planEnd);
     }

@@ -401,10 +401,10 @@ class TapParser {
     // Read optional reason
     const hashToken = this.peek();
     if (hashToken) {
-      if (hashToken.type === TokenKind.HASH) {
+      if (hashToken.kind === TokenKind.HASH) {
         this.next(); // skip hash
         body.reason = this.readNextLiterals();
-      } else if (hashToken.type === TokenKind.LITERAL) {
+      } else if (hashToken.kind === TokenKind.LITERAL) {
         this.lexer.error(`Expected "#"`, hashToken);
       }
     }
@@ -426,19 +426,19 @@ class TapParser {
     const notToken = this.peek();
     let isTestFailed = false;
 
-    if (notToken.type === TokenKind.TAP_TEST_NOTOK) {
+    if (notToken.kind === TokenKind.TAP_TEST_NOTOK) {
       this.next(); // skip "not" token
       isTestFailed = true;
     }
 
     const okToken = this.next();
-    if (okToken.type !== TokenKind.TAP_TEST_OK) {
+    if (okToken.kind !== TokenKind.TAP_TEST_OK) {
       this.lexer.error(`Expected "ok" or "not ok"`, okToken);
     }
 
     // Read optional test number
     let numberToken = this.peek();
-    if (numberToken && numberToken.type === TokenKind.NUMERIC) {
+    if (numberToken && numberToken.kind === TokenKind.NUMERIC) {
       numberToken = this.next().value;
     } else {
       // TODO(@manekinekko): handle case when test ID is not provided
@@ -452,7 +452,7 @@ class TapParser {

     // Read optional description prefix " - "
     const descriptionDashToken = this.peek();
-    if (descriptionDashToken && descriptionDashToken.type === TokenKind.DASH) {
+    if (descriptionDashToken && descriptionDashToken.kind === TokenKind.DASH) {
       this.next(); // skip dash
     }

@@ -463,12 +463,12 @@ class TapParser {

     // Read optional directive and reason
     const hashToken = this.peek();
-    if (hashToken && hashToken.type === TokenKind.HASH) {
+    if (hashToken && hashToken.kind === TokenKind.HASH) {
       this.next(); // skip hash
     }
 
     let todoOrSkipToken = this.peek();
-    if (todoOrSkipToken && todoOrSkipToken.type === TokenKind.LITERAL) {
+    if (todoOrSkipToken && todoOrSkipToken.kind === TokenKind.LITERAL) {
       if (/todo/i.test(todoOrSkipToken.value)) {
         todoOrSkipToken = 'todo'; // force set directive to "todo"
         this.next(); // skip token
@@ -509,7 +509,7 @@ class TapParser {

     // Read optional reason
     const hashToken = this.peek();
-    if (hashToken && hashToken.type === TokenKind.HASH) {
+    if (hashToken && hashToken.kind === TokenKind.HASH) {
       this.next(); // skip hash
     }
     this.emiteBailout(this.readNextLiterals());
@@ -519,7 +519,7 @@ class TapParser {
   // Comment := ^ (" ")* "#" [^\n]* "\n"
   Comment() {
     const commentToken = this.next();
-    if (commentToken.type !== TokenKind.COMMENT) {
+    if (commentToken.kind !== TokenKind.COMMENT) {
       this.lexer.error(`Expected " # "`, commentToken);
     }

@@ -540,7 +540,7 @@ class TapParser {
   // TODO(@manekinekko): Add support for YAML parsing in the future (if needed)
   YAMLBlock() {
     const yaml = this.peek();
-    if (yaml && yaml.type === TokenKind.TAP_YAML) {
+    if (yaml && yaml.kind === TokenKind.TAP_YAML) {
       this.emitYAMLBlock(this.next().value.split('\n')); // consume raw YAML
     }
   }
@@ -550,7 +550,7 @@ class TapParser {
   // PragmaKey := ([a-zA-Z0-9_-])+
   Pragma() {
     const pragmaToken = this.next();
-    if (pragmaToken.type !== TokenKind.TAP_PRAGMA) {
+    if (pragmaToken.kind !== TokenKind.TAP_PRAGMA) {
       this.lexer.error(`Expected "pragma"`, pragmaToken);
     }

@@ -560,16 +560,16 @@ class TapParser {
     while (nextToken) {
       let isEnabled = true;
       const pragmaKeySign = this.next();
-      if (pragmaKeySign.type === TokenKind.PLUS) {
+      if (pragmaKeySign.kind === TokenKind.PLUS) {
         isEnabled = true;
-      } else if (pragmaKeySign.type === TokenKind.DASH) {
+      } else if (pragmaKeySign.kind === TokenKind.DASH) {
         isEnabled = false;
       } else {
         this.lexer.error(`Expected "+" or "-"`, pragmaKeySign);
       }
 
       const pragmaKeyToken = this.peek();
-      if (pragmaKeyToken.type !== TokenKind.LITERAL) {
+      if (pragmaKeyToken.kind !== TokenKind.LITERAL) {
         this.lexer.error(`Expected pragma key`, pragmaKeyToken);
       }
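
So a line like pragma +strict -warnings walks this loop twice: PLUS arms isEnabled = true and reads the key 'strict', then DASH sets isEnabled = false and reads the key 'warnings'. Conceptually the parsed result is { strict: true, warnings: false }, though the exact shape emitted downstream is not shown in this hunk.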

381 changes: 0 additions & 381 deletions lib/internal/test_runner/tap_parser_test.js

This file was deleted.

422 changes: 422 additions & 0 deletions test/parallel/test-runner-lexer.js
@@ -0,0 +1,422 @@
'use strict';
// Flags: --expose-internals

require('../common');
const assert = require('assert');

const { TapLexer, TokenKind } = require('internal/test_runner/tap_lexer');

function TAPLexer(input) {
  const lexer = new TapLexer(input);
  return [...lexer.scanAll()];
}

{
  const tokens = TAPLexer(`TAP version 14`);

  assert.strictEqual(tokens.length, 6);

  [
    { kind: TokenKind.TAP, value: 'TAP' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.TAP_VERSION, value: 'version' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.NUMERIC, value: '14' },
    { kind: TokenKind.EOF, value: 'EOF' },
  ].forEach((token, index) => {
    assert.strictEqual(tokens[index].kind, token.kind);
    assert.strictEqual(tokens[index].value, token.value);
  });
}

{
  const tokens = TAPLexer(`1..5 # reason`);

  [
    { kind: TokenKind.NUMERIC, value: '1' },
    { kind: TokenKind.TAP_PLAN, value: '..' },
    { kind: TokenKind.NUMERIC, value: '5' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.HASH, value: '#' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.LITERAL, value: 'reason' },
    { kind: TokenKind.EOF, value: 'EOF' },
  ].forEach((token, index) => {
    assert.strictEqual(tokens[index].kind, token.kind);
    assert.strictEqual(tokens[index].value, token.value);
  });
}

{
  const tokens = TAPLexer(
    `1..5 # reason "\\ !"\\#$%&'()*+,\\-./:;<=>?@[]^_\`{|}~`
  );

  [
    { kind: TokenKind.NUMERIC, value: '1' },
    { kind: TokenKind.TAP_PLAN, value: '..' },
    { kind: TokenKind.NUMERIC, value: '5' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.HASH, value: '#' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.LITERAL, value: 'reason' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.LITERAL, value: '"' },
    { kind: TokenKind.ESCAPE, value: '\\' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.LITERAL, value: '!"' },
    { kind: TokenKind.LITERAL, value: '\\' },
    { kind: TokenKind.LITERAL, value: '#' },
    { kind: TokenKind.LITERAL, value: "$%&'()*" },
    { kind: TokenKind.PLUS, value: '+' },
    { kind: TokenKind.LITERAL, value: ',' },
    { kind: TokenKind.ESCAPE, value: '\\' },
    { kind: TokenKind.DASH, value: '-' },
    { kind: TokenKind.LITERAL, value: './:;<=>?@[]^_`{|}~' },
    { kind: TokenKind.EOF, value: 'EOF' },
  ].forEach((token, index) => {
    assert.strictEqual(tokens[index].kind, token.kind);
    assert.strictEqual(tokens[index].value, token.value);
  });
}

{
  const tokens = TAPLexer(`ok`);

  [
    { kind: TokenKind.TAP_TEST_OK, value: 'ok' },
    { kind: TokenKind.EOF, value: 'EOF' },
  ].forEach((token, index) => {
    assert.strictEqual(tokens[index].kind, token.kind);
    assert.strictEqual(tokens[index].value, token.value);
  });
}

{
  const tokens = TAPLexer(`not ok`);

  [
    { kind: TokenKind.TAP_TEST_NOTOK, value: 'not' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.TAP_TEST_OK, value: 'ok' },
    { kind: TokenKind.EOF, value: 'EOF' },
  ].forEach((token, index) => {
    assert.strictEqual(tokens[index].kind, token.kind);
    assert.strictEqual(tokens[index].value, token.value);
  });
}

{
  const tokens = TAPLexer(`ok 1`);

  [
    { kind: TokenKind.TAP_TEST_OK, value: 'ok' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.NUMERIC, value: '1' },
    { kind: TokenKind.EOF, value: 'EOF' },
  ].forEach((token, index) => {
    assert.strictEqual(tokens[index].kind, token.kind);
    assert.strictEqual(tokens[index].value, token.value);
  });
}

{
  const tokens = TAPLexer(`
ok 1
not ok 2
`);

  [
    { kind: TokenKind.EOL, value: '\n' },
    { kind: TokenKind.TAP_TEST_OK, value: 'ok' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.NUMERIC, value: '1' },
    { kind: TokenKind.EOL, value: '\n' },
    { kind: TokenKind.TAP_TEST_NOTOK, value: 'not' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.TAP_TEST_OK, value: 'ok' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.NUMERIC, value: '2' },
    { kind: TokenKind.EOL, value: '\n' },
    { kind: TokenKind.EOF, value: 'EOF' },
  ].forEach((token, index) => {
    assert.strictEqual(tokens[index].kind, token.kind);
    assert.strictEqual(tokens[index].value, token.value);
  });
}

{
  const tokens = TAPLexer(`
ok 1
    ok 1
`);

  [
    { kind: TokenKind.EOL, value: '\n' },
    { kind: TokenKind.TAP_TEST_OK, value: 'ok' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.NUMERIC, value: '1' },
    { kind: TokenKind.EOL, value: '\n' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.TAP_TEST_OK, value: 'ok' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.NUMERIC, value: '1' },
    { kind: TokenKind.EOL, value: '\n' },
    { kind: TokenKind.EOF, value: 'EOF' },
  ].forEach((token, index) => {
    assert.strictEqual(tokens[index].kind, token.kind);
    assert.strictEqual(tokens[index].value, token.value);
  });
}

{
  const tokens = TAPLexer(`ok 1 description`);

  [
    { kind: TokenKind.TAP_TEST_OK, value: 'ok' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.NUMERIC, value: '1' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.LITERAL, value: 'description' },
    { kind: TokenKind.EOF, value: 'EOF' },
  ].forEach((token, index) => {
    assert.strictEqual(tokens[index].kind, token.kind);
    assert.strictEqual(tokens[index].value, token.value);
  });
}

{
  const tokens = TAPLexer(`ok 1 - description`);

  [
    { kind: TokenKind.TAP_TEST_OK, value: 'ok' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.NUMERIC, value: '1' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.DASH, value: '-' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.LITERAL, value: 'description' },
    { kind: TokenKind.EOF, value: 'EOF' },
  ].forEach((token, index) => {
    assert.strictEqual(tokens[index].kind, token.kind);
    assert.strictEqual(tokens[index].value, token.value);
  });
}

{
  const tokens = TAPLexer(`ok 1 - description # todo`);

  [
    { kind: TokenKind.TAP_TEST_OK, value: 'ok' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.NUMERIC, value: '1' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.DASH, value: '-' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.LITERAL, value: 'description' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.HASH, value: '#' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.LITERAL, value: 'todo' },
    { kind: TokenKind.EOF, value: 'EOF' },
  ].forEach((token, index) => {
    assert.strictEqual(tokens[index].kind, token.kind);
    assert.strictEqual(tokens[index].value, token.value);
  });
}

{
  const tokens = TAPLexer(`ok 1 - description \\# todo`);

  [
    { kind: TokenKind.TAP_TEST_OK, value: 'ok' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.NUMERIC, value: '1' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.DASH, value: '-' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.LITERAL, value: 'description' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.ESCAPE, value: '\\' },
    { kind: TokenKind.LITERAL, value: '#' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.LITERAL, value: 'todo' },
    { kind: TokenKind.EOF, value: 'EOF' },
  ].forEach((token, index) => {
    assert.strictEqual(tokens[index].kind, token.kind);
    assert.strictEqual(tokens[index].value, token.value);
  });
}

{
  const tokens = TAPLexer(`ok 1 - description \\ # todo`);

  [
    { kind: TokenKind.TAP_TEST_OK, value: 'ok' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.NUMERIC, value: '1' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.DASH, value: '-' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.LITERAL, value: 'description' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.ESCAPE, value: '\\' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.LITERAL, value: '#' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.LITERAL, value: 'todo' },
    { kind: TokenKind.EOF, value: 'EOF' },
  ].forEach((token, index) => {
    assert.strictEqual(tokens[index].kind, token.kind);
    assert.strictEqual(tokens[index].value, token.value);
  });
}

{
  const tokens = TAPLexer(
    `ok 1 description \\# \\\\ world # TODO escape \\# characters with \\\\`
  );
  [
    { kind: TokenKind.TAP_TEST_OK, value: 'ok' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.NUMERIC, value: '1' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.LITERAL, value: 'description' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.ESCAPE, value: '\\' },
    { kind: TokenKind.LITERAL, value: '#' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.ESCAPE, value: '\\' },
    { kind: TokenKind.LITERAL, value: '\\' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.LITERAL, value: 'world' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.HASH, value: '#' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.LITERAL, value: 'TODO' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.LITERAL, value: 'escape' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.ESCAPE, value: '\\' },
    { kind: TokenKind.LITERAL, value: '#' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.LITERAL, value: 'characters' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.LITERAL, value: 'with' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.ESCAPE, value: '\\' },
    { kind: TokenKind.LITERAL, value: '\\' },
    { kind: TokenKind.EOF, value: 'EOF' },
  ].forEach((token, index) => {
    assert.strictEqual(tokens[index].kind, token.kind);
    assert.strictEqual(tokens[index].value, token.value);
  });
}

{
  const tokens = TAPLexer(`ok 1 - description # ##`);

  [
    { kind: TokenKind.TAP_TEST_OK, value: 'ok' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.NUMERIC, value: '1' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.DASH, value: '-' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.LITERAL, value: 'description' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.HASH, value: '#' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.LITERAL, value: '#' },
    { kind: TokenKind.LITERAL, value: '#' },
    { kind: TokenKind.EOF, value: 'EOF' },
  ].forEach((token, index) => {
    assert.strictEqual(tokens[index].kind, token.kind);
    assert.strictEqual(tokens[index].value, token.value);
  });
}

{
  const tokens = TAPLexer(`# comment`);

  [
    { kind: TokenKind.COMMENT, value: '#' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.LITERAL, value: 'comment' },
    { kind: TokenKind.EOF, value: 'EOF' },
  ].forEach((token, index) => {
    assert.strictEqual(tokens[index].kind, token.kind);
    assert.strictEqual(tokens[index].value, token.value);
  });
}

{
  const tokens = TAPLexer(`#`);

  [
    { kind: TokenKind.COMMENT, value: '#' },
    { kind: TokenKind.EOF, value: 'EOF' },
  ].forEach((token, index) => {
    assert.strictEqual(tokens[index].kind, token.kind);
    assert.strictEqual(tokens[index].value, token.value);
  });
}

{
  const tokens = TAPLexer(`
---
 message: "description"
 severity: fail
...
`);

  [
    { kind: TokenKind.EOL, value: '\n' },
    {
      kind: TokenKind.TAP_YAML,
      value: ' message: "description"\n severity: fail\n',
    },
    { kind: TokenKind.EOL, value: '\n' },
    { kind: TokenKind.EOF, value: 'EOF' },
  ].forEach((token, index) => {
    assert.strictEqual(tokens[index].kind, token.kind);
    assert.strictEqual(tokens[index].value, token.value);
  });
}

{
  const tokens = TAPLexer(`pragma +strict -warnings`);

  [
    { kind: TokenKind.TAP_PRAGMA, value: 'pragma' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.PLUS, value: '+' },
    { kind: TokenKind.LITERAL, value: 'strict' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.DASH, value: '-' },
    { kind: TokenKind.LITERAL, value: 'warnings' },
    { kind: TokenKind.EOF, value: 'EOF' },
  ].forEach((token, index) => {
    assert.strictEqual(tokens[index].kind, token.kind);
    assert.strictEqual(tokens[index].value, token.value);
  });
}

{
  const tokens = TAPLexer(`Bail out! Error`);

  [
    { kind: TokenKind.LITERAL, value: 'Bail' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.LITERAL, value: 'out!' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.LITERAL, value: 'Error' },
    { kind: TokenKind.EOF, value: 'EOF' },
  ].forEach((token, index) => {
    assert.strictEqual(tokens[index].kind, token.kind);
    assert.strictEqual(tokens[index].value, token.value);
  });
}
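
Each block in this test file repeats the same forEach assertions. A helper along these lines (hypothetical, not part of the commit) would remove most of the duplication:

  function assertTokens(input, expected) {
    const tokens = TAPLexer(input);
    assert.strictEqual(tokens.length, expected.length);
    expected.forEach((token, index) => {
      assert.strictEqual(tokens[index].kind, token.kind);
      assert.strictEqual(tokens[index].value, token.value);
    });
  }

  // Usage:
  assertTokens('ok 1', [
    { kind: TokenKind.TAP_TEST_OK, value: 'ok' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.NUMERIC, value: '1' },
    { kind: TokenKind.EOF, value: 'EOF' },
  ]);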
