test_runner: add initial TAP parser

Work in progress

PR-URL: https://github.com/nodejs/node/pull/43525
Refs: https://github.com/nodejs/node/issues/43344
Reviewed-By: Franziska Hinkelmann <franziska.hinkelmann@gmail.com>
Reviewed-By: Colin Ihrig <cjihrig@gmail.com>
Reviewed-By: Moshe Atlow <moshe@atlow.co.il>
This commit is contained in:
Wassim Chegham 2022-11-21 23:50:21 +01:00 committed by GitHub
parent 4bcc3aab76
commit f8ce9117b1
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
19 changed files with 4418 additions and 31 deletions

View File

@ -2690,6 +2690,25 @@ An unspecified or non-specific system error has occurred within the Node.js
process. The error object will have an `err.info` object property with
additional details.
<a id="ERR_TAP_LEXER_ERROR"></a>
### `ERR_TAP_LEXER_ERROR`
An error representing a failing lexer state.
<a id="ERR_TAP_PARSER_ERROR"></a>
### `ERR_TAP_PARSER_ERROR`
An error representing a failing parser state. Additional information about
the token causing the error is available via the `cause` property.
<a id="ERR_TAP_VALIDATION_ERROR"></a>
### `ERR_TAP_VALIDATION_ERROR`
This error represents a failed TAP validation.
<a id="ERR_TEST_FAILURE"></a>
### `ERR_TEST_FAILURE`

View File

@ -1042,8 +1042,7 @@ Emitted when [`context.diagnostic`][] is called.
### Event: `'test:fail'`
* `data` {Object}
* `duration` {number} The test duration.
* `error` {Error} The error that caused the test to fail.
* `details` {Object} Additional execution metadata.
* `name` {string} The test name.
* `testNumber` {number} The ordinal number of the test.
* `todo` {string|undefined} Present if [`context.todo`][] is called
@ -1054,7 +1053,7 @@ Emitted when a test fails.
### Event: `'test:pass'`
* `data` {Object}
* `duration` {number} The test duration.
* `details` {Object} Additional execution metadata.
* `name` {string} The test name.
* `testNumber` {number} The ordinal number of the test.
* `todo` {string|undefined} Present if [`context.todo`][] is called

View File

@ -1595,6 +1595,21 @@ E('ERR_STREAM_WRAP', 'Stream has StringDecoder set or is in objectMode', Error);
E('ERR_STREAM_WRITE_AFTER_END', 'write after end', Error);
E('ERR_SYNTHETIC', 'JavaScript Callstack', Error);
E('ERR_SYSTEM_ERROR', 'A system error occurred', SystemError);
// Raised when the TAP lexer reaches an invalid state (e.g. an unexpected
// character); the message is produced by the lexer itself.
E('ERR_TAP_LEXER_ERROR', function(errorMsg) {
  hideInternalStackFrames(this);
  return errorMsg;
}, Error);
// Raised when the TAP parser cannot process a token. The offending token is
// exposed on `cause`, and its location (line/column/start/end) is appended
// to the message. NOTE(review): the `source` parameter is currently unused —
// confirm whether it was intended for richer diagnostics.
E('ERR_TAP_PARSER_ERROR', function(errorMsg, details, tokenCausedError, source) {
  hideInternalStackFrames(this);
  // Expose the token that triggered the failure to callers via `cause`.
  this.cause = tokenCausedError;
  const { column, line, start, end } = tokenCausedError.location;
  const errorDetails = `${details} at line ${line}, column ${column} (start ${start}, end ${end})`;
  return errorMsg + errorDetails;
}, SyntaxError);
// Raised when a TAP document fails semantic validation (missing version,
// incoherent plan, test points out of range — see the TAP checker).
E('ERR_TAP_VALIDATION_ERROR', function(errorMsg) {
  hideInternalStackFrames(this);
  return errorMsg;
}, Error);
E('ERR_TEST_FAILURE', function(error, failureType) {
hideInternalStackFrames(this);
assert(typeof failureType === 'string',

View File

@ -2,6 +2,7 @@
const {
ArrayFrom,
ArrayPrototypeFilter,
ArrayPrototypeForEach,
ArrayPrototypeIncludes,
ArrayPrototypeJoin,
ArrayPrototypePush,
@ -14,6 +15,7 @@ const {
SafePromiseAllSettledReturnVoid,
SafeMap,
SafeSet,
StringPrototypeRepeat,
} = primordials;
const { spawn } = require('child_process');
@ -31,7 +33,10 @@ const { validateArray, validateBoolean } = require('internal/validators');
const { getInspectPort, isUsingInspector, isInspectorMessage } = require('internal/util/inspector');
const { kEmptyObject } = require('internal/util');
const { createTestTree } = require('internal/test_runner/harness');
const { kSubtestsFailed, Test } = require('internal/test_runner/test');
const { kDefaultIndent, kSubtestsFailed, Test } = require('internal/test_runner/test');
const { TapParser } = require('internal/test_runner/tap_parser');
const { TokenKind } = require('internal/test_runner/tap_lexer');
const {
isSupportedFileType,
doesPathMatchFilter,
@ -120,11 +125,103 @@ function getRunArgs({ path, inspectPort }) {
return argv;
}
// A Test subclass representing a single spawned test file. Parsed TAP AST
// nodes from the child process are forwarded to the reporter; nodes that
// arrive while this subtest is not clear to send are buffered and drained
// in report().
class FileTest extends Test {
  // TAP AST nodes received before this subtest was clear to send.
  #buffer = [];

  // Forward one parsed TAP node to the reporter. Output is indented one
  // level deeper than the node's own nesting, since a file's TAP output
  // nests under the file-level test.
  #handleReportItem({ kind, node, nesting = 0 }) {
    const indent = StringPrototypeRepeat(kDefaultIndent, nesting + 1);

    // Shape YAML diagnostic lines into the { yaml } object the reporter
    // expects; returns a falsy value when there are no diagnostics.
    const details = (diagnostic) => {
      return (
        diagnostic && {
          __proto__: null,
          yaml:
            `${indent} ` +
            ArrayPrototypeJoin(diagnostic, `\n${indent} `) +
            '\n',
        }
      );
    };

    switch (kind) {
      case TokenKind.TAP_VERSION:
        // TODO(manekinekko): handle TAP version coming from the parser.
        // this.reporter.version(node.version);
        break;

      case TokenKind.TAP_PLAN:
        this.reporter.plan(indent, node.end - node.start + 1);
        break;

      case TokenKind.TAP_SUBTEST_POINT:
        this.reporter.subtest(indent, node.name);
        break;

      case TokenKind.TAP_TEST_POINT: {
        // Block scope keeps these bindings local to this case, avoiding the
        // no-case-declarations lint suppressions the previous version needed.
        const { todo, skip, pass } = node.status;

        let directive;
        if (skip) {
          directive = this.reporter.getSkip(node.reason);
        } else if (todo) {
          directive = this.reporter.getTodo(node.reason);
        } else {
          directive = kEmptyObject;
        }

        if (pass) {
          this.reporter.ok(
            indent,
            node.id,
            node.description,
            details(node.diagnostics),
            directive
          );
        } else {
          this.reporter.fail(
            indent,
            node.id,
            node.description,
            details(node.diagnostics),
            directive
          );
        }
        break;
      }

      case TokenKind.COMMENT:
        if (indent === kDefaultIndent) {
          // Ignore file top level diagnostics
          break;
        }
        this.reporter.diagnostic(indent, node.comment);
        break;

      case TokenKind.UNKNOWN:
        this.reporter.diagnostic(indent, node.value);
        break;
    }
  }

  // Report a node immediately when allowed; otherwise buffer it so report()
  // can drain it later in order.
  addToReport(ast) {
    if (!this.isClearToSend()) {
      ArrayPrototypePush(this.#buffer, ast);
      return;
    }
    this.reportSubtest();
    this.#handleReportItem(ast);
  }

  // Drain all buffered nodes, then emit the regular Test report.
  report() {
    this.reportSubtest();
    ArrayPrototypeForEach(this.#buffer, (ast) => this.#handleReportItem(ast));
    super.report();
  }
}
const runningProcesses = new SafeMap();
const runningSubtests = new SafeMap();
function runTestFile(path, root, inspectPort, filesWatcher) {
const subtest = root.createSubtest(Test, path, async (t) => {
const subtest = root.createSubtest(FileTest, path, async (t) => {
const args = getRunArgs({ path, inspectPort });
const stdio = ['pipe', 'pipe', 'pipe'];
const env = { ...process.env };
@ -135,8 +232,7 @@ function runTestFile(path, root, inspectPort, filesWatcher) {
const child = spawn(process.execPath, args, { signal: t.signal, encoding: 'utf8', env, stdio });
runningProcesses.set(path, child);
// TODO(cjihrig): Implement a TAP parser to read the child's stdout
// instead of just displaying it all if the child fails.
let err;
let stderr = '';
@ -159,6 +255,17 @@ function runTestFile(path, root, inspectPort, filesWatcher) {
});
}
const parser = new TapParser();
child.stderr.pipe(parser).on('data', (ast) => {
if (ast.lexeme && isInspectorMessage(ast.lexeme)) {
process.stderr.write(ast.lexeme + '\n');
}
});
child.stdout.pipe(parser).on('data', (ast) => {
subtest.addToReport(ast);
});
const { 0: { 0: code, 1: signal }, 1: stdout } = await SafePromiseAll([
once(child, 'exit', { signal: t.signal }),
child.stdout.toArray({ signal: t.signal }),

View File

@ -0,0 +1,155 @@
'use strict';
const {
ArrayPrototypeFilter,
ArrayPrototypeFind,
NumberParseInt,
} = primordials;
const {
codes: { ERR_TAP_VALIDATION_ERROR },
} = require('internal/errors');
const { TokenKind } = require('internal/test_runner/tap_lexer');
// TODO(@manekinekko): add more validation rules based on the TAP14 spec.
// See https://testanything.org/tap-version-14-specification.html
// Validates a flat TAP AST against the baseline rules shared by TAP13 and
// TAP14 documents: a supported version line must be present, a plan must
// exist and be coherent, and test points must agree with the plan.
class TAPValidationStrategy {
  // Run all rule groups in order; throws ERR_TAP_VALIDATION_ERROR on the
  // first violation, returns true when the document is valid.
  validate(ast) {
    this.#validateVersion(ast);
    this.#validatePlan(ast);
    this.#validateTestPoints(ast);
    return true;
  }

  // A "TAP version N" node must be present, and N must be '13' or '14'.
  #validateVersion(ast) {
    const entry = ArrayPrototypeFind(
      ast,
      (node) => node.kind === TokenKind.TAP_VERSION
    );

    if (!entry) {
      throw new ERR_TAP_VALIDATION_ERROR('missing TAP version');
    }

    const { version } = entry.node;

    // TAP14 specification is compatible with observed behavior of existing TAP13 consumers and producers
    if (version !== '14' && version !== '13') {
      throw new ERR_TAP_VALIDATION_ERROR('TAP version should be 13 or 14');
    }
  }

  // A plan node with both bounds must be present. An "empty" plan ending in
  // 0 is allowed; otherwise start must not exceed end.
  #validatePlan(ast) {
    const entry = ArrayPrototypeFind(
      ast,
      (node) => node.kind === TokenKind.TAP_PLAN
    );

    if (!entry) {
      throw new ERR_TAP_VALIDATION_ERROR('missing TAP plan');
    }

    const plan = entry.node;

    if (!plan.start) {
      throw new ERR_TAP_VALIDATION_ERROR('missing plan start');
    }

    if (!plan.end) {
      throw new ERR_TAP_VALIDATION_ERROR('missing plan end');
    }

    // Plan bounds arrive as strings from the lexer.
    const planStart = NumberParseInt(plan.start, 10);
    const planEnd = NumberParseInt(plan.end, 10);

    if (planEnd !== 0 && planStart > planEnd) {
      throw new ERR_TAP_VALIDATION_ERROR(
        `plan start ${planStart} is greater than plan end ${planEnd}`
      );
    }
  }

  // TODO(@manekinekko): since we are dealing with a flat AST, we need to
  // validate test points grouped by their "nesting" level. This is because a set of
  // Test points belongs to a TAP document. Each new subtest block creates a new TAP document.
  // https://testanything.org/tap-version-14-specification.html#subtests
  #validateTestPoints(ast) {
    const bailoutEntry = ArrayPrototypeFind(
      ast,
      (node) => node.kind === TokenKind.TAP_BAIL_OUT
    );
    // Safe to dereference below: #validatePlan already ran, so a plan entry
    // is guaranteed to exist at this point.
    const planEntry = ArrayPrototypeFind(
      ast,
      (node) => node.kind === TokenKind.TAP_PLAN
    );
    const testPointEntries = ArrayPrototypeFilter(
      ast,
      (node) => node.kind === TokenKind.TAP_TEST_POINT
    );

    const plan = planEntry.node;

    const planStart = NumberParseInt(plan.start, 10);
    const planEnd = NumberParseInt(plan.end, 10);

    // An empty plan (…..0) must not contain any test points.
    if (planEnd === 0 && testPointEntries.length > 0) {
      throw new ERR_TAP_VALIDATION_ERROR(
        `found ${testPointEntries.length} Test Point${
          testPointEntries.length > 1 ? 's' : ''
        } but plan is ${planStart}..0`
      );
    }

    if (planEnd > 0) {
      if (testPointEntries.length === 0) {
        throw new ERR_TAP_VALIDATION_ERROR('missing Test Points');
      }

      // A bail out excuses a count mismatch; otherwise counts must agree.
      if (!bailoutEntry && testPointEntries.length !== planEnd) {
        throw new ERR_TAP_VALIDATION_ERROR(
          `test Points count ${testPointEntries.length} does not match plan count ${planEnd}`
        );
      }

      // Every test point id must fall inside the plan's range.
      for (let i = 0; i < testPointEntries.length; i++) {
        const test = testPointEntries[i].node;
        const testId = NumberParseInt(test.id, 10);

        if (testId < planStart || testId > planEnd) {
          throw new ERR_TAP_VALIDATION_ERROR(
            `test ${testId} is out of plan range ${planStart}..${planEnd}`
          );
        }
      }
    }
  }
}
// TAP14 and TAP13 are compatible with each other: both versions share the
// same base validation rules, so these subclasses exist only to name the
// strategy selected by TapChecker.
class TAP13ValidationStrategy extends TAPValidationStrategy {}
class TAP14ValidationStrategy extends TAPValidationStrategy {}
// Semantic checker for a complete TAP AST. Selects a validation strategy
// from the requested spec version; anything other than an explicit TAP13
// request falls back to the TAP14 strategy.
class TapChecker {
  static TAP13 = '13';
  static TAP14 = '14';

  constructor({ specs }) {
    // Default to TAP14 unless TAP13 was explicitly requested.
    this.strategy =
      specs === TapChecker.TAP13
        ? new TAP13ValidationStrategy()
        : new TAP14ValidationStrategy();
  }

  // Validate the whole document; throws ERR_TAP_VALIDATION_ERROR on the
  // first violation and returns true otherwise.
  check(ast) {
    return this.strategy.validate(ast);
  }
}
module.exports = {
TapChecker,
TAP14ValidationStrategy,
TAP13ValidationStrategy,
};

View File

@ -0,0 +1,522 @@
'use strict';
const { SafeSet, MathMax, StringPrototypeIncludes } = primordials;
const {
codes: { ERR_TAP_LEXER_ERROR },
} = require('internal/errors');
// Sentinel lexeme values for the synthetic end-of-line / end-of-file tokens
// (both are empty strings; the token *kind* distinguishes them).
const kEOL = '';
const kEOF = '';

const TokenKind = {
  EOF: 'EOF',
  EOL: 'EOL',
  NEWLINE: 'NewLine',
  NUMERIC: 'Numeric',
  LITERAL: 'Literal',
  KEYWORD: 'Keyword',
  WHITESPACE: 'Whitespace',
  COMMENT: 'Comment',
  DASH: 'Dash',
  PLUS: 'Plus',
  HASH: 'Hash',
  ESCAPE: 'Escape',
  UNKNOWN: 'Unknown',

  // TAP tokens
  TAP: 'TAPKeyword',
  TAP_VERSION: 'VersionKeyword',
  TAP_PLAN: 'PlanKeyword',
  TAP_TEST_POINT: 'TestPointKeyword',
  TAP_SUBTEST_POINT: 'SubTestPointKeyword',
  TAP_TEST_OK: 'TestOkKeyword',
  TAP_TEST_NOTOK: 'TestNotOkKeyword',
  TAP_YAML_START: 'YamlStartKeyword',
  TAP_YAML_END: 'YamlEndKeyword',
  TAP_YAML_BLOCK: 'YamlKeyword',
  TAP_PRAGMA: 'PragmaKeyword',
  TAP_BAIL_OUT: 'BailOutKeyword',
};

// A single lexical token plus its source location, derived from the current
// position of the InputStream that produced it.
class Token {
  constructor({ kind, value, stream }) {
    const valueLength = ('' + value).length;
    this.kind = kind;
    this.value = value;
    this.location = {
      line: stream.line,
      column: MathMax(stream.column - valueLength + 1, 1), // 1 based
      start: MathMax(stream.pos - valueLength, 0), // zero based
      end: stream.pos - (value === '' ? 0 : 1), // zero based
    };

    // EOF is a special case.
    // Fix: compare the token *kind*. EOF tokens carry kEOF (the empty
    // string) as their value, so the previous comparison against the kind
    // name 'EOF' (`value === TokenKind.EOF`) could never match and the
    // special case was dead code.
    if (kind === TokenKind.EOF) {
      const eofPosition = stream.input.length + 1; // We consider EOF to be outside the stream
      this.location.start = eofPosition;
      this.location.end = eofPosition;
      this.location.column = stream.column + 1; // 1 based
    }
  }
}
// Character-by-character reader over a TAP source string. Tracks a 1-based
// line number and a 0-based column for error reporting; newlines advance the
// line counter and reset the column.
class InputStream {
  constructor(input) {
    this.input = input;
    this.pos = 0;
    this.column = 0;
    this.line = 1;
  }

  // True once every character has been consumed.
  eof() {
    return this.peek() === undefined;
  }

  // Look ahead (or behind, with a negative offset) without consuming;
  // undefined when out of range.
  peek(offset = 0) {
    return this.input[this.pos + offset];
  }

  // Consume and return the next character, or undefined at end of input.
  next() {
    const currentChar = this.peek();
    if (currentChar === undefined) {
      return undefined;
    }

    this.pos += 1;
    this.column += 1;
    if (currentChar === '\n') {
      this.line += 1;
      this.column = 0;
    }
    return currentChar;
  }
}
// Scanner for TAP13/TAP14 documents. Produces chunks of Tokens, one chunk
// per physical line (terminated by a NEWLINE token), with a final chunk
// containing a single EOF token.
class TapLexer {
  // Words the scanner may promote to TAP keyword tokens.
  static Keywords = new SafeSet([
    'TAP',
    'version',
    'ok',
    'not',
    '...',
    '---',
    '..',
    'pragma',
    '-',
    '+',
    // NOTE: "Skip", "Todo" and "Bail out!" literals are deferred to the parser
  ]);

  // True while scanning the remainder of a line that started with a comment.
  #isComment = false;
  // The InputStream being scanned.
  #source = null;
  // NOTE(review): #line and #column are initialized but never updated in
  // this class; positions actually come from the InputStream. They are only
  // read in the "Unexpected character" error message below — confirm whether
  // they should mirror the stream's counters.
  #line = 1;
  #column = 0;
  // Pending backslash escapes for the current line (cleared on newline).
  #escapeStack = [];
  // Last non-whitespace token scanned; used for context-sensitive keywords.
  #lastScannedToken = null;

  constructor(source) {
    this.#source = new InputStream(source);
    // Seed with a synthetic EOL so keywords at the very start of the input
    // are recognized (they require an EOL/NEWLINE predecessor).
    this.#lastScannedToken = new Token({
      kind: TokenKind.EOL,
      value: kEOL,
      stream: this.#source,
    });
  }

  // Scan the whole input. Returns an array of token chunks: one chunk per
  // line (each ending with its NEWLINE token), a trailing chunk closed by a
  // synthetic EOL if the input did not end with a newline, and a final
  // chunk holding only the EOF token.
  scan() {
    const tokens = [];
    let chunk = [];
    while (!this.eof()) {
      const token = this.#scanToken();

      // Remember the last scanned token (except for whitespace)
      if (token.kind !== TokenKind.WHITESPACE) {
        this.#lastScannedToken = token;
      }

      if (token.kind === TokenKind.NEWLINE) {
        // Store the current chunk + NEWLINE token
        tokens.push([...chunk, token]);
        chunk = [];
      } else {
        chunk.push(token);
      }
    }

    if (chunk.length > 0) {
      tokens.push([...chunk, this.#scanEOL()]);
    }

    // send EOF as a separate chunk
    tokens.push([this.#scanEOF()]);

    return tokens;
  }

  // Consume and return the next character from the stream.
  next() {
    return this.#source.next();
  }

  // True once the stream is exhausted.
  eof() {
    return this.#source.eof();
  }

  // NOTE(review): InputStream does not define an error() method, so calling
  // this would throw a TypeError — confirm whether this delegation is
  // intended or leftover from an earlier design.
  error(message, token, expected = '') {
    this.#source.error(message, token, expected);
  }

  // Dispatch on the next character to the appropriate scanner; throws
  // ERR_TAP_LEXER_ERROR on characters no scanner accepts.
  #scanToken() {
    const char = this.next();

    if (this.#isEOFSymbol(char)) {
      return this.#scanEOF();
    } else if (this.#isNewLineSymbol(char)) {
      return this.#scanNewLine(char);
    } else if (this.#isNumericSymbol(char)) {
      return this.#scanNumeric(char);
    } else if (this.#isDashSymbol(char)) {
      return this.#scanDash(char);
    } else if (this.#isPlusSymbol(char)) {
      return this.#scanPlus(char);
    } else if (this.#isHashSymbol(char)) {
      return this.#scanHash(char);
    } else if (this.#isEscapeSymbol(char)) {
      return this.#scanEscapeSymbol(char);
    } else if (this.#isWhitespaceSymbol(char)) {
      return this.#scanWhitespace(char);
    } else if (this.#isLiteralSymbol(char)) {
      return this.#scanLiteral(char);
    }

    throw new ERR_TAP_LEXER_ERROR(
      `Unexpected character: ${char} at line ${this.#line}, column ${
        this.#column
      }`
    );
  }

  #scanNewLine(char) {
    // In case of odd number of ESCAPE symbols, we need to clear the remaining
    // escape chars from the stack and start fresh for the next line.
    this.#escapeStack = [];

    // We also need to reset the comment flag
    this.#isComment = false;

    return new Token({
      kind: TokenKind.NEWLINE,
      value: char,
      stream: this.#source,
    });
  }

  // Synthetic end-of-line token (used to close a final, unterminated line).
  #scanEOL() {
    return new Token({
      kind: TokenKind.EOL,
      value: kEOL,
      stream: this.#source,
    });
  }

  // Synthetic end-of-file token.
  #scanEOF() {
    this.#isComment = false;
    return new Token({
      kind: TokenKind.EOF,
      value: kEOF,
      stream: this.#source,
    });
  }

  #scanEscapeSymbol(char) {
    // If the escape symbol has been escaped (by previous symbol),
    // or if the next symbol is a whitespace symbol,
    // then consume it as a literal.
    // NOTE(review): `peek(1) === TokenKind.WHITESPACE` compares a single
    // character against the kind name 'Whitespace', which can never be true;
    // the whitespace half of this condition appears to be dead — confirm the
    // intended check (likely #isWhitespaceSymbol on a peeked character)
    // before changing behavior.
    if (
      this.#hasTheCurrentCharacterBeenEscaped() ||
      this.#source.peek(1) === TokenKind.WHITESPACE
    ) {
      this.#escapeStack.pop();
      return new Token({
        kind: TokenKind.LITERAL,
        value: char,
        stream: this.#source,
      });
    }

    // Otherwise, consume the escape symbol as an escape symbol that should be ignored by the parser
    // we also need to push the escape symbol to the escape stack
    // and consume the next character as a literal (done in the next turn)
    this.#escapeStack.push(char);
    return new Token({
      kind: TokenKind.ESCAPE,
      value: char,
      stream: this.#source,
    });
  }

  #scanWhitespace(char) {
    return new Token({
      kind: TokenKind.WHITESPACE,
      value: char,
      stream: this.#source,
    });
  }

  #scanDash(char) {
    // Peek next 3 characters and check if it's a YAML start marker
    const marker = char + this.#source.peek() + this.#source.peek(1);

    if (this.#isYamlStartSymbol(marker)) {
      this.next(); // consume second -
      this.next(); // consume third -

      return new Token({
        kind: TokenKind.TAP_YAML_START,
        value: marker,
        stream: this.#source,
      });
    }

    return new Token({
      kind: TokenKind.DASH,
      value: char,
      stream: this.#source,
    });
  }

  #scanPlus(char) {
    return new Token({
      kind: TokenKind.PLUS,
      value: char,
      stream: this.#source,
    });
  }

  #scanHash(char) {
    const lastCharacter = this.#source.peek(-2);
    const nextToken = this.#source.peek();

    // If we encounter a hash symbol at the beginning of a line,
    // we consider it as a comment
    if (!lastCharacter || this.#isNewLineSymbol(lastCharacter)) {
      this.#isComment = true;
      return new Token({
        kind: TokenKind.COMMENT,
        value: char,
        stream: this.#source,
      });
    }

    // The only valid case where a hash symbol is considered as a hash token
    // is when it's preceded by a whitespace symbol and followed by a non-hash symbol
    if (
      this.#isWhitespaceSymbol(lastCharacter) &&
      !this.#isHashSymbol(nextToken)
    ) {
      return new Token({
        kind: TokenKind.HASH,
        value: char,
        stream: this.#source,
      });
    }

    // In every remaining case (inside a comment, escaped, or as a fallback)
    // the hash is consumed as a plain literal. The two previous branches
    // returned identical LITERAL tokens; they are merged here — the only
    // side effect to preserve is popping the escape stack when escaped.
    if (this.#hasTheCurrentCharacterBeenEscaped()) {
      this.#escapeStack.pop();
    }
    return new Token({
      kind: TokenKind.LITERAL,
      value: char,
      stream: this.#source,
    });
  }

  // Scan a run of literal characters into a single word; the word may be
  // promoted to a TAP keyword or YAML end marker depending on context.
  #scanLiteral(char) {
    let word = char;
    while (!this.#source.eof()) {
      const nextChar = this.#source.peek();
      if (this.#isLiteralSymbol(nextChar)) {
        word += this.#source.next();
      } else {
        break;
      }
    }

    word = word.trim();

    if (TapLexer.Keywords.has(word)) {
      const token = this.#scanTAPKeyword(word);
      if (token) {
        return token;
      }
    }

    if (this.#isYamlEndSymbol(word)) {
      return new Token({
        kind: TokenKind.TAP_YAML_END,
        value: word,
        stream: this.#source,
      });
    }

    return new Token({
      kind: TokenKind.LITERAL,
      value: word,
      stream: this.#source,
    });
  }

  // Promote a keyword to a TAP token when the preceding token allows it;
  // returns null when the word is not a keyword in this position.
  #scanTAPKeyword(word) {
    // Fix: compare the previous token's kind directly against the EOL and
    // NEWLINE kinds. The previous code called StringPrototypeIncludes on an
    // *array*, which only worked because the array coerced to the string
    // "EOL,NewLine"; the explicit comparison is equivalent and intentional.
    const lastKind = this.#lastScannedToken.kind;
    const isLastScannedTokenEOLorNewLine =
      lastKind === TokenKind.EOL || lastKind === TokenKind.NEWLINE;

    if (word === 'TAP' && isLastScannedTokenEOLorNewLine) {
      return new Token({
        kind: TokenKind.TAP,
        value: word,
        stream: this.#source,
      });
    }

    if (word === 'version' && this.#lastScannedToken.kind === TokenKind.TAP) {
      return new Token({
        kind: TokenKind.TAP_VERSION,
        value: word,
        stream: this.#source,
      });
    }

    if (word === '..' && this.#lastScannedToken.kind === TokenKind.NUMERIC) {
      return new Token({
        kind: TokenKind.TAP_PLAN,
        value: word,
        stream: this.#source,
      });
    }

    if (word === 'not' && isLastScannedTokenEOLorNewLine) {
      return new Token({
        kind: TokenKind.TAP_TEST_NOTOK,
        value: word,
        stream: this.#source,
      });
    }

    if (
      word === 'ok' &&
      (this.#lastScannedToken.kind === TokenKind.TAP_TEST_NOTOK ||
        isLastScannedTokenEOLorNewLine)
    ) {
      return new Token({
        kind: TokenKind.TAP_TEST_OK,
        value: word,
        stream: this.#source,
      });
    }

    if (word === 'pragma' && isLastScannedTokenEOLorNewLine) {
      return new Token({
        kind: TokenKind.TAP_PRAGMA,
        value: word,
        stream: this.#source,
      });
    }

    return null;
  }

  // Scan a run of digits into a single NUMERIC token.
  #scanNumeric(char) {
    let number = char;
    while (!this.#source.eof()) {
      const nextChar = this.#source.peek();
      if (this.#isNumericSymbol(nextChar)) {
        number += nextChar;
        this.#source.next();
      } else {
        break;
      }
    }
    return new Token({
      kind: TokenKind.NUMERIC,
      value: number,
      stream: this.#source,
    });
  }

  #hasTheCurrentCharacterBeenEscaped() {
    // Use the escapeStack to keep track of the escape characters
    return this.#escapeStack.length > 0;
  }

  #isNumericSymbol(char) {
    return char >= '0' && char <= '9';
  }

  #isLiteralSymbol(char) {
    return (
      (char >= 'a' && char <= 'z') ||
      (char >= 'A' && char <= 'Z') ||
      this.#isSpecialCharacterSymbol(char)
    );
  }

  #isSpecialCharacterSymbol(char) {
    // We deliberately do not include "# \ + -" in this list
    // these are used for comments/reasons explanations, pragma and escape characters
    // whitespace is not included because it is handled separately
    return '!"$%&\'()*,./:;<=>?@[]^_`{|}~'.indexOf(char) > -1;
  }

  #isWhitespaceSymbol(char) {
    return char === ' ' || char === '\t';
  }

  #isEOFSymbol(char) {
    return char === undefined;
  }

  #isNewLineSymbol(char) {
    return char === '\n' || char === '\r';
  }

  #isHashSymbol(char) {
    return char === '#';
  }

  #isDashSymbol(char) {
    return char === '-';
  }

  #isPlusSymbol(char) {
    return char === '+';
  }

  #isEscapeSymbol(char) {
    return char === '\\';
  }

  #isYamlStartSymbol(char) {
    return char === '---';
  }

  #isYamlEndSymbol(char) {
    return char === '...';
  }
}
module.exports = { TapLexer, TokenKind };

View File

@ -0,0 +1,980 @@
'use strict';
const Transform = require('internal/streams/transform');
const { TapLexer, TokenKind } = require('internal/test_runner/tap_lexer');
const { TapChecker } = require('internal/test_runner/tap_checker');
const {
codes: { ERR_TAP_VALIDATION_ERROR, ERR_TAP_PARSER_ERROR },
} = require('internal/errors');
const { kEmptyObject } = require('internal/util');
const {
ArrayPrototypeFilter,
ArrayPrototypeForEach,
ArrayPrototypeJoin,
ArrayPrototypeMap,
ArrayPrototypePush,
ArrayPrototypeIncludes,
ArrayPrototypeSplice,
Boolean,
Number,
RegExpPrototypeExec,
RegExpPrototypeSymbolReplace,
String,
StringPrototypeTrim,
StringPrototypeSplit,
} = primordials;
/**
*
* TAP14 specifications
*
* See https://testanything.org/tap-version-14-specification.html
*
* Note that the following grammar is intended as a rough "pseudocode" guidance.
* It is not strict EBNF:
*
* TAPDocument := Version Plan Body | Version Body Plan
* Version := "TAP version 14\n"
* Plan := "1.." (Number) (" # " Reason)? "\n"
* Body := (TestPoint | BailOut | Pragma | Comment | Anything | Empty | Subtest)*
* TestPoint := ("not ")? "ok" (" " Number)? ((" -")? (" " Description) )? (" " Directive)? "\n" (YAMLBlock)?
* Directive := " # " ("todo" | "skip") (" " Reason)?
* YAMLBlock := " ---\n" (YAMLLine)* " ...\n"
* YAMLLine := " " (YAML)* "\n"
* BailOut := "Bail out!" (" " Reason)? "\n"
* Reason := [^\n]+
* Pragma := "pragma " [+-] PragmaKey "\n"
* PragmaKey := ([a-zA-Z0-9_-])+
* Subtest := ("# Subtest" (": " SubtestName)?)? "\n" SubtestDocument TestPoint
* Comment := ^ (" ")* "#" [^\n]* "\n"
* Empty := [\s\t]* "\n"
* Anything := [^\n]+ "\n"
*
*/
/**
 * An LL(1) parser for TAP14/TAP13.
 */
class TapParser extends Transform {
  // Semantic checker used by check() (sync mode only).
  #checker = null;
  // TapLexer for the input currently being parsed (recreated per chunk).
  #lexer = null;
  // Token most recently consumed by #next(); used for error reporting.
  #currentToken = null;

  // Raw input for the current parse() / parseSync() call.
  #input = '';
  #currentChunkAsString = '';
  // Incomplete trailing line carried between stream chunks (see processChunk).
  #lastLine = '';

  // Token chunks (one per line) produced by the lexer.
  #tokens = [[]];
  // Every emitted node, in order — the flat AST returned by parseSync().
  #flatAST = [];
  // Comment nodes held until a non-comment node arrives (or EOF flushes them).
  #bufferedComments = [];
  // Test point nodes held in case a YAML diagnostics block follows them.
  #bufferedTestPoints = [];
  // Details (e.g. duration) parsed from the last test point's diagnostics.
  #lastTestPointDetails = {};
  // NOTE(review): #yamlBlockBuffer is not referenced in the methods visible
  // here — presumably accumulates YAML lines; verify against the grammar
  // productions further down the file.
  #yamlBlockBuffer = [];

  // Cursor into #tokens: current chunk and index within that chunk.
  #currentTokenIndex = 0;
  #currentTokenChunk = 0;
  // Current subtest nesting depth, derived from leading whitespace.
  #subTestNestingLevel = 0;
  #yamlCurrentIndentationLevel = 0;
  // Number of leading spaces that make up one subtest nesting level.
  #kSubtestBlockIndentationFactor = 4;
  #isYAMLBlock = false;
  // True in parseSync() mode; controls whether #error throws.
  #isSyncParsingEnabled = false;

  constructor({ specs = TapChecker.TAP13 } = kEmptyObject) {
    // Object mode: downstream consumers receive parsed AST nodes, not bytes.
    super({ __proto__: null, readableObjectMode: true });
    this.#checker = new TapChecker({ specs });
  }
  // ----------------------------------------------------------------------//
  // ----------------------------- Public API -----------------------------//
  // ----------------------------------------------------------------------//

  // Parse one chunk asynchronously (streaming mode). Lexer and cursors are
  // reset on every call; previous chunks are never re-parsed. Errors are
  // delivered through callback(null, error) rather than thrown.
  parse(chunkAsString = '', callback = null) {
    this.#isSyncParsingEnabled = false;

    this.#currentTokenChunk = 0;
    this.#currentTokenIndex = 0;

    // Note: we are overwriting the input on each stream call
    // This is fine because we don't want to parse previous chunks
    this.#input = chunkAsString;
    this.#lexer = new TapLexer(chunkAsString);

    try {
      this.#tokens = this.#scanTokens();
      this.#parseTokens(callback);
    } catch (error) {
      // NOTE(review): a null callback here would itself throw a TypeError
      // and mask the original error — callers appear to always pass a
      // callback in streaming mode (see _transform); confirm.
      callback(null, error);
    }
  }

  // Parse a complete TAP document synchronously and return the flat AST.
  // Non-string or empty input yields []. Unlike parse(), lexer/parser
  // errors are thrown (see #error, which only throws in sync mode).
  parseSync(input = '', callback = null) {
    if (typeof input !== 'string' || input === '') {
      return [];
    }

    this.#isSyncParsingEnabled = true;
    this.#input = input;
    this.#lexer = new TapLexer(input);
    this.#tokens = this.#scanTokens();

    this.#parseTokens(callback);

    if (this.#isYAMLBlock) {
      // Looks like we have a non-ending YAML block
      this.#error('Expected end of YAML block');
    }

    // Manually flush the remaining buffered comments and test points
    this._flush();

    return this.#flatAST;
  }

  // Check if the TAP content is semantically valid
  // Note: Validating the TAP content requires the whole AST to be available.
  check() {
    if (this.#isSyncParsingEnabled) {
      return this.#checker.check(this.#flatAST);
    }

    // TODO(@manekinekko): when running in async mode, it doesn't make sense to
    // validate the current chunk. Validation needs to whole AST to be available.
    throw new ERR_TAP_VALIDATION_ERROR(
      'TAP validation is not supported for async parsing'
    );
  }
// ----------------------------------------------------------------------//
// --------------------------- Transform API ----------------------------//
// ----------------------------------------------------------------------//
processChunk(chunk) {
const str = this.#lastLine + chunk.toString('utf8');
const lines = StringPrototypeSplit(str, '\n');
this.#lastLine = ArrayPrototypeSplice(lines, lines.length - 1, 1)[0];
let chunkAsString = lines.join('\n');
// Special case where chunk is emitted by a child process
chunkAsString = RegExpPrototypeSymbolReplace(
/\[out\] /g,
chunkAsString,
''
);
chunkAsString = RegExpPrototypeSymbolReplace(
/\[err\] /g,
chunkAsString,
''
);
chunkAsString = RegExpPrototypeSymbolReplace(/\n$/, chunkAsString, '');
chunkAsString = RegExpPrototypeSymbolReplace(/EOF$/, chunkAsString, '');
return chunkAsString;
}
  // Transform API entry point: normalize the incoming chunk, parse every
  // complete line in it, and emit parsed AST nodes in object mode. Empty
  // chunks are skipped. next() is called once the chunk's EOF node arrives.
  _transform(chunk, _encoding, next) {
    const chunkAsString = this.processChunk(chunk);

    if (!chunkAsString) {
      // Ignore empty chunks
      next();
      return;
    }

    this.parse(chunkAsString, (node, error) => {
      if (error) {
        next(error);
        return;
      }

      if (node.kind === TokenKind.EOF) {
        // Emit when the current chunk is fully processed and consumed
        next();
      }
    });
  }

  // Flush the remaining buffered comments and test points
  // This will be called automatically when the stream is closed
  // We also call this method manually when we reach the end of the sync parsing
  _flush(next = null) {
    if (!this.#lastLine) {
      this.#__flushPendingTestPointsAndComments();
      next?.();
      return;
    }

    // Parse the remaining line
    this.parse(this.#lastLine, (node, error) => {
      this.#lastLine = '';

      if (error) {
        next?.(error);
        return;
      }

      if (node.kind === TokenKind.EOF) {
        this.#__flushPendingTestPointsAndComments();
        next?.();
      }
    });
  }

  // Emit buffered test points first, then buffered comments, then clear
  // both buffers.
  #__flushPendingTestPointsAndComments() {
    ArrayPrototypeForEach(this.#bufferedTestPoints, (node) => {
      this.#emit(node);
    });
    ArrayPrototypeForEach(this.#bufferedComments, (node) => {
      this.#emit(node);
    });

    // Clean up
    this.#bufferedTestPoints = [];
    this.#bufferedComments = [];
  }
  // ----------------------------------------------------------------------//
  // ----------------------------- Private API ----------------------------//
  // ----------------------------------------------------------------------//

  // Tokenize the current input: one token chunk per line, plus a final EOF
  // chunk (see TapLexer.scan()).
  #scanTokens() {
    return this.#lexer.scan();
  }

  // Parse every token chunk in order, then signal completion to the caller
  // with a synthetic EOF node.
  #parseTokens(callback = null) {
    for (let index = 0; index < this.#tokens.length; index++) {
      const chunk = this.#tokens[index];
      this.#parseChunk(chunk);
    }

    callback?.({ kind: TokenKind.EOF });
  }

  // Parse one line's worth of tokens and emit (or buffer) the resulting node.
  #parseChunk(chunk) {
    this.#subTestNestingLevel = this.#getCurrentIndentationLevel(chunk);

    // We compute the current index of the token in the chunk
    // based on the indentation level (number of spaces).
    // We also need to take into account if we are in a YAML block or not.
    // If we are in a YAML block, we compute the current index of the token
    // based on the indentation level of the YAML block (start block).

    if (this.#isYAMLBlock) {
      this.#currentTokenIndex =
        this.#yamlCurrentIndentationLevel *
        this.#kSubtestBlockIndentationFactor;
    } else {
      this.#currentTokenIndex =
        this.#subTestNestingLevel * this.#kSubtestBlockIndentationFactor;
      this.#yamlCurrentIndentationLevel = this.#subTestNestingLevel;
    }

    // Parse current chunk
    const node = this.#TAPDocument(chunk);

    // Emit the parsed node to both the stream and the AST
    this.#emitOrBufferCurrentNode(node);

    // Move pointers to the next chunk and reset the current token index
    this.#currentTokenChunk++;
    this.#currentTokenIndex = 0;
  }

  // Raise a detailed ERR_TAP_PARSER_ERROR in sync mode; in async mode
  // errors are suppressed and the unrecognized token is simply ignored.
  #error(message) {
    if (!this.#isSyncParsingEnabled) {
      // When async parsing is enabled, don't throw.
      // Unrecognized tokens would be ignored.
      return;
    }

    const token = this.#currentToken || { value: '', kind: '' };
    // Escape NewLine characters
    if (token.value === '\n') {
      token.value = '\\n';
    }

    throw new ERR_TAP_PARSER_ERROR(
      message,
      `, received "${token.value}" (${token.kind})`,
      token,
      this.#input
    );
  }

  // Look at the next token in the current chunk without consuming it,
  // optionally skipping over whitespace tokens first.
  #peek(shouldSkipBlankTokens = true) {
    if (shouldSkipBlankTokens) {
      this.#skip(TokenKind.WHITESPACE);
    }

    return this.#tokens[this.#currentTokenChunk][this.#currentTokenIndex];
  }

  // Consume and return the next token of the current chunk (null once the
  // chunk list is exhausted), optionally skipping whitespace first.
  #next(shouldSkipBlankTokens = true) {
    if (shouldSkipBlankTokens) {
      this.#skip(TokenKind.WHITESPACE);
    }

    if (this.#tokens[this.#currentTokenChunk]) {
      this.#currentToken =
        this.#tokens[this.#currentTokenChunk][this.#currentTokenIndex++];
    } else {
      this.#currentToken = null;
    }

    return this.#currentToken;
  }

  // Skip the provided tokens in the current chunk
  #skip(...tokensToSkip) {
    let token = this.#tokens[this.#currentTokenChunk][this.#currentTokenIndex];
    while (token && ArrayPrototypeIncludes(tokensToSkip, token.kind)) {
      // pre-increment to skip current tokens but make sure we don't advance index on the last iteration
      token = this.#tokens[this.#currentTokenChunk][++this.#currentTokenIndex];
    }
  }
  // Read consecutive literal-ish tokens (literal, numeric, dash, plus,
  // whitespace) into one string; escape tokens are consumed but excluded
  // from the output.
  #readNextLiterals() {
    const literals = [];
    let nextToken = this.#peek(false);

    // Read all literal, numeric, whitespace and escape tokens until we hit a different token
    // or reach end of current chunk
    while (
      nextToken &&
      ArrayPrototypeIncludes(
        [
          TokenKind.LITERAL,
          TokenKind.NUMERIC,
          TokenKind.DASH,
          TokenKind.PLUS,
          TokenKind.WHITESPACE,
          TokenKind.ESCAPE,
        ],
        nextToken.kind
      )
    ) {
      const word = this.#next(false).value;

      // Don't output escaped characters
      if (nextToken.kind !== TokenKind.ESCAPE) {
        ArrayPrototypePush(literals, word);
      }

      nextToken = this.#peek(false);
    }

    return ArrayPrototypeJoin(literals, '');
  }

  #countLeadingSpacesInCurrentChunk(chunk) {
    // Count the number of whitespace tokens in the chunk, starting from the first token
    let whitespaceCount = 0;
    while (chunk?.[whitespaceCount]?.kind === TokenKind.WHITESPACE) {
      whitespaceCount++;
    }
    return whitespaceCount;
  }

  // Attach a YAML diagnostics node to the most recently buffered test point
  // (only when both share the same nesting level). Returns the updated test
  // point, or undefined when nothing was buffered.
  // NOTE(review): Array.prototype.at is used here rather than a primordial
  // (none is destructured in this file) — confirm against the module's
  // primordials conventions.
  #addDiagnosticsToLastTestPoint(currentNode) {
    const lastTestPoint = this.#bufferedTestPoints.at(-1);

    // Diagnostic nodes are only added to Test points of the same nesting level
    if (lastTestPoint && lastTestPoint.nesting === currentNode.nesting) {
      lastTestPoint.node.time = this.#lastTestPointDetails.duration;

      // TODO(@manekinekko): figure out where to put the other diagnostic properties
      // See https://github.com/nodejs/node/pull/44952
      lastTestPoint.node.diagnostics ||= [];
      ArrayPrototypeForEach(currentNode.node.diagnostics, (diagnostic) => {
        // Avoid adding empty diagnostics
        if (diagnostic) {
          ArrayPrototypePush(lastTestPoint.node.diagnostics, diagnostic);
        }
      });

      this.#bufferedTestPoints = [];
    }

    return lastTestPoint;
  }

  // Emit the oldest buffered test point, if any, optionally clearing the
  // buffer afterwards.
  #flushBufferedTestPointNode(shouldClearBuffer = true) {
    if (this.#bufferedTestPoints.length > 0) {
      this.#emit(this.#bufferedTestPoints.at(0));
      if (shouldClearBuffer) {
        this.#bufferedTestPoints = [];
      }
    }
  }

  // Move any buffered comment texts onto the given node's `comments` array
  // and clear the comment buffer.
  #addCommentsToCurrentNode(currentNode) {
    if (this.#bufferedComments.length > 0) {
      currentNode.comments = ArrayPrototypeMap(
        this.#bufferedComments,
        (c) => c.node.comment
      );
      this.#bufferedComments = [];
    }

    return currentNode;
  }

  // Emit every buffered comment node, optionally clearing the buffer.
  #flushBufferedComments(shouldClearBuffer = true) {
    if (this.#bufferedComments.length > 0) {
      ArrayPrototypeForEach(this.#bufferedComments, (node) => {
        this.#emit(node);
      });

      if (shouldClearBuffer) {
        this.#bufferedComments = [];
      }
    }
  }

  // One nesting level per #kSubtestBlockIndentationFactor (4) leading
  // spaces; `| 0` truncates to an integer.
  #getCurrentIndentationLevel(chunk) {
    const whitespaceCount = this.#countLeadingSpacesInCurrentChunk(chunk);
    return (whitespaceCount / this.#kSubtestBlockIndentationFactor) | 0;
  }

  // Append the node to the flat AST and push it downstream. EOF nodes are
  // pure control-flow markers and are never emitted.
  #emit(node) {
    if (node.kind !== TokenKind.EOF) {
      ArrayPrototypePush(this.#flatAST, node);
      this.push({
        __proto__: null,
        ...node,
      });
    }
  }
  #emitOrBufferCurrentNode(currentNode) {
    // Stamp the node with its subtest nesting level and the raw source line
    // it was parsed from, then route it: emit immediately, or buffer it so
    // later nodes (diagnostics, comments) can be attached first.
    currentNode = {
      ...currentNode,
      nesting: this.#subTestNestingLevel,
      lexeme: this.#currentChunkAsString,
    };
    switch (currentNode.kind) {
      // Emit these nodes
      case TokenKind.UNKNOWN:
        if (!currentNode.node.value) {
          // Ignore unrecognized and empty nodes
          break;
        }
      // Otherwise continue and process node
      // eslint no-fallthrough
      case TokenKind.TAP_PLAN:
      case TokenKind.TAP_PRAGMA:
      case TokenKind.TAP_VERSION:
      case TokenKind.TAP_BAIL_OUT:
      case TokenKind.TAP_SUBTEST_POINT:
        // Check if we have a buffered test point, and if so, emit it
        this.#flushBufferedTestPointNode();
        // If we have buffered comments, add them to the current node
        currentNode = this.#addCommentsToCurrentNode(currentNode);
        // Emit the current node
        this.#emit(currentNode);
        break;
      // By default, we buffer the next test point node in case we have a diagnostic
      // to add to it in the next iteration
      // Note: in case we hit an EOF, we flush the comments buffer (see _flush())
      case TokenKind.TAP_TEST_POINT:
        // In case of an already buffered test point, we flush it and buffer the current one
        // Because diagnostic nodes are only added to the last processed test point
        this.#flushBufferedTestPointNode();
        // Buffer this node (and also add any pending comments to it)
        ArrayPrototypePush(
          this.#bufferedTestPoints,
          this.#addCommentsToCurrentNode(currentNode)
        );
        break;
      // Keep buffering comments until we hit a non-comment node, then add them to that node
      // Note: in case we hit an EOF, we flush the comments buffer (see _flush())
      case TokenKind.COMMENT:
        ArrayPrototypePush(this.#bufferedComments, currentNode);
        break;
      // Diagnostic nodes are added to Test points of the same nesting level
      case TokenKind.TAP_YAML_END:
        // Emit either the last updated test point (w/ diagnostics) or the current diagnostics node alone
        this.#emit(
          this.#addDiagnosticsToLastTestPoint(currentNode) || currentNode
        );
        break;
      // In case we hit an EOF, we emit it to indicate the end of the stream
      case TokenKind.EOF:
        this.#emit(currentNode);
        break;
    }
  }
#serializeChunk(chunk) {
return ArrayPrototypeJoin(
ArrayPrototypeMap(
// Exclude NewLine and EOF tokens
ArrayPrototypeFilter(
chunk,
(token) =>
token.kind !== TokenKind.NEWLINE && token.kind !== TokenKind.EOF
),
(token) => token.value
),
''
);
}
// --------------------------------------------------------------------------//
// ------------------------------ Parser rules ------------------------------//
// --------------------------------------------------------------------------//
// TAPDocument := Version Plan Body | Version Body Plan
  // Entry-point rule: dispatch on the first token of the chunk to the
  // matching grammar rule. Falls through to an UNKNOWN node when nothing
  // matched, so sync and async parsing emit the same node shape.
  #TAPDocument(tokenChunks) {
    this.#currentChunkAsString = this.#serializeChunk(tokenChunks);
    const firstToken = this.#peek(false);
    if (firstToken) {
      const { kind } = firstToken;
      switch (kind) {
        case TokenKind.TAP:
          return this.#Version();
        case TokenKind.NUMERIC:
          return this.#Plan();
        case TokenKind.TAP_TEST_OK:
        case TokenKind.TAP_TEST_NOTOK:
          return this.#TestPoint();
        case TokenKind.COMMENT:
        case TokenKind.HASH:
          return this.#Comment();
        case TokenKind.TAP_PRAGMA:
          return this.#Pragma();
        case TokenKind.WHITESPACE:
          // Leading whitespace signals YAML-block indentation
          return this.#YAMLBlock();
        case TokenKind.LITERAL:
          // Check for "Bail out!" literal (case insensitive)
          if (
            RegExpPrototypeExec(/^Bail\s+out!/i, this.#currentChunkAsString)
          ) {
            return this.#Bailout();
          } else if (this.#isYAMLBlock) {
            // Literal content inside an open YAML block is a YAML line
            return this.#YAMLBlock();
          }
          // Read token because error needs the last token details
          this.#next(false);
          this.#error('Expected a valid token');
          break;
        case TokenKind.EOF:
          return firstToken;
        case TokenKind.NEWLINE:
          // Consume and ignore NewLine token
          return this.#next(false);
        default:
          // Read token because error needs the last token details
          this.#next(false);
          this.#error('Expected a valid token');
      }
    }
    const node = {
      kind: TokenKind.UNKNOWN,
      node: {
        value: this.#currentChunkAsString,
      },
    };
    // We make sure the emitted node has the same shape
    // both in sync and async parsing (for the stream interface)
    return node;
  }
// ----------------Version----------------
// Version := "TAP version Number\n"
#Version() {
const tapToken = this.#peek();
if (tapToken.kind === TokenKind.TAP) {
this.#next(); // Consume the TAP token
} else {
this.#error('Expected "TAP" keyword');
}
const versionToken = this.#peek();
if (versionToken?.kind === TokenKind.TAP_VERSION) {
this.#next(); // Consume the version token
} else {
this.#error('Expected "version" keyword');
}
const numberToken = this.#peek();
if (numberToken?.kind === TokenKind.NUMERIC) {
const version = this.#next().value;
const node = { kind: TokenKind.TAP_VERSION, node: { version } };
return node;
}
this.#error('Expected a version number');
}
// ----------------Plan----------------
// Plan := "1.." (Number) (" # " Reason)? "\n"
  // Parses a plan line "<start>..<end>" with an optional "# reason".
  #Plan() {
    // Even if specs mention plan starts at 1, we need to make sure we read the plan start value
    // in case of a missing or invalid plan start value
    const planStart = this.#next();
    if (planStart.kind !== TokenKind.NUMERIC) {
      this.#error('Expected a plan start count');
    }
    const planToken = this.#next();
    if (planToken?.kind !== TokenKind.TAP_PLAN) {
      this.#error('Expected ".." symbol');
    }
    const planEnd = this.#next();
    if (planEnd?.kind !== TokenKind.NUMERIC) {
      this.#error('Expected a plan end count');
    }
    const plan = {
      start: planStart.value,
      end: planEnd.value,
    };
    // Read optional reason
    const hashToken = this.#peek();
    if (hashToken) {
      if (hashToken.kind === TokenKind.HASH) {
        this.#next(); // skip hash
        plan.reason = StringPrototypeTrim(this.#readNextLiterals());
      } else if (hashToken.kind === TokenKind.LITERAL) {
        // A bare literal after the plan is invalid: a reason requires "#"
        this.#error('Expected "#" symbol before a reason');
      }
    }
    const node = {
      kind: TokenKind.TAP_PLAN,
      node: plan,
    };
    return node;
  }
// ----------------TestPoint----------------
// TestPoint := ("not ")? "ok" (" " Number)? ((" -")? (" " Description) )? (" " Directive)? "\n" (YAMLBlock)?
// Directive := " # " ("todo" | "skip") (" " Reason)?
// YAMLBlock := " ---\n" (YAMLLine)* " ...\n"
// YAMLLine := " " (YAML)* "\n"
// Test Status: ok/not ok (required)
// Test number (recommended)
// Description (recommended, prefixed by " - ")
// Directive (only when necessary)
  #TestPoint() {
    // Test status: a leading "not" token marks the test point as failed.
    const notToken = this.#peek();
    let isTestFailed = false;
    if (notToken.kind === TokenKind.TAP_TEST_NOTOK) {
      this.#next(); // skip "not" token
      isTestFailed = true;
    }
    const okToken = this.#next();
    if (okToken.kind !== TokenKind.TAP_TEST_OK) {
      this.#error('Expected "ok" or "not ok" keyword');
    }
    // Read optional test number
    let numberToken = this.#peek();
    if (numberToken && numberToken.kind === TokenKind.NUMERIC) {
      numberToken = this.#next().value;
    } else {
      numberToken = ''; // Set an empty ID to indicate that the test hasn't provided an ID
    }
    const test = {
      // Output both failed and passed properties to make it easier for the checker to detect the test status
      status: {
        fail: isTestFailed,
        pass: !isTestFailed,
        todo: false,
        skip: false,
      },
      id: numberToken,
      description: '',
      reason: '',
      time: 0,
      diagnostics: [],
    };
    // Read optional description prefix " - "
    const descriptionDashToken = this.#peek();
    if (descriptionDashToken && descriptionDashToken.kind === TokenKind.DASH) {
      this.#next(); // skip dash
    }
    // Read optional description
    if (this.#peek()) {
      const description = StringPrototypeTrim(this.#readNextLiterals());
      if (description) {
        test.description = description;
      }
    }
    // Read optional directive and reason
    const hashToken = this.#peek();
    if (hashToken && hashToken.kind === TokenKind.HASH) {
      this.#next(); // skip hash
    }
    let todoOrSkipToken = this.#peek();
    if (todoOrSkipToken && todoOrSkipToken.kind === TokenKind.LITERAL) {
      // Directive keywords are matched case-insensitively anywhere in the token
      if (RegExpPrototypeExec(/todo/i, todoOrSkipToken.value)) {
        todoOrSkipToken = 'todo';
        this.#next(); // skip token
      } else if (RegExpPrototypeExec(/skip/i, todoOrSkipToken.value)) {
        todoOrSkipToken = 'skip';
        this.#next(); // skip token
      }
    }
    const reason = StringPrototypeTrim(this.#readNextLiterals());
    if (todoOrSkipToken) {
      // Here todoOrSkipToken is either the string 'todo'/'skip' (a directive
      // matched above) or still a truthy token object (no directive matched,
      // in which case both comparisons below yield false).
      if (reason) {
        test.reason = reason;
      }
      test.status.todo = todoOrSkipToken === 'todo';
      test.status.skip = todoOrSkipToken === 'skip';
    }
    const node = {
      kind: TokenKind.TAP_TEST_POINT,
      node: test,
    };
    return node;
  }
// ----------------Bailout----------------
// BailOut := "Bail out!" (" " Reason)? "\n"
#Bailout() {
this.#next(); // skip "Bail"
this.#next(); // skip "out!"
// Read optional reason
const hashToken = this.#peek();
if (hashToken && hashToken.kind === TokenKind.HASH) {
this.#next(); // skip hash
}
const reason = StringPrototypeTrim(this.#readNextLiterals());
const node = {
kind: TokenKind.TAP_BAIL_OUT,
node: { bailout: true, reason },
};
return node;
}
// ----------------Comment----------------
// Comment := ^ (" ")* "#" [^\n]* "\n"
#Comment() {
const commentToken = this.#next();
if (
commentToken.kind !== TokenKind.COMMENT &&
commentToken.kind !== TokenKind.HASH
) {
this.#error('Expected "#" symbol');
}
const commentContent = this.#peek();
if (commentContent) {
if (/^Subtest:/i.test(commentContent.value)) {
this.#next(); // skip subtest keyword
const name = StringPrototypeTrim(this.#readNextLiterals());
const node = {
kind: TokenKind.TAP_SUBTEST_POINT,
node: {
name,
},
};
return node;
}
const comment = StringPrototypeTrim(this.#readNextLiterals());
const node = {
kind: TokenKind.COMMENT,
node: { comment },
};
return node;
}
// If there is no comment content, then we ignore the current node
}
// ----------------YAMLBlock----------------
// YAMLBlock := " ---\n" (YAMLLine)* " ...\n"
  #YAMLBlock() {
    // YAML blocks are indented; consume up to two leading spaces before
    // inspecting the block marker token.
    const space1 = this.#peek(false);
    if (space1 && space1.kind === TokenKind.WHITESPACE) {
      this.#next(false); // skip 1st space
    }
    const space2 = this.#peek(false);
    if (space2 && space2.kind === TokenKind.WHITESPACE) {
      this.#next(false); // skip 2nd space
    }
    const yamlBlockSymbol = this.#peek(false);
    if (yamlBlockSymbol.kind === TokenKind.WHITESPACE) {
      // A third leading space while no YAML block is open means the
      // indentation is invalid
      if (this.#isYAMLBlock === false) {
        this.#next(false); // skip 3rd space
        this.#error('Expected valid YAML indentation (2 spaces)');
      }
    }
    if (yamlBlockSymbol.kind === TokenKind.TAP_YAML_START) {
      if (this.#isYAMLBlock) {
        // Looks like we have another YAML start block, but we didn't close the previous one
        this.#error('Unexpected YAML start marker');
      }
      this.#isYAMLBlock = true;
      this.#yamlCurrentIndentationLevel = this.#subTestNestingLevel;
      // Cleared here so #YAMLLine can accumulate fresh details for this block
      this.#lastTestPointDetails = {};
      // Consume the YAML start marker
      this.#next(false); // skip "---"
      // No need to pass this token to the stream interface
      return;
    } else if (yamlBlockSymbol.kind === TokenKind.TAP_YAML_END) {
      this.#next(false); // skip "..."
      if (!this.#isYAMLBlock) {
        // Looks like we have a YAML end block, but we didn't encounter any YAML start marker
        this.#error('Unexpected YAML end marker');
      }
      this.#isYAMLBlock = false;
      const diagnostics = this.#yamlBlockBuffer;
      this.#yamlBlockBuffer = []; // Free the buffer for the next YAML block
      const node = {
        kind: TokenKind.TAP_YAML_END,
        node: {
          diagnostics,
        },
      };
      return node;
    }
    if (this.#isYAMLBlock) {
      // Inside an open YAML block: buffer this line as a diagnostic
      this.#YAMLLine();
    } else {
      // Not a YAML marker and no block open: surface as an unknown node
      return {
        kind: TokenKind.UNKNOWN,
        node: {
          value: yamlBlockSymbol.value,
        },
      };
    }
  }
// ----------------YAMLLine----------------
// YAMLLine := " " (YAML)* "\n"
  #YAMLLine() {
    // Buffer one raw YAML line, and capture a handful of known keys into
    // #lastTestPointDetails so they can later be attached to the preceding
    // test point (see #addDiagnosticsToLastTestPoint).
    const yamlLiteral = this.#readNextLiterals();
    // Only the first ":" splits key from value; any further ":" in the
    // value is dropped by this two-element destructuring
    const { 0: key, 1: value } = StringPrototypeSplit(yamlLiteral, ':');
    // Note that this.#lastTestPointDetails has been cleared when we encounter a YAML start marker
    switch (key) {
      case 'duration_ms':
        this.#lastTestPointDetails.duration = Number(value);
        break;
      // Below are diagnostic properties introduced in https://github.com/nodejs/node/pull/44952
      case 'expected':
        // NOTE(review): Boolean(value) is true for any non-empty string,
        // including the string 'false' — confirm this coercion is intended
        this.#lastTestPointDetails.expected = Boolean(value);
        break;
      case 'actual':
        this.#lastTestPointDetails.actual = Boolean(value);
        break;
      case 'operator':
        this.#lastTestPointDetails.operator = String(value);
        break;
    }
    ArrayPrototypePush(this.#yamlBlockBuffer, yamlLiteral);
  }
// ----------------PRAGMA----------------
// Pragma := "pragma " [+-] PragmaKey "\n"
// PragmaKey := ([a-zA-Z0-9_-])+
// TODO(@manekinekko): pragmas are parsed but not used yet! TapChecker() should take care of that.
  #Pragma() {
    // Parses "pragma [+-]key(, [+-]key)*" into a { key: boolean } map.
    const pragmaToken = this.#next();
    if (pragmaToken.kind !== TokenKind.TAP_PRAGMA) {
      this.#error('Expected "pragma" keyword');
    }
    const pragmas = {};
    let nextToken = this.#peek();
    // Collect every "+key"/"-key" pair until the end of the line
    while (
      nextToken &&
      ArrayPrototypeIncludes(
        [TokenKind.NEWLINE, TokenKind.EOF, TokenKind.EOL],
        nextToken.kind
      ) === false
    ) {
      let isEnabled = true;
      const pragmaKeySign = this.#next();
      if (pragmaKeySign.kind === TokenKind.PLUS) {
        isEnabled = true;
      } else if (pragmaKeySign.kind === TokenKind.DASH) {
        isEnabled = false;
      } else {
        this.#error('Expected "+" or "-" before pragma keys');
      }
      const pragmaKeyToken = this.#peek();
      if (pragmaKeyToken.kind !== TokenKind.LITERAL) {
        this.#error('Expected pragma key');
      }
      let pragmaKey = this.#next().value;
      // In some cases, pragma key can be followed by a comma separator,
      // so we need to remove it
      pragmaKey = RegExpPrototypeSymbolReplace(/,/g, pragmaKey, '');
      pragmas[pragmaKey] = isEnabled;
      nextToken = this.#peek();
    }
    const node = {
      kind: TokenKind.TAP_PRAGMA,
      node: {
        pragmas,
      },
    };
    return node;
  }
}
module.exports = { TapParser };

View File

@ -7,6 +7,7 @@ const {
ArrayPrototypeShift,
ObjectEntries,
StringPrototypeReplaceAll,
StringPrototypeToUpperCase,
StringPrototypeSplit,
RegExpPrototypeSymbolReplace,
} = primordials;
@ -15,6 +16,7 @@ const Readable = require('internal/streams/readable');
const { isError, kEmptyObject } = require('internal/util');
const kFrameStartRegExp = /^ {4}at /;
const kLineBreakRegExp = /\n|\r\n/;
const kDefaultTAPVersion = 13;
const inspectOptions = { colors: false, breakLength: Infinity };
let testModule; // Lazy loaded due to circular dependency.
@ -50,16 +52,16 @@ class TapStream extends Readable {
this.#tryPush(`Bail out!${message ? ` ${tapEscape(message)}` : ''}\n`);
}
fail(indent, testNumber, name, duration, error, directive) {
this.emit('test:fail', { __proto__: null, name, testNumber, duration, ...directive, error });
fail(indent, testNumber, name, details, directive) {
this.emit('test:fail', { __proto__: null, name, testNumber, details, ...directive });
this.#test(indent, testNumber, 'not ok', name, directive);
this.#details(indent, duration, error);
this.#details(indent, details);
}
ok(indent, testNumber, name, duration, directive) {
this.emit('test:pass', { __proto__: null, name, testNumber, duration, ...directive });
ok(indent, testNumber, name, details, directive) {
this.emit('test:pass', { __proto__: null, name, testNumber, details, ...directive });
this.#test(indent, testNumber, 'ok', name, directive);
this.#details(indent, duration, null);
this.#details(indent, details);
}
plan(indent, count, explanation) {
@ -80,9 +82,11 @@ class TapStream extends Readable {
this.#tryPush(`${indent}# Subtest: ${tapEscape(name)}\n`);
}
#details(indent, duration, error) {
#details(indent, data = kEmptyObject) {
const { error, duration, yaml } = data;
let details = `${indent} ---\n`;
details += `${yaml ? yaml : ''}`;
details += jsToYaml(indent, 'duration_ms', duration);
details += jsToYaml(indent, null, error);
details += `${indent} ...\n`;
@ -94,8 +98,8 @@ class TapStream extends Readable {
this.#tryPush(`${indent}# ${tapEscape(message)}\n`);
}
version() {
this.#tryPush('TAP version 13\n');
version(spec = kDefaultTAPVersion) {
this.#tryPush(`TAP version ${spec}\n`);
}
#test(indent, testNumber, status, name, directive = kEmptyObject) {
@ -106,10 +110,11 @@ class TapStream extends Readable {
}
line += ArrayPrototypeJoin(ArrayPrototypeMap(ObjectEntries(directive), ({ 0: key, 1: value }) => (
` # ${key.toUpperCase()}${value ? ` ${tapEscape(value)}` : ''}`
` # ${StringPrototypeToUpperCase(key)}${value ? ` ${tapEscape(value)}` : ''}`
)), '');
line += '\n';
this.#tryPush(line);
}
@ -229,7 +234,9 @@ function jsToYaml(indent, name, value) {
StringPrototypeSplit(errStack, kLineBreakRegExp),
(frame) => {
const processed = RegExpPrototypeSymbolReplace(
kFrameStartRegExp, frame, ''
kFrameStartRegExp,
frame,
''
);
if (processed.length > 0 && processed.length !== frame.length) {
@ -242,7 +249,7 @@ function jsToYaml(indent, name, value) {
const frameDelimiter = `\n${indent} `;
result += `${indent} stack: |-${frameDelimiter}`;
result += `${ArrayPrototypeJoin(frames, `${frameDelimiter}`)}\n`;
result += `${ArrayPrototypeJoin(frames, frameDelimiter)}\n`;
}
}
}

View File

@ -62,8 +62,8 @@ const kSubtestsFailed = 'subtestsFailed';
const kTestCodeFailure = 'testCodeFailure';
const kTestTimeoutFailure = 'testTimeoutFailure';
const kHookFailure = 'hookFailed';
const kDefaultIndent = ' ';
const kDefaultTimeout = null;
const kDefaultIndent = ' '; // 4 spaces
const noop = FunctionPrototype;
const isTestRunner = getOptionValue('--test');
const testOnlyFlag = !isTestRunner && getOptionValue('--test-only');
@ -172,7 +172,6 @@ class Test extends AsyncResource {
if (parent === null) {
this.concurrency = 1;
this.indent = '';
this.indentString = kDefaultIndent;
this.only = testOnlyFlag;
this.reporter = new TapStream();
this.runOnlySubtests = this.only;
@ -180,11 +179,10 @@ class Test extends AsyncResource {
this.timeout = kDefaultTimeout;
} else {
const indent = parent.parent === null ? parent.indent :
parent.indent + parent.indentString;
parent.indent + kDefaultIndent;
this.concurrency = parent.concurrency;
this.indent = indent;
this.indentString = parent.indentString;
this.only = only ?? !parent.runOnlySubtests;
this.reporter = parent.reporter;
this.runOnlySubtests = !this.only;
@ -662,6 +660,7 @@ class Test extends AsyncResource {
this.reportSubtest();
}
let directive;
const details = { __proto__: null, duration: this.#duration() };
if (this.skipped) {
directive = this.reporter.getSkip(this.message);
@ -670,9 +669,10 @@ class Test extends AsyncResource {
}
if (this.passed) {
this.reporter.ok(this.indent, this.testNumber, this.name, this.#duration(), directive);
this.reporter.ok(this.indent, this.testNumber, this.name, details, directive);
} else {
this.reporter.fail(this.indent, this.testNumber, this.name, this.#duration(), this.error, directive);
details.error = this.error;
this.reporter.fail(this.indent, this.testNumber, this.name, details, directive);
}
for (let i = 0; i < this.diagnostics.length; i++) {

View File

@ -0,0 +1 @@
console.log('invalid tap output');

24
test/fixtures/test-runner/nested.js vendored Normal file
View File

@ -0,0 +1,24 @@
'use strict';
const test = require('node:test');
test('level 0a', { concurrency: 4 }, async (t) => {
t.test('level 1a', async (t) => {
});
t.test('level 1b', async (t) => {
throw new Error('level 1b error');
});
t.test('level 1c', { skip: 'aaa' }, async (t) => {
});
t.test('level 1d', async (t) => {
t.diagnostic('level 1d diagnostic');
});
});
test('level 0b', async (t) => {
throw new Error('level 0b error');
});

View File

@ -32,8 +32,8 @@ if (process.env.inspectPort === 'addTwo') {
const stream = run({ files: [fixtures.path('test-runner/run_inspect_assert.js')], inspectPort });
if (expectedError) {
stream.on('test:fail', common.mustCall(({ error }) => {
assert.deepStrictEqual({ name: error.cause.name, code: error.cause.code }, expectedError);
stream.on('test:fail', common.mustCall(({ details }) => {
assert.deepStrictEqual({ name: details.error.cause.name, code: details.error.cause.code }, expectedError);
}));
} else {
stream.on('test:fail', common.mustNotCall());

View File

@ -1,4 +1,5 @@
'use strict';
require('../common');
const assert = require('assert');
const { spawnSync } = require('child_process');
@ -104,7 +105,6 @@ const testFixtures = fixtures.path('test-runner');
['--print', 'console.log("should not print")', '--test'],
];
flags.forEach((args) => {
const child = spawnSync(process.execPath, args);
@ -115,3 +115,56 @@ const testFixtures = fixtures.path('test-runner');
assert.match(stderr, /--test/);
});
}
{
// Test combined stream outputs
const args = [
'--test',
'test/fixtures/test-runner/index.test.js',
'test/fixtures/test-runner/nested.js',
'test/fixtures/test-runner/invalid-tap.js',
];
const child = spawnSync(process.execPath, args);
assert.strictEqual(child.status, 1);
assert.strictEqual(child.signal, null);
assert.strictEqual(child.stderr.toString(), '');
const stdout = child.stdout.toString();
assert.match(stdout, /# Subtest: .+index\.test\.js/);
assert.match(stdout, / {4}# Subtest: this should pass/);
assert.match(stdout, / {4}ok 1 - this should pass/);
assert.match(stdout, / {6}---/);
assert.match(stdout, / {6}duration_ms: .*/);
assert.match(stdout, / {6}\.\.\./);
assert.match(stdout, / {4}1\.\.1/);
assert.match(stdout, /ok 1 - .+index\.test\.js/);
assert.match(stdout, /# Subtest: .+invalid-tap\.js/);
assert.match(stdout, / {4}# invalid tap output/);
assert.match(stdout, /ok 2 - .+invalid-tap\.js/);
assert.match(stdout, /# Subtest: .+nested\.js/);
assert.match(stdout, / {4}# Subtest: level 0a/);
assert.match(stdout, / {8}# Subtest: level 1a/);
assert.match(stdout, / {8}ok 1 - level 1a/);
assert.match(stdout, / {8}# Subtest: level 1b/);
assert.match(stdout, / {8}not ok 2 - level 1b/);
assert.match(stdout, / {10}code: 'ERR_TEST_FAILURE'/);
assert.match(stdout, / {10}stack: |-'/);
assert.match(stdout, / {12}TestContext\.<anonymous> .*/);
assert.match(stdout, / {8}# Subtest: level 1c/);
assert.match(stdout, / {8}ok 3 - level 1c # SKIP aaa/);
assert.match(stdout, / {8}# Subtest: level 1d/);
assert.match(stdout, / {8}ok 4 - level 1d/);
assert.match(stdout, / {4}not ok 1 - level 0a/);
assert.match(stdout, / {6}error: '1 subtest failed'/);
assert.match(stdout, / {4}# Subtest: level 0b/);
assert.match(stdout, / {4}not ok 2 - level 0b/);
assert.match(stdout, / {6}error: 'level 0b error'/);
assert.match(stdout, /not ok 3 - .+nested\.js/);
assert.match(stdout, /# tests 3/);
assert.match(stdout, /# pass 2/);
assert.match(stdout, /# fail 1/);
}

View File

@ -37,7 +37,6 @@ tmpdir.refresh();
assert.match(stderr,
/Warning: Using the inspector with --test forces running at a concurrency of 1\. Use the inspectPort option to run with concurrency/);
assert.match(stdout, /not ok 1 - .+index\.js/);
assert.match(stdout, /stderr: \|-\r?\n\s+Debugger listening on/);
assert.strictEqual(code, 1);
assert.strictEqual(signal, null);
}

View File

@ -28,7 +28,7 @@ describe('require(\'node:test\').run', { concurrency: true }, () => {
it('should succeed with a file', async () => {
const stream = run({ files: [join(testFixtures, 'test/random.cjs')] });
stream.on('test:fail', common.mustNotCall());
stream.on('test:pass', common.mustCall(1));
stream.on('test:pass', common.mustCall(2));
// eslint-disable-next-line no-unused-vars
for await (const _ of stream); // TODO(MoLow): assert.snapshot
});
@ -36,7 +36,7 @@ describe('require(\'node:test\').run', { concurrency: true }, () => {
it('should run same file twice', async () => {
const stream = run({ files: [join(testFixtures, 'test/random.cjs'), join(testFixtures, 'test/random.cjs')] });
stream.on('test:fail', common.mustNotCall());
stream.on('test:pass', common.mustCall(2));
stream.on('test:pass', common.mustCall(4));
// eslint-disable-next-line no-unused-vars
for await (const _ of stream); // TODO(MoLow): assert.snapshot
});

View File

@ -0,0 +1,119 @@
'use strict';
// Flags: --expose-internals
require('../common');
const assert = require('assert');
const { TapParser } = require('internal/test_runner/tap_parser');
const { TapChecker } = require('internal/test_runner/tap_checker');
function TAPChecker(input) {
// parse
const parser = new TapParser({ specs: TapChecker.TAP14 });
parser.parseSync(input);
parser.check();
}
[
['TAP version 14', 'missing TAP plan'],
[`
TAP version 14
1..1
`, 'missing Test Points'],
[`
TAP version 14
1..1
ok 2
`, 'test 2 is out of plan range 1..1'],
[`
TAP version 14
3..1
ok 2
`, 'plan start 3 is greater than plan end 1'],
[`
TAP version 14
2..3
ok 1
ok 2
ok 3
`, 'test 1 is out of plan range 2..3'],
].forEach(([str, message]) => {
assert.throws(() => TAPChecker(str), {
code: 'ERR_TAP_VALIDATION_ERROR',
message,
});
});
// Valid TAP14 should not throw
TAPChecker(`
TAP version 14
1..1
ok
`);
// Valid comment line should not throw.
TAPChecker(`
TAP version 14
1..5
ok 1 - approved operating system
# $^0 is solaris
ok 2 - # SKIP no /sys directory
ok 3 - # SKIP no /sys directory
ok 4 - # SKIP no /sys directory
ok 5 - # SKIP no /sys directory
`);
// Valid empty test plan should not throw.
TAPChecker(`
TAP version 14
1..0 # skip because English-to-French translator isn't installed
`);
// Valid test plan count should not throw.
TAPChecker(`
TAP version 14
1..4
ok 1 - Creating test program
ok 2 - Test program runs, no error
not ok 3 - infinite loop # TODO halting problem unsolved
not ok 4 - infinite loop 2 # TODO halting problem unsolved
`);
// Valid YAML diagnostic should not throw.
TAPChecker(`
TAP version 14
ok - created Board
ok
ok
ok
ok
ok
ok
ok
---
message: "Board layout"
severity: comment
dump:
board:
- ' 16G 05C '
- ' G N C C C G '
- ' G C + '
- '10C 01G 03C '
- 'R N G G A G C C C '
- ' R G C + '
- ' 01G 17C 00C '
- ' G A G G N R R N R '
- ' G R G '
...
ok - board has 7 tiles + starter tile
1..9
`);
// Valid Bail out should not throw.
TAPChecker(`
TAP version 14
1..573
not ok 1 - database handle
Bail out! Couldn't connect to database.
`);

View File

@ -0,0 +1,446 @@
'use strict';
// Flags: --expose-internals
require('../common');
const assert = require('assert');
const { TapLexer, TokenKind } = require('internal/test_runner/tap_lexer');
function TAPLexer(input) {
const lexer = new TapLexer(input);
return lexer.scan().flat();
}
{
const tokens = TAPLexer('');
assert.strictEqual(tokens[0].kind, TokenKind.EOF);
assert.strictEqual(tokens[0].value, '');
}
{
const tokens = TAPLexer('TAP version 14');
[
{ kind: TokenKind.TAP, value: 'TAP' },
{ kind: TokenKind.WHITESPACE, value: ' ' },
{ kind: TokenKind.TAP_VERSION, value: 'version' },
{ kind: TokenKind.WHITESPACE, value: ' ' },
{ kind: TokenKind.NUMERIC, value: '14' },
{ kind: TokenKind.EOL, value: '' },
{ kind: TokenKind.EOF, value: '' },
].forEach((token, index) => {
assert.strictEqual(tokens[index].kind, token.kind);
assert.strictEqual(tokens[index].value, token.value);
});
}
{
const tokens = TAPLexer('1..5 # reason');
[
{ kind: TokenKind.NUMERIC, value: '1' },
{ kind: TokenKind.TAP_PLAN, value: '..' },
{ kind: TokenKind.NUMERIC, value: '5' },
{ kind: TokenKind.WHITESPACE, value: ' ' },
{ kind: TokenKind.HASH, value: '#' },
{ kind: TokenKind.WHITESPACE, value: ' ' },
{ kind: TokenKind.LITERAL, value: 'reason' },
{ kind: TokenKind.EOL, value: '' },
].forEach((token, index) => {
assert.strictEqual(tokens[index].kind, token.kind);
assert.strictEqual(tokens[index].value, token.value);
});
}
{
const tokens = TAPLexer(
'1..5 # reason "\\ !"\\#$%&\'()*+,\\-./:;<=>?@[]^_`{|}~'
);
[
{ kind: TokenKind.NUMERIC, value: '1' },
{ kind: TokenKind.TAP_PLAN, value: '..' },
{ kind: TokenKind.NUMERIC, value: '5' },
{ kind: TokenKind.WHITESPACE, value: ' ' },
{ kind: TokenKind.HASH, value: '#' },
{ kind: TokenKind.WHITESPACE, value: ' ' },
{ kind: TokenKind.LITERAL, value: 'reason' },
{ kind: TokenKind.WHITESPACE, value: ' ' },
{ kind: TokenKind.LITERAL, value: '"' },
{ kind: TokenKind.ESCAPE, value: '\\' },
{ kind: TokenKind.WHITESPACE, value: ' ' },
{ kind: TokenKind.LITERAL, value: '!"' },
{ kind: TokenKind.LITERAL, value: '\\' },
{ kind: TokenKind.LITERAL, value: '#' },
{ kind: TokenKind.LITERAL, value: "$%&'()*" },
{ kind: TokenKind.PLUS, value: '+' },
{ kind: TokenKind.LITERAL, value: ',' },
{ kind: TokenKind.ESCAPE, value: '\\' },
{ kind: TokenKind.DASH, value: '-' },
{ kind: TokenKind.LITERAL, value: './:;<=>?@[]^_`{|}~' },
{ kind: TokenKind.EOL, value: '' },
].forEach((token, index) => {
assert.strictEqual(tokens[index].kind, token.kind);
assert.strictEqual(tokens[index].value, token.value);
});
}
{
const tokens = TAPLexer('ok');
[
{ kind: TokenKind.TAP_TEST_OK, value: 'ok' },
{ kind: TokenKind.EOL, value: '' },
].forEach((token, index) => {
assert.strictEqual(tokens[index].kind, token.kind);
assert.strictEqual(tokens[index].value, token.value);
});
}
{
const tokens = TAPLexer('not ok');
[
{ kind: TokenKind.TAP_TEST_NOTOK, value: 'not' },
{ kind: TokenKind.WHITESPACE, value: ' ' },
{ kind: TokenKind.TAP_TEST_OK, value: 'ok' },
{ kind: TokenKind.EOL, value: '' },
].forEach((token, index) => {
assert.strictEqual(tokens[index].kind, token.kind);
assert.strictEqual(tokens[index].value, token.value);
});
}
{
const tokens = TAPLexer('ok 1');
[
{ kind: TokenKind.TAP_TEST_OK, value: 'ok' },
{ kind: TokenKind.WHITESPACE, value: ' ' },
{ kind: TokenKind.NUMERIC, value: '1' },
{ kind: TokenKind.EOL, value: '' },
].forEach((token, index) => {
assert.strictEqual(tokens[index].kind, token.kind);
assert.strictEqual(tokens[index].value, token.value);
});
}
{
const tokens = TAPLexer(`
ok 1
not ok 2
`);
[
{ kind: TokenKind.NEWLINE, value: '\n' },
{ kind: TokenKind.TAP_TEST_OK, value: 'ok' },
{ kind: TokenKind.WHITESPACE, value: ' ' },
{ kind: TokenKind.NUMERIC, value: '1' },
{ kind: TokenKind.NEWLINE, value: '\n' },
{ kind: TokenKind.TAP_TEST_NOTOK, value: 'not' },
{ kind: TokenKind.WHITESPACE, value: ' ' },
{ kind: TokenKind.TAP_TEST_OK, value: 'ok' },
{ kind: TokenKind.WHITESPACE, value: ' ' },
{ kind: TokenKind.NUMERIC, value: '2' },
{ kind: TokenKind.NEWLINE, value: '\n' },
{ kind: TokenKind.EOF, value: '' },
].forEach((token, index) => {
assert.strictEqual(tokens[index].kind, token.kind);
assert.strictEqual(tokens[index].value, token.value);
});
}
{
const tokens = TAPLexer(`
ok 1
ok 1
`);
[
{ kind: TokenKind.NEWLINE, value: '\n' },
{ kind: TokenKind.TAP_TEST_OK, value: 'ok' },
{ kind: TokenKind.WHITESPACE, value: ' ' },
{ kind: TokenKind.NUMERIC, value: '1' },
{ kind: TokenKind.NEWLINE, value: '\n' },
{ kind: TokenKind.WHITESPACE, value: ' ' },
{ kind: TokenKind.WHITESPACE, value: ' ' },
{ kind: TokenKind.WHITESPACE, value: ' ' },
{ kind: TokenKind.WHITESPACE, value: ' ' },
{ kind: TokenKind.TAP_TEST_OK, value: 'ok' },
{ kind: TokenKind.WHITESPACE, value: ' ' },
{ kind: TokenKind.NUMERIC, value: '1' },
{ kind: TokenKind.NEWLINE, value: '\n' },
{ kind: TokenKind.EOF, value: '' },
].forEach((token, index) => {
assert.strictEqual(tokens[index].kind, token.kind);
assert.strictEqual(tokens[index].value, token.value);
});
}
{
const tokens = TAPLexer('ok 1 description');
[
{ kind: TokenKind.TAP_TEST_OK, value: 'ok' },
{ kind: TokenKind.WHITESPACE, value: ' ' },
{ kind: TokenKind.NUMERIC, value: '1' },
{ kind: TokenKind.WHITESPACE, value: ' ' },
{ kind: TokenKind.LITERAL, value: 'description' },
{ kind: TokenKind.EOL, value: '' },
].forEach((token, index) => {
assert.strictEqual(tokens[index].kind, token.kind);
assert.strictEqual(tokens[index].value, token.value);
});
}
{
const tokens = TAPLexer('ok 1 - description');
[
{ kind: TokenKind.TAP_TEST_OK, value: 'ok' },
{ kind: TokenKind.WHITESPACE, value: ' ' },
{ kind: TokenKind.NUMERIC, value: '1' },
{ kind: TokenKind.WHITESPACE, value: ' ' },
{ kind: TokenKind.DASH, value: '-' },
{ kind: TokenKind.WHITESPACE, value: ' ' },
{ kind: TokenKind.LITERAL, value: 'description' },
{ kind: TokenKind.EOL, value: '' },
].forEach((token, index) => {
assert.strictEqual(tokens[index].kind, token.kind);
assert.strictEqual(tokens[index].value, token.value);
});
}
{
const tokens = TAPLexer('ok 1 - description # todo');
[
{ kind: TokenKind.TAP_TEST_OK, value: 'ok' },
{ kind: TokenKind.WHITESPACE, value: ' ' },
{ kind: TokenKind.NUMERIC, value: '1' },
{ kind: TokenKind.WHITESPACE, value: ' ' },
{ kind: TokenKind.DASH, value: '-' },
{ kind: TokenKind.WHITESPACE, value: ' ' },
{ kind: TokenKind.LITERAL, value: 'description' },
{ kind: TokenKind.WHITESPACE, value: ' ' },
{ kind: TokenKind.HASH, value: '#' },
{ kind: TokenKind.WHITESPACE, value: ' ' },
{ kind: TokenKind.LITERAL, value: 'todo' },
{ kind: TokenKind.EOL, value: '' },
].forEach((token, index) => {
assert.strictEqual(tokens[index].kind, token.kind);
assert.strictEqual(tokens[index].value, token.value);
});
}
{
  // A backslash-escaped `#` is lexed as ESCAPE + LITERAL, not HASH.
  const tokens = TAPLexer('ok 1 - description \\# todo');
  const expectedTokens = [
    [TokenKind.TAP_TEST_OK, 'ok'],
    [TokenKind.WHITESPACE, ' '],
    [TokenKind.NUMERIC, '1'],
    [TokenKind.WHITESPACE, ' '],
    [TokenKind.DASH, '-'],
    [TokenKind.WHITESPACE, ' '],
    [TokenKind.LITERAL, 'description'],
    [TokenKind.WHITESPACE, ' '],
    [TokenKind.ESCAPE, '\\'],
    [TokenKind.LITERAL, '#'],
    [TokenKind.WHITESPACE, ' '],
    [TokenKind.LITERAL, 'todo'],
    [TokenKind.EOL, ''],
  ];
  for (const [index, [kind, value]] of expectedTokens.entries()) {
    assert.strictEqual(tokens[index].kind, kind);
    assert.strictEqual(tokens[index].value, value);
  }
}
{
  // A trailing escape followed by whitespace leaves the later `#` live.
  const tokens = TAPLexer('ok 1 - description \\ # todo');
  const expectedTokens = [
    [TokenKind.TAP_TEST_OK, 'ok'],
    [TokenKind.WHITESPACE, ' '],
    [TokenKind.NUMERIC, '1'],
    [TokenKind.WHITESPACE, ' '],
    [TokenKind.DASH, '-'],
    [TokenKind.WHITESPACE, ' '],
    [TokenKind.LITERAL, 'description'],
    [TokenKind.WHITESPACE, ' '],
    [TokenKind.ESCAPE, '\\'],
    [TokenKind.WHITESPACE, ' '],
    [TokenKind.HASH, '#'],
    [TokenKind.WHITESPACE, ' '],
    [TokenKind.LITERAL, 'todo'],
    [TokenKind.EOL, ''],
  ];
  for (const [index, [kind, value]] of expectedTokens.entries()) {
    assert.strictEqual(tokens[index].kind, kind);
    assert.strictEqual(tokens[index].value, value);
  }
}
{
  // Mix of escaped `\#` and `\\` sequences alongside a live `#` directive.
  const tokens = TAPLexer(
    'ok 1 description \\# \\\\ world # TODO escape \\# characters with \\\\'
  );
  const expectedTokens = [
    [TokenKind.TAP_TEST_OK, 'ok'],
    [TokenKind.WHITESPACE, ' '],
    [TokenKind.NUMERIC, '1'],
    [TokenKind.WHITESPACE, ' '],
    [TokenKind.LITERAL, 'description'],
    [TokenKind.WHITESPACE, ' '],
    [TokenKind.ESCAPE, '\\'],
    [TokenKind.LITERAL, '#'],
    [TokenKind.WHITESPACE, ' '],
    [TokenKind.ESCAPE, '\\'],
    [TokenKind.LITERAL, '\\'],
    [TokenKind.WHITESPACE, ' '],
    [TokenKind.LITERAL, 'world'],
    [TokenKind.WHITESPACE, ' '],
    [TokenKind.HASH, '#'],
    [TokenKind.WHITESPACE, ' '],
    [TokenKind.LITERAL, 'TODO'],
    [TokenKind.WHITESPACE, ' '],
    [TokenKind.LITERAL, 'escape'],
    [TokenKind.WHITESPACE, ' '],
    [TokenKind.ESCAPE, '\\'],
    [TokenKind.LITERAL, '#'],
    [TokenKind.WHITESPACE, ' '],
    [TokenKind.LITERAL, 'characters'],
    [TokenKind.WHITESPACE, ' '],
    [TokenKind.LITERAL, 'with'],
    [TokenKind.WHITESPACE, ' '],
    [TokenKind.ESCAPE, '\\'],
    [TokenKind.LITERAL, '\\'],
    [TokenKind.EOL, ''],
  ];
  for (const [index, [kind, value]] of expectedTokens.entries()) {
    assert.strictEqual(tokens[index].kind, kind);
    assert.strictEqual(tokens[index].value, value);
  }
}
{
  // After the directive `#`, further hashes are plain literals.
  const tokens = TAPLexer('ok 1 - description # ##');
  const expectedTokens = [
    [TokenKind.TAP_TEST_OK, 'ok'],
    [TokenKind.WHITESPACE, ' '],
    [TokenKind.NUMERIC, '1'],
    [TokenKind.WHITESPACE, ' '],
    [TokenKind.DASH, '-'],
    [TokenKind.WHITESPACE, ' '],
    [TokenKind.LITERAL, 'description'],
    [TokenKind.WHITESPACE, ' '],
    [TokenKind.HASH, '#'],
    [TokenKind.WHITESPACE, ' '],
    [TokenKind.LITERAL, '#'],
    [TokenKind.LITERAL, '#'],
    [TokenKind.EOL, ''],
  ];
  for (const [index, [kind, value]] of expectedTokens.entries()) {
    assert.strictEqual(tokens[index].kind, kind);
    assert.strictEqual(tokens[index].value, value);
  }
}
{
  // A line-leading `#` is lexed as a COMMENT token.
  const tokens = TAPLexer('# comment');
  const expectedTokens = [
    [TokenKind.COMMENT, '#'],
    [TokenKind.WHITESPACE, ' '],
    [TokenKind.LITERAL, 'comment'],
    [TokenKind.EOL, ''],
  ];
  for (const [index, [kind, value]] of expectedTokens.entries()) {
    assert.strictEqual(tokens[index].kind, kind);
    assert.strictEqual(tokens[index].value, value);
  }
}
{
  // A bare `#` on its own line is still a comment marker.
  const tokens = TAPLexer('#');
  const expectedTokens = [
    [TokenKind.COMMENT, '#'],
    [TokenKind.EOL, ''],
  ];
  for (const [index, [kind, value]] of expectedTokens.entries()) {
    assert.strictEqual(tokens[index].kind, kind);
    assert.strictEqual(tokens[index].value, value);
  }
}
{
  // Lexing of a YAML diagnostic block: `---` opens it and `...` closes it;
  // each leading space is emitted as an individual WHITESPACE token.
  // NOTE(review): the diff viewer collapsed significant whitespace; the
  // template literal's indentation below was reconstructed from the expected
  // WHITESPACE token counts (2 before ---/..., 4 before the key/value lines)
  // — verify against the original file.
  const tokens = TAPLexer(`
  ---
    message: "description"
    severity: fail
  ...
`);
  [
    { kind: TokenKind.NEWLINE, value: '\n' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.TAP_YAML_START, value: '---' },
    { kind: TokenKind.NEWLINE, value: '\n' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.LITERAL, value: 'message:' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.LITERAL, value: '"description"' },
    { kind: TokenKind.NEWLINE, value: '\n' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.LITERAL, value: 'severity:' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.LITERAL, value: 'fail' },
    { kind: TokenKind.NEWLINE, value: '\n' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.WHITESPACE, value: ' ' },
    { kind: TokenKind.TAP_YAML_END, value: '...' },
    { kind: TokenKind.NEWLINE, value: '\n' },
    { kind: TokenKind.EOF, value: '' },
  ].forEach((token, index) => {
    assert.strictEqual(tokens[index].kind, token.kind);
    assert.strictEqual(tokens[index].value, token.value);
  });
}
{
  // Pragma line: `+` enables and `-` disables a named pragma.
  const tokens = TAPLexer('pragma +strict -warnings');
  const expectedTokens = [
    [TokenKind.TAP_PRAGMA, 'pragma'],
    [TokenKind.WHITESPACE, ' '],
    [TokenKind.PLUS, '+'],
    [TokenKind.LITERAL, 'strict'],
    [TokenKind.WHITESPACE, ' '],
    [TokenKind.DASH, '-'],
    [TokenKind.LITERAL, 'warnings'],
    [TokenKind.EOL, ''],
  ];
  for (const [index, [kind, value]] of expectedTokens.entries()) {
    assert.strictEqual(tokens[index].kind, kind);
    assert.strictEqual(tokens[index].value, value);
  }
}
{
  // "Bail out!" is not a keyword at the lexer level: plain literals.
  const tokens = TAPLexer('Bail out! Error');
  const expectedTokens = [
    [TokenKind.LITERAL, 'Bail'],
    [TokenKind.WHITESPACE, ' '],
    [TokenKind.LITERAL, 'out!'],
    [TokenKind.WHITESPACE, ' '],
    [TokenKind.LITERAL, 'Error'],
    [TokenKind.EOL, ''],
  ];
  for (const [index, [kind, value]] of expectedTokens.entries()) {
    assert.strictEqual(tokens[index].kind, kind);
    assert.strictEqual(tokens[index].value, value);
  }
}

View File

@ -0,0 +1,629 @@
// Flags: --expose-internals
'use strict';
const common = require('../common');
const assert = require('node:assert');
const { TapParser } = require('internal/test_runner/tap_parser');
const { TapChecker } = require('internal/test_runner/tap_checker');
// Table-driven fixtures: each entry feeds `input` to a fresh TapParser and
// expects the emitted AST nodes listed in `expected` (kind, nesting level,
// parsed node payload, and the raw lexeme the node was built from).
// NOTE(review): the diff viewer collapsed significant whitespace; the
// indentation inside the template literals and lexeme strings below was
// reconstructed from the stated nesting levels (4 spaces per level, YAML
// blocks indented 2 further) — verify against the original file.
const cases = [
  // Bare version line.
  {
    input: 'TAP version 13',
    expected: [
      {
        nesting: 0,
        kind: 'VersionKeyword',
        node: { version: '13' },
        lexeme: 'TAP version 13',
      },
    ],
  },
  // Unrecognized input surfaces as an "Unknown" node rather than an error.
  {
    input: 'invalid tap',
    expected: [
      {
        nesting: 0,
        kind: 'Unknown',
        node: { value: 'invalid tap' },
        lexeme: 'invalid tap',
      },
    ],
  },
  // Unknown content following a valid version line.
  {
    input: 'TAP version 13\ninvalid tap after harness',
    expected: [
      {
        nesting: 0,
        kind: 'VersionKeyword',
        node: { version: '13' },
        lexeme: 'TAP version 13',
      },
      {
        nesting: 0,
        kind: 'Unknown',
        node: { value: 'invalid tap after harness' },
        lexeme: 'invalid tap after harness',
      },
    ],
  },
  // Comment nesting derived from leading indentation.
  {
    input: `TAP version 13
    # nested diagnostic
# diagnostic comment`,
    expected: [
      {
        nesting: 0,
        kind: 'VersionKeyword',
        node: { version: '13' },
        lexeme: 'TAP version 13',
      },
      {
        nesting: 1,
        kind: 'Comment',
        node: { comment: 'nested diagnostic' },
        lexeme: '    # nested diagnostic',
      },
      {
        nesting: 0,
        kind: 'Comment',
        node: { comment: 'diagnostic comment' },
        lexeme: '# diagnostic comment',
      },
    ],
  },
  // Multiple plan lines at different nesting levels.
  {
    input: `TAP version 13
    1..5
1..3
2..2`,
    expected: [
      {
        nesting: 0,
        kind: 'VersionKeyword',
        node: { version: '13' },
        lexeme: 'TAP version 13',
      },
      {
        nesting: 1,
        kind: 'PlanKeyword',
        node: { start: '1', end: '5' },
        lexeme: '    1..5',
      },
      {
        nesting: 0,
        kind: 'PlanKeyword',
        node: { start: '1', end: '3' },
        lexeme: '1..3',
      },
      {
        nesting: 0,
        kind: 'PlanKeyword',
        node: { start: '2', end: '2' },
        lexeme: '2..2',
      },
    ],
  },
  // Test points: plain pass, SKIP directive, and failing TODO with reason.
  {
    input: `TAP version 13
ok 1 - test
ok 2 - test # SKIP
not ok 3 - test # TODO reason`,
    expected: [
      {
        nesting: 0,
        kind: 'VersionKeyword',
        node: { version: '13' },
        lexeme: 'TAP version 13',
      },
      {
        nesting: 0,
        kind: 'TestPointKeyword',
        node: {
          status: { fail: false, pass: true, todo: false, skip: false },
          id: '1',
          description: 'test',
          reason: '',
          time: 0,
          diagnostics: [],
        },
        lexeme: 'ok 1 - test',
      },
      {
        nesting: 0,
        kind: 'TestPointKeyword',
        node: {
          status: { fail: false, pass: true, todo: false, skip: true },
          id: '2',
          description: 'test',
          reason: '',
          time: 0,
          diagnostics: [],
        },
        lexeme: 'ok 2 - test # SKIP',
      },
      {
        nesting: 0,
        kind: 'TestPointKeyword',
        node: {
          status: { fail: true, pass: false, todo: true, skip: false },
          id: '3',
          description: 'test',
          reason: 'reason',
          time: 0,
          diagnostics: [],
        },
        lexeme: 'not ok 3 - test # TODO reason',
      },
    ],
  },
  // A subtest declaration followed by sibling test points.
  {
    input: `TAP version 13
# Subtest: test
ok 1 - test
ok 2 - test`,
    expected: [
      {
        nesting: 0,
        kind: 'VersionKeyword',
        node: { version: '13' },
        lexeme: 'TAP version 13',
      },
      {
        nesting: 0,
        kind: 'SubTestPointKeyword',
        node: { name: 'test' },
        lexeme: '# Subtest: test',
      },
      {
        nesting: 0,
        kind: 'TestPointKeyword',
        node: {
          status: { fail: false, pass: true, todo: false, skip: false },
          id: '1',
          description: 'test',
          reason: '',
          time: 0,
          diagnostics: [],
        },
        lexeme: 'ok 1 - test',
      },
      {
        nesting: 0,
        kind: 'TestPointKeyword',
        node: {
          status: { fail: false, pass: true, todo: false, skip: false },
          id: '2',
          description: 'test',
          reason: '',
          time: 0,
          diagnostics: [],
        },
        lexeme: 'ok 2 - test',
      },
    ],
  },
  // A YAML block attaches to the preceding test point: raw lines go into
  // `diagnostics` and `duration_ms` also populates `time`.
  {
    input: `TAP version 13
# Subtest: test
ok 1 - test
  ---
  foo: bar
  duration_ms: 0.0001
  prop: |-
    multiple
    lines
  ...`,
    expected: [
      {
        nesting: 0,
        kind: 'VersionKeyword',
        node: { version: '13' },
        lexeme: 'TAP version 13',
      },
      {
        nesting: 0,
        kind: 'SubTestPointKeyword',
        node: { name: 'test' },
        lexeme: '# Subtest: test',
      },
      {
        nesting: 0,
        kind: 'TestPointKeyword',
        node: {
          status: { fail: false, pass: true, todo: false, skip: false },
          id: '1',
          description: 'test',
          reason: '',
          time: 0.0001,
          diagnostics: [
            'foo: bar',
            'duration_ms: 0.0001',
            'prop: |-',
            '  multiple',
            '  lines',
          ],
        },
        lexeme: 'ok 1 - test',
      },
    ],
  },
  // Nested subtest (nesting 1) with its own YAML block and plan.
  {
    input: `TAP version 13
# Subtest: test/fixtures/test-runner/index.test.js
    # Subtest: this should pass
    ok 1 - this should pass
      ---
      duration_ms: 0.0001
      ...
    1..1`,
    expected: [
      {
        nesting: 0,
        kind: 'VersionKeyword',
        node: { version: '13' },
        lexeme: 'TAP version 13',
      },
      {
        kind: 'SubTestPointKeyword',
        lexeme: '# Subtest: test/fixtures/test-runner/index.test.js',
        nesting: 0,
        node: {
          name: 'test/fixtures/test-runner/index.test.js',
        },
      },
      {
        kind: 'SubTestPointKeyword',
        lexeme: '    # Subtest: this should pass',
        nesting: 1,
        node: {
          name: 'this should pass',
        },
      },
      {
        kind: 'TestPointKeyword',
        lexeme: '    ok 1 - this should pass',
        nesting: 1,
        node: {
          description: 'this should pass',
          diagnostics: ['duration_ms: 0.0001'],
          id: '1',
          reason: '',
          status: {
            fail: false,
            pass: true,
            skip: false,
            todo: false,
          },
          time: 0.0001,
        },
      },
      {
        kind: 'PlanKeyword',
        lexeme: '    1..1',
        nesting: 1,
        node: {
          end: '1',
          start: '1',
        },
      },
    ],
  },
  // Three sibling subtests at the same nesting level, each with YAML.
  {
    input: `TAP version 13
# Subtest: test 1
ok 1 - test 1
  ---
  foo: bar
  duration_ms: 1.00
  prop: |-
    multiple
    lines
  ...
# Subtest: test 2
ok 2 - test 2
  ---
  duration_ms: 2.00
  ...
# Subtest: test 3
ok 3 - test 3
  ---
  foo: bar
  duration_ms: 3.00
  prop: |-
    multiple
    lines
  ...`,
    expected: [
      {
        nesting: 0,
        kind: 'VersionKeyword',
        node: { version: '13' },
        lexeme: 'TAP version 13',
      },
      {
        nesting: 0,
        kind: 'SubTestPointKeyword',
        node: { name: 'test 1' },
        lexeme: '# Subtest: test 1',
      },
      {
        nesting: 0,
        kind: 'TestPointKeyword',
        node: {
          status: { fail: false, pass: true, todo: false, skip: false },
          id: '1',
          description: 'test 1',
          reason: '',
          time: 1.0,
          diagnostics: [
            'foo: bar',
            'duration_ms: 1.00',
            'prop: |-',
            '  multiple',
            '  lines',
          ],
        },
        lexeme: 'ok 1 - test 1',
      },
      {
        nesting: 0,
        kind: 'SubTestPointKeyword',
        node: { name: 'test 2' },
        lexeme: '# Subtest: test 2',
      },
      {
        nesting: 0,
        kind: 'TestPointKeyword',
        node: {
          status: { fail: false, pass: true, todo: false, skip: false },
          id: '2',
          description: 'test 2',
          reason: '',
          time: 2.0,
          diagnostics: ['duration_ms: 2.00'],
        },
        lexeme: 'ok 2 - test 2',
      },
      {
        nesting: 0,
        kind: 'SubTestPointKeyword',
        node: { name: 'test 3' },
        lexeme: '# Subtest: test 3',
      },
      {
        nesting: 0,
        kind: 'TestPointKeyword',
        node: {
          status: { fail: false, pass: true, todo: false, skip: false },
          id: '3',
          description: 'test 3',
          reason: '',
          time: 3.0,
          diagnostics: [
            'foo: bar',
            'duration_ms: 3.00',
            'prop: |-',
            '  multiple',
            '  lines',
          ],
        },
        lexeme: 'ok 3 - test 3',
      },
    ],
  },
  // Deeply nested subtests (nesting 0 → 1 → 2) with multi-digit test ids.
  {
    input: `TAP version 13
# Subtest: test 1
ok 1 - test 1
  ---
  foo: bar
  duration_ms: 1.00
  prop: |-
    multiple
    lines
  ...
    # Subtest: test 11
    ok 11 - test 11
      ---
      duration_ms: 11.00
      ...
        # Subtest: test 111
        ok 111 - test 111
          ---
          foo: bar
          duration_ms: 111.00
          prop: |-
            multiple
            lines
          ...`,
    expected: [
      {
        nesting: 0,
        kind: 'VersionKeyword',
        node: { version: '13' },
        lexeme: 'TAP version 13',
      },
      {
        nesting: 0,
        kind: 'SubTestPointKeyword',
        node: { name: 'test 1' },
        lexeme: '# Subtest: test 1',
      },
      {
        nesting: 0,
        kind: 'TestPointKeyword',
        node: {
          status: { fail: false, pass: true, todo: false, skip: false },
          id: '1',
          description: 'test 1',
          reason: '',
          time: 1.0,
          diagnostics: [
            'foo: bar',
            'duration_ms: 1.00',
            'prop: |-',
            '  multiple',
            '  lines',
          ],
        },
        lexeme: 'ok 1 - test 1',
      },
      {
        nesting: 1,
        kind: 'SubTestPointKeyword',
        node: { name: 'test 11' },
        lexeme: '    # Subtest: test 11',
      },
      {
        nesting: 1,
        kind: 'TestPointKeyword',
        node: {
          status: { fail: false, pass: true, todo: false, skip: false },
          id: '11',
          description: 'test 11',
          reason: '',
          time: 11.0,
          diagnostics: ['duration_ms: 11.00'],
        },
        lexeme: '    ok 11 - test 11',
      },
      {
        nesting: 2,
        kind: 'SubTestPointKeyword',
        node: { name: 'test 111' },
        lexeme: '        # Subtest: test 111',
      },
      {
        nesting: 2,
        kind: 'TestPointKeyword',
        node: {
          status: { fail: false, pass: true, todo: false, skip: false },
          id: '111',
          description: 'test 111',
          reason: '',
          time: 111.0,
          diagnostics: [
            'foo: bar',
            'duration_ms: 111.00',
            'prop: |-',
            '  multiple',
            '  lines',
          ],
        },
        lexeme: '        ok 111 - test 111',
      },
    ],
  },
];
// Run every fixture through a fresh parser and compare its full output
// stream against the expected nodes (which the parser emits proto-less).
(async () => {
  for (const { input, expected } of cases) {
    const tapParser = new TapParser();
    tapParser.write(input);
    tapParser.end();
    const results = await tapParser.toArray();
    const wanted = expected.map((entry) => ({ __proto__: null, ...entry }));
    assert.deepStrictEqual(results, wanted);
  }
})().then(common.mustCall());
// Feed one TAP14 document to the parser in deliberately tiny, odd-sized
// chunks to verify tokenization is correct across arbitrary write()
// boundaries. The exact chunking below is the behavior under test — do not
// merge or reflow the write() calls.
// NOTE(review): the diff viewer collapsed significant whitespace; leading
// spaces inside the string literals below were reconstructed from the
// expected lexemes (4 spaces = one nesting level, YAML indented 2 more) —
// verify against the original file.
(async () => {
  const expected = [
    {
      kind: 'PlanKeyword',
      node: { start: '1', end: '3' },
      nesting: 0,
      lexeme: '1..3',
    },
    {
      nesting: 0,
      kind: 'TestPointKeyword',
      node: {
        status: { fail: false, pass: true, todo: false, skip: false },
        id: '1',
        description: 'Input file opened',
        reason: '',
        time: 0,
        diagnostics: [],
      },
      lexeme: 'ok 1 - Input file opened',
    },
    {
      kind: 'TestPointKeyword',
      node: {
        status: { fail: true, pass: false, todo: false, skip: false },
        id: '2',
        description: '',
        reason: '',
        time: 0,
        diagnostics: [],
      },
      nesting: 0,
      // Trailing space preserved from the raw input line.
      lexeme: 'not ok 2 ',
    },
    {
      kind: 'SubTestPointKeyword',
      node: { name: 'foobar' },
      nesting: 1,
      lexeme: '    # Subtest: foobar',
    },
    {
      __proto__: null,
      kind: 'TestPointKeyword',
      node: {
        status: { fail: false, pass: true, todo: true, skip: false },
        id: '3',
        description: '',
        reason: '',
        time: 0.0001,
        diagnostics: [
          'foo: bar',
          'duration_ms: 0.0001',
          'prop: |-',
          '  foo',
          '  bar',
        ],
      },
      nesting: 0,
      lexeme: 'ok 3 # TODO',
    },
  ];
  // Strict TAP14 checking enabled for this run.
  const parser = new TapParser({ specs: TapChecker.TAP14 });
  // Plan "1..3" delivered one character at a time.
  parser.write('\n');
  parser.write('1');
  parser.write('.');
  parser.write('.');
  parser.write('3');
  parser.write('\n');
  // Passing test point split mid-line.
  parser.write('ok 1 ');
  parser.write('- Input file opened\n');
  // Failing test point split across the "not" / "ok" keywords.
  parser.write('not');
  parser.write(' ok');
  parser.write(' 2 \n');
  parser.write('\n');
  // Nested subtest declaration split across three chunks.
  parser.write('    # ');
  parser.write('Subtest: foo');
  parser.write('bar');
  parser.write('\n');
  // TODO test point, including a zero-length write.
  parser.write('');
  parser.write('ok');
  parser.write(' 3 #');
  parser.write(' TODO');
  parser.write('\n');
  // YAML diagnostic block attached to the preceding test point; the value
  // of duration_ms arrives in a separate chunk from its key.
  parser.write('  ---\n');
  parser.write('  foo: bar\n');
  parser.write('  duration_ms: ');
  parser.write(' 0.0001\n');
  parser.write('  prop: |-\n');
  parser.write('    foo\n');
  parser.write('    bar\n');
  parser.write('  ...\n');
  parser.end();
  const actual = await parser.toArray();
  assert.deepStrictEqual(
    actual,
    expected.map((item) => ({ __proto__: null, ...item }))
  );
})().then(common.mustCall());

File diff suppressed because it is too large Load Diff