build: convert V8 test JSON to JUnit XML
This introduces code to convert V8's JSON test output to JUnit XML. We need this because V8's latest refactor of their test runner has made it difficult to keep floating our JUnit reporter patch on top (see the referenced issue). I also think the same changes are needed in vcbuild.bat, but I don't know how to test those yet; I can create a Windows VM and test them if we decide to go with this approach.

Refs: https://github.com/nodejs/node-v8/issues/236
PR-URL: https://github.com/nodejs/node/pull/44049
Fixes: https://github.com/nodejs/node-v8/issues/236
Reviewed-By: Ben Noordhuis <info@bnoordhuis.nl>
Reviewed-By: James M Snell <jasnell@gmail.com>
Reviewed-By: Michaël Zasso <targos@protonmail.com>
parent 587367d107
commit 691f8d1427
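For orientation, the sketch below (not part of the commit) shows the shape of the V8 JSON test output that the new converter consumes and, roughly, the JUnit XML it produces. The field names match those read by tools/v8-json-to-junit.py further down; the test name, command, and output values are made up.

# Hypothetical input: one failing test, with field names taken from the
# converter below and made-up values.
failing = {
  "name": "mjsunit/example-failing-test",
  "command": "out/Release/d8 test/mjsunit/example-failing-test.js",
  "duration": 0.5,
  "exit_code": 1,
  "result": "FAIL",
  "stdout": "",
  "stderr": "assertion failed",
}

example_json = {
  # V8 lists only failing or flaky tests under "results".
  "results": [failing],
  # With a very large --slow-tests-cutoff, every test (passing or failing)
  # also appears under "slowest_tests"; here there is just the one test.
  "slowest_tests": [failing],
}

# Roughly the JUnit XML the converter emits for this input:
#
# <testsuite name="v8tests">
#   <testcase name="mjsunit/example-failing-test"
#             cmd="out/Release/d8 test/mjsunit/example-failing-test.js"
#             time="0.5">
#     <failure>stderr:
# assertion failed
# Command: out/Release/d8 test/mjsunit/example-failing-test.js</failure>
#   </testcase>
# </testsuite>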
.gitignore (vendored): 1 line changed
@@ -117,6 +117,7 @@ tools/*/*.i.tmp
 # === Rules for test artifacts ===
 /*.tap
 /*.xml
+/v8*-tap.json
 /node_trace.*.log
 # coverage related
 /gcovr
Makefile: 24 lines changed
@@ -33,6 +33,27 @@ ifdef ENABLE_V8_TAP
   TAP_V8 := --junitout $(PWD)/v8-tap.xml
   TAP_V8_INTL := --junitout $(PWD)/v8-intl-tap.xml
   TAP_V8_BENCHMARKS := --junitout $(PWD)/v8-benchmarks-tap.xml
+  define convert_to_junit
+    @true
+  endef
 endif
+
+ifdef ENABLE_CONVERT_V8_JSON_TO_XML
+  TAP_V8_JSON := $(PWD)/v8-tap.json
+  TAP_V8_INTL_JSON := $(PWD)/v8-intl-tap.json
+  TAP_V8_BENCHMARKS_JSON := $(PWD)/v8-benchmarks-tap.json
+
+  # By default, V8's JSON test output only includes the tests which have
+  # failed. We use --slow-tests-cutoff to ensure that all tests are present
+  # in the output, including those which pass.
+  TAP_V8 := --json-test-results $(TAP_V8_JSON) --slow-tests-cutoff 1000000
+  TAP_V8_INTL := --json-test-results $(TAP_V8_INTL_JSON) --slow-tests-cutoff 1000000
+  TAP_V8_BENCHMARKS := --json-test-results $(TAP_V8_BENCHMARKS_JSON) --slow-tests-cutoff 1000000
+
+  define convert_to_junit
+    export PATH="$(NO_BIN_OVERRIDE_PATH)" && \
+      $(PYTHON) tools/v8-json-to-junit.py < $(1) > $(1:.json=.xml)
+  endef
+endif

 V8_TEST_OPTIONS = $(V8_EXTRA_TEST_OPTIONS)
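As a rough illustration (not part of the diff), the convert_to_junit macro above amounts to the following Python, assuming the converter script lives at tools/v8-json-to-junit.py; the NO_BIN_OVERRIDE_PATH handling is omitted.

# Sketch of what $(call convert_to_junit,foo.json) does: pipe foo.json into
# tools/v8-json-to-junit.py and write the result to foo.xml.
import pathlib
import subprocess
import sys

def convert_to_junit(json_path):
  src = pathlib.Path(json_path)
  dst = src.with_suffix(".xml")  # mirrors the $(1:.json=.xml) substitution
  with src.open("rb") as fin, dst.open("wb") as fout:
    subprocess.run([sys.executable, "tools/v8-json-to-junit.py"],
                   stdin=fin, stdout=fout, check=True)

# convert_to_junit("v8-tap.json")  # would produce v8-tap.xml alongside it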
@@ -683,6 +704,7 @@ test-v8: v8 ## Runs the V8 test suite on deps/v8.
 	$(PYTHON) deps/v8/tools/run-tests.py --gn --arch=$(V8_ARCH) $(V8_TEST_OPTIONS) \
 		mjsunit cctest debugger inspector message preparser \
 		$(TAP_V8)
+	$(call convert_to_junit,$(TAP_V8_JSON))
 	$(info Testing hash seed)
 	$(MAKE) test-hash-seed
@@ -691,12 +713,14 @@ test-v8-intl: v8
 	$(PYTHON) deps/v8/tools/run-tests.py --gn --arch=$(V8_ARCH) \
 		intl \
 		$(TAP_V8_INTL)
+	$(call convert_to_junit,$(TAP_V8_INTL_JSON))

 test-v8-benchmarks: v8
 	export PATH="$(NO_BIN_OVERRIDE_PATH)" && \
 		$(PYTHON) deps/v8/tools/run-tests.py --gn --arch=$(V8_ARCH) \
 		benchmarks \
 		$(TAP_V8_BENCHMARKS)
+	$(call convert_to_junit,$(TAP_V8_BENCHMARKS_JSON))

 test-v8-updates:
 	$(PYTHON) tools/test.py $(PARALLEL_ARGS) --mode=$(BUILDTYPE_LOWER) v8-updates
vcbuild.bat

@@ -20,18 +20,21 @@ if errorlevel 1 set ERROR_STATUS=1&goto test-v8-exit
 set path=%savedpath%

 if not defined test_v8 goto test-v8-intl
-echo running 'python tools\run-tests.py %common_v8_test_options% %v8_test_options% --junitout ./v8-tap.xml'
-call python tools\run-tests.py %common_v8_test_options% %v8_test_options% --junitout ./v8-tap.xml
+echo running 'python tools\run-tests.py %common_v8_test_options% %v8_test_options% --slow-tests-cutoff 1000000 --json-test-results v8-tap.json'
+call python tools\run-tests.py %common_v8_test_options% %v8_test_options% --slow-tests-cutoff 1000000 --json-test-results v8-tap.json
+call python ..\..\tools\v8-json-to-junit.py < v8-tap.json > v8-tap.xml

 :test-v8-intl
 if not defined test_v8_intl goto test-v8-benchmarks
-echo running 'python tools\run-tests.py %common_v8_test_options% intl --junitout ./v8-intl-tap.xml'
-call python tools\run-tests.py %common_v8_test_options% intl --junitout ./v8-intl-tap.xml
+echo running 'python tools\run-tests.py %common_v8_test_options% intl --slow-tests-cutoff 1000000 --json-test-results v8-intl-tap.json'
+call python tools\run-tests.py %common_v8_test_options% intl --slow-tests-cutoff 1000000 --json-test-results v8-intl-tap.json
+call python ..\..\tools\v8-json-to-junit.py < v8-intl-tap.json > v8-intl-tap.xml

 :test-v8-benchmarks
 if not defined test_v8_benchmarks goto test-v8-exit
-echo running 'python tools\run-tests.py %common_v8_test_options% benchmarks --junitout ./v8-benchmarks-tap.xml'
-call python tools\run-tests.py %common_v8_test_options% benchmarks --junitout ./v8-benchmarks-tap.xml
+echo running 'python tools\run-tests.py %common_v8_test_options% benchmarks --slow-tests-cutoff 1000000 --json-test-results v8-benchmarks-tap.json'
+call python tools\run-tests.py %common_v8_test_options% benchmarks --slow-tests-cutoff 1000000 --json-test-results v8-benchmarks-tap.json
+call python ..\..\tools\v8-json-to-junit.py < v8-benchmarks-tap.json > v8-benchmarks-tap.xml
 goto test-v8-exit

 :test-v8-exit
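One possible sanity check (not part of the commit) before feeding a results file to the converter: confirm that --slow-tests-cutoff was large enough that every failing test listed under "results" also appears under "slowest_tests", which is what the converter's assertions rely on. The helper name below is hypothetical.

# Hypothetical helper: verify the cutoff was large enough for the converter.
import json

def check_cutoff(json_path):
  with open(json_path) as f:
    data = json.load(f)
  all_names = {t["name"] for t in data["slowest_tests"]}
  missing = [t["name"] for t in data["results"] if t["name"] not in all_names]
  if missing:
    raise SystemExit("--slow-tests-cutoff too small; missing: " + ", ".join(missing))

# check_cutoff("v8-tap.json")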
tools/v8-json-to-junit.py (new executable file): 124 lines
@@ -0,0 +1,124 @@
#!/usr/bin/env python
# Large parts of this file are modified from
# deps/v8/tools/testrunner/local/junit_output.py, which no longer exists in
# latest V8.
#
# Copyright 2013 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
#       notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
#       copyright notice, this list of conditions and the following
#       disclaimer in the documentation and/or other materials provided
#       with the distribution.
#     * Neither the name of Google Inc. nor the names of its
#       contributors may be used to endorse or promote products derived
#       from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

import json
import utils
import signal
import sys
import xml.etree.ElementTree as xml


def IsExitCodeCrashing(exit_code):
  if utils.IsWindows():
    return 0x80000000 & exit_code and not (0x3FFFFF00 & exit_code)
  return exit_code < 0 and exit_code != -signal.SIGABRT


class JUnitTestOutput:
  def __init__(self, test_suite_name):
    self.root = xml.Element("testsuite")
    self.root.attrib["name"] = test_suite_name

  def HasRunTest(self, test_name, test_cmd, test_duration, test_failure):
    test_case_element = xml.Element("testcase")
    test_case_element.attrib["name"] = test_name
    test_case_element.attrib["cmd"] = test_cmd
    test_case_element.attrib["time"] = str(round(test_duration, 3))
    if test_failure is not None:
      failure_element = xml.Element("failure")
      failure_element.text = test_failure
      test_case_element.append(failure_element)
    self.root.append(test_case_element)

  def FinishAndWrite(self, f):
    xml.ElementTree(self.root).write(f, "UTF-8")


def Main():
  test_results = json.load(sys.stdin)

  # V8's JSON test runner only logs failing and flaky tests under "results". We
  # assume the caller has put a large number for --slow-tests-cutoff, to ensure
  # that all the tests appear under "slowest_tests".

  failing_tests = {result["name"]: result for result in test_results["results"]}
  all_tests = {result["name"]: result for result in test_results["slowest_tests"]}
  passing_tests = {
    name: result for name, result in all_tests.items() if name not in failing_tests
  }

  # These check that --slow-tests-cutoff was passed correctly.
  assert len(failing_tests) + len(passing_tests) == len(all_tests)
  assert len(all_tests) == len(test_results["slowest_tests"])

  output = JUnitTestOutput("v8tests")

  for name, failing_test in failing_tests.items():
    failing_output = []

    stdout = failing_test["stdout"].strip()
    if len(stdout):
      failing_output.append("stdout:")
      failing_output.append(stdout)

    stderr = failing_test["stderr"].strip()
    if len(stderr):
      failing_output.append("stderr:")
      failing_output.append(stderr)

    failing_output.append("Command: " + failing_test["command"])

    exit_code = failing_test["exit_code"]
    if failing_test["result"] == "TIMEOUT":
      failing_output.append("--- TIMEOUT ---")
    elif IsExitCodeCrashing(exit_code):
      failing_output.append("exit code: " + str(exit_code))
      failing_output.append("--- CRASHED ---")

    output.HasRunTest(
      test_name=name,
      test_cmd=failing_test["command"],
      test_duration=failing_test["duration"],
      test_failure="\n".join(failing_output),
    )

  for name, passing_test in passing_tests.items():
    output.HasRunTest(
      test_name=name,
      test_cmd=passing_test["command"],
      test_duration=passing_test["duration"],
      test_failure=None,
    )

  output.FinishAndWrite(sys.stdout.buffer)

if __name__ == '__main__':
  Main()
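Finally, a quick way (not part of the commit) to exercise the JUnitTestOutput class above with made-up data, assuming the snippet runs somewhere the class is defined or importable:

# Record one passing test and print the resulting JUnit XML.
import io

junit = JUnitTestOutput("v8tests")
junit.HasRunTest(
  test_name="mjsunit/example-passing-test",
  test_cmd="out/Release/d8 test/mjsunit/example-passing-test.js",
  test_duration=0.1234,
  test_failure=None,
)
buf = io.BytesIO()
junit.FinishAndWrite(buf)
print(buf.getvalue().decode("utf-8"))
# Prints, roughly:
# <?xml version='1.0' encoding='UTF-8'?>
# <testsuite name="v8tests"><testcase name="mjsunit/example-passing-test"
#   cmd="out/Release/d8 test/mjsunit/example-passing-test.js" time="0.123" /></testsuite>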