Mirror of https://github.com/denoland/deno.git (synced 2024-11-21 20:38:55 +00:00)

tests: new typescript WPT runner (#9269)

commit 2638aa03a5 (parent ecfda65eff)
@@ -31,7 +31,9 @@
     "std/node_modules",
     "std/hash/_wasm",
     "target",
-    "third_party"
+    "third_party",
+    "tools/wpt/expectation.json",
+    "tools/wpt/manifest.json"
   ],
   "plugins": [
     "https://plugins.dprint.dev/typescript-0.33.0.wasm",
`.github/workflows/ci.yml` (vendored, 24 changed lines)
@@ -90,7 +90,7 @@ jobs:
       - name: Install Python
         uses: actions/setup-python@v1
         with:
-          python-version: "2.7"
+          python-version: "3.8"
           architecture: x64

       - name: Install Node
@@ -234,6 +234,28 @@ jobs:
           cargo test --locked --doc
           cargo test --locked --all-targets

+      - name: Configure hosts file for WPT (unix)
+        if: runner.os != 'Windows'
+        run: ./wpt make-hosts-file | sudo tee -a /etc/hosts
+        working-directory: test_util/wpt/
+
+      - name: Configure hosts file for WPT (windows)
+        if: runner.os == 'Windows'
+        working-directory: test_util/wpt/
+        run: python wpt make-hosts-file | Out-File $env:SystemRoot\System32\drivers\etc\hosts -Encoding ascii -Append
+
+      - name: Run web platform tests (release)
+        if: matrix.kind == 'test_release'
+        run: |
+          deno run --unstable --allow-write --allow-read --allow-net --allow-env --allow-run ./tools/wpt.ts setup
+          deno run --unstable --allow-write --allow-read --allow-net --allow-env --allow-run ./tools/wpt.ts run --quiet --release
+
+      - name: Run web platform tests (debug)
+        if: matrix.kind == 'test_debug'
+        run: |
+          deno run --unstable --allow-write --allow-read --allow-net --allow-env --allow-run ./tools/wpt.ts setup
+          deno run --unstable --allow-write --allow-read --allow-net --allow-env --allow-run ./tools/wpt.ts run --quiet
+
       - name: Run Benchmarks
         if: matrix.kind == 'bench'
         run: cargo bench
`.gitignore` (vendored, 1 changed line)
@@ -9,6 +9,7 @@ gclient_config.py_entries
 /gh-pages/
 /target/
 /std/hash/_wasm/target
+/tools/wpt/manifest.json

 # Files that help ensure VSCode can work but we don't want checked into the
 # repo
`.gitmodules` (vendored, 2 changed lines)
@@ -8,5 +8,5 @@
   shallow = true
 [submodule "test_util/wpt"]
   path = test_util/wpt
-  url = https://github.com/web-platform-tests/wpt.git
+  url = https://github.com/denoland/wpt.git
   shallow = true
@@ -1,35 +0,0 @@
## Web Platform Tests

The WPT are test suites for Web platform specs, like Fetch, WHATWG Streams, or
console. Deno is able to run most `.any.js` and `.window.js` web platform tests.

This directory contains a `wpt.jsonc` file that is used to configure our WPT
test runner. You can use this JSON file to set which WPT suites to run, and
which tests we expect to fail (due to bugs or because they are out of scope for
Deno).

To include a new test file to run, add it to the array of test files for the
corresponding suite. For example, to enable `streams/readable-streams/general`,
the file would then look like this:

```json
{
  "streams": ["readable-streams/general"]
}
```

If you need more configurability over which test cases in a test file of a
suite to run, you can use the object representation. In the example below, we
configure `streams/readable-streams/general` to expect
`ReadableStream can't be constructed with an invalid type` to fail.

```json
{
  "streams": [
    {
      "name": "readable-streams/general",
      "expectFail": ["ReadableStream can't be constructed with an invalid type"]
    }
  ]
}
```
@@ -6,12 +6,9 @@ use deno_core::url;
use deno_runtime::deno_fetch::reqwest;
use deno_runtime::deno_websocket::tokio_tungstenite;
use std::io::{BufRead, Write};
use std::path::Path;
use std::path::PathBuf;
use std::process::Command;
use tempfile::TempDir;
use test_util as util;
use walkdir::WalkDir;

macro_rules! itest(
  ($name:ident {$( $key:ident: $value:expr,)*}) => {
@ -5193,249 +5190,6 @@ fn denort_direct_use_error() {
|
||||
assert!(!status.success());
|
||||
}
|
||||
|
||||
fn concat_bundle(
|
||||
files: Vec<(PathBuf, String)>,
|
||||
bundle_path: &Path,
|
||||
init: String,
|
||||
) -> String {
|
||||
let bundle_url = url::Url::from_file_path(bundle_path).unwrap().to_string();
|
||||
|
||||
let mut bundle = init.clone();
|
||||
let mut bundle_line_count = init.lines().count() as u32;
|
||||
let mut source_map = sourcemap::SourceMapBuilder::new(Some(&bundle_url));
|
||||
|
||||
// In classic workers, `importScripts()` performs an actual import.
|
||||
// However, we don't implement that function in Deno as we want to enforce
|
||||
// the use of ES6 modules.
|
||||
// To work around this, we:
|
||||
// 1. Define `importScripts()` as a no-op (code below)
|
||||
// 2. Capture its parameter from the source code and add it to the list of
|
||||
// files to concatenate. (see `web_platform_tests()`)
|
||||
bundle.push_str("function importScripts() {}\n");
|
||||
bundle_line_count += 1;
|
||||
|
||||
for (path, text) in files {
|
||||
let path = std::fs::canonicalize(path).unwrap();
|
||||
let url = url::Url::from_file_path(path).unwrap().to_string();
|
||||
let src_id = source_map.add_source(&url);
|
||||
source_map.set_source_contents(src_id, Some(&text));
|
||||
|
||||
for (line_index, line) in text.lines().enumerate() {
|
||||
bundle.push_str(line);
|
||||
bundle.push('\n');
|
||||
source_map.add_raw(
|
||||
bundle_line_count,
|
||||
0,
|
||||
line_index as u32,
|
||||
0,
|
||||
Some(src_id),
|
||||
None,
|
||||
);
|
||||
|
||||
bundle_line_count += 1;
|
||||
}
|
||||
bundle.push('\n');
|
||||
bundle_line_count += 1;
|
||||
}
|
||||
|
||||
let mut source_map_buf: Vec<u8> = vec![];
|
||||
source_map
|
||||
.into_sourcemap()
|
||||
.to_writer(&mut source_map_buf)
|
||||
.unwrap();
|
||||
|
||||
bundle.push_str("//# sourceMappingURL=data:application/json;base64,");
|
||||
let encoded_map = base64::encode(source_map_buf);
|
||||
bundle.push_str(&encoded_map);
|
||||
|
||||
bundle
|
||||
}
|
||||
|
||||
// TODO(lucacasonato): DRY with tsc_config.rs
|
||||
/// Convert a jsonc libraries `JsonValue` to a serde `Value`.
|
||||
fn jsonc_to_serde(j: jsonc_parser::JsonValue) -> serde_json::Value {
|
||||
use jsonc_parser::JsonValue;
|
||||
use serde_json::Value;
|
||||
use std::str::FromStr;
|
||||
match j {
|
||||
JsonValue::Array(arr) => {
|
||||
let vec = arr.into_iter().map(jsonc_to_serde).collect();
|
||||
Value::Array(vec)
|
||||
}
|
||||
JsonValue::Boolean(bool) => Value::Bool(bool),
|
||||
JsonValue::Null => Value::Null,
|
||||
JsonValue::Number(num) => {
|
||||
let number =
|
||||
serde_json::Number::from_str(&num).expect("could not parse number");
|
||||
Value::Number(number)
|
||||
}
|
||||
JsonValue::Object(obj) => {
|
||||
let mut map = serde_json::map::Map::new();
|
||||
for (key, json_value) in obj.into_iter() {
|
||||
map.insert(key, jsonc_to_serde(json_value));
|
||||
}
|
||||
Value::Object(map)
|
||||
}
|
||||
JsonValue::String(str) => Value::String(str),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn web_platform_tests() {
|
||||
use deno_core::serde::Deserialize;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
#[serde(untagged)]
|
||||
enum WptConfig {
|
||||
Simple(String),
|
||||
#[serde(rename_all = "camelCase")]
|
||||
Options {
|
||||
name: String,
|
||||
expect_fail: Vec<String>,
|
||||
},
|
||||
}
|
||||
|
||||
let text =
|
||||
std::fs::read_to_string(util::tests_path().join("wpt.jsonc")).unwrap();
|
||||
let jsonc = jsonc_parser::parse_to_value(&text).unwrap().unwrap();
|
||||
let config: std::collections::HashMap<String, Vec<WptConfig>> =
|
||||
deno_core::serde_json::from_value(jsonc_to_serde(jsonc)).unwrap();
|
||||
|
||||
for (suite_name, includes) in config.into_iter() {
|
||||
let suite_path = util::wpt_path().join(suite_name);
|
||||
let dir = WalkDir::new(&suite_path)
|
||||
.into_iter()
|
||||
.filter_map(Result::ok)
|
||||
.filter(|e| e.file_type().is_file())
|
||||
.filter(|f| {
|
||||
let filename = f.file_name().to_str().unwrap();
|
||||
filename.ends_with(".any.js")
|
||||
|| filename.ends_with(".window.js")
|
||||
|| filename.ends_with(".worker.js")
|
||||
})
|
||||
.filter_map(|f| {
|
||||
let path = f
|
||||
.path()
|
||||
.strip_prefix(&suite_path)
|
||||
.unwrap()
|
||||
.to_str()
|
||||
.unwrap();
|
||||
for cfg in &includes {
|
||||
match cfg {
|
||||
WptConfig::Simple(name) if path.starts_with(name) => {
|
||||
return Some((f.path().to_owned(), vec![]))
|
||||
}
|
||||
WptConfig::Options { name, expect_fail }
|
||||
if path.starts_with(name) =>
|
||||
{
|
||||
return Some((f.path().to_owned(), expect_fail.to_vec()))
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
None
|
||||
});
|
||||
|
||||
let testharness_path = util::wpt_path().join("resources/testharness.js");
|
||||
let testharness_text = std::fs::read_to_string(&testharness_path)
|
||||
.unwrap()
|
||||
.replace("output:true", "output:false");
|
||||
let testharnessreporter_path =
|
||||
util::tests_path().join("wpt_testharnessconsolereporter.js");
|
||||
let testharnessreporter_text =
|
||||
std::fs::read_to_string(&testharnessreporter_path).unwrap();
|
||||
|
||||
for (test_file_path, expect_fail) in dir {
|
||||
let test_file_text = std::fs::read_to_string(&test_file_path).unwrap();
|
||||
let imports: Vec<(PathBuf, String)> = test_file_text
|
||||
.split('\n')
|
||||
.into_iter()
|
||||
.filter_map(|t| {
|
||||
// Hack: we don't implement `importScripts()`, and instead capture the
|
||||
// parameter in source code; see `concat_bundle()` for more details.
|
||||
if let Some(rest_import_scripts) = t.strip_prefix("importScripts(\"")
|
||||
{
|
||||
if let Some(import_path) = rest_import_scripts.strip_suffix("\");")
|
||||
{
|
||||
// The code in `testharness.js` silences the test outputs.
|
||||
if import_path != "/resources/testharness.js" {
|
||||
return Some(import_path);
|
||||
}
|
||||
}
|
||||
}
|
||||
t.strip_prefix("// META: script=")
|
||||
})
|
||||
.map(|s| {
|
||||
let s = if s == "/resources/WebIDLParser.js" {
|
||||
"/resources/webidl2/lib/webidl2.js"
|
||||
} else {
|
||||
s
|
||||
};
|
||||
if s.starts_with('/') {
|
||||
util::wpt_path().join(format!(".{}", s))
|
||||
} else {
|
||||
test_file_path.parent().unwrap().join(s)
|
||||
}
|
||||
})
|
||||
.map(|path| {
|
||||
let text = std::fs::read_to_string(&path).unwrap();
|
||||
(path, text)
|
||||
})
|
||||
.collect();
|
||||
|
||||
let mut variants: Vec<&str> = test_file_text
|
||||
.split('\n')
|
||||
.into_iter()
|
||||
.filter_map(|t| t.strip_prefix("// META: variant="))
|
||||
.collect();
|
||||
|
||||
if variants.is_empty() {
|
||||
variants.push("");
|
||||
}
|
||||
|
||||
for variant in variants {
|
||||
let mut files = Vec::with_capacity(3 + imports.len());
|
||||
files.push((testharness_path.clone(), testharness_text.clone()));
|
||||
files.push((
|
||||
testharnessreporter_path.clone(),
|
||||
testharnessreporter_text.clone(),
|
||||
));
|
||||
files.extend(imports.clone());
|
||||
files.push((test_file_path.clone(), test_file_text.clone()));
|
||||
|
||||
let mut file = tempfile::Builder::new()
|
||||
.prefix("wpt-bundle-")
|
||||
.suffix(".js")
|
||||
.rand_bytes(5)
|
||||
.tempfile()
|
||||
.unwrap();
|
||||
|
||||
let bundle = concat_bundle(files, file.path(), "".to_string());
|
||||
file.write_all(bundle.as_bytes()).unwrap();
|
||||
|
||||
let child = util::deno_cmd()
|
||||
.current_dir(test_file_path.parent().unwrap())
|
||||
.arg("run")
|
||||
.arg("--location")
|
||||
.arg(&format!("http://web-platform-tests/?{}", variant))
|
||||
.arg("-A")
|
||||
.arg(file.path())
|
||||
.arg(deno_core::serde_json::to_string(&expect_fail).unwrap())
|
||||
.arg("--quiet")
|
||||
.stdin(std::process::Stdio::piped())
|
||||
.spawn()
|
||||
.unwrap();
|
||||
|
||||
let output = child.wait_with_output().unwrap();
|
||||
if !output.status.success() {
|
||||
file.keep().unwrap();
|
||||
}
|
||||
assert!(output.status.success());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
|
||||
async fn test_resolve_dns() {
|
||||
use std::collections::BTreeMap;
|
||||
|
@ -1,250 +0,0 @@
|
||||
{
|
||||
"streams": [
|
||||
// "piping/abort",
|
||||
// "piping/close-propagation-backward",
|
||||
// "piping/close-propagation-forward",
|
||||
// "piping/error-propagation-backward",
|
||||
// "piping/error-propagation-forward",
|
||||
"piping/flow-control",
|
||||
// "piping/general",
|
||||
"piping/multiple-propagation",
|
||||
"piping/pipe-through",
|
||||
"piping/then-interception",
|
||||
// "piping/throwing-options",
|
||||
// "piping/transform-streams",
|
||||
"queuing-strategies.any",
|
||||
// "readable-byte-streams",
|
||||
// "readable-streams/async-iterator",
|
||||
// "readable-streams/bad-strategies",
|
||||
// "readable-streams/bad-underlying-source",
|
||||
// "readable-streams/cancel",
|
||||
// "readable-streams/constructor",
|
||||
"readable-streams/count-queuing-strategy-integration",
|
||||
"readable-streams/default-reader",
|
||||
"readable-streams/floating-point-total-queue-size",
|
||||
"readable-streams/garbage-collection",
|
||||
"readable-streams/general",
|
||||
{
|
||||
"name": "readable-streams/patched-global",
|
||||
"expectFail": [
|
||||
"ReadableStream async iterator should use the original values of getReader() and ReadableStreamDefaultReader methods"
|
||||
]
|
||||
},
|
||||
"readable-streams/reentrant-strategies",
|
||||
"readable-streams/tee",
|
||||
// "readable-streams/templated",
|
||||
"transform-streams/backpressure",
|
||||
"transform-streams/errors",
|
||||
"transform-streams/flush",
|
||||
"transform-streams/general",
|
||||
"transform-streams/lipfuzz",
|
||||
// "transform-streams/patched-global",
|
||||
"transform-streams/properties",
|
||||
"transform-streams/reentrant-strategies",
|
||||
"transform-streams/strategies",
|
||||
// "transform-streams/terminate",
|
||||
// "writable-streams/aborting",
|
||||
// "writable-streams/bad-strategies",
|
||||
"writable-streams/bad-underlying-sinks",
|
||||
"writable-streams/byte-length-queuing-strategy",
|
||||
// "writable-streams/close",
|
||||
// "writable-streams/constructor",
|
||||
"writable-streams/count-queuing-strategy",
|
||||
"writable-streams/error",
|
||||
"writable-streams/floating-point-total-queue-size",
|
||||
"writable-streams/general",
|
||||
"writable-streams/properties",
|
||||
"writable-streams/reentrant-strategy",
|
||||
"writable-streams/start",
|
||||
"writable-streams/write"
|
||||
],
|
||||
"encoding": [
|
||||
"api-basics",
|
||||
"api-invalid-label",
|
||||
"api-replacement-encodings",
|
||||
"api-surrogates-utf8",
|
||||
// TODO(lucacasonato): enable encodeInto. We have a bug in implementaiton.
|
||||
// {
|
||||
// "name": "encodeInto",
|
||||
// "expectFail": [
|
||||
// "encodeInto() and a detached output buffer"
|
||||
// ]
|
||||
// },
|
||||
// "encodeInto",
|
||||
// TODO(lucacasonato): enable when we support iso-2022-jp
|
||||
// "iso-2022-jp-decoder",
|
||||
// TODO(lucacasonato): uses XMLHttpRequest unnecessarily. should be fixed upstream before enabling
|
||||
// "replacement-encodings",
|
||||
"textdecoder-byte-order-marks",
|
||||
{
|
||||
"name": "textdecoder-copy",
|
||||
"expectFail": [
|
||||
// TODO(lucacasonato): enable when we have stream support
|
||||
"Modify buffer after passing it in (ArrayBuffer)",
|
||||
"Modify buffer after passing it in (SharedArrayBuffer)"
|
||||
]
|
||||
},
|
||||
"textdecoder-fatal-single-byte",
|
||||
"textdecoder-fatal.",
|
||||
"textdecoder-ignorebom",
|
||||
{
|
||||
"name": "textdecoder-labels",
|
||||
"expectFail": [
|
||||
"cseucpkdfmtjapanese => EUC-JP",
|
||||
"euc-jp => EUC-JP",
|
||||
"x-euc-jp => EUC-JP",
|
||||
"csiso2022jp => ISO-2022-JP",
|
||||
"iso-2022-jp => ISO-2022-JP",
|
||||
"csshiftjis => Shift_JIS",
|
||||
"ms932 => Shift_JIS",
|
||||
"ms_kanji => Shift_JIS",
|
||||
"shift-jis => Shift_JIS",
|
||||
"shift_jis => Shift_JIS",
|
||||
"sjis => Shift_JIS",
|
||||
"windows-31j => Shift_JIS",
|
||||
"x-sjis => Shift_JIS",
|
||||
"cseuckr => EUC-KR",
|
||||
"csksc56011987 => EUC-KR",
|
||||
"euc-kr => EUC-KR",
|
||||
"iso-ir-149 => EUC-KR",
|
||||
"korean => EUC-KR",
|
||||
"ks_c_5601-1987 => EUC-KR",
|
||||
"ks_c_5601-1989 => EUC-KR",
|
||||
"ksc5601 => EUC-KR",
|
||||
"ksc_5601 => EUC-KR",
|
||||
"windows-949 => EUC-KR",
|
||||
"x-user-defined => x-user-defined"
|
||||
]
|
||||
},
|
||||
// TODO(lucacasonato): enable when we have stream support
|
||||
// "textdecoder-streaming",
|
||||
"textdecoder-utf16-surrogates",
|
||||
{
|
||||
"name": "textencoder-constructor-non-utf",
|
||||
"expectFail": [
|
||||
"Encoding argument supported for decode: EUC-JP",
|
||||
"Encoding argument supported for decode: ISO-2022-JP",
|
||||
"Encoding argument supported for decode: Shift_JIS",
|
||||
"Encoding argument supported for decode: EUC-KR",
|
||||
"Encoding argument supported for decode: x-user-defined"
|
||||
]
|
||||
},
|
||||
"textencoder-utf16-surrogates",
|
||||
"legacy-mb-schinese"
|
||||
// TODO(lucacasonato): uses XMLHttpRequest unnecessarily. should be fixed upstream before enabling
|
||||
// "unsupported-encodings",
|
||||
],
|
||||
"dom": [
|
||||
"abort/event"
|
||||
],
|
||||
"hr-time": [
|
||||
"monotonic-clock"
|
||||
],
|
||||
"html": [
|
||||
"webappapis/microtask-queuing/queue-microtask-exceptions.any",
|
||||
"webappapis/microtask-queuing/queue-microtask.any",
|
||||
"webappapis/timers"
|
||||
],
|
||||
"user-timing": [
|
||||
"clear_all_marks",
|
||||
"clear_all_measures",
|
||||
"clear_non_existent_mark",
|
||||
"clear_non_existent_measure",
|
||||
"clear_one_mark",
|
||||
"clear_one_measure",
|
||||
"entry_type",
|
||||
"mark-entry-constructor",
|
||||
"mark-errors",
|
||||
"mark-measure-return-objects",
|
||||
"mark.any",
|
||||
"measure_syntax_err",
|
||||
"measure-l3",
|
||||
"structured-serialize-detail",
|
||||
"user_timing_exists"
|
||||
],
|
||||
"wasm": [
|
||||
"jsapi/constructor/compile",
|
||||
"jsapi/constructor/multi-value",
|
||||
"jsapi/constructor/toStringTag",
|
||||
"jsapi/constructor/validate",
|
||||
"jsapi/global/constructor",
|
||||
"jsapi/global/toString",
|
||||
"jsapi/global/value-get-set",
|
||||
"jsapi/global/valueOf",
|
||||
"jsapi/instance/toString",
|
||||
"jsapi/instance/constructor-caching",
|
||||
"jsapi/memory/toString",
|
||||
"jsapi/module/constructor",
|
||||
"jsapi/module/customSections",
|
||||
"jsapi/module/exports",
|
||||
"jsapi/module/imports",
|
||||
"jsapi/module/toString",
|
||||
"jsapi/table/get-set",
|
||||
"jsapi/table/toString",
|
||||
"webapi/body",
|
||||
"webapi/invalid-args",
|
||||
"webapi/rejected-arg",
|
||||
"webapi/status",
|
||||
"webapi/create_multiple_memory",
|
||||
"create_multiple_memory"
|
||||
//FAILING TESTS
|
||||
// "jsapi/constructor/instantiate-bad-imports",
|
||||
// "jsapi/constructor/instantiate",
|
||||
// "jsapi/global/type",
|
||||
// "jsapi/instance/constructor-bad-imports",
|
||||
// "jsapi/instance/constructor",
|
||||
// "jsapi/instance/exports",
|
||||
// "jsapi/memory/buffer",
|
||||
// "jsapi/memory/constructor-shared",
|
||||
// "jsapi/memory/constructor-types",
|
||||
// "jsapi/memory/constructor",
|
||||
// "jsapi/memory/grow",
|
||||
// "jsapi/memory/type",
|
||||
// "jsapi/table/constructor-types",
|
||||
// "jsapi/table/constructor",
|
||||
// "jsapi/table/grow-reftypes",
|
||||
// "jsapi/table/grow",
|
||||
// "jsapi/table/length",
|
||||
// "jsapi/idlharness",
|
||||
// "jsapi/instance",
|
||||
// "jsapi/prototypes",
|
||||
// "serialization/arraybuffer/transfer"
|
||||
// "serialization/module/nested-worker-success",
|
||||
// "serialization/module/serialization-via-idb",
|
||||
// "serialization/module/serialization-via-notifications-api",
|
||||
// "webapi/abort",
|
||||
// "webapi/contenttype",
|
||||
// "webapi/empty-body",
|
||||
// "webapi/historical",
|
||||
// "webapi/idlharness",
|
||||
// "webapi/instantiateStreaming-bad-imports",
|
||||
// "webapi/instantiateStreaming",
|
||||
// "webapi/invalid-code",
|
||||
// "webapi/origin",
|
||||
],
|
||||
"console": [
|
||||
"console-is-a-namespace",
|
||||
"console-label-conversion",
|
||||
"console-namespace-object-class-string",
|
||||
"console-tests-historical"
|
||||
],
|
||||
"WebCryptoApi": [
|
||||
"getRandomValues"
|
||||
],
|
||||
"WebIDL": [
|
||||
"ecmascript-binding/es-exceptions/DOMException-constants",
|
||||
"ecmascript-binding/es-exceptions/DOMException-constructor-and-prototype",
|
||||
"ecmascript-binding/es-exceptions/DOMException-constructor-behavior",
|
||||
{
|
||||
"name": "ecmascript-binding/es-exceptions/DOMException-custom-bindings",
|
||||
"expectFail": [
|
||||
// TODO(kt3k): Enable this test.
|
||||
// We can pass this test by using Object.setPrototypeOf(...) instead of
|
||||
// class...extends, but that causes a problem in printing of uncaught
|
||||
// DOMException. We might need to modify how to print uncaught error in
|
||||
// `//core/error.rs`.
|
||||
"does not inherit from Error: class-side"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
@ -1,129 +0,0 @@
|
||||
const noColor = globalThis.Deno?.noColor ?? true;
|
||||
const enabled = !noColor;
|
||||
|
||||
function code(open, close) {
|
||||
return {
|
||||
open: `\x1b[${open.join(";")}m`,
|
||||
close: `\x1b[${close}m`,
|
||||
regexp: new RegExp(`\\x1b\\[${close}m`, "g"),
|
||||
};
|
||||
}
|
||||
|
||||
function run(str, code) {
|
||||
return enabled
|
||||
? `${code.open}${str.replace(code.regexp, code.open)}${code.close}`
|
||||
: str;
|
||||
}
|
||||
|
||||
function red(str) {
|
||||
return run(str, code([31], 39));
|
||||
}
|
||||
|
||||
export function green(str) {
|
||||
return run(str, code([32], 39));
|
||||
}
|
||||
|
||||
export function yellow(str) {
|
||||
return run(str, code([33], 39));
|
||||
}
|
||||
|
||||
const testResults = [];
|
||||
const testsExpectFail = JSON.parse(Deno.args[0]);
|
||||
function shouldExpectFail(name) {
|
||||
if (testsExpectFail.includes(name)) return true;
|
||||
for (const expectFail of testsExpectFail) {
|
||||
if (name.startsWith(expectFail)) return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
window.add_result_callback(({ message, name, stack, status }) => {
|
||||
const expectFail = shouldExpectFail(name);
|
||||
testResults.push({
|
||||
name,
|
||||
passed: status === 0,
|
||||
expectFail,
|
||||
message,
|
||||
stack,
|
||||
});
|
||||
let simpleMessage = `test ${name} ... `;
|
||||
switch (status) {
|
||||
case 0:
|
||||
if (expectFail) {
|
||||
simpleMessage += red("ok (expected fail)");
|
||||
} else {
|
||||
simpleMessage += green("ok");
|
||||
if (Deno.args[1] == "--quiet") {
|
||||
// don't print `ok` tests if --quiet is enabled
|
||||
return;
|
||||
}
|
||||
}
|
||||
break;
|
||||
case 1:
|
||||
if (expectFail) {
|
||||
simpleMessage += yellow("failed (expected)");
|
||||
} else {
|
||||
simpleMessage += red("failed");
|
||||
}
|
||||
break;
|
||||
case 2:
|
||||
if (expectFail) {
|
||||
simpleMessage += yellow("failed (expected)");
|
||||
} else {
|
||||
simpleMessage += red("failed (timeout)");
|
||||
}
|
||||
break;
|
||||
case 3:
|
||||
if (expectFail) {
|
||||
simpleMessage += yellow("failed (expected)");
|
||||
} else {
|
||||
simpleMessage += red("failed (incomplete)");
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
console.log(simpleMessage);
|
||||
});
|
||||
|
||||
window.add_completion_callback((tests, harnessStatus) => {
|
||||
const failed = testResults.filter((t) => !t.expectFail && !t.passed);
|
||||
const expectedFailedButPassed = testResults.filter((t) =>
|
||||
t.expectFail && t.passed
|
||||
);
|
||||
const expectedFailedButPassedCount = expectedFailedButPassed.length;
|
||||
const failedCount = failed.length + expectedFailedButPassedCount;
|
||||
const expectedFailedAndFailedCount = testResults.filter((t) =>
|
||||
t.expectFail && !t.passed
|
||||
).length;
|
||||
const totalCount = testResults.length;
|
||||
const passedCount = totalCount - failedCount - expectedFailedAndFailedCount;
|
||||
|
||||
if (failed.length > 0) {
|
||||
console.log(`\nfailures:`);
|
||||
}
|
||||
for (const result of failed) {
|
||||
console.log(
|
||||
`\n${result.name}\n${result.message}\n${result.stack}`,
|
||||
);
|
||||
}
|
||||
|
||||
if (failed.length > 0) {
|
||||
console.log(`\nfailures:\n`);
|
||||
}
|
||||
for (const result of failed) {
|
||||
console.log(` ${result.name}`);
|
||||
}
|
||||
if (expectedFailedButPassedCount > 0) {
|
||||
console.log(`\nexpected failures that passed:\n`);
|
||||
}
|
||||
for (const result of expectedFailedButPassed) {
|
||||
console.log(` ${result.name}`);
|
||||
}
|
||||
console.log(
|
||||
`\ntest result: ${
|
||||
failedCount > 0 ? red("failed") : green("ok")
|
||||
}. ${passedCount} passed; ${failedCount} failed; ${expectedFailedAndFailedCount} expected failure; total ${totalCount}\n`,
|
||||
);
|
||||
|
||||
Deno.exit(failedCount > 0 ? 1 : 0);
|
||||
});
|
`docs/contributing/web_platform_tests.md` (new file, 122 lines)
@@ -0,0 +1,122 @@
## Web Platform Tests

Deno uses a custom test runner for Web Platform Tests. It can be found at
`./tools/wpt.ts`.

### Running tests

> If you are on Windows, or your system does not support shebangs, prefix all
> `./tools/wpt.ts` commands with
> `deno run --unstable --allow-write --allow-read --allow-net --allow-env --allow-run`.

Before attempting to run WPT tests for the first time, please run the WPT setup.
You must also run this command every time the `./test_util/wpt` submodule is
updated:

```shell
./tools/wpt.ts setup
```

To run all available web platform tests, run the following command:

```shell
./tools/wpt.ts run

# You can also filter which test files to run by specifying filters:
./tools/wpt.ts run -- streams/piping/general hr-time
```

The test runner will run each web platform test and record its status (failed or
ok). It will then compare this output to the expected output of each test as
specified in the `./tools/wpt/expectation.json` file. This file is a nested JSON
structure that mirrors the `./test_util/wpt` directory. For each test file it
describes whether the file should pass as a whole (all tests pass, `true`), fail
as a whole (the test runner encounters an exception outside of a test, or all
tests fail, `false`), or which test cases are expected to fail (a string array
of test case names).
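To make that structure concrete, here is a minimal sketch in TypeScript of how
the runner sees an entry. The recursive `Expectation` type is an assumption
based on how `./tools/wpt.ts` indexes the file (the real type is imported from
`./tools/wpt/utils.ts`, which is not shown on this page); the sample values are
copied from the `hr-time` section of this commit's `expectation.json`.

```ts
// Sketch only: the `Expectation` shape is inferred from how ./tools/wpt.ts
// walks the file; the concrete values below are taken from the "hr-time"
// section of this commit's tools/wpt/expectation.json.
type Expectation = {
  [pathSegment: string]: Expectation | boolean | string[];
};

const example: Expectation = {
  "hr-time": {
    "monotonic-clock.any.js": true, // whole file expected to pass
    "idlharness.any.js": false, // whole file expected to fail
    "basic.any.js": [
      // only the listed test cases are expected to fail
      "Performance interface extends EventTarget.",
    ],
  },
};
```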
### Updating enabled tests or expectations

You can update the `./tools/wpt/expectation.json` file manually by changing the
value of each of the test file entries in the JSON structure. The alternative
and preferred option is to have the WPT runner run all, or a filtered subset of,
tests, and then automatically update the `expectation.json` file to match the
current reality. You can do this with the `./tools/wpt.ts update` command.
Example:

```shell
./tools/wpt.ts update -- hr-time
```

After running this command the `expectation.json` file will match the current
output of all the tests that were run. This means that running `wpt.ts run`
right after a `wpt.ts update` should always pass.
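The rule `update` uses to turn a test file's results into its new expectation
can be summarised with a small sketch. It mirrors the decision logic in the
`update()` function of `./tools/wpt.ts` later in this commit; `newExpectation`
is a hypothetical helper name used only for illustration.

```ts
// Mirrors the expectation-update logic in update() in ./tools/wpt.ts.
// `newExpectation` is a hypothetical name used for illustration only.
function newExpectation(
  passed: string[], // names of test cases that passed
  failed: string[], // names of test cases that failed
  status: number, // exit status of the runner for this test file
): boolean | string[] {
  if (failed.length === 0 && status === 0) {
    return true; // everything passed: expect the whole file to pass
  } else if (failed.length > 0 && passed.length > 0 && status === 0) {
    return failed; // partial failure: record the failing case names
  } else {
    return false; // runner crashed or nothing passed: expect the file to fail
  }
}
```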
### Subcommands

#### `setup`

Validate that your environment is configured correctly, or help you configure
it.

This will check that `python3` (or `python.exe` on Windows) is actually
Python 3.

#### `run`

Run all tests as specified in `expectation.json`.

You can specify the following flags to customize behaviour:

```
--release
    Use the ./target/release/deno binary instead of ./target/debug/deno

--quiet
    Disable printing of `ok` test cases.

--json=<file>
    Output the test results as JSON to the file specified.
```

You can also specify exactly which tests to run by specifying one or more
filters after a `--`:

```
./tools/wpt.ts run -- hr-time streams/piping/general
```

#### `update`

Update the `expectation.json` to match the current reality.

You can specify the following flags to customize behaviour:

```
--release
    Use the ./target/release/deno binary instead of ./target/debug/deno

--quiet
    Disable printing of `ok` test cases.

--json=<file>
    Output the test results as JSON to the file specified.
```

You can also specify exactly which tests to run by specifying one or more
filters after a `--`:

```
./tools/wpt.ts update -- hr-time streams/piping/general
```

### FAQ

#### Upgrading the wpt submodule:

```shell
cd test_util/wpt/
# Rebase to retain our modifications
git rebase origin/master
git push denoland
```

All contributors will need to rerun `./tools/wpt.ts setup` after this.
@@ -1 +1 @@
-Subproject commit 581873eb00db0820a0d425dd9c005705cfbbc06f
+Subproject commit 928edf7353e946398020326964d42de56b3cd542
@@ -3,9 +3,11 @@ import {
   dirname,
   fromFileUrl,
   join,
-} from "https://deno.land/std@0.76.0/path/mod.ts";
+} from "https://deno.land/std@0.84.0/path/mod.ts";
 export { dirname, join };
-export { existsSync } from "https://deno.land/std@0.76.0/fs/mod.ts";
+export { existsSync } from "https://deno.land/std@0.84.0/fs/mod.ts";
+export { readLines } from "https://deno.land/std@0.84.0/io/mod.ts";
+export { delay } from "https://deno.land/std@0.84.0/async/delay.ts";

 export const ROOT_PATH = dirname(dirname(fromFileUrl(import.meta.url)));
`tools/wpt.ts` (new executable file, 533 lines)
@ -0,0 +1,533 @@
|
||||
#!/usr/bin/env -S deno run --unstable --allow-write --allow-read --allow-net --allow-env --allow-run
|
||||
// Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
// This script is used to run WPT tests for Deno.
|
||||
|
||||
import {
|
||||
runSingleTest,
|
||||
runWithTestUtil,
|
||||
TestCaseResult,
|
||||
TestResult,
|
||||
} from "./wpt/runner.ts";
|
||||
import {
|
||||
assert,
|
||||
autoConfig,
|
||||
cargoBuild,
|
||||
checkPy3Available,
|
||||
Expectation,
|
||||
getExpectation,
|
||||
getExpectFailForCase,
|
||||
getManifest,
|
||||
json,
|
||||
ManifestFolder,
|
||||
ManifestTestOptions,
|
||||
ManifestTestVariation,
|
||||
quiet,
|
||||
rest,
|
||||
runPy,
|
||||
updateManifest,
|
||||
} from "./wpt/utils.ts";
|
||||
import {
|
||||
blue,
|
||||
bold,
|
||||
green,
|
||||
red,
|
||||
yellow,
|
||||
} from "https://deno.land/std@0.84.0/fmt/colors.ts";
|
||||
import { saveExpectation } from "./wpt/utils.ts";
|
||||
|
||||
const command = Deno.args[0];
|
||||
|
||||
switch (command) {
|
||||
case "setup":
|
||||
await checkPy3Available();
|
||||
await updateManifest();
|
||||
await setup();
|
||||
break;
|
||||
|
||||
case "run":
|
||||
await cargoBuild();
|
||||
await run();
|
||||
break;
|
||||
|
||||
case "update":
|
||||
await cargoBuild();
|
||||
await update();
|
||||
break;
|
||||
|
||||
default:
|
||||
console.log(`Possible commands:
|
||||
|
||||
setup
|
||||
Validate that your environment is configured correctly, or help you configure it.
|
||||
|
||||
run
|
||||
Run all tests like specified in \`expectation.json\`.
|
||||
|
||||
update
|
||||
Update the \`expectation.json\` to match the current reality.
|
||||
|
||||
More details at https://deno.land/manual@master/contributing/web_platform_tests
|
||||
|
||||
`);
|
||||
break;
|
||||
}
|
||||
|
||||
async function setup() {
|
||||
// TODO(lucacsonato): use this when 1.7.1 is released.
|
||||
// const records = await Deno.resolveDns("web-platform.test", "A");
|
||||
// const etcHostsConfigured = records[0] == "127.0.0.1";
|
||||
const hostsFile = await Deno.readTextFile("/etc/hosts");
|
||||
const etcHostsConfigured = hostsFile.includes("web-platform.test");
|
||||
|
||||
if (etcHostsConfigured) {
|
||||
console.log("/etc/hosts is already configured.");
|
||||
} else {
|
||||
const autoConfigure = autoConfig ||
|
||||
confirm(
|
||||
"The WPT require certain entries to be present in your /etc/hosts file. Should these be configured automatically?",
|
||||
);
|
||||
if (autoConfigure) {
|
||||
const proc = runPy(["wpt", "make-hosts-file"], { stdout: "piped" });
|
||||
const status = await proc.status();
|
||||
assert(status.success, "wpt make-hosts-file should not fail");
|
||||
const entries = new TextDecoder().decode(await proc.output());
|
||||
const hostsPath = Deno.build.os == "windows"
|
||||
? `${Deno.env.get("SystemRoot")}\\System32\\drivers\\etc\\hosts`
|
||||
: "/etc/hosts";
|
||||
const file = await Deno.open(hostsPath, { append: true }).catch((err) => {
|
||||
if (err instanceof Deno.errors.PermissionDenied) {
|
||||
throw new Error(
|
||||
`Failed to open ${hostsPath} (permission error). Please run this command again with sudo, or configure the entries manually.`,
|
||||
);
|
||||
} else {
|
||||
throw err;
|
||||
}
|
||||
});
|
||||
await Deno.writeAll(
|
||||
file,
|
||||
new TextEncoder().encode(
|
||||
"\n\n# Configured for Web Platform Tests (Deno)\n" + entries,
|
||||
),
|
||||
);
|
||||
console.log("Updated /etc/hosts");
|
||||
} else {
|
||||
console.log("Please configure the /etc/hosts entries manually.");
|
||||
if (Deno.build.os == "windows") {
|
||||
console.log("To do this run the following command in PowerShell:");
|
||||
console.log("");
|
||||
console.log(" cd test_util/wpt/");
|
||||
console.log(
|
||||
" python.exe wpt make-hosts-file | Out-File $env:SystemRoot\\System32\\drivers\\etc\\hosts -Encoding ascii -Append",
|
||||
);
|
||||
console.log("");
|
||||
} else {
|
||||
console.log("To do this run the following command in your shell:");
|
||||
console.log("");
|
||||
console.log(" cd test_util/wpt/");
|
||||
console.log(
|
||||
" python3 ./wpt make-hosts-file | sudo tee -a /etc/hosts",
|
||||
);
|
||||
console.log("");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
console.log(green("Setup complete!"));
|
||||
}
|
||||
|
||||
interface TestToRun {
|
||||
sourcePath: string;
|
||||
path: string;
|
||||
url: URL;
|
||||
options: ManifestTestOptions;
|
||||
expectation: boolean | string[];
|
||||
}
|
||||
|
||||
async function run() {
|
||||
assert(Array.isArray(rest), "filter must be array");
|
||||
const tests = discoverTestsToRun(rest.length == 0 ? undefined : rest);
|
||||
console.log(`Going to run ${tests.length} test files.`);
|
||||
|
||||
const results = await runWithTestUtil(false, async () => {
|
||||
const results = [];
|
||||
|
||||
for (const test of tests) {
|
||||
console.log(`${blue("-".repeat(40))}\n${bold(test.path)}\n`);
|
||||
const result = await runSingleTest(
|
||||
test.url,
|
||||
test.options,
|
||||
json ? () => {} : createReportTestCase(test.expectation),
|
||||
);
|
||||
results.push({ test, result });
|
||||
reportVariation(result, test.expectation);
|
||||
}
|
||||
|
||||
return results;
|
||||
});
|
||||
|
||||
if (json) {
|
||||
await Deno.writeTextFile(json, JSON.stringify(results));
|
||||
}
|
||||
const code = reportFinal(results);
|
||||
Deno.exit(code);
|
||||
}
|
||||
|
||||
async function update() {
|
||||
assert(Array.isArray(rest), "filter must be array");
|
||||
const tests = discoverTestsToRun(rest.length == 0 ? undefined : rest, true);
|
||||
console.log(`Going to run ${tests.length} test files.`);
|
||||
|
||||
const results = await runWithTestUtil(false, async () => {
|
||||
const results = [];
|
||||
|
||||
for (const test of tests) {
|
||||
console.log(`${blue("-".repeat(40))}\n${bold(test.path)}\n`);
|
||||
const result = await runSingleTest(
|
||||
test.url,
|
||||
test.options,
|
||||
json ? () => {} : createReportTestCase(test.expectation),
|
||||
);
|
||||
results.push({ test, result });
|
||||
reportVariation(result, test.expectation);
|
||||
}
|
||||
|
||||
return results;
|
||||
});
|
||||
|
||||
if (json) {
|
||||
await Deno.writeTextFile(json, JSON.stringify(results));
|
||||
}
|
||||
|
||||
const resultTests: Record<
|
||||
string,
|
||||
{ passed: string[]; failed: string[]; status: number }
|
||||
> = {};
|
||||
for (const { test, result } of results) {
|
||||
if (!resultTests[test.sourcePath]) {
|
||||
resultTests[test.sourcePath] = {
|
||||
passed: [],
|
||||
failed: [],
|
||||
status: result.status,
|
||||
};
|
||||
}
|
||||
for (const case_ of result.cases) {
|
||||
if (case_.passed) {
|
||||
resultTests[test.sourcePath].passed.push(case_.name);
|
||||
} else {
|
||||
resultTests[test.sourcePath].failed.push(case_.name);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const currentExpectation = getExpectation();
|
||||
|
||||
for (const path in resultTests) {
|
||||
const { passed, failed, status } = resultTests[path];
|
||||
let finalExpectation: boolean | string[];
|
||||
if (failed.length == 0 && status == 0) {
|
||||
finalExpectation = true;
|
||||
} else if (failed.length > 0 && passed.length > 0 && status == 0) {
|
||||
finalExpectation = failed;
|
||||
} else {
|
||||
finalExpectation = false;
|
||||
}
|
||||
|
||||
insertExpectation(
|
||||
path.slice(1).split("/"),
|
||||
currentExpectation,
|
||||
finalExpectation,
|
||||
);
|
||||
}
|
||||
|
||||
saveExpectation(currentExpectation);
|
||||
|
||||
reportFinal(results);
|
||||
|
||||
console.log(blue("Updated expectation.json to match reality."));
|
||||
|
||||
Deno.exit(0);
|
||||
}
|
||||
|
||||
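// Recursively walks `segments` (a test file's source path split on "/") down
// into the nested expectation object, creating intermediate folder objects as
// needed, and stores `finalExpectation` (true, false, or a list of failing
// case names) at the leaf for that test file.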
function insertExpectation(
|
||||
segments: string[],
|
||||
currentExpectation: Expectation,
|
||||
finalExpectation: boolean | string[],
|
||||
) {
|
||||
const segment = segments.shift();
|
||||
assert(segment, "segments array must never be empty");
|
||||
if (segments.length > 0) {
|
||||
if (
|
||||
!currentExpectation[segment] ||
|
||||
Array.isArray(currentExpectation[segment]) ||
|
||||
typeof currentExpectation[segment] === "boolean"
|
||||
) {
|
||||
currentExpectation[segment] = {};
|
||||
}
|
||||
insertExpectation(
|
||||
segments,
|
||||
currentExpectation[segment] as Expectation,
|
||||
finalExpectation,
|
||||
);
|
||||
} else {
|
||||
currentExpectation[segment] = finalExpectation;
|
||||
}
|
||||
}
|
||||
|
||||
function reportFinal(
|
||||
results: { test: TestToRun; result: TestResult }[],
|
||||
): number {
|
||||
const finalTotalCount = results.length;
|
||||
let finalFailedCount = 0;
|
||||
const finalFailed: [string, TestCaseResult][] = [];
|
||||
let finalExpectedFailedAndFailedCount = 0;
|
||||
const finalExpectedFailedButPassedTests: [string, TestCaseResult][] = [];
|
||||
const finalExpectedFailedButPassedFiles: string[] = [];
|
||||
for (const { test, result } of results) {
|
||||
const { failed, failedCount, expectedFailedButPassed } = analyzeTestResult(
|
||||
result,
|
||||
test.expectation,
|
||||
);
|
||||
if (result.status !== 0) {
|
||||
if (test.expectation === false) {
|
||||
finalExpectedFailedAndFailedCount += 1;
|
||||
} else {
|
||||
finalFailedCount += 1;
|
||||
finalExpectedFailedButPassedFiles.push(test.path);
|
||||
}
|
||||
} else if (failedCount > 0) {
|
||||
finalFailedCount += 1;
|
||||
for (const case_ of failed) {
|
||||
finalFailed.push([test.path, case_]);
|
||||
}
|
||||
for (const case_ of expectedFailedButPassed) {
|
||||
finalExpectedFailedButPassedTests.push([test.path, case_]);
|
||||
}
|
||||
}
|
||||
}
|
||||
const finalPassedCount = finalTotalCount - finalFailedCount;
|
||||
|
||||
console.log(bold(blue("=".repeat(40))));
|
||||
|
||||
if (finalFailed.length > 0) {
|
||||
console.log(`\nfailures:\n`);
|
||||
}
|
||||
for (const result of finalFailed) {
|
||||
console.log(
|
||||
` ${JSON.stringify(`${result[0]} - ${result[1].name}`)}`,
|
||||
);
|
||||
}
|
||||
if (finalExpectedFailedButPassedTests.length > 0) {
|
||||
console.log(`\nexpected test failures that passed:\n`);
|
||||
}
|
||||
for (const result of finalExpectedFailedButPassedTests) {
|
||||
console.log(
|
||||
` ${JSON.stringify(`${result[0]} - ${result[1].name}`)}`,
|
||||
);
|
||||
}
|
||||
if (finalExpectedFailedButPassedFiles.length > 0) {
|
||||
console.log(`\nexpected file failures that passed:\n`);
|
||||
}
|
||||
for (const result of finalExpectedFailedButPassedFiles) {
|
||||
console.log(` ${JSON.stringify(result)}`);
|
||||
}
|
||||
|
||||
console.log(
|
||||
`\nfinal result: ${
|
||||
finalFailedCount > 0 ? red("failed") : green("ok")
|
||||
}. ${finalPassedCount} passed; ${finalFailedCount} failed; ${finalExpectedFailedAndFailedCount} expected failure; total ${finalTotalCount}\n`,
|
||||
);
|
||||
|
||||
return finalFailedCount > 0 ? 1 : 0;
|
||||
}
|
||||
|
||||
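// Compares one test file's case results against its expectation: `failed`
// collects unexpected failures, `expectedFailedButPassed` collects cases that
// were expected to fail but passed, and the counts feed the summary output.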
function analyzeTestResult(
|
||||
result: TestResult,
|
||||
expectation: boolean | string[],
|
||||
): {
|
||||
failed: TestCaseResult[];
|
||||
failedCount: number;
|
||||
passedCount: number;
|
||||
totalCount: number;
|
||||
expectedFailedButPassed: TestCaseResult[];
|
||||
expectedFailedButPassedCount: number;
|
||||
expectedFailedAndFailedCount: number;
|
||||
} {
|
||||
const failed = result.cases.filter(
|
||||
(t) => !getExpectFailForCase(expectation, t.name) && !t.passed,
|
||||
);
|
||||
const expectedFailedButPassed = result.cases.filter(
|
||||
(t) => getExpectFailForCase(expectation, t.name) && t.passed,
|
||||
);
|
||||
const expectedFailedButPassedCount = expectedFailedButPassed.length;
|
||||
const failedCount = failed.length + expectedFailedButPassedCount;
|
||||
const expectedFailedAndFailedCount = result.cases.filter(
|
||||
(t) => getExpectFailForCase(expectation, t.name) && !t.passed,
|
||||
).length;
|
||||
const totalCount = result.cases.length;
|
||||
const passedCount = totalCount - failedCount - expectedFailedAndFailedCount;
|
||||
|
||||
return {
|
||||
failed,
|
||||
failedCount,
|
||||
passedCount,
|
||||
totalCount,
|
||||
expectedFailedButPassed,
|
||||
expectedFailedButPassedCount,
|
||||
expectedFailedAndFailedCount,
|
||||
};
|
||||
}
|
||||
|
||||
function reportVariation(result: TestResult, expectation: boolean | string[]) {
|
||||
if (result.status !== 0) {
|
||||
console.log(`test stderr:`);
|
||||
Deno.writeAllSync(Deno.stdout, new TextEncoder().encode(result.stderr));
|
||||
|
||||
const expectFail = expectation === false;
|
||||
console.log(
|
||||
`\nfile result: ${
|
||||
expectFail ? yellow("failed (expected)") : red("failed")
|
||||
}. runner failed during test\n`,
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
const {
|
||||
failed,
|
||||
failedCount,
|
||||
passedCount,
|
||||
totalCount,
|
||||
expectedFailedButPassed,
|
||||
expectedFailedButPassedCount,
|
||||
expectedFailedAndFailedCount,
|
||||
} = analyzeTestResult(result, expectation);
|
||||
|
||||
if (failed.length > 0) {
|
||||
console.log(`\nfailures:`);
|
||||
}
|
||||
for (const result of failed) {
|
||||
console.log(`\n${result.name}\n${result.message}\n${result.stack}`);
|
||||
}
|
||||
|
||||
if (failed.length > 0) {
|
||||
console.log(`\nfailures:\n`);
|
||||
}
|
||||
for (const result of failed) {
|
||||
console.log(` ${JSON.stringify(result.name)}`);
|
||||
}
|
||||
if (expectedFailedButPassedCount > 0) {
|
||||
console.log(`\nexpected failures that passed:\n`);
|
||||
}
|
||||
for (const result of expectedFailedButPassed) {
|
||||
console.log(` ${JSON.stringify(result.name)}`);
|
||||
}
|
||||
console.log(
|
||||
`\nfile result: ${
|
||||
failedCount > 0 ? red("failed") : green("ok")
|
||||
}. ${passedCount} passed; ${failedCount} failed; ${expectedFailedAndFailedCount} expected failure; total ${totalCount}\n`,
|
||||
);
|
||||
}
|
||||
|
||||
function createReportTestCase(expectation: boolean | string[]) {
|
||||
return function reportTestCase({ name, status }: TestCaseResult) {
|
||||
const expectFail = getExpectFailForCase(expectation, name);
|
||||
let simpleMessage = `test ${name} ... `;
|
||||
switch (status) {
|
||||
case 0:
|
||||
if (expectFail) {
|
||||
simpleMessage += red("ok (expected fail)");
|
||||
} else {
|
||||
simpleMessage += green("ok");
|
||||
if (quiet) {
|
||||
// don't print `ok` tests if --quiet is enabled
|
||||
return;
|
||||
}
|
||||
}
|
||||
break;
|
||||
case 1:
|
||||
if (expectFail) {
|
||||
simpleMessage += yellow("failed (expected)");
|
||||
} else {
|
||||
simpleMessage += red("failed");
|
||||
}
|
||||
break;
|
||||
case 2:
|
||||
if (expectFail) {
|
||||
simpleMessage += yellow("failed (expected)");
|
||||
} else {
|
||||
simpleMessage += red("failed (timeout)");
|
||||
}
|
||||
break;
|
||||
case 3:
|
||||
if (expectFail) {
|
||||
simpleMessage += yellow("failed (expected)");
|
||||
} else {
|
||||
simpleMessage += red("failed (incomplete)");
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
console.log(simpleMessage);
|
||||
};
|
||||
}
|
||||
|
||||
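// Reads the WPT manifest and, guided by expectation.json (plus an optional
// path filter), collects the list of ".any.html" test variations to run.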
function discoverTestsToRun(
|
||||
filter?: string[],
|
||||
expectation: Expectation | string[] | boolean = getExpectation(),
|
||||
): TestToRun[] {
|
||||
const manifestFolder = getManifest().items.testharness;
|
||||
|
||||
const testsToRun: TestToRun[] = [];
|
||||
|
||||
function walk(
|
||||
parentFolder: ManifestFolder,
|
||||
parentExpectation: Expectation | string[] | boolean,
|
||||
prefix: string,
|
||||
) {
|
||||
for (const key in parentFolder) {
|
||||
const sourcePath = `${prefix}/${key}`;
|
||||
const entry = parentFolder[key];
|
||||
const expectation = Array.isArray(parentExpectation) ||
|
||||
typeof parentExpectation == "boolean"
|
||||
? parentExpectation
|
||||
: parentExpectation[key];
|
||||
|
||||
if (expectation === undefined) continue;
|
||||
|
||||
if (Array.isArray(entry)) {
|
||||
assert(
|
||||
Array.isArray(expectation) || typeof expectation == "boolean",
|
||||
"test entry must not have a folder expectation",
|
||||
);
|
||||
if (
|
||||
filter &&
|
||||
!filter.find((filter) => sourcePath.substring(1).startsWith(filter))
|
||||
) {
|
||||
continue;
|
||||
}
|
||||
|
||||
for (
|
||||
const [path, options] of entry.slice(
|
||||
1,
|
||||
) as ManifestTestVariation[]
|
||||
) {
|
||||
if (!path) continue;
|
||||
const url = new URL(path, "http://web-platform.test:8000");
|
||||
if (!url.pathname.endsWith(".any.html")) continue;
|
||||
testsToRun.push({
|
||||
sourcePath,
|
||||
path: url.pathname + url.search,
|
||||
url,
|
||||
options,
|
||||
expectation,
|
||||
});
|
||||
}
|
||||
} else {
|
||||
walk(entry, expectation, sourcePath);
|
||||
}
|
||||
}
|
||||
}
|
||||
walk(manifestFolder, expectation, "");
|
||||
|
||||
return testsToRun;
|
||||
}
|
`tools/wpt/expectation.json` (new file, 622 lines)
@ -0,0 +1,622 @@
|
||||
{
|
||||
"WebCryptoAPI": {
|
||||
"getRandomValues.any.js": true
|
||||
},
|
||||
"console": {
|
||||
"console-is-a-namespace.any.js": true,
|
||||
"console-label-conversion.any.js": true,
|
||||
"console-namespace-object-class-string.any.js": true,
|
||||
"console-tests-historical.any.js": true,
|
||||
"idlharness.any.js": false
|
||||
},
|
||||
"dom": {
|
||||
"abort": {
|
||||
"event.any.js": true
|
||||
},
|
||||
"events": {
|
||||
"AddEventListenerOptions-signal.any.js": true,
|
||||
"Event-dispatch-listener-order.window.js": true,
|
||||
"Event-isTrusted.any.js": true,
|
||||
"EventListener-addEventListener.sub.window.js": true,
|
||||
"EventTarget-constructible.any.js": true,
|
||||
"event-global-extra.window.js": true,
|
||||
"event-global.worker.js": true,
|
||||
"legacy-pre-activation-behavior.window.js": true,
|
||||
"relatedTarget.window.js": true
|
||||
},
|
||||
"idlharness.any.js": false,
|
||||
"idlharness.window.js": false
|
||||
},
|
||||
"encoding": {
|
||||
"api-basics.any.js": true,
|
||||
"api-invalid-label.any.js": true,
|
||||
"api-replacement-encodings.any.js": true,
|
||||
"api-surrogates-utf8.any.js": true,
|
||||
"encodeInto.any.js": [
|
||||
"encodeInto() into SharedArrayBuffer with Hi and destination length 0, offset 0, filler 0",
|
||||
"encodeInto() into SharedArrayBuffer with Hi and destination length 0, offset 4, filler 0",
|
||||
"encodeInto() into SharedArrayBuffer with Hi and destination length 0, offset 0, filler 128",
|
||||
"encodeInto() into SharedArrayBuffer with Hi and destination length 0, offset 4, filler 128",
|
||||
"encodeInto() into SharedArrayBuffer with Hi and destination length 0, offset 0, filler random",
|
||||
"encodeInto() into SharedArrayBuffer with Hi and destination length 0, offset 4, filler random",
|
||||
"encodeInto() into SharedArrayBuffer with A and destination length 10, offset 0, filler 0",
|
||||
"encodeInto() into SharedArrayBuffer with A and destination length 10, offset 4, filler 0",
|
||||
"encodeInto() into SharedArrayBuffer with A and destination length 10, offset 0, filler 128",
|
||||
"encodeInto() into SharedArrayBuffer with A and destination length 10, offset 4, filler 128",
|
||||
"encodeInto() into SharedArrayBuffer with A and destination length 10, offset 0, filler random",
|
||||
"encodeInto() into SharedArrayBuffer with A and destination length 10, offset 4, filler random",
|
||||
"encodeInto() into SharedArrayBuffer with 𝌆 and destination length 4, offset 0, filler 0",
|
||||
"encodeInto() into SharedArrayBuffer with 𝌆 and destination length 4, offset 4, filler 0",
|
||||
"encodeInto() into SharedArrayBuffer with 𝌆 and destination length 4, offset 0, filler 128",
|
||||
"encodeInto() into SharedArrayBuffer with 𝌆 and destination length 4, offset 4, filler 128",
|
||||
"encodeInto() into SharedArrayBuffer with 𝌆 and destination length 4, offset 0, filler random",
|
||||
"encodeInto() into SharedArrayBuffer with 𝌆 and destination length 4, offset 4, filler random",
|
||||
"encodeInto() into SharedArrayBuffer with 𝌆A and destination length 3, offset 0, filler 0",
|
||||
"encodeInto() into SharedArrayBuffer with 𝌆A and destination length 3, offset 4, filler 0",
|
||||
"encodeInto() into SharedArrayBuffer with 𝌆A and destination length 3, offset 0, filler 128",
|
||||
"encodeInto() into SharedArrayBuffer with 𝌆A and destination length 3, offset 4, filler 128",
|
||||
"encodeInto() into SharedArrayBuffer with 𝌆A and destination length 3, offset 0, filler random",
|
||||
"encodeInto() into SharedArrayBuffer with 𝌆A and destination length 3, offset 4, filler random",
|
||||
"encodeInto() into ArrayBuffer with \ud834A\udf06A¥Hi and destination length 10, offset 0, filler 0",
|
||||
"encodeInto() into SharedArrayBuffer with \ud834A\udf06A¥Hi and destination length 10, offset 0, filler 0",
|
||||
"encodeInto() into ArrayBuffer with \ud834A\udf06A¥Hi and destination length 10, offset 4, filler 0",
|
||||
"encodeInto() into SharedArrayBuffer with \ud834A\udf06A¥Hi and destination length 10, offset 4, filler 0",
|
||||
"encodeInto() into ArrayBuffer with \ud834A\udf06A¥Hi and destination length 10, offset 0, filler 128",
|
||||
"encodeInto() into SharedArrayBuffer with \ud834A\udf06A¥Hi and destination length 10, offset 0, filler 128",
|
||||
"encodeInto() into ArrayBuffer with \ud834A\udf06A¥Hi and destination length 10, offset 4, filler 128",
|
||||
"encodeInto() into SharedArrayBuffer with \ud834A\udf06A¥Hi and destination length 10, offset 4, filler 128",
|
||||
"encodeInto() into ArrayBuffer with \ud834A\udf06A¥Hi and destination length 10, offset 0, filler random",
|
||||
"encodeInto() into SharedArrayBuffer with \ud834A\udf06A¥Hi and destination length 10, offset 0, filler random",
|
||||
"encodeInto() into ArrayBuffer with \ud834A\udf06A¥Hi and destination length 10, offset 4, filler random",
|
||||
"encodeInto() into SharedArrayBuffer with \ud834A\udf06A¥Hi and destination length 10, offset 4, filler random",
|
||||
"encodeInto() into ArrayBuffer with A\udf06 and destination length 4, offset 0, filler 0",
|
||||
"encodeInto() into SharedArrayBuffer with A\udf06 and destination length 4, offset 0, filler 0",
|
||||
"encodeInto() into ArrayBuffer with A\udf06 and destination length 4, offset 4, filler 0",
|
||||
"encodeInto() into SharedArrayBuffer with A\udf06 and destination length 4, offset 4, filler 0",
|
||||
"encodeInto() into ArrayBuffer with A\udf06 and destination length 4, offset 0, filler 128",
|
||||
"encodeInto() into SharedArrayBuffer with A\udf06 and destination length 4, offset 0, filler 128",
|
||||
"encodeInto() into ArrayBuffer with A\udf06 and destination length 4, offset 4, filler 128",
|
||||
"encodeInto() into SharedArrayBuffer with A\udf06 and destination length 4, offset 4, filler 128",
|
||||
"encodeInto() into ArrayBuffer with A\udf06 and destination length 4, offset 0, filler random",
|
||||
"encodeInto() into SharedArrayBuffer with A\udf06 and destination length 4, offset 0, filler random",
|
||||
"encodeInto() into ArrayBuffer with A\udf06 and destination length 4, offset 4, filler random",
|
||||
"encodeInto() into SharedArrayBuffer with A\udf06 and destination length 4, offset 4, filler random",
|
||||
"encodeInto() into SharedArrayBuffer with ¥¥ and destination length 4, offset 0, filler 0",
|
||||
"encodeInto() into SharedArrayBuffer with ¥¥ and destination length 4, offset 4, filler 0",
|
||||
"encodeInto() into SharedArrayBuffer with ¥¥ and destination length 4, offset 0, filler 128",
|
||||
"encodeInto() into SharedArrayBuffer with ¥¥ and destination length 4, offset 4, filler 128",
|
||||
"encodeInto() into SharedArrayBuffer with ¥¥ and destination length 4, offset 0, filler random",
|
||||
"encodeInto() into SharedArrayBuffer with ¥¥ and destination length 4, offset 4, filler random",
|
||||
"encodeInto() and a detached output buffer",
|
||||
"Invalid encodeInto() destination: DataView, backed by: SharedArrayBuffer",
|
||||
"Invalid encodeInto() destination: Int8Array, backed by: SharedArrayBuffer",
|
||||
"Invalid encodeInto() destination: Int16Array, backed by: SharedArrayBuffer",
|
||||
"Invalid encodeInto() destination: Int32Array, backed by: SharedArrayBuffer",
|
||||
"Invalid encodeInto() destination: Uint16Array, backed by: SharedArrayBuffer",
|
||||
"Invalid encodeInto() destination: Uint32Array, backed by: SharedArrayBuffer",
|
||||
"Invalid encodeInto() destination: Uint8ClampedArray, backed by: SharedArrayBuffer",
|
||||
"Invalid encodeInto() destination: Float32Array, backed by: SharedArrayBuffer",
|
||||
"Invalid encodeInto() destination: Float64Array, backed by: SharedArrayBuffer",
|
||||
"Invalid encodeInto() destination: SharedArrayBuffer"
|
||||
],
|
||||
"idlharness.any.js": false,
|
||||
"iso-2022-jp-decoder.any.js": false,
|
||||
"legacy-mb-schinese": {
|
||||
"gb18030": {
|
||||
"gb18030-decoder.any.js": true
|
||||
},
|
||||
"gbk": {
|
||||
"gbk-decoder.any.js": true
|
||||
}
|
||||
},
|
||||
"replacement-encodings.any.js": false,
|
||||
"streams": {
|
||||
"backpressure.any.js": false,
|
||||
"decode-attributes.any.js": false,
|
||||
"decode-bad-chunks.any.js": false,
|
||||
"decode-ignore-bom.any.js": false,
|
||||
"decode-incomplete-input.any.js": false,
|
||||
"decode-non-utf8.any.js": false,
|
||||
"decode-split-character.any.js": false,
|
||||
"decode-utf8.any.js": false,
|
||||
"encode-bad-chunks.any.js": false,
|
||||
"encode-utf8.any.js": false,
|
||||
"readable-writable-properties.any.js": false,
|
||||
"realms.window.js": false
|
||||
},
|
||||
"textdecoder-byte-order-marks.any.js": true,
|
||||
"textdecoder-copy.any.js": [
|
||||
"Modify buffer after passing it in (ArrayBuffer)",
|
||||
"Modify buffer after passing it in (SharedArrayBuffer)"
|
||||
],
|
||||
"textdecoder-fatal-single-byte.any.js": true,
|
||||
"textdecoder-fatal-streaming.any.js": [
|
||||
"Fatal flag, streaming cases"
|
||||
],
|
||||
"textdecoder-fatal.any.js": true,
|
||||
"textdecoder-ignorebom.any.js": true,
|
||||
"textdecoder-labels.any.js": [
|
||||
"cseucpkdfmtjapanese => EUC-JP",
|
||||
"euc-jp => EUC-JP",
|
||||
"x-euc-jp => EUC-JP",
|
||||
"csiso2022jp => ISO-2022-JP",
|
||||
"iso-2022-jp => ISO-2022-JP",
|
||||
"csshiftjis => Shift_JIS",
|
||||
"ms932 => Shift_JIS",
|
||||
"ms_kanji => Shift_JIS",
|
||||
"shift-jis => Shift_JIS",
|
||||
"shift_jis => Shift_JIS",
|
||||
"sjis => Shift_JIS",
|
||||
"windows-31j => Shift_JIS",
|
||||
"x-sjis => Shift_JIS",
|
||||
"cseuckr => EUC-KR",
|
||||
"csksc56011987 => EUC-KR",
|
||||
"euc-kr => EUC-KR",
|
||||
"iso-ir-149 => EUC-KR",
|
||||
"korean => EUC-KR",
|
||||
"ks_c_5601-1987 => EUC-KR",
|
||||
"ks_c_5601-1989 => EUC-KR",
|
||||
"ksc5601 => EUC-KR",
|
||||
"ksc_5601 => EUC-KR",
|
||||
"windows-949 => EUC-KR",
|
||||
"x-user-defined => x-user-defined"
|
||||
],
|
||||
"textdecoder-streaming.any.js": false,
|
||||
"textdecoder-utf16-surrogates.any.js": true,
|
||||
"textencoder-constructor-non-utf.any.js": [
|
||||
"Encoding argument supported for decode: EUC-JP",
|
||||
"Encoding argument supported for decode: ISO-2022-JP",
|
||||
"Encoding argument supported for decode: Shift_JIS",
|
||||
"Encoding argument supported for decode: EUC-KR",
|
||||
"Encoding argument supported for decode: x-user-defined"
|
||||
],
|
||||
"textencoder-utf16-surrogates.any.js": true,
|
||||
"unsupported-encodings.any.js": false
|
||||
},
|
||||
"hr-time": {
|
||||
"monotonic-clock.any.js": true,
|
||||
"basic.any.js": [
|
||||
"Performance interface extends EventTarget."
|
||||
],
|
||||
"idlharness.any.js": false
|
||||
},
|
||||
"streams": {
|
||||
"idlharness.any.js": false,
|
||||
"piping": {
|
||||
"abort.any.js": [
|
||||
"a signal argument 'null' should cause pipeTo() to reject",
|
||||
"a signal argument 'AbortSignal' should cause pipeTo() to reject",
|
||||
"a signal argument 'true' should cause pipeTo() to reject",
|
||||
"a signal argument '-1' should cause pipeTo() to reject",
|
||||
"a signal argument '[object AbortSignal]' should cause pipeTo() to reject"
|
||||
],
|
||||
"close-propagation-backward.any.js": [
|
||||
"Closing must be propagated backward: starts closed; preventCancel = null (falsy); fulfilled cancel promise",
|
||||
"Closing must be propagated backward: starts closed; preventCancel = 0 (falsy); fulfilled cancel promise",
|
||||
"Closing must be propagated backward: starts closed; preventCancel = -0 (falsy); fulfilled cancel promise",
|
||||
"Closing must be propagated backward: starts closed; preventCancel = NaN (falsy); fulfilled cancel promise",
|
||||
"Closing must be propagated backward: starts closed; preventCancel = (falsy); fulfilled cancel promise",
|
||||
"Closing must be propagated backward: starts closed; preventCancel = a (truthy)",
|
||||
"Closing must be propagated backward: starts closed; preventCancel = 1 (truthy)",
|
||||
"Closing must be propagated backward: starts closed; preventCancel = Symbol() (truthy)",
|
||||
"Closing must be propagated backward: starts closed; preventCancel = [object Object] (truthy)"
|
||||
],
|
||||
"close-propagation-forward.any.js": [
|
||||
"Closing must be propagated forward: starts closed; preventClose = null (falsy); fulfilled close promise",
|
||||
"Closing must be propagated forward: starts closed; preventClose = 0 (falsy); fulfilled close promise",
|
||||
"Closing must be propagated forward: starts closed; preventClose = -0 (falsy); fulfilled close promise",
|
||||
"Closing must be propagated forward: starts closed; preventClose = NaN (falsy); fulfilled close promise",
|
||||
"Closing must be propagated forward: starts closed; preventClose = (falsy); fulfilled close promise",
|
||||
"Closing must be propagated forward: starts closed; preventClose = a (truthy)",
|
||||
"Closing must be propagated forward: starts closed; preventClose = 1 (truthy)",
|
||||
"Closing must be propagated forward: starts closed; preventClose = Symbol() (truthy)",
|
||||
"Closing must be propagated forward: starts closed; preventClose = [object Object] (truthy)"
|
||||
],
|
||||
"error-propagation-backward.any.js": [
|
||||
"Errors must be propagated backward: becomes errored before piping due to write; preventCancel = null (falsy); fulfilled cancel promise",
|
||||
"Errors must be propagated backward: becomes errored before piping due to write; preventCancel = 0 (falsy); fulfilled cancel promise",
|
||||
"Errors must be propagated backward: becomes errored before piping due to write; preventCancel = -0 (falsy); fulfilled cancel promise",
|
||||
"Errors must be propagated backward: becomes errored before piping due to write; preventCancel = NaN (falsy); fulfilled cancel promise",
|
||||
"Errors must be propagated backward: becomes errored before piping due to write; preventCancel = (falsy); fulfilled cancel promise",
|
||||
"Errors must be propagated backward: becomes errored before piping due to write; preventCancel = a (truthy)",
|
||||
"Errors must be propagated backward: becomes errored before piping due to write; preventCancel = 1 (truthy)",
|
||||
"Errors must be propagated backward: becomes errored before piping due to write; preventCancel = Symbol() (truthy)",
|
||||
"Errors must be propagated backward: becomes errored before piping due to write; preventCancel = [object Object] (truthy)"
|
||||
],
|
||||
"error-propagation-forward.any.js": [
|
||||
"Errors must be propagated forward: starts errored; preventAbort = null (falsy); fulfilled abort promise",
|
||||
"Errors must be propagated forward: starts errored; preventAbort = 0 (falsy); fulfilled abort promise",
|
||||
"Errors must be propagated forward: starts errored; preventAbort = -0 (falsy); fulfilled abort promise",
|
||||
"Errors must be propagated forward: starts errored; preventAbort = NaN (falsy); fulfilled abort promise",
|
||||
"Errors must be propagated forward: starts errored; preventAbort = (falsy); fulfilled abort promise",
|
||||
"Errors must be propagated forward: starts errored; preventAbort = a (truthy)",
|
||||
"Errors must be propagated forward: starts errored; preventAbort = 1 (truthy)",
|
||||
"Errors must be propagated forward: starts errored; preventAbort = Symbol() (truthy)",
|
||||
"Errors must be propagated forward: starts errored; preventAbort = [object Object] (truthy)"
|
||||
],
|
||||
"flow-control.any.js": true,
|
||||
"general.any.js": [
|
||||
"pipeTo must check the brand of its ReadableStream this value",
|
||||
"pipeTo must check the brand of its WritableStream argument",
|
||||
"pipeTo() promise should resolve if null is passed"
|
||||
],
|
||||
"multiple-propagation.any.js": true,
|
||||
"pipe-through.any.js": true,
|
||||
"then-interception.any.js": true,
|
||||
"throwing-options.any.js": false,
|
||||
"transform-streams.any.js": true
|
||||
},
|
||||
"queuing-strategies-size-function-per-global.window.js": false,
|
||||
"queuing-strategies.any.js": true,
|
||||
"readable-byte-streams": {
|
||||
"bad-buffers-and-views.any.js": [
|
||||
"ReadableStream with byte source: respond() throws if the BYOB request's buffer has been detached (in the readable state)",
|
||||
"ReadableStream with byte source: respond() throws if the BYOB request's buffer has been detached (in the closed state)",
|
||||
"ReadableStream with byte source: respondWithNewView() throws if the supplied view's buffer has been detached (in the readable state)",
|
||||
"ReadableStream with byte source: respondWithNewView() throws if the supplied view's buffer is zero-length (in the readable state)",
|
||||
"ReadableStream with byte source: respondWithNewView() throws if the supplied view is zero-length on a non-zero-length buffer (in the readable state)",
|
||||
"ReadableStream with byte source: respondWithNewView() throws if the supplied view's buffer has been detached (in the closed state)",
|
||||
"ReadableStream with byte source: respondWithNewView() throws if the supplied view's buffer is zero-length (in the closed state)",
|
||||
"ReadableStream with byte source: respondWithNewView() throws if the supplied view is zero-length on a non-zero-length buffer (in the closed state)",
|
||||
"ReadableStream with byte source: read()ing from a closed stream still transfers the buffer",
|
||||
"ReadableStream with byte source: read()ing from a stream with queued chunks still transfers the buffer",
|
||||
"ReadableStream with byte source: reading into an already-detached buffer rejects",
|
||||
"ReadableStream with byte source: reading into a zero-length buffer rejects",
|
||||
"ReadableStream with byte source: reading into a zero-length view on a non-zero-length buffer rejects"
|
||||
],
|
||||
"construct-byob-request.any.js": false,
|
||||
"general.any.js": [
|
||||
"getReader({mode: \"byob\"}) throws on non-bytes streams",
|
||||
"ReadableStream with byte source can be constructed with no errors",
|
||||
"getReader({mode}) must perform ToString()",
|
||||
"ReadableStream with byte source: autoAllocateChunkSize cannot be 0",
|
||||
"ReadableStreamBYOBReader can be constructed directly",
|
||||
"ReadableStreamBYOBReader constructor requires a ReadableStream argument",
|
||||
"ReadableStreamBYOBReader constructor requires an unlocked ReadableStream",
|
||||
"ReadableStreamBYOBReader constructor requires a ReadableStream with type \"bytes\"",
|
||||
"ReadableStream with byte source: getReader() with mode set to byob, then releaseLock()",
|
||||
"ReadableStream with byte source: Test that closing a stream does not release a BYOB reader automatically",
|
||||
"ReadableStream with byte source: Test that erroring a stream does not release a BYOB reader automatically",
|
||||
"ReadableStream with byte source: autoAllocateChunkSize",
|
||||
"ReadableStream with byte source: Mix of auto allocate and BYOB",
|
||||
"ReadableStream with byte source: enqueue(), read(view) partially, then read()",
|
||||
"ReadableStream with byte source: Respond to pull() by enqueue()",
|
||||
"ReadableStream with byte source: Respond to pull() by enqueue() asynchronously",
|
||||
"ReadableStream with byte source: Respond to multiple pull() by separate enqueue()",
|
||||
"ReadableStream with byte source: read(view), then respond()",
|
||||
"ReadableStream with byte source: read(view), then respond() with a transferred ArrayBuffer",
|
||||
"ReadableStream with byte source: read(view), then respond() with too big value",
|
||||
"ReadableStream with byte source: respond(3) to read(view) with 2 element Uint16Array enqueues the 1 byte remainder",
|
||||
"ReadableStream with byte source: enqueue(), getReader(), then read(view)",
|
||||
"ReadableStream with byte source: enqueue(), getReader(), then cancel() (mode = BYOB)",
|
||||
"ReadableStream with byte source: getReader(), read(view), then cancel()",
|
||||
"ReadableStream with byte source: cancel() with partially filled pending pull() request",
|
||||
"ReadableStream with byte source: enqueue(), getReader(), then read(view) where view.buffer is not fully covered by view",
|
||||
"ReadableStream with byte source: Multiple enqueue(), getReader(), then read(view)",
|
||||
"ReadableStream with byte source: enqueue(), getReader(), then read(view) with a bigger view",
|
||||
"ReadableStream with byte source: enqueue(), getReader(), then read(view) with smaller views",
|
||||
"ReadableStream with byte source: enqueue() 1 byte, getReader(), then read(view) with Uint16Array",
|
||||
"ReadableStream with byte source: enqueue() 3 byte, getReader(), then read(view) with 2-element Uint16Array",
|
||||
"ReadableStream with byte source: read(view) with Uint16Array on close()-d stream with 1 byte enqueue()-d must fail",
|
||||
"ReadableStream with byte source: A stream must be errored if close()-d before fulfilling read(view) with Uint16Array",
|
||||
"ReadableStream with byte source: read(view), then respond() and close() in pull()",
|
||||
"ReadableStream with byte source: read(view) with Uint32Array, then fill it by multiple respond() calls",
|
||||
"ReadableStream with byte source: read() twice, then enqueue() twice",
|
||||
"ReadableStream with byte source: Multiple read(view), close() and respond()",
|
||||
"ReadableStream with byte source: Multiple read(view), big enqueue()",
|
||||
"ReadableStream with byte source: Multiple read(view) and multiple enqueue()",
|
||||
"ReadableStream with byte source: read(view) with passing undefined as view must fail",
|
||||
"ReadableStream with byte source: read(view) with passing an empty object as view must fail",
|
||||
"ReadableStream with byte source: Even read(view) with passing ArrayBufferView like object as view must fail",
|
||||
"ReadableStream with byte source: read(view) on an errored stream",
|
||||
"ReadableStream with byte source: read(view), then error()",
|
||||
"ReadableStream with byte source: Throwing in pull function must error the stream",
|
||||
"ReadableStream with byte source: Throwing in pull in response to read() must be ignored if the stream is errored in it",
|
||||
"ReadableStream with byte source: Throwing in pull in response to read(view) function must error the stream",
|
||||
"ReadableStream with byte source: Throwing in pull in response to read(view) must be ignored if the stream is errored in it",
|
||||
"calling respond() twice on the same byobRequest should throw",
|
||||
"calling respondWithNewView() twice on the same byobRequest should throw",
|
||||
"calling respond(0) twice on the same byobRequest should throw even when closed",
|
||||
"pull() resolving should not make releaseLock() possible",
|
||||
"ReadableStream with byte source: default reader + autoAllocateChunkSize + byobRequest interaction"
|
||||
]
|
||||
},
|
||||
"readable-streams": {
|
||||
"async-iterator.any.js": [
|
||||
"Async iterator instances should have the correct list of properties",
|
||||
"values() throws if there's already a lock",
|
||||
"return() should unlock the stream synchronously when preventCancel = false",
|
||||
"return() should unlock the stream synchronously when preventCancel = true",
|
||||
"Async-iterating a pull source manually",
|
||||
"Cancellation behavior when throwing inside loop body; preventCancel = false",
|
||||
"Cancellation behavior when throwing inside loop body; preventCancel = true",
|
||||
"Cancellation behavior when breaking inside loop body; preventCancel = false",
|
||||
"Cancellation behavior when breaking inside loop body; preventCancel = true",
|
||||
"Cancellation behavior when returning inside loop body; preventCancel = false",
|
||||
"Cancellation behavior when returning inside loop body; preventCancel = true",
|
||||
"Cancellation behavior when manually calling return(); preventCancel = false",
|
||||
"Cancellation behavior when manually calling return(); preventCancel = true",
|
||||
"next() rejects if the stream errors",
|
||||
"return() does not rejects if the stream has not errored yet",
|
||||
"return() rejects if the stream has errored",
|
||||
"next() that succeeds; next() that reports an error; next()"
|
||||
],
|
||||
"bad-strategies.any.js": true,
|
||||
"bad-underlying-sources.any.js": true,
|
||||
"cancel.any.js": false,
|
||||
"constructor.any.js": false,
|
||||
"count-queuing-strategy-integration.any.js": true,
|
||||
"default-reader.any.js": true,
|
||||
"floating-point-total-queue-size.any.js": true,
|
||||
"garbage-collection.any.js": true,
|
||||
"general.any.js": true,
|
||||
"patched-global.any.js": true,
|
||||
"reentrant-strategies.any.js": true,
|
||||
"tee.any.js": true,
|
||||
"templated.any.js": [
|
||||
"ReadableStream (empty) reader: canceling via the stream should fail"
|
||||
]
|
||||
},
|
||||
"transform-streams": {
|
||||
"backpressure.any.js": true,
|
||||
"errors.any.js": true,
|
||||
"flush.any.js": true,
|
||||
"general.any.js": true,
|
||||
"lipfuzz.any.js": true,
|
||||
"patched-global.any.js": false,
|
||||
"properties.any.js": true,
|
||||
"reentrant-strategies.any.js": true,
|
||||
"strategies.any.js": true,
|
||||
"terminate.any.js": [
|
||||
"controller.terminate() inside flush() should not prevent writer.close() from succeeding"
|
||||
]
|
||||
},
|
||||
"writable-streams": {
|
||||
"aborting.any.js": false,
|
||||
"bad-strategies.any.js": [
|
||||
"reject any non-function value for strategy.size",
|
||||
"Writable stream: invalid size beats invalid highWaterMark"
|
||||
],
|
||||
"bad-underlying-sinks.any.js": true,
|
||||
"byte-length-queuing-strategy.any.js": true,
|
||||
"close.any.js": false,
|
||||
"constructor.any.js": [
|
||||
"underlyingSink argument should be converted after queuingStrategy argument",
|
||||
"WritableStreamDefaultController constructor should throw",
|
||||
"WritableStreamDefaultController constructor should throw when passed an initialised WritableStream",
|
||||
"WritableStreamDefaultWriter should throw unless passed a WritableStream"
|
||||
],
|
||||
"count-queuing-strategy.any.js": true,
|
||||
"error.any.js": true,
|
||||
"floating-point-total-queue-size.any.js": true,
|
||||
"general.any.js": true,
|
||||
"properties.any.js": true,
|
||||
"reentrant-strategy.any.js": true,
|
||||
"start.any.js": true,
|
||||
"write.any.js": true
|
||||
}
|
||||
},
|
||||
"user-timing": {
|
||||
"buffered-flag.any.js": false,
|
||||
"case-sensitivity.any.js": false,
|
||||
"clear_all_marks.any.js": true,
|
||||
"clear_all_measures.any.js": true,
|
||||
"clear_non_existent_mark.any.js": true,
|
||||
"clear_non_existent_measure.any.js": true,
|
||||
"clear_one_mark.any.js": true,
|
||||
"clear_one_measure.any.js": true,
|
||||
"entry_type.any.js": true,
|
||||
"idlharness.any.js": false,
|
||||
"mark-entry-constructor.any.js": true,
|
||||
"mark-errors.any.js": true,
|
||||
"mark-l3.any.js": false,
|
||||
"mark-measure-return-objects.any.js": true,
|
||||
"mark.any.js": true,
|
||||
"measure-l3.any.js": true,
|
||||
"measure-with-dict.any.js": [
|
||||
"measure entries' detail and start/end are customizable"
|
||||
],
|
||||
"measure_syntax_err.any.js": true,
|
||||
"structured-serialize-detail.any.js": true,
|
||||
"supported-usertiming-types.any.js": false,
|
||||
"user_timing_exists.any.js": true
|
||||
},
|
||||
"wasm": {
|
||||
"jsapi": {
|
||||
"constructor": {
|
||||
"compile.any.js": true,
|
||||
"instantiate-bad-imports.any.js": false,
|
||||
"instantiate.any.js": [
|
||||
"Synchronous options handling: Buffer argument"
|
||||
],
|
||||
"multi-value.any.js": true,
|
||||
"toStringTag.any.js": true,
|
||||
"validate.any.js": true
|
||||
},
|
||||
"global": {
|
||||
"constructor.any.js": true,
|
||||
"toString.any.js": true,
|
||||
"type.tentative.any.js": false,
|
||||
"value-get-set.any.js": true,
|
||||
"valueOf.any.js": true
|
||||
},
|
||||
"idlharness.any.js": false,
|
||||
"instance": {
|
||||
"constructor-bad-imports.any.js": false,
|
||||
"constructor-caching.any.js": true,
|
||||
"constructor.any.js": true,
|
||||
"exports.any.js": [
|
||||
"Setting (sloppy mode)"
|
||||
],
|
||||
"toString.any.js": true
|
||||
},
|
||||
"interface.any.js": [
|
||||
"WebAssembly: property descriptor"
|
||||
],
|
||||
"memory": {
|
||||
"buffer.any.js": [
|
||||
"Setting (sloppy mode)"
|
||||
],
|
||||
"constructor.any.js": true,
|
||||
"grow.any.js": [
|
||||
"Growing shared memory does not detach old buffer"
|
||||
],
|
||||
"toString.any.js": true,
|
||||
"type.tentative.any.js": false
|
||||
},
|
||||
"module": {
|
||||
"constructor.any.js": true,
|
||||
"customSections.any.js": true,
|
||||
"exports.any.js": true,
|
||||
"imports.any.js": true,
|
||||
"toString.any.js": true
|
||||
},
|
||||
"prototypes.any.js": false,
|
||||
"table": {
|
||||
"constructor.any.js": true,
|
||||
"get-set.any.js": true,
|
||||
"grow.any.js": true,
|
||||
"length.any.js": [
|
||||
"Setting (sloppy mode)"
|
||||
],
|
||||
"toString.any.js": true
|
||||
}
|
||||
},
|
||||
"serialization": {
|
||||
"arraybuffer": {
|
||||
"transfer.window.js": false
|
||||
},
|
||||
"module": {
|
||||
"nested-worker-success.any.js": false,
|
||||
"serialization-via-idb.any.js": false,
|
||||
"serialization-via-notifications-api.any.js": false
|
||||
}
|
||||
},
|
||||
"webapi": {
|
||||
"abort.any.js": false,
|
||||
"body.any.js": true,
|
||||
"contenttype.any.js": [
|
||||
"Response with Content-Type \"application/wasm\": compileStreaming",
|
||||
"Response with Content-Type \"application/wasm\": instantiateStreaming",
|
||||
"Response with Content-Type \"APPLICATION/wasm\": compileStreaming",
|
||||
"Response with Content-Type \"APPLICATION/wasm\": instantiateStreaming",
|
||||
"Response with Content-Type \"APPLICATION/WASM\": compileStreaming",
|
||||
"Response with Content-Type \"APPLICATION/WASM\": instantiateStreaming"
|
||||
],
|
||||
"empty-body.any.js": false,
|
||||
"historical.any.js": false,
|
||||
"idlharness.any.js": false,
|
||||
"instantiateStreaming-bad-imports.any.js": [
|
||||
"Importing a function with an incorrectly-typed value: undefined",
|
||||
"Importing a function with an incorrectly-typed value: null",
|
||||
"Importing a function with an incorrectly-typed value: true",
|
||||
"Importing a function with an incorrectly-typed value: \"\"",
|
||||
"Importing a function with an incorrectly-typed value: symbol \"Symbol()\"",
|
||||
"Importing a function with an incorrectly-typed value: 1",
|
||||
"Importing a function with an incorrectly-typed value: 0.1",
|
||||
"Importing a function with an incorrectly-typed value: NaN",
|
||||
"Importing a function with an incorrectly-typed value: object \"[object Object]\"",
|
||||
"Importing an i32 global with an incorrectly-typed value: undefined",
|
||||
"Importing an i32 global with an incorrectly-typed value: null",
|
||||
"Importing an i32 global with an incorrectly-typed value: true",
|
||||
"Importing an i32 global with an incorrectly-typed value: \"\"",
|
||||
"Importing an i32 global with an incorrectly-typed value: symbol \"Symbol()\"",
|
||||
"Importing an i32 global with an incorrectly-typed value: plain object",
|
||||
"Importing an i32 global with an incorrectly-typed value: WebAssembly.Global",
|
||||
"Importing an i32 global with an incorrectly-typed value: WebAssembly.Global.prototype",
|
||||
"Importing an i32 global with an incorrectly-typed value: Object.create(WebAssembly.Global.prototype)",
|
||||
"Importing an i32 global with an incorrectly-typed value: BigInt",
|
||||
"Importing an i32 global with an incorrectly-typed value: WebAssembly.Global object (wrong value type)",
|
||||
"Importing an i64 global with an incorrectly-typed value: undefined",
|
||||
"Importing an i64 global with an incorrectly-typed value: null",
|
||||
"Importing an i64 global with an incorrectly-typed value: true",
|
||||
"Importing an i64 global with an incorrectly-typed value: \"\"",
|
||||
"Importing an i64 global with an incorrectly-typed value: symbol \"Symbol()\"",
|
||||
"Importing an i64 global with an incorrectly-typed value: plain object",
|
||||
"Importing an i64 global with an incorrectly-typed value: WebAssembly.Global",
|
||||
"Importing an i64 global with an incorrectly-typed value: WebAssembly.Global.prototype",
|
||||
"Importing an i64 global with an incorrectly-typed value: Object.create(WebAssembly.Global.prototype)",
|
||||
"Importing an i64 global with an incorrectly-typed value: Number",
|
||||
"Importing an i64 global with an incorrectly-typed value: WebAssembly.Global object (wrong value type)",
|
||||
"Importing an f32 global with an incorrectly-typed value: undefined",
|
||||
"Importing an f32 global with an incorrectly-typed value: null",
|
||||
"Importing an f32 global with an incorrectly-typed value: true",
|
||||
"Importing an f32 global with an incorrectly-typed value: \"\"",
|
||||
"Importing an f32 global with an incorrectly-typed value: symbol \"Symbol()\"",
|
||||
"Importing an f32 global with an incorrectly-typed value: plain object",
|
||||
"Importing an f32 global with an incorrectly-typed value: WebAssembly.Global",
|
||||
"Importing an f32 global with an incorrectly-typed value: WebAssembly.Global.prototype",
|
||||
"Importing an f32 global with an incorrectly-typed value: Object.create(WebAssembly.Global.prototype)",
|
||||
"Importing an f32 global with an incorrectly-typed value: BigInt",
|
||||
"Importing an f32 global with an incorrectly-typed value: WebAssembly.Global object (wrong value type)",
|
||||
"Importing an f64 global with an incorrectly-typed value: undefined",
|
||||
"Importing an f64 global with an incorrectly-typed value: null",
|
||||
"Importing an f64 global with an incorrectly-typed value: true",
|
||||
"Importing an f64 global with an incorrectly-typed value: \"\"",
|
||||
"Importing an f64 global with an incorrectly-typed value: symbol \"Symbol()\"",
|
||||
"Importing an f64 global with an incorrectly-typed value: plain object",
|
||||
"Importing an f64 global with an incorrectly-typed value: WebAssembly.Global",
|
||||
"Importing an f64 global with an incorrectly-typed value: WebAssembly.Global.prototype",
|
||||
"Importing an f64 global with an incorrectly-typed value: Object.create(WebAssembly.Global.prototype)",
|
||||
"Importing an f64 global with an incorrectly-typed value: BigInt",
|
||||
"Importing an f64 global with an incorrectly-typed value: WebAssembly.Global object (wrong value type)",
|
||||
"Importing an i32 mutable global with a primitive value",
|
||||
"Importing an i32 mutable global with an immutable Global object",
|
||||
"Importing an i64 mutable global with a primitive value",
|
||||
"Importing an i64 mutable global with an immutable Global object",
|
||||
"Importing an f32 mutable global with a primitive value",
|
||||
"Importing an f32 mutable global with an immutable Global object",
|
||||
"Importing an f64 mutable global with a primitive value",
|
||||
"Importing an f64 mutable global with an immutable Global object",
|
||||
"Importing memory with an incorrectly-typed value: undefined",
|
||||
"Importing memory with an incorrectly-typed value: null",
|
||||
"Importing memory with an incorrectly-typed value: true",
|
||||
"Importing memory with an incorrectly-typed value: \"\"",
|
||||
"Importing memory with an incorrectly-typed value: symbol \"Symbol()\"",
|
||||
"Importing memory with an incorrectly-typed value: 1",
|
||||
"Importing memory with an incorrectly-typed value: 0.1",
|
||||
"Importing memory with an incorrectly-typed value: NaN",
|
||||
"Importing memory with an incorrectly-typed value: plain object",
|
||||
"Importing memory with an incorrectly-typed value: WebAssembly.Memory",
|
||||
"Importing memory with an incorrectly-typed value: WebAssembly.Memory.prototype",
|
||||
"Importing memory with an incorrectly-typed value: Object.create(WebAssembly.Memory.prototype)",
|
||||
"Importing memory with an incorrectly-typed value: WebAssembly.Memory object (too large)",
|
||||
"Importing table with an incorrectly-typed value: undefined",
|
||||
"Importing table with an incorrectly-typed value: null",
|
||||
"Importing table with an incorrectly-typed value: true",
|
||||
"Importing table with an incorrectly-typed value: \"\"",
|
||||
"Importing table with an incorrectly-typed value: symbol \"Symbol()\"",
|
||||
"Importing table with an incorrectly-typed value: 1",
|
||||
"Importing table with an incorrectly-typed value: 0.1",
|
||||
"Importing table with an incorrectly-typed value: NaN",
|
||||
"Importing table with an incorrectly-typed value: plain object",
|
||||
"Importing table with an incorrectly-typed value: WebAssembly.Table",
|
||||
"Importing table with an incorrectly-typed value: WebAssembly.Table.prototype",
|
||||
"Importing table with an incorrectly-typed value: Object.create(WebAssembly.Table.prototype)",
|
||||
"Importing table with an incorrectly-typed value: WebAssembly.Table object (too large)"
|
||||
],
|
||||
"instantiateStreaming.any.js": false,
|
||||
"invalid-args.any.js": true,
|
||||
"invalid-code.any.js": false,
|
||||
"modified-contenttype.any.js": [
|
||||
"compileStreaming with Content-Type set late",
|
||||
"instantiateStreaming with Content-Type set late"
|
||||
],
|
||||
"origin.sub.any.js": true,
|
||||
"rejected-arg.any.js": true,
|
||||
"status.any.js": true
|
||||
}
|
||||
},
|
||||
"WebIDL": {
|
||||
"ecmascript-binding": {
|
||||
"es-exceptions": {
|
||||
"DOMException-constants.any.js": true,
|
||||
"DOMException-constructor-and-prototype.any.js": true,
|
||||
"DOMException-constructor-behavior.any.js": true,
|
||||
"DOMException-custom-bindings.any.js": [
|
||||
"does not inherit from Error: class-side"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
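The expectation values in this file follow a simple contract, visible in `getExpectFailForCase` from `tools/wpt/utils.ts` later in this commit: `true` marks every case in a test file as expected to pass, `false` marks the whole file as expected to fail, and an array lists exactly the case names expected to fail. A minimal sketch of how such an entry is consumed (the entry value and case names below are illustrative, not taken from the file above):

```ts
// Hedged sketch of turning one expectation entry into an "expected to fail"
// decision for a single test case (mirrors getExpectFailForCase in utils.ts).
type ExpectationValue = boolean | string[];

function expectFail(expectation: ExpectationValue, caseName: string): boolean {
  if (typeof expectation === "boolean") return !expectation;
  return expectation.includes(caseName);
}

// Illustrative entry, same shape as "textdecoder-fatal-streaming.any.js" above.
const entry: ExpectationValue = ["Fatal flag, streaming cases"];
console.log(expectFail(entry, "Fatal flag, streaming cases")); // true: expected to fail
console.log(expectFail(entry, "some other case"));             // false: expected to pass
```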
133
tools/wpt/runner.ts
Normal file
@@ -0,0 +1,133 @@
import { delay, join, readLines, ROOT_PATH } from "../util.js";
import { assert, ManifestTestOptions, release, runPy } from "./utils.ts";
import { DOMParser } from "https://deno.land/x/deno_dom@v0.1.3-alpha2/deno-dom-wasm.ts";

export async function runWithTestUtil<T>(
  verbose: boolean,
  f: () => Promise<T>,
): Promise<T> {
  const proc = runPy(["wpt", "serve"], {
    stdout: verbose ? "inherit" : "piped",
    stderr: verbose ? "inherit" : "piped",
  });

  const start = performance.now();
  while (true) {
    await delay(1000);
    try {
      const req = await fetch("http://localhost:8000/");
      await req.body?.cancel();
      if (req.status == 200) {
        break;
      }
    } catch (err) {
      // do nothing if this fails
    }
    const passedTime = performance.now() - start;
    if (passedTime > 15000) {
      throw new Error("Timed out while trying to start wpt test util.");
    }
  }

  if (verbose) console.log(`Started wpt test util.`);

  try {
    return await f();
  } finally {
    if (verbose) console.log("Killing wpt test util.");
    proc.kill(2);
    await proc.status();
    proc.close();
  }
}

export interface TestResult {
  cases: TestCaseResult[];
  status: number;
  stderr: string;
}

export interface TestCaseResult {
  name: string;
  passed: boolean;
  status: number;
  message: string | null;
  stack: string | null;
}

export async function runSingleTest(
  url: URL,
  options: ManifestTestOptions,
  reporter: (result: TestCaseResult) => void,
): Promise<TestResult> {
  const bundle = await generateBundle(url);
  const tempFile = await Deno.makeTempFile({
    prefix: "wpt-bundle-",
    suffix: ".js",
  });
  await Deno.writeTextFile(tempFile, bundle);

  const proc = Deno.run({
    cmd: [
      join(ROOT_PATH, `./target/${release ? "release" : "debug"}/deno`),
      "run",
      "-A",
      "--location",
      url.toString(),
      tempFile,
      "[]",
    ],
    env: {
      NO_COLOR: "1",
    },
    stdout: "null",
    stderr: "piped",
  });

  const cases = [];
  let stderr = "";

  const lines = readLines(proc.stderr);
  for await (const line of lines) {
    if (line.startsWith("{")) {
      const data = JSON.parse(line);
      const result = { ...data, passed: data.status == 0 };
      cases.push(result);
      reporter(result);
    } else {
      stderr += line + "\n";
    }
  }

  const { code } = await proc.status();
  return {
    status: code,
    cases,
    stderr,
  };
}

async function generateBundle(location: URL): Promise<string> {
  const res = await fetch(location);
  const body = await res.text();
  const doc = new DOMParser().parseFromString(body, "text/html");
  assert(doc, "document should have been parsed");
  const scripts = doc.getElementsByTagName("script");
  const scriptContents = [];
  for (const script of scripts) {
    const src = script.getAttribute("src");
    if (src === "/resources/testharnessreport.js") {
      scriptContents.push(
        await Deno.readTextFile(
          join(ROOT_PATH, "./tools/wpt/testharnessreport.js"),
        ),
      );
    } else if (src) {
      const res = await fetch(new URL(src, location));
      scriptContents.push(await res.text());
    } else {
      scriptContents.push(script.textContent);
    }
  }
  return scriptContents.join("\n");
}
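For context, a hedged usage sketch of the two exports above: `runWithTestUtil` boots the WPT server via `wpt serve` and tears it down afterwards, while `runSingleTest` bundles one test page and runs it in a freshly spawned `deno` process. The import path, URL, and orchestration below are assumptions for illustration, not the actual entry-point logic:

```ts
// Hypothetical driver for the runner above (paths and URL are illustrative).
import { runSingleTest, runWithTestUtil } from "./runner.ts";

const result = await runWithTestUtil(true, () =>
  runSingleTest(
    new URL("http://localhost:8000/hr-time/monotonic-clock.any.html"),
    {}, // ManifestTestOptions; `name` is optional
    (c) => console.log(c.passed ? "PASS" : "FAIL", c.name),
  ));

console.log(`deno exited with ${result.status}; ${result.cases.length} cases reported`);
```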
12
tools/wpt/testharnessreport.js
Normal file
@@ -0,0 +1,12 @@
window.add_result_callback(({ message, name, stack, status }) => {
  Deno.writeAllSync(
    Deno.stderr,
    new TextEncoder().encode(
      `${JSON.stringify({ name, status, message, stack })}\n`,
    ),
  );
});

window.add_completion_callback((tests, harnessStatus) => {
  Deno.exit(0);
});
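This shim is the contract between the test page and `runner.ts`: every finished test case is written to stderr as one JSON object per line, and the process exits once the harness reports completion. A small sketch of that line protocol, with invented sample values:

```ts
// One result line as emitted by testharnessreport.js above (values invented),
// parsed the same way runner.ts turns it into a TestCaseResult.
const line = '{"name":"monotonic clock","status":0,"message":null,"stack":null}';

if (line.startsWith("{")) {
  const data = JSON.parse(line);
  const result = { ...data, passed: data.status === 0 };
  console.log(result); // { name: "monotonic clock", status: 0, ..., passed: true }
}
```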
168
tools/wpt/utils.ts
Normal file
@@ -0,0 +1,168 @@
/// FLAGS

import { parse } from "https://deno.land/std@0.84.0/flags/mod.ts";
import { join, ROOT_PATH } from "../util.js";

export const {
  json,
  quiet,
  release,
  ["--"]: rest,
  ["auto-config"]: autoConfig,
} = parse(Deno.args, {
  "--": true,
  boolean: ["quiet", "release", "no-interactive"],
  string: ["json"],
});

/// PAGE ROOT

/// WPT TEST MANIFEST

export interface Manifest {
  items: {
    testharness: ManifestFolder;
  };
}
export interface ManifestFolder {
  [key: string]: ManifestFolder | ManifestTest;
}
export type ManifestTest = [
  hash: string,
  ...variations: ManifestTestVariation[],
];
export type ManifestTestVariation = [
  path: string,
  options: ManifestTestOptions,
];
export interface ManifestTestOptions {
  name?: string;
}

const MANIFEST_PATH = join(ROOT_PATH, "./tools/wpt/manifest.json");

export async function updateManifest() {
  const proc = runPy(
    ["wpt", "manifest", "--tests-root", ".", "-p", MANIFEST_PATH],
    {},
  );
  const status = await proc.status();
  assert(status.success, "updating wpt manifest should succeed");
}

export function getManifest(): Manifest {
  const manifestText = Deno.readTextFileSync(MANIFEST_PATH);
  return JSON.parse(manifestText);
}

/// WPT TEST EXPECTATIONS

const EXPECTATION_PATH = join(ROOT_PATH, "./tools/wpt/expectation.json");

export interface Expectation {
  [key: string]: Expectation | boolean | string[];
}

export function getExpectation(): Expectation {
  const expectationText = Deno.readTextFileSync(EXPECTATION_PATH);
  return JSON.parse(expectationText);
}

export function saveExpectation(expectation: Expectation) {
  Deno.writeTextFileSync(
    EXPECTATION_PATH,
    JSON.stringify(expectation, undefined, " "),
  );
}

export function generateTestExpectations(filter: string[]) {
  const manifest = getManifest();

  function walk(folder: ManifestFolder, prefix: string): Expectation {
    const expectation: Expectation = {};
    for (const key in folder) {
      const path = `${prefix}/${key}`;
      const entry = folder[key];
      if (Array.isArray(entry)) {
        if (!filter.find((filter) => path.startsWith(filter))) continue;
        if (key.endsWith(".js")) {
          expectation[key] = false;
        }
      } else {
        if (!filter.find((filter) => `${path}/`.startsWith(filter))) continue;
        expectation[key] = walk(entry, path);
      }
    }
    for (const key in expectation) {
      const entry = expectation[key];
      if (typeof entry === "object") {
        if (Object.keys(expectation[key]).length === 0) {
          delete expectation[key];
        }
      }
    }
    return expectation;
  }

  return walk(manifest.items.testharness, "");
}

export function getExpectFailForCase(
  expectation: boolean | string[],
  caseName: string,
): boolean {
  if (typeof expectation == "boolean") {
    return !expectation;
  }
  return expectation.includes(caseName);
}

/// UTILS

class AssertionError extends Error {
  name = "AssertionError";
  constructor(message: string) {
    super(message);
  }
}

export function assert(condition: unknown, message: string): asserts condition {
  if (!condition) {
    throw new AssertionError(message);
  }
}

export function runPy(
  args: string[],
  options: Omit<Omit<Deno.RunOptions, "cmd">, "cwd">,
): Deno.Process {
  const cmd = Deno.build.os == "windows" ? "python.exe" : "python3";
  return Deno.run({
    cmd: [cmd, ...args],
    cwd: join(ROOT_PATH, "./test_util/wpt/"),
    ...options,
  });
}

export async function checkPy3Available() {
  const proc = runPy(["--version"], { stdout: "piped" });
  const status = await proc.status();
  assert(status.success, "failed to run python --version");
  const output = new TextDecoder().decode(await proc.output());
  assert(
    output.includes("Python 3."),
    `The ${
      Deno.build.os == "windows" ? "python.exe" : "python3"
    } in your path is not Python 3.`,
  );
}

export async function cargoBuild() {
  const proc = Deno.run({
    cmd: ["cargo", "build", ...(release ? ["--release"] : [])],
    cwd: ROOT_PATH,
  });
  const status = await proc.status();
  proc.close();
  assert(status.success, "cargo build failed");
}
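Taken together, these helpers cover the setup side of the runner: verify Python 3 is available, build the `deno` binary, and regenerate the WPT manifest before any tests run. A hedged sketch of that flow (the ordering and import path are assumptions; the actual setup command may differ):

```ts
// Hypothetical setup flow composed from the utilities above.
import {
  cargoBuild,
  checkPy3Available,
  getManifest,
  updateManifest,
} from "./utils.ts";

await checkPy3Available(); // assert python3 / python.exe is Python 3
await cargoBuild();        // build ./target/{debug,release}/deno
await updateManifest();    // run `wpt manifest` to refresh tools/wpt/manifest.json
const manifest = getManifest();
console.log(Object.keys(manifest.items.testharness).length, "top-level manifest entries");
```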