fix: align Encoding APIs to spec using WPT #9004

Merged
merged 5 commits on Jan 5, 2021
7 changes: 4 additions & 3 deletions cli/tests/WPT.md
@@ -3,9 +3,10 @@
 The WPT are test suites for Web platform specs, like Fetch, WHATWG Streams, or
 console. Deno is able to run most `.any.js` and `.window.js` web platform tests.
 
-This directory contains a `wpt.json` file that is used to configure our WPT test
-runner. You can use this json file to set which WPT suites to run, and which
-tests we expect to fail (due to bugs or because they are out of scope for Deno).
+This directory contains a `wpt.jsonc` file that is used to configure our WPT
+test runner. You can use this json file to set which WPT suites to run, and
+which tests we expect to fail (due to bugs or because they are out of scope for
+Deno).
 
 To include a new test file to run, add it to the array of test files for the
 corresponding suite. For example we want to enable
130 changes: 89 additions & 41 deletions cli/tests/integration_tests.rs
@@ -4919,11 +4919,15 @@ fn standalone_runtime_flags() {
     .contains("PermissionDenied: write access"));
 }
 
-fn concat_bundle(files: Vec<(PathBuf, String)>, bundle_path: &Path) -> String {
+fn concat_bundle(
+  files: Vec<(PathBuf, String)>,
+  bundle_path: &Path,
+  init: String,
+) -> String {
   let bundle_url = url::Url::from_file_path(bundle_path).unwrap().to_string();
 
-  let mut bundle = String::new();
-  let mut bundle_line_count = 0;
+  let mut bundle = init.clone();
+  let mut bundle_line_count = init.lines().count() as u32;
   let mut source_map = sourcemap::SourceMapBuilder::new(Some(&bundle_url));
 
   for (path, text) in files {
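The new `init` parameter lets a caller prepend setup code ahead of the concatenated sources while keeping the source map aligned, since `bundle_line_count` now starts after the prepended lines. A minimal sketch of a call, assuming the `concat_bundle` above is in scope; the file content and the `?default` variant string are illustrative:

```rust
use std::io::Write;
use std::path::PathBuf;

fn demo() {
    // One in-memory source file to bundle (illustrative content).
    let files: Vec<(PathBuf, String)> = vec![(
        PathBuf::from("example.any.js"),
        "test(() => {}, \"example\");".to_string(),
    )];
    let mut file = tempfile::Builder::new()
        .prefix("wpt-bundle-")
        .suffix(".js")
        .tempfile()
        .unwrap();
    // init is a single line, so every bundled source is shifted down by
    // exactly one line in the generated source map.
    let init = "window.location = {search: \"?default\"};\n".to_string();
    let bundle = concat_bundle(files, file.path(), init);
    file.write_all(bundle.as_bytes()).unwrap();
}
```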
@@ -4963,6 +4967,35 @@ fn concat_bundle(files: Vec<(PathBuf, String)>, bundle_path: &Path) -> String {
   bundle
 }
 
+// TODO(lucacasonato): DRY with tsc_config.rs
+/// Convert a jsonc libraries `JsonValue` to a serde `Value`.
+fn jsonc_to_serde(j: jsonc_parser::JsonValue) -> serde_json::Value {
+  use jsonc_parser::JsonValue;
+  use serde_json::Value;
+  use std::str::FromStr;
+  match j {
+    JsonValue::Array(arr) => {
+      let vec = arr.into_iter().map(jsonc_to_serde).collect();
+      Value::Array(vec)
+    }
+    JsonValue::Boolean(bool) => Value::Bool(bool),
+    JsonValue::Null => Value::Null,
+    JsonValue::Number(num) => {
+      let number =
+        serde_json::Number::from_str(&num).expect("could not parse number");
+      Value::Number(number)
+    }
+    JsonValue::Object(obj) => {
+      let mut map = serde_json::map::Map::new();
+      for (key, json_value) in obj.into_iter() {
+        map.insert(key, jsonc_to_serde(json_value));
+      }
+      Value::Object(map)
+    }
+    JsonValue::String(str) => Value::String(str),
+  }
+}
+
 #[test]
 fn web_platform_tests() {
   use deno_core::serde::Deserialize;
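This helper exists because `wpt.jsonc` now carries comments, which `serde_json::from_str` cannot parse directly. A minimal usage sketch, assuming `jsonc_to_serde` above is in scope and the `jsonc_parser` and `serde_json` crates are available; the config shape is illustrative:

```rust
fn demo() {
    let text = r#"{
      // comments are the reason plain serde_json::from_str no longer works
      "encoding": ["api-basics", "api-invalid-label"]
    }"#;
    // parse_to_value returns Result<Option<JsonValue>, _>, hence the two unwraps.
    let jsonc = jsonc_parser::parse_to_value(text).unwrap().unwrap();
    // Convert to a serde_json::Value, then deserialize into a typed map.
    let config: std::collections::HashMap<String, Vec<String>> =
        serde_json::from_value(jsonc_to_serde(jsonc)).unwrap();
    assert_eq!(config["encoding"].len(), 2);
}
```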
@@ -4979,9 +5012,10 @@ fn web_platform_tests() {
   }
 
   let text =
-    std::fs::read_to_string(util::tests_path().join("wpt.json")).unwrap();
+    std::fs::read_to_string(util::tests_path().join("wpt.jsonc")).unwrap();
+  let jsonc = jsonc_parser::parse_to_value(&text).unwrap().unwrap();
   let config: std::collections::HashMap<String, Vec<WptConfig>> =
-    deno_core::serde_json::from_str(&text).unwrap();
+    deno_core::serde_json::from_value(jsonc_to_serde(jsonc)).unwrap();
 
   for (suite_name, includes) in config.into_iter() {
     let suite_path = util::wpt_path().join(suite_name);
@@ -5037,10 +5071,8 @@
       };
       if s.starts_with('/') {
         util::wpt_path().join(format!(".{}", s))
-      } else if s.starts_with('.') {
-        test_file_path.parent().unwrap().join(s)
       } else {
-        PathBuf::from(s)
+        test_file_path.parent().unwrap().join(s)
       }
     })
     .map(|path| {
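The simplified rule above leaves two cases: an absolute script path is rooted at the WPT checkout, and everything else, including `./`-relative names that previously had their own branch, resolves against the test file's directory. A standalone sketch with illustrative paths:

```rust
use std::path::{Path, PathBuf};

// Mirrors the two-case rule above; the old PathBuf::from(s) fallback for
// bare names is gone.
fn resolve(wpt_root: &Path, test_file: &Path, s: &str) -> PathBuf {
    if s.starts_with('/') {
        // Prefix with "." so join() treats the path as relative to the root.
        wpt_root.join(format!(".{}", s))
    } else {
        test_file.parent().unwrap().join(s)
    }
}

fn demo() {
    // Illustrative paths, not taken from the PR.
    let wpt = Path::new("test_util/wpt");
    let test = Path::new("test_util/wpt/encoding/api-basics.any.js");
    assert_eq!(
        resolve(wpt, test, "/common/example.js"),
        Path::new("test_util/wpt/common/example.js")
    );
    assert_eq!(
        resolve(wpt, test, "resources/helpers.js"),
        Path::new("test_util/wpt/encoding/resources/helpers.js")
    );
}
```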
@@ -5049,40 +5081,56 @@
       })
       .collect();
 
-    let mut files = Vec::with_capacity(3 + imports.len());
-    files.push((testharness_path.clone(), testharness_text.clone()));
-    files.push((
-      testharnessreporter_path.clone(),
-      testharnessreporter_text.clone(),
-    ));
-    files.extend(imports);
-    files.push((test_file_path.clone(), test_file_text));
-
-    let mut file = tempfile::Builder::new()
-      .prefix("wpt-bundle-")
-      .suffix(".js")
-      .rand_bytes(5)
-      .tempfile()
-      .unwrap();
-
-    let bundle = concat_bundle(files, file.path());
-    file.write_all(bundle.as_bytes()).unwrap();
-
-    let child = util::deno_cmd()
-      .current_dir(test_file_path.parent().unwrap())
-      .arg("run")
-      .arg("-A")
-      .arg(file.path())
-      .arg(deno_core::serde_json::to_string(&expect_fail).unwrap())
-      .stdin(std::process::Stdio::piped())
-      .spawn()
-      .unwrap();
-
-    let output = child.wait_with_output().unwrap();
-    if !output.status.success() {
-      file.keep().unwrap();
+    let mut variants: Vec<&str> = test_file_text
+      .split('\n')
+      .into_iter()
+      .filter_map(|t| t.strip_prefix("// META: variant="))
+      .collect();
+
+    if variants.is_empty() {
+      variants.push("");
+    }
+
+    for variant in variants {
+      let mut files = Vec::with_capacity(3 + imports.len());
+      files.push((testharness_path.clone(), testharness_text.clone()));
+      files.push((
+        testharnessreporter_path.clone(),
+        testharnessreporter_text.clone(),
+      ));
+      files.extend(imports.clone());
+      files.push((test_file_path.clone(), test_file_text.clone()));
+
+      let mut file = tempfile::Builder::new()
+        .prefix("wpt-bundle-")
+        .suffix(".js")
+        .rand_bytes(5)
+        .tempfile()
+        .unwrap();
+
+      let bundle = concat_bundle(
+        files,
+        file.path(),
+        format!("window.location = {{search: \"{}\"}};\n", variant),
+      );
+      file.write_all(bundle.as_bytes()).unwrap();
+
+      let child = util::deno_cmd()
+        .current_dir(test_file_path.parent().unwrap())
+        .arg("run")
+        .arg("-A")
+        .arg(file.path())
+        .arg(deno_core::serde_json::to_string(&expect_fail).unwrap())
+        .stdin(std::process::Stdio::piped())
+        .spawn()
+        .unwrap();
+
+      let output = child.wait_with_output().unwrap();
+      if !output.status.success() {
+        file.keep().unwrap();
+      }
+      assert!(output.status.success());
     }
-    assert!(output.status.success());
   }
 }
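For context, a WPT `.any.js` test can declare variants in `// META: variant=` header lines; the loop above runs the bundle once per declared variant, seeding `window.location.search` with the variant string (or once with an empty string when no variants are declared). A standalone sketch of the parsing step, using a hypothetical test header:

```rust
fn demo() {
    // Hypothetical WPT test file; the variant strings are illustrative.
    let test_file_text = "\
// META: variant=?default
// META: variant=?wss
test(() => {}, \"example\");
";
    // Keep only the header lines that declare a variant.
    let mut variants: Vec<&str> = test_file_text
        .split('\n')
        .filter_map(|t| t.strip_prefix("// META: variant="))
        .collect();
    if variants.is_empty() {
        // No variants declared: run the test once with an empty search string.
        variants.push("");
    }
    assert_eq!(variants, vec!["?default", "?wss"]);
}
```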
26 changes: 1 addition & 25 deletions cli/tests/unit/text_encoding_test.ts
@@ -69,31 +69,7 @@ unitTest(function textDecoder2(): void {
   assertEquals(decoder.decode(fixture), "𝓽𝓮𝔁𝓽");
 });
 
-unitTest(function textDecoderIgnoreBOM(): void {
-  // deno-fmt-ignore
-  const fixture = new Uint8Array([
-    0xef, 0xbb, 0xbf,
-    0xf0, 0x9d, 0x93, 0xbd,
-    0xf0, 0x9d, 0x93, 0xae,
-    0xf0, 0x9d, 0x94, 0x81,
-    0xf0, 0x9d, 0x93, 0xbd
-  ]);
-  const decoder = new TextDecoder("utf-8", { ignoreBOM: true });
-  assertEquals(decoder.decode(fixture), "𝓽𝓮𝔁𝓽");
-});
-
-unitTest(function textDecoderNotBOM(): void {
-  // deno-fmt-ignore
-  const fixture = new Uint8Array([
-    0xef, 0xbb, 0x89,
-    0xf0, 0x9d, 0x93, 0xbd,
-    0xf0, 0x9d, 0x93, 0xae,
-    0xf0, 0x9d, 0x94, 0x81,
-    0xf0, 0x9d, 0x93, 0xbd
-  ]);
-  const decoder = new TextDecoder("utf-8", { ignoreBOM: true });
-  assertEquals(decoder.decode(fixture), "ﻉ𝓽𝓮𝔁𝓽");
-});
+// ignoreBOM is tested through WPT
 
 unitTest(function textDecoderASCII(): void {
   const fixture = new Uint8Array([0x89, 0x95, 0x9f, 0xbf]);
12 changes: 0 additions & 12 deletions cli/tests/wpt.json

This file was deleted.

138 changes: 138 additions & 0 deletions cli/tests/wpt.jsonc
@@ -0,0 +1,138 @@
{
  "streams": [
    {
      "name": "readable-streams/general",
      "expectFail": [
        "ReadableStream can't be constructed with an invalid type",
        "default ReadableStream getReader() should only accept mode:undefined"
      ]
    },
    "writable-streams/general"
  ],
  "encoding": [
    {
      "name": "api-basics",
      "expectFail": [
        // TODO(lucacasonato): enable when we support utf-16
        "Decode sample: utf-16le",
        "Decode sample: utf-16be",
        "Decode sample: utf-16"
      ]
    },
    "api-invalid-label",
    "api-replacement-encodings",
    "api-surrogates-utf8",
    // TODO(lucacasonato): enable encodeInto. It is buggy at the moment.
    // "encodeInto",
    // TODO(lucacasonato): enable when we support iso-2022-jp
    // "iso-2022-jp-decoder",
    // TODO(lucacasonato): uses XMLHttpRequest unnecessarily. should be fixed upstream before enabling
    // "replacement-encodings",
    {
      "name": "textdecoder-byte-order-marks",
      "expectFail": [
        // TODO(lucacasonato): enable when we support utf-16
        "Byte-order marks: utf-16le",
        "Byte-order marks: utf-16be"
      ]
    },
    {
      "name": "textdecoder-copy",
      "expectFail": [
        // TODO(lucacasonato): enable when we have stream support
        "Modify buffer after passing it in (ArrayBuffer)",
        "Modify buffer after passing it in (SharedArrayBuffer)"
      ]
    },
    "textdecoder-fatal-single-byte",
    {
      "name": "textdecoder-fatal.",
      // TODO(lucacasonato): enable when we support utf-16
      "expectFail": ["Fatal flag: utf-16le - truncated code unit"]
    },
    {
      "name": "textdecoder-ignorebom",
      "expectFail": [
        // TODO(lucacasonato): enable when we support utf-16
        "BOM is ignored if ignoreBOM option is specified: utf-16le",
        "BOM is ignored if ignoreBOM option is specified: utf-16be"
      ]
    },
    {
      "name": "textdecoder-labels",
      "expectFail": [
        "chinese => GBK",
        "csgb2312 => GBK",
        "csiso58gb231280 => GBK",
        "gb2312 => GBK",
        "gb_2312 => GBK",
        "gb_2312-80 => GBK",
        "gbk => GBK",
        "iso-ir-58 => GBK",
        "x-gbk => GBK",
        "gb18030 => gb18030",
        "big5 => Big5",
        "big5-hkscs => Big5",
        "cn-big5 => Big5",
        "csbig5 => Big5",
        "x-x-big5 => Big5",
        "cseucpkdfmtjapanese => EUC-JP",
        "euc-jp => EUC-JP",
        "x-euc-jp => EUC-JP",
        "csiso2022jp => ISO-2022-JP",
        "iso-2022-jp => ISO-2022-JP",
        "csshiftjis => Shift_JIS",
        "ms932 => Shift_JIS",
        "ms_kanji => Shift_JIS",
        "shift-jis => Shift_JIS",
        "shift_jis => Shift_JIS",
        "sjis => Shift_JIS",
        "windows-31j => Shift_JIS",
        "x-sjis => Shift_JIS",
        "cseuckr => EUC-KR",
        "csksc56011987 => EUC-KR",
        "euc-kr => EUC-KR",
        "iso-ir-149 => EUC-KR",
        "korean => EUC-KR",
        "ks_c_5601-1987 => EUC-KR",
        "ks_c_5601-1989 => EUC-KR",
        "ksc5601 => EUC-KR",
        "ksc_5601 => EUC-KR",
        "windows-949 => EUC-KR",
        "unicodefffe => UTF-16BE",
        "utf-16be => UTF-16BE",
        "csunicode => UTF-16LE",
        "iso-10646-ucs-2 => UTF-16LE",
        "ucs-2 => UTF-16LE",
        "unicode => UTF-16LE",
        "unicodefeff => UTF-16LE",
        "utf-16 => UTF-16LE",
        "utf-16le => UTF-16LE",
        "x-user-defined => x-user-defined"
      ]
    },
    // TODO(lucacasonato): enable when we have stream support
    // "textdecoder-streaming",
    // TODO(lucacasonato): enable when we support utf-16
    // "textdecoder-utf16-surrogates",
    {
      "name": "textencoder-constructor-non-utf",
      "expectFail": [
        "Encoding argument supported for decode: GBK",
        "Encoding argument supported for decode: gb18030",
        "Encoding argument supported for decode: Big5",
        "Encoding argument supported for decode: EUC-JP",
        "Encoding argument supported for decode: ISO-2022-JP",
        "Encoding argument supported for decode: Shift_JIS",
        "Encoding argument supported for decode: EUC-KR",
        "Encoding argument supported for decode: UTF-16BE",
        "Encoding argument supported for decode: UTF-16LE",
        "Encoding argument supported for decode: x-user-defined"
      ]
    },
    // TODO(lucacasonato): enable when we support utf-16
    // "textencoder-utf16-surrogates",
    // TODO(lucacasonato): uses XMLHttpRequest unnecessarily. should be fixed upstream before enabling
    // "unsupported-encodings",
  ]
}
9 changes: 8 additions & 1 deletion cli/tests/wpt_testharnessconsolereporter.js
@@ -29,9 +29,16 @@ export function yellow(str) {
 
 const testResults = [];
 const testsExpectFail = JSON.parse(Deno.args[0]);
+function shouldExpectFail(name) {
+  if (testsExpectFail.includes(name)) return true;
+  for (const expectFail of testsExpectFail) {
+    if (name.startsWith(expectFail)) return true;
+  }
+  return false;
+}
 
 window.add_result_callback(({ message, name, stack, status }) => {
-  const expectFail = testsExpectFail.includes(name);
+  const expectFail = shouldExpectFail(name);
   let simpleMessage = `test ${name} ... `;
   switch (status) {
     case 0:
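The practical effect is that an `expectFail` entry now also covers longer, suffixed test names via prefix matching, so one entry can match all variant runs of a test. A Rust paraphrase of the matcher, not part of the PR, which implements it in JavaScript above; the strings come from the config earlier in this diff:

```rust
// Exact match or prefix match, mirroring shouldExpectFail above.
fn should_expect_fail(name: &str, expect_fail: &[&str]) -> bool {
    expect_fail.iter().any(|e| *e == name || name.starts_with(*e))
}

fn demo() {
    let expect_fail = ["Decode sample: utf-16"];
    assert!(should_expect_fail("Decode sample: utf-16", &expect_fail));
    // Prefix matching also covers the longer name.
    assert!(should_expect_fail("Decode sample: utf-16le", &expect_fail));
    assert!(!should_expect_fail("Decode sample: utf-8", &expect_fail));
}
```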