test,streams: add compression tests to the WPT test suite
panva committed Nov 8, 2023
1 parent 06c4b12 commit fedc480
Showing 32 changed files with 1,433 additions and 1 deletion.
3 changes: 2 additions & 1 deletion test/common/wpt.js
@@ -210,6 +210,7 @@ class ResourceLoader {
const data = await fsPromises.readFile(file);
return {
ok: true,
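// Presumably added for the new compression WPT tests, which fetch a fixture
// and call response.arrayBuffer() on the result (see the output-length test below).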
arrayBuffer() { return data.buffer; },
json() { return JSON.parse(data.toString()); },
text() { return data.toString(); },
};
@@ -440,7 +441,7 @@ class StatusLoader {
const list = this.grep(filepath);
result = result.concat(list);
} else {
if (!(/\.\w+\.js$/.test(filepath)) || filepath.endsWith('.helper.js')) {
if (!(/\.\w+\.js$/.test(filepath))) {
continue;
}
result.push(filepath);
1 change: 1 addition & 0 deletions test/fixtures/wpt/README.md
@@ -11,6 +11,7 @@ See [test/wpt](../../wpt/README.md) for information on how these tests are run.
Last update:

- common: https://github.com/web-platform-tests/wpt/tree/dbd648158d/common
- compression: https://github.com/web-platform-tests/wpt/tree/c82521cfa5/compression
- console: https://github.com/web-platform-tests/wpt/tree/767ae35464/console
- dom/abort: https://github.com/web-platform-tests/wpt/tree/d1f1ecbd52/dom/abort
- dom/events: https://github.com/web-platform-tests/wpt/tree/ab8999891c/dom/events
3 changes: 3 additions & 0 deletions test/fixtures/wpt/compression/META.yml
@@ -0,0 +1,3 @@
spec: https://wicg.github.io/compression/
suggested_reviewers:
- ricea
@@ -0,0 +1,74 @@
// META: global=window,worker,shadowrealm

'use strict';

const badChunks = [
{
name: 'undefined',
value: undefined
},
{
name: 'null',
value: null
},
{
name: 'numeric',
value: 3.14
},
{
name: 'object, not BufferSource',
value: {}
},
{
name: 'array',
value: [65]
},
{
name: 'SharedArrayBuffer',
// Use a getter to postpone construction so that all tests don't fail where
// SharedArrayBuffer is not yet implemented.
get value() {
// See https://github.com/whatwg/html/issues/5380 for why not `new SharedArrayBuffer()`
return new WebAssembly.Memory({ shared:true, initial:1, maximum:1 }).buffer;
}
},
{
name: 'shared Uint8Array',
get value() {
// See https://github.com/whatwg/html/issues/5380 for why not `new SharedArrayBuffer()`
return new Uint8Array(new WebAssembly.Memory({ shared:true, initial:1, maximum:1 }).buffer)
}
},
];

for (const chunk of badChunks) {
promise_test(async t => {
const cs = new CompressionStream('gzip');
const reader = cs.readable.getReader();
const writer = cs.writable.getWriter();
const writePromise = writer.write(chunk.value);
const readPromise = reader.read();
await promise_rejects_js(t, TypeError, writePromise, 'write should reject');
await promise_rejects_js(t, TypeError, readPromise, 'read should reject');
}, `chunk of type ${chunk.name} should error the stream for gzip`);

promise_test(async t => {
const cs = new CompressionStream('deflate');
const reader = cs.readable.getReader();
const writer = cs.writable.getWriter();
const writePromise = writer.write(chunk.value);
const readPromise = reader.read();
await promise_rejects_js(t, TypeError, writePromise, 'write should reject');
await promise_rejects_js(t, TypeError, readPromise, 'read should reject');
}, `chunk of type ${chunk.name} should error the stream for deflate`);

promise_test(async t => {
const cs = new CompressionStream('deflate-raw');
const reader = cs.readable.getReader();
const writer = cs.writable.getWriter();
const writePromise = writer.write(chunk.value);
const readPromise = reader.read();
await promise_rejects_js(t, TypeError, writePromise, 'write should reject');
await promise_rejects_js(t, TypeError, readPromise, 'read should reject');
}, `chunk of type ${chunk.name} should error the stream for deflate-raw`);
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
// META: global=window,worker,shadowrealm

'use strict';

test(t => {
assert_throws_js(TypeError, () => new CompressionStream('a'), 'constructor should throw');
}, '"a" should cause the constructor to throw');

test(t => {
assert_throws_js(TypeError, () => new CompressionStream(), 'constructor should throw');
}, 'no input should cause the constructor to throw');

test(t => {
assert_throws_js(Error, () => new CompressionStream({ toString() { throw Error(); } }), 'constructor should throw');
}, 'non-string input should cause the constructor to throw');
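// For reference, the format strings the constructor is expected to accept are the
// ones exercised by the other tests added in this commit:
//
// new CompressionStream('gzip');        // ok
// new CompressionStream('deflate');     // ok
// new CompressionStream('deflate-raw'); // ok
// new CompressionStream('a');           // TypeError (as asserted above)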
@@ -0,0 +1,63 @@
// META: global=window,worker,shadowrealm
// META: script=third_party/pako/pako_inflate.min.js
// META: timeout=long

'use strict';

// This test asserts that compressing '' doesn't affect the compressed data.
// Example: compressing ['Hello', '', 'Hello'] results in 'HelloHello'

async function compressChunkList(chunkList, format) {
const cs = new CompressionStream(format);
const writer = cs.writable.getWriter();
for (const chunk of chunkList) {
const chunkByte = new TextEncoder().encode(chunk);
writer.write(chunkByte);
}
const closePromise = writer.close();
const out = [];
const reader = cs.readable.getReader();
let totalSize = 0;
while (true) {
const { value, done } = await reader.read();
if (done)
break;
out.push(value);
totalSize += value.byteLength;
}
await closePromise;
const concatenated = new Uint8Array(totalSize);
let offset = 0;
for (const array of out) {
concatenated.set(array, offset);
offset += array.byteLength;
}
return concatenated;
}

const chunkLists = [
['', 'Hello', 'Hello'],
['Hello', '', 'Hello'],
['Hello', 'Hello', '']
];
const expectedValue = new TextEncoder().encode('HelloHello');

for (const chunkList of chunkLists) {
promise_test(async t => {
const compressedData = await compressChunkList(chunkList, 'deflate');
// decompress with pako, and check that we got the same result as our original string
assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match');
}, `the result of compressing [${chunkList}] with deflate should be 'HelloHello'`);

promise_test(async t => {
const compressedData = await compressChunkList(chunkList, 'gzip');
// decompress with pako, and check that we got the same result as our original string
assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match');
}, `the result of compressing [${chunkList}] with gzip should be 'HelloHello'`);

promise_test(async t => {
const compressedData = await compressChunkList(chunkList, 'deflate-raw');
// decompress with pako, and check that we got the same result as our original string
assert_array_equals(expectedValue, pako.inflateRaw(compressedData), 'value should match');
}, `the result of compressing [${chunkList}] with deflate-raw should be 'HelloHello'`);
}
@@ -0,0 +1,41 @@
// META: global=window,worker,shadowrealm
// META: script=third_party/pako/pako_inflate.min.js
// META: script=resources/concatenate-stream.js
// META: timeout=long

'use strict';

// This test verifies that a large flush output will not truncate the
// final results.

async function compressData(chunk, format) {
const cs = new CompressionStream(format);
const writer = cs.writable.getWriter();
writer.write(chunk);
writer.close();
return await concatenateStream(cs.readable);
}
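// concatenateStream() is provided by resources/concatenate-stream.js, which is
// loaded via the META line above but is not shown in this diff. A minimal sketch
// of such a helper, assuming it only needs to drain a ReadableStream of Uint8Array
// chunks into a single Uint8Array, could look like this:
//
// async function concatenateStream(readableStream) {
//   const reader = readableStream.getReader();
//   const chunks = [];
//   let totalSize = 0;
//   while (true) {
//     const { value, done } = await reader.read();
//     if (done)
//       break;
//     chunks.push(value);
//     totalSize += value.byteLength;
//   }
//   const concatenated = new Uint8Array(totalSize);
//   let offset = 0;
//   for (const chunk of chunks) {
//     concatenated.set(chunk, offset);
//     offset += chunk.byteLength;
//   }
//   return concatenated;
// }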

// JSON-encoded array of 10,000 numbers ("[0,1,2,...]"); this produces 48_891 bytes of data.
const fullData = new TextEncoder().encode(JSON.stringify(Array.from({ length: 10_000 }, (_, i) => i)));
const data = fullData.subarray(0, 35_579);
const expectedValue = data;

promise_test(async t => {
const compressedData = await compressData(data, 'deflate');
// decompress with pako, and check that we got the same result as our original string
assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match');
}, `deflate compression with large flush output`);

promise_test(async t => {
const compressedData = await compressData(data, 'gzip');
// decompress with pako, and check that we got the same result as our original string
assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match');
}, `gzip compression with large flush output`);

promise_test(async t => {
const compressedData = await compressData(data, 'deflate-raw');
// decompress with pako, and check that we got the same result as our original string
assert_array_equals(expectedValue, pako.inflateRaw(compressedData), 'value should match');
}, `deflate-raw compression with large flush output`);

@@ -0,0 +1,67 @@
// META: global=window,worker,shadowrealm
// META: script=third_party/pako/pako_inflate.min.js
// META: timeout=long

'use strict';

// This test asserts that compressing multiple chunks should work.

// Example: ('Hello', 3) => new TextEncoder().encode('HelloHelloHello')
function makeExpectedChunk(input, numberOfChunks) {
const expectedChunk = input.repeat(numberOfChunks);
return new TextEncoder().encode(expectedChunk);
}

// Example: ('Hello', 3, 'deflate') => compress ['Hello', 'Hello', 'Hello']
async function compressMultipleChunks(input, numberOfChunks, format) {
const cs = new CompressionStream(format);
const writer = cs.writable.getWriter();
const chunk = new TextEncoder().encode(input);
for (let i = 0; i < numberOfChunks; ++i) {
writer.write(chunk);
}
const closePromise = writer.close();
const out = [];
const reader = cs.readable.getReader();
let totalSize = 0;
while (true) {
const { value, done } = await reader.read();
if (done)
break;
out.push(value);
totalSize += value.byteLength;
}
await closePromise;
const concatenated = new Uint8Array(totalSize);
let offset = 0;
for (const array of out) {
concatenated.set(array, offset);
offset += array.byteLength;
}
return concatenated;
}

const hello = 'Hello';

for (let numberOfChunks = 2; numberOfChunks <= 16; ++numberOfChunks) {
promise_test(async t => {
const compressedData = await compressMultipleChunks(hello, numberOfChunks, 'deflate');
const expectedValue = makeExpectedChunk(hello, numberOfChunks);
// decompress with pako, and check that we got the same result as our original string
assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match');
}, `compressing ${numberOfChunks} chunks with deflate should work`);

promise_test(async t => {
const compressedData = await compressMultipleChunks(hello, numberOfChunks, 'gzip');
const expectedValue = makeExpectedChunk(hello, numberOfChunks);
// decompress with pako, and check that we got the same result as our original string
assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match');
}, `compressing ${numberOfChunks} chunks with gzip should work`);

promise_test(async t => {
const compressedData = await compressMultipleChunks(hello, numberOfChunks, 'deflate-raw');
const expectedValue = makeExpectedChunk(hello, numberOfChunks);
// decompress with pako, and check that we got the same result as our original string
assert_array_equals(expectedValue, pako.inflateRaw(compressedData), 'value should match');
}, `compressing ${numberOfChunks} chunks with deflate-raw should work`);
}
@@ -0,0 +1,64 @@
// META: global=window,worker,shadowrealm

'use strict';

// This test asserts that compressed data length is shorter than the original
// data length. If the input is extremely small, the compressed data may be
// larger than the original data.

const LARGE_FILE = '/media/test-av-384k-44100Hz-1ch-320x240-30fps-10kfr.webm';

async function compressArrayBuffer(input, format) {
const cs = new CompressionStream(format);
const writer = cs.writable.getWriter();
writer.write(input);
const closePromise = writer.close();
const out = [];
const reader = cs.readable.getReader();
let totalSize = 0;
while (true) {
const { value, done } = await reader.read();
if (done)
break;
out.push(value);
totalSize += value.byteLength;
}
await closePromise;
const concatenated = new Uint8Array(totalSize);
let offset = 0;
for (const array of out) {
concatenated.set(array, offset);
offset += array.byteLength;
}
return concatenated;
}

promise_test(async () => {
const response = await fetch(LARGE_FILE);
const buffer = await response.arrayBuffer();
const bufferView = new Uint8Array(buffer);
const originalLength = bufferView.length;
const compressedData = await compressArrayBuffer(bufferView, 'deflate');
const compressedLength = compressedData.length;
assert_less_than(compressedLength, originalLength, 'output should be smaller');
}, 'the length of deflated data should be shorter than that of the original data');

promise_test(async () => {
const response = await fetch(LARGE_FILE);
const buffer = await response.arrayBuffer();
const bufferView = new Uint8Array(buffer);
const originalLength = bufferView.length;
const compressedData = await compressArrayBuffer(bufferView, 'gzip');
const compressedLength = compressedData.length;
assert_less_than(compressedLength, originalLength, 'output should be smaller');
}, 'the length of gzipped data should be shorter than that of the original data');

promise_test(async () => {
const response = await fetch(LARGE_FILE);
const buffer = await response.arrayBuffer();
const bufferView = new Uint8Array(buffer);
const originalLength = bufferView.length;
const compressedData = await compressArrayBuffer(bufferView, 'deflate-raw');
const compressedLength = compressedData.length;
assert_less_than(compressedLength, originalLength, 'output should be smaller');
}, 'the length of deflated (with -raw) data should be shorter than that of the original data');