LibWeb: Add some compression WPT tests

These tests are relevant to the next commits.
devgianlu 2025-03-01 18:03:36 +01:00 committed by Jelle Raaijmakers
commit 1c836588d9
14 changed files with 457 additions and 0 deletions

@@ -0,0 +1,15 @@
<!doctype html>
<meta charset=utf-8>
<script>
self.GLOBAL = {
  isWindow: function() { return true; },
  isWorker: function() { return false; },
  isShadowRealm: function() { return false; },
};
</script>
<script src="../resources/testharness.js"></script>
<script src="../resources/testharnessreport.js"></script>
<div id=log></div>
<script src="../compression/compression-bad-chunks.tentative.any.js"></script>

@@ -0,0 +1,74 @@
// META: global=window,worker,shadowrealm
'use strict';
const badChunks = [
  {
    name: 'undefined',
    value: undefined
  },
  {
    name: 'null',
    value: null
  },
  {
    name: 'numeric',
    value: 3.14
  },
  {
    name: 'object, not BufferSource',
    value: {}
  },
  {
    name: 'array',
    value: [65]
  },
  {
    name: 'SharedArrayBuffer',
    // Use a getter to postpone construction, so that the other tests do not
    // fail in environments where SharedArrayBuffer is not implemented.
    get value() {
      // See https://github.com/whatwg/html/issues/5380 for why not `new SharedArrayBuffer()`
      return new WebAssembly.Memory({ shared: true, initial: 1, maximum: 1 }).buffer;
    }
  },
  {
    name: 'shared Uint8Array',
    get value() {
      // See https://github.com/whatwg/html/issues/5380 for why not `new SharedArrayBuffer()`
      return new Uint8Array(new WebAssembly.Memory({ shared: true, initial: 1, maximum: 1 }).buffer);
    }
  },
];
for (const chunk of badChunks) {
  promise_test(async t => {
    const cs = new CompressionStream('gzip');
    const reader = cs.readable.getReader();
    const writer = cs.writable.getWriter();
    const writePromise = writer.write(chunk.value);
    const readPromise = reader.read();
    await promise_rejects_js(t, TypeError, writePromise, 'write should reject');
    await promise_rejects_js(t, TypeError, readPromise, 'read should reject');
  }, `chunk of type ${chunk.name} should error the stream for gzip`);

  promise_test(async t => {
    const cs = new CompressionStream('deflate');
    const reader = cs.readable.getReader();
    const writer = cs.writable.getWriter();
    const writePromise = writer.write(chunk.value);
    const readPromise = reader.read();
    await promise_rejects_js(t, TypeError, writePromise, 'write should reject');
    await promise_rejects_js(t, TypeError, readPromise, 'read should reject');
  }, `chunk of type ${chunk.name} should error the stream for deflate`);

  promise_test(async t => {
    const cs = new CompressionStream('deflate-raw');
    const reader = cs.readable.getReader();
    const writer = cs.writable.getWriter();
    const writePromise = writer.write(chunk.value);
    const readPromise = reader.read();
    await promise_rejects_js(t, TypeError, writePromise, 'write should reject');
    await promise_rejects_js(t, TypeError, readPromise, 'read should reject');
  }, `chunk of type ${chunk.name} should error the stream for deflate-raw`);
}
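
Note that the three promise_test blocks above differ only in the format string passed to CompressionStream. A more compact, equivalent formulation would iterate over the formats; the following is only a sketch (the committed file keeps the blocks separate, matching the upstream WPT source):

for (const chunk of badChunks) {
  for (const format of ['gzip', 'deflate', 'deflate-raw']) {
    promise_test(async t => {
      const cs = new CompressionStream(format);
      const reader = cs.readable.getReader();
      const writer = cs.writable.getWriter();
      const writePromise = writer.write(chunk.value);
      const readPromise = reader.read();
      // A chunk that is not a BufferSource (or that is backed by a shared
      // buffer) errors the whole stream, so both pending operations reject.
      await promise_rejects_js(t, TypeError, writePromise, 'write should reject');
      await promise_rejects_js(t, TypeError, readPromise, 'read should reject');
    }, `chunk of type ${chunk.name} should error the stream for ${format}`);
  }
}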

@@ -0,0 +1,16 @@
<!doctype html>
<meta charset=utf-8>
<meta name="timeout" content="long">
<script>
self.GLOBAL = {
  isWindow: function() { return true; },
  isWorker: function() { return false; },
  isShadowRealm: function() { return false; },
};
</script>
<script src="../resources/testharness.js"></script>
<script src="../resources/testharnessreport.js"></script>
<script src="third_party/pako/pako_inflate.min.js"></script>
<script src="resources/concatenate-stream.js"></script>
<div id=log></div>
<script src="../compression/compression-large-flush-output.any.js"></script>

@@ -0,0 +1,41 @@
// META: global=window,worker,shadowrealm
// META: script=third_party/pako/pako_inflate.min.js
// META: script=resources/concatenate-stream.js
// META: timeout=long
'use strict';
// This test verifies that a large flush output will not truncate the
// final results.
async function compressData(chunk, format) {
  const cs = new CompressionStream(format);
  const writer = cs.writable.getWriter();
  // Deliberately not awaited: the readable side is drained (and completion
  // observed) by concatenateStream below.
  writer.write(chunk);
  writer.close();
  return await concatenateStream(cs.readable);
}
// JSON-encoded array of 10,000 numbers ("[0,1,2,...]"). This produces 48_891 bytes of data.
const fullData = new TextEncoder().encode(JSON.stringify(Array.from({ length: 10_000 }, (_, i) => i)));
const data = fullData.subarray(0, 35_579);
const expectedValue = data;
promise_test(async t => {
  const compressedData = await compressData(data, 'deflate');
  // Decompress with pako, and check that we got the same bytes as our original input.
  assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match');
}, `deflate compression with large flush output`);

promise_test(async t => {
  const compressedData = await compressData(data, 'gzip');
  // Decompress with pako, and check that we got the same bytes as our original input.
  assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match');
}, `gzip compression with large flush output`);

promise_test(async t => {
  const compressedData = await compressData(data, 'deflate-raw');
  // Decompress with pako, and check that we got the same bytes as our original input.
  assert_array_equals(expectedValue, pako.inflateRaw(compressedData), 'value should match');
}, `deflate-raw compression with large flush output`);
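
These tests decompress with pako rather than with DecompressionStream, presumably so that a matching bug in the compressor and decompressor under test cannot cancel out. For comparison, a round trip through the implementation itself would look like the sketch below (roundTrip is a hypothetical helper, not part of the commit; concatenateStream is available here via the META: script line):

// Sketch only: round-trip through the browser's own DecompressionStream.
async function roundTrip(chunk, format) {
  const compressed = await compressData(chunk, format);
  const ds = new DecompressionStream(format);
  const writer = ds.writable.getWriter();
  writer.write(compressed);
  writer.close();
  return await concatenateStream(ds.readable);
}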

@@ -0,0 +1,15 @@
<!doctype html>
<meta charset=utf-8>
<meta name="timeout" content="long">
<script>
self.GLOBAL = {
  isWindow: function() { return true; },
  isWorker: function() { return false; },
  isShadowRealm: function() { return false; },
};
</script>
<script src="../resources/testharness.js"></script>
<script src="../resources/testharnessreport.js"></script>
<script src="third_party/pako/pako_inflate.min.js"></script>
<div id=log></div>
<script src="../compression/compression-multiple-chunks.tentative.any.js"></script>

@@ -0,0 +1,67 @@
// META: global=window,worker,shadowrealm
// META: script=third_party/pako/pako_inflate.min.js
// META: timeout=long
'use strict';
// This test asserts that compressing multiple chunks should work.
// Example: ('Hello', 3) => TextEncoder().encode('HelloHelloHello')
function makeExpectedChunk(input, numberOfChunks) {
  const expectedChunk = input.repeat(numberOfChunks);
  return new TextEncoder().encode(expectedChunk);
}
// Example: ('Hello', 3, 'deflate') => compress ['Hello', 'Hello', 'Hello']
async function compressMultipleChunks(input, numberOfChunks, format) {
  const cs = new CompressionStream(format);
  const writer = cs.writable.getWriter();
  const chunk = new TextEncoder().encode(input);
  for (let i = 0; i < numberOfChunks; ++i) {
    writer.write(chunk);
  }
  const closePromise = writer.close();
  const out = [];
  const reader = cs.readable.getReader();
  let totalSize = 0;
  while (true) {
    const { value, done } = await reader.read();
    if (done)
      break;
    out.push(value);
    totalSize += value.byteLength;
  }
  await closePromise;
  const concatenated = new Uint8Array(totalSize);
  let offset = 0;
  for (const array of out) {
    concatenated.set(array, offset);
    offset += array.byteLength;
  }
  return concatenated;
}
const hello = 'Hello';
for (let numberOfChunks = 2; numberOfChunks <= 16; ++numberOfChunks) {
  promise_test(async t => {
    const compressedData = await compressMultipleChunks(hello, numberOfChunks, 'deflate');
    const expectedValue = makeExpectedChunk(hello, numberOfChunks);
    // Decompress with pako, and check that we got the same result as our original string.
    assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match');
  }, `compressing ${numberOfChunks} chunks with deflate should work`);

  promise_test(async t => {
    const compressedData = await compressMultipleChunks(hello, numberOfChunks, 'gzip');
    const expectedValue = makeExpectedChunk(hello, numberOfChunks);
    // Decompress with pako, and check that we got the same result as our original string.
    assert_array_equals(expectedValue, pako.inflate(compressedData), 'value should match');
  }, `compressing ${numberOfChunks} chunks with gzip should work`);

  promise_test(async t => {
    const compressedData = await compressMultipleChunks(hello, numberOfChunks, 'deflate-raw');
    const expectedValue = makeExpectedChunk(hello, numberOfChunks);
    // Decompress with pako, and check that we got the same result as our original string.
    assert_array_equals(expectedValue, pako.inflateRaw(compressedData), 'value should match');
  }, `compressing ${numberOfChunks} chunks with deflate-raw should work`);
}
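
The write promises in compressMultipleChunks are intentionally left floating: the read loop that follows drains the readable side, which is what ultimately lets the writes and the close settle. Awaiting each write with no concurrent reader could stall on backpressure once the stream's internal queue fills. A variant that starts the reader first, and can therefore safely await every write, might look like this sketch (hypothetical; it assumes the concatenateStream helper from resources/concatenate-stream.js, added later in this commit, is loaded):

async function compressWithBackpressure(chunks, format) {
  const cs = new CompressionStream(format);
  // Start draining the readable side first, so awaited writes never stall.
  const output = concatenateStream(cs.readable);
  const writer = cs.writable.getWriter();
  for (const chunk of chunks) {
    await writer.write(chunk); // resolves once the stream has accepted the chunk
  }
  await writer.close();
  return output;
}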

@@ -0,0 +1,15 @@
<!doctype html>
<meta charset=utf-8>
<script>
self.GLOBAL = {
  isWindow: function() { return true; },
  isWorker: function() { return false; },
  isShadowRealm: function() { return false; },
};
</script>
<script src="../resources/testharness.js"></script>
<script src="../resources/testharnessreport.js"></script>
<div id=log></div>
<script src="../compression/decompression-split-chunk.tentative.any.js"></script>

@@ -0,0 +1,53 @@
// META: global=window,worker,shadowrealm
'use strict';
const compressedBytesWithDeflate = new Uint8Array([120, 156, 75, 173, 40, 72, 77, 46, 73, 77, 81, 200, 47, 45, 41, 40, 45, 1, 0, 48, 173, 6, 36]);
const compressedBytesWithGzip = new Uint8Array([31, 139, 8, 0, 0, 0, 0, 0, 0, 3, 75, 173, 40, 72, 77, 46, 73, 77, 81, 200, 47, 45, 41, 40, 45, 1, 0, 176, 1, 57, 179, 15, 0, 0, 0]);
const compressedBytesWithDeflateRaw = new Uint8Array([
  0x4b, 0xad, 0x28, 0x48, 0x4d, 0x2e, 0x49, 0x4d, 0x51, 0xc8,
  0x2f, 0x2d, 0x29, 0x28, 0x2d, 0x01, 0x00,
]);
const expectedChunkValue = new TextEncoder().encode('expected output');
async function decompressArrayBuffer(input, format, chunkSize) {
  const ds = new DecompressionStream(format);
  const reader = ds.readable.getReader();
  const writer = ds.writable.getWriter();
  for (let beginning = 0; beginning < input.length; beginning += chunkSize) {
    writer.write(input.slice(beginning, beginning + chunkSize));
  }
  writer.close();
  const out = [];
  let totalSize = 0;
  while (true) {
    const { value, done } = await reader.read();
    if (done) break;
    out.push(value);
    totalSize += value.byteLength;
  }
  const concatenated = new Uint8Array(totalSize);
  let offset = 0;
  for (const array of out) {
    concatenated.set(array, offset);
    offset += array.byteLength;
  }
  return concatenated;
}
for (let chunkSize = 1; chunkSize < 16; ++chunkSize) {
  promise_test(async t => {
    const decompressedData = await decompressArrayBuffer(compressedBytesWithDeflate, 'deflate', chunkSize);
    assert_array_equals(decompressedData, expectedChunkValue, 'value should match');
  }, `decompressing a chunk split into pieces of size ${chunkSize} should work in deflate`);

  promise_test(async t => {
    const decompressedData = await decompressArrayBuffer(compressedBytesWithGzip, 'gzip', chunkSize);
    assert_array_equals(decompressedData, expectedChunkValue, 'value should match');
  }, `decompressing a chunk split into pieces of size ${chunkSize} should work in gzip`);

  promise_test(async t => {
    const decompressedData = await decompressArrayBuffer(compressedBytesWithDeflateRaw, 'deflate-raw', chunkSize);
    assert_array_equals(decompressedData, expectedChunkValue, 'value should match');
  }, `decompressing a chunk split into pieces of size ${chunkSize} should work in deflate-raw`);
}
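
The three byte arrays at the top of this file are canned compressions of the string 'expected output'. A fixture like this can be regenerated in any conforming implementation, as in the sketch below (makeFixture is a hypothetical name, and the concatenateStream helper from this commit is assumed to be loaded). Note that compressed output is not required to be byte-identical across implementations, only to decode back to the same text:

async function makeFixture(format) {
  const cs = new CompressionStream(format);
  const writer = cs.writable.getWriter();
  writer.write(new TextEncoder().encode('expected output'));
  writer.close();
  return await concatenateStream(cs.readable); // Uint8Array of compressed bytes
}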

@@ -0,0 +1,25 @@
'use strict';
// Read all the chunks from a stream that returns BufferSource objects and
// concatenate them into a single Uint8Array.
async function concatenateStream(readableStream) {
  const reader = readableStream.getReader();
  let totalSize = 0;
  const buffers = [];
  while (true) {
    const { value, done } = await reader.read();
    if (done) {
      break;
    }
    buffers.push(value);
    totalSize += value.byteLength;
  }
  reader.releaseLock();
  const concatenated = new Uint8Array(totalSize);
  let offset = 0;
  for (const buffer of buffers) {
    concatenated.set(buffer, offset);
    offset += buffer.byteLength;
  }
  return concatenated;
}
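
This helper backs compression-large-flush-output.any.js above. A minimal usage sketch (gzipString is a hypothetical name, not part of the commit):

async function gzipString(text) {
  const cs = new CompressionStream('gzip');
  const writer = cs.writable.getWriter();
  writer.write(new TextEncoder().encode(text));
  writer.close();
  return await concatenateStream(cs.readable);
}

In window and worker scopes, new Response(stream).arrayBuffer() would collect a stream's contents similarly, but the explicit read loop avoids depending on the fetch API, which may not be exposed in the ShadowRealm scope these tests also target.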

File diff suppressed because one or more lines are too long