LibWeb: Implement TextEncoderStream
Required by the server-side rendering mode of React Router, which is used by https://chatgpt.com/. Note that the imported tests do not include the worker variants, to prevent freezing on macOS.
This commit is contained in:
Parent: 24d5f24749
Commit: cae0ee6fa7

Notes (github-actions[bot], 2025-02-07 16:05:51 +00:00):
Author: https://github.com/Lubrsi
Commit: cae0ee6fa7
Pull-request: https://github.com/LadybirdBrowser/ladybird/pull/3481
Reviewed-by: https://github.com/trflynn89 ✅
36 changed files with 1375 additions and 0 deletions
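
For context, a minimal usage sketch of the API these imported tests exercise (illustrative only, not part of this commit; demoTextEncoderStream is just an illustrative name): TextEncoderStream is a TransformStream whose writable side accepts strings and whose readable side emits UTF-8 encoded Uint8Array chunks.

// Illustrative sketch, not part of the diff below: basic TextEncoderStream usage.
async function demoTextEncoderStream() {
  const stream = new TextEncoderStream();
  const writer = stream.writable.getWriter();
  const reader = stream.readable.getReader();

  // Don't await the write before reading: with the default queuing strategy the
  // write promise may not settle until a read relieves backpressure, which is
  // exactly what the imported backpressure.any.js verifies.
  writer.write('I \u{1F499} streams');
  writer.close();

  const { value } = await reader.read();
  // value is a Uint8Array of UTF-8 bytes: 0x49 0x20 0xf0 0x9f 0x92 0x99 ...
  return value;
}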
@@ -0,0 +1,15 @@
<!doctype html>
<meta charset=utf-8>

<script>
self.GLOBAL = {
  isWindow: function() { return true; },
  isWorker: function() { return false; },
  isShadowRealm: function() { return false; },
};
</script>
<script src="../../resources/testharness.js"></script>
<script src="../../resources/testharnessreport.js"></script>

<div id=log></div>
<script src="../../encoding/streams/backpressure.any.js"></script>
@@ -0,0 +1,60 @@
// META: global=window,worker,shadowrealm

'use strict';

const classes = [
  {
    name: 'TextDecoderStream',
    input: new Uint8Array([65])
  },
  {
    name: 'TextEncoderStream',
    input: 'A'
  }
];

const microtasksRun = () => new Promise(resolve => step_timeout(resolve, 0));

for (const streamClass of classes) {
  promise_test(async () => {
    const stream = new self[streamClass.name]();
    const writer = stream.writable.getWriter();
    const reader = stream.readable.getReader();
    const events = [];
    await microtasksRun();
    const writePromise = writer.write(streamClass.input);
    writePromise.then(() => events.push('write'));
    await microtasksRun();
    events.push('paused');
    await reader.read();
    events.push('read');
    await writePromise;
    assert_array_equals(events, ['paused', 'read', 'write'],
                        'write should happen after read');
  }, 'write() should not complete until read relieves backpressure for ' +
     `${streamClass.name}`);

  promise_test(async () => {
    const stream = new self[streamClass.name]();
    const writer = stream.writable.getWriter();
    const reader = stream.readable.getReader();
    const events = [];
    await microtasksRun();
    const readPromise1 = reader.read();
    readPromise1.then(() => events.push('read1'));
    const writePromise1 = writer.write(streamClass.input);
    const writePromise2 = writer.write(streamClass.input);
    writePromise1.then(() => events.push('write1'));
    writePromise2.then(() => events.push('write2'));
    await microtasksRun();
    events.push('paused');
    const readPromise2 = reader.read();
    readPromise2.then(() => events.push('read2'));
    await Promise.all([writePromise1, writePromise2,
                       readPromise1, readPromise2]);
    assert_array_equals(events, ['read1', 'write1', 'paused', 'read2',
                                 'write2'],
                        'writes should not happen before read2');
  }, 'additional writes should wait for backpressure to be relieved for ' +
     `class ${streamClass.name}`);
}
@@ -0,0 +1,40 @@
<!doctype html>
<meta charset=utf-8>

<script src="../../resources/testharness.js"></script>
<script src="../../resources/testharnessreport.js"></script>
<script src="../../resources/testharness-shadowrealm-outer.js"></script>
<script>
(async function() {
  const outer = new ShadowRealm();
  outer.evaluate(`
    var inner = new ShadowRealm();
  `);
  await shadowRealmEvalAsync(outer, `
    await import("../../resources/testharness-shadowrealm-outer.js");
    await shadowRealmEvalAsync(inner, \`
      await import("/resources/testharness-shadowrealm-inner.js");
      await import("../../resources/testharness.js");
    \`);
  `);

  outer.evaluate(`
    inner.evaluate("setShadowRealmGlobalProperties")
  `)(location.search, fetchAdaptor);

  await shadowRealmEvalAsync(outer, `
    await shadowRealmEvalAsync(inner, \`

      await import("/encoding/streams/backpressure.any.js");
    \`);
  `);

  outer.evaluate(`
    function begin_shadow_realm_tests(windowCallback) {
      inner.evaluate("begin_shadow_realm_tests")(windowCallback);
    }
  `);
  await fetch_tests_from_shadow_realm(outer);
  done();
})().catch(e => setup(() => { throw e; }));
</script>
@@ -0,0 +1,24 @@
<!doctype html>
<meta charset=utf-8>

<script src="../../resources/testharness.js"></script>
<script src="../../resources/testharnessreport.js"></script>
<script src="../../resources/testharness-shadowrealm-outer.js"></script>
<script>
(async function() {
  const r = new ShadowRealm();
  await shadowRealmEvalAsync(r, `
    await import("/resources/testharness-shadowrealm-inner.js");
    await import("../../resources/testharness.js");
  `);
  r.evaluate("setShadowRealmGlobalProperties")(location.search, fetchAdaptor);

  await shadowRealmEvalAsync(r, `

    await import("/encoding/streams/backpressure.any.js");
  `);

  await fetch_tests_from_shadow_realm(r);
  done();
})().catch(e => setup(() => { throw e; }));
</script>
@@ -0,0 +1,16 @@
<!doctype html>
<meta charset=utf-8>

<script>
self.GLOBAL = {
  isWindow: function() { return true; },
  isWorker: function() { return false; },
  isShadowRealm: function() { return false; },
};
</script>
<script src="../../resources/testharness.js"></script>
<script src="../../resources/testharnessreport.js"></script>
<script src="resources/readable-stream-from-array.js"></script>
<script src="resources/readable-stream-to-array.js"></script>
<div id=log></div>
<script src="../../encoding/streams/encode-bad-chunks.any.js"></script>
@@ -0,0 +1,63 @@
// META: global=window,worker
// META: script=resources/readable-stream-from-array.js
// META: script=resources/readable-stream-to-array.js

'use strict';

const error1 = new Error('error1');
error1.name = 'error1';

promise_test(t => {
  const ts = new TextEncoderStream();
  const writer = ts.writable.getWriter();
  const reader = ts.readable.getReader();
  const writePromise = writer.write({
    toString() { throw error1; }
  });
  const readPromise = reader.read();
  return Promise.all([
    promise_rejects_exactly(t, error1, readPromise, 'read should reject with error1'),
    promise_rejects_exactly(t, error1, writePromise, 'write should reject with error1'),
    promise_rejects_exactly(t, error1, reader.closed, 'readable should be errored with error1'),
    promise_rejects_exactly(t, error1, writer.closed, 'writable should be errored with error1'),
  ]);
}, 'a chunk that cannot be converted to a string should error the streams');

const oddInputs = [
  {
    name: 'undefined',
    value: undefined,
    expected: 'undefined'
  },
  {
    name: 'null',
    value: null,
    expected: 'null'
  },
  {
    name: 'numeric',
    value: 3.14,
    expected: '3.14'
  },
  {
    name: 'object',
    value: {},
    expected: '[object Object]'
  },
  {
    name: 'array',
    value: ['hi'],
    expected: 'hi'
  }
];

for (const input of oddInputs) {
  promise_test(async () => {
    const outputReadable = readableStreamFromArray([input.value])
        .pipeThrough(new TextEncoderStream())
        .pipeThrough(new TextDecoderStream());
    const output = await readableStreamToArray(outputReadable);
    assert_equals(output.length, 1, 'output should contain one chunk');
    assert_equals(output[0], input.expected, 'output should be correct');
  }, `input of type ${input.name} should be converted correctly to string`);
}
@@ -0,0 +1,16 @@
<!doctype html>
<meta charset=utf-8>

<script>
self.GLOBAL = {
  isWindow: function() { return true; },
  isWorker: function() { return false; },
  isShadowRealm: function() { return false; },
};
</script>
<script src="../../resources/testharness.js"></script>
<script src="../../resources/testharnessreport.js"></script>
<script src="resources/readable-stream-from-array.js"></script>
<script src="resources/readable-stream-to-array.js"></script>
<div id=log></div>
<script src="../../encoding/streams/encode-utf8.any.js"></script>
@@ -0,0 +1,144 @@
// META: global=window,worker
// META: script=resources/readable-stream-from-array.js
// META: script=resources/readable-stream-to-array.js

'use strict';
const inputString = 'I \u{1F499} streams';
const expectedOutputBytes = [0x49, 0x20, 0xf0, 0x9f, 0x92, 0x99, 0x20, 0x73,
                             0x74, 0x72, 0x65, 0x61, 0x6d, 0x73];
// This is a character that must be represented in two code units in a string,
// ie. it is not in the Basic Multilingual Plane.
const astralCharacter = '\u{1F499}'; // BLUE HEART
const astralCharacterEncoded = [0xf0, 0x9f, 0x92, 0x99];
const leading = astralCharacter[0];
const trailing = astralCharacter[1];
const replacementEncoded = [0xef, 0xbf, 0xbd];

// These tests assume that the implementation correctly classifies leading and
// trailing surrogates and treats all the code units in each set equivalently.

const testCases = [
  {
    input: [inputString],
    output: [expectedOutputBytes],
    description: 'encoding one string of UTF-8 should give one complete chunk'
  },
  {
    input: [leading, trailing],
    output: [astralCharacterEncoded],
    description: 'a character split between chunks should be correctly encoded'
  },
  {
    input: [leading, trailing + astralCharacter],
    output: [astralCharacterEncoded.concat(astralCharacterEncoded)],
    description: 'a character following one split between chunks should be ' +
        'correctly encoded'
  },
  {
    input: [leading, trailing + leading, trailing],
    output: [astralCharacterEncoded, astralCharacterEncoded],
    description: 'two consecutive astral characters each split down the ' +
        'middle should be correctly reassembled'
  },
  {
    input: [leading, trailing + leading + leading, trailing],
    output: [astralCharacterEncoded.concat(replacementEncoded), astralCharacterEncoded],
    description: 'two consecutive astral characters each split down the ' +
        'middle with an invalid surrogate in the middle should be correctly ' +
        'encoded'
  },
  {
    input: [leading],
    output: [replacementEncoded],
    description: 'a stream ending in a leading surrogate should emit a ' +
        'replacement character as a final chunk'
  },
  {
    input: [leading, astralCharacter],
    output: [replacementEncoded.concat(astralCharacterEncoded)],
    description: 'an unmatched surrogate at the end of a chunk followed by ' +
        'an astral character in the next chunk should be replaced with ' +
        'the replacement character at the start of the next output chunk'
  },
  {
    input: [leading, 'A'],
    output: [replacementEncoded.concat([65])],
    description: 'an unmatched surrogate at the end of a chunk followed by ' +
        'an ascii character in the next chunk should be replaced with ' +
        'the replacement character at the start of the next output chunk'
  },
  {
    input: [leading, leading, trailing],
    output: [replacementEncoded, astralCharacterEncoded],
    description: 'an unmatched surrogate at the end of a chunk followed by ' +
        'a plane 1 character split into two chunks should result in ' +
        'the encoded plane 1 character appearing in the last output chunk'
  },
  {
    input: [leading, leading],
    output: [replacementEncoded, replacementEncoded],
    description: 'two leading chunks should result in two replacement ' +
        'characters'
  },
  {
    input: [leading + leading, trailing],
    output: [replacementEncoded, astralCharacterEncoded],
    description: 'a non-terminal unpaired leading surrogate should ' +
        'immediately be replaced'
  },
  {
    input: [trailing, astralCharacter],
    output: [replacementEncoded, astralCharacterEncoded],
    description: 'a terminal unpaired trailing surrogate should ' +
        'immediately be replaced'
  },
  {
    input: [leading, '', trailing],
    output: [astralCharacterEncoded],
    description: 'a leading surrogate chunk should be carried past empty chunks'
  },
  {
    input: [leading, ''],
    output: [replacementEncoded],
    description: 'a leading surrogate chunk should error when it is clear ' +
        'it didn\'t form a pair',
  },
  {
    input: [''],
    output: [],
    description: 'an empty string should result in no output chunk'
  },
  {
    input: ['', inputString],
    output: [expectedOutputBytes],
    description: 'a leading empty chunk should be ignored'
  },
  {
    input: [inputString, ''],
    output: [expectedOutputBytes],
    description: 'a trailing empty chunk should be ignored'
  },
  {
    input: ['A'],
    output: [[65]],
    description: 'a plain ASCII chunk should be converted'
  },
  {
    input: ['\xff'],
    output: [[195, 191]],
    description: 'characters in the ISO-8859-1 range should be encoded correctly'
  },
];

for (const {input, output, description} of testCases) {
  promise_test(async () => {
    const inputStream = readableStreamFromArray(input);
    const outputStream = inputStream.pipeThrough(new TextEncoderStream());
    const chunkArray = await readableStreamToArray(outputStream);
    assert_equals(chunkArray.length, output.length,
                  'number of chunks should match');
    for (let i = 0; i < output.length; ++i) {
      assert_array_equals(chunkArray[i], output[i], `chunk ${i} should match`);
    }
  }, description);
}
@@ -0,0 +1,8 @@
<!doctype html>
<meta charset=utf-8>

<script src="../../resources/testharness.js"></script>
<script src="../../resources/testharnessreport.js"></script>

<div id=log></div>
<script src="../../encoding/streams/invalid-realm.window.js"></script>
@@ -0,0 +1,37 @@
// Text*Stream should still work even if the realm is detached.

// Adds an iframe to the document and returns it.
function addIframe() {
  const iframe = document.createElement('iframe');
  document.body.appendChild(iframe);
  return iframe;
}

promise_test(async t => {
  const iframe = addIframe();
  const stream = new iframe.contentWindow.TextDecoderStream();
  const readPromise = stream.readable.getReader().read();
  const writer = stream.writable.getWriter();
  await writer.ready;
  iframe.remove();
  return Promise.all([writer.write(new Uint8Array([65])), readPromise]);
}, 'TextDecoderStream: write in detached realm should succeed');

promise_test(async t => {
  const iframe = addIframe();
  const stream = new iframe.contentWindow.TextEncoderStream();
  const readPromise = stream.readable.getReader().read();
  const writer = stream.writable.getWriter();
  await writer.ready;
  iframe.remove();
  return Promise.all([writer.write('A'), readPromise]);
}, 'TextEncoderStream: write in detached realm should succeed');

for (const type of ['TextEncoderStream', 'TextDecoderStream']) {
  promise_test(async t => {
    const iframe = addIframe();
    const stream = new iframe.contentWindow[type]();
    iframe.remove();
    return stream.writable.close();
  }, `${type}: close in detached realm should succeed`);
}
@@ -0,0 +1,15 @@
<!doctype html>
<meta charset=utf-8>

<script>
self.GLOBAL = {
  isWindow: function() { return true; },
  isWorker: function() { return false; },
  isShadowRealm: function() { return false; },
};
</script>
<script src="../../resources/testharness.js"></script>
<script src="../../resources/testharnessreport.js"></script>

<div id=log></div>
<script src="../../encoding/streams/readable-writable-properties.any.js"></script>
@@ -0,0 +1,22 @@
// META: global=window,worker,shadowrealm

// This just tests that the "readable" and "writable" properties pass the brand
// checks. All other relevant attributes are covered by the IDL tests.

'use strict';

test(() => {
  const te = new TextEncoderStream();
  assert_equals(typeof ReadableStream.prototype.getReader.call(te.readable),
                'object', 'readable property must pass brand check');
  assert_equals(typeof WritableStream.prototype.getWriter.call(te.writable),
                'object', 'writable property must pass brand check');
}, 'TextEncoderStream readable and writable properties must pass brand checks');

test(() => {
  const td = new TextDecoderStream();
  assert_equals(typeof ReadableStream.prototype.getReader.call(td.readable),
                'object', 'readable property must pass brand check');
  assert_equals(typeof WritableStream.prototype.getWriter.call(td.writable),
                'object', 'writable property must pass brand check');
}, 'TextDecoderStream readable and writable properties must pass brand checks');
@@ -0,0 +1,40 @@
<!doctype html>
<meta charset=utf-8>

<script src="../../resources/testharness.js"></script>
<script src="../../resources/testharnessreport.js"></script>
<script src="../../resources/testharness-shadowrealm-outer.js"></script>
<script>
(async function() {
  const outer = new ShadowRealm();
  outer.evaluate(`
    var inner = new ShadowRealm();
  `);
  await shadowRealmEvalAsync(outer, `
    await import("../../resources/testharness-shadowrealm-outer.js");
    await shadowRealmEvalAsync(inner, \`
      await import("/resources/testharness-shadowrealm-inner.js");
      await import("../../resources/testharness.js");
    \`);
  `);

  outer.evaluate(`
    inner.evaluate("setShadowRealmGlobalProperties")
  `)(location.search, fetchAdaptor);

  await shadowRealmEvalAsync(outer, `
    await shadowRealmEvalAsync(inner, \`

      await import("/encoding/streams/readable-writable-properties.any.js");
    \`);
  `);

  outer.evaluate(`
    function begin_shadow_realm_tests(windowCallback) {
      inner.evaluate("begin_shadow_realm_tests")(windowCallback);
    }
  `);
  await fetch_tests_from_shadow_realm(outer);
  done();
})().catch(e => setup(() => { throw e; }));
</script>
@@ -0,0 +1,24 @@
<!doctype html>
<meta charset=utf-8>

<script src="../../resources/testharness.js"></script>
<script src="../../resources/testharnessreport.js"></script>
<script src="../../resources/testharness-shadowrealm-outer.js"></script>
<script>
(async function() {
  const r = new ShadowRealm();
  await shadowRealmEvalAsync(r, `
    await import("/resources/testharness-shadowrealm-inner.js");
    await import("../../resources/testharness.js");
  `);
  r.evaluate("setShadowRealmGlobalProperties")(location.search, fetchAdaptor);

  await shadowRealmEvalAsync(r, `

    await import("/encoding/streams/readable-writable-properties.any.js");
  `);

  await fetch_tests_from_shadow_realm(r);
  done();
})().catch(e => setup(() => { throw e; }));
</script>
@@ -0,0 +1,8 @@
<!doctype html>
<meta charset=utf-8>

<script src="../../resources/testharness.js"></script>
<script src="../../resources/testharnessreport.js"></script>

<div id=log></div>
<script src="../../encoding/streams/realms.window.js"></script>
@@ -0,0 +1,304 @@
'use strict';

// Test that objects created by the TextEncoderStream and TextDecoderStream APIs
// are created in the correct realm. The tests work by creating an iframe for
// each realm and then posting Javascript to them to be evaluated. Inputs and
// outputs are passed around via global variables in each realm's scope.

// Async setup is required before creating any tests, so require done() to be
// called.
setup({explicit_done: true});

function createRealm() {
  let iframe = document.createElement('iframe');
  const scriptEndTag = '<' + '/script>';
  iframe.srcdoc = `<!doctype html>
<script>
onmessage = event => {
  if (event.source !== window.parent) {
    throw new Error('unexpected message with source ' + event.source);
  }
  eval(event.data);
};
${scriptEndTag}`;
  iframe.style.display = 'none';
  document.body.appendChild(iframe);
  let realmPromiseResolve;
  const realmPromise = new Promise(resolve => {
    realmPromiseResolve = resolve;
  });
  iframe.onload = () => {
    realmPromiseResolve(iframe.contentWindow);
  };
  return realmPromise;
}

async function createRealms() {
  // All realms are visible on the global object so they can access each other.

  // The realm that the constructor function comes from.
  window.constructorRealm = await createRealm();

  // The realm in which the constructor object is called.
  window.constructedRealm = await createRealm();

  // The realm in which reading happens.
  window.readRealm = await createRealm();

  // The realm in which writing happens.
  window.writeRealm = await createRealm();

  // The realm that provides the definitions of Readable and Writable methods.
  window.methodRealm = await createRealm();

  await evalInRealmAndWait(methodRealm, `
    window.ReadableStreamDefaultReader =
        new ReadableStream().getReader().constructor;
    window.WritableStreamDefaultWriter =
        new WritableStream().getWriter().constructor;
  `);
  window.readMethod = methodRealm.ReadableStreamDefaultReader.prototype.read;
  window.writeMethod = methodRealm.WritableStreamDefaultWriter.prototype.write;
}

// In order for values to be visible between realms, they need to be
// global. To prevent interference between tests, variable names are generated
// automatically.
const id = (() => {
  let nextId = 0;
  return () => {
    return `realmsId${nextId++}`;
  };
})();

// Eval string "code" in the content of realm "realm". Evaluation happens
// asynchronously, meaning it hasn't happened when the function returns.
function evalInRealm(realm, code) {
  realm.postMessage(code, window.origin);
}

// Same as evalInRealm() but returns a Promise which will resolve when the
// function has actually run.
async function evalInRealmAndWait(realm, code) {
  const resolve = id();
  const waitOn = new Promise(r => {
    realm[resolve] = r;
  });
  evalInRealm(realm, code);
  evalInRealm(realm, `${resolve}();`);
  await waitOn;
}

// The same as evalInRealmAndWait but returns the result of evaluating "code" as
// an expression.
async function evalInRealmAndReturn(realm, code) {
  const myId = id();
  await evalInRealmAndWait(realm, `window.${myId} = ${code};`);
  return realm[myId];
}

// Constructs an object in constructedRealm and copies it into readRealm and
// writeRealm. Returns the id that can be used to access the object in those
// realms. |what| can contain constructor arguments.
async function constructAndStore(what) {
  const objId = id();
  // Call |constructorRealm|'s constructor from inside |constructedRealm|.
  writeRealm[objId] = await evalInRealmAndReturn(
      constructedRealm, `new parent.constructorRealm.${what}`);
  readRealm[objId] = writeRealm[objId];
  return objId;
}

// Calls read() on the readable side of the TransformStream stored in
// readRealm[objId]. Locks the readable side as a side-effect.
function readInReadRealm(objId) {
  return evalInRealmAndReturn(readRealm, `
    parent.readMethod.call(window.${objId}.readable.getReader())`);
}

// Calls write() on the writable side of the TransformStream stored in
// writeRealm[objId], passing |value|. Locks the writable side as a
// side-effect.
function writeInWriteRealm(objId, value) {
  const valueId = id();
  writeRealm[valueId] = value;
  return evalInRealmAndReturn(writeRealm, `
    parent.writeMethod.call(window.${objId}.writable.getWriter(),
                            window.${valueId})`);
}

window.onload = () => {
  createRealms().then(() => {
    runGenericTests('TextEncoderStream');
    runTextEncoderStreamTests();
    runGenericTests('TextDecoderStream');
    runTextDecoderStreamTests();
    done();
  });
};

function runGenericTests(classname) {
  promise_test(async () => {
    const obj = await evalInRealmAndReturn(
        constructedRealm, `new parent.constructorRealm.${classname}()`);
    assert_equals(obj.constructor, constructorRealm[classname],
                  'obj should be in constructor realm');
  }, `a ${classname} object should be associated with the realm the ` +
     'constructor came from');

  promise_test(async () => {
    const objId = await constructAndStore(classname);
    const readableGetterId = id();
    readRealm[readableGetterId] = Object.getOwnPropertyDescriptor(
        methodRealm[classname].prototype, 'readable').get;
    const writableGetterId = id();
    writeRealm[writableGetterId] = Object.getOwnPropertyDescriptor(
        methodRealm[classname].prototype, 'writable').get;
    const readable = await evalInRealmAndReturn(
        readRealm, `${readableGetterId}.call(${objId})`);
    const writable = await evalInRealmAndReturn(
        writeRealm, `${writableGetterId}.call(${objId})`);
    assert_equals(readable.constructor, constructorRealm.ReadableStream,
                  'readable should be in constructor realm');
    assert_equals(writable.constructor, constructorRealm.WritableStream,
                  'writable should be in constructor realm');
  }, `${classname}'s readable and writable attributes should come from the ` +
     'same realm as the constructor definition');
}

function runTextEncoderStreamTests() {
  promise_test(async () => {
    const objId = await constructAndStore('TextEncoderStream');
    const writePromise = writeInWriteRealm(objId, 'A');
    const result = await readInReadRealm(objId);
    await writePromise;
    assert_equals(result.constructor, constructorRealm.Object,
                  'result should be in constructor realm');
    assert_equals(result.value.constructor, constructorRealm.Uint8Array,
                  'chunk should be in constructor realm');
  }, 'the output chunks when read is called after write should come from the ' +
     'same realm as the constructor of TextEncoderStream');

  promise_test(async () => {
    const objId = await constructAndStore('TextEncoderStream');
    const chunkPromise = readInReadRealm(objId);
    writeInWriteRealm(objId, 'A');
    // Now the read() should resolve.
    const result = await chunkPromise;
    assert_equals(result.constructor, constructorRealm.Object,
                  'result should be in constructor realm');
    assert_equals(result.value.constructor, constructorRealm.Uint8Array,
                  'chunk should be in constructor realm');
  }, 'the output chunks when write is called with a pending read should come ' +
     'from the same realm as the constructor of TextEncoderStream');

  // There is not absolute consensus regarding what realm exceptions should be
  // created in. Implementations may vary. The expectations in exception-related
  // tests may change in future once consensus is reached.
  promise_test(async t => {
    const objId = await constructAndStore('TextEncoderStream');
    // Read first to relieve backpressure.
    const readPromise = readInReadRealm(objId);

    await promise_rejects_js(t, constructorRealm.TypeError,
                             writeInWriteRealm(objId, {
                               toString() { return {}; }
                             }),
                             'write TypeError should come from constructor realm');

    return promise_rejects_js(t, constructorRealm.TypeError, readPromise,
                              'read TypeError should come from constructor realm');
  }, 'TypeError for unconvertable chunk should come from constructor realm ' +
     'of TextEncoderStream');
}

function runTextDecoderStreamTests() {
  promise_test(async () => {
    const objId = await constructAndStore('TextDecoderStream');
    const writePromise = writeInWriteRealm(objId, new Uint8Array([65]));
    const result = await readInReadRealm(objId);
    await writePromise;
    assert_equals(result.constructor, constructorRealm.Object,
                  'result should be in constructor realm');
    // A string is not an object, so doesn't have an associated realm. Accessing
    // string properties will create a transient object wrapper belonging to the
    // current realm. So checking the realm of result.value is not useful.
  }, 'the result object when read is called after write should come from the ' +
     'same realm as the constructor of TextDecoderStream');

  promise_test(async () => {
    const objId = await constructAndStore('TextDecoderStream');
    const chunkPromise = readInReadRealm(objId);
    writeInWriteRealm(objId, new Uint8Array([65]));
    // Now the read() should resolve.
    const result = await chunkPromise;
    assert_equals(result.constructor, constructorRealm.Object,
                  'result should be in constructor realm');
    // A string is not an object, so doesn't have an associated realm. Accessing
    // string properties will create a transient object wrapper belonging to the
    // current realm. So checking the realm of result.value is not useful.
  }, 'the result object when write is called with a pending ' +
     'read should come from the same realm as the constructor of TextDecoderStream');

  promise_test(async t => {
    const objId = await constructAndStore('TextDecoderStream');
    // Read first to relieve backpressure.
    const readPromise = readInReadRealm(objId);
    await promise_rejects_js(
        t, constructorRealm.TypeError,
        writeInWriteRealm(objId, {}),
        'write TypeError should come from constructor realm'
    );

    return promise_rejects_js(
        t, constructorRealm.TypeError, readPromise,
        'read TypeError should come from constructor realm'
    );
  }, 'TypeError for chunk with the wrong type should come from constructor ' +
     'realm of TextDecoderStream');

  promise_test(async t => {
    const objId =
        await constructAndStore(`TextDecoderStream('utf-8', {fatal: true})`);
    // Read first to relieve backpressure.
    const readPromise = readInReadRealm(objId);

    await promise_rejects_js(
        t, constructorRealm.TypeError,
        writeInWriteRealm(objId, new Uint8Array([0xff])),
        'write TypeError should come from constructor realm'
    );

    return promise_rejects_js(
        t, constructorRealm.TypeError, readPromise,
        'read TypeError should come from constructor realm'
    );
  }, 'TypeError for invalid chunk should come from constructor realm ' +
     'of TextDecoderStream');

  promise_test(async t => {
    const objId =
        await constructAndStore(`TextDecoderStream('utf-8', {fatal: true})`);
    // Read first to relieve backpressure.
    readInReadRealm(objId);
    // Write an unfinished sequence of bytes.
    const incompleteBytesId = id();
    writeRealm[incompleteBytesId] = new Uint8Array([0xf0]);

    return promise_rejects_js(
        t, constructorRealm.TypeError,
        // Can't use writeInWriteRealm() here because it doesn't make it possible
        // to reuse the writer.
        evalInRealmAndReturn(writeRealm, `
          (() => {
            const writer = window.${objId}.writable.getWriter();
            parent.writeMethod.call(writer, window.${incompleteBytesId});
            return parent.methodRealm.WritableStreamDefaultWriter.prototype
                .close.call(writer);
          })();
        `),
        'close TypeError should come from constructor realm'
    );
  }, 'TypeError for incomplete input should come from constructor realm ' +
     'of TextDecoderStream');
}
@@ -0,0 +1,12 @@
'use strict';

function readableStreamFromArray(array) {
  return new ReadableStream({
    start(controller) {
      for (let entry of array) {
        controller.enqueue(entry);
      }
      controller.close();
    }
  });
}
@@ -0,0 +1,11 @@
'use strict';

function readableStreamToArray(stream) {
  var array = [];
  var writable = new WritableStream({
    write(chunk) {
      array.push(chunk);
    }
  });
  return stream.pipeTo(writable).then(() => array);
}
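
As a rough usage sketch (not part of the diff; collectEncodedChunks is a hypothetical name), the two helpers above compose with TextEncoderStream the same way the imported encode-utf8.any.js and encode-bad-chunks.any.js tests do: string chunks go in, UTF-8 byte chunks come out.

// Hypothetical helper combining readableStreamFromArray and readableStreamToArray,
// mirroring the pipeThrough pattern used by the imported tests.
function collectEncodedChunks(strings) {
  const encoded = readableStreamFromArray(strings)
      .pipeThrough(new TextEncoderStream());
  // Resolves to an array of Uint8Array chunks, one per non-empty output chunk.
  return readableStreamToArray(encoded);
}

// collectEncodedChunks(['A']) would resolve to an array holding one Uint8Array, [65].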