mirror of
https://github.com/LadybirdBrowser/ladybird.git
synced 2025-04-20 03:25:13 +00:00
LibWeb: Implement TextEncoderStream
Required by the server-side rendering mode of React Router, used by https://chatgpt.com/. Note that the imported tests do not have the worker variants, to prevent freezing on macOS.
This commit is contained in:
parent
24d5f24749
commit
cae0ee6fa7
Notes:
github-actions[bot]
2025-02-07 16:05:51 +00:00
Author: https://github.com/Lubrsi Commit: https://github.com/LadybirdBrowser/ladybird/commit/cae0ee6fa78 Pull-request: https://github.com/LadybirdBrowser/ladybird/pull/3481 Reviewed-by: https://github.com/trflynn89 ✅
36 changed files with 1375 additions and 0 deletions
|
@ -244,6 +244,7 @@ set(SOURCES
|
|||
Encoding/TextDecoder.cpp
|
||||
Encoding/TextEncoder.cpp
|
||||
Encoding/TextEncoderCommon.cpp
|
||||
Encoding/TextEncoderStream.cpp
|
||||
EntriesAPI/FileSystemEntry.cpp
|
||||
EventTiming/PerformanceEventTiming.cpp
|
||||
Fetch/Body.cpp
|
||||
|
|
215
Libraries/LibWeb/Encoding/TextEncoderStream.cpp
Normal file
215
Libraries/LibWeb/Encoding/TextEncoderStream.cpp
Normal file
|
@ -0,0 +1,215 @@
|
|||
/*
|
||||
* Copyright (c) 2025, Luke Wilde <luke@ladybird.org>
|
||||
*
|
||||
* SPDX-License-Identifier: BSD-2-Clause
|
||||
*/
|
||||
|
||||
#include <AK/UnicodeUtils.h>
|
||||
#include <LibJS/Runtime/ArrayBuffer.h>
|
||||
#include <LibJS/Runtime/Realm.h>
|
||||
#include <LibJS/Runtime/TypedArray.h>
|
||||
#include <LibWeb/Bindings/ExceptionOrUtils.h>
|
||||
#include <LibWeb/Bindings/Intrinsics.h>
|
||||
#include <LibWeb/Bindings/TextEncoderStreamPrototype.h>
|
||||
#include <LibWeb/Encoding/TextEncoderStream.h>
|
||||
#include <LibWeb/Streams/AbstractOperations.h>
|
||||
#include <LibWeb/Streams/TransformStream.h>
|
||||
#include <LibWeb/WebIDL/Promise.h>
|
||||
|
||||
namespace Web::Encoding {
|
||||
|
||||
GC_DEFINE_ALLOCATOR(TextEncoderStream);
|
||||
|
||||
// https://encoding.spec.whatwg.org/#dom-textencoderstream
|
||||
WebIDL::ExceptionOr<GC::Ref<TextEncoderStream>> TextEncoderStream::construct_impl(JS::Realm& realm)
|
||||
{
|
||||
// 1. Set this’s encoder to an instance of the UTF-8 encoder.
|
||||
// NOTE: No-op, as AK::String is already in UTF-8 format.
|
||||
|
||||
// NOTE: We do these steps first so that we may store it as nonnull in the GenericTransformStream.
|
||||
// 4. Let transformStream be a new TransformStream.
|
||||
auto transform_stream = realm.create<Streams::TransformStream>(realm);
|
||||
|
||||
// 6. Set this's transform to a new TransformStream.
|
||||
auto stream = realm.create<TextEncoderStream>(realm, transform_stream);
|
||||
|
||||
// 2. Let transformAlgorithm be an algorithm which takes a chunk argument and runs the encode and enqueue a chunk
|
||||
// algorithm with this and chunk.
|
||||
auto transform_algorithm = GC::create_function(realm.heap(), [stream](JS::Value chunk) -> GC::Ref<WebIDL::Promise> {
|
||||
auto& realm = stream->realm();
|
||||
auto& vm = realm.vm();
|
||||
|
||||
if (auto result = stream->encode_and_enqueue_chunk(chunk); result.is_error()) {
|
||||
auto throw_completion = Bindings::exception_to_throw_completion(vm, result.exception());
|
||||
return WebIDL::create_rejected_promise(realm, *throw_completion.release_value());
|
||||
}
|
||||
|
||||
return WebIDL::create_resolved_promise(realm, JS::js_undefined());
|
||||
});
|
||||
|
||||
// 3. Let flushAlgorithm be an algorithm which runs the encode and flush algorithm with this.
|
||||
auto flush_algorithm = GC::create_function(realm.heap(), [stream]() -> GC::Ref<WebIDL::Promise> {
|
||||
auto& realm = stream->realm();
|
||||
auto& vm = realm.vm();
|
||||
|
||||
if (auto result = stream->encode_and_flush(); result.is_error()) {
|
||||
auto throw_completion = Bindings::exception_to_throw_completion(vm, result.exception());
|
||||
return WebIDL::create_rejected_promise(realm, *throw_completion.release_value());
|
||||
}
|
||||
|
||||
return WebIDL::create_resolved_promise(realm, JS::js_undefined());
|
||||
});
|
||||
|
||||
// 5. Set up transformStream with transformAlgorithm set to transformAlgorithm and flushAlgorithm set to flushAlgorithm.
|
||||
transform_stream->set_up(transform_algorithm, flush_algorithm);
|
||||
|
||||
return stream;
|
||||
}
|
||||
|
||||
// The TransformStream is created by construct_impl() and handed to the GenericTransformStreamMixin
// base, which keeps the nonnull reference used as m_transform by the encode algorithms.
TextEncoderStream::TextEncoderStream(JS::Realm& realm, GC::Ref<Streams::TransformStream> transform)
    : Bindings::PlatformObject(realm)
    , Streams::GenericTransformStreamMixin(transform)
{
}

TextEncoderStream::~TextEncoderStream() = default;
|
||||
|
||||
void TextEncoderStream::initialize(JS::Realm& realm)
{
    Base::initialize(realm);
    // Hooks up the TextEncoderStream prototype generated from the IDL bindings.
    WEB_SET_PROTOTYPE_FOR_INTERFACE(TextEncoderStream);
}
|
||||
|
||||
void TextEncoderStream::visit_edges(JS::Cell::Visitor& visitor)
{
    Base::visit_edges(visitor);
    // The mixin owns GC references of its own (it was constructed with the TransformStream),
    // so its edges must be visited explicitly alongside the PlatformObject base's.
    Streams::GenericTransformStreamMixin::visit_edges(visitor);
}
|
||||
|
||||
// https://encoding.spec.whatwg.org/#encode-and-enqueue-a-chunk
// Converts one incoming chunk to a string, UTF-8-encodes it, and enqueues the resulting bytes
// (as a Uint8Array) on the transform's controller.
WebIDL::ExceptionOr<void> TextEncoderStream::encode_and_enqueue_chunk(JS::Value chunk)
{
    // Spec Note: This is equivalent to the "convert a string into a scalar value string" algorithm from the Infra
    //            Standard, but allows for surrogate pairs that are split between strings. [INFRA]

    auto& realm = this->realm();
    auto& vm = this->vm();

    // 1. Let input be the result of converting chunk to a DOMString.
    // NOTE: This may throw (e.g. a chunk whose toString() throws), which propagates via TRY and
    //       ultimately rejects/errors the streams through the transform algorithm's error path.
    auto input = TRY(chunk.to_string(vm));

    // 2. Convert input to an I/O queue of code units.
    // Spec Note: DOMString, as well as an I/O queue of code units rather than scalar values, are used here so that a
    //            surrogate pair that is split between chunks can be reassembled into the appropriate scalar value.
    //            The behavior is otherwise identical to USVString. In particular, lone surrogates will be replaced
    //            with U+FFFD.
    auto code_points = input.code_points();
    auto it = code_points.begin();

    // 3. Let output be the I/O queue of bytes « end-of-queue ».
    ByteBuffer output;

    // 4. While true:
    while (true) {
        // 2. If item is end-of-queue, then:
        // NOTE: This is done out-of-order so that we're not dereferencing a code point iterator that points to the end.
        if (it.done()) {
            // 1. Convert output into a byte sequence.
            // Note: No-op.

            // 2. If output is non-empty, then:
            if (!output.is_empty()) {
                // 1. Let chunk be a Uint8Array object wrapping an ArrayBuffer containing output.
                auto array_buffer = JS::ArrayBuffer::create(realm, move(output));
                auto array = JS::Uint8Array::create(realm, array_buffer->byte_length(), *array_buffer);

                // 2. Enqueue chunk into encoder’s transform.
                TRY(Streams::transform_stream_default_controller_enqueue(*m_transform->controller(), array));
            }

            // 3. Return.
            return {};
        }

        // 1. Let item be the result of reading from input.
        auto item = *it;

        // 3. Let result be the result of executing the convert code unit to scalar value algorithm with encoder, item and input.
        // NOTE: convert_code_unit_to_scalar_value() advances `it` itself via a scope guard, except
        //       when it "restores" the item to the input — so the loop must not increment `it` here.
        auto result = convert_code_unit_to_scalar_value(item, it);

        // 4. If result is not continue, then process an item with result, encoder’s encoder, input, output, and "fatal".
        // NOTE: An empty Optional represents "continue" (a pending leading surrogate was stashed).
        if (result.has_value()) {
            (void)AK::UnicodeUtils::code_point_to_utf8(result.value(), [&output](char utf8_byte) {
                output.append(static_cast<u8>(utf8_byte));
            });
        }
    }
}
|
||||
|
||||
// https://encoding.spec.whatwg.org/#encode-and-flush
|
||||
WebIDL::ExceptionOr<void> TextEncoderStream::encode_and_flush()
|
||||
{
|
||||
auto& realm = this->realm();
|
||||
|
||||
// 1. If encoder’s leading surrogate is non-null, then:
|
||||
if (m_leading_surrogate.has_value()) {
|
||||
// 1. Let chunk be a Uint8Array object wrapping an ArrayBuffer containing 0xEF 0xBF 0xBD.
|
||||
// Spec Note: This is U+FFFD (<28>) in UTF-8 bytes.
|
||||
constexpr static u8 replacement_character_utf8_bytes[3] = { 0xEF, 0xBF, 0xBD };
|
||||
auto bytes = MUST(ByteBuffer::copy(replacement_character_utf8_bytes, sizeof(replacement_character_utf8_bytes)));
|
||||
auto array_buffer = JS::ArrayBuffer::create(realm, bytes);
|
||||
auto chunk = JS::Uint8Array::create(realm, array_buffer->byte_length(), *array_buffer);
|
||||
|
||||
// 2. Enqueue chunk into encoder’s transform.
|
||||
TRY(Streams::transform_stream_default_controller_enqueue(*m_transform->controller(), chunk));
|
||||
}
|
||||
|
||||
return {};
|
||||
}
|
||||
|
||||
// https://encoding.spec.whatwg.org/#convert-code-unit-to-scalar-value
// Returns the scalar value produced by `item` (possibly combining it with a previously stashed
// leading surrogate), or an empty Optional meaning the spec's "continue" (item was a leading
// surrogate and has been stashed in m_leading_surrogate for the next call).
// The iterator is advanced by this function (via the scope guard below) in every case except
// spec step 1.4, where the item is restored to the input to be re-read by the caller's loop.
Optional<u32> TextEncoderStream::convert_code_unit_to_scalar_value(u32 item, Utf8CodePointIterator& code_point_iterator)
{
    // Advances past `item` on every return path unless explicitly disarmed ("restore item to input").
    ArmedScopeGuard move_to_next_code_point_guard = [&] {
        ++code_point_iterator;
    };

    // 1. If encoder’s leading surrogate is non-null, then:
    if (m_leading_surrogate.has_value()) {
        // 1. Let leadingSurrogate be encoder’s leading surrogate.
        auto leading_surrogate = m_leading_surrogate.value();

        // 2. Set encoder’s leading surrogate to null.
        m_leading_surrogate.clear();

        // 3. If item is a trailing surrogate, then return a scalar value from surrogates given leadingSurrogate
        //    and item.
        if (Utf16View::is_low_surrogate(item)) {
            // https://encoding.spec.whatwg.org/#scalar-value-from-surrogates
            // To obtain a scalar value from surrogates, given a leading surrogate leading and a trailing surrogate
            // trailing, return 0x10000 + ((leading − 0xD800) << 10) + (trailing − 0xDC00).
            return Utf16View::decode_surrogate_pair(leading_surrogate, item);
        }

        // 4. Restore item to input.
        // NOTE: Disarming the guard leaves the iterator in place, so the caller re-reads `item`
        //       on its next loop iteration (this time with no pending leading surrogate).
        move_to_next_code_point_guard.disarm();

        // 5. Return U+FFFD.
        return 0xFFFD;
    }

    // 2. If item is a leading surrogate, then set encoder’s leading surrogate to item and return continue.
    if (Utf16View::is_high_surrogate(item)) {
        m_leading_surrogate = item;
        return OptionalNone {};
    }

    // 3. If item is a trailing surrogate, then return U+FFFD.
    if (Utf16View::is_low_surrogate(item))
        return 0xFFFD;

    // 4. Return item.
    return item;
}
|
||||
|
||||
}
|
41
Libraries/LibWeb/Encoding/TextEncoderStream.h
Normal file
41
Libraries/LibWeb/Encoding/TextEncoderStream.h
Normal file
|
@ -0,0 +1,41 @@
|
|||
/*
|
||||
* Copyright (c) 2025, Luke Wilde <luke@ladybird.org>
|
||||
*
|
||||
* SPDX-License-Identifier: BSD-2-Clause
|
||||
*/
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <LibWeb/Bindings/PlatformObject.h>
|
||||
#include <LibWeb/Encoding/TextEncoderCommon.h>
|
||||
#include <LibWeb/Streams/GenericTransformStream.h>
|
||||
|
||||
namespace Web::Encoding {
|
||||
|
||||
// https://encoding.spec.whatwg.org/#textencoderstream
// A TransformStream that UTF-8-encodes incoming string chunks into Uint8Array chunks.
// The readable/writable attributes come from GenericTransformStreamMixin; the `encoding`
// attribute comes from TextEncoderCommonMixin.
class TextEncoderStream final
    : public Bindings::PlatformObject
    , public Streams::GenericTransformStreamMixin
    , public TextEncoderCommonMixin {
    WEB_PLATFORM_OBJECT(TextEncoderStream, Bindings::PlatformObject);
    GC_DECLARE_ALLOCATOR(TextEncoderStream);

public:
    static WebIDL::ExceptionOr<GC::Ref<TextEncoderStream>> construct_impl(JS::Realm&);
    virtual ~TextEncoderStream() override;

private:
    TextEncoderStream(JS::Realm&, GC::Ref<Streams::TransformStream>);

    virtual void initialize(JS::Realm&) override;
    virtual void visit_edges(Cell::Visitor&) override;

    // https://encoding.spec.whatwg.org/#encode-and-enqueue-a-chunk
    WebIDL::ExceptionOr<void> encode_and_enqueue_chunk(JS::Value);
    // https://encoding.spec.whatwg.org/#encode-and-flush
    WebIDL::ExceptionOr<void> encode_and_flush();

    // https://encoding.spec.whatwg.org/#convert-code-unit-to-scalar-value
    // Empty Optional means the spec's "continue" (a leading surrogate was stashed below).
    Optional<u32> convert_code_unit_to_scalar_value(u32 item, Utf8CodePointIterator& code_point_iterator);

    // https://encoding.spec.whatwg.org/#textencoderstream-pending-high-surrogate
    Optional<u32> m_leading_surrogate;
};
|
||||
|
||||
}
|
11
Libraries/LibWeb/Encoding/TextEncoderStream.idl
Normal file
11
Libraries/LibWeb/Encoding/TextEncoderStream.idl
Normal file
|
@ -0,0 +1,11 @@
|
|||
#import <Encoding/TextEncoder.idl>
#import <Streams/GenericTransformStream.idl>

// https://encoding.spec.whatwg.org/#textencoderstream
[Exposed=*]
interface TextEncoderStream {
    constructor();
};

// Provides the `encoding` attribute (always "utf-8").
TextEncoderStream includes TextEncoderCommon;
// Provides the `readable` and `writable` attributes.
TextEncoderStream includes GenericTransformStream;
|
|
@ -327,6 +327,7 @@ class XMLSerializer;
|
|||
namespace Web::Encoding {
|
||||
class TextDecoder;
|
||||
class TextEncoder;
|
||||
class TextEncoderStream;
|
||||
|
||||
struct TextDecodeOptions;
|
||||
struct TextDecoderOptions;
|
||||
|
|
|
@ -88,6 +88,7 @@ libweb_js_bindings(DOMURL/DOMURL)
|
|||
libweb_js_bindings(DOMURL/URLSearchParams ITERABLE)
|
||||
libweb_js_bindings(Encoding/TextDecoder)
|
||||
libweb_js_bindings(Encoding/TextEncoder)
|
||||
libweb_js_bindings(Encoding/TextEncoderStream)
|
||||
libweb_js_bindings(EntriesAPI/FileSystemEntry)
|
||||
libweb_js_bindings(EventTiming/PerformanceEventTiming)
|
||||
libweb_js_bindings(Fetch/Headers ITERABLE)
|
||||
|
|
|
@ -381,6 +381,7 @@ SyntaxError
|
|||
Text
|
||||
TextDecoder
|
||||
TextEncoder
|
||||
TextEncoderStream
|
||||
TextEvent
|
||||
TextMetrics
|
||||
TextTrack
|
||||
|
|
|
@ -0,0 +1,3 @@
|
|||
Harness status: Error
|
||||
|
||||
Found 0 tests
|
|
@ -0,0 +1,3 @@
|
|||
Harness status: Error
|
||||
|
||||
Found 0 tests
|
|
@ -0,0 +1,10 @@
|
|||
Harness status: OK
|
||||
|
||||
Found 4 tests
|
||||
|
||||
2 Pass
|
||||
2 Fail
|
||||
Fail write() should not complete until read relieves backpressure for TextDecoderStream
|
||||
Fail additional writes should wait for backpressure to be relieved for class TextDecoderStream
|
||||
Pass write() should not complete until read relieves backpressure for TextEncoderStream
|
||||
Pass additional writes should wait for backpressure to be relieved for class TextEncoderStream
|
|
@ -0,0 +1,12 @@
|
|||
Harness status: Error
|
||||
|
||||
Found 6 tests
|
||||
|
||||
1 Pass
|
||||
5 Fail
|
||||
Pass a chunk that cannot be converted to a string should error the streams
|
||||
Fail input of type undefined should be converted correctly to string
|
||||
Fail input of type null should be converted correctly to string
|
||||
Fail input of type numeric should be converted correctly to string
|
||||
Fail input of type object should be converted correctly to string
|
||||
Fail input of type array should be converted correctly to string
|
|
@ -0,0 +1,24 @@
|
|||
Harness status: OK
|
||||
|
||||
Found 19 tests
|
||||
|
||||
19 Pass
|
||||
Pass encoding one string of UTF-8 should give one complete chunk
|
||||
Pass a character split between chunks should be correctly encoded
|
||||
Pass a character following one split between chunks should be correctly encoded
|
||||
Pass two consecutive astral characters each split down the middle should be correctly reassembled
|
||||
Pass two consecutive astral characters each split down the middle with an invalid surrogate in the middle should be correctly encoded
|
||||
Pass a stream ending in a leading surrogate should emit a replacement character as a final chunk
|
||||
Pass an unmatched surrogate at the end of a chunk followed by an astral character in the next chunk should be replaced with the replacement character at the start of the next output chunk
|
||||
Pass an unmatched surrogate at the end of a chunk followed by an ascii character in the next chunk should be replaced with the replacement character at the start of the next output chunk
|
||||
Pass an unmatched surrogate at the end of a chunk followed by a plane 1 character split into two chunks should result in the encoded plane 1 character appearing in the last output chunk
|
||||
Pass two leading chunks should result in two replacement characters
|
||||
Pass a non-terminal unpaired leading surrogate should immediately be replaced
|
||||
Pass a terminal unpaired trailing surrogate should immediately be replaced
|
||||
Pass a leading surrogate chunk should be carried past empty chunks
|
||||
Pass a leading surrogate chunk should error when it is clear it didn't form a pair
|
||||
Pass an empty string should result in no output chunk
|
||||
Pass a leading empty chunk should be ignored
|
||||
Pass a trailing empty chunk should be ignored
|
||||
Pass a plain ASCII chunk should be converted
|
||||
Pass characters in the ISO-8859-1 range should be encoded correctly
|
|
@ -0,0 +1,10 @@
|
|||
Harness status: OK
|
||||
|
||||
Found 4 tests
|
||||
|
||||
2 Pass
|
||||
2 Fail
|
||||
Fail TextDecoderStream: write in detached realm should succeed
|
||||
Pass TextEncoderStream: write in detached realm should succeed
|
||||
Pass TextEncoderStream: close in detached realm should succeed
|
||||
Fail TextDecoderStream: close in detached realm should succeed
|
|
@ -0,0 +1,3 @@
|
|||
Harness status: Error
|
||||
|
||||
Found 0 tests
|
|
@ -0,0 +1,3 @@
|
|||
Harness status: Error
|
||||
|
||||
Found 0 tests
|
|
@ -0,0 +1,8 @@
|
|||
Harness status: OK
|
||||
|
||||
Found 2 tests
|
||||
|
||||
1 Pass
|
||||
1 Fail
|
||||
Pass TextEncoderStream readable and writable properties must pass brand checks
|
||||
Fail TextDecoderStream readable and writable properties must pass brand checks
|
|
@ -0,0 +1,18 @@
|
|||
Harness status: Error
|
||||
|
||||
Found 12 tests
|
||||
|
||||
2 Pass
|
||||
10 Fail
|
||||
Pass a TextEncoderStream object should be associated with the realm the constructor came from
|
||||
Pass TextEncoderStream's readable and writable attributes should come from the same realm as the constructor definition
|
||||
Fail the output chunks when read is called after write should come from the same realm as the constructor of TextEncoderStream
|
||||
Fail the output chunks when write is called with a pending read should come from the same realm as the constructor of TextEncoderStream
|
||||
Fail TypeError for unconvertable chunk should come from constructor realm of TextEncoderStream
|
||||
Fail a TextDecoderStream object should be associated with the realm the constructor came from
|
||||
Fail TextDecoderStream's readable and writable attributes should come from the same realm as the constructor definition
|
||||
Fail the result object when read is called after write should come from the same realm as the constructor of TextDecoderStream
|
||||
Fail the result object when write is called with a pending read should come from the same realm as the constructor of TextDecoderStream
|
||||
Fail TypeError for chunk with the wrong type should come from constructor realm of TextDecoderStream
|
||||
Fail TypeError for invalid chunk should come from constructor realm of TextDecoderStream
|
||||
Fail TypeError for incomplete input should come from constructor realm of TextDecoderStream
|
|
@ -0,0 +1,15 @@
|
|||
<!doctype html>
|
||||
<meta charset=utf-8>
|
||||
|
||||
<script>
|
||||
self.GLOBAL = {
|
||||
isWindow: function() { return true; },
|
||||
isWorker: function() { return false; },
|
||||
isShadowRealm: function() { return false; },
|
||||
};
|
||||
</script>
|
||||
<script src="../../resources/testharness.js"></script>
|
||||
<script src="../../resources/testharnessreport.js"></script>
|
||||
|
||||
<div id=log></div>
|
||||
<script src="../../encoding/streams/backpressure.any.js"></script>
|
|
@ -0,0 +1,60 @@
|
|||
// META: global=window,worker,shadowrealm
|
||||
|
||||
'use strict';
|
||||
|
||||
const classes = [
|
||||
{
|
||||
name: 'TextDecoderStream',
|
||||
input: new Uint8Array([65])
|
||||
},
|
||||
{
|
||||
name: 'TextEncoderStream',
|
||||
input: 'A'
|
||||
}
|
||||
];
|
||||
|
||||
const microtasksRun = () => new Promise(resolve => step_timeout(resolve, 0));
|
||||
|
||||
for (const streamClass of classes) {
|
||||
promise_test(async () => {
|
||||
const stream = new self[streamClass.name]();
|
||||
const writer = stream.writable.getWriter();
|
||||
const reader = stream.readable.getReader();
|
||||
const events = [];
|
||||
await microtasksRun();
|
||||
const writePromise = writer.write(streamClass.input);
|
||||
writePromise.then(() => events.push('write'));
|
||||
await microtasksRun();
|
||||
events.push('paused');
|
||||
await reader.read();
|
||||
events.push('read');
|
||||
await writePromise;
|
||||
assert_array_equals(events, ['paused', 'read', 'write'],
|
||||
'write should happen after read');
|
||||
}, 'write() should not complete until read relieves backpressure for ' +
|
||||
`${streamClass.name}`);
|
||||
|
||||
promise_test(async () => {
|
||||
const stream = new self[streamClass.name]();
|
||||
const writer = stream.writable.getWriter();
|
||||
const reader = stream.readable.getReader();
|
||||
const events = [];
|
||||
await microtasksRun();
|
||||
const readPromise1 = reader.read();
|
||||
readPromise1.then(() => events.push('read1'));
|
||||
const writePromise1 = writer.write(streamClass.input);
|
||||
const writePromise2 = writer.write(streamClass.input);
|
||||
writePromise1.then(() => events.push('write1'));
|
||||
writePromise2.then(() => events.push('write2'));
|
||||
await microtasksRun();
|
||||
events.push('paused');
|
||||
const readPromise2 = reader.read();
|
||||
readPromise2.then(() => events.push('read2'));
|
||||
await Promise.all([writePromise1, writePromise2,
|
||||
readPromise1, readPromise2]);
|
||||
assert_array_equals(events, ['read1', 'write1', 'paused', 'read2',
|
||||
'write2'],
|
||||
'writes should not happen before read2');
|
||||
}, 'additional writes should wait for backpressure to be relieved for ' +
|
||||
`class ${streamClass.name}`);
|
||||
}
|
|
@ -0,0 +1,40 @@
|
|||
<!doctype html>
|
||||
<meta charset=utf-8>
|
||||
|
||||
<script src="../../resources/testharness.js"></script>
|
||||
<script src="../../resources/testharnessreport.js"></script>
|
||||
<script src="../../resources/testharness-shadowrealm-outer.js"></script>
|
||||
<script>
|
||||
(async function() {
|
||||
const outer = new ShadowRealm();
|
||||
outer.evaluate(`
|
||||
var inner = new ShadowRealm();
|
||||
`);
|
||||
await shadowRealmEvalAsync(outer, `
|
||||
await import("../../resources/testharness-shadowrealm-outer.js");
|
||||
await shadowRealmEvalAsync(inner, \`
|
||||
await import("/resources/testharness-shadowrealm-inner.js");
|
||||
await import("../../resources/testharness.js");
|
||||
\`);
|
||||
`);
|
||||
|
||||
outer.evaluate(`
|
||||
inner.evaluate("setShadowRealmGlobalProperties")
|
||||
`)(location.search, fetchAdaptor);
|
||||
|
||||
await shadowRealmEvalAsync(outer, `
|
||||
await shadowRealmEvalAsync(inner, \`
|
||||
|
||||
await import("/encoding/streams/backpressure.any.js");
|
||||
\`);
|
||||
`);
|
||||
|
||||
outer.evaluate(`
|
||||
function begin_shadow_realm_tests(windowCallback) {
|
||||
inner.evaluate("begin_shadow_realm_tests")(windowCallback);
|
||||
}
|
||||
`);
|
||||
await fetch_tests_from_shadow_realm(outer);
|
||||
done();
|
||||
})().catch(e => setup(() => { throw e; }));
|
||||
</script>
|
|
@ -0,0 +1,24 @@
|
|||
<!doctype html>
|
||||
<meta charset=utf-8>
|
||||
|
||||
<script src="../../resources/testharness.js"></script>
|
||||
<script src="../../resources/testharnessreport.js"></script>
|
||||
<script src="../../resources/testharness-shadowrealm-outer.js"></script>
|
||||
<script>
|
||||
(async function() {
|
||||
const r = new ShadowRealm();
|
||||
await shadowRealmEvalAsync(r, `
|
||||
await import("/resources/testharness-shadowrealm-inner.js");
|
||||
await import("../../resources/testharness.js");
|
||||
`);
|
||||
r.evaluate("setShadowRealmGlobalProperties")(location.search, fetchAdaptor);
|
||||
|
||||
await shadowRealmEvalAsync(r, `
|
||||
|
||||
await import("/encoding/streams/backpressure.any.js");
|
||||
`);
|
||||
|
||||
await fetch_tests_from_shadow_realm(r);
|
||||
done();
|
||||
})().catch(e => setup(() => { throw e; }));
|
||||
</script>
|
|
@ -0,0 +1,16 @@
|
|||
<!doctype html>
|
||||
<meta charset=utf-8>
|
||||
|
||||
<script>
|
||||
self.GLOBAL = {
|
||||
isWindow: function() { return true; },
|
||||
isWorker: function() { return false; },
|
||||
isShadowRealm: function() { return false; },
|
||||
};
|
||||
</script>
|
||||
<script src="../../resources/testharness.js"></script>
|
||||
<script src="../../resources/testharnessreport.js"></script>
|
||||
<script src="resources/readable-stream-from-array.js"></script>
|
||||
<script src="resources/readable-stream-to-array.js"></script>
|
||||
<div id=log></div>
|
||||
<script src="../../encoding/streams/encode-bad-chunks.any.js"></script>
|
|
@ -0,0 +1,63 @@
|
|||
// META: global=window,worker
|
||||
// META: script=resources/readable-stream-from-array.js
|
||||
// META: script=resources/readable-stream-to-array.js
|
||||
|
||||
'use strict';
|
||||
|
||||
const error1 = new Error('error1');
|
||||
error1.name = 'error1';
|
||||
|
||||
promise_test(t => {
|
||||
const ts = new TextEncoderStream();
|
||||
const writer = ts.writable.getWriter();
|
||||
const reader = ts.readable.getReader();
|
||||
const writePromise = writer.write({
|
||||
toString() { throw error1; }
|
||||
});
|
||||
const readPromise = reader.read();
|
||||
return Promise.all([
|
||||
promise_rejects_exactly(t, error1, readPromise, 'read should reject with error1'),
|
||||
promise_rejects_exactly(t, error1, writePromise, 'write should reject with error1'),
|
||||
promise_rejects_exactly(t, error1, reader.closed, 'readable should be errored with error1'),
|
||||
promise_rejects_exactly(t, error1, writer.closed, 'writable should be errored with error1'),
|
||||
]);
|
||||
}, 'a chunk that cannot be converted to a string should error the streams');
|
||||
|
||||
const oddInputs = [
|
||||
{
|
||||
name: 'undefined',
|
||||
value: undefined,
|
||||
expected: 'undefined'
|
||||
},
|
||||
{
|
||||
name: 'null',
|
||||
value: null,
|
||||
expected: 'null'
|
||||
},
|
||||
{
|
||||
name: 'numeric',
|
||||
value: 3.14,
|
||||
expected: '3.14'
|
||||
},
|
||||
{
|
||||
name: 'object',
|
||||
value: {},
|
||||
expected: '[object Object]'
|
||||
},
|
||||
{
|
||||
name: 'array',
|
||||
value: ['hi'],
|
||||
expected: 'hi'
|
||||
}
|
||||
];
|
||||
|
||||
for (const input of oddInputs) {
|
||||
promise_test(async () => {
|
||||
const outputReadable = readableStreamFromArray([input.value])
|
||||
.pipeThrough(new TextEncoderStream())
|
||||
.pipeThrough(new TextDecoderStream());
|
||||
const output = await readableStreamToArray(outputReadable);
|
||||
assert_equals(output.length, 1, 'output should contain one chunk');
|
||||
assert_equals(output[0], input.expected, 'output should be correct');
|
||||
}, `input of type ${input.name} should be converted correctly to string`);
|
||||
}
|
|
@ -0,0 +1,16 @@
|
|||
<!doctype html>
|
||||
<meta charset=utf-8>
|
||||
|
||||
<script>
|
||||
self.GLOBAL = {
|
||||
isWindow: function() { return true; },
|
||||
isWorker: function() { return false; },
|
||||
isShadowRealm: function() { return false; },
|
||||
};
|
||||
</script>
|
||||
<script src="../../resources/testharness.js"></script>
|
||||
<script src="../../resources/testharnessreport.js"></script>
|
||||
<script src="resources/readable-stream-from-array.js"></script>
|
||||
<script src="resources/readable-stream-to-array.js"></script>
|
||||
<div id=log></div>
|
||||
<script src="../../encoding/streams/encode-utf8.any.js"></script>
|
|
@ -0,0 +1,144 @@
|
|||
// META: global=window,worker
|
||||
// META: script=resources/readable-stream-from-array.js
|
||||
// META: script=resources/readable-stream-to-array.js
|
||||
|
||||
'use strict';
|
||||
const inputString = 'I \u{1F499} streams';
|
||||
const expectedOutputBytes = [0x49, 0x20, 0xf0, 0x9f, 0x92, 0x99, 0x20, 0x73,
|
||||
0x74, 0x72, 0x65, 0x61, 0x6d, 0x73];
|
||||
// This is a character that must be represented in two code units in a string,
|
||||
// ie. it is not in the Basic Multilingual Plane.
|
||||
const astralCharacter = '\u{1F499}'; // BLUE HEART
|
||||
const astralCharacterEncoded = [0xf0, 0x9f, 0x92, 0x99];
|
||||
const leading = astralCharacter[0];
|
||||
const trailing = astralCharacter[1];
|
||||
const replacementEncoded = [0xef, 0xbf, 0xbd];
|
||||
|
||||
// These tests assume that the implementation correctly classifies leading and
|
||||
// trailing surrogates and treats all the code units in each set equivalently.
|
||||
|
||||
const testCases = [
|
||||
{
|
||||
input: [inputString],
|
||||
output: [expectedOutputBytes],
|
||||
description: 'encoding one string of UTF-8 should give one complete chunk'
|
||||
},
|
||||
{
|
||||
input: [leading, trailing],
|
||||
output: [astralCharacterEncoded],
|
||||
description: 'a character split between chunks should be correctly encoded'
|
||||
},
|
||||
{
|
||||
input: [leading, trailing + astralCharacter],
|
||||
output: [astralCharacterEncoded.concat(astralCharacterEncoded)],
|
||||
description: 'a character following one split between chunks should be ' +
|
||||
'correctly encoded'
|
||||
},
|
||||
{
|
||||
input: [leading, trailing + leading, trailing],
|
||||
output: [astralCharacterEncoded, astralCharacterEncoded],
|
||||
description: 'two consecutive astral characters each split down the ' +
|
||||
'middle should be correctly reassembled'
|
||||
},
|
||||
{
|
||||
input: [leading, trailing + leading + leading, trailing],
|
||||
output: [astralCharacterEncoded.concat(replacementEncoded), astralCharacterEncoded],
|
||||
description: 'two consecutive astral characters each split down the ' +
|
||||
'middle with an invalid surrogate in the middle should be correctly ' +
|
||||
'encoded'
|
||||
},
|
||||
{
|
||||
input: [leading],
|
||||
output: [replacementEncoded],
|
||||
description: 'a stream ending in a leading surrogate should emit a ' +
|
||||
'replacement character as a final chunk'
|
||||
},
|
||||
{
|
||||
input: [leading, astralCharacter],
|
||||
output: [replacementEncoded.concat(astralCharacterEncoded)],
|
||||
description: 'an unmatched surrogate at the end of a chunk followed by ' +
|
||||
'an astral character in the next chunk should be replaced with ' +
|
||||
'the replacement character at the start of the next output chunk'
|
||||
},
|
||||
{
|
||||
input: [leading, 'A'],
|
||||
output: [replacementEncoded.concat([65])],
|
||||
description: 'an unmatched surrogate at the end of a chunk followed by ' +
|
||||
'an ascii character in the next chunk should be replaced with ' +
|
||||
'the replacement character at the start of the next output chunk'
|
||||
},
|
||||
{
|
||||
input: [leading, leading, trailing],
|
||||
output: [replacementEncoded, astralCharacterEncoded],
|
||||
description: 'an unmatched surrogate at the end of a chunk followed by ' +
|
||||
'a plane 1 character split into two chunks should result in ' +
|
||||
'the encoded plane 1 character appearing in the last output chunk'
|
||||
},
|
||||
{
|
||||
input: [leading, leading],
|
||||
output: [replacementEncoded, replacementEncoded],
|
||||
description: 'two leading chunks should result in two replacement ' +
|
||||
'characters'
|
||||
},
|
||||
{
|
||||
input: [leading + leading, trailing],
|
||||
output: [replacementEncoded, astralCharacterEncoded],
|
||||
description: 'a non-terminal unpaired leading surrogate should ' +
|
||||
'immediately be replaced'
|
||||
},
|
||||
{
|
||||
input: [trailing, astralCharacter],
|
||||
output: [replacementEncoded, astralCharacterEncoded],
|
||||
description: 'a terminal unpaired trailing surrogate should ' +
|
||||
'immediately be replaced'
|
||||
},
|
||||
{
|
||||
input: [leading, '', trailing],
|
||||
output: [astralCharacterEncoded],
|
||||
description: 'a leading surrogate chunk should be carried past empty chunks'
|
||||
},
|
||||
{
|
||||
input: [leading, ''],
|
||||
output: [replacementEncoded],
|
||||
description: 'a leading surrogate chunk should error when it is clear ' +
|
||||
'it didn\'t form a pair'
|
||||
},
|
||||
{
|
||||
input: [''],
|
||||
output: [],
|
||||
description: 'an empty string should result in no output chunk'
|
||||
},
|
||||
{
|
||||
input: ['', inputString],
|
||||
output: [expectedOutputBytes],
|
||||
description: 'a leading empty chunk should be ignored'
|
||||
},
|
||||
{
|
||||
input: [inputString, ''],
|
||||
output: [expectedOutputBytes],
|
||||
description: 'a trailing empty chunk should be ignored'
|
||||
},
|
||||
{
|
||||
input: ['A'],
|
||||
output: [[65]],
|
||||
description: 'a plain ASCII chunk should be converted'
|
||||
},
|
||||
{
|
||||
input: ['\xff'],
|
||||
output: [[195, 191]],
|
||||
description: 'characters in the ISO-8859-1 range should be encoded correctly'
|
||||
},
|
||||
];
|
||||
|
||||
// Run one promise_test per table entry: pipe the input string chunks through a
// TextEncoderStream and compare the collected output byte chunks against the
// expected ones.
for (const testCase of testCases) {
  const {input: inputChunks, output: expectedChunks, description} = testCase;
  promise_test(async () => {
    const encoded = readableStreamFromArray(inputChunks)
        .pipeThrough(new TextEncoderStream());
    const actualChunks = await readableStreamToArray(encoded);
    assert_equals(actualChunks.length, expectedChunks.length,
                  'number of chunks should match');
    expectedChunks.forEach((expected, index) => {
      assert_array_equals(actualChunks[index], expected,
                          `chunk ${index} should match`);
    });
  }, description);
}
|
|
@ -0,0 +1,8 @@
|
|||
<!doctype html>
|
||||
<meta charset=utf-8>
|
||||
|
||||
<script src="../../resources/testharness.js"></script>
|
||||
<script src="../../resources/testharnessreport.js"></script>
|
||||
|
||||
<div id=log></div>
|
||||
<script src="../../encoding/streams/invalid-realm.window.js"></script>
|
|
@ -0,0 +1,37 @@
|
|||
// Text*Stream should still work even if the realm is detached.

// Creates an <iframe>, attaches it to the document, and returns it.
function addIframe() {
  const frame = document.createElement('iframe');
  document.body.appendChild(frame);
  return frame;
}

promise_test(async t => {
  const frame = addIframe();
  const stream = new frame.contentWindow.TextDecoderStream();
  const pendingRead = stream.readable.getReader().read();
  const writer = stream.writable.getWriter();
  await writer.ready;
  frame.remove();
  // Both the write and the pending read must still settle successfully even
  // though the stream's realm is now detached.
  return Promise.all([writer.write(new Uint8Array([65])), pendingRead]);
}, 'TextDecoderStream: write in detached realm should succeed');

promise_test(async t => {
  const frame = addIframe();
  const stream = new frame.contentWindow.TextEncoderStream();
  const pendingRead = stream.readable.getReader().read();
  const writer = stream.writable.getWriter();
  await writer.ready;
  frame.remove();
  return Promise.all([writer.write('A'), pendingRead]);
}, 'TextEncoderStream: write in detached realm should succeed');

for (const type of ['TextEncoderStream', 'TextDecoderStream']) {
  promise_test(async t => {
    const frame = addIframe();
    const stream = new frame.contentWindow[type]();
    frame.remove();
    return stream.writable.close();
  }, `${type}: close in detached realm should succeed`);
}
|
|
@ -0,0 +1,15 @@
|
|||
<!doctype html>
|
||||
<meta charset=utf-8>
|
||||
|
||||
<script>
|
||||
self.GLOBAL = {
|
||||
isWindow: function() { return true; },
|
||||
isWorker: function() { return false; },
|
||||
isShadowRealm: function() { return false; },
|
||||
};
|
||||
</script>
|
||||
<script src="../../resources/testharness.js"></script>
|
||||
<script src="../../resources/testharnessreport.js"></script>
|
||||
|
||||
<div id=log></div>
|
||||
<script src="../../encoding/streams/readable-writable-properties.any.js"></script>
|
|
@ -0,0 +1,22 @@
|
|||
// META: global=window,worker,shadowrealm

// This just tests that the "readable" and "writable" properties pass the brand
// checks. All other relevant attributes are covered by the IDL tests.

'use strict';

test(() => {
  const encoderStream = new TextEncoderStream();
  const reader =
      ReadableStream.prototype.getReader.call(encoderStream.readable);
  assert_equals(typeof reader, 'object',
                'readable property must pass brand check');
  const writer =
      WritableStream.prototype.getWriter.call(encoderStream.writable);
  assert_equals(typeof writer, 'object',
                'writable property must pass brand check');
}, 'TextEncoderStream readable and writable properties must pass brand checks');

test(() => {
  const decoderStream = new TextDecoderStream();
  const reader =
      ReadableStream.prototype.getReader.call(decoderStream.readable);
  assert_equals(typeof reader, 'object',
                'readable property must pass brand check');
  const writer =
      WritableStream.prototype.getWriter.call(decoderStream.writable);
  assert_equals(typeof writer, 'object',
                'writable property must pass brand check');
}, 'TextDecoderStream readable and writable properties must pass brand checks');
|
|
@ -0,0 +1,40 @@
|
|||
<!doctype html>
|
||||
<meta charset=utf-8>
|
||||
|
||||
<script src="../../resources/testharness.js"></script>
|
||||
<script src="../../resources/testharnessreport.js"></script>
|
||||
<script src="../../resources/testharness-shadowrealm-outer.js"></script>
|
||||
<script>
|
||||
(async function() {
|
||||
const outer = new ShadowRealm();
|
||||
outer.evaluate(`
|
||||
var inner = new ShadowRealm();
|
||||
`);
|
||||
await shadowRealmEvalAsync(outer, `
|
||||
await import("../../resources/testharness-shadowrealm-outer.js");
|
||||
await shadowRealmEvalAsync(inner, \`
|
||||
await import("/resources/testharness-shadowrealm-inner.js");
|
||||
await import("../../resources/testharness.js");
|
||||
\`);
|
||||
`);
|
||||
|
||||
outer.evaluate(`
|
||||
inner.evaluate("setShadowRealmGlobalProperties")
|
||||
`)(location.search, fetchAdaptor);
|
||||
|
||||
await shadowRealmEvalAsync(outer, `
|
||||
await shadowRealmEvalAsync(inner, \`
|
||||
|
||||
await import("/encoding/streams/readable-writable-properties.any.js");
|
||||
\`);
|
||||
`);
|
||||
|
||||
outer.evaluate(`
|
||||
function begin_shadow_realm_tests(windowCallback) {
|
||||
inner.evaluate("begin_shadow_realm_tests")(windowCallback);
|
||||
}
|
||||
`);
|
||||
await fetch_tests_from_shadow_realm(outer);
|
||||
done();
|
||||
})().catch(e => setup(() => { throw e; }));
|
||||
</script>
|
|
@ -0,0 +1,24 @@
|
|||
<!doctype html>
|
||||
<meta charset=utf-8>
|
||||
|
||||
<script src="../../resources/testharness.js"></script>
|
||||
<script src="../../resources/testharnessreport.js"></script>
|
||||
<script src="../../resources/testharness-shadowrealm-outer.js"></script>
|
||||
<script>
|
||||
(async function() {
|
||||
const r = new ShadowRealm();
|
||||
await shadowRealmEvalAsync(r, `
|
||||
await import("/resources/testharness-shadowrealm-inner.js");
|
||||
await import("../../resources/testharness.js");
|
||||
`);
|
||||
r.evaluate("setShadowRealmGlobalProperties")(location.search, fetchAdaptor);
|
||||
|
||||
await shadowRealmEvalAsync(r, `
|
||||
|
||||
await import("/encoding/streams/readable-writable-properties.any.js");
|
||||
`);
|
||||
|
||||
await fetch_tests_from_shadow_realm(r);
|
||||
done();
|
||||
})().catch(e => setup(() => { throw e; }));
|
||||
</script>
|
|
@ -0,0 +1,8 @@
|
|||
<!doctype html>
|
||||
<meta charset=utf-8>
|
||||
|
||||
<script src="../../resources/testharness.js"></script>
|
||||
<script src="../../resources/testharnessreport.js"></script>
|
||||
|
||||
<div id=log></div>
|
||||
<script src="../../encoding/streams/realms.window.js"></script>
|
|
@ -0,0 +1,304 @@
|
|||
'use strict';

// Verify that objects created through the TextEncoderStream and
// TextDecoderStream APIs belong to the expected realm. Each realm is backed by
// an iframe; JavaScript is posted to the iframes for evaluation, and inputs
// and outputs are exchanged through globals on each realm's window object.

// Realm (iframe) creation is asynchronous, so tests can only be declared after
// that setup finishes; done() must therefore be called explicitly.
setup({explicit_done: true});
|
||||
|
||||
// Creates a hidden iframe whose document evaluates any JavaScript posted to it
// by the parent window, and resolves with the iframe's contentWindow once it
// has loaded.
function createRealm() {
  const frame = document.createElement('iframe');
  // Split so the literal does not terminate the enclosing <script> element.
  const scriptEndTag = '<' + '/script>';
  frame.srcdoc = `<!doctype html>
  <script>
  onmessage = event => {
    if (event.source !== window.parent) {
      throw new Error('unexpected message with source ' + event.source);
    }
    eval(event.data);
  };
  ${scriptEndTag}`;
  frame.style.display = 'none';
  document.body.appendChild(frame);
  return new Promise(resolve => {
    frame.onload = () => resolve(frame.contentWindow);
  });
}
|
||||
|
||||
// Creates the five realms the tests need and publishes them — together with
// the canonical read()/write() methods — as globals, so the realms can access
// each other.
async function createRealms() {
  // All realms are visible on the global object so they can access each other.
  const realmNames = [
    'constructorRealm',  // the realm the constructor function comes from
    'constructedRealm',  // the realm in which the constructor is called
    'readRealm',         // the realm in which reading happens
    'writeRealm',        // the realm in which writing happens
    'methodRealm',       // provides the Readable/Writable method definitions
  ];
  for (const name of realmNames) {
    window[name] = await createRealm();
  }

  await evalInRealmAndWait(methodRealm, `
window.ReadableStreamDefaultReader =
    new ReadableStream().getReader().constructor;
window.WritableStreamDefaultWriter =
    new WritableStream().getWriter().constructor;
`);
  window.readMethod = methodRealm.ReadableStreamDefaultReader.prototype.read;
  window.writeMethod = methodRealm.WritableStreamDefaultWriter.prototype.write;
}
|
||||
|
||||
// In order for values to be visible between realms, they need to be global.
// To prevent interference between tests, variable names are generated
// automatically; each call returns a fresh, unique identifier.
const id = (() => {
  let counter = 0;
  return () => `realmsId${counter++}`;
})();
|
||||
|
||||
// Posts the string |code| to |realm| for evaluation. Evaluation is
// asynchronous: it has not yet happened when this function returns.
function evalInRealm(realm, code) {
  realm.postMessage(code, window.origin);
}

// Like evalInRealm(), but returns a Promise that resolves once |code| has
// actually been evaluated in |realm|.
async function evalInRealmAndWait(realm, code) {
  const resolverName = id();
  const evaluated = new Promise(resolve => {
    realm[resolverName] = resolve;
  });
  evalInRealm(realm, code);
  // Messages are processed in order, so by the time this second message runs,
  // |code| is guaranteed to have been evaluated.
  evalInRealm(realm, `${resolverName}();`);
  await evaluated;
}

// Like evalInRealmAndWait(), but evaluates |code| as an expression and returns
// the resulting value.
async function evalInRealmAndReturn(realm, code) {
  const resultName = id();
  await evalInRealmAndWait(realm, `window.${resultName} = ${code};`);
  return realm[resultName];
}
|
||||
|
||||
// Constructs |what| (which may include constructor arguments) inside
// constructedRealm, using the constructor from constructorRealm, then makes
// the resulting object visible in both readRealm and writeRealm. Returns the
// global id under which the object can be reached in those realms.
async function constructAndStore(what) {
  const objectName = id();
  // Call |constructorRealm|'s constructor from inside |constructedRealm|.
  const constructed = await evalInRealmAndReturn(
      constructedRealm, `new parent.constructorRealm.${what}`);
  writeRealm[objectName] = constructed;
  readRealm[objectName] = constructed;
  return objectName;
}

// Calls read() on the readable side of the TransformStream stored in
// readRealm[objId]. Locks the readable side as a side-effect.
function readInReadRealm(objId) {
  return evalInRealmAndReturn(readRealm, `
parent.readMethod.call(window.${objId}.readable.getReader())`);
}

// Calls write() on the writable side of the TransformStream stored in
// writeRealm[objId], passing |value|. Locks the writable side as a
// side-effect.
function writeInWriteRealm(objId, value) {
  const valueName = id();
  writeRealm[valueName] = value;
  return evalInRealmAndReturn(writeRealm, `
parent.writeMethod.call(window.${objId}.writable.getWriter(),
                        window.${valueName})`);
}
|
||||
|
||||
// Build the realms once the document has loaded, then declare every test and
// finally signal completion to the harness.
window.onload = async () => {
  await createRealms();
  runGenericTests('TextEncoderStream');
  runTextEncoderStreamTests();
  runGenericTests('TextDecoderStream');
  runTextDecoderStreamTests();
  done();
};
|
||||
|
||||
// Tests that apply identically to TextEncoderStream and TextDecoderStream.
function runGenericTests(classname) {
  promise_test(async () => {
    const instance = await evalInRealmAndReturn(
        constructedRealm, `new parent.constructorRealm.${classname}()`);
    assert_equals(instance.constructor, constructorRealm[classname],
                  'obj should be in constructor realm');
  }, `a ${classname} object should be associated with the realm the ` +
     'constructor came from');

  promise_test(async () => {
    const streamId = await constructAndStore(classname);
    // Grab the accessor functions from methodRealm, then invoke them from
    // readRealm and writeRealm respectively.
    const readableGetterId = id();
    readRealm[readableGetterId] = Object.getOwnPropertyDescriptor(
        methodRealm[classname].prototype, 'readable').get;
    const writableGetterId = id();
    writeRealm[writableGetterId] = Object.getOwnPropertyDescriptor(
        methodRealm[classname].prototype, 'writable').get;
    const readable = await evalInRealmAndReturn(
        readRealm, `${readableGetterId}.call(${streamId})`);
    const writable = await evalInRealmAndReturn(
        writeRealm, `${writableGetterId}.call(${streamId})`);
    assert_equals(readable.constructor, constructorRealm.ReadableStream,
                  'readable should be in constructor realm');
    assert_equals(writable.constructor, constructorRealm.WritableStream,
                  'writable should be in constructor realm');
  }, `${classname}'s readable and writable attributes should come from the ` +
     'same realm as the constructor definition');
}
|
||||
|
||||
// Tests specific to TextEncoderStream.
function runTextEncoderStreamTests() {
  promise_test(async () => {
    const streamId = await constructAndStore('TextEncoderStream');
    const writePromise = writeInWriteRealm(streamId, 'A');
    const readResult = await readInReadRealm(streamId);
    await writePromise;
    assert_equals(readResult.constructor, constructorRealm.Object,
                  'result should be in constructor realm');
    assert_equals(readResult.value.constructor, constructorRealm.Uint8Array,
                  'chunk should be in constructor realm');
  }, 'the output chunks when read is called after write should come from the ' +
     'same realm as the constructor of TextEncoderStream');

  promise_test(async () => {
    const streamId = await constructAndStore('TextEncoderStream');
    const pendingChunk = readInReadRealm(streamId);
    writeInWriteRealm(streamId, 'A');
    // Now the read() should resolve.
    const readResult = await pendingChunk;
    assert_equals(readResult.constructor, constructorRealm.Object,
                  'result should be in constructor realm');
    assert_equals(readResult.value.constructor, constructorRealm.Uint8Array,
                  'chunk should be in constructor realm');
  }, 'the output chunks when write is called with a pending read should come ' +
     'from the same realm as the constructor of TextEncoderStream');

  // There is not absolute consensus regarding what realm exceptions should be
  // created in. Implementations may vary. The expectations in exception-related
  // tests may change in future once consensus is reached.
  promise_test(async t => {
    const streamId = await constructAndStore('TextEncoderStream');
    // Read first to relieve backpressure.
    const pendingRead = readInReadRealm(streamId);

    // toString() returning a non-string makes the chunk unconvertable.
    await promise_rejects_js(t, constructorRealm.TypeError,
                             writeInWriteRealm(streamId, {
                               toString() { return {}; }
                             }),
                             'write TypeError should come from constructor realm');

    return promise_rejects_js(t, constructorRealm.TypeError, pendingRead,
                              'read TypeError should come from constructor realm');
  }, 'TypeError for unconvertable chunk should come from constructor realm ' +
     'of TextEncoderStream');
}
|
||||
|
||||
// Tests specific to TextDecoderStream.
function runTextDecoderStreamTests() {
  promise_test(async () => {
    const streamId = await constructAndStore('TextDecoderStream');
    const writePromise = writeInWriteRealm(streamId, new Uint8Array([65]));
    const readResult = await readInReadRealm(streamId);
    await writePromise;
    assert_equals(readResult.constructor, constructorRealm.Object,
                  'result should be in constructor realm');
    // A string is not an object, so doesn't have an associated realm. Accessing
    // string properties will create a transient object wrapper belonging to the
    // current realm. So checking the realm of result.value is not useful.
  }, 'the result object when read is called after write should come from the ' +
     'same realm as the constructor of TextDecoderStream');

  promise_test(async () => {
    const streamId = await constructAndStore('TextDecoderStream');
    const pendingChunk = readInReadRealm(streamId);
    writeInWriteRealm(streamId, new Uint8Array([65]));
    // Now the read() should resolve.
    const readResult = await pendingChunk;
    assert_equals(readResult.constructor, constructorRealm.Object,
                  'result should be in constructor realm');
    // A string is not an object, so doesn't have an associated realm. Accessing
    // string properties will create a transient object wrapper belonging to the
    // current realm. So checking the realm of result.value is not useful.
  }, 'the result object when write is called with a pending ' +
     'read should come from the same realm as the constructor of TextDecoderStream');

  promise_test(async t => {
    const streamId = await constructAndStore('TextDecoderStream');
    // Read first to relieve backpressure.
    const pendingRead = readInReadRealm(streamId);
    await promise_rejects_js(
        t, constructorRealm.TypeError,
        writeInWriteRealm(streamId, {}),
        'write TypeError should come from constructor realm'
    );

    return promise_rejects_js(
        t, constructorRealm.TypeError, pendingRead,
        'read TypeError should come from constructor realm'
    );
  }, 'TypeError for chunk with the wrong type should come from constructor ' +
     'realm of TextDecoderStream');

  promise_test(async t => {
    const streamId =
        await constructAndStore(`TextDecoderStream('utf-8', {fatal: true})`);
    // Read first to relieve backpressure.
    const pendingRead = readInReadRealm(streamId);

    // 0xFF is never valid UTF-8, so a fatal decoder must reject it.
    await promise_rejects_js(
        t, constructorRealm.TypeError,
        writeInWriteRealm(streamId, new Uint8Array([0xff])),
        'write TypeError should come from constructor realm'
    );

    return promise_rejects_js(
        t, constructorRealm.TypeError, pendingRead,
        'read TypeError should come from constructor realm'
    );
  }, 'TypeError for invalid chunk should come from constructor realm ' +
     'of TextDecoderStream');

  promise_test(async t => {
    const streamId =
        await constructAndStore(`TextDecoderStream('utf-8', {fatal: true})`);
    // Read first to relieve backpressure.
    readInReadRealm(streamId);
    // Write an unfinished sequence of bytes.
    const incompleteBytesId = id();
    writeRealm[incompleteBytesId] = new Uint8Array([0xf0]);

    return promise_rejects_js(
        t, constructorRealm.TypeError,
        // Can't use writeInWriteRealm() here because it doesn't make it possible
        // to reuse the writer.
        evalInRealmAndReturn(writeRealm, `
(() => {
  const writer = window.${streamId}.writable.getWriter();
  parent.writeMethod.call(writer, window.${incompleteBytesId});
  return parent.methodRealm.WritableStreamDefaultWriter.prototype
      .close.call(writer);
})();
`),
        'close TypeError should come from constructor realm'
    );
  }, 'TypeError for incomplete input should come from constructor realm ' +
     'of TextDecoderStream');
}
|
|
@ -0,0 +1,12 @@
|
|||
'use strict';

// Returns a ReadableStream that yields every element of |array| in order and
// then closes, so consumers observe a finite stream.
function readableStreamFromArray(array) {
  return new ReadableStream({
    start(controller) {
      for (const chunk of array) {
        controller.enqueue(chunk);
      }
      controller.close();
    }
  });
}
|
|
@ -0,0 +1,11 @@
|
|||
'use strict';

// Drains |stream| through a collecting WritableStream and returns a Promise
// that resolves with an array of every chunk, in arrival order.
function readableStreamToArray(stream) {
  const collected = [];
  const sink = new WritableStream({
    write(chunk) {
      collected.push(chunk);
    }
  });
  return stream.pipeTo(sink).then(() => collected);
}
|
|
@ -0,0 +1,151 @@
|
|||
// testharness file with ShadowRealm utilities to be imported in the realm
|
||||
// hosting the ShadowRealm
|
||||
|
||||
/**
 * Convenience function for evaluating some async code in the ShadowRealm and
 * waiting for the result.
 *
 * In case of error, this function intentionally exposes the stack trace (if it
 * is available) to the hosting realm, for debugging purposes.
 *
 * @param {ShadowRealm} realm - the ShadowRealm to evaluate the code in
 * @param {string} asyncBody - the code to evaluate; will be put in the body of
 *   an async function, and must return a value explicitly if a value is to be
 *   returned to the hosting realm.
 */
globalThis.shadowRealmEvalAsync = function (realm, asyncBody) {
  // realm.evaluate() returns a callable executor; wrapping it in a Promise on
  // this side lets the hosting realm await the result.
  const executor = realm.evaluate(`
    (resolve, reject) => {
      (async () => {
        ${asyncBody}
      })().then(resolve, (e) => reject(e.toString() + "\\n" + (e.stack || "")));
    }
  `);
  return new Promise(executor);
};
|
||||
|
||||
/**
 * Convenience adaptor function for fetch() that can be passed to
 * setShadowRealmGlobalProperties() (see testharness-shadowrealm-inner.js).
 * Used to adapt the hosting realm's fetch(), if present, to fetch a resource
 * and pass its text through the callable boundary to the ShadowRealm.
 */
globalThis.fetchAdaptor = (resource) => {
  // Returns a Promise executor; the fetch only happens once it is invoked.
  return (resolve, reject) => {
    fetch(resource)
        .then(response => response.text())
        .then(resolve, (error) => reject(error.toString()));
  };
};
|
||||
|
||||
let workerMessagePortPromise;
/**
 * Used when the hosting realm is a worker. This value is a Promise that
 * resolves to a function that posts a message to the worker's message port,
 * just like postMessage(). The message port is only available asynchronously in
 * SharedWorkers and ServiceWorkers.
 */
globalThis.getPostMessageFunc = async function () {
  // Dedicated workers expose postMessage directly on the global scope.
  if (typeof postMessage === "function") {
    return postMessage;
  }

  // Shared/Service workers: wait for the port captured by the listeners below.
  if (workerMessagePortPromise) {
    return await workerMessagePortPromise;
  }

  throw new Error("getPostMessageFunc is intended for Worker scopes");
}

// Port available asynchronously in shared worker, but not via an async func
let savedResolver;
if (globalThis.constructor.name === "SharedWorkerGlobalScope") {
  workerMessagePortPromise = new Promise((resolve) => {
    savedResolver = resolve;
  });
  addEventListener("connect", function (event) {
    const port = event.ports[0];
    savedResolver(port.postMessage.bind(port));
  });
} else if (globalThis.constructor.name === "ServiceWorkerGlobalScope") {
  workerMessagePortPromise = new Promise((resolve) => {
    savedResolver = resolve;
  });
  addEventListener("message", (e) => {
    if (typeof e.data === "object" && e.data !== null && e.data.type === "connect") {
      const client = e.source;
      savedResolver(client.postMessage.bind(client));
    }
  });
}
|
||||
|
||||
/**
 * Used when the hosting realm does not permit dynamic import, e.g. in
 * ServiceWorkers or AudioWorklets. Requires an adaptor function such as
 * fetchAdaptor() above, or an equivalent if fetch() is not present in the
 * hosting realm.
 *
 * @param {ShadowRealm} realm - the ShadowRealm in which to setup a
 *   fakeDynamicImport() global function.
 * @param {function} adaptor - an adaptor function that does what fetchAdaptor()
 *   does.
 */
globalThis.setupFakeDynamicImportInShadowRealm = function(realm, adaptor) {
  // Builds a Promise executor that fetches |url|'s text via |adaptor| and
  // evaluates it inside |realm|. The trailing ";\nundefined" keeps the
  // evaluated module's completion value from leaking across the boundary.
  function fetchModuleTextExecutor(url) {
    return (resolve, reject) => {
      new Promise(adaptor(url))
        .then(text => realm.evaluate(text + ";\nundefined"))
        .then(resolve, (e) => reject(e.toString()));
    }
  }

  realm.evaluate(`
    (fetchModuleTextExecutor) => {
      globalThis.fakeDynamicImport = function (url) {
        return new Promise(fetchModuleTextExecutor(url));
      }
    }
  `)(fetchModuleTextExecutor);
};
|
||||
|
||||
/**
 * Used when the hosting realm does not expose fetch(), i.e. in worklets. The
 * port on the other side of the channel needs to send messages starting with
 * 'fetchRequest::' and listen for messages starting with 'fetchResult::'. See
 * testharness-shadowrealm-audioworkletprocessor.js.
 *
 * @param {port} MessagePort - the message port on which to listen for fetch
 *   requests
 */
globalThis.setupFakeFetchOverMessagePort = function (port) {
  port.addEventListener("message", (event) => {
    // Ignore anything that is not a fetch request from the other side.
    const isFetchRequest =
        typeof event.data === "string" && event.data.startsWith("fetchRequest::");
    if (!isFetchRequest) {
      return;
    }

    const resource = event.data.slice("fetchRequest::".length);
    fetch(resource)
        .then(res => res.text())
        .then(
            text => port.postMessage(`fetchResult::success::${text}`),
            error => port.postMessage(`fetchResult::fail::${error}`),
        );
  });
  port.start();
}
|
||||
|
||||
/**
 * Returns a message suitable for posting with postMessage() that will signal to
 * the test harness that the tests are finished and there was an error in the
 * setup code.
 *
 * @param {message} any - error
 */
globalThis.createSetupErrorResult = function (message) {
  // Only Error-like objects carry a stack; anything else reports undefined.
  const hasStack =
      typeof message === "object" && message !== null && "stack" in message;
  return {
    type: "complete",
    tests: [],
    asserts: [],
    status: {
      status: 1, // TestsStatus.ERROR,
      message: String(message),
      stack: hasStack ? message.stack : undefined,
    },
  };
};
|
Loading…
Add table
Reference in a new issue