LibGC+Everywhere: Factor out a LibGC from LibJS

Resulting in a massive rename across almost the entire codebase! Alongside the
namespace change, we now have the following names (a brief before/after sketch
follows the list):

 * JS::NonnullGCPtr -> GC::Ref
 * JS::GCPtr -> GC::Ptr
 * JS::HeapFunction -> GC::Function
 * JS::CellImpl -> GC::Cell
 * JS::Handle -> GC::Root
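
As a minimal illustration of the mechanical rename, here is a sketch adapted
from the AudioBuffer/GainNode/AudioListener diffs below; the member names and
the lambda body are only illustrative and the snippet is not meant to compile
on its own:

    // Before (GC types and helpers lived in LibJS / the JS namespace):
    #include <LibJS/Heap/CellAllocator.h>
    JS_DEFINE_ALLOCATOR(GainNode);
    JS::NonnullGCPtr<AudioParam> m_gain;
    JS::GCPtr<AudioBuffer> m_buffer;
    JS::Handle<WebIDL::BufferSource> destination;
    queue_a_media_element_task(JS::create_heap_function(heap(), [this] { /* ... */ }));

    // After (the same types, factored out into LibGC):
    #include <LibGC/CellAllocator.h>
    GC_DEFINE_ALLOCATOR(GainNode);
    GC::Ref<AudioParam> m_gain;
    GC::Ptr<AudioBuffer> m_buffer;
    GC::Root<WebIDL::BufferSource> destination;
    queue_a_media_element_task(GC::create_function(heap(), [this] { /* ... */ }));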

Commit f87041bf3a by Shannon Booth, 2024-11-15 04:01:23 +13:00; committed by Andreas Kling
Notes: github-actions[bot] 2024-11-15 13:50:17 +00:00
1722 changed files with 9939 additions and 9906 deletions

@ -16,14 +16,14 @@
namespace Web::WebAudio {
JS_DEFINE_ALLOCATOR(AudioBuffer);
GC_DEFINE_ALLOCATOR(AudioBuffer);
WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioBuffer>> AudioBuffer::create(JS::Realm& realm, WebIDL::UnsignedLong number_of_channels, WebIDL::UnsignedLong length, float sample_rate)
WebIDL::ExceptionOr<GC::Ref<AudioBuffer>> AudioBuffer::create(JS::Realm& realm, WebIDL::UnsignedLong number_of_channels, WebIDL::UnsignedLong length, float sample_rate)
{
return construct_impl(realm, { number_of_channels, length, sample_rate });
}
WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioBuffer>> AudioBuffer::construct_impl(JS::Realm& realm, AudioBufferOptions const& options)
WebIDL::ExceptionOr<GC::Ref<AudioBuffer>> AudioBuffer::construct_impl(JS::Realm& realm, AudioBufferOptions const& options)
{
// 1. If any of the values in options lie outside its nominal range, throw a NotSupportedError exception and abort the following steps.
TRY(BaseAudioContext::verify_audio_options_inside_nominal_range(realm, options.number_of_channels, options.length, options.sample_rate));
@ -73,7 +73,7 @@ WebIDL::UnsignedLong AudioBuffer::number_of_channels() const
}
// https://webaudio.github.io/web-audio-api/#dom-audiobuffer-getchanneldata
WebIDL::ExceptionOr<JS::NonnullGCPtr<JS::Float32Array>> AudioBuffer::get_channel_data(WebIDL::UnsignedLong channel) const
WebIDL::ExceptionOr<GC::Ref<JS::Float32Array>> AudioBuffer::get_channel_data(WebIDL::UnsignedLong channel) const
{
if (channel >= m_channels.size())
return WebIDL::IndexSizeError::create(realm(), "Channel index is out of range"_string);
@ -82,7 +82,7 @@ WebIDL::ExceptionOr<JS::NonnullGCPtr<JS::Float32Array>> AudioBuffer::get_channel
}
// https://webaudio.github.io/web-audio-api/#dom-audiobuffer-copyfromchannel
WebIDL::ExceptionOr<void> AudioBuffer::copy_from_channel(JS::Handle<WebIDL::BufferSource> const& destination, WebIDL::UnsignedLong channel_number, WebIDL::UnsignedLong buffer_offset) const
WebIDL::ExceptionOr<void> AudioBuffer::copy_from_channel(GC::Root<WebIDL::BufferSource> const& destination, WebIDL::UnsignedLong channel_number, WebIDL::UnsignedLong buffer_offset) const
{
// The copyFromChannel() method copies the samples from the specified channel of the AudioBuffer to the destination array.
//
@ -108,7 +108,7 @@ WebIDL::ExceptionOr<void> AudioBuffer::copy_from_channel(JS::Handle<WebIDL::Buff
}
// https://webaudio.github.io/web-audio-api/#dom-audiobuffer-copytochannel
WebIDL::ExceptionOr<void> AudioBuffer::copy_to_channel(JS::Handle<WebIDL::BufferSource> const& source, WebIDL::UnsignedLong channel_number, WebIDL::UnsignedLong buffer_offset)
WebIDL::ExceptionOr<void> AudioBuffer::copy_to_channel(GC::Root<WebIDL::BufferSource> const& source, WebIDL::UnsignedLong channel_number, WebIDL::UnsignedLong buffer_offset)
{
// The copyToChannel() method copies the samples to the specified channel of the AudioBuffer from the source array.
//

@ -24,11 +24,11 @@ struct AudioBufferOptions {
// https://webaudio.github.io/web-audio-api/#AudioBuffer
class AudioBuffer final : public Bindings::PlatformObject {
WEB_PLATFORM_OBJECT(AudioBuffer, Bindings::PlatformObject);
JS_DECLARE_ALLOCATOR(AudioBuffer);
GC_DECLARE_ALLOCATOR(AudioBuffer);
public:
static WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioBuffer>> create(JS::Realm&, WebIDL::UnsignedLong number_of_channels, WebIDL::UnsignedLong length, float sample_rate);
static WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioBuffer>> construct_impl(JS::Realm&, AudioBufferOptions const&);
static WebIDL::ExceptionOr<GC::Ref<AudioBuffer>> create(JS::Realm&, WebIDL::UnsignedLong number_of_channels, WebIDL::UnsignedLong length, float sample_rate);
static WebIDL::ExceptionOr<GC::Ref<AudioBuffer>> construct_impl(JS::Realm&, AudioBufferOptions const&);
virtual ~AudioBuffer() override;
@ -36,9 +36,9 @@ public:
WebIDL::UnsignedLong length() const;
double duration() const;
WebIDL::UnsignedLong number_of_channels() const;
WebIDL::ExceptionOr<JS::NonnullGCPtr<JS::Float32Array>> get_channel_data(WebIDL::UnsignedLong channel) const;
WebIDL::ExceptionOr<void> copy_from_channel(JS::Handle<WebIDL::BufferSource> const&, WebIDL::UnsignedLong channel_number, WebIDL::UnsignedLong buffer_offset = 0) const;
WebIDL::ExceptionOr<void> copy_to_channel(JS::Handle<WebIDL::BufferSource> const&, WebIDL::UnsignedLong channel_number, WebIDL::UnsignedLong buffer_offset = 0);
WebIDL::ExceptionOr<GC::Ref<JS::Float32Array>> get_channel_data(WebIDL::UnsignedLong channel) const;
WebIDL::ExceptionOr<void> copy_from_channel(GC::Root<WebIDL::BufferSource> const&, WebIDL::UnsignedLong channel_number, WebIDL::UnsignedLong buffer_offset = 0) const;
WebIDL::ExceptionOr<void> copy_to_channel(GC::Root<WebIDL::BufferSource> const&, WebIDL::UnsignedLong channel_number, WebIDL::UnsignedLong buffer_offset = 0);
private:
explicit AudioBuffer(JS::Realm&, AudioBufferOptions const&);
@ -51,7 +51,7 @@ private:
//
// https://webaudio.github.io/web-audio-api/#dom-audiobuffer-internal-data-slot
// A data block holding the audio sample data.
Vector<JS::NonnullGCPtr<JS::Float32Array>> m_channels; // [[internal data]] / [[number_of_channels]]
Vector<GC::Ref<JS::Float32Array>> m_channels; // [[internal data]] / [[number_of_channels]]
// https://webaudio.github.io/web-audio-api/#dom-audiobuffer-length-slot
// The length of each channel of this AudioBuffer, which is an unsigned long.

@ -13,9 +13,9 @@
namespace Web::WebAudio {
JS_DEFINE_ALLOCATOR(AudioBufferSourceNode);
GC_DEFINE_ALLOCATOR(AudioBufferSourceNode);
AudioBufferSourceNode::AudioBufferSourceNode(JS::Realm& realm, JS::NonnullGCPtr<BaseAudioContext> context, AudioBufferSourceOptions const& options)
AudioBufferSourceNode::AudioBufferSourceNode(JS::Realm& realm, GC::Ref<BaseAudioContext> context, AudioBufferSourceOptions const& options)
: AudioScheduledSourceNode(realm, context)
, m_buffer(options.buffer)
, m_playback_rate(AudioParam::create(realm, options.playback_rate, NumericLimits<float>::lowest(), NumericLimits<float>::max(), Bindings::AutomationRate::ARate))
@ -29,26 +29,26 @@ AudioBufferSourceNode::AudioBufferSourceNode(JS::Realm& realm, JS::NonnullGCPtr<
AudioBufferSourceNode::~AudioBufferSourceNode() = default;
// https://webaudio.github.io/web-audio-api/#dom-audiobuffersourcenode-buffer
WebIDL::ExceptionOr<void> AudioBufferSourceNode::set_buffer(JS::GCPtr<AudioBuffer> buffer)
WebIDL::ExceptionOr<void> AudioBufferSourceNode::set_buffer(GC::Ptr<AudioBuffer> buffer)
{
m_buffer = buffer;
return {};
}
// https://webaudio.github.io/web-audio-api/#dom-audiobuffersourcenode-buffer
JS::GCPtr<AudioBuffer> AudioBufferSourceNode::buffer() const
GC::Ptr<AudioBuffer> AudioBufferSourceNode::buffer() const
{
return m_buffer;
}
// https://webaudio.github.io/web-audio-api/#dom-audiobuffersourcenode-playbackrate
JS::NonnullGCPtr<AudioParam> AudioBufferSourceNode::playback_rate() const
GC::Ref<AudioParam> AudioBufferSourceNode::playback_rate() const
{
return m_playback_rate;
}
// https://webaudio.github.io/web-audio-api/#dom-audiobuffersourcenode-detune
JS::NonnullGCPtr<AudioParam> AudioBufferSourceNode::detune() const
GC::Ref<AudioParam> AudioBufferSourceNode::detune() const
{
return m_detune;
}
@ -102,13 +102,13 @@ WebIDL::ExceptionOr<void> AudioBufferSourceNode::start(Optional<double> when, Op
return {};
}
WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioBufferSourceNode>> AudioBufferSourceNode::create(JS::Realm& realm, JS::NonnullGCPtr<BaseAudioContext> context, AudioBufferSourceOptions const& options)
WebIDL::ExceptionOr<GC::Ref<AudioBufferSourceNode>> AudioBufferSourceNode::create(JS::Realm& realm, GC::Ref<BaseAudioContext> context, AudioBufferSourceOptions const& options)
{
return construct_impl(realm, context, options);
}
// https://webaudio.github.io/web-audio-api/#dom-audiobuffersourcenode-audiobuffersourcenode
WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioBufferSourceNode>> AudioBufferSourceNode::construct_impl(JS::Realm& realm, JS::NonnullGCPtr<BaseAudioContext> context, AudioBufferSourceOptions const& options)
WebIDL::ExceptionOr<GC::Ref<AudioBufferSourceNode>> AudioBufferSourceNode::construct_impl(JS::Realm& realm, GC::Ref<BaseAudioContext> context, AudioBufferSourceOptions const& options)
{
// When the constructor is called with a BaseAudioContext c and an option object option, the user agent
// MUST initialize the AudioNode this, with context and options as arguments.

@ -15,7 +15,7 @@ namespace Web::WebAudio {
// https://webaudio.github.io/web-audio-api/#AudioBufferSourceOptions
struct AudioBufferSourceOptions {
JS::GCPtr<AudioBuffer> buffer;
GC::Ptr<AudioBuffer> buffer;
float detune { 0 };
bool loop { false };
double loop_end { 0 };
@ -26,15 +26,15 @@ struct AudioBufferSourceOptions {
// https://webaudio.github.io/web-audio-api/#AudioBufferSourceNode
class AudioBufferSourceNode : public AudioScheduledSourceNode {
WEB_PLATFORM_OBJECT(AudioBufferSourceNode, AudioScheduledSourceNode);
JS_DECLARE_ALLOCATOR(AudioBufferSourceNode);
GC_DECLARE_ALLOCATOR(AudioBufferSourceNode);
public:
virtual ~AudioBufferSourceNode() override;
WebIDL::ExceptionOr<void> set_buffer(JS::GCPtr<AudioBuffer>);
JS::GCPtr<AudioBuffer> buffer() const;
JS::NonnullGCPtr<AudioParam> playback_rate() const;
JS::NonnullGCPtr<AudioParam> detune() const;
WebIDL::ExceptionOr<void> set_buffer(GC::Ptr<AudioBuffer>);
GC::Ptr<AudioBuffer> buffer() const;
GC::Ref<AudioParam> playback_rate() const;
GC::Ref<AudioParam> detune() const;
WebIDL::ExceptionOr<void> set_loop(bool);
bool loop() const;
WebIDL::ExceptionOr<void> set_loop_start(double);
@ -46,19 +46,19 @@ public:
WebIDL::ExceptionOr<void> start(Optional<double>, Optional<double>, Optional<double>);
static WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioBufferSourceNode>> create(JS::Realm&, JS::NonnullGCPtr<BaseAudioContext>, AudioBufferSourceOptions const& = {});
static WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioBufferSourceNode>> construct_impl(JS::Realm&, JS::NonnullGCPtr<BaseAudioContext>, AudioBufferSourceOptions const& = {});
static WebIDL::ExceptionOr<GC::Ref<AudioBufferSourceNode>> create(JS::Realm&, GC::Ref<BaseAudioContext>, AudioBufferSourceOptions const& = {});
static WebIDL::ExceptionOr<GC::Ref<AudioBufferSourceNode>> construct_impl(JS::Realm&, GC::Ref<BaseAudioContext>, AudioBufferSourceOptions const& = {});
protected:
AudioBufferSourceNode(JS::Realm&, JS::NonnullGCPtr<BaseAudioContext>, AudioBufferSourceOptions const& = {});
AudioBufferSourceNode(JS::Realm&, GC::Ref<BaseAudioContext>, AudioBufferSourceOptions const& = {});
virtual void initialize(JS::Realm&) override;
virtual void visit_edges(Cell::Visitor&) override;
private:
JS::GCPtr<AudioBuffer> m_buffer;
JS::NonnullGCPtr<AudioParam> m_playback_rate;
JS::NonnullGCPtr<AudioParam> m_detune;
GC::Ptr<AudioBuffer> m_buffer;
GC::Ref<AudioParam> m_playback_rate;
GC::Ref<AudioParam> m_detune;
bool m_loop { false };
double m_loop_start { 0.0 };
double m_loop_end { 0.0 };

@ -15,10 +15,10 @@
namespace Web::WebAudio {
JS_DEFINE_ALLOCATOR(AudioContext);
GC_DEFINE_ALLOCATOR(AudioContext);
// https://webaudio.github.io/web-audio-api/#dom-audiocontext-audiocontext
WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioContext>> AudioContext::construct_impl(JS::Realm& realm, AudioContextOptions const& context_options)
WebIDL::ExceptionOr<GC::Ref<AudioContext>> AudioContext::construct_impl(JS::Realm& realm, AudioContextOptions const& context_options)
{
return realm.create<AudioContext>(realm, context_options);
}
@ -72,7 +72,7 @@ AudioContext::AudioContext(JS::Realm& realm, AudioContextOptions const& context_
BaseAudioContext::set_rendering_state(Bindings::AudioContextState::Running);
// 5.3: queue a media element task to execute the following steps:
queue_a_media_element_task(JS::create_heap_function(heap(), [&realm, this]() {
queue_a_media_element_task(GC::create_function(heap(), [&realm, this]() {
// 5.3.1: Set the state attribute of the AudioContext to "running".
BaseAudioContext::set_control_state(Bindings::AudioContextState::Running);
@ -104,7 +104,7 @@ AudioTimestamp AudioContext::get_output_timestamp()
}
// https://www.w3.org/TR/webaudio/#dom-audiocontext-resume
WebIDL::ExceptionOr<JS::NonnullGCPtr<WebIDL::Promise>> AudioContext::resume()
WebIDL::ExceptionOr<GC::Ref<WebIDL::Promise>> AudioContext::resume()
{
auto& realm = this->realm();
@ -145,7 +145,7 @@ WebIDL::ExceptionOr<JS::NonnullGCPtr<WebIDL::Promise>> AudioContext::resume()
// 7.3: Start rendering the audio graph.
if (!start_rendering_audio_graph()) {
// 7.4: In case of failure, queue a media element task to execute the following steps:
queue_a_media_element_task(JS::create_heap_function(heap(), [&realm, this]() {
queue_a_media_element_task(GC::create_function(heap(), [&realm, this]() {
HTML::TemporaryExecutionContext context(realm, HTML::TemporaryExecutionContext::CallbacksEnabled::Yes);
// 7.4.1: Reject all promises from [[pending resume promises]] in order, then clear [[pending resume promises]].
@ -162,7 +162,7 @@ WebIDL::ExceptionOr<JS::NonnullGCPtr<WebIDL::Promise>> AudioContext::resume()
}
// 7.5: queue a media element task to execute the following steps:
queue_a_media_element_task(JS::create_heap_function(heap(), [&realm, promise, this]() {
queue_a_media_element_task(GC::create_function(heap(), [&realm, promise, this]() {
HTML::TemporaryExecutionContext context(realm, HTML::TemporaryExecutionContext::CallbacksEnabled::Yes);
// 7.5.1: Resolve all promises from [[pending resume promises]] in order.
@ -185,7 +185,7 @@ WebIDL::ExceptionOr<JS::NonnullGCPtr<WebIDL::Promise>> AudioContext::resume()
set_control_state(Bindings::AudioContextState::Running);
// 7.5.4.2: queue a media element task to fire an event named statechange at the AudioContext.
queue_a_media_element_task(JS::create_heap_function(heap(), [&realm, this]() {
queue_a_media_element_task(GC::create_function(heap(), [&realm, this]() {
this->dispatch_event(DOM::Event::create(realm, HTML::EventNames::statechange));
}));
}
@ -196,7 +196,7 @@ WebIDL::ExceptionOr<JS::NonnullGCPtr<WebIDL::Promise>> AudioContext::resume()
}
// https://www.w3.org/TR/webaudio/#dom-audiocontext-suspend
WebIDL::ExceptionOr<JS::NonnullGCPtr<WebIDL::Promise>> AudioContext::suspend()
WebIDL::ExceptionOr<GC::Ref<WebIDL::Promise>> AudioContext::suspend()
{
auto& realm = this->realm();
@ -232,7 +232,7 @@ WebIDL::ExceptionOr<JS::NonnullGCPtr<WebIDL::Promise>> AudioContext::suspend()
set_rendering_state(Bindings::AudioContextState::Suspended);
// 7.3: queue a media element task to execute the following steps:
queue_a_media_element_task(JS::create_heap_function(heap(), [&realm, promise, this]() {
queue_a_media_element_task(GC::create_function(heap(), [&realm, promise, this]() {
HTML::TemporaryExecutionContext context(realm, HTML::TemporaryExecutionContext::CallbacksEnabled::Yes);
// 7.3.1: Resolve promise.
@ -244,7 +244,7 @@ WebIDL::ExceptionOr<JS::NonnullGCPtr<WebIDL::Promise>> AudioContext::suspend()
set_control_state(Bindings::AudioContextState::Suspended);
// 7.3.2.2: queue a media element task to fire an event named statechange at the AudioContext.
queue_a_media_element_task(JS::create_heap_function(heap(), [&realm, this]() {
queue_a_media_element_task(GC::create_function(heap(), [&realm, this]() {
this->dispatch_event(DOM::Event::create(realm, HTML::EventNames::statechange));
}));
}
@ -255,7 +255,7 @@ WebIDL::ExceptionOr<JS::NonnullGCPtr<WebIDL::Promise>> AudioContext::suspend()
}
// https://www.w3.org/TR/webaudio/#dom-audiocontext-close
WebIDL::ExceptionOr<JS::NonnullGCPtr<WebIDL::Promise>> AudioContext::close()
WebIDL::ExceptionOr<GC::Ref<WebIDL::Promise>> AudioContext::close()
{
auto& realm = this->realm();
@ -287,7 +287,7 @@ WebIDL::ExceptionOr<JS::NonnullGCPtr<WebIDL::Promise>> AudioContext::close()
// FIXME: 5.3: If this control message is being run in a reaction to the document being unloaded, abort this algorithm.
// 5.4: queue a media element task to execute the following steps:
queue_a_media_element_task(JS::create_heap_function(heap(), [&realm, promise, this]() {
queue_a_media_element_task(GC::create_function(heap(), [&realm, promise, this]() {
HTML::TemporaryExecutionContext context(realm, HTML::TemporaryExecutionContext::CallbacksEnabled::Yes);
// 5.4.1: Resolve promise.

@ -25,19 +25,19 @@ struct AudioTimestamp {
// https://webaudio.github.io/web-audio-api/#AudioContext
class AudioContext final : public BaseAudioContext {
WEB_PLATFORM_OBJECT(AudioContext, BaseAudioContext);
JS_DECLARE_ALLOCATOR(AudioContext);
GC_DECLARE_ALLOCATOR(AudioContext);
public:
static WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioContext>> construct_impl(JS::Realm&, AudioContextOptions const& context_options = {});
static WebIDL::ExceptionOr<GC::Ref<AudioContext>> construct_impl(JS::Realm&, AudioContextOptions const& context_options = {});
virtual ~AudioContext() override;
double base_latency() const { return m_base_latency; }
double output_latency() const { return m_output_latency; }
AudioTimestamp get_output_timestamp();
WebIDL::ExceptionOr<JS::NonnullGCPtr<WebIDL::Promise>> resume();
WebIDL::ExceptionOr<JS::NonnullGCPtr<WebIDL::Promise>> suspend();
WebIDL::ExceptionOr<JS::NonnullGCPtr<WebIDL::Promise>> close();
WebIDL::ExceptionOr<GC::Ref<WebIDL::Promise>> resume();
WebIDL::ExceptionOr<GC::Ref<WebIDL::Promise>> suspend();
WebIDL::ExceptionOr<GC::Ref<WebIDL::Promise>> close();
private:
explicit AudioContext(JS::Realm&, AudioContextOptions const& context_options);
@ -49,7 +49,7 @@ private:
double m_output_latency { 0 };
bool m_allowed_to_start = true;
Vector<JS::NonnullGCPtr<WebIDL::Promise>> m_pending_resume_promises;
Vector<GC::Ref<WebIDL::Promise>> m_pending_resume_promises;
bool m_suspended_by_user = false;
bool start_rendering_audio_graph();

@ -15,9 +15,9 @@
namespace Web::WebAudio {
JS_DEFINE_ALLOCATOR(AudioDestinationNode);
GC_DEFINE_ALLOCATOR(AudioDestinationNode);
AudioDestinationNode::AudioDestinationNode(JS::Realm& realm, JS::NonnullGCPtr<BaseAudioContext> context)
AudioDestinationNode::AudioDestinationNode(JS::Realm& realm, GC::Ref<BaseAudioContext> context)
: AudioNode(realm, context)
{
}
@ -31,7 +31,7 @@ WebIDL::UnsignedLong AudioDestinationNode::max_channel_count()
return 2;
}
JS::NonnullGCPtr<AudioDestinationNode> AudioDestinationNode::construct_impl(JS::Realm& realm, JS::NonnullGCPtr<BaseAudioContext> context)
GC::Ref<AudioDestinationNode> AudioDestinationNode::construct_impl(JS::Realm& realm, GC::Ref<BaseAudioContext> context)
{
return realm.create<AudioDestinationNode>(realm, context);
}

@ -17,7 +17,7 @@ namespace Web::WebAudio {
// https://webaudio.github.io/web-audio-api/#AudioDestinationNode
class AudioDestinationNode : public AudioNode {
WEB_PLATFORM_OBJECT(AudioDestinationNode, AudioNode);
JS_DECLARE_ALLOCATOR(AudioDestinationNode);
GC_DECLARE_ALLOCATOR(AudioDestinationNode);
public:
virtual ~AudioDestinationNode() override;
@ -27,10 +27,10 @@ public:
WebIDL::UnsignedLong number_of_outputs() override { return 1; }
WebIDL::ExceptionOr<void> set_channel_count(WebIDL::UnsignedLong) override;
static JS::NonnullGCPtr<AudioDestinationNode> construct_impl(JS::Realm&, JS::NonnullGCPtr<BaseAudioContext>);
static GC::Ref<AudioDestinationNode> construct_impl(JS::Realm&, GC::Ref<BaseAudioContext>);
protected:
AudioDestinationNode(JS::Realm&, JS::NonnullGCPtr<BaseAudioContext>);
AudioDestinationNode(JS::Realm&, GC::Ref<BaseAudioContext>);
virtual void initialize(JS::Realm&) override;
virtual void visit_edges(Cell::Visitor&) override;

@ -4,13 +4,13 @@
* SPDX-License-Identifier: BSD-2-Clause
*/
#include <LibJS/Heap/CellAllocator.h>
#include <LibGC/CellAllocator.h>
#include <LibWeb/Bindings/Intrinsics.h>
#include <LibWeb/WebAudio/AudioListener.h>
namespace Web::WebAudio {
JS_DEFINE_ALLOCATOR(AudioListener);
GC_DEFINE_ALLOCATOR(AudioListener);
AudioListener::AudioListener(JS::Realm& realm)
: Bindings::PlatformObject(realm)
@ -26,7 +26,7 @@ AudioListener::AudioListener(JS::Realm& realm)
{
}
JS::NonnullGCPtr<AudioListener> AudioListener::create(JS::Realm& realm)
GC::Ref<AudioListener> AudioListener::create(JS::Realm& realm)
{
return realm.create<AudioListener>(realm);
}

@ -17,21 +17,21 @@ namespace Web::WebAudio {
// https://webaudio.github.io/web-audio-api/#AudioListener
class AudioListener final : public Bindings::PlatformObject {
WEB_PLATFORM_OBJECT(AudioListener, Bindings::PlatformObject);
JS_DECLARE_ALLOCATOR(AudioListener);
GC_DECLARE_ALLOCATOR(AudioListener);
public:
static JS::NonnullGCPtr<AudioListener> create(JS::Realm&);
static GC::Ref<AudioListener> create(JS::Realm&);
virtual ~AudioListener() override;
JS::NonnullGCPtr<AudioParam> forward_x() const { return m_forward_x; }
JS::NonnullGCPtr<AudioParam> forward_y() const { return m_forward_y; }
JS::NonnullGCPtr<AudioParam> forward_z() const { return m_forward_z; }
JS::NonnullGCPtr<AudioParam> position_x() const { return m_position_x; }
JS::NonnullGCPtr<AudioParam> position_y() const { return m_position_y; }
JS::NonnullGCPtr<AudioParam> position_z() const { return m_position_z; }
JS::NonnullGCPtr<AudioParam> up_x() const { return m_up_x; }
JS::NonnullGCPtr<AudioParam> up_y() const { return m_up_y; }
JS::NonnullGCPtr<AudioParam> up_z() const { return m_up_z; }
GC::Ref<AudioParam> forward_x() const { return m_forward_x; }
GC::Ref<AudioParam> forward_y() const { return m_forward_y; }
GC::Ref<AudioParam> forward_z() const { return m_forward_z; }
GC::Ref<AudioParam> position_x() const { return m_position_x; }
GC::Ref<AudioParam> position_y() const { return m_position_y; }
GC::Ref<AudioParam> position_z() const { return m_position_z; }
GC::Ref<AudioParam> up_x() const { return m_up_x; }
GC::Ref<AudioParam> up_y() const { return m_up_y; }
GC::Ref<AudioParam> up_z() const { return m_up_z; }
WebIDL::ExceptionOr<void> set_position(float x, float y, float z);
WebIDL::ExceptionOr<void> set_orientation(float x, float y, float z, float x_up, float y_up, float z_up);
@ -42,15 +42,15 @@ private:
virtual void initialize(JS::Realm&) override;
virtual void visit_edges(Cell::Visitor&) override;
JS::NonnullGCPtr<AudioParam> m_forward_x;
JS::NonnullGCPtr<AudioParam> m_forward_y;
JS::NonnullGCPtr<AudioParam> m_forward_z;
JS::NonnullGCPtr<AudioParam> m_position_x;
JS::NonnullGCPtr<AudioParam> m_position_y;
JS::NonnullGCPtr<AudioParam> m_position_z;
JS::NonnullGCPtr<AudioParam> m_up_x;
JS::NonnullGCPtr<AudioParam> m_up_y;
JS::NonnullGCPtr<AudioParam> m_up_z;
GC::Ref<AudioParam> m_forward_x;
GC::Ref<AudioParam> m_forward_y;
GC::Ref<AudioParam> m_forward_z;
GC::Ref<AudioParam> m_position_x;
GC::Ref<AudioParam> m_position_y;
GC::Ref<AudioParam> m_position_z;
GC::Ref<AudioParam> m_up_x;
GC::Ref<AudioParam> m_up_y;
GC::Ref<AudioParam> m_up_z;
};
}

@ -10,9 +10,9 @@
namespace Web::WebAudio {
JS_DEFINE_ALLOCATOR(AudioNode);
GC_DEFINE_ALLOCATOR(AudioNode);
AudioNode::AudioNode(JS::Realm& realm, JS::NonnullGCPtr<BaseAudioContext> context)
AudioNode::AudioNode(JS::Realm& realm, GC::Ref<BaseAudioContext> context)
: DOM::EventTarget(realm)
, m_context(context)
@ -48,7 +48,7 @@ WebIDL::ExceptionOr<void> AudioNode::initialize_audio_node_options(AudioNodeOpti
}
// https://webaudio.github.io/web-audio-api/#dom-audionode-connect
WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioNode>> AudioNode::connect(JS::NonnullGCPtr<AudioNode> destination_node, WebIDL::UnsignedLong output, WebIDL::UnsignedLong input)
WebIDL::ExceptionOr<GC::Ref<AudioNode>> AudioNode::connect(GC::Ref<AudioNode> destination_node, WebIDL::UnsignedLong output, WebIDL::UnsignedLong input)
{
// There can only be one connection between a given output of one specific node and a given input of another specific node.
// Multiple connections with the same termini are ignored.
@ -65,7 +65,7 @@ WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioNode>> AudioNode::connect(JS::NonnullG
}
// https://webaudio.github.io/web-audio-api/#dom-audionode-connect-destinationparam-output
void AudioNode::connect(JS::NonnullGCPtr<AudioParam> destination_param, WebIDL::UnsignedLong output)
void AudioNode::connect(GC::Ref<AudioParam> destination_param, WebIDL::UnsignedLong output)
{
(void)destination_param;
(void)output;
@ -86,14 +86,14 @@ void AudioNode::disconnect(WebIDL::UnsignedLong output)
}
// https://webaudio.github.io/web-audio-api/#dom-audionode-disconnect-destinationnode
void AudioNode::disconnect(JS::NonnullGCPtr<AudioNode> destination_node)
void AudioNode::disconnect(GC::Ref<AudioNode> destination_node)
{
(void)destination_node;
dbgln("FIXME: Implement AudioNode::disconnect(destination_node)");
}
// https://webaudio.github.io/web-audio-api/#dom-audionode-disconnect-destinationnode-output
void AudioNode::disconnect(JS::NonnullGCPtr<AudioNode> destination_node, WebIDL::UnsignedLong output)
void AudioNode::disconnect(GC::Ref<AudioNode> destination_node, WebIDL::UnsignedLong output)
{
(void)destination_node;
(void)output;
@ -101,7 +101,7 @@ void AudioNode::disconnect(JS::NonnullGCPtr<AudioNode> destination_node, WebIDL:
}
// https://webaudio.github.io/web-audio-api/#dom-audionode-disconnect-destinationnode-output-input
void AudioNode::disconnect(JS::NonnullGCPtr<AudioNode> destination_node, WebIDL::UnsignedLong output, WebIDL::UnsignedLong input)
void AudioNode::disconnect(GC::Ref<AudioNode> destination_node, WebIDL::UnsignedLong output, WebIDL::UnsignedLong input)
{
(void)destination_node;
(void)output;
@ -110,14 +110,14 @@ void AudioNode::disconnect(JS::NonnullGCPtr<AudioNode> destination_node, WebIDL:
}
// https://webaudio.github.io/web-audio-api/#dom-audionode-disconnect-destinationparam
void AudioNode::disconnect(JS::NonnullGCPtr<AudioParam> destination_param)
void AudioNode::disconnect(GC::Ref<AudioParam> destination_param)
{
(void)destination_param;
dbgln("FIXME: Implement AudioNode::disconnect(destination_param)");
}
// https://webaudio.github.io/web-audio-api/#dom-audionode-disconnect-destinationparam-output
void AudioNode::disconnect(JS::NonnullGCPtr<AudioParam> destination_param, WebIDL::UnsignedLong output)
void AudioNode::disconnect(GC::Ref<AudioParam> destination_param, WebIDL::UnsignedLong output)
{
(void)destination_param;
(void)output;

@ -31,24 +31,24 @@ struct AudioNodeDefaultOptions {
// https://webaudio.github.io/web-audio-api/#AudioNode
class AudioNode : public DOM::EventTarget {
WEB_PLATFORM_OBJECT(AudioNode, DOM::EventTarget);
JS_DECLARE_ALLOCATOR(AudioNode);
GC_DECLARE_ALLOCATOR(AudioNode);
public:
virtual ~AudioNode() override;
WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioNode>> connect(JS::NonnullGCPtr<AudioNode> destination_node, WebIDL::UnsignedLong output = 0, WebIDL::UnsignedLong input = 0);
void connect(JS::NonnullGCPtr<AudioParam> destination_param, WebIDL::UnsignedLong output = 0);
WebIDL::ExceptionOr<GC::Ref<AudioNode>> connect(GC::Ref<AudioNode> destination_node, WebIDL::UnsignedLong output = 0, WebIDL::UnsignedLong input = 0);
void connect(GC::Ref<AudioParam> destination_param, WebIDL::UnsignedLong output = 0);
void disconnect();
void disconnect(WebIDL::UnsignedLong output);
void disconnect(JS::NonnullGCPtr<AudioNode> destination_node);
void disconnect(JS::NonnullGCPtr<AudioNode> destination_node, WebIDL::UnsignedLong output);
void disconnect(JS::NonnullGCPtr<AudioNode> destination_node, WebIDL::UnsignedLong output, WebIDL::UnsignedLong input);
void disconnect(JS::NonnullGCPtr<AudioParam> destination_param);
void disconnect(JS::NonnullGCPtr<AudioParam> destination_param, WebIDL::UnsignedLong output);
void disconnect(GC::Ref<AudioNode> destination_node);
void disconnect(GC::Ref<AudioNode> destination_node, WebIDL::UnsignedLong output);
void disconnect(GC::Ref<AudioNode> destination_node, WebIDL::UnsignedLong output, WebIDL::UnsignedLong input);
void disconnect(GC::Ref<AudioParam> destination_param);
void disconnect(GC::Ref<AudioParam> destination_param, WebIDL::UnsignedLong output);
// https://webaudio.github.io/web-audio-api/#dom-audionode-context
JS::NonnullGCPtr<BaseAudioContext const> context() const
GC::Ref<BaseAudioContext const> context() const
{
// The BaseAudioContext which owns this AudioNode.
return m_context;
@ -71,13 +71,13 @@ public:
WebIDL::ExceptionOr<void> initialize_audio_node_options(AudioNodeOptions const& given_options, AudioNodeDefaultOptions const& default_options);
protected:
AudioNode(JS::Realm&, JS::NonnullGCPtr<BaseAudioContext>);
AudioNode(JS::Realm&, GC::Ref<BaseAudioContext>);
virtual void initialize(JS::Realm&) override;
virtual void visit_edges(Cell::Visitor&) override;
private:
JS::NonnullGCPtr<BaseAudioContext> m_context;
GC::Ref<BaseAudioContext> m_context;
WebIDL::UnsignedLong m_channel_count { 2 };
Bindings::ChannelCountMode m_channel_count_mode { Bindings::ChannelCountMode::Max };
Bindings::ChannelInterpretation m_channel_interpretation { Bindings::ChannelInterpretation::Speakers };

@ -11,7 +11,7 @@
namespace Web::WebAudio {
JS_DEFINE_ALLOCATOR(AudioParam);
GC_DEFINE_ALLOCATOR(AudioParam);
AudioParam::AudioParam(JS::Realm& realm, float default_value, float min_value, float max_value, Bindings::AutomationRate automation_rate)
: Bindings::PlatformObject(realm)
@ -23,7 +23,7 @@ AudioParam::AudioParam(JS::Realm& realm, float default_value, float min_value, f
{
}
JS::NonnullGCPtr<AudioParam> AudioParam::create(JS::Realm& realm, float default_value, float min_value, float max_value, Bindings::AutomationRate automation_rate)
GC::Ref<AudioParam> AudioParam::create(JS::Realm& realm, float default_value, float min_value, float max_value, Bindings::AutomationRate automation_rate)
{
return realm.create<AudioParam>(realm, default_value, min_value, max_value, automation_rate);
}
@ -78,16 +78,16 @@ float AudioParam::max_value() const
}
// https://webaudio.github.io/web-audio-api/#dom-audioparam-setvalueattime
WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioParam>> AudioParam::set_value_at_time(float value, double start_time)
WebIDL::ExceptionOr<GC::Ref<AudioParam>> AudioParam::set_value_at_time(float value, double start_time)
{
(void)value;
(void)start_time;
dbgln("FIXME: Implement AudioParam::set_value_at_time");
return JS::NonnullGCPtr<AudioParam> { *this };
return GC::Ref<AudioParam> { *this };
}
// https://webaudio.github.io/web-audio-api/#dom-audioparam-linearramptovalueattime
WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioParam>> AudioParam::linear_ramp_to_value_at_time(float value, double end_time)
WebIDL::ExceptionOr<GC::Ref<AudioParam>> AudioParam::linear_ramp_to_value_at_time(float value, double end_time)
{
(void)value;
(void)end_time;
@ -95,7 +95,7 @@ WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioParam>> AudioParam::linear_ramp_to_val
}
// https://webaudio.github.io/web-audio-api/#dom-audioparam-exponentialramptovalueattime
WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioParam>> AudioParam::exponential_ramp_to_value_at_time(float value, double end_time)
WebIDL::ExceptionOr<GC::Ref<AudioParam>> AudioParam::exponential_ramp_to_value_at_time(float value, double end_time)
{
(void)value;
(void)end_time;
@ -103,7 +103,7 @@ WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioParam>> AudioParam::exponential_ramp_t
}
// https://webaudio.github.io/web-audio-api/#dom-audioparam-settargetattime
WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioParam>> AudioParam::set_target_at_time(float target, double start_time, float time_constant)
WebIDL::ExceptionOr<GC::Ref<AudioParam>> AudioParam::set_target_at_time(float target, double start_time, float time_constant)
{
(void)target;
(void)start_time;
@ -112,7 +112,7 @@ WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioParam>> AudioParam::set_target_at_time
}
// https://webaudio.github.io/web-audio-api/#dom-audioparam-setvaluecurveattime
WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioParam>> AudioParam::set_value_curve_at_time(Span<float> values, double start_time, double duration)
WebIDL::ExceptionOr<GC::Ref<AudioParam>> AudioParam::set_value_curve_at_time(Span<float> values, double start_time, double duration)
{
(void)values;
(void)start_time;
@ -121,14 +121,14 @@ WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioParam>> AudioParam::set_value_curve_at
}
// https://webaudio.github.io/web-audio-api/#dom-audioparam-cancelscheduledvalues
WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioParam>> AudioParam::cancel_scheduled_values(double cancel_time)
WebIDL::ExceptionOr<GC::Ref<AudioParam>> AudioParam::cancel_scheduled_values(double cancel_time)
{
(void)cancel_time;
return WebIDL::NotSupportedError::create(realm(), "FIXME: Implement AudioParam::cancel_scheduled_values"_string);
}
// https://webaudio.github.io/web-audio-api/#dom-audioparam-cancelandholdattime
WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioParam>> AudioParam::cancel_and_hold_at_time(double cancel_time)
WebIDL::ExceptionOr<GC::Ref<AudioParam>> AudioParam::cancel_and_hold_at_time(double cancel_time)
{
(void)cancel_time;
return WebIDL::NotSupportedError::create(realm(), "FIXME: Implement AudioParam::cancel_and_hold_at_time"_string);

@ -15,10 +15,10 @@ namespace Web::WebAudio {
// https://webaudio.github.io/web-audio-api/#AudioParam
class AudioParam final : public Bindings::PlatformObject {
WEB_PLATFORM_OBJECT(AudioParam, Bindings::PlatformObject);
JS_DECLARE_ALLOCATOR(AudioParam);
GC_DECLARE_ALLOCATOR(AudioParam);
public:
static JS::NonnullGCPtr<AudioParam> create(JS::Realm&, float default_value, float min_value, float max_value, Bindings::AutomationRate);
static GC::Ref<AudioParam> create(JS::Realm&, float default_value, float min_value, float max_value, Bindings::AutomationRate);
virtual ~AudioParam() override;
@ -32,13 +32,13 @@ public:
float min_value() const;
float max_value() const;
WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioParam>> set_value_at_time(float value, double start_time);
WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioParam>> linear_ramp_to_value_at_time(float value, double end_time);
WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioParam>> exponential_ramp_to_value_at_time(float value, double end_time);
WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioParam>> set_target_at_time(float target, double start_time, float time_constant);
WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioParam>> set_value_curve_at_time(Span<float> values, double start_time, double duration);
WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioParam>> cancel_scheduled_values(double cancel_time);
WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioParam>> cancel_and_hold_at_time(double cancel_time);
WebIDL::ExceptionOr<GC::Ref<AudioParam>> set_value_at_time(float value, double start_time);
WebIDL::ExceptionOr<GC::Ref<AudioParam>> linear_ramp_to_value_at_time(float value, double end_time);
WebIDL::ExceptionOr<GC::Ref<AudioParam>> exponential_ramp_to_value_at_time(float value, double end_time);
WebIDL::ExceptionOr<GC::Ref<AudioParam>> set_target_at_time(float target, double start_time, float time_constant);
WebIDL::ExceptionOr<GC::Ref<AudioParam>> set_value_curve_at_time(Span<float> values, double start_time, double duration);
WebIDL::ExceptionOr<GC::Ref<AudioParam>> cancel_scheduled_values(double cancel_time);
WebIDL::ExceptionOr<GC::Ref<AudioParam>> cancel_and_hold_at_time(double cancel_time);
private:
AudioParam(JS::Realm&, float default_value, float min_value, float max_value, Bindings::AutomationRate);

@ -11,9 +11,9 @@
namespace Web::WebAudio {
JS_DEFINE_ALLOCATOR(AudioScheduledSourceNode);
GC_DEFINE_ALLOCATOR(AudioScheduledSourceNode);
AudioScheduledSourceNode::AudioScheduledSourceNode(JS::Realm& realm, JS::NonnullGCPtr<BaseAudioContext> context)
AudioScheduledSourceNode::AudioScheduledSourceNode(JS::Realm& realm, GC::Ref<BaseAudioContext> context)
: AudioNode(realm, context)
{
}
@ -21,13 +21,13 @@ AudioScheduledSourceNode::AudioScheduledSourceNode(JS::Realm& realm, JS::Nonnull
AudioScheduledSourceNode::~AudioScheduledSourceNode() = default;
// https://webaudio.github.io/web-audio-api/#dom-audioscheduledsourcenode-onended
JS::GCPtr<WebIDL::CallbackType> AudioScheduledSourceNode::onended()
GC::Ptr<WebIDL::CallbackType> AudioScheduledSourceNode::onended()
{
return event_handler_attribute(HTML::EventNames::ended);
}
// https://webaudio.github.io/web-audio-api/#dom-audioscheduledsourcenode-onended
void AudioScheduledSourceNode::set_onended(JS::GCPtr<WebIDL::CallbackType> value)
void AudioScheduledSourceNode::set_onended(GC::Ptr<WebIDL::CallbackType> value)
{
set_event_handler_attribute(HTML::EventNames::ended, value);
}

@ -13,19 +13,19 @@ namespace Web::WebAudio {
// https://webaudio.github.io/web-audio-api/#AudioScheduledSourceNode
class AudioScheduledSourceNode : public AudioNode {
WEB_PLATFORM_OBJECT(AudioScheduledSourceNode, AudioNode);
JS_DECLARE_ALLOCATOR(AudioScheduledSourceNode);
GC_DECLARE_ALLOCATOR(AudioScheduledSourceNode);
public:
virtual ~AudioScheduledSourceNode() override;
JS::GCPtr<WebIDL::CallbackType> onended();
void set_onended(JS::GCPtr<WebIDL::CallbackType>);
GC::Ptr<WebIDL::CallbackType> onended();
void set_onended(GC::Ptr<WebIDL::CallbackType>);
WebIDL::ExceptionOr<void> start(double when = 0);
WebIDL::ExceptionOr<void> stop(double when = 0);
protected:
AudioScheduledSourceNode(JS::Realm&, JS::NonnullGCPtr<BaseAudioContext>);
AudioScheduledSourceNode(JS::Realm&, GC::Ref<BaseAudioContext>);
virtual void initialize(JS::Realm&) override;
virtual void visit_edges(Cell::Visitor&) override;

@ -59,14 +59,14 @@ WebIDL::CallbackType* BaseAudioContext::onstatechange()
}
// https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-createbiquadfilter
WebIDL::ExceptionOr<JS::NonnullGCPtr<BiquadFilterNode>> BaseAudioContext::create_biquad_filter()
WebIDL::ExceptionOr<GC::Ref<BiquadFilterNode>> BaseAudioContext::create_biquad_filter()
{
// Factory method for a BiquadFilterNode representing a second order filter which can be configured as one of several common filter types.
return BiquadFilterNode::create(realm(), *this);
}
// https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-createbuffer
WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioBuffer>> BaseAudioContext::create_buffer(WebIDL::UnsignedLong number_of_channels, WebIDL::UnsignedLong length, float sample_rate)
WebIDL::ExceptionOr<GC::Ref<AudioBuffer>> BaseAudioContext::create_buffer(WebIDL::UnsignedLong number_of_channels, WebIDL::UnsignedLong length, float sample_rate)
{
// Creates an AudioBuffer of the given size. The audio data in the buffer will be zero-initialized (silent).
// A NotSupportedError exception MUST be thrown if any of the arguments is negative, zero, or outside its nominal range.
@ -74,28 +74,28 @@ WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioBuffer>> BaseAudioContext::create_buff
}
// https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-createbuffersource
WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioBufferSourceNode>> BaseAudioContext::create_buffer_source()
WebIDL::ExceptionOr<GC::Ref<AudioBufferSourceNode>> BaseAudioContext::create_buffer_source()
{
// Factory method for a AudioBufferSourceNode.
return AudioBufferSourceNode::create(realm(), *this);
}
// https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-createoscillator
WebIDL::ExceptionOr<JS::NonnullGCPtr<OscillatorNode>> BaseAudioContext::create_oscillator()
WebIDL::ExceptionOr<GC::Ref<OscillatorNode>> BaseAudioContext::create_oscillator()
{
// Factory method for an OscillatorNode.
return OscillatorNode::create(realm(), *this);
}
// https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-createdynamicscompressor
WebIDL::ExceptionOr<JS::NonnullGCPtr<DynamicsCompressorNode>> BaseAudioContext::create_dynamics_compressor()
WebIDL::ExceptionOr<GC::Ref<DynamicsCompressorNode>> BaseAudioContext::create_dynamics_compressor()
{
// Factory method for a DynamicsCompressorNode.
return DynamicsCompressorNode::create(realm(), *this);
}
// https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-creategain
WebIDL::ExceptionOr<JS::NonnullGCPtr<GainNode>> BaseAudioContext::create_gain()
WebIDL::ExceptionOr<GC::Ref<GainNode>> BaseAudioContext::create_gain()
{
// Factory method for GainNode.
return GainNode::create(realm(), *this);
@ -121,14 +121,14 @@ WebIDL::ExceptionOr<void> BaseAudioContext::verify_audio_options_inside_nominal_
return {};
}
void BaseAudioContext::queue_a_media_element_task(JS::NonnullGCPtr<JS::HeapFunction<void()>> steps)
void BaseAudioContext::queue_a_media_element_task(GC::Ref<GC::Function<void()>> steps)
{
auto task = HTML::Task::create(vm(), m_media_element_event_task_source.source, HTML::current_principal_settings_object().responsible_document(), steps);
HTML::main_thread_event_loop().task_queue().add(task);
}
// https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-decodeaudiodata
JS::NonnullGCPtr<WebIDL::Promise> BaseAudioContext::decode_audio_data(JS::Handle<WebIDL::BufferSource> audio_data, JS::GCPtr<WebIDL::CallbackType> success_callback, JS::GCPtr<WebIDL::CallbackType> error_callback)
GC::Ref<WebIDL::Promise> BaseAudioContext::decode_audio_data(GC::Root<WebIDL::BufferSource> audio_data, GC::Ptr<WebIDL::CallbackType> success_callback, GC::Ptr<WebIDL::CallbackType> error_callback)
{
auto& realm = this->realm();
@ -169,7 +169,7 @@ JS::NonnullGCPtr<WebIDL::Promise> BaseAudioContext::decode_audio_data(JS::Handle
// 4.3. Queue a media element task to invoke errorCallback with error.
if (error_callback) {
queue_a_media_element_task(JS::create_heap_function(heap(), [&realm, error_callback, error] {
queue_a_media_element_task(GC::create_function(heap(), [&realm, error_callback, error] {
auto completion = WebIDL::invoke_callback(*error_callback, {}, error);
if (completion.is_abrupt())
HTML::report_exception(completion, realm);
@ -182,7 +182,7 @@ JS::NonnullGCPtr<WebIDL::Promise> BaseAudioContext::decode_audio_data(JS::Handle
}
// https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-decodeaudiodata
void BaseAudioContext::queue_a_decoding_operation(JS::NonnullGCPtr<JS::PromiseCapability> promise, [[maybe_unused]] JS::Handle<WebIDL::BufferSource> audio_data, JS::GCPtr<WebIDL::CallbackType> success_callback, JS::GCPtr<WebIDL::CallbackType> error_callback)
void BaseAudioContext::queue_a_decoding_operation(GC::Ref<JS::PromiseCapability> promise, [[maybe_unused]] GC::Root<WebIDL::BufferSource> audio_data, GC::Ptr<WebIDL::CallbackType> success_callback, GC::Ptr<WebIDL::CallbackType> error_callback)
{
auto& realm = this->realm();
@ -208,7 +208,7 @@ void BaseAudioContext::queue_a_decoding_operation(JS::NonnullGCPtr<JS::PromiseCa
// 4. If can decode is false,
if (!can_decode) {
// queue a media element task to execute the following steps:
queue_a_media_element_task(JS::create_heap_function(heap(), [this, &realm, promise, error_callback] {
queue_a_media_element_task(GC::create_function(heap(), [this, &realm, promise, error_callback] {
// 4.1. Let error be a DOMException whose name is EncodingError.
auto error = WebIDL::EncodingError::create(realm, "Unable to decode."_string);

@ -36,10 +36,10 @@ public:
static constexpr float MIN_SAMPLE_RATE { 8000 };
static constexpr float MAX_SAMPLE_RATE { 192000 };
JS::NonnullGCPtr<AudioDestinationNode> destination() const { return m_destination; }
GC::Ref<AudioDestinationNode> destination() const { return m_destination; }
float sample_rate() const { return m_sample_rate; }
double current_time() const { return m_current_time; }
JS::NonnullGCPtr<AudioListener> listener() const { return m_listener; }
GC::Ref<AudioListener> listener() const { return m_listener; }
Bindings::AudioContextState state() const { return m_control_thread_state; }
// https://webaudio.github.io/web-audio-api/#--nyquist-frequency
@ -54,33 +54,33 @@ public:
static WebIDL::ExceptionOr<void> verify_audio_options_inside_nominal_range(JS::Realm&, WebIDL::UnsignedLong number_of_channels, WebIDL::UnsignedLong length, float sample_rate);
WebIDL::ExceptionOr<JS::NonnullGCPtr<BiquadFilterNode>> create_biquad_filter();
WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioBuffer>> create_buffer(WebIDL::UnsignedLong number_of_channels, WebIDL::UnsignedLong length, float sample_rate);
WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioBufferSourceNode>> create_buffer_source();
WebIDL::ExceptionOr<JS::NonnullGCPtr<OscillatorNode>> create_oscillator();
WebIDL::ExceptionOr<JS::NonnullGCPtr<DynamicsCompressorNode>> create_dynamics_compressor();
WebIDL::ExceptionOr<JS::NonnullGCPtr<GainNode>> create_gain();
WebIDL::ExceptionOr<GC::Ref<BiquadFilterNode>> create_biquad_filter();
WebIDL::ExceptionOr<GC::Ref<AudioBuffer>> create_buffer(WebIDL::UnsignedLong number_of_channels, WebIDL::UnsignedLong length, float sample_rate);
WebIDL::ExceptionOr<GC::Ref<AudioBufferSourceNode>> create_buffer_source();
WebIDL::ExceptionOr<GC::Ref<OscillatorNode>> create_oscillator();
WebIDL::ExceptionOr<GC::Ref<DynamicsCompressorNode>> create_dynamics_compressor();
WebIDL::ExceptionOr<GC::Ref<GainNode>> create_gain();
JS::NonnullGCPtr<WebIDL::Promise> decode_audio_data(JS::Handle<WebIDL::BufferSource>, JS::GCPtr<WebIDL::CallbackType>, JS::GCPtr<WebIDL::CallbackType>);
GC::Ref<WebIDL::Promise> decode_audio_data(GC::Root<WebIDL::BufferSource>, GC::Ptr<WebIDL::CallbackType>, GC::Ptr<WebIDL::CallbackType>);
protected:
explicit BaseAudioContext(JS::Realm&, float m_sample_rate = 0);
void queue_a_media_element_task(JS::NonnullGCPtr<JS::HeapFunction<void()>>);
void queue_a_media_element_task(GC::Ref<GC::Function<void()>>);
virtual void initialize(JS::Realm&) override;
virtual void visit_edges(Cell::Visitor&) override;
JS::NonnullGCPtr<AudioDestinationNode> m_destination;
Vector<JS::NonnullGCPtr<WebIDL::Promise>> m_pending_promises;
GC::Ref<AudioDestinationNode> m_destination;
Vector<GC::Ref<WebIDL::Promise>> m_pending_promises;
private:
void queue_a_decoding_operation(JS::NonnullGCPtr<JS::PromiseCapability>, JS::Handle<WebIDL::BufferSource>, JS::GCPtr<WebIDL::CallbackType>, JS::GCPtr<WebIDL::CallbackType>);
void queue_a_decoding_operation(GC::Ref<JS::PromiseCapability>, GC::Root<WebIDL::BufferSource>, GC::Ptr<WebIDL::CallbackType>, GC::Ptr<WebIDL::CallbackType>);
float m_sample_rate { 0 };
double m_current_time { 0 };
JS::NonnullGCPtr<AudioListener> m_listener;
GC::Ref<AudioListener> m_listener;
Bindings::AudioContextState m_control_thread_state = Bindings::AudioContextState::Suspended;
Bindings::AudioContextState m_rendering_thread_state = Bindings::AudioContextState::Suspended;

@ -13,9 +13,9 @@
namespace Web::WebAudio {
JS_DEFINE_ALLOCATOR(BiquadFilterNode);
GC_DEFINE_ALLOCATOR(BiquadFilterNode);
BiquadFilterNode::BiquadFilterNode(JS::Realm& realm, JS::NonnullGCPtr<BaseAudioContext> context, BiquadFilterOptions const& options)
BiquadFilterNode::BiquadFilterNode(JS::Realm& realm, GC::Ref<BaseAudioContext> context, BiquadFilterOptions const& options)
: AudioNode(realm, context)
, m_type(options.type)
, m_frequency(AudioParam::create(realm, options.frequency, NumericLimits<float>::lowest(), NumericLimits<float>::max(), Bindings::AutomationRate::ARate))
@ -40,31 +40,31 @@ Bindings::BiquadFilterType BiquadFilterNode::type() const
}
// https://webaudio.github.io/web-audio-api/#dom-biquadfilternode-frequency
JS::NonnullGCPtr<AudioParam> BiquadFilterNode::frequency() const
GC::Ref<AudioParam> BiquadFilterNode::frequency() const
{
return m_frequency;
}
// https://webaudio.github.io/web-audio-api/#dom-biquadfilternode-detune
JS::NonnullGCPtr<AudioParam> BiquadFilterNode::detune() const
GC::Ref<AudioParam> BiquadFilterNode::detune() const
{
return m_detune;
}
// https://webaudio.github.io/web-audio-api/#dom-biquadfilternode-q
JS::NonnullGCPtr<AudioParam> BiquadFilterNode::q() const
GC::Ref<AudioParam> BiquadFilterNode::q() const
{
return m_q;
}
// https://webaudio.github.io/web-audio-api/#dom-biquadfilternode-gain
JS::NonnullGCPtr<AudioParam> BiquadFilterNode::gain() const
GC::Ref<AudioParam> BiquadFilterNode::gain() const
{
return m_gain;
}
// https://webaudio.github.io/web-audio-api/#dom-biquadfilternode-getfrequencyresponse
WebIDL::ExceptionOr<void> BiquadFilterNode::get_frequency_response(JS::Handle<WebIDL::BufferSource> const& frequency_hz, JS::Handle<WebIDL::BufferSource> const& mag_response, JS::Handle<WebIDL::BufferSource> const& phase_response)
WebIDL::ExceptionOr<void> BiquadFilterNode::get_frequency_response(GC::Root<WebIDL::BufferSource> const& frequency_hz, GC::Root<WebIDL::BufferSource> const& mag_response, GC::Root<WebIDL::BufferSource> const& phase_response)
{
(void)frequency_hz;
(void)mag_response;
@ -73,13 +73,13 @@ WebIDL::ExceptionOr<void> BiquadFilterNode::get_frequency_response(JS::Handle<We
return {};
}
WebIDL::ExceptionOr<JS::NonnullGCPtr<BiquadFilterNode>> BiquadFilterNode::create(JS::Realm& realm, JS::NonnullGCPtr<BaseAudioContext> context, BiquadFilterOptions const& options)
WebIDL::ExceptionOr<GC::Ref<BiquadFilterNode>> BiquadFilterNode::create(JS::Realm& realm, GC::Ref<BaseAudioContext> context, BiquadFilterOptions const& options)
{
return construct_impl(realm, context, options);
}
// https://webaudio.github.io/web-audio-api/#dom-biquadfilternode-biquadfilternode
WebIDL::ExceptionOr<JS::NonnullGCPtr<BiquadFilterNode>> BiquadFilterNode::construct_impl(JS::Realm& realm, JS::NonnullGCPtr<BaseAudioContext> context, BiquadFilterOptions const& options)
WebIDL::ExceptionOr<GC::Ref<BiquadFilterNode>> BiquadFilterNode::construct_impl(JS::Realm& realm, GC::Ref<BaseAudioContext> context, BiquadFilterOptions const& options)
{
// When the constructor is called with a BaseAudioContext c and an option object option, the user agent
// MUST initialize the AudioNode this, with context and options as arguments.

@ -24,7 +24,7 @@ struct BiquadFilterOptions : AudioNodeOptions {
// https://webaudio.github.io/web-audio-api/#BiquadFilterNode
class BiquadFilterNode : public AudioNode {
WEB_PLATFORM_OBJECT(BiquadFilterNode, AudioNode);
JS_DECLARE_ALLOCATOR(BiquadFilterNode);
GC_DECLARE_ALLOCATOR(BiquadFilterNode);
public:
virtual ~BiquadFilterNode() override;
@ -34,27 +34,27 @@ public:
void set_type(Bindings::BiquadFilterType);
Bindings::BiquadFilterType type() const;
JS::NonnullGCPtr<AudioParam> frequency() const;
JS::NonnullGCPtr<AudioParam> detune() const;
JS::NonnullGCPtr<AudioParam> q() const;
JS::NonnullGCPtr<AudioParam> gain() const;
WebIDL::ExceptionOr<void> get_frequency_response(JS::Handle<WebIDL::BufferSource> const&, JS::Handle<WebIDL::BufferSource> const&, JS::Handle<WebIDL::BufferSource> const&);
GC::Ref<AudioParam> frequency() const;
GC::Ref<AudioParam> detune() const;
GC::Ref<AudioParam> q() const;
GC::Ref<AudioParam> gain() const;
WebIDL::ExceptionOr<void> get_frequency_response(GC::Root<WebIDL::BufferSource> const&, GC::Root<WebIDL::BufferSource> const&, GC::Root<WebIDL::BufferSource> const&);
static WebIDL::ExceptionOr<JS::NonnullGCPtr<BiquadFilterNode>> create(JS::Realm&, JS::NonnullGCPtr<BaseAudioContext>, BiquadFilterOptions const& = {});
static WebIDL::ExceptionOr<JS::NonnullGCPtr<BiquadFilterNode>> construct_impl(JS::Realm&, JS::NonnullGCPtr<BaseAudioContext>, BiquadFilterOptions const& = {});
static WebIDL::ExceptionOr<GC::Ref<BiquadFilterNode>> create(JS::Realm&, GC::Ref<BaseAudioContext>, BiquadFilterOptions const& = {});
static WebIDL::ExceptionOr<GC::Ref<BiquadFilterNode>> construct_impl(JS::Realm&, GC::Ref<BaseAudioContext>, BiquadFilterOptions const& = {});
protected:
BiquadFilterNode(JS::Realm&, JS::NonnullGCPtr<BaseAudioContext>, BiquadFilterOptions const& = {});
BiquadFilterNode(JS::Realm&, GC::Ref<BaseAudioContext>, BiquadFilterOptions const& = {});
virtual void initialize(JS::Realm&) override;
virtual void visit_edges(Cell::Visitor&) override;
private:
Bindings::BiquadFilterType m_type { Bindings::BiquadFilterType::Lowpass };
JS::NonnullGCPtr<AudioParam> m_frequency;
JS::NonnullGCPtr<AudioParam> m_detune;
JS::NonnullGCPtr<AudioParam> m_q;
JS::NonnullGCPtr<AudioParam> m_gain;
GC::Ref<AudioParam> m_frequency;
GC::Ref<AudioParam> m_detune;
GC::Ref<AudioParam> m_q;
GC::Ref<AudioParam> m_gain;
};
}

@ -11,17 +11,17 @@
namespace Web::WebAudio {
JS_DEFINE_ALLOCATOR(DynamicsCompressorNode);
GC_DEFINE_ALLOCATOR(DynamicsCompressorNode);
DynamicsCompressorNode::~DynamicsCompressorNode() = default;
WebIDL::ExceptionOr<JS::NonnullGCPtr<DynamicsCompressorNode>> DynamicsCompressorNode::create(JS::Realm& realm, JS::NonnullGCPtr<BaseAudioContext> context, DynamicsCompressorOptions const& options)
WebIDL::ExceptionOr<GC::Ref<DynamicsCompressorNode>> DynamicsCompressorNode::create(JS::Realm& realm, GC::Ref<BaseAudioContext> context, DynamicsCompressorOptions const& options)
{
return construct_impl(realm, context, options);
}
// https://webaudio.github.io/web-audio-api/#dom-dynamicscompressornode-dynamicscompressornode
WebIDL::ExceptionOr<JS::NonnullGCPtr<DynamicsCompressorNode>> DynamicsCompressorNode::construct_impl(JS::Realm& realm, JS::NonnullGCPtr<BaseAudioContext> context, DynamicsCompressorOptions const& options)
WebIDL::ExceptionOr<GC::Ref<DynamicsCompressorNode>> DynamicsCompressorNode::construct_impl(JS::Realm& realm, GC::Ref<BaseAudioContext> context, DynamicsCompressorOptions const& options)
{
// Create the node and allocate memory
auto node = realm.create<DynamicsCompressorNode>(realm, context, options);
@ -39,7 +39,7 @@ WebIDL::ExceptionOr<JS::NonnullGCPtr<DynamicsCompressorNode>> DynamicsCompressor
return node;
}
DynamicsCompressorNode::DynamicsCompressorNode(JS::Realm& realm, JS::NonnullGCPtr<BaseAudioContext> context, DynamicsCompressorOptions const& options)
DynamicsCompressorNode::DynamicsCompressorNode(JS::Realm& realm, GC::Ref<BaseAudioContext> context, DynamicsCompressorOptions const& options)
: AudioNode(realm, context)
, m_threshold(AudioParam::create(realm, options.threshold, -100, 0, Bindings::AutomationRate::KRate))
, m_knee(AudioParam::create(realm, options.knee, 0, 40, Bindings::AutomationRate::KRate))

@ -22,48 +22,48 @@ struct DynamicsCompressorOptions : AudioNodeOptions {
// https://webaudio.github.io/web-audio-api/#DynamicsCompressorNode
class DynamicsCompressorNode : public AudioNode {
WEB_PLATFORM_OBJECT(DynamicsCompressorNode, AudioNode);
JS_DECLARE_ALLOCATOR(DynamicsCompressorNode);
GC_DECLARE_ALLOCATOR(DynamicsCompressorNode);
public:
virtual ~DynamicsCompressorNode() override;
static WebIDL::ExceptionOr<JS::NonnullGCPtr<DynamicsCompressorNode>> create(JS::Realm&, JS::NonnullGCPtr<BaseAudioContext>, DynamicsCompressorOptions const& = {});
static WebIDL::ExceptionOr<JS::NonnullGCPtr<DynamicsCompressorNode>> construct_impl(JS::Realm&, JS::NonnullGCPtr<BaseAudioContext>, DynamicsCompressorOptions const& = {});
static WebIDL::ExceptionOr<GC::Ref<DynamicsCompressorNode>> create(JS::Realm&, GC::Ref<BaseAudioContext>, DynamicsCompressorOptions const& = {});
static WebIDL::ExceptionOr<GC::Ref<DynamicsCompressorNode>> construct_impl(JS::Realm&, GC::Ref<BaseAudioContext>, DynamicsCompressorOptions const& = {});
WebIDL::UnsignedLong number_of_inputs() override { return 1; }
WebIDL::UnsignedLong number_of_outputs() override { return 1; }
JS::NonnullGCPtr<AudioParam const> threshold() const { return m_threshold; }
JS::NonnullGCPtr<AudioParam const> knee() const { return m_knee; }
JS::NonnullGCPtr<AudioParam const> ratio() const { return m_ratio; }
JS::NonnullGCPtr<AudioParam const> attack() const { return m_attack; }
JS::NonnullGCPtr<AudioParam const> release() const { return m_release; }
GC::Ref<AudioParam const> threshold() const { return m_threshold; }
GC::Ref<AudioParam const> knee() const { return m_knee; }
GC::Ref<AudioParam const> ratio() const { return m_ratio; }
GC::Ref<AudioParam const> attack() const { return m_attack; }
GC::Ref<AudioParam const> release() const { return m_release; }
float reduction() const { return m_reduction; }
WebIDL::ExceptionOr<void> set_channel_count_mode(Bindings::ChannelCountMode) override;
WebIDL::ExceptionOr<void> set_channel_count(WebIDL::UnsignedLong) override;
protected:
DynamicsCompressorNode(JS::Realm&, JS::NonnullGCPtr<BaseAudioContext>, DynamicsCompressorOptions const& = {});
DynamicsCompressorNode(JS::Realm&, GC::Ref<BaseAudioContext>, DynamicsCompressorOptions const& = {});
virtual void initialize(JS::Realm&) override;
virtual void visit_edges(Cell::Visitor&) override;
private:
// https://webaudio.github.io/web-audio-api/#dom-dynamicscompressornode-threshold
JS::NonnullGCPtr<AudioParam> m_threshold;
GC::Ref<AudioParam> m_threshold;
// https://webaudio.github.io/web-audio-api/#dom-dynamicscompressornode-knee
JS::NonnullGCPtr<AudioParam> m_knee;
GC::Ref<AudioParam> m_knee;
// https://webaudio.github.io/web-audio-api/#dom-dynamicscompressornode-ratio
JS::NonnullGCPtr<AudioParam> m_ratio;
GC::Ref<AudioParam> m_ratio;
// https://webaudio.github.io/web-audio-api/#dom-dynamicscompressornode-attack
JS::NonnullGCPtr<AudioParam> m_attack;
GC::Ref<AudioParam> m_attack;
// https://webaudio.github.io/web-audio-api/#dom-dynamicscompressornode-release
JS::NonnullGCPtr<AudioParam> m_release;
GC::Ref<AudioParam> m_release;
// https://webaudio.github.io/web-audio-api/#dom-dynamicscompressornode-internal-reduction-slot
float m_reduction { 0 }; // [[internal reduction]]

View file

@@ -12,17 +12,17 @@
namespace Web::WebAudio {
JS_DEFINE_ALLOCATOR(GainNode);
GC_DEFINE_ALLOCATOR(GainNode);
GainNode::~GainNode() = default;
WebIDL::ExceptionOr<JS::NonnullGCPtr<GainNode>> GainNode::create(JS::Realm& realm, JS::NonnullGCPtr<BaseAudioContext> context, GainOptions const& options)
WebIDL::ExceptionOr<GC::Ref<GainNode>> GainNode::create(JS::Realm& realm, GC::Ref<BaseAudioContext> context, GainOptions const& options)
{
return construct_impl(realm, context, options);
}
// https://webaudio.github.io/web-audio-api/#dom-gainnode-gainnode
WebIDL::ExceptionOr<JS::NonnullGCPtr<GainNode>> GainNode::construct_impl(JS::Realm& realm, JS::NonnullGCPtr<BaseAudioContext> context, GainOptions const& options)
WebIDL::ExceptionOr<GC::Ref<GainNode>> GainNode::construct_impl(JS::Realm& realm, GC::Ref<BaseAudioContext> context, GainOptions const& options)
{
// Create the node and allocate memory
auto node = realm.create<GainNode>(realm, context, options);
@@ -39,7 +39,7 @@ WebIDL::ExceptionOr<JS::NonnullGCPtr<GainNode>> GainNode::construct_impl(JS::Rea
return node;
}
GainNode::GainNode(JS::Realm& realm, JS::NonnullGCPtr<BaseAudioContext> context, GainOptions const& options)
GainNode::GainNode(JS::Realm& realm, GC::Ref<BaseAudioContext> context, GainOptions const& options)
: AudioNode(realm, context)
, m_gain(AudioParam::create(realm, options.gain, NumericLimits<float>::lowest(), NumericLimits<float>::max(), Bindings::AutomationRate::ARate))
{

View file

@@ -19,28 +19,28 @@ struct GainOptions : AudioNodeOptions {
// https://webaudio.github.io/web-audio-api/#GainNode
class GainNode : public AudioNode {
WEB_PLATFORM_OBJECT(GainNode, AudioNode);
JS_DECLARE_ALLOCATOR(GainNode);
GC_DECLARE_ALLOCATOR(GainNode);
public:
virtual ~GainNode() override;
static WebIDL::ExceptionOr<JS::NonnullGCPtr<GainNode>> create(JS::Realm&, JS::NonnullGCPtr<BaseAudioContext>, GainOptions const& = {});
static WebIDL::ExceptionOr<JS::NonnullGCPtr<GainNode>> construct_impl(JS::Realm&, JS::NonnullGCPtr<BaseAudioContext>, GainOptions const& = {});
static WebIDL::ExceptionOr<GC::Ref<GainNode>> create(JS::Realm&, GC::Ref<BaseAudioContext>, GainOptions const& = {});
static WebIDL::ExceptionOr<GC::Ref<GainNode>> construct_impl(JS::Realm&, GC::Ref<BaseAudioContext>, GainOptions const& = {});
WebIDL::UnsignedLong number_of_inputs() override { return 1; }
WebIDL::UnsignedLong number_of_outputs() override { return 1; }
JS::NonnullGCPtr<AudioParam const> gain() const { return m_gain; }
GC::Ref<AudioParam const> gain() const { return m_gain; }
protected:
GainNode(JS::Realm&, JS::NonnullGCPtr<BaseAudioContext>, GainOptions const& = {});
GainNode(JS::Realm&, GC::Ref<BaseAudioContext>, GainOptions const& = {});
virtual void initialize(JS::Realm&) override;
virtual void visit_edges(Cell::Visitor&) override;
private:
// https://webaudio.github.io/web-audio-api/#dom-gainnode-gain
JS::NonnullGCPtr<AudioParam> m_gain;
GC::Ref<AudioParam> m_gain;
};
}

View file

@@ -11,15 +11,15 @@
namespace Web::WebAudio {
JS_DEFINE_ALLOCATOR(OfflineAudioContext);
GC_DEFINE_ALLOCATOR(OfflineAudioContext);
WebIDL::ExceptionOr<JS::NonnullGCPtr<OfflineAudioContext>> OfflineAudioContext::construct_impl(JS::Realm& realm, OfflineAudioContextOptions const& context_options)
WebIDL::ExceptionOr<GC::Ref<OfflineAudioContext>> OfflineAudioContext::construct_impl(JS::Realm& realm, OfflineAudioContextOptions const& context_options)
{
return construct_impl(realm, context_options.number_of_channels, context_options.length, context_options.sample_rate);
}
// https://webaudio.github.io/web-audio-api/#dom-offlineaudiocontext-offlineaudiocontext-numberofchannels-length-samplerate
WebIDL::ExceptionOr<JS::NonnullGCPtr<OfflineAudioContext>> OfflineAudioContext::construct_impl(JS::Realm& realm,
WebIDL::ExceptionOr<GC::Ref<OfflineAudioContext>> OfflineAudioContext::construct_impl(JS::Realm& realm,
WebIDL::UnsignedLong number_of_channels, WebIDL::UnsignedLong length, float sample_rate)
{
// The OfflineAudioContext can be constructed with the same arguments as AudioContext.createBuffer.
@@ -32,17 +32,17 @@ WebIDL::ExceptionOr<JS::NonnullGCPtr<OfflineAudioContext>> OfflineAudioContext::
OfflineAudioContext::~OfflineAudioContext() = default;
// https://webaudio.github.io/web-audio-api/#dom-offlineaudiocontext-startrendering
WebIDL::ExceptionOr<JS::NonnullGCPtr<WebIDL::Promise>> OfflineAudioContext::start_rendering()
WebIDL::ExceptionOr<GC::Ref<WebIDL::Promise>> OfflineAudioContext::start_rendering()
{
return WebIDL::NotSupportedError::create(realm(), "FIXME: Implement OfflineAudioContext::start_rendering"_string);
}
WebIDL::ExceptionOr<JS::NonnullGCPtr<WebIDL::Promise>> OfflineAudioContext::resume()
WebIDL::ExceptionOr<GC::Ref<WebIDL::Promise>> OfflineAudioContext::resume()
{
return WebIDL::NotSupportedError::create(realm(), "FIXME: Implement OfflineAudioContext::resume"_string);
}
WebIDL::ExceptionOr<JS::NonnullGCPtr<WebIDL::Promise>> OfflineAudioContext::suspend(double suspend_time)
WebIDL::ExceptionOr<GC::Ref<WebIDL::Promise>> OfflineAudioContext::suspend(double suspend_time)
{
(void)suspend_time;
return WebIDL::NotSupportedError::create(realm(), "FIXME: Implement OfflineAudioContext::suspend"_string);
@@ -56,13 +56,13 @@ WebIDL::UnsignedLong OfflineAudioContext::length() const
}
// https://webaudio.github.io/web-audio-api/#dom-offlineaudiocontext-oncomplete
JS::GCPtr<WebIDL::CallbackType> OfflineAudioContext::oncomplete()
GC::Ptr<WebIDL::CallbackType> OfflineAudioContext::oncomplete()
{
return event_handler_attribute(HTML::EventNames::complete);
}
// https://webaudio.github.io/web-audio-api/#dom-offlineaudiocontext-oncomplete
void OfflineAudioContext::set_oncomplete(JS::GCPtr<WebIDL::CallbackType> value)
void OfflineAudioContext::set_oncomplete(GC::Ptr<WebIDL::CallbackType> value)
{
set_event_handler_attribute(HTML::EventNames::complete, value);
}

View file

@@ -23,23 +23,23 @@ struct OfflineAudioContextOptions {
// https://webaudio.github.io/web-audio-api/#OfflineAudioContext
class OfflineAudioContext final : public BaseAudioContext {
WEB_PLATFORM_OBJECT(OfflineAudioContext, BaseAudioContext);
JS_DECLARE_ALLOCATOR(OfflineAudioContext);
GC_DECLARE_ALLOCATOR(OfflineAudioContext);
public:
static WebIDL::ExceptionOr<JS::NonnullGCPtr<OfflineAudioContext>> construct_impl(JS::Realm&, OfflineAudioContextOptions const&);
static WebIDL::ExceptionOr<JS::NonnullGCPtr<OfflineAudioContext>> construct_impl(JS::Realm&,
static WebIDL::ExceptionOr<GC::Ref<OfflineAudioContext>> construct_impl(JS::Realm&, OfflineAudioContextOptions const&);
static WebIDL::ExceptionOr<GC::Ref<OfflineAudioContext>> construct_impl(JS::Realm&,
WebIDL::UnsignedLong number_of_channels, WebIDL::UnsignedLong length, float sample_rate);
virtual ~OfflineAudioContext() override;
WebIDL::ExceptionOr<JS::NonnullGCPtr<WebIDL::Promise>> start_rendering();
WebIDL::ExceptionOr<JS::NonnullGCPtr<WebIDL::Promise>> resume();
WebIDL::ExceptionOr<JS::NonnullGCPtr<WebIDL::Promise>> suspend(double suspend_time);
WebIDL::ExceptionOr<GC::Ref<WebIDL::Promise>> start_rendering();
WebIDL::ExceptionOr<GC::Ref<WebIDL::Promise>> resume();
WebIDL::ExceptionOr<GC::Ref<WebIDL::Promise>> suspend(double suspend_time);
WebIDL::UnsignedLong length() const;
JS::GCPtr<WebIDL::CallbackType> oncomplete();
void set_oncomplete(JS::GCPtr<WebIDL::CallbackType>);
GC::Ptr<WebIDL::CallbackType> oncomplete();
void set_oncomplete(GC::Ptr<WebIDL::CallbackType>);
private:
OfflineAudioContext(JS::Realm&, OfflineAudioContextOptions const&);

View file

@@ -12,17 +12,17 @@
namespace Web::WebAudio {
JS_DEFINE_ALLOCATOR(OscillatorNode);
GC_DEFINE_ALLOCATOR(OscillatorNode);
OscillatorNode::~OscillatorNode() = default;
WebIDL::ExceptionOr<JS::NonnullGCPtr<OscillatorNode>> OscillatorNode::create(JS::Realm& realm, JS::NonnullGCPtr<BaseAudioContext> context, OscillatorOptions const& options)
WebIDL::ExceptionOr<GC::Ref<OscillatorNode>> OscillatorNode::create(JS::Realm& realm, GC::Ref<BaseAudioContext> context, OscillatorOptions const& options)
{
return construct_impl(realm, context, options);
}
// https://webaudio.github.io/web-audio-api/#dom-oscillatornode-oscillatornode
WebIDL::ExceptionOr<JS::NonnullGCPtr<OscillatorNode>> OscillatorNode::construct_impl(JS::Realm& realm, JS::NonnullGCPtr<BaseAudioContext> context, OscillatorOptions const& options)
WebIDL::ExceptionOr<GC::Ref<OscillatorNode>> OscillatorNode::construct_impl(JS::Realm& realm, GC::Ref<BaseAudioContext> context, OscillatorOptions const& options)
{
// FIXME: Invoke "Initialize the AudioNode" steps.
TRY(verify_valid_type(realm, options.type));
@@ -30,7 +30,7 @@ WebIDL::ExceptionOr<JS::NonnullGCPtr<OscillatorNode>> OscillatorNode::construct_
return node;
}
OscillatorNode::OscillatorNode(JS::Realm& realm, JS::NonnullGCPtr<BaseAudioContext> context, OscillatorOptions const& options)
OscillatorNode::OscillatorNode(JS::Realm& realm, GC::Ref<BaseAudioContext> context, OscillatorOptions const& options)
: AudioScheduledSourceNode(realm, context)
, m_frequency(AudioParam::create(realm, options.frequency, -context->nyquist_frequency(), context->nyquist_frequency(), Bindings::AutomationRate::ARate))
{

View file

@@ -16,29 +16,29 @@ struct OscillatorOptions : AudioNodeOptions {
Bindings::OscillatorType type { Bindings::OscillatorType::Sine };
float frequency { 440 };
float detune { 0 };
JS::GCPtr<PeriodicWave> periodic_wave;
GC::Ptr<PeriodicWave> periodic_wave;
};
// https://webaudio.github.io/web-audio-api/#OscillatorNode
class OscillatorNode : public AudioScheduledSourceNode {
WEB_PLATFORM_OBJECT(OscillatorNode, AudioScheduledSourceNode);
JS_DECLARE_ALLOCATOR(OscillatorNode);
GC_DECLARE_ALLOCATOR(OscillatorNode);
public:
virtual ~OscillatorNode() override;
static WebIDL::ExceptionOr<JS::NonnullGCPtr<OscillatorNode>> create(JS::Realm&, JS::NonnullGCPtr<BaseAudioContext>, OscillatorOptions const& = {});
static WebIDL::ExceptionOr<JS::NonnullGCPtr<OscillatorNode>> construct_impl(JS::Realm&, JS::NonnullGCPtr<BaseAudioContext>, OscillatorOptions const& = {});
static WebIDL::ExceptionOr<GC::Ref<OscillatorNode>> create(JS::Realm&, GC::Ref<BaseAudioContext>, OscillatorOptions const& = {});
static WebIDL::ExceptionOr<GC::Ref<OscillatorNode>> construct_impl(JS::Realm&, GC::Ref<BaseAudioContext>, OscillatorOptions const& = {});
Bindings::OscillatorType type() const;
WebIDL::ExceptionOr<void> set_type(Bindings::OscillatorType);
JS::NonnullGCPtr<AudioParam const> frequency() const { return m_frequency; }
GC::Ref<AudioParam const> frequency() const { return m_frequency; }
WebIDL::UnsignedLong number_of_inputs() override { return 0; }
WebIDL::UnsignedLong number_of_outputs() override { return 1; }
protected:
OscillatorNode(JS::Realm&, JS::NonnullGCPtr<BaseAudioContext>, OscillatorOptions const& = {});
OscillatorNode(JS::Realm&, GC::Ref<BaseAudioContext>, OscillatorOptions const& = {});
virtual void initialize(JS::Realm&) override;
virtual void visit_edges(Cell::Visitor&) override;
@@ -50,7 +50,7 @@ private:
Bindings::OscillatorType m_type { Bindings::OscillatorType::Sine };
// https://webaudio.github.io/web-audio-api/#dom-oscillatornode-frequency
JS::NonnullGCPtr<AudioParam> m_frequency;
GC::Ref<AudioParam> m_frequency;
};
}

View file

@@ -11,10 +11,10 @@
namespace Web::WebAudio {
JS_DEFINE_ALLOCATOR(PeriodicWave);
GC_DEFINE_ALLOCATOR(PeriodicWave);
// https://webaudio.github.io/web-audio-api/#dom-periodicwave-periodicwave
WebIDL::ExceptionOr<JS::NonnullGCPtr<PeriodicWave>> PeriodicWave::construct_impl(JS::Realm& realm, JS::NonnullGCPtr<BaseAudioContext>, PeriodicWaveOptions const&)
WebIDL::ExceptionOr<GC::Ref<PeriodicWave>> PeriodicWave::construct_impl(JS::Realm& realm, GC::Ref<BaseAudioContext>, PeriodicWaveOptions const&)
{
return WebIDL::NotSupportedError::create(realm, "FIXME: Implement PeriodicWave::construct_impl"_string);
}

View file

@@ -26,10 +26,10 @@ struct PeriodicWaveOptions : PeriodicWaveConstraints {
// https://webaudio.github.io/web-audio-api/#PeriodicWave
class PeriodicWave : public Bindings::PlatformObject {
WEB_PLATFORM_OBJECT(PeriodicWave, Bindings::PlatformObject);
JS_DECLARE_ALLOCATOR(PeriodicWave);
GC_DECLARE_ALLOCATOR(PeriodicWave);
public:
static WebIDL::ExceptionOr<JS::NonnullGCPtr<PeriodicWave>> construct_impl(JS::Realm&, JS::NonnullGCPtr<BaseAudioContext>, PeriodicWaveOptions const&);
static WebIDL::ExceptionOr<GC::Ref<PeriodicWave>> construct_impl(JS::Realm&, GC::Ref<BaseAudioContext>, PeriodicWaveOptions const&);
virtual ~PeriodicWave() override;