LibWeb: Add BaseAudioContext::createScriptProcessor()

This is a deprecated node, but it's still widely used on the web.
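
For context, a minimal sketch of how web content typically drives this API (a pass-through processor; names are illustrative):

const context = new AudioContext();
// bufferSize 0 lets the implementation pick; 2 input and 2 output channels.
const processor = context.createScriptProcessor(0, 2, 2);
processor.onaudioprocess = (event) => {
    // Pass-through: copy each input channel to the matching output channel.
    for (let channel = 0; channel < event.outputBuffer.numberOfChannels; ++channel)
        event.outputBuffer.getChannelData(channel).set(event.inputBuffer.getChannelData(channel));
};
processor.connect(context.destination);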
Jelle Raaijmakers 2025-07-25 09:45:13 +02:00 committed by Andreas Kling
commit 35ca7f82b0
17 changed files with 2614 additions and 6 deletions


@@ -919,6 +919,7 @@ set(SOURCES
WebAudio/OscillatorNode.cpp
WebAudio/PannerNode.cpp
WebAudio/PeriodicWave.cpp
WebAudio/ScriptProcessorNode.cpp
WebAudio/StereoPannerNode.cpp
WebDriver/Actions.cpp
WebDriver/Capabilities.cpp


@@ -18,6 +18,7 @@ namespace Web::HTML::EventNames {
__ENUMERATE_HTML_EVENT(animationend) \
__ENUMERATE_HTML_EVENT(animationiteration) \
__ENUMERATE_HTML_EVENT(animationstart) \
__ENUMERATE_HTML_EVENT(audioprocess) \
__ENUMERATE_HTML_EVENT(beforeinput) \
__ENUMERATE_HTML_EVENT(beforematch) \
__ENUMERATE_HTML_EVENT(beforeprint) \
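
Registering audioprocess here backs both the onaudioprocess IDL attribute and regular event listeners; a quick sketch (node being a ScriptProcessorNode instance):

node.addEventListener("audioprocess", (event) => {
    // event is an AudioProcessingEvent carrying inputBuffer, outputBuffer and playbackTime.
    console.log(event.playbackTime);
});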


@@ -160,6 +160,21 @@ WebIDL::ExceptionOr<GC::Ref<PeriodicWave>> BaseAudioContext::create_periodic_wav
return PeriodicWave::construct_impl(realm(), *this, options);
}
// https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-createscriptprocessor
WebIDL::ExceptionOr<GC::Ref<ScriptProcessorNode>> BaseAudioContext::create_script_processor(
WebIDL::UnsignedLong buffer_size, WebIDL::UnsignedLong number_of_input_channels,
WebIDL::UnsignedLong number_of_output_channels)
{
// The bufferSize parameter determines the buffer size in units of sample-frames. If it's not passed in, or if the
// value is 0, then the implementation will choose the best buffer size for the given environment, which will be a
// constant power of 2 throughout the lifetime of the node.
if (buffer_size == 0)
buffer_size = ScriptProcessorNode::DEFAULT_BUFFER_SIZE;
return ScriptProcessorNode::create(realm(), *this, buffer_size, number_of_input_channels,
number_of_output_channels);
}
// https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-createstereopanner
WebIDL::ExceptionOr<GC::Ref<StereoPannerNode>> BaseAudioContext::create_stereo_panner()
{
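
The default-size path above is observable from script; a sketch (the 1024-frame fallback is this implementation's choice, as the spec leaves the value open):

const context = new AudioContext();
const node = context.createScriptProcessor(); // bufferSize defaults to 0
console.log(node.bufferSize); // 1024 here, i.e. ScriptProcessorNode::DEFAULT_BUFFER_SIZE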


@@ -18,6 +18,7 @@
#include <LibWeb/WebAudio/ConstantSourceNode.h>
#include <LibWeb/WebAudio/DelayNode.h>
#include <LibWeb/WebAudio/PeriodicWave.h>
#include <LibWeb/WebAudio/ScriptProcessorNode.h>
#include <LibWeb/WebAudio/StereoPannerNode.h>
#include <LibWeb/WebIDL/Types.h>
@@ -75,6 +76,8 @@ public:
WebIDL::ExceptionOr<GC::Ref<GainNode>> create_gain();
WebIDL::ExceptionOr<GC::Ref<PannerNode>> create_panner();
WebIDL::ExceptionOr<GC::Ref<PeriodicWave>> create_periodic_wave(Vector<float> const& real, Vector<float> const& imag, Optional<PeriodicWaveConstraints> const& constraints = {});
WebIDL::ExceptionOr<GC::Ref<ScriptProcessorNode>> create_script_processor(WebIDL::UnsignedLong buffer_size,
WebIDL::UnsignedLong number_of_input_channels, WebIDL::UnsignedLong number_of_output_channels);
WebIDL::ExceptionOr<GC::Ref<StereoPannerNode>> create_stereo_panner();
GC::Ref<WebIDL::Promise> decode_audio_data(GC::Root<WebIDL::BufferSource>, GC::Ptr<WebIDL::CallbackType>, GC::Ptr<WebIDL::CallbackType>);


@@ -45,7 +45,7 @@ interface BaseAudioContext : EventTarget {
OscillatorNode createOscillator();
PannerNode createPanner();
PeriodicWave createPeriodicWave (sequence<float> real, sequence<float> imag, optional PeriodicWaveConstraints constraints = {});
[FIXME] ScriptProcessorNode createScriptProcessor(optional unsigned long bufferSize = 0, optional unsigned long numberOfInputChannels = 2, optional unsigned long numberOfOutputChannels = 2);
ScriptProcessorNode createScriptProcessor(optional unsigned long bufferSize = 0, optional unsigned long numberOfInputChannels = 2, optional unsigned long numberOfOutputChannels = 2);
StereoPannerNode createStereoPanner ();
[FIXME] WaveShaperNode createWaveShaper ();


@@ -0,0 +1,125 @@
/*
* Copyright (c) 2025, Jelle Raaijmakers <jelle@ladybird.org>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#include "BaseAudioContext.h"
#include <AK/GenericShorthands.h>
#include <LibWeb/Bindings/Intrinsics.h>
#include <LibWeb/Bindings/ScriptProcessorNodePrototype.h>
#include <LibWeb/HTML/EventNames.h>
#include <LibWeb/WebAudio/ScriptProcessorNode.h>
namespace Web::WebAudio {
GC_DEFINE_ALLOCATOR(ScriptProcessorNode);
ScriptProcessorNode::ScriptProcessorNode(JS::Realm& realm, GC::Ref<BaseAudioContext> context,
u8 number_of_input_channels, u8 number_of_output_channels)
: AudioNode(realm, context)
, m_number_of_input_channels(number_of_input_channels)
, m_number_of_output_channels(number_of_output_channels)
{
}
ScriptProcessorNode::~ScriptProcessorNode() = default;
WebIDL::ExceptionOr<GC::Ref<ScriptProcessorNode>> ScriptProcessorNode::create(JS::Realm& realm,
GC::Ref<BaseAudioContext> context, WebIDL::Long buffer_size, WebIDL::UnsignedLong number_of_input_channels,
WebIDL::UnsignedLong number_of_output_channels)
{
// https://webaudio.github.io/web-audio-api/#ScriptProcessorNode
// It is invalid for both numberOfInputChannels and numberOfOutputChannels to be zero. In this case an
// IndexSizeError MUST be thrown.
if (number_of_input_channels == 0 && number_of_output_channels == 0) {
return WebIDL::IndexSizeError::create(realm,
"Number of input and output channels cannot both be zero in a ScriptProcessorNode"_string);
}
// This parameter determines the number of channels for this node's input. The default value is 2. Values of up to
// 32 must be supported. A NotSupportedError must be thrown if the number of channels is not supported.
if (number_of_input_channels > BaseAudioContext::MAX_NUMBER_OF_CHANNELS)
return WebIDL::NotSupportedError::create(realm, "Invalid number of input channels"_string);
// This parameter determines the number of channels for this node's output. The default value is 2. Values of up to
// 32 must be supported. A NotSupportedError must be thrown if the number of channels is not supported.
if (number_of_output_channels > BaseAudioContext::MAX_NUMBER_OF_CHANNELS)
return WebIDL::NotSupportedError::create(realm, "Invalid number of output channels"_string);
auto script_processor_node = realm.create<ScriptProcessorNode>(realm, context,
number_of_input_channels, number_of_output_channels);
TRY(script_processor_node->set_buffer_size(buffer_size));
// https://webaudio.github.io/web-audio-api/#dom-audionode-channelcountmode
// The channel count mode cannot be changed from "explicit" and a NotSupportedError exception MUST be thrown for
// any attempt to change the value.
TRY(script_processor_node->set_channel_count_mode(Bindings::ChannelCountMode::Explicit));
return script_processor_node;
}
void ScriptProcessorNode::initialize(JS::Realm& realm)
{
WEB_SET_PROTOTYPE_FOR_INTERFACE(ScriptProcessorNode);
Base::initialize(realm);
}
// https://webaudio.github.io/web-audio-api/#ScriptProcessorNode
WebIDL::UnsignedLong ScriptProcessorNode::channel_count() const
{
// This is the number of channels specified when constructing this node.
return m_number_of_input_channels;
}
// https://webaudio.github.io/web-audio-api/#dom-audionode-channelcount
WebIDL::ExceptionOr<void> ScriptProcessorNode::set_channel_count(WebIDL::UnsignedLong)
{
// ScriptProcessorNode: The channel count cannot be changed, and a NotSupportedError exception MUST be thrown for
// any attempt to change the value.
return WebIDL::NotSupportedError::create(realm(),
"Cannot modify channel count in a ScriptProcessorNode"_string);
}
WebIDL::ExceptionOr<void> ScriptProcessorNode::set_channel_count_mode(Bindings::ChannelCountMode channel_count_mode)
{
// https://webaudio.github.io/web-audio-api/#audionode-channelcountmode-constraints
// ScriptProcessorNode: The channel count mode cannot be changed from "explicit" and a NotSupportedError exception
// MUST be thrown for any attempt to change the value.
if (channel_count_mode != Bindings::ChannelCountMode::Explicit)
return WebIDL::NotSupportedError::create(realm(), "Channel count mode must be 'explicit'"_string);
return AudioNode::set_channel_count_mode(channel_count_mode);
}
// https://webaudio.github.io/web-audio-api/#dom-scriptprocessornode-onaudioprocess
GC::Ptr<WebIDL::CallbackType> ScriptProcessorNode::onaudioprocess()
{
return event_handler_attribute(HTML::EventNames::audioprocess);
}
// https://webaudio.github.io/web-audio-api/#dom-scriptprocessornode-onaudioprocess
void ScriptProcessorNode::set_onaudioprocess(GC::Ptr<WebIDL::CallbackType> value)
{
set_event_handler_attribute(HTML::EventNames::audioprocess, value);
}
// https://webaudio.github.io/web-audio-api/#dom-scriptprocessornode-buffersize
WebIDL::ExceptionOr<void> ScriptProcessorNode::set_buffer_size(WebIDL::Long buffer_size)
{
// The size of the buffer (in sample-frames) which needs to be processed each time audioprocess is fired. Legal
// values are (256, 512, 1024, 2048, 4096, 8192, 16384).
// https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-createscriptprocessor
// If the value of this parameter is not one of the allowed power-of-2 values listed above, an IndexSizeError MUST
// be thrown.
if (!first_is_one_of(buffer_size, 256, 512, 1024, 2048, 4096, 8192, 16384))
return WebIDL::IndexSizeError::create(realm(), "Unsupported buffer size for a ScriptProcessorNode"_string);
m_buffer_size = buffer_size;
return {};
}
}
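
The validation above maps to these script-visible failures (a sketch of the spec-mandated errors):

const context = new AudioContext();
try { context.createScriptProcessor(256, 0, 0); }  // both channel counts zero
catch (e) { console.log(e.name); }                 // "IndexSizeError"
try { context.createScriptProcessor(256, 33, 2); } // more than 32 input channels
catch (e) { console.log(e.name); }                 // "NotSupportedError"
try { context.createScriptProcessor(1000, 2, 2); } // not a legal power-of-2 buffer size
catch (e) { console.log(e.name); }                 // "IndexSizeError"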


@@ -0,0 +1,51 @@
/*
* Copyright (c) 2025, Jelle Raaijmakers <jelle@ladybird.org>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#pragma once
#include <LibWeb/WebAudio/AudioNode.h>
namespace Web::WebAudio {
// https://webaudio.github.io/web-audio-api/#ScriptProcessorNode
class ScriptProcessorNode final : public AudioNode {
WEB_PLATFORM_OBJECT(ScriptProcessorNode, AudioNode);
GC_DECLARE_ALLOCATOR(ScriptProcessorNode);
public:
static constexpr WebIDL::Long DEFAULT_BUFFER_SIZE = 1024;
virtual ~ScriptProcessorNode() override;
static WebIDL::ExceptionOr<GC::Ref<ScriptProcessorNode>> create(JS::Realm&, GC::Ref<BaseAudioContext>,
WebIDL::Long buffer_size, WebIDL::UnsignedLong number_of_input_channels,
WebIDL::UnsignedLong number_of_output_channels);
// ^AudioNode
virtual WebIDL::UnsignedLong channel_count() const override;
virtual WebIDL::ExceptionOr<void> set_channel_count(WebIDL::UnsignedLong) override;
virtual WebIDL::ExceptionOr<void> set_channel_count_mode(Bindings::ChannelCountMode) override;
virtual WebIDL::UnsignedLong number_of_inputs() override { return 1; }
virtual WebIDL::UnsignedLong number_of_outputs() override { return 1; }
GC::Ptr<WebIDL::CallbackType> onaudioprocess();
void set_onaudioprocess(GC::Ptr<WebIDL::CallbackType>);
WebIDL::Long buffer_size() const { return m_buffer_size; }
WebIDL::ExceptionOr<void> set_buffer_size(WebIDL::Long buffer_size);
private:
ScriptProcessorNode(JS::Realm&, GC::Ref<BaseAudioContext>, u8 number_of_input_channels,
u8 number_of_output_channels);
virtual void initialize(JS::Realm&) override;
WebIDL::Long m_buffer_size { 0 };
u8 m_number_of_input_channels { 0 };
u8 m_number_of_output_channels { 0 };
};
}
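
Per the overrides above, channelCount mirrors the input channel count fixed at construction, and both the count and its mode are locked; a sketch (assuming an existing AudioContext named context):

const node = context.createScriptProcessor(512, 4, 2);
console.log(node.channelCount);     // 4, the number of input channels
console.log(node.channelCountMode); // "explicit"
node.channelCount = 2;              // throws: the channel count cannot be changed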


@@ -0,0 +1,9 @@
#import <DOM/EventHandler.idl>
#import <WebAudio/AudioNode.idl>
// https://webaudio.github.io/web-audio-api/#ScriptProcessorNode
[Exposed=Window]
interface ScriptProcessorNode : AudioNode {
attribute EventHandler onaudioprocess;
readonly attribute long bufferSize;
};


@@ -404,16 +404,17 @@ libweb_js_bindings(WebAudio/AudioScheduledSourceNode)
libweb_js_bindings(WebAudio/BaseAudioContext)
libweb_js_bindings(WebAudio/BiquadFilterNode)
libweb_js_bindings(WebAudio/DynamicsCompressorNode)
libweb_js_bindings(WebAudio/GainNode)
libweb_js_bindings(WebAudio/ChannelMergerNode)
libweb_js_bindings(WebAudio/ChannelSplitterNode)
libweb_js_bindings(WebAudio/ConstantSourceNode)
libweb_js_bindings(WebAudio/DelayNode)
libweb_js_bindings(WebAudio/GainNode)
libweb_js_bindings(WebAudio/MediaElementAudioSourceNode)
libweb_js_bindings(WebAudio/OfflineAudioContext)
libweb_js_bindings(WebAudio/OscillatorNode)
libweb_js_bindings(WebAudio/PannerNode)
libweb_js_bindings(WebAudio/PeriodicWave)
libweb_js_bindings(WebAudio/ScriptProcessorNode)
libweb_js_bindings(WebAudio/StereoPannerNode)
libweb_js_bindings(WebGL/Extensions/ANGLEInstancedArrays)
libweb_js_bindings(WebGL/Extensions/EXTBlendMinMax)


@@ -380,6 +380,7 @@ SVGUseElement
SVGViewElement
Screen
ScreenOrientation
ScriptProcessorNode
SecurityPolicyViolationEvent
Selection
ServiceWorker

File diff suppressed because it is too large.


@@ -1,9 +1,9 @@
Harness status: OK
Found 317 tests
Found 319 tests
312 Pass
5 Fail
315 Pass
4 Fail
Pass # AUDIT TASK RUNNER STARTED.
Pass Executing "initialize"
Pass Executing "Offline createGain"
@@ -20,7 +20,7 @@ Fail Executing "Offline createIIRFilter"
Fail Executing "Offline createWaveShaper"
Fail Executing "Offline createConvolver"
Pass Executing "Offline createAnalyser"
Fail Executing "Offline createScriptProcessor"
Pass Executing "Offline createScriptProcessor"
Pass Executing "Offline createPeriodicWave"
Pass Executing "Offline createChannelSplitter"
Pass Executing "Offline createChannelMerger"
@@ -244,6 +244,8 @@ Pass > [Offline createAnalyser]
Pass AnalyserNode has no AudioParams as expected
Pass < [Offline createAnalyser] All assertions passed. (total 1 assertions)
Pass > [Offline createScriptProcessor]
Pass ScriptProcessorNode has no AudioParams as expected
Pass < [Offline createScriptProcessor] All assertions passed. (total 1 assertions)
Pass > [Offline createPeriodicWave]
Pass PeriodicWave has no AudioParams as expected
Pass < [Offline createPeriodicWave] All assertions passed. (total 1 assertions)


@@ -0,0 +1,261 @@
// GENERATED CONTENT - DO NOT EDIT
// Content was automatically extracted by Reffy into webref
// (https://github.com/w3c/webref)
// Source: Media Capture and Streams (https://w3c.github.io/mediacapture-main/)
[Exposed=Window]
interface MediaStream : EventTarget {
constructor();
constructor(MediaStream stream);
constructor(sequence<MediaStreamTrack> tracks);
readonly attribute DOMString id;
sequence<MediaStreamTrack> getAudioTracks();
sequence<MediaStreamTrack> getVideoTracks();
sequence<MediaStreamTrack> getTracks();
MediaStreamTrack? getTrackById(DOMString trackId);
undefined addTrack(MediaStreamTrack track);
undefined removeTrack(MediaStreamTrack track);
MediaStream clone();
readonly attribute boolean active;
attribute EventHandler onaddtrack;
attribute EventHandler onremovetrack;
};
[Exposed=Window]
interface MediaStreamTrack : EventTarget {
readonly attribute DOMString kind;
readonly attribute DOMString id;
readonly attribute DOMString label;
attribute boolean enabled;
readonly attribute boolean muted;
attribute EventHandler onmute;
attribute EventHandler onunmute;
readonly attribute MediaStreamTrackState readyState;
attribute EventHandler onended;
MediaStreamTrack clone();
undefined stop();
MediaTrackCapabilities getCapabilities();
MediaTrackConstraints getConstraints();
MediaTrackSettings getSettings();
Promise<undefined> applyConstraints(optional MediaTrackConstraints constraints = {});
};
enum MediaStreamTrackState {
"live",
"ended"
};
dictionary MediaTrackSupportedConstraints {
boolean width = true;
boolean height = true;
boolean aspectRatio = true;
boolean frameRate = true;
boolean facingMode = true;
boolean resizeMode = true;
boolean sampleRate = true;
boolean sampleSize = true;
boolean echoCancellation = true;
boolean autoGainControl = true;
boolean noiseSuppression = true;
boolean latency = true;
boolean channelCount = true;
boolean deviceId = true;
boolean groupId = true;
boolean backgroundBlur = true;
};
dictionary MediaTrackCapabilities {
ULongRange width;
ULongRange height;
DoubleRange aspectRatio;
DoubleRange frameRate;
sequence<DOMString> facingMode;
sequence<DOMString> resizeMode;
ULongRange sampleRate;
ULongRange sampleSize;
sequence<(boolean or DOMString)> echoCancellation;
sequence<boolean> autoGainControl;
sequence<boolean> noiseSuppression;
DoubleRange latency;
ULongRange channelCount;
DOMString deviceId;
DOMString groupId;
sequence<boolean> backgroundBlur;
};
dictionary MediaTrackConstraints : MediaTrackConstraintSet {
sequence<MediaTrackConstraintSet> advanced;
};
dictionary MediaTrackConstraintSet {
ConstrainULong width;
ConstrainULong height;
ConstrainDouble aspectRatio;
ConstrainDouble frameRate;
ConstrainDOMString facingMode;
ConstrainDOMString resizeMode;
ConstrainULong sampleRate;
ConstrainULong sampleSize;
ConstrainBooleanOrDOMString echoCancellation;
ConstrainBoolean autoGainControl;
ConstrainBoolean noiseSuppression;
ConstrainDouble latency;
ConstrainULong channelCount;
ConstrainDOMString deviceId;
ConstrainDOMString groupId;
ConstrainBoolean backgroundBlur;
};
dictionary MediaTrackSettings {
unsigned long width;
unsigned long height;
double aspectRatio;
double frameRate;
DOMString facingMode;
DOMString resizeMode;
unsigned long sampleRate;
unsigned long sampleSize;
(boolean or DOMString) echoCancellation;
boolean autoGainControl;
boolean noiseSuppression;
double latency;
unsigned long channelCount;
DOMString deviceId;
DOMString groupId;
boolean backgroundBlur;
};
enum VideoFacingModeEnum {
"user",
"environment",
"left",
"right"
};
enum VideoResizeModeEnum {
"none",
"crop-and-scale"
};
enum EchoCancellationModeEnum {
"all",
"remote-only"
};
[Exposed=Window]
interface MediaStreamTrackEvent : Event {
constructor(DOMString type, MediaStreamTrackEventInit eventInitDict);
[SameObject] readonly attribute MediaStreamTrack track;
};
dictionary MediaStreamTrackEventInit : EventInit {
required MediaStreamTrack track;
};
[Exposed=Window]
interface OverconstrainedError : DOMException {
constructor(DOMString constraint, optional DOMString message = "");
readonly attribute DOMString constraint;
};
partial interface Navigator {
[SameObject, SecureContext] readonly attribute MediaDevices mediaDevices;
};
[Exposed=Window, SecureContext]
interface MediaDevices : EventTarget {
attribute EventHandler ondevicechange;
Promise<sequence<MediaDeviceInfo>> enumerateDevices();
};
[Exposed=Window, SecureContext]
interface MediaDeviceInfo {
readonly attribute DOMString deviceId;
readonly attribute MediaDeviceKind kind;
readonly attribute DOMString label;
readonly attribute DOMString groupId;
[Default] object toJSON();
};
enum MediaDeviceKind {
"audioinput",
"audiooutput",
"videoinput"
};
[Exposed=Window, SecureContext]
interface InputDeviceInfo : MediaDeviceInfo {
MediaTrackCapabilities getCapabilities();
};
[Exposed=Window]
interface DeviceChangeEvent : Event {
constructor(DOMString type, optional DeviceChangeEventInit eventInitDict = {});
[SameObject] readonly attribute FrozenArray<MediaDeviceInfo> devices;
[SameObject] readonly attribute FrozenArray<MediaDeviceInfo> userInsertedDevices;
};
dictionary DeviceChangeEventInit : EventInit {
sequence<MediaDeviceInfo> devices = [];
};
partial interface MediaDevices {
MediaTrackSupportedConstraints getSupportedConstraints();
Promise<MediaStream> getUserMedia(optional MediaStreamConstraints constraints = {});
};
dictionary MediaStreamConstraints {
(boolean or MediaTrackConstraints) video = false;
(boolean or MediaTrackConstraints) audio = false;
};
dictionary DoubleRange {
double max;
double min;
};
dictionary ConstrainDoubleRange : DoubleRange {
double exact;
double ideal;
};
dictionary ULongRange {
[Clamp] unsigned long max;
[Clamp] unsigned long min;
};
dictionary ConstrainULongRange : ULongRange {
[Clamp] unsigned long exact;
[Clamp] unsigned long ideal;
};
dictionary ConstrainBooleanParameters {
boolean exact;
boolean ideal;
};
dictionary ConstrainDOMStringParameters {
(DOMString or sequence<DOMString>) exact;
(DOMString or sequence<DOMString>) ideal;
};
dictionary ConstrainBooleanOrDOMStringParameters {
(boolean or DOMString) exact;
(boolean or DOMString) ideal;
};
typedef ([Clamp] unsigned long or ConstrainULongRange) ConstrainULong;
typedef (double or ConstrainDoubleRange) ConstrainDouble;
typedef (boolean or ConstrainBooleanParameters) ConstrainBoolean;
typedef (DOMString or
sequence<DOMString> or
ConstrainDOMStringParameters) ConstrainDOMString;
typedef (boolean or DOMString or ConstrainBooleanOrDOMStringParameters) ConstrainBooleanOrDOMString;
dictionary CameraDevicePermissionDescriptor : PermissionDescriptor {
boolean panTiltZoom = false;
};


@@ -0,0 +1,237 @@
// GENERATED CONTENT - DO NOT EDIT
// Content was automatically extracted by Reffy into webref
// (https://github.com/w3c/webref)
// Source: UI Events (https://w3c.github.io/uievents/)
[Exposed=Window]
interface UIEvent : Event {
constructor(DOMString type, optional UIEventInit eventInitDict = {});
readonly attribute Window? view;
readonly attribute long detail;
};
dictionary UIEventInit : EventInit {
Window? view = null;
long detail = 0;
};
[Exposed=Window]
interface FocusEvent : UIEvent {
constructor(DOMString type, optional FocusEventInit eventInitDict = {});
readonly attribute EventTarget? relatedTarget;
};
dictionary FocusEventInit : UIEventInit {
EventTarget? relatedTarget = null;
};
[Exposed=Window]
interface MouseEvent : UIEvent {
constructor(DOMString type, optional MouseEventInit eventInitDict = {});
readonly attribute long screenX;
readonly attribute long screenY;
readonly attribute long clientX;
readonly attribute long clientY;
readonly attribute long layerX;
readonly attribute long layerY;
readonly attribute boolean ctrlKey;
readonly attribute boolean shiftKey;
readonly attribute boolean altKey;
readonly attribute boolean metaKey;
readonly attribute short button;
readonly attribute unsigned short buttons;
readonly attribute EventTarget? relatedTarget;
boolean getModifierState(DOMString keyArg);
};
dictionary MouseEventInit : EventModifierInit {
long screenX = 0;
long screenY = 0;
long clientX = 0;
long clientY = 0;
short button = 0;
unsigned short buttons = 0;
EventTarget? relatedTarget = null;
};
dictionary EventModifierInit : UIEventInit {
boolean ctrlKey = false;
boolean shiftKey = false;
boolean altKey = false;
boolean metaKey = false;
boolean modifierAltGraph = false;
boolean modifierCapsLock = false;
boolean modifierFn = false;
boolean modifierFnLock = false;
boolean modifierHyper = false;
boolean modifierNumLock = false;
boolean modifierScrollLock = false;
boolean modifierSuper = false;
boolean modifierSymbol = false;
boolean modifierSymbolLock = false;
};
[Exposed=Window]
interface WheelEvent : MouseEvent {
constructor(DOMString type, optional WheelEventInit eventInitDict = {});
// DeltaModeCode
const unsigned long DOM_DELTA_PIXEL = 0x00;
const unsigned long DOM_DELTA_LINE = 0x01;
const unsigned long DOM_DELTA_PAGE = 0x02;
readonly attribute double deltaX;
readonly attribute double deltaY;
readonly attribute double deltaZ;
readonly attribute unsigned long deltaMode;
};
dictionary WheelEventInit : MouseEventInit {
double deltaX = 0.0;
double deltaY = 0.0;
double deltaZ = 0.0;
unsigned long deltaMode = 0;
};
[Exposed=Window]
interface InputEvent : UIEvent {
constructor(DOMString type, optional InputEventInit eventInitDict = {});
readonly attribute USVString? data;
readonly attribute boolean isComposing;
readonly attribute DOMString inputType;
};
dictionary InputEventInit : UIEventInit {
DOMString? data = null;
boolean isComposing = false;
DOMString inputType = "";
};
[Exposed=Window]
interface KeyboardEvent : UIEvent {
constructor(DOMString type, optional KeyboardEventInit eventInitDict = {});
// KeyLocationCode
const unsigned long DOM_KEY_LOCATION_STANDARD = 0x00;
const unsigned long DOM_KEY_LOCATION_LEFT = 0x01;
const unsigned long DOM_KEY_LOCATION_RIGHT = 0x02;
const unsigned long DOM_KEY_LOCATION_NUMPAD = 0x03;
readonly attribute DOMString key;
readonly attribute DOMString code;
readonly attribute unsigned long location;
readonly attribute boolean ctrlKey;
readonly attribute boolean shiftKey;
readonly attribute boolean altKey;
readonly attribute boolean metaKey;
readonly attribute boolean repeat;
readonly attribute boolean isComposing;
boolean getModifierState(DOMString keyArg);
};
dictionary KeyboardEventInit : EventModifierInit {
DOMString key = "";
DOMString code = "";
unsigned long location = 0;
boolean repeat = false;
boolean isComposing = false;
};
[Exposed=Window]
interface CompositionEvent : UIEvent {
constructor(DOMString type, optional CompositionEventInit eventInitDict = {});
readonly attribute USVString data;
};
dictionary CompositionEventInit : UIEventInit {
DOMString data = "";
};
partial interface UIEvent {
// Deprecated in this specification
undefined initUIEvent(DOMString typeArg,
optional boolean bubblesArg = false,
optional boolean cancelableArg = false,
optional Window? viewArg = null,
optional long detailArg = 0);
};
partial interface MouseEvent {
// Deprecated in this specification
undefined initMouseEvent(DOMString typeArg,
optional boolean bubblesArg = false,
optional boolean cancelableArg = false,
optional Window? viewArg = null,
optional long detailArg = 0,
optional long screenXArg = 0,
optional long screenYArg = 0,
optional long clientXArg = 0,
optional long clientYArg = 0,
optional boolean ctrlKeyArg = false,
optional boolean altKeyArg = false,
optional boolean shiftKeyArg = false,
optional boolean metaKeyArg = false,
optional short buttonArg = 0,
optional EventTarget? relatedTargetArg = null);
};
partial interface KeyboardEvent {
// Originally introduced (and deprecated) in this specification
undefined initKeyboardEvent(DOMString typeArg,
optional boolean bubblesArg = false,
optional boolean cancelableArg = false,
optional Window? viewArg = null,
optional DOMString keyArg = "",
optional unsigned long locationArg = 0,
optional boolean ctrlKey = false,
optional boolean altKey = false,
optional boolean shiftKey = false,
optional boolean metaKey = false);
};
partial interface CompositionEvent {
// Originally introduced (and deprecated) in this specification
undefined initCompositionEvent(DOMString typeArg,
optional boolean bubblesArg = false,
optional boolean cancelableArg = false,
optional WindowProxy? viewArg = null,
optional DOMString dataArg = "");
};
partial interface UIEvent {
// The following support legacy user agents
readonly attribute unsigned long which;
};
partial dictionary UIEventInit {
unsigned long which = 0;
};
partial interface KeyboardEvent {
// The following support legacy user agents
readonly attribute unsigned long charCode;
readonly attribute unsigned long keyCode;
};
partial dictionary KeyboardEventInit {
// The following support legacy user agents
unsigned long charCode = 0;
unsigned long keyCode = 0;
};
[Exposed=Window]
interface TextEvent : UIEvent {
readonly attribute DOMString data;
undefined initTextEvent(DOMString type,
optional boolean bubbles = false,
optional boolean cancelable = false,
optional Window? view = null,
optional DOMString data = "undefined");
};


@@ -0,0 +1,657 @@
// GENERATED CONTENT - DO NOT EDIT
// Content was automatically extracted by Reffy into webref
// (https://github.com/w3c/webref)
// Source: Web Audio API 1.1 (https://webaudio.github.io/web-audio-api/)
enum AudioContextState {
"suspended",
"running",
"closed",
"interrupted"
};
enum AudioContextRenderSizeCategory {
"default",
"hardware"
};
callback DecodeErrorCallback = undefined (DOMException error);
callback DecodeSuccessCallback = undefined (AudioBuffer decodedData);
[Exposed=Window]
interface BaseAudioContext : EventTarget {
readonly attribute AudioDestinationNode destination;
readonly attribute float sampleRate;
readonly attribute double currentTime;
readonly attribute AudioListener listener;
readonly attribute AudioContextState state;
readonly attribute unsigned long renderQuantumSize;
[SameObject, SecureContext]
readonly attribute AudioWorklet audioWorklet;
attribute EventHandler onstatechange;
AnalyserNode createAnalyser ();
BiquadFilterNode createBiquadFilter ();
AudioBuffer createBuffer (unsigned long numberOfChannels,
unsigned long length,
float sampleRate);
AudioBufferSourceNode createBufferSource ();
ChannelMergerNode createChannelMerger (optional unsigned long numberOfInputs = 6);
ChannelSplitterNode createChannelSplitter (
optional unsigned long numberOfOutputs = 6);
ConstantSourceNode createConstantSource ();
ConvolverNode createConvolver ();
DelayNode createDelay (optional double maxDelayTime = 1.0);
DynamicsCompressorNode createDynamicsCompressor ();
GainNode createGain ();
IIRFilterNode createIIRFilter (sequence<double> feedforward,
sequence<double> feedback);
OscillatorNode createOscillator ();
PannerNode createPanner ();
PeriodicWave createPeriodicWave (sequence<float> real,
sequence<float> imag,
optional PeriodicWaveConstraints constraints = {});
ScriptProcessorNode createScriptProcessor(
optional unsigned long bufferSize = 0,
optional unsigned long numberOfInputChannels = 2,
optional unsigned long numberOfOutputChannels = 2);
StereoPannerNode createStereoPanner ();
WaveShaperNode createWaveShaper ();
Promise<AudioBuffer> decodeAudioData (
ArrayBuffer audioData,
optional DecodeSuccessCallback? successCallback,
optional DecodeErrorCallback? errorCallback);
};
enum AudioContextLatencyCategory {
"balanced",
"interactive",
"playback"
};
enum AudioSinkType {
"none"
};
[Exposed=Window]
interface AudioContext : BaseAudioContext {
constructor (optional AudioContextOptions contextOptions = {});
readonly attribute double baseLatency;
readonly attribute double outputLatency;
[SecureContext] readonly attribute (DOMString or AudioSinkInfo) sinkId;
attribute EventHandler onsinkchange;
attribute EventHandler onerror;
AudioTimestamp getOutputTimestamp ();
Promise<undefined> resume ();
Promise<undefined> suspend ();
Promise<undefined> close ();
[SecureContext] Promise<undefined> setSinkId ((DOMString or AudioSinkOptions) sinkId);
MediaElementAudioSourceNode createMediaElementSource (HTMLMediaElement mediaElement);
MediaStreamAudioSourceNode createMediaStreamSource (MediaStream mediaStream);
MediaStreamTrackAudioSourceNode createMediaStreamTrackSource (
MediaStreamTrack mediaStreamTrack);
MediaStreamAudioDestinationNode createMediaStreamDestination ();
};
dictionary AudioContextOptions {
(AudioContextLatencyCategory or double) latencyHint = "interactive";
float sampleRate;
(DOMString or AudioSinkOptions) sinkId;
(AudioContextRenderSizeCategory or unsigned long) renderSizeHint = "default";
};
dictionary AudioSinkOptions {
required AudioSinkType type;
};
[Exposed=Window]
interface AudioSinkInfo {
readonly attribute AudioSinkType type;
};
dictionary AudioTimestamp {
double contextTime;
DOMHighResTimeStamp performanceTime;
};
[Exposed=Window]
interface OfflineAudioContext : BaseAudioContext {
constructor(OfflineAudioContextOptions contextOptions);
constructor(unsigned long numberOfChannels, unsigned long length, float sampleRate);
Promise<AudioBuffer> startRendering();
Promise<undefined> resume();
Promise<undefined> suspend(double suspendTime);
readonly attribute unsigned long length;
attribute EventHandler oncomplete;
};
dictionary OfflineAudioContextOptions {
unsigned long numberOfChannels = 1;
required unsigned long length;
required float sampleRate;
(AudioContextRenderSizeCategory or unsigned long) renderSizeHint = "default";
};
[Exposed=Window]
interface OfflineAudioCompletionEvent : Event {
constructor (DOMString type, OfflineAudioCompletionEventInit eventInitDict);
readonly attribute AudioBuffer renderedBuffer;
};
dictionary OfflineAudioCompletionEventInit : EventInit {
required AudioBuffer renderedBuffer;
};
[Exposed=Window]
interface AudioBuffer {
constructor (AudioBufferOptions options);
readonly attribute float sampleRate;
readonly attribute unsigned long length;
readonly attribute double duration;
readonly attribute unsigned long numberOfChannels;
Float32Array getChannelData (unsigned long channel);
undefined copyFromChannel (Float32Array destination,
unsigned long channelNumber,
optional unsigned long bufferOffset = 0);
undefined copyToChannel (Float32Array source,
unsigned long channelNumber,
optional unsigned long bufferOffset = 0);
};
dictionary AudioBufferOptions {
unsigned long numberOfChannels = 1;
required unsigned long length;
required float sampleRate;
};
[Exposed=Window]
interface AudioNode : EventTarget {
AudioNode connect (AudioNode destinationNode,
optional unsigned long output = 0,
optional unsigned long input = 0);
undefined connect (AudioParam destinationParam, optional unsigned long output = 0);
undefined disconnect ();
undefined disconnect (unsigned long output);
undefined disconnect (AudioNode destinationNode);
undefined disconnect (AudioNode destinationNode, unsigned long output);
undefined disconnect (AudioNode destinationNode,
unsigned long output,
unsigned long input);
undefined disconnect (AudioParam destinationParam);
undefined disconnect (AudioParam destinationParam, unsigned long output);
readonly attribute BaseAudioContext context;
readonly attribute unsigned long numberOfInputs;
readonly attribute unsigned long numberOfOutputs;
attribute unsigned long channelCount;
attribute ChannelCountMode channelCountMode;
attribute ChannelInterpretation channelInterpretation;
};
enum ChannelCountMode {
"max",
"clamped-max",
"explicit"
};
enum ChannelInterpretation {
"speakers",
"discrete"
};
dictionary AudioNodeOptions {
unsigned long channelCount;
ChannelCountMode channelCountMode;
ChannelInterpretation channelInterpretation;
};
enum AutomationRate {
"a-rate",
"k-rate"
};
[Exposed=Window]
interface AudioParam {
attribute float value;
attribute AutomationRate automationRate;
readonly attribute float defaultValue;
readonly attribute float minValue;
readonly attribute float maxValue;
AudioParam setValueAtTime (float value, double startTime);
AudioParam linearRampToValueAtTime (float value, double endTime);
AudioParam exponentialRampToValueAtTime (float value, double endTime);
AudioParam setTargetAtTime (float target, double startTime, float timeConstant);
AudioParam setValueCurveAtTime (sequence<float> values,
double startTime,
double duration);
AudioParam cancelScheduledValues (double cancelTime);
AudioParam cancelAndHoldAtTime (double cancelTime);
};
[Exposed=Window]
interface AudioScheduledSourceNode : AudioNode {
attribute EventHandler onended;
undefined start(optional double when = 0);
undefined stop(optional double when = 0);
};
[Exposed=Window]
interface AnalyserNode : AudioNode {
constructor (BaseAudioContext context, optional AnalyserOptions options = {});
undefined getFloatFrequencyData (Float32Array array);
undefined getByteFrequencyData (Uint8Array array);
undefined getFloatTimeDomainData (Float32Array array);
undefined getByteTimeDomainData (Uint8Array array);
attribute unsigned long fftSize;
readonly attribute unsigned long frequencyBinCount;
attribute double minDecibels;
attribute double maxDecibels;
attribute double smoothingTimeConstant;
};
dictionary AnalyserOptions : AudioNodeOptions {
unsigned long fftSize = 2048;
double maxDecibels = -30;
double minDecibels = -100;
double smoothingTimeConstant = 0.8;
};
[Exposed=Window]
interface AudioBufferSourceNode : AudioScheduledSourceNode {
constructor (BaseAudioContext context,
optional AudioBufferSourceOptions options = {});
attribute AudioBuffer? buffer;
readonly attribute AudioParam playbackRate;
readonly attribute AudioParam detune;
attribute boolean loop;
attribute double loopStart;
attribute double loopEnd;
undefined start (optional double when = 0,
optional double offset,
optional double duration);
};
dictionary AudioBufferSourceOptions {
AudioBuffer? buffer;
float detune = 0;
boolean loop = false;
double loopEnd = 0;
double loopStart = 0;
float playbackRate = 1;
};
[Exposed=Window]
interface AudioDestinationNode : AudioNode {
readonly attribute unsigned long maxChannelCount;
};
[Exposed=Window]
interface AudioListener {
readonly attribute AudioParam positionX;
readonly attribute AudioParam positionY;
readonly attribute AudioParam positionZ;
readonly attribute AudioParam forwardX;
readonly attribute AudioParam forwardY;
readonly attribute AudioParam forwardZ;
readonly attribute AudioParam upX;
readonly attribute AudioParam upY;
readonly attribute AudioParam upZ;
undefined setPosition (float x, float y, float z);
undefined setOrientation (float x, float y, float z, float xUp, float yUp, float zUp);
};
[Exposed=Window]
interface AudioProcessingEvent : Event {
constructor (DOMString type, AudioProcessingEventInit eventInitDict);
readonly attribute double playbackTime;
readonly attribute AudioBuffer inputBuffer;
readonly attribute AudioBuffer outputBuffer;
};
dictionary AudioProcessingEventInit : EventInit {
required double playbackTime;
required AudioBuffer inputBuffer;
required AudioBuffer outputBuffer;
};
enum BiquadFilterType {
"lowpass",
"highpass",
"bandpass",
"lowshelf",
"highshelf",
"peaking",
"notch",
"allpass"
};
[Exposed=Window]
interface BiquadFilterNode : AudioNode {
constructor (BaseAudioContext context, optional BiquadFilterOptions options = {});
attribute BiquadFilterType type;
readonly attribute AudioParam frequency;
readonly attribute AudioParam detune;
readonly attribute AudioParam Q;
readonly attribute AudioParam gain;
undefined getFrequencyResponse (Float32Array frequencyHz,
Float32Array magResponse,
Float32Array phaseResponse);
};
dictionary BiquadFilterOptions : AudioNodeOptions {
BiquadFilterType type = "lowpass";
float Q = 1;
float detune = 0;
float frequency = 350;
float gain = 0;
};
[Exposed=Window]
interface ChannelMergerNode : AudioNode {
constructor (BaseAudioContext context, optional ChannelMergerOptions options = {});
};
dictionary ChannelMergerOptions : AudioNodeOptions {
unsigned long numberOfInputs = 6;
};
[Exposed=Window]
interface ChannelSplitterNode : AudioNode {
constructor (BaseAudioContext context, optional ChannelSplitterOptions options = {});
};
dictionary ChannelSplitterOptions : AudioNodeOptions {
unsigned long numberOfOutputs = 6;
};
[Exposed=Window]
interface ConstantSourceNode : AudioScheduledSourceNode {
constructor (BaseAudioContext context, optional ConstantSourceOptions options = {});
readonly attribute AudioParam offset;
};
dictionary ConstantSourceOptions {
float offset = 1;
};
[Exposed=Window]
interface ConvolverNode : AudioNode {
constructor (BaseAudioContext context, optional ConvolverOptions options = {});
attribute AudioBuffer? buffer;
attribute boolean normalize;
};
dictionary ConvolverOptions : AudioNodeOptions {
AudioBuffer? buffer;
boolean disableNormalization = false;
};
[Exposed=Window]
interface DelayNode : AudioNode {
constructor (BaseAudioContext context, optional DelayOptions options = {});
readonly attribute AudioParam delayTime;
};
dictionary DelayOptions : AudioNodeOptions {
double maxDelayTime = 1;
double delayTime = 0;
};
[Exposed=Window]
interface DynamicsCompressorNode : AudioNode {
constructor (BaseAudioContext context,
optional DynamicsCompressorOptions options = {});
readonly attribute AudioParam threshold;
readonly attribute AudioParam knee;
readonly attribute AudioParam ratio;
readonly attribute float reduction;
readonly attribute AudioParam attack;
readonly attribute AudioParam release;
};
dictionary DynamicsCompressorOptions : AudioNodeOptions {
float attack = 0.003;
float knee = 30;
float ratio = 12;
float release = 0.25;
float threshold = -24;
};
[Exposed=Window]
interface GainNode : AudioNode {
constructor (BaseAudioContext context, optional GainOptions options = {});
readonly attribute AudioParam gain;
};
dictionary GainOptions : AudioNodeOptions {
float gain = 1.0;
};
[Exposed=Window]
interface IIRFilterNode : AudioNode {
constructor (BaseAudioContext context, IIRFilterOptions options);
undefined getFrequencyResponse (Float32Array frequencyHz,
Float32Array magResponse,
Float32Array phaseResponse);
};
dictionary IIRFilterOptions : AudioNodeOptions {
required sequence<double> feedforward;
required sequence<double> feedback;
};
[Exposed=Window]
interface MediaElementAudioSourceNode : AudioNode {
constructor (AudioContext context, MediaElementAudioSourceOptions options);
[SameObject] readonly attribute HTMLMediaElement mediaElement;
};
dictionary MediaElementAudioSourceOptions {
required HTMLMediaElement mediaElement;
};
[Exposed=Window]
interface MediaStreamAudioDestinationNode : AudioNode {
constructor (AudioContext context, optional AudioNodeOptions options = {});
readonly attribute MediaStream stream;
};
[Exposed=Window]
interface MediaStreamAudioSourceNode : AudioNode {
constructor (AudioContext context, MediaStreamAudioSourceOptions options);
[SameObject] readonly attribute MediaStream mediaStream;
};
dictionary MediaStreamAudioSourceOptions {
required MediaStream mediaStream;
};
[Exposed=Window]
interface MediaStreamTrackAudioSourceNode : AudioNode {
constructor (AudioContext context, MediaStreamTrackAudioSourceOptions options);
};
dictionary MediaStreamTrackAudioSourceOptions {
required MediaStreamTrack mediaStreamTrack;
};
enum OscillatorType {
"sine",
"square",
"sawtooth",
"triangle",
"custom"
};
[Exposed=Window]
interface OscillatorNode : AudioScheduledSourceNode {
constructor (BaseAudioContext context, optional OscillatorOptions options = {});
attribute OscillatorType type;
readonly attribute AudioParam frequency;
readonly attribute AudioParam detune;
undefined setPeriodicWave (PeriodicWave periodicWave);
};
dictionary OscillatorOptions : AudioNodeOptions {
OscillatorType type = "sine";
float frequency = 440;
float detune = 0;
PeriodicWave periodicWave;
};
enum PanningModelType {
"equalpower",
"HRTF"
};
enum DistanceModelType {
"linear",
"inverse",
"exponential"
};
[Exposed=Window]
interface PannerNode : AudioNode {
constructor (BaseAudioContext context, optional PannerOptions options = {});
attribute PanningModelType panningModel;
readonly attribute AudioParam positionX;
readonly attribute AudioParam positionY;
readonly attribute AudioParam positionZ;
readonly attribute AudioParam orientationX;
readonly attribute AudioParam orientationY;
readonly attribute AudioParam orientationZ;
attribute DistanceModelType distanceModel;
attribute double refDistance;
attribute double maxDistance;
attribute double rolloffFactor;
attribute double coneInnerAngle;
attribute double coneOuterAngle;
attribute double coneOuterGain;
undefined setPosition (float x, float y, float z);
undefined setOrientation (float x, float y, float z);
};
dictionary PannerOptions : AudioNodeOptions {
PanningModelType panningModel = "equalpower";
DistanceModelType distanceModel = "inverse";
float positionX = 0;
float positionY = 0;
float positionZ = 0;
float orientationX = 1;
float orientationY = 0;
float orientationZ = 0;
double refDistance = 1;
double maxDistance = 10000;
double rolloffFactor = 1;
double coneInnerAngle = 360;
double coneOuterAngle = 360;
double coneOuterGain = 0;
};
[Exposed=Window]
interface PeriodicWave {
constructor (BaseAudioContext context, optional PeriodicWaveOptions options = {});
};
dictionary PeriodicWaveConstraints {
boolean disableNormalization = false;
};
dictionary PeriodicWaveOptions : PeriodicWaveConstraints {
sequence<float> real;
sequence<float> imag;
};
[Exposed=Window]
interface ScriptProcessorNode : AudioNode {
attribute EventHandler onaudioprocess;
readonly attribute long bufferSize;
};
[Exposed=Window]
interface StereoPannerNode : AudioNode {
constructor (BaseAudioContext context, optional StereoPannerOptions options = {});
readonly attribute AudioParam pan;
};
dictionary StereoPannerOptions : AudioNodeOptions {
float pan = 0;
};
enum OverSampleType {
"none",
"2x",
"4x"
};
[Exposed=Window]
interface WaveShaperNode : AudioNode {
constructor (BaseAudioContext context, optional WaveShaperOptions options = {});
attribute Float32Array? curve;
attribute OverSampleType oversample;
};
dictionary WaveShaperOptions : AudioNodeOptions {
sequence<float> curve;
OverSampleType oversample = "none";
};
[Exposed=Window, SecureContext]
interface AudioWorklet : Worklet {
readonly attribute MessagePort port;
};
callback AudioWorkletProcessorConstructor = AudioWorkletProcessor (object options);
[Global=(Worklet, AudioWorklet), Exposed=AudioWorklet]
interface AudioWorkletGlobalScope : WorkletGlobalScope {
undefined registerProcessor (DOMString name,
AudioWorkletProcessorConstructor processorCtor);
readonly attribute unsigned long long currentFrame;
readonly attribute double currentTime;
readonly attribute float sampleRate;
readonly attribute unsigned long renderQuantumSize;
readonly attribute MessagePort port;
};
[Exposed=Window]
interface AudioParamMap {
readonly maplike<DOMString, AudioParam>;
};
[Exposed=Window, SecureContext]
interface AudioWorkletNode : AudioNode {
constructor (BaseAudioContext context, DOMString name,
optional AudioWorkletNodeOptions options = {});
readonly attribute AudioParamMap parameters;
readonly attribute MessagePort port;
attribute EventHandler onprocessorerror;
};
dictionary AudioWorkletNodeOptions : AudioNodeOptions {
unsigned long numberOfInputs = 1;
unsigned long numberOfOutputs = 1;
sequence<unsigned long> outputChannelCount;
record<DOMString, double> parameterData;
object processorOptions;
};
[Exposed=AudioWorklet]
interface AudioWorkletProcessor {
constructor ();
readonly attribute MessagePort port;
};
callback AudioWorkletProcessCallback =
boolean (FrozenArray<FrozenArray<Float32Array>> inputs,
FrozenArray<FrozenArray<Float32Array>> outputs,
object parameters);
dictionary AudioParamDescriptor {
required DOMString name;
float defaultValue = 0;
float minValue = -3.4028235e38;
float maxValue = 3.4028235e38;
AutomationRate automationRate = "a-rate";
};


@@ -0,0 +1,9 @@
<!doctype html>
<meta charset=utf-8>
<meta name="timeout" content="long">
<script src="../resources/testharness.js"></script>
<script src="../resources/testharnessreport.js"></script>
<script src="../resources/WebIDLParser.js"></script>
<script src="../resources/idlharness.js"></script>
<div id=log></div>
<script src="../webaudio/idlharness.https.window.js"></script>


@@ -0,0 +1,72 @@
// META: script=/resources/WebIDLParser.js
// META: script=/resources/idlharness.js
// META: timeout=long
// https://webaudio.github.io/web-audio-api/
'use strict';
idl_test(
['webaudio'],
['cssom', 'uievents', 'mediacapture-streams', 'html', 'dom'],
async idl_array => {
idl_array.add_untested_idls('interface SVGElement {};');
idl_array.add_objects({
BaseAudioContext: [],
AudioContext: ['context'],
OfflineAudioContext: ['new OfflineAudioContext(1, 1, sample_rate)'],
OfflineAudioCompletionEvent: [
'new OfflineAudioCompletionEvent("", {renderedBuffer: buffer})'
],
AudioBuffer: ['buffer'],
AudioNode: [],
AudioParam: ['new AudioBufferSourceNode(context).playbackRate'],
AudioScheduledSourceNode: [],
AnalyserNode: ['new AnalyserNode(context)'],
AudioBufferSourceNode: ['new AudioBufferSourceNode(context)'],
AudioDestinationNode: ['context.destination'],
AudioListener: ['context.listener'],
AudioProcessingEvent: [`new AudioProcessingEvent('', {
playbackTime: 0, inputBuffer: buffer, outputBuffer: buffer
})`],
BiquadFilterNode: ['new BiquadFilterNode(context)'],
ChannelMergerNode: ['new ChannelMergerNode(context)'],
ChannelSplitterNode: ['new ChannelSplitterNode(context)'],
ConstantSourceNode: ['new ConstantSourceNode(context)'],
ConvolverNode: ['new ConvolverNode(context)'],
DelayNode: ['new DelayNode(context)'],
DynamicsCompressorNode: ['new DynamicsCompressorNode(context)'],
GainNode: ['new GainNode(context)'],
IIRFilterNode: [
'new IIRFilterNode(context, {feedforward: [1], feedback: [1]})'
],
MediaElementAudioSourceNode: [
'new MediaElementAudioSourceNode(context, {mediaElement: new Audio})'
],
MediaStreamAudioDestinationNode: [
'new MediaStreamAudioDestinationNode(context)'
],
MediaStreamAudioSourceNode: [],
MediaStreamTrackAudioSourceNode: [],
OscillatorNode: ['new OscillatorNode(context)'],
PannerNode: ['new PannerNode(context)'],
PeriodicWave: ['new PeriodicWave(context)'],
ScriptProcessorNode: ['context.createScriptProcessor()'],
StereoPannerNode: ['new StereoPannerNode(context)'],
WaveShaperNode: ['new WaveShaperNode(context)'],
AudioWorklet: ['context.audioWorklet'],
AudioWorkletGlobalScope: [],
AudioParamMap: ['worklet_node.parameters'],
AudioWorkletNode: ['worklet_node'],
AudioWorkletProcessor: [],
});
self.sample_rate = 44100;
self.context = new AudioContext;
self.buffer = new AudioBuffer({length: 1, sampleRate: sample_rate});
await context.audioWorklet.addModule(
'the-audio-api/the-audioworklet-interface/processors/dummy-processor.js');
self.worklet_node = new AudioWorkletNode(context, 'dummy');
}
);