<!DOCTYPE html>
<meta charset="utf-8">
<title>WebAudio IDL tests</title>
<link rel="help" href="https://webaudio.github.io/web-audio-api/"/>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="/resources/WebIDLParser.js"></script>
<script src="/resources/idlharness.js"></script>
<script>
'use strict';

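// idlharness test: fetch the Web Audio API IDL together with the IDL of the
// specs it depends on, then check the Web Audio interfaces against live
// objects constructed below.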
promise_test(async t => {
  const [html, dom, uievents, mediacapture, webaudio] = await Promise.all([
    // Needed for HTMLMediaElement
    '/interfaces/html.idl',

    // Needed for EventTarget, Event, EventListener
    '/interfaces/dom.idl',

    // Needed for MouseEvent
    '/interfaces/uievents.idl',

    // Needed for MediaStream, MediaStreamTrack
    '/interfaces/mediacapture-main.idl',

    '/interfaces/web-audio-api.idl'
  ].map(url => fetch(url).then(response => response.text())));

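  // Dependency IDLs are registered as untested below, so only the Web Audio
  // API interfaces (added with add_idls) produce test results.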
  const idl_array = new IdlArray();

  // Stubs for interfaces that are referenced by the IDLs above but defined
  // in other specs.
  idl_array.add_untested_idls('interface LinkStyle {};');
  idl_array.add_untested_idls('interface SVGElement {};');
  idl_array.add_untested_idls('interface WorkletGlobalScope {};');
  idl_array.add_untested_idls(html);
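  // Only MouseEvent and its inheritance chain are needed from UI Events.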
  idl_array.add_untested_idls(uievents, { only: [
    'MouseEvent',
    'MouseEventInit',
    'EventModifierInit',
    'UIEvent',
    'UIEventInit',
  ]});

  idl_array.add_untested_idls(dom);
  idl_array.add_untested_idls(mediacapture);
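  // AudioWorklet inherits from Worklet, which is only stubbed out here.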
  idl_array.add_untested_idls('interface Worklet {};');

  idl_array.add_idls(webaudio);

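  // Live objects to exercise the per-instance checks in add_objects() below.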
  const sample_rate = 44100;
  const context = new AudioContext();
  const buffer = new AudioBuffer({length: 1, sampleRate: sample_rate});
  await context.audioWorklet.addModule(
      'the-audio-api/the-audioworklet-interface/processors/dummy-processor.js');
  const worklet_node = new AudioWorkletNode(context, 'dummy');

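  // Interfaces mapped to an empty array have no instance available here
  // (abstract interfaces such as BaseAudioContext and AudioNode, or ones not
  // reachable from a window, such as AudioWorkletGlobalScope), so no
  // per-instance checks run for them.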
  idl_array.add_objects({
    BaseAudioContext: [],
    AudioContext: [context],
    OfflineAudioContext: [new OfflineAudioContext(1, 1, sample_rate)],
    OfflineAudioCompletionEvent: [
        new OfflineAudioCompletionEvent('', {renderedBuffer: buffer})],
    AudioBuffer: [buffer],
    AudioNode: [],
    AudioParam: [new AudioBufferSourceNode(context).playbackRate],
    AudioScheduledSourceNode: [],
    AnalyserNode: [new AnalyserNode(context)],
    AudioBufferSourceNode: [new AudioBufferSourceNode(context)],
    AudioDestinationNode: [context.destination],
    AudioListener: [context.listener],
    AudioProcessingEvent: [new AudioProcessingEvent('', {
        playbackTime: 0, inputBuffer: buffer, outputBuffer: buffer
    })],
    BiquadFilterNode: [new BiquadFilterNode(context)],
    ChannelMergerNode: [new ChannelMergerNode(context)],
    ChannelSplitterNode: [new ChannelSplitterNode(context)],
    ConstantSourceNode: [new ConstantSourceNode(context)],
    ConvolverNode: [new ConvolverNode(context)],
    DelayNode: [new DelayNode(context)],
    DynamicsCompressorNode: [new DynamicsCompressorNode(context)],
    GainNode: [new GainNode(context)],
    IIRFilterNode: [
        new IIRFilterNode(context, {feedforward: [1], feedback: [1]})],
    MediaElementAudioSourceNode: [
        new MediaElementAudioSourceNode(context, {mediaElement: new Audio()})],
    MediaStreamAudioDestinationNode: [
        new MediaStreamAudioDestinationNode(context)],
    MediaStreamAudioSourceNode: [],
    MediaStreamTrackAudioSourceNode: [],
    OscillatorNode: [new OscillatorNode(context)],
    PannerNode: [new PannerNode(context)],
    PeriodicWave: [new PeriodicWave(context)],
    ScriptProcessorNode: [context.createScriptProcessor()],
    StereoPannerNode: [new StereoPannerNode(context)],
    WaveShaperNode: [new WaveShaperNode(context)],
    AudioWorklet: [context.audioWorklet],
    AudioWorkletGlobalScope: [],
    AudioParamMap: [worklet_node.parameters],
    AudioWorkletNode: [worklet_node],
    AudioWorkletProcessor: [],
  });
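  // Generate and run the interface tests.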
  idl_array.test();

}, 'Test driver');
</script>