| <!DOCTYPE html> |
| <html> |
| <head> |
| <title> |
| scriptprocessornode.html |
| </title> |
| <script src="../../imported/w3c/web-platform-tests/resources/testharness.js"></script> |
| <script src="../../resources/testharnessreport.js"></script> |
| <script src="../resources/audit-util.js"></script> |
| <script src="../resources/audit.js"></script> |
| </head> |
| <body> |
| <script id="layout-test-code"> |
| let audit = Audit.createTaskRunner(); |
| |
| let sampleRate = 44100.0; |
| let outputChannels = 6; |
| let playbackTime = 0.0; |
| |
// For the current implementation of ScriptProcessorNode, when it works
// with an OfflineAudioContext (which runs much faster than real-time),
// event.inputBuffer might be overwritten again before onaudioprocess
// ever gets a chance to run. We carefully arrange renderLengthInFrames
// and bufferSize to have exactly the same value to avoid this issue.
| let renderLengthInFrames = 512; |
| let bufferSize = 512; |
| |
| let context; |
| |
| function createBuffer(context, length) { |
| let audioBuffer = context.createBuffer(2, length, sampleRate); |
| let n = audioBuffer.length; |
| let dataL = audioBuffer.getChannelData(0); |
| let dataR = audioBuffer.getChannelData(1); |
| |
| for (let i = 0; i < n; ++i) { |
| dataL[i] = -1; |
| dataR[i] = 1; |
| } |
| |
| return audioBuffer; |
| } |
| |
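// onaudioprocess handler: check the event's playbackTime, the shape of
// the output buffer, and the contents of the input buffer.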
| function processAudioData(event, should) { |
| playbackTime = event.playbackTime; |
| let expectedTime = |
| context.currentTime + (bufferSize / context.sampleRate); |
| let allowedTimeGap = 0.0000001; |
| |
// There may be a small time gap between the currentTime at which the
// audio thread fired onaudioprocess() and the currentTime read here in
// JS, because currentTime keeps advancing on the audio thread while the
// main thread handles the event.
| |
| should(playbackTime, 'playbackTime').beCloseTo(expectedTime, { |
| threshold: allowedTimeGap |
| }); |
| |
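// The output buffer should have the requested channel count and length.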
let buffer = event.outputBuffer;
| should(buffer.numberOfChannels, 'Number of output channels') |
| .beEqualTo(outputChannels); |
| should(buffer.length, 'Length of buffer').beEqualTo(bufferSize); |
| |
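// The input buffer should still contain the constant values written by
// createBuffer().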
| buffer = event.inputBuffer; |
| let bufferDataL = buffer.getChannelData(0); |
| let bufferDataR = buffer.getChannelData(1); |
| |
| should(bufferDataL, 'Left channel').beConstantValueOf(-1); |
| should(bufferDataR, 'Right channel').beConstantValueOf(1); |
| } |
| |
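// Verify that createScriptProcessor() accepts the given buffer size
// without throwing.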
| function doBufferSizeTest(size, should) { |
| should(() => { |
| context.createScriptProcessor(size, 1, 1); |
| }, 'context.createScriptProcessor(' + size + ', 1, 1)').notThrow(); |
| } |
| |
| audit.define( |
| {label: 'test', description: 'Basic ScriptProcessorNode properties'}, |
| (task, should) => { |
| // Create offline audio context. |
| context = |
| new OfflineAudioContext(2, renderLengthInFrames, sampleRate); |
| |
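// Channel-count validation: the number of input and output channels may
// not both be zero, but either one on its own may be zero.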
should(() => {
context.createScriptProcessor(512, 0, 0);
}, 'context.createScriptProcessor(512, 0, 0)').throw();
| |
| should(() => { |
| context.createScriptProcessor(512, 1, 0); |
| }, 'context.createScriptProcessor(512, 1, 0)').notThrow(); |
| |
| should(() => { |
| context.createScriptProcessor(512, 2, 0); |
| }, 'context.createScriptProcessor(512, 2, 0)').notThrow(); |
| |
| should(() => { |
| context.createScriptProcessor(512, 0, 1); |
| }, 'context.createScriptProcessor(512, 0, 1)').notThrow(); |
| |
| should(() => { |
| context.createScriptProcessor(512, 0, 2); |
| }, 'context.createScriptProcessor(512, 0, 2)').notThrow(); |
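
// A non-power-of-two buffer size is invalid and must throw.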
| should(() => { |
| context.createScriptProcessor(511, 1, 1); |
| }, 'context.createScriptProcessor(511, 1, 1)').throw(); |
| |
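// All power-of-two buffer sizes from 256 through 16384 are legal.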
| doBufferSizeTest(256, should); |
| doBufferSizeTest(512, should); |
| doBufferSizeTest(1024, should); |
| doBufferSizeTest(2048, should); |
| doBufferSizeTest(4096, should); |
| doBufferSizeTest(8192, should); |
| doBufferSizeTest(16384, should); |
| |
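// Build the rendering graph: a buffer source playing the test buffer,
// connected through a ScriptProcessorNode to the destination.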
| let sourceBuffer = createBuffer(context, renderLengthInFrames); |
| |
| let bufferSource = context.createBufferSource(); |
| bufferSource.buffer = sourceBuffer; |
| |
| let jsnode = |
| context.createScriptProcessor(bufferSize, 2, outputChannels); |
| |
| bufferSource.connect(jsnode); |
| jsnode.connect(context.destination); |
| jsnode.onaudioprocess = event => { |
| processAudioData(event, should); |
| }; |
| |
| bufferSource.start(0); |
| |
| context.startRendering().then(() => task.done()); |
| ; |
| }); |
| |
| audit.run(); |
| </script> |
| </body> |
| </html> |