<!DOCTYPE html>
<!--
Tests that AudioBufferSourceNode supports loop-points with .loopStart and .loopEnd.
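The test renders a chromatic scale of short notes offline, each note looping the
second half of a two-period sine buffer, and compares the result against a
pre-recorded reference WAV file.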
-->
<html>
  <head>
    <title>
      audiobuffersource-loop-points.html
    </title>
    <script src="../../imported/w3c/web-platform-tests/resources/testharness.js"></script>
    <script src="../../resources/testharnessreport.js"></script>
    <script src="../resources/audit-util.js"></script>
    <script src="../resources/audit.js"></script>
    <script src="../resources/audio-file-utils.js"></script>
  </head>
  <body>
    <script id="layout-test-code">
      let audit = Audit.createTaskRunner();

      // Use power of two to eliminate round-off in computing frames from time
      // and vice versa.
      let sampleRate = 32768;

      let numberOfNotes = 60;  // play over a 5 octave range

      // Make sure noteDuration, noteSilence, and noteSpacing are exactly whole
      // frames.
      let noteDuration = Math.floor(0.025 * sampleRate) / sampleRate;

      // Leave about 5ms of silence between each "note"
      let noteSilence = Math.floor(0.005 * sampleRate) / sampleRate;
      let noteSpacing = noteDuration + noteSilence;
      let lengthInSeconds = numberOfNotes * noteSpacing;
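      // With these values the total render length works out to exactly
      // 60 * (819 + 163) = 58920 frames, about 1.8 seconds at 32768 Hz.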

      let context = 0;
      let expectedAudio;

      function createTestBuffer(frequency, sampleRate) {
        // Create a buffer containing two periods at this frequency.
        // The 1st half is a pure sine wave period scaled by a linear ramp from
        // 0 -> 1. The 2nd half of the buffer corresponds exactly to one pure
        // sine wave period.
        let onePeriodDuration = 1 / frequency;
        let sampleFrameLength = 2 * onePeriodDuration * sampleRate;

        let audioBuffer =
            context.createBuffer(1, sampleFrameLength, sampleRate);

        let n = audioBuffer.length;
        let channelData = audioBuffer.getChannelData(0);

        for (let i = 0; i < n; ++i) {
          let sample = Math.sin(frequency * 2.0 * Math.PI * i / sampleRate);

          // Linear ramp from 0 -> 1 for the first period.
          // Stay at 1 for the 2nd period.
          let scale = i < n / 2 ? i / (n / 2) : 1;
          sample *= scale;

          channelData[i] = sample;
        }

        return audioBuffer;
      }

      function playNote(buffer, time, duration, playbackRate) {
        let source = context.createBufferSource();
        source.buffer = buffer;
        source.playbackRate.value = playbackRate;

        let gainNode = context.createGain();
        source.connect(gainNode);
        gainNode.connect(context.destination);

        // Loop the 2nd half of the buffer.
        // We should be able to hear any problems as glitches if the looping
        // incorrectly indexes to anywhere outside of the desired loop-points,
        // since only the 2nd half is a perfect sine-wave cycle, while the 1st
        // half of the buffer contains a linear ramp of a sine-wave cycle.
        source.loop = true;
        source.loopStart = 0.5 * buffer.duration;
        source.loopEnd = buffer.duration;

        // Play for the given duration.
        source.start(time);
        source.stop(time + duration);

        // Apply a quick linear fade-out to avoid a click at the end of the
        // note.
        gainNode.gain.value = 1;
        gainNode.gain.setValueAtTime(1, time + duration - 0.005);
        gainNode.gain.linearRampToValueAtTime(0, time + duration);
      }

      audit.define(
          {
            label: 'initialize',
            description: 'Set up context and expected results'
          },
          async (task, should) => {
            // Create offline audio context.
            should(
                () => {context = new OfflineAudioContext(
                    2, sampleRate * lengthInSeconds, sampleRate)},
                'Creating context for testing')
                .notThrow();
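
            // Fetch and decode the pre-rendered reference file; the 'test'
            // task compares the offline rendering against |expectedAudio|.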
            const arrayBuffer = await Audit.loadFileFromUrl(
                'resources/audiobuffersource-loop-points-expected.wav');
            expectedAudio = await context.decodeAudioData(arrayBuffer);
            task.done();
          });

      audit.define(
          {
            label: 'test',
            description: 'Test loop points and compare with expected results'
          },
          (task, should) => {
            // Create the test buffer.
            // We'll loop this with the loop-points set for the 2nd half of
            // this buffer.
            let buffer = createTestBuffer(440.0, sampleRate);

            // Play all the notes as a chromatic scale.
            for (let i = 0; i < numberOfNotes; ++i) {
              let time = i * noteSpacing;

              // Start 30 semitones (2.5 octaves) below the buffer's 440 Hz
              // base pitch.
              let semitone = i - numberOfNotes / 2;

              // Convert from semitone to playback rate.
              let playbackRate = Math.pow(2, semitone / 12);

              playNote(buffer, time, noteDuration, playbackRate);
            }
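
            // Render the whole scale offline and compare each channel of the
            // result against the decoded reference.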
            context.startRendering()
                .then(renderedAudio => {
                  // Compute a threshold based on the maximum error, |maxUlp|,
                  // in ULP. This is experimentally determined. Assuming the
                  // reference is a 16-bit WAV file, full scale corresponds to
                  // 32768, so one ULP is 1/32768.
                  let maxUlp = 1.9532e-3;
                  let threshold = maxUlp / 32768;

                  for (let k = 0; k < renderedAudio.numberOfChannels; ++k) {
                    should(
                        renderedAudio.getChannelData(k),
                        'Rendered audio for channel ' + k)
                        .beCloseToArray(
                            expectedAudio.getChannelData(k),
                            {absoluteThreshold: threshold});
                  }

                  const filename = 'audiobuffersource-loop-points-actual.wav';
                  if (downloadAudioBuffer(renderedAudio, filename, true)) {
                    should(true, 'Saved reference file').message(filename, '');
                  }
                })
                .then(() => task.done());
          });

      audit.run();
    </script>
  </body>
</html>