<!doctype html>
<html>
<head>
<meta charset="utf-8">
<title>Testing getUserMedia streams plugged into Web Audio</title>
<script src="../../resources/testharness.js"></script>
<script src="../../resources/testharnessreport.js"></script>
<script src="../../webrtc/routines.js"></script>
</head>
<body>
<script>
if (window.testRunner)
    testRunner.setUserMediaPermission(true);
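// Route a getUserMedia audio stream into a ScriptProcessorNode and finish once the mock capture source produces enough energy.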
var finishTest, errorTest;
promise_test((test) => {
    return navigator.mediaDevices.getUserMedia({ audio: true }).then((stream) => {
        return new Promise((resolve, reject) => {
            finishTest = resolve;
            errorTest = reject;

            var audioContext = new AudioContext();
            var script = audioContext.createScriptProcessor(2048, 1, 1);
            script.onaudioprocess = (event) => {
                var squaredSum = 0.0;
                event.inputBuffer.getChannelData(0).forEach((value) => {
                    squaredSum += value * value;
                });
                // The mock source should send some bips with sufficient energy to finish the test.
                if (squaredSum > 30) {
                    source.disconnect(script);
                    script.disconnect(audioContext.destination);
                    finishTest();
                }
            };

            let source = audioContext.createMediaStreamSource(stream);
            source.connect(script);
            script.connect(audioContext.destination);
        });
    });
}, "Plugging a getUserMedia audio stream into Web Audio");
function waitFor(duration)
{
    return new Promise(resolve => setTimeout(resolve, duration));
}
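// Switch the mock capture track from mono to stereo while it is routed through a ScriptProcessorNode.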
promise_test(async (test) => {
    if (!window.internals)
        return Promise.reject("Internals API required");

    const stream = await navigator.mediaDevices.getUserMedia({ audio: { deviceId: true } });
    internals.setMockAudioTrackChannelNumber(stream.getAudioTracks()[0], 1);

    var audioContext = new AudioContext();
    var script = audioContext.createScriptProcessor(2048, 1, 1);
    let source = audioContext.createMediaStreamSource(stream);
    source.connect(script);
    script.connect(audioContext.destination);

    await waitFor(200);
    internals.setMockAudioTrackChannelNumber(stream.getAudioTracks()[0], 2);
    await waitFor(200);

    source.disconnect(script);
    script.disconnect(audioContext.destination);
}, "Web Audio should work even if the number of channels of a track increases from 1 to 2");
promise_test(async (test) => {
    if (!window.internals)
        return Promise.reject("Internals API required");

    const stream = await navigator.mediaDevices.getUserMedia({ audio: true, video: false });
    internals.useMockAudioDestinationCocoa();

    var audioContext = new AudioContext();
    var analyzer = audioContext.createAnalyser();
    analyzer.fftSize = 256;
    let source = audioContext.createMediaStreamSource(stream);
    source.connect(analyzer);
    analyzer.connect(audioContext.destination);

    await waitFor(500);

    source.disconnect(analyzer);
    analyzer.disconnect(audioContext.destination);
}, "Web Audio should work with a mock audio destination");
var context;
async function checkForNoise(stream, expectedHeardNoise, counter)
{
    if (!counter)
        counter = 1;
    else if (++counter > 50)
        return false;

    const results = await analyseAudio(stream, 100, context);
    if (results.heardNoise == expectedHeardNoise)
        return true;

    return checkForNoise(stream, expectedHeardNoise, counter);
}
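// With echo cancellation enabled the captured audio is expected to be silent; with it disabled, the mock source's noise should be heard.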
promise_test(async (test) => {
    context = new AudioContext();

    let stream = await navigator.mediaDevices.getUserMedia({ audio: { echoCancellation: true } });
    assert_true(stream.getAudioTracks()[0].getConstraints().echoCancellation, "echoCancellation constraint should be set");
    assert_true(stream.getAudioTracks()[0].getSettings().echoCancellation, "echoCancellation setting should be true");
    assert_true(await checkForNoise(stream, false), "should not hear noise with echo cancellation enabled");

    stream = await navigator.mediaDevices.getUserMedia({ audio: { echoCancellation: false } });
    assert_false(stream.getAudioTracks()[0].getSettings().echoCancellation, "echoCancellation setting should be false");
    assert_true(await checkForNoise(stream, true), "should hear noise with echo cancellation disabled");

    context.close();
    context = null;
}, "Check that echo cancellation can be disabled in a getUserMedia call");
</script>
</body>
</html>