<!DOCTYPE html>
<html>
<head>
    <title>mediastream page muted</title>
    <script src="../../resources/js-test-pre.js"></script>
    <script>
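    // Allow getDisplayMedia() to be called without a user gesture in the test environment.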
    if (window.internals)
        internals.setDisableGetDisplayMediaUserGestureConstraint(true);
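
    // Poll window.internals.pageMediaState() until it reports `activeState` but not `inactiveState`,
    // or fail after roughly 20 seconds of retries.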
    async function checkPageState(activeState, inactiveState) {
        await new Promise((resolve, reject) => {
            let retryCount = 0;
            let retryInterval = 100;
            let maximumRetryCount = 20 * 1000 / retryInterval;
            let test = () => {
                if (window.internals
                    && window.internals.pageMediaState().includes(activeState)
                    && !window.internals.pageMediaState().includes(inactiveState)) {
                    testPassed(`window.internals.pageMediaState().includes(${activeState}) became true`);
                    testPassed(`window.internals.pageMediaState().includes(${inactiveState}) became false`);
                    resolve();
                    return;
                }
                if (++retryCount > maximumRetryCount) {
                    testFailed(`Page muted state failed to change after ${maximumRetryCount * retryInterval / 1000} seconds`);
                    resolve();
                    return;
                }
                setTimeout(test, retryInterval);
            }
            setTimeout(test, 0);
        });
    }
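
    // Wait for 'mute'/'unmute' events on every track of `stream`. The `mute` and `unmute` callbacks
    // return "resolve", "reject", or "continue" to decide when the wait is over.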
    async function waitForMutedChange(stream, event, mute, unmute) {
        await new Promise((resolve, reject) => {
            const interval = 200;
            let timeout = setTimeout(() => {
                testFailed(`'${event}' event not fired in ${interval / 1000} seconds!`);
                resolve();
            }, interval);
            let checkResult = (callback) => {
                switch (callback()) {
                case "resolve":
                    clearTimeout(timeout);
                    resolve();
                    break;
                case "reject":
                    clearTimeout(timeout);
                    reject();
                    break;
                case "continue":
                    break;
                }
            }
            stream.getTracks().forEach(track => {
                track.onmute = (evt) => { debug('EVENT: mute'); checkResult(mute); }
                track.onunmute = (evt) => { debug('EVENT: unmute'); checkResult(unmute); }
            });
        });
    }
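
    // Capture the screen with getDisplayMedia() and check that its single video track starts unmuted.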
    async function createScreenCaptureStream() {
        debug("<br>*** Creating screen capture stream");
        displayStream = await navigator.mediaDevices.getDisplayMedia({ video: true });
        shouldBeType("displayStream", "Object");
        shouldBe("displayStream.getTracks().length", "1");
        shouldBe("displayStream.getVideoTracks().length", "1");
        screenCaptureTrack = displayStream.getVideoTracks()[0];
        shouldBeFalse("screenCaptureTrack.muted");
    }
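
    // Capture camera and microphone with getUserMedia() and check that both tracks start unmuted.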
    async function createCameraMicrophoneStream() {
        debug("<br>*** Creating camera and microphone stream");
        cameraStream = await navigator.mediaDevices.getUserMedia({ audio: true, video: true });
        shouldBeType("cameraStream", "Object");
        shouldBe("cameraStream.getTracks().length", "2");
        microphoneCaptureTrack = cameraStream.getAudioTracks()[0];
        cameraCaptureTrack = cameraStream.getVideoTracks()[0];
        shouldBeFalse("microphoneCaptureTrack.muted");
        shouldBeFalse("cameraCaptureTrack.muted");
    }
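
    // Mute only the "screencapture" kind and verify that the display track mutes while the camera
    // and microphone tracks stay active.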
    async function muteScreenCapture() {
        debug("<br>*** Muting screen capture");
        if (window.internals)
            window.internals.setPageMuted("screencapture");
        await waitForMutedChange(displayStream, "mute",
            () => { return "resolve"; },
            () => { testFailed("unexpected 'unmute' event"); return "reject"; }
        );
        shouldBeTrue("screenCaptureTrack.muted");
        shouldBeFalse("microphoneCaptureTrack.muted");
        shouldBeFalse("cameraCaptureTrack.muted");
        await checkPageState("HasMutedDisplayCaptureDevice", "HasActiveDisplayCaptureDevice");
        await checkPageState("HasActiveAudioCaptureDevice", "HasMutedAudioCaptureDevice");
        await checkPageState("HasActiveVideoCaptureDevice", "HasMutedVideoCaptureDevice");
    }
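
    // Also mute "capturedevices" and verify that the camera and microphone tracks mute as well.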
    async function muteCameraMicrophone() {
        debug("<br>*** Muting camera and microphone");
        if (window.internals)
            window.internals.setPageMuted("screencapture,capturedevices");
        count = 0;
        await waitForMutedChange(cameraStream, "mute",
            () => { return (++count == 2) ? "resolve" : "continue"; },
            () => { testFailed("unexpected 'unmute' event"); return "reject"; }
        );
        shouldBeTrue("screenCaptureTrack.muted");
        shouldBeTrue("microphoneCaptureTrack.muted");
        shouldBeTrue("cameraCaptureTrack.muted");
        await checkPageState("HasMutedDisplayCaptureDevice", "HasActiveDisplayCaptureDevice");
        await checkPageState("HasMutedAudioCaptureDevice", "HasActiveAudioCaptureDevice");
        await checkPageState("HasMutedVideoCaptureDevice", "HasActiveVideoCaptureDevice");
    }
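
    // Restore the page mute state to "screencapture" only and verify that the camera and microphone
    // tracks unmute while the display track stays muted.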
    async function unmuteCameraMicrophone() {
        debug("<br>*** Unmuting camera and microphone");
        if (window.internals)
            internals.setPageMuted("screencapture");
        count = 0;
        await waitForMutedChange(cameraStream, "unmute",
            () => { testFailed("unexpected 'mute' event"); return "reject"; },
            () => { return (++count == 2) ? "resolve" : "continue"; }
        );
        shouldBeTrue("screenCaptureTrack.muted");
        shouldBeFalse("microphoneCaptureTrack.muted");
        shouldBeFalse("cameraCaptureTrack.muted");
        await checkPageState("HasMutedDisplayCaptureDevice", "HasActiveDisplayCaptureDevice");
        await checkPageState("HasActiveAudioCaptureDevice", "HasMutedAudioCaptureDevice");
        await checkPageState("HasActiveVideoCaptureDevice", "HasMutedVideoCaptureDevice");
    }
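
    // Clear the page mute state entirely and verify that every track ends up unmuted.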
    async function unmuteScreenCapture() {
        debug("<br>*** Unmuting screen capture");
        if (window.internals)
            internals.setPageMuted("");
        await waitForMutedChange(displayStream, "unmute",
            () => { testFailed("unexpected 'mute' event"); return "reject"; },
            () => { return "resolve"; }
        );
        shouldBeFalse("screenCaptureTrack.muted");
        shouldBeFalse("microphoneCaptureTrack.muted");
        shouldBeFalse("cameraCaptureTrack.muted");
        await checkPageState("HasActiveDisplayCaptureDevice", "HasMutedDisplayCaptureDevice");
        await checkPageState("HasActiveAudioCaptureDevice", "HasMutedAudioCaptureDevice");
        await checkPageState("HasActiveVideoCaptureDevice", "HasMutedVideoCaptureDevice");
    }
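
    // Test harness setup: run asynchronously, grant capture permission, and use the mock capture
    // devices with screen capture enabled.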
    jsTestIsAsync = true;
    if (window.testRunner) {
        testRunner.setUserMediaPermission(true);
        testRunner.waitUntilDone();
    }
    if (window.internals) {
        window.internals.setMockMediaCaptureDevicesEnabled(true);
        window.internals.settings.setScreenCaptureEnabled(true);
    }
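
    // Run the capture, mute, and unmute steps in order, then finish the test.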
    (async function() {
        try {
            await createScreenCaptureStream();
            await createCameraMicrophoneStream();
            await muteScreenCapture();
            await muteCameraMicrophone();
            await unmuteCameraMicrophone();
            await unmuteScreenCapture();
        } catch (exception) {
            testFailed(`Unexpected exception: ${exception}`);
        }
        finishJSTest();
    })();
    </script>
</head>
<body>
<script src="../../resources/js-test-post.js"></script>
</body>
</html>