/*
* Copyright (C) 2007, 2009 Apple Inc. All rights reserved.
* Copyright (C) 2007 Collabora Ltd. All rights reserved.
* Copyright (C) 2007 Alp Toker <alp@atoker.com>
* Copyright (C) 2009 Gustavo Noronha Silva <gns@gnome.org>
* Copyright (C) 2009, 2010, 2015, 2016 Igalia S.L
* Copyright (C) 2015, 2016 Metrological Group B.V.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public License
* along with this library; see the file COPYING.LIB. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#include "config.h"
#include "MediaPlayerPrivateGStreamerBase.h"
#if ENABLE(VIDEO) && USE(GSTREAMER)
#include "GStreamerCommon.h"
#include "GraphicsContext.h"
#include "ImageGStreamer.h"
#include "ImageOrientation.h"
#include "IntRect.h"
#include "Logging.h"
#include "MediaPlayer.h"
#include "NotImplemented.h"
#include "VideoSinkGStreamer.h"
#include "WebKitWebSourceGStreamer.h"
#include <wtf/glib/GUniquePtr.h>
#include <wtf/text/AtomicString.h>
#include <wtf/text/CString.h>
#include <wtf/MathExtras.h>
#include <wtf/StringPrintStream.h>
#include <gst/audio/streamvolume.h>
#include <gst/video/gstvideometa.h>
#if ENABLE(ENCRYPTED_MEDIA)
#include "CDMInstance.h"
#include "GStreamerEMEUtilities.h"
#include "SharedBuffer.h"
#include "WebKitClearKeyDecryptorGStreamer.h"
#endif
#if USE(GSTREAMER_GL)
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
#define GST_GL_CAPS_FORMAT "{ BGRx, BGRA }"
#define TEXTURE_MAPPER_COLOR_CONVERT_FLAG TextureMapperGL::ShouldConvertTextureBGRAToRGBA
#define TEXTURE_COPIER_COLOR_CONVERT_FLAG VideoTextureCopierGStreamer::ColorConversion::ConvertBGRAToRGBA
#else
#define GST_GL_CAPS_FORMAT "{ xRGB, ARGB }"
#define TEXTURE_MAPPER_COLOR_CONVERT_FLAG TextureMapperGL::ShouldConvertTextureARGBToRGBA
#define TEXTURE_COPIER_COLOR_CONVERT_FLAG VideoTextureCopierGStreamer::ColorConversion::ConvertARGBToRGBA
#endif
#include <gst/app/gstappsink.h>
#if USE(LIBEPOXY)
// Include the <epoxy/gl.h> header before <gst/gl/gl.h>.
#include <epoxy/gl.h>
#endif
#define GST_USE_UNSTABLE_API
#include <gst/gl/gl.h>
#undef GST_USE_UNSTABLE_API
#include "GLContext.h"
#if USE(GLX)
#include "GLContextGLX.h"
#include <gst/gl/x11/gstgldisplay_x11.h>
#endif
#if USE(EGL)
#include "GLContextEGL.h"
#include <gst/gl/egl/gstgldisplay_egl.h>
#endif
#if PLATFORM(X11)
#include "PlatformDisplayX11.h"
#endif
#if PLATFORM(WAYLAND)
#include "PlatformDisplayWayland.h"
#include <gst/gl/wayland/gstgldisplay_wayland.h>
#elif PLATFORM(WPE)
#include "PlatformDisplayWPE.h"
#endif
// gstglapi.h may include eglplatform.h, which in turn includes X.h. X.h
// defines None, which breaks the MediaPlayer::None enum value.
#if PLATFORM(X11) && GST_GL_HAVE_PLATFORM_EGL
#undef None
#endif // PLATFORM(X11) && GST_GL_HAVE_PLATFORM_EGL
#include "VideoTextureCopierGStreamer.h"
#endif // USE(GSTREAMER_GL)
#if USE(TEXTURE_MAPPER_GL)
#include "BitmapTextureGL.h"
#include "BitmapTexturePool.h"
#include "TextureMapperContextAttributes.h"
#include "TextureMapperGL.h"
#include "TextureMapperPlatformLayerBuffer.h"
#include "TextureMapperPlatformLayerProxy.h"
#if USE(CAIRO) && ENABLE(ACCELERATED_2D_CANVAS)
#include <cairo-gl.h>
#endif
#endif // USE(TEXTURE_MAPPER_GL)
GST_DEBUG_CATEGORY(webkit_media_player_debug);
#define GST_CAT_DEFAULT webkit_media_player_debug
namespace WebCore {
using namespace std;
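// Registers WebKit's own GStreamer elements (currently the ClearKey
// decryptor) so that playbin/decodebin can autoplug them. Registering with
// GST_RANK_PRIMARY + 100 makes them outrank any other element advertising
// the same capabilities.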
void registerWebKitGStreamerElements()
{
#if ENABLE(ENCRYPTED_MEDIA)
if (!webkitGstCheckVersion(1, 6, 1))
return;
GRefPtr<GstElementFactory> clearKeyDecryptorFactory = adoptGRef(gst_element_factory_find("webkitclearkey"));
if (!clearKeyDecryptorFactory)
gst_element_register(nullptr, "webkitclearkey", GST_RANK_PRIMARY + 100, WEBKIT_TYPE_MEDIA_CK_DECRYPT);
#endif
}
bool MediaPlayerPrivateGStreamerBase::initializeGStreamerAndRegisterWebKitElements()
{
if (!initializeGStreamer())
return false;
registerWebKitGStreamerElements();
GRefPtr<GstElementFactory> srcFactory = adoptGRef(gst_element_factory_find("webkitwebsrc"));
if (!srcFactory) {
GST_DEBUG_CATEGORY_INIT(webkit_media_player_debug, "webkitmediaplayer", 0, "WebKit media player");
gst_element_register(nullptr, "webkitwebsrc", GST_RANK_PRIMARY + 100, WEBKIT_TYPE_WEB_SRC);
}
return true;
}
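// Iterative Euclidean algorithm, used below to reduce the display aspect
// ratio fraction and avoid integer overflow when scaling the video size.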
static int greatestCommonDivisor(int a, int b)
{
while (b) {
int temp = a;
a = b;
b = temp % b;
}
return ABS(a);
}
#if USE(TEXTURE_MAPPER_GL)
static inline TextureMapperGL::Flags texMapFlagFromOrientation(const ImageOrientation& orientation)
{
switch (orientation) {
case DefaultImageOrientation:
return 0;
case OriginRightTop:
return TextureMapperGL::ShouldRotateTexture90;
case OriginBottomRight:
return TextureMapperGL::ShouldRotateTexture180;
case OriginLeftBottom:
return TextureMapperGL::ShouldRotateTexture270;
default:
ASSERT_NOT_REACHED();
}
return 0;
}
#endif
#if USE(GSTREAMER_GL)
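// Holds a GL-backed GstVideoFrame mapped from a GstSample. The frame stays
// mapped (and its texture usable) for as long as the TextureMapper buffer
// references this holder; the destructor unmaps it.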
class GstVideoFrameHolder : public TextureMapperPlatformLayerBuffer::UnmanagedBufferDataHolder {
public:
explicit GstVideoFrameHolder(GstSample* sample, TextureMapperGL::Flags flags)
{
GstVideoInfo videoInfo;
if (UNLIKELY(!getSampleVideoInfo(sample, videoInfo)))
return;
m_size = IntSize(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
m_flags = flags | (GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? TextureMapperGL::ShouldBlend : 0) | TEXTURE_MAPPER_COLOR_CONVERT_FLAG;
GstBuffer* buffer = gst_sample_get_buffer(sample);
if (UNLIKELY(!gst_video_frame_map(&m_videoFrame, &videoInfo, buffer, static_cast<GstMapFlags>(GST_MAP_READ | GST_MAP_GL))))
return;
m_textureID = *reinterpret_cast<GLuint*>(m_videoFrame.data[0]);
m_isValid = true;
}
virtual ~GstVideoFrameHolder()
{
if (UNLIKELY(!m_isValid))
return;
gst_video_frame_unmap(&m_videoFrame);
}
const IntSize& size() const { return m_size; }
TextureMapperGL::Flags flags() const { return m_flags; }
GLuint textureID() const { return m_textureID; }
bool isValid() const { return m_isValid; }
private:
GstVideoFrame m_videoFrame;
IntSize m_size;
TextureMapperGL::Flags m_flags;
GLuint m_textureID;
bool m_isValid { false };
};
#endif // USE(GSTREAMER_GL)
MediaPlayerPrivateGStreamerBase::MediaPlayerPrivateGStreamerBase(MediaPlayer* player)
: m_notifier(MainThreadNotifier<MainThreadNotification>::create())
, m_player(player)
, m_fpsSink(nullptr)
, m_readyState(MediaPlayer::HaveNothing)
, m_networkState(MediaPlayer::Empty)
, m_drawTimer(RunLoop::main(), this, &MediaPlayerPrivateGStreamerBase::repaint)
#if USE(TEXTURE_MAPPER_GL)
, m_platformLayerProxy(adoptRef(new TextureMapperPlatformLayerProxy()))
#endif
{
}
MediaPlayerPrivateGStreamerBase::~MediaPlayerPrivateGStreamerBase()
{
#if ENABLE(ENCRYPTED_MEDIA)
m_protectionCondition.notifyAll();
#endif
m_notifier->invalidate();
if (m_videoSink) {
g_signal_handlers_disconnect_matched(m_videoSink.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);
#if USE(GSTREAMER_GL)
if (GST_IS_BIN(m_videoSink.get())) {
GRefPtr<GstElement> appsink = adoptGRef(gst_bin_get_by_name(GST_BIN_CAST(m_videoSink.get()), "webkit-gl-video-sink"));
g_signal_handlers_disconnect_by_data(appsink.get(), this);
}
#endif
}
if (m_volumeElement)
g_signal_handlers_disconnect_matched(m_volumeElement.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);
// This will release the GStreamer thread from m_drawCondition in non-AC mode in case there's an ongoing triggerRepaint call
// waiting there, and ensure that any triggerRepaint call reaching the lock won't wait on m_drawCondition.
cancelRepaint(true);
// The change to GST_STATE_NULL state is always synchronous. So after this gets executed we don't need to worry
// about handlers running in the GStreamer thread.
if (m_pipeline)
gst_element_set_state(m_pipeline.get(), GST_STATE_NULL);
m_player = nullptr;
}
void MediaPlayerPrivateGStreamerBase::setPipeline(GstElement* pipeline)
{
m_pipeline = pipeline;
GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
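// The sync handler runs on the thread that posted the message, not on the
// main thread. Returning GST_BUS_DROP consumes the message, so it must be
// unreffed here; GST_BUS_PASS forwards it to the regular async bus watch.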
gst_bus_set_sync_handler(bus.get(), [](GstBus*, GstMessage* message, gpointer userData) {
auto& player = *static_cast<MediaPlayerPrivateGStreamerBase*>(userData);
if (player.handleSyncMessage(message)) {
gst_message_unref(message);
return GST_BUS_DROP;
}
return GST_BUS_PASS;
}, this, nullptr);
}
#if ENABLE(ENCRYPTED_MEDIA)
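// Parses the "stream-encryption-events" (a GstValueList of protection
// events) and "stream-encryption-systems" (a string array of key-system
// IDs) fields expected on the need-context message.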
static std::pair<Vector<GRefPtr<GstEvent>>, Vector<String>> extractEventsAndSystemsFromMessage(GstMessage* message)
{
const GstStructure* structure = gst_message_get_structure(message);
const GValue* streamEncryptionAllowedSystemsValue = gst_structure_get_value(structure, "stream-encryption-systems");
ASSERT(streamEncryptionAllowedSystemsValue && G_VALUE_HOLDS(streamEncryptionAllowedSystemsValue, G_TYPE_STRV));
const char** streamEncryptionAllowedSystems = reinterpret_cast<const char**>(g_value_get_boxed(streamEncryptionAllowedSystemsValue));
ASSERT(streamEncryptionAllowedSystems);
Vector<String> streamEncryptionAllowedSystemsVector;
unsigned i;
for (i = 0; streamEncryptionAllowedSystems[i]; ++i)
streamEncryptionAllowedSystemsVector.append(streamEncryptionAllowedSystems[i]);
const GValue* streamEncryptionEventsList = gst_structure_get_value(structure, "stream-encryption-events");
ASSERT(streamEncryptionEventsList && GST_VALUE_HOLDS_LIST(streamEncryptionEventsList));
unsigned streamEncryptionEventsListSize = gst_value_list_get_size(streamEncryptionEventsList);
Vector<GRefPtr<GstEvent>> streamEncryptionEventsVector;
for (i = 0; i < streamEncryptionEventsListSize; ++i)
streamEncryptionEventsVector.append(GRefPtr<GstEvent>(static_cast<GstEvent*>(g_value_get_boxed(gst_value_list_get_value(streamEncryptionEventsList, i)))));
return std::make_pair(streamEncryptionEventsVector, streamEncryptionAllowedSystemsVector);
}
#endif
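// Called from the bus sync handler above, i.e. on a GStreamer streaming
// thread, never on the main thread. Returning true drops the message.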
bool MediaPlayerPrivateGStreamerBase::handleSyncMessage(GstMessage* message)
{
if (GST_MESSAGE_TYPE(message) != GST_MESSAGE_NEED_CONTEXT)
return false;
const gchar* contextType;
gst_message_parse_context_type(message, &contextType);
GST_DEBUG_OBJECT(pipeline(), "Handling %s need-context message for %s", contextType, GST_MESSAGE_SRC_NAME(message));
#if USE(GSTREAMER_GL)
GRefPtr<GstContext> elementContext = adoptGRef(requestGLContext(contextType));
if (elementContext) {
gst_element_set_context(GST_ELEMENT(message->src), elementContext.get());
return true;
}
#endif // USE(GSTREAMER_GL)
#if ENABLE(ENCRYPTED_MEDIA)
if (!g_strcmp0(contextType, "drm-preferred-decryption-system-id")) {
if (isMainThread()) {
GST_ERROR("can't handle drm-preferred-decryption-system-id need context message in the main thread");
ASSERT_NOT_REACHED();
return false;
}
GST_DEBUG_OBJECT(pipeline(), "handling drm-preferred-decryption-system-id need context message");
LockHolder lock(m_protectionMutex);
std::pair<Vector<GRefPtr<GstEvent>>, Vector<String>> streamEncryptionInformation = extractEventsAndSystemsFromMessage(message);
GST_TRACE("found %" G_GSIZE_FORMAT " protection events", streamEncryptionInformation.first.size());
Vector<uint8_t> concatenatedInitDataChunks;
unsigned concatenatedInitDataChunksNumber = 0;
String eventKeySystemIdString;
for (auto& event : streamEncryptionInformation.first) {
GST_TRACE("handling protection event %u", GST_EVENT_SEQNUM(event.get()));
const char* eventKeySystemId = nullptr;
GstBuffer* data = nullptr;
gst_event_parse_protection(event.get(), &eventKeySystemId, &data, nullptr);
GstMapInfo mapInfo;
if (!gst_buffer_map(data, &mapInfo, GST_MAP_READ)) {
GST_WARNING("cannot map %s protection data", eventKeySystemId);
break;
}
GST_TRACE("appending init data for %s of size %" G_GSIZE_FORMAT, eventKeySystemId, mapInfo.size);
GST_MEMDUMP("init data", reinterpret_cast<const unsigned char*>(mapInfo.data), mapInfo.size);
concatenatedInitDataChunks.append(mapInfo.data, mapInfo.size);
++concatenatedInitDataChunksNumber;
eventKeySystemIdString = eventKeySystemId;
if (streamEncryptionInformation.second.contains(eventKeySystemId)) {
GST_TRACE("considering init data handled for %s", eventKeySystemId);
m_handledProtectionEvents.add(GST_EVENT_SEQNUM(event.get()));
}
gst_buffer_unmap(data, &mapInfo);
}
if (!concatenatedInitDataChunksNumber)
return false;
if (concatenatedInitDataChunksNumber > 1)
eventKeySystemIdString = emptyString();
RunLoop::main().dispatch([weakThis = m_weakPtrFactory.createWeakPtr(*this), eventKeySystemIdString, initData = WTFMove(concatenatedInitDataChunks)] {
if (!weakThis)
return;
GST_DEBUG_OBJECT(weakThis->pipeline(), "scheduling initializationDataEncountered event for %s with concatenated init datas size of %" G_GSIZE_FORMAT, eventKeySystemIdString.utf8().data(), initData.size());
GST_MEMDUMP("init datas", initData.data(), initData.size());
weakThis->m_player->initializationDataEncountered(ASCIILiteral("cenc"), ArrayBuffer::create(initData.data(), initData.size()));
});
GST_INFO_OBJECT(pipeline(), "waiting for a CDM instance");
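// Block this streaming thread (for up to 4 seconds) until the main thread
// attaches a CDM instance via cdmInstanceAttached(), which notifies
// m_protectionCondition.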
m_protectionCondition.waitFor(m_protectionMutex, Seconds(4), [this] {
return this->m_cdmInstance;
});
if (m_cdmInstance && !m_cdmInstance->keySystem().isEmpty()) {
const char* preferredKeySystemUuid = GStreamerEMEUtilities::keySystemToUuid(m_cdmInstance->keySystem());
GST_INFO_OBJECT(pipeline(), "working with %s, continuing with %s on %s", m_cdmInstance->keySystem().utf8().data(), preferredKeySystemUuid, GST_MESSAGE_SRC_NAME(message));
GRefPtr<GstContext> context = adoptGRef(gst_context_new("drm-preferred-decryption-system-id", FALSE));
GstStructure* contextStructure = gst_context_writable_structure(context.get());
gst_structure_set(contextStructure, "decryption-system-id", G_TYPE_STRING, preferredKeySystemUuid, nullptr);
gst_element_set_context(GST_ELEMENT(GST_MESSAGE_SRC(message)), context.get());
} else
GST_WARNING("no proper CDM instance attached");
return true;
}
#endif // ENABLE(ENCRYPTED_MEDIA)
return false;
}
#if USE(GSTREAMER_GL)
GstContext* MediaPlayerPrivateGStreamerBase::requestGLContext(const char* contextType)
{
if (!ensureGstGLContext())
return nullptr;
if (!g_strcmp0(contextType, GST_GL_DISPLAY_CONTEXT_TYPE)) {
GstContext* displayContext = gst_context_new(GST_GL_DISPLAY_CONTEXT_TYPE, TRUE);
gst_context_set_gl_display(displayContext, gstGLDisplay());
return displayContext;
}
if (!g_strcmp0(contextType, "gst.gl.app_context")) {
GstContext* appContext = gst_context_new("gst.gl.app_context", TRUE);
GstStructure* structure = gst_context_writable_structure(appContext);
#if GST_CHECK_VERSION(1, 11, 0)
gst_structure_set(structure, "context", GST_TYPE_GL_CONTEXT, gstGLContext(), nullptr);
#else
gst_structure_set(structure, "context", GST_GL_TYPE_CONTEXT, gstGLContext(), nullptr);
#endif
return appContext;
}
return nullptr;
}
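// Wraps WebKit's compositing GL display and sharing context into
// GstGLDisplay/GstGLContext objects so that GStreamer renders into textures
// shareable with the compositor. The wrapped handles remain owned by WebKit.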
bool MediaPlayerPrivateGStreamerBase::ensureGstGLContext()
{
if (m_glContext)
return true;
auto& sharedDisplay = PlatformDisplay::sharedDisplayForCompositing();
// The floating ref removal support was added in https://bugzilla.gnome.org/show_bug.cgi?id=743062.
bool shouldAdoptRef = webkitGstCheckVersion(1, 13, 1);
if (!m_glDisplay) {
#if PLATFORM(X11)
#if USE(GLX)
if (is<PlatformDisplayX11>(sharedDisplay)) {
GST_DEBUG_OBJECT(pipeline(), "Creating X11 shared GL display");
if (shouldAdoptRef)
m_glDisplay = adoptGRef(GST_GL_DISPLAY(gst_gl_display_x11_new_with_display(downcast<PlatformDisplayX11>(sharedDisplay).native())));
else
m_glDisplay = GST_GL_DISPLAY(gst_gl_display_x11_new_with_display(downcast<PlatformDisplayX11>(sharedDisplay).native()));
}
#elif USE(EGL)
if (is<PlatformDisplayX11>(sharedDisplay)) {
GST_DEBUG_OBJECT(pipeline(), "Creating X11 shared EGL display");
if (shouldAdoptRef)
m_glDisplay = adoptGRef(GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayX11>(sharedDisplay).eglDisplay())));
else
m_glDisplay = GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayX11>(sharedDisplay).eglDisplay()));
}
#endif
#endif
#if PLATFORM(WAYLAND)
if (is<PlatformDisplayWayland>(sharedDisplay)) {
GST_DEBUG_OBJECT(pipeline(), "Creating Wayland shared display");
if (shouldAdoptRef)
m_glDisplay = adoptGRef(GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayWayland>(sharedDisplay).eglDisplay())));
else
m_glDisplay = GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayWayland>(sharedDisplay).eglDisplay()));
}
#endif
#if PLATFORM(WPE)
ASSERT(is<PlatformDisplayWPE>(sharedDisplay));
GST_DEBUG_OBJECT(pipeline(), "Creating WPE shared EGL display");
if (shouldAdoptRef)
m_glDisplay = adoptGRef(GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayWPE>(sharedDisplay).eglDisplay())));
else
m_glDisplay = GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayWPE>(sharedDisplay).eglDisplay()));
#endif
ASSERT(m_glDisplay);
}
GLContext* webkitContext = sharedDisplay.sharingGLContext();
// EGL and GLX are mutually exclusive, no need for ifdefs here.
GstGLPlatform glPlatform = webkitContext->isEGLContext() ? GST_GL_PLATFORM_EGL : GST_GL_PLATFORM_GLX;
#if USE(OPENGL_ES)
GstGLAPI glAPI = GST_GL_API_GLES2;
#elif USE(OPENGL)
GstGLAPI glAPI = GST_GL_API_OPENGL;
#else
ASSERT_NOT_REACHED();
#endif
PlatformGraphicsContext3D contextHandle = webkitContext->platformContext();
if (!contextHandle)
return false;
if (shouldAdoptRef)
m_glContext = adoptGRef(gst_gl_context_new_wrapped(m_glDisplay.get(), reinterpret_cast<guintptr>(contextHandle), glPlatform, glAPI));
else
m_glContext = gst_gl_context_new_wrapped(m_glDisplay.get(), reinterpret_cast<guintptr>(contextHandle), glPlatform, glAPI);
return true;
}
#endif // USE(GSTREAMER_GL)
// Returns the natural size of the video, i.e. the frame size with the pixel aspect ratio applied.
FloatSize MediaPlayerPrivateGStreamerBase::naturalSize() const
{
if (!hasVideo())
return FloatSize();
if (!m_videoSize.isEmpty())
return m_videoSize;
auto sampleLocker = holdLock(m_sampleMutex);
if (!GST_IS_SAMPLE(m_sample.get()))
return FloatSize();
GstCaps* caps = gst_sample_get_caps(m_sample.get());
if (!caps)
return FloatSize();
// TODO: handle possible clean aperture data. See
// https://bugzilla.gnome.org/show_bug.cgi?id=596571
// TODO: handle possible transformation matrix. See
// https://bugzilla.gnome.org/show_bug.cgi?id=596326
// Get the video PAR and original size; if this fails, the
// video-sink has likely not yet negotiated its caps.
int pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride;
IntSize originalSize;
GstVideoFormat format;
if (!getVideoSizeAndFormatFromCaps(caps, originalSize, format, pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride))
return FloatSize();
#if USE(TEXTURE_MAPPER_GL)
// When using accelerated compositing, if the video is tagged as rotated 90 or 270 degrees, swap width and height.
if (m_renderingCanBeAccelerated) {
if (m_videoSourceOrientation.usesWidthAsHeight())
originalSize = originalSize.transposedSize();
}
#endif
GST_DEBUG_OBJECT(pipeline(), "Original video size: %dx%d", originalSize.width(), originalSize.height());
GST_DEBUG_OBJECT(pipeline(), "Pixel aspect ratio: %d/%d", pixelAspectRatioNumerator, pixelAspectRatioDenominator);
// Calculate DAR based on PAR and video size.
int displayWidth = originalSize.width() * pixelAspectRatioNumerator;
int displayHeight = originalSize.height() * pixelAspectRatioDenominator;
// Divide display width and height by their GCD to avoid possible overflows.
int displayAspectRatioGCD = greatestCommonDivisor(displayWidth, displayHeight);
displayWidth /= displayAspectRatioGCD;
displayHeight /= displayAspectRatioGCD;
// Apply DAR to original video size. This is the same behavior as in xvimagesink's setcaps function.
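// Example: a 720x576 frame with a 16:15 pixel aspect ratio gives
// 11520:8640, which reduces to a 4:3 display aspect ratio; 576 is
// divisible by 3, so the height is kept and the width becomes 768.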
guint64 width = 0, height = 0;
if (!(originalSize.height() % displayHeight)) {
GST_DEBUG_OBJECT(pipeline(), "Keeping video original height");
width = gst_util_uint64_scale_int(originalSize.height(), displayWidth, displayHeight);
height = static_cast<guint64>(originalSize.height());
} else if (!(originalSize.width() % displayWidth)) {
GST_DEBUG_OBJECT(pipeline(), "Keeping video original width");
height = gst_util_uint64_scale_int(originalSize.width(), displayHeight, displayWidth);
width = static_cast<guint64>(originalSize.width());
} else {
GST_DEBUG_OBJECT(pipeline(), "Approximating while keeping original video height");
width = gst_util_uint64_scale_int(originalSize.height(), displayWidth, displayHeight);
height = static_cast<guint64>(originalSize.height());
}
GST_DEBUG_OBJECT(pipeline(), "Natural size: %" G_GUINT64_FORMAT "x%" G_GUINT64_FORMAT, width, height);
m_videoSize = FloatSize(static_cast<int>(width), static_cast<int>(height));
return m_videoSize;
}
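// The cubic volume scale maps the player's linear [0, 1] volume onto a
// curve closer to perceived loudness; GStreamer converts it to the sink's
// native scale internally.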
void MediaPlayerPrivateGStreamerBase::setVolume(float volume)
{
if (!m_volumeElement)
return;
GST_DEBUG_OBJECT(pipeline(), "Setting volume: %f", volume);
gst_stream_volume_set_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_CUBIC, static_cast<double>(volume));
}
float MediaPlayerPrivateGStreamerBase::volume() const
{
if (!m_volumeElement)
return 0;
return gst_stream_volume_get_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_CUBIC);
}
void MediaPlayerPrivateGStreamerBase::notifyPlayerOfVolumeChange()
{
if (!m_player || !m_volumeElement)
return;
double volume;
volume = gst_stream_volume_get_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_CUBIC);
// get_volume() can return values greater than 1.0 if the user
// applies software gain via a third-party application (GNOME
// volume control, for instance).
volume = CLAMP(volume, 0.0, 1.0);
m_player->volumeChanged(static_cast<float>(volume));
}
void MediaPlayerPrivateGStreamerBase::volumeChangedCallback(MediaPlayerPrivateGStreamerBase* player)
{
// This is called when m_volumeElement receives the notify::volume signal.
GST_DEBUG_OBJECT(player->pipeline(), "Volume changed to: %f", player->volume());
player->m_notifier->notify(MainThreadNotification::VolumeChanged, [player] { player->notifyPlayerOfVolumeChange(); });
}
MediaPlayer::NetworkState MediaPlayerPrivateGStreamerBase::networkState() const
{
return m_networkState;
}
MediaPlayer::ReadyState MediaPlayerPrivateGStreamerBase::readyState() const
{
return m_readyState;
}
void MediaPlayerPrivateGStreamerBase::sizeChanged()
{
notImplemented();
}
void MediaPlayerPrivateGStreamerBase::setMuted(bool mute)
{
if (!m_volumeElement)
return;
bool currentValue = muted();
if (currentValue == mute)
return;
GST_INFO_OBJECT(pipeline(), "Set muted to %s", toString(mute).utf8().data());
g_object_set(m_volumeElement.get(), "mute", mute, nullptr);
}
bool MediaPlayerPrivateGStreamerBase::muted() const
{
if (!m_volumeElement)
return false;
gboolean muted;
g_object_get(m_volumeElement.get(), "mute", &muted, nullptr);
GST_INFO_OBJECT(pipeline(), "Player is muted: %s", toString(static_cast<bool>(muted)).utf8().data());
return muted;
}
void MediaPlayerPrivateGStreamerBase::notifyPlayerOfMute()
{
if (!m_player || !m_volumeElement)
return;
gboolean muted;
g_object_get(m_volumeElement.get(), "mute", &muted, nullptr);
m_player->muteChanged(static_cast<bool>(muted));
}
void MediaPlayerPrivateGStreamerBase::muteChangedCallback(MediaPlayerPrivateGStreamerBase* player)
{
// This is called when m_volumeElement receives the notify::mute signal.
player->m_notifier->notify(MainThreadNotification::MuteChanged, [player] { player->notifyPlayerOfMute(); });
}
void MediaPlayerPrivateGStreamerBase::acceleratedRenderingStateChanged()
{
m_renderingCanBeAccelerated = m_player && m_player->client().mediaPlayerAcceleratedCompositingEnabled();
}
#if USE(TEXTURE_MAPPER_GL)
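// Fast path: if the buffer carries a GstVideoGLTextureUploadMeta, upload
// straight into the target texture. Otherwise map the frame on the CPU and
// copy the single plane into the texture with updateContents().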
void MediaPlayerPrivateGStreamerBase::updateTexture(BitmapTextureGL& texture, GstVideoInfo& videoInfo)
{
GstBuffer* buffer = gst_sample_get_buffer(m_sample.get());
GstVideoGLTextureUploadMeta* meta;
if ((meta = gst_buffer_get_video_gl_texture_upload_meta(buffer))) {
if (meta->n_textures == 1) { // BGRx & BGRA formats use only one texture.
guint ids[4] = { texture.id(), 0, 0, 0 };
if (gst_video_gl_texture_upload_meta_upload(meta, ids))
return;
}
}
// Right now the TextureMapper only supports pixel formats with a single plane.
ASSERT(GST_VIDEO_INFO_N_PLANES(&videoInfo) == 1);
GstVideoFrame videoFrame;
if (!gst_video_frame_map(&videoFrame, &videoInfo, buffer, GST_MAP_READ))
return;
int stride = GST_VIDEO_FRAME_PLANE_STRIDE(&videoFrame, 0);
const void* srcData = GST_VIDEO_FRAME_PLANE_DATA(&videoFrame, 0);
texture.updateContents(srcData, WebCore::IntRect(0, 0, GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo)), WebCore::IntPoint(0, 0), stride);
gst_video_frame_unmap(&videoFrame);
}
RefPtr<TextureMapperPlatformLayerProxy> MediaPlayerPrivateGStreamerBase::proxy() const
{
return m_platformLayerProxy.copyRef();
}
void MediaPlayerPrivateGStreamerBase::swapBuffersIfNeeded()
{
}
void MediaPlayerPrivateGStreamerBase::pushTextureToCompositor()
{
#if !USE(GSTREAMER_GL)
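// RAII helper: takes m_drawMutex and signals m_drawCondition on scope exit,
// so the GStreamer thread blocked in triggerRepaint() is woken even if this
// function returns early.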
class ConditionNotifier {
public:
ConditionNotifier(Lock& lock, Condition& condition)
: m_locker(lock), m_condition(condition)
{
}
~ConditionNotifier()
{
m_condition.notifyOne();
}
private:
LockHolder m_locker;
Condition& m_condition;
};
ConditionNotifier notifier(m_drawMutex, m_drawCondition);
#endif
auto sampleLocker = holdLock(m_sampleMutex);
if (!GST_IS_SAMPLE(m_sample.get()))
return;
LockHolder holder(m_platformLayerProxy->lock());
if (!m_platformLayerProxy->isActive())
return;
#if USE(GSTREAMER_GL)
std::unique_ptr<GstVideoFrameHolder> frameHolder = std::make_unique<GstVideoFrameHolder>(m_sample.get(), texMapFlagFromOrientation(m_videoSourceOrientation));
if (UNLIKELY(!frameHolder->isValid()))
return;
std::unique_ptr<TextureMapperPlatformLayerBuffer> layerBuffer = std::make_unique<TextureMapperPlatformLayerBuffer>(frameHolder->textureID(), frameHolder->size(), frameHolder->flags(), GraphicsContext3D::RGBA);
layerBuffer->setUnmanagedBufferDataHolder(WTFMove(frameHolder));
m_platformLayerProxy->pushNextBuffer(WTFMove(layerBuffer));
#else
GstVideoInfo videoInfo;
if (UNLIKELY(!getSampleVideoInfo(m_sample.get(), videoInfo)))
return;
IntSize size = IntSize(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
std::unique_ptr<TextureMapperPlatformLayerBuffer> buffer = m_platformLayerProxy->getAvailableBuffer(size, GL_DONT_CARE);
if (UNLIKELY(!buffer)) {
auto texture = BitmapTextureGL::create(TextureMapperContextAttributes::get());
texture->reset(size, GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? BitmapTexture::SupportsAlpha : BitmapTexture::NoFlag);
buffer = std::make_unique<TextureMapperPlatformLayerBuffer>(WTFMove(texture));
}
updateTexture(buffer->textureGL(), videoInfo);
buffer->setExtraFlags(texMapFlagFromOrientation(m_videoSourceOrientation) | (GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? TextureMapperGL::ShouldBlend : 0));
m_platformLayerProxy->pushNextBuffer(WTFMove(buffer));
#endif // USE(GSTREAMER_GL)
}
#endif // USE(TEXTURE_MAPPER_GL)
void MediaPlayerPrivateGStreamerBase::repaint()
{
ASSERT(m_sample);
ASSERT(isMainThread());
m_player->repaint();
LockHolder lock(m_drawMutex);
m_drawCondition.notifyOne();
}
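// Called by the video sink (on a streaming thread) whenever a new sample is
// ready. In the non-accelerated path it blocks until the main thread has
// painted, which is why cancelRepaint() must wake m_drawCondition when the
// player is paused or destroyed.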
void MediaPlayerPrivateGStreamerBase::triggerRepaint(GstSample* sample)
{
bool triggerResize;
{
auto sampleLocker = holdLock(m_sampleMutex);
triggerResize = !m_sample;
m_sample = sample;
}
if (triggerResize) {
GST_DEBUG_OBJECT(pipeline(), "First sample reached the sink, triggering video dimensions update");
m_notifier->notify(MainThreadNotification::SizeChanged, [this] { m_player->sizeChanged(); });
}
if (!m_renderingCanBeAccelerated) {
LockHolder locker(m_drawMutex);
if (m_destroying)
return;
m_drawTimer.startOneShot(0_s);
m_drawCondition.wait(m_drawMutex);
return;
}
#if USE(TEXTURE_MAPPER_GL)
#if USE(GSTREAMER_GL)
pushTextureToCompositor();
#else
{
LockHolder lock(m_drawMutex);
if (!m_platformLayerProxy->scheduleUpdateOnCompositorThread([this] { this->pushTextureToCompositor(); }))
return;
m_drawCondition.wait(m_drawMutex);
}
#endif
#endif // USE(TEXTURE_MAPPER_GL)
}
void MediaPlayerPrivateGStreamerBase::repaintCallback(MediaPlayerPrivateGStreamerBase* player, GstSample* sample)
{
player->triggerRepaint(sample);
}
void MediaPlayerPrivateGStreamerBase::cancelRepaint(bool destroying)
{
// The goal of this function is to release the GStreamer thread from m_drawCondition in triggerRepaint() in the non-AC case,
// to avoid a deadlock if the player gets paused while waiting for drawing (see https://bugs.webkit.org/show_bug.cgi?id=170003):
// the main thread is waiting for the GStreamer thread to pause, but the GStreamer thread is locked waiting for the
// main thread to draw. This deadlock doesn't happen when using AC because the sample is processed (not painted) in the compositor
// thread, so the main thread can request the pause and wait if the GStreamer thread is waiting for the compositor thread.
//
// This function is also used when destroying the player (destroying parameter is true), to release the GStreamer thread from
// m_drawCondition and to ensure that new triggerRepaint calls won't wait on m_drawCondition.
if (!m_renderingCanBeAccelerated) {
LockHolder locker(m_drawMutex);
m_drawTimer.stop();
m_destroying = destroying;
m_drawCondition.notifyOne();
}
}
void MediaPlayerPrivateGStreamerBase::repaintCancelledCallback(MediaPlayerPrivateGStreamerBase* player)
{
player->cancelRepaint();
}
#if USE(GSTREAMER_GL)
GstFlowReturn MediaPlayerPrivateGStreamerBase::newSampleCallback(GstElement* sink, MediaPlayerPrivateGStreamerBase* player)
{
GRefPtr<GstSample> sample = adoptGRef(gst_app_sink_pull_sample(GST_APP_SINK(sink)));
player->triggerRepaint(sample.get());
return GST_FLOW_OK;
}
GstFlowReturn MediaPlayerPrivateGStreamerBase::newPrerollCallback(GstElement* sink, MediaPlayerPrivateGStreamerBase* player)
{
GRefPtr<GstSample> sample = adoptGRef(gst_app_sink_pull_preroll(GST_APP_SINK(sink)));
player->triggerRepaint(sample.get());
return GST_FLOW_OK;
}
void MediaPlayerPrivateGStreamerBase::flushCurrentBuffer()
{
GST_DEBUG_OBJECT(pipeline(), "Flushing video sample");
auto sampleLocker = holdLock(m_sampleMutex);
m_sample.clear();
{
LockHolder locker(m_platformLayerProxy->lock());
if (m_platformLayerProxy->isActive())
m_platformLayerProxy->dropCurrentBufferWhilePreservingTexture();
}
}
#endif
void MediaPlayerPrivateGStreamerBase::setSize(const IntSize& size)
{
m_size = size;
}
void MediaPlayerPrivateGStreamerBase::paint(GraphicsContext& context, const FloatRect& rect)
{
if (context.paintingDisabled())
return;
if (!m_player->visible())
return;
auto sampleLocker = holdLock(m_sampleMutex);
if (!GST_IS_SAMPLE(m_sample.get()))
return;
ImagePaintingOptions paintingOptions(CompositeCopy);
if (m_renderingCanBeAccelerated)
paintingOptions.m_orientationDescription.setImageOrientationEnum(m_videoSourceOrientation);
RefPtr<ImageGStreamer> gstImage = ImageGStreamer::createImage(m_sample.get());
if (!gstImage)
return;
if (Image* image = reinterpret_cast<Image*>(gstImage->image()))
context.drawImage(*image, rect, gstImage->rect(), paintingOptions);
}
#if USE(GSTREAMER_GL)
bool MediaPlayerPrivateGStreamerBase::copyVideoTextureToPlatformTexture(GraphicsContext3D* context, Platform3DObject outputTexture, GC3Denum outputTarget, GC3Dint level, GC3Denum internalFormat, GC3Denum format, GC3Denum type, bool premultiplyAlpha, bool flipY)
{
UNUSED_PARAM(context);
if (m_usingFallbackVideoSink)
return false;
if (premultiplyAlpha)
return false;
auto sampleLocker = holdLock(m_sampleMutex);
GstVideoInfo videoInfo;
if (!getSampleVideoInfo(m_sample.get(), videoInfo))
return false;
GstBuffer* buffer = gst_sample_get_buffer(m_sample.get());
GstVideoFrame videoFrame;
if (!gst_video_frame_map(&videoFrame, &videoInfo, buffer, static_cast<GstMapFlags>(GST_MAP_READ | GST_MAP_GL)))
return false;
IntSize size(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
if (m_videoSourceOrientation.usesWidthAsHeight())
size = size.transposedSize();
unsigned textureID = *reinterpret_cast<unsigned*>(videoFrame.data[0]);
if (!m_videoTextureCopier)
m_videoTextureCopier = std::make_unique<VideoTextureCopierGStreamer>(TEXTURE_COPIER_COLOR_CONVERT_FLAG);
bool copied = m_videoTextureCopier->copyVideoTextureToPlatformTexture(textureID, size, outputTexture, outputTarget, level, internalFormat, format, type, flipY, m_videoSourceOrientation);
gst_video_frame_unmap(&videoFrame);
return copied;
}
NativeImagePtr MediaPlayerPrivateGStreamerBase::nativeImageForCurrentTime()
{
#if USE(CAIRO) && ENABLE(ACCELERATED_2D_CANVAS)
if (m_usingFallbackVideoSink)
return nullptr;
auto sampleLocker = holdLock(m_sampleMutex);
GstVideoInfo videoInfo;
if (!getSampleVideoInfo(m_sample.get(), videoInfo))
return nullptr;
GstBuffer* buffer = gst_sample_get_buffer(m_sample.get());
GstVideoFrame videoFrame;
if (!gst_video_frame_map(&videoFrame, &videoInfo, buffer, static_cast<GstMapFlags>(GST_MAP_READ | GST_MAP_GL)))
return nullptr;
IntSize size(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
if (m_videoSourceOrientation.usesWidthAsHeight())
size = size.transposedSize();
GLContext* context = PlatformDisplay::sharedDisplayForCompositing().sharingGLContext();
context->makeContextCurrent();
if (!m_videoTextureCopier)
m_videoTextureCopier = std::make_unique<VideoTextureCopierGStreamer>(TEXTURE_COPIER_COLOR_CONVERT_FLAG);
unsigned textureID = *reinterpret_cast<unsigned*>(videoFrame.data[0]);
bool copied = m_videoTextureCopier->copyVideoTextureToPlatformTexture(textureID, size, 0, GraphicsContext3D::TEXTURE_2D, 0, GraphicsContext3D::RGBA, GraphicsContext3D::RGBA, GraphicsContext3D::UNSIGNED_BYTE, false, m_videoSourceOrientation);
gst_video_frame_unmap(&videoFrame);
if (!copied)
return nullptr;
return adoptRef(cairo_gl_surface_create_for_texture(context->cairoDevice(), CAIRO_CONTENT_COLOR_ALPHA, m_videoTextureCopier->resultTexture(), size.width(), size.height()));
#else
return nullptr;
#endif
}
#endif // USE(GSTREAMER_GL)
void MediaPlayerPrivateGStreamerBase::setVideoSourceOrientation(const ImageOrientation& orientation)
{
if (m_videoSourceOrientation == orientation)
return;
m_videoSourceOrientation = orientation;
}
bool MediaPlayerPrivateGStreamerBase::supportsFullscreen() const
{
return true;
}
PlatformMedia MediaPlayerPrivateGStreamerBase::platformMedia() const
{
return NoPlatformMedia;
}
MediaPlayer::MovieLoadType MediaPlayerPrivateGStreamerBase::movieLoadType() const
{
if (m_readyState == MediaPlayer::HaveNothing)
return MediaPlayer::Unknown;
if (isLiveStream())
return MediaPlayer::LiveStream;
return MediaPlayer::Download;
}
#if USE(GSTREAMER_GL)
GstElement* MediaPlayerPrivateGStreamerBase::createGLAppSink()
{
if (!webkitGstCheckVersion(1, 8, 0))
return nullptr;
GstElement* appsink = gst_element_factory_make("appsink", "webkit-gl-video-sink");
if (!appsink)
return nullptr;
g_object_set(appsink, "enable-last-sample", FALSE, "emit-signals", TRUE, "max-buffers", 1, nullptr);
g_signal_connect(appsink, "new-sample", G_CALLBACK(newSampleCallback), this);
g_signal_connect(appsink, "new-preroll", G_CALLBACK(newPrerollCallback), this);
GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(appsink, "sink"));
gst_pad_add_probe(pad.get(), GST_PAD_PROBE_TYPE_EVENT_FLUSH, [] (GstPad*, GstPadProbeInfo* info, gpointer userData) -> GstPadProbeReturn {
if (GST_EVENT_TYPE(GST_PAD_PROBE_INFO_EVENT(info)) != GST_EVENT_FLUSH_START)
return GST_PAD_PROBE_OK;
auto* player = static_cast<MediaPlayerPrivateGStreamerBase*>(userData);
player->flushCurrentBuffer();
return GST_PAD_PROBE_OK;
}, this, nullptr);
return appsink;
}
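// Builds a sink bin of glupload ! glcolorconvert ! appsink, with caps
// restricting the appsink input to GL memory in GST_GL_CAPS_FORMAT, and
// exposes the glupload sink pad as the bin's ghost "sink" pad.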
GstElement* MediaPlayerPrivateGStreamerBase::createVideoSinkGL()
{
if (!webkitGstCheckVersion(1, 8, 0))
return nullptr;
gboolean result = TRUE;
GstElement* videoSink = gst_bin_new(nullptr);
GstElement* upload = gst_element_factory_make("glupload", nullptr);
GstElement* colorconvert = gst_element_factory_make("glcolorconvert", nullptr);
GstElement* appsink = createGLAppSink();
if (!appsink || !upload || !colorconvert) {
GST_WARNING("Failed to create GstGL elements");
gst_object_unref(videoSink);
if (upload)
gst_object_unref(upload);
if (colorconvert)
gst_object_unref(colorconvert);
if (appsink)
gst_object_unref(appsink);
return nullptr;
}
gst_bin_add_many(GST_BIN(videoSink), upload, colorconvert, appsink, nullptr);
GRefPtr<GstCaps> caps = adoptGRef(gst_caps_from_string("video/x-raw(" GST_CAPS_FEATURE_MEMORY_GL_MEMORY "), format = (string) " GST_GL_CAPS_FORMAT));
result &= gst_element_link_pads(upload, "src", colorconvert, "sink");
result &= gst_element_link_pads_filtered(colorconvert, "src", appsink, "sink", caps.get());
GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(upload, "sink"));
gst_element_add_pad(videoSink, gst_ghost_pad_new("sink", pad.get()));
if (!result) {
GST_WARNING("Failed to link GstGL elements");
gst_object_unref(videoSink);
videoSink = nullptr;
}
return videoSink;
}
void MediaPlayerPrivateGStreamerBase::ensureGLVideoSinkContext()
{
if (!m_glDisplayElementContext)
m_glDisplayElementContext = adoptGRef(requestGLContext(GST_GL_DISPLAY_CONTEXT_TYPE));
if (m_glDisplayElementContext)
gst_element_set_context(m_videoSink.get(), m_glDisplayElementContext.get());
if (!m_glAppElementContext)
m_glAppElementContext = adoptGRef(requestGLContext("gst.gl.app_context"));
if (m_glAppElementContext)
gst_element_set_context(m_videoSink.get(), m_glAppElementContext.get());
}
#endif // USE(GSTREAMER_GL)
GstElement* MediaPlayerPrivateGStreamerBase::createVideoSink()
{
acceleratedRenderingStateChanged();
#if USE(GSTREAMER_GL)
if (m_renderingCanBeAccelerated)
m_videoSink = createVideoSinkGL();
#endif
if (!m_videoSink) {
m_usingFallbackVideoSink = true;
m_videoSink = webkitVideoSinkNew();
g_signal_connect_swapped(m_videoSink.get(), "repaint-requested", G_CALLBACK(repaintCallback), this);
g_signal_connect_swapped(m_videoSink.get(), "repaint-cancelled", G_CALLBACK(repaintCancelledCallback), this);
}
GstElement* videoSink = nullptr;
m_fpsSink = gst_element_factory_make("fpsdisplaysink", "sink");
if (m_fpsSink) {
g_object_set(m_fpsSink.get(), "silent", TRUE , nullptr);
// Turn off text overlay unless logging is enabled.
#if LOG_DISABLED
g_object_set(m_fpsSink.get(), "text-overlay", FALSE , nullptr);
#else
if (!isLogChannelEnabled("Media"))
g_object_set(m_fpsSink.get(), "text-overlay", FALSE , nullptr);
#endif // LOG_DISABLED
if (g_object_class_find_property(G_OBJECT_GET_CLASS(m_fpsSink.get()), "video-sink")) {
g_object_set(m_fpsSink.get(), "video-sink", m_videoSink.get(), nullptr);
videoSink = m_fpsSink.get();
} else
m_fpsSink = nullptr;
}
if (!m_fpsSink)
videoSink = m_videoSink.get();
ASSERT(videoSink);
return videoSink;
}
void MediaPlayerPrivateGStreamerBase::setStreamVolumeElement(GstStreamVolume* volume)
{
ASSERT(!m_volumeElement);
m_volumeElement = volume;
// We don't set the initial volume because we trust the sink to keep it for us. See
// https://bugs.webkit.org/show_bug.cgi?id=118974 for more information.
if (!m_player->platformVolumeConfigurationRequired()) {
GST_DEBUG_OBJECT(pipeline(), "Setting stream volume to %f", m_player->volume());
g_object_set(m_volumeElement.get(), "volume", m_player->volume(), nullptr);
} else
GST_DEBUG_OBJECT(pipeline(), "Not setting stream volume, trusting system one");
GST_DEBUG_OBJECT(pipeline(), "Setting stream muted %s", toString(m_player->muted()).utf8().data());
g_object_set(m_volumeElement.get(), "mute", m_player->muted(), nullptr);
g_signal_connect_swapped(m_volumeElement.get(), "notify::volume", G_CALLBACK(volumeChangedCallback), this);
g_signal_connect_swapped(m_volumeElement.get(), "notify::mute", G_CALLBACK(muteChangedCallback), this);
}
unsigned MediaPlayerPrivateGStreamerBase::decodedFrameCount() const
{
guint64 decodedFrames = 0;
if (m_fpsSink)
g_object_get(m_fpsSink.get(), "frames-rendered", &decodedFrames, nullptr);
return static_cast<unsigned>(decodedFrames);
}
unsigned MediaPlayerPrivateGStreamerBase::droppedFrameCount() const
{
guint64 framesDropped = 0;
if (m_fpsSink)
g_object_get(m_fpsSink.get(), "frames-dropped", &framesDropped, nullptr);
return static_cast<unsigned>(framesDropped);
}
unsigned MediaPlayerPrivateGStreamerBase::audioDecodedByteCount() const
{
GstQuery* query = gst_query_new_position(GST_FORMAT_BYTES);
gint64 position = 0;
if (audioSink() && gst_element_query(audioSink(), query))
gst_query_parse_position(query, nullptr, &position);
gst_query_unref(query);
return static_cast<unsigned>(position);
}
unsigned MediaPlayerPrivateGStreamerBase::videoDecodedByteCount() const
{
GstQuery* query = gst_query_new_position(GST_FORMAT_BYTES);
gint64 position = 0;
if (gst_element_query(m_videoSink.get(), query))
gst_query_parse_position(query, nullptr, &position);
gst_query_unref(query);
return static_cast<unsigned>(position);
}
#if ENABLE(ENCRYPTED_MEDIA)
void MediaPlayerPrivateGStreamerBase::cdmInstanceAttached(CDMInstance& instance)
{
ASSERT(!m_cdmInstance);
m_cdmInstance = &instance;
GST_DEBUG_OBJECT(pipeline(), "CDM instance %p set", m_cdmInstance.get());
m_protectionCondition.notifyAll();
}
void MediaPlayerPrivateGStreamerBase::cdmInstanceDetached(CDMInstance& instance)
{
#ifdef NDEBUG
UNUSED_PARAM(instance);
#endif
ASSERT(m_cdmInstance.get() == &instance);
GST_DEBUG_OBJECT(pipeline(), "detaching CDM instance %p", m_cdmInstance.get());
m_cdmInstance = nullptr;
m_protectionCondition.notifyAll();
}
void MediaPlayerPrivateGStreamerBase::attemptToDecryptWithInstance(CDMInstance& instance)
{
ASSERT(m_cdmInstance.get() == &instance);
GST_TRACE("instance %p, current stored %p", &instance, m_cdmInstance.get());
attemptToDecryptWithLocalInstance();
}
void MediaPlayerPrivateGStreamerBase::attemptToDecryptWithLocalInstance()
{
// FIXME.
}
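// Pushes the decryption key to the decryptor by sending a custom
// out-of-band downstream "drm-cipher" event through the pipeline; the
// ClearKey decryptor element is expected to pick it up.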
void MediaPlayerPrivateGStreamerBase::dispatchDecryptionKey(GstBuffer* buffer)
{
bool eventHandled = gst_element_send_event(pipeline(), gst_event_new_custom(GST_EVENT_CUSTOM_DOWNSTREAM_OOB,
gst_structure_new("drm-cipher", "key", GST_TYPE_BUFFER, buffer, nullptr)));
m_needToResendCredentials = !m_handledProtectionEvents.isEmpty();
GST_TRACE("emitted decryption cipher key on pipeline, event handled %s, need to resend credentials %s", boolForPrinting(eventHandled), boolForPrinting(m_needToResendCredentials));
}
void MediaPlayerPrivateGStreamerBase::dispatchCDMInstance()
{
// This function dispatches the CDMInstance to the GStreamer playback pipeline.
if (m_cdmInstance)
m_player->attemptToDecryptWithInstance(const_cast<CDMInstance&>(*m_cdmInstance.get()));
}
void MediaPlayerPrivateGStreamerBase::handleProtectionEvent(GstEvent* event)
{
if (m_handledProtectionEvents.contains(GST_EVENT_SEQNUM(event))) {
GST_DEBUG_OBJECT(pipeline(), "event %u already handled", GST_EVENT_SEQNUM(event));
m_handledProtectionEvents.remove(GST_EVENT_SEQNUM(event));
if (m_needToResendCredentials) {
GST_DEBUG_OBJECT(pipeline(), "resending credentials");
attemptToDecryptWithLocalInstance();
}
return;
}
const gchar* eventKeySystemId = nullptr;
gst_event_parse_protection(event, &eventKeySystemId, nullptr, nullptr);
GST_WARNING("FIXME: unhandled protection event for %s", eventKeySystemId);
ASSERT_NOT_REACHED();
}
#endif
bool MediaPlayerPrivateGStreamerBase::supportsKeySystem(const String& keySystem, const String& mimeType)
{
bool result = false;
#if ENABLE(ENCRYPTED_MEDIA)
result = GStreamerEMEUtilities::isClearKeyKeySystem(keySystem);
#endif
GST_DEBUG("checking for KeySystem support with %s and type %s: %s", keySystem.utf8().data(), mimeType.utf8().data(), boolForPrinting(result));
return result;
}
MediaPlayer::SupportsType MediaPlayerPrivateGStreamerBase::extendedSupportsType(const MediaEngineSupportParameters& parameters, MediaPlayer::SupportsType result)
{
UNUSED_PARAM(parameters);
return result;
}
}
#endif // USE(GSTREAMER)