/*
 * Copyright (C) 2007, 2009 Apple Inc. All rights reserved.
 * Copyright (C) 2007 Collabora Ltd. All rights reserved.
 * Copyright (C) 2007 Alp Toker <alp@atoker.com>
 * Copyright (C) 2009 Gustavo Noronha Silva <gns@gnome.org>
 * Copyright (C) 2009, 2010 Igalia S.L
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public License
 * along with this library; see the file COPYING.LIB. If not, write to
 * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
 * Boston, MA 02110-1301, USA.
22 */ 23 24#include "config.h" 25#include "MediaPlayerPrivateGStreamerBase.h" 26 27#if ENABLE(VIDEO) && USE(GSTREAMER) 28 29#include "ColorSpace.h" 30#include "GStreamerUtilities.h" 31#include "GraphicsContext.h" 32#include "GraphicsTypes.h" 33#include "ImageGStreamer.h" 34#include "ImageOrientation.h" 35#include "IntRect.h" 36#include "MediaPlayer.h" 37#include "NotImplemented.h" 38#include "VideoSinkGStreamer.h" 39#include "WebKitWebSourceGStreamer.h" 40#include <gst/gst.h> 41#include <wtf/gobject/GMutexLocker.h> 42#include <wtf/text/CString.h> 43 44#include <gst/audio/streamvolume.h> 45#include <gst/video/gstvideometa.h> 46 47#if GST_CHECK_VERSION(1, 1, 0) && USE(TEXTURE_MAPPER_GL) 48#include "TextureMapperGL.h" 49#endif 50 51GST_DEBUG_CATEGORY(webkit_media_player_debug); 52#define GST_CAT_DEFAULT webkit_media_player_debug 53 54using namespace std; 55 56namespace WebCore { 57 58static int greatestCommonDivisor(int a, int b) 59{ 60 while (b) { 61 int temp = a; 62 a = b; 63 b = temp % b; 64 } 65 66 return ABS(a); 67} 68 69static void mediaPlayerPrivateVolumeChangedCallback(GObject*, GParamSpec*, MediaPlayerPrivateGStreamerBase* player) 70{ 71 // This is called when m_volumeElement receives the notify::volume signal. 72 LOG_MEDIA_MESSAGE("Volume changed to: %f", player->volume()); 73 player->volumeChanged(); 74} 75 76static void mediaPlayerPrivateMuteChangedCallback(GObject*, GParamSpec*, MediaPlayerPrivateGStreamerBase* player) 77{ 78 // This is called when m_volumeElement receives the notify::mute signal. 
79 player->muteChanged(); 80} 81 82static void mediaPlayerPrivateRepaintCallback(WebKitVideoSink*, GstBuffer *buffer, MediaPlayerPrivateGStreamerBase* playerPrivate) 83{ 84 playerPrivate->triggerRepaint(buffer); 85} 86 87MediaPlayerPrivateGStreamerBase::MediaPlayerPrivateGStreamerBase(MediaPlayer* player) 88 : m_player(player) 89 , m_fpsSink(0) 90 , m_readyState(MediaPlayer::HaveNothing) 91 , m_networkState(MediaPlayer::Empty) 92 , m_buffer(0) 93 , m_repaintHandler(0) 94 , m_volumeSignalHandler(0) 95 , m_muteSignalHandler(0) 96{ 97 m_bufferMutex = new GMutex; 98 g_mutex_init(m_bufferMutex); 99} 100 101MediaPlayerPrivateGStreamerBase::~MediaPlayerPrivateGStreamerBase() 102{ 103 if (m_repaintHandler) { 104 g_signal_handler_disconnect(m_webkitVideoSink.get(), m_repaintHandler); 105 m_repaintHandler = 0; 106 } 107 108 g_mutex_clear(m_bufferMutex); 109 delete m_bufferMutex; 110 111 if (m_buffer) 112 gst_buffer_unref(m_buffer); 113 m_buffer = 0; 114 115 m_player = 0; 116 117 if (m_volumeSignalHandler) { 118 g_signal_handler_disconnect(m_volumeElement.get(), m_volumeSignalHandler); 119 m_volumeSignalHandler = 0; 120 } 121 122 if (m_muteSignalHandler) { 123 g_signal_handler_disconnect(m_volumeElement.get(), m_muteSignalHandler); 124 m_muteSignalHandler = 0; 125 } 126} 127 128// Returns the size of the video 129IntSize MediaPlayerPrivateGStreamerBase::naturalSize() const 130{ 131 if (!hasVideo()) 132 return IntSize(); 133 134 if (!m_videoSize.isEmpty()) 135 return m_videoSize; 136 137 GRefPtr<GstCaps> caps = currentVideoSinkCaps(); 138 if (!caps) 139 return IntSize(); 140 141 142 // TODO: handle possible clean aperture data. See 143 // https://bugzilla.gnome.org/show_bug.cgi?id=596571 144 // TODO: handle possible transformation matrix. See 145 // https://bugzilla.gnome.org/show_bug.cgi?id=596326 146 147 // Get the video PAR and original size, if this fails the 148 // video-sink has likely not yet negotiated its caps. 
149 int pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride; 150 IntSize originalSize; 151 GstVideoFormat format; 152 if (!getVideoSizeAndFormatFromCaps(caps.get(), originalSize, format, pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride)) 153 return IntSize(); 154 155 LOG_MEDIA_MESSAGE("Original video size: %dx%d", originalSize.width(), originalSize.height()); 156 LOG_MEDIA_MESSAGE("Pixel aspect ratio: %d/%d", pixelAspectRatioNumerator, pixelAspectRatioDenominator); 157 158 // Calculate DAR based on PAR and video size. 159 int displayWidth = originalSize.width() * pixelAspectRatioNumerator; 160 int displayHeight = originalSize.height() * pixelAspectRatioDenominator; 161 162 // Divide display width and height by their GCD to avoid possible overflows. 163 int displayAspectRatioGCD = greatestCommonDivisor(displayWidth, displayHeight); 164 displayWidth /= displayAspectRatioGCD; 165 displayHeight /= displayAspectRatioGCD; 166 167 // Apply DAR to original video size. This is the same behavior as in xvimagesink's setcaps function. 
168 guint64 width = 0, height = 0; 169 if (!(originalSize.height() % displayHeight)) { 170 LOG_MEDIA_MESSAGE("Keeping video original height"); 171 width = gst_util_uint64_scale_int(originalSize.height(), displayWidth, displayHeight); 172 height = static_cast<guint64>(originalSize.height()); 173 } else if (!(originalSize.width() % displayWidth)) { 174 LOG_MEDIA_MESSAGE("Keeping video original width"); 175 height = gst_util_uint64_scale_int(originalSize.width(), displayHeight, displayWidth); 176 width = static_cast<guint64>(originalSize.width()); 177 } else { 178 LOG_MEDIA_MESSAGE("Approximating while keeping original video height"); 179 width = gst_util_uint64_scale_int(originalSize.height(), displayWidth, displayHeight); 180 height = static_cast<guint64>(originalSize.height()); 181 } 182 183 LOG_MEDIA_MESSAGE("Natural size: %" G_GUINT64_FORMAT "x%" G_GUINT64_FORMAT, width, height); 184 m_videoSize = IntSize(static_cast<int>(width), static_cast<int>(height)); 185 return m_videoSize; 186} 187 188void MediaPlayerPrivateGStreamerBase::setVolume(float volume) 189{ 190 if (!m_volumeElement) 191 return; 192 193 LOG_MEDIA_MESSAGE("Setting volume: %f", volume); 194 gst_stream_volume_set_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_CUBIC, static_cast<double>(volume)); 195} 196 197float MediaPlayerPrivateGStreamerBase::volume() const 198{ 199 if (!m_volumeElement) 200 return 0; 201 202 return gst_stream_volume_get_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_CUBIC); 203} 204 205 206void MediaPlayerPrivateGStreamerBase::notifyPlayerOfVolumeChange() 207{ 208 if (!m_player || !m_volumeElement) 209 return; 210 double volume; 211 volume = gst_stream_volume_get_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_CUBIC); 212 // get_volume() can return values superior to 1.0 if the user 213 // applies software user gain via third party application (GNOME 214 // volume control for instance). 
215 volume = CLAMP(volume, 0.0, 1.0); 216 m_player->volumeChanged(static_cast<float>(volume)); 217} 218 219void MediaPlayerPrivateGStreamerBase::volumeChanged() 220{ 221 m_volumeTimerHandler.schedule("[WebKit] MediaPlayerPrivateGStreamerBase::volumeChanged", std::function<void()>(std::bind(&MediaPlayerPrivateGStreamerBase::notifyPlayerOfVolumeChange, this))); 222} 223 224MediaPlayer::NetworkState MediaPlayerPrivateGStreamerBase::networkState() const 225{ 226 return m_networkState; 227} 228 229MediaPlayer::ReadyState MediaPlayerPrivateGStreamerBase::readyState() const 230{ 231 return m_readyState; 232} 233 234void MediaPlayerPrivateGStreamerBase::sizeChanged() 235{ 236 notImplemented(); 237} 238 239void MediaPlayerPrivateGStreamerBase::setMuted(bool muted) 240{ 241 if (!m_volumeElement) 242 return; 243 244 g_object_set(m_volumeElement.get(), "mute", muted, NULL); 245} 246 247bool MediaPlayerPrivateGStreamerBase::muted() const 248{ 249 if (!m_volumeElement) 250 return false; 251 252 bool muted; 253 g_object_get(m_volumeElement.get(), "mute", &muted, NULL); 254 return muted; 255} 256 257void MediaPlayerPrivateGStreamerBase::notifyPlayerOfMute() 258{ 259 if (!m_player || !m_volumeElement) 260 return; 261 262 gboolean muted; 263 g_object_get(m_volumeElement.get(), "mute", &muted, NULL); 264 m_player->muteChanged(static_cast<bool>(muted)); 265} 266 267void MediaPlayerPrivateGStreamerBase::muteChanged() 268{ 269 m_muteTimerHandler.schedule("[WebKit] MediaPlayerPrivateGStreamerBase::muteChanged", std::function<void()>(std::bind(&MediaPlayerPrivateGStreamerBase::notifyPlayerOfMute, this))); 270} 271 272#if USE(TEXTURE_MAPPER_GL) && !USE(COORDINATED_GRAPHICS) 273PassRefPtr<BitmapTexture> MediaPlayerPrivateGStreamerBase::updateTexture(TextureMapper* textureMapper) 274{ 275 GMutexLocker lock(m_bufferMutex); 276 if (!m_buffer) 277 return nullptr; 278 279 GRefPtr<GstCaps> caps = currentVideoSinkCaps(); 280 if (!caps) 281 return nullptr; 282 283 GstVideoInfo videoInfo; 284 
gst_video_info_init(&videoInfo); 285 if (!gst_video_info_from_caps(&videoInfo, caps.get())) 286 return nullptr; 287 288 IntSize size = IntSize(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo)); 289 RefPtr<BitmapTexture> texture = textureMapper->acquireTextureFromPool(size, GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? BitmapTexture::SupportsAlpha : BitmapTexture::NoFlag); 290 291#if GST_CHECK_VERSION(1, 1, 0) 292 GstVideoGLTextureUploadMeta* meta; 293 if ((meta = gst_buffer_get_video_gl_texture_upload_meta(m_buffer))) { 294 if (meta->n_textures == 1) { // BRGx & BGRA formats use only one texture. 295 const BitmapTextureGL* textureGL = static_cast<const BitmapTextureGL*>(texture.get()); 296 guint ids[4] = { textureGL->id(), 0, 0, 0 }; 297 298 if (gst_video_gl_texture_upload_meta_upload(meta, ids)) 299 return texture; 300 } 301 } 302#endif 303 304 // Right now the TextureMapper only supports chromas with one plane 305 ASSERT(GST_VIDEO_INFO_N_PLANES(&videoInfo) == 1); 306 307 GstVideoFrame videoFrame; 308 if (!gst_video_frame_map(&videoFrame, &videoInfo, m_buffer, GST_MAP_READ)) 309 return nullptr; 310 311 int stride = GST_VIDEO_FRAME_PLANE_STRIDE(&videoFrame, 0); 312 const void* srcData = GST_VIDEO_FRAME_PLANE_DATA(&videoFrame, 0); 313 texture->updateContents(srcData, WebCore::IntRect(WebCore::IntPoint(0, 0), size), WebCore::IntPoint(0, 0), stride, BitmapTexture::UpdateCannotModifyOriginalImageData); 314 gst_video_frame_unmap(&videoFrame); 315 316 return texture; 317} 318#endif 319 320void MediaPlayerPrivateGStreamerBase::triggerRepaint(GstBuffer* buffer) 321{ 322 g_return_if_fail(GST_IS_BUFFER(buffer)); 323 324 { 325 GMutexLocker lock(m_bufferMutex); 326 gst_buffer_replace(&m_buffer, buffer); 327 } 328 329#if USE(TEXTURE_MAPPER_GL) && !USE(COORDINATED_GRAPHICS) 330 if (supportsAcceleratedRendering() && m_player->mediaPlayerClient()->mediaPlayerRenderingCanBeAccelerated(m_player) && client()) { 331 client()->setPlatformLayerNeedsDisplay(); 332 return; 333 
} 334#endif 335 336 m_player->repaint(); 337} 338 339void MediaPlayerPrivateGStreamerBase::setSize(const IntSize& size) 340{ 341 m_size = size; 342} 343 344void MediaPlayerPrivateGStreamerBase::paint(GraphicsContext* context, const IntRect& rect) 345{ 346#if USE(TEXTURE_MAPPER_GL) && !USE(COORDINATED_GRAPHICS) 347 if (client()) 348 return; 349#endif 350 351 if (context->paintingDisabled()) 352 return; 353 354 if (!m_player->visible()) 355 return; 356 357 GMutexLocker lock(m_bufferMutex); 358 if (!m_buffer) 359 return; 360 361 GRefPtr<GstCaps> caps = currentVideoSinkCaps(); 362 if (!caps) 363 return; 364 365 RefPtr<ImageGStreamer> gstImage = ImageGStreamer::createImage(m_buffer, caps.get()); 366 if (!gstImage) 367 return; 368 369 context->drawImage(reinterpret_cast<Image*>(gstImage->image().get()), ColorSpaceSRGB, 370 rect, gstImage->rect(), CompositeCopy); 371} 372 373#if USE(TEXTURE_MAPPER_GL) && !USE(COORDINATED_GRAPHICS) 374void MediaPlayerPrivateGStreamerBase::paintToTextureMapper(TextureMapper* textureMapper, const FloatRect& targetRect, const TransformationMatrix& matrix, float opacity) 375{ 376 if (textureMapper->accelerationMode() != TextureMapper::OpenGLMode) 377 return; 378 379 if (!m_player->visible()) 380 return; 381 382 RefPtr<BitmapTexture> texture = updateTexture(textureMapper); 383 if (texture) 384 textureMapper->drawTexture(*texture.get(), targetRect, matrix, opacity); 385} 386#endif 387 388bool MediaPlayerPrivateGStreamerBase::supportsFullscreen() const 389{ 390 return true; 391} 392 393PlatformMedia MediaPlayerPrivateGStreamerBase::platformMedia() const 394{ 395 return NoPlatformMedia; 396} 397 398MediaPlayer::MovieLoadType MediaPlayerPrivateGStreamerBase::movieLoadType() const 399{ 400 if (m_readyState == MediaPlayer::HaveNothing) 401 return MediaPlayer::Unknown; 402 403 if (isLiveStream()) 404 return MediaPlayer::LiveStream; 405 406 return MediaPlayer::Download; 407} 408 409GRefPtr<GstCaps> 
MediaPlayerPrivateGStreamerBase::currentVideoSinkCaps() const 410{ 411 if (!m_webkitVideoSink) 412 return nullptr; 413 414 GRefPtr<GstCaps> currentCaps; 415 g_object_get(G_OBJECT(m_webkitVideoSink.get()), "current-caps", ¤tCaps.outPtr(), NULL); 416 return currentCaps; 417} 418 419GstElement* MediaPlayerPrivateGStreamerBase::createVideoSink() 420{ 421 ASSERT(initializeGStreamer()); 422 423 GstElement* videoSink = nullptr; 424 m_webkitVideoSink = webkitVideoSinkNew(); 425 426 m_repaintHandler = g_signal_connect(m_webkitVideoSink.get(), "repaint-requested", G_CALLBACK(mediaPlayerPrivateRepaintCallback), this); 427 428 m_fpsSink = gst_element_factory_make("fpsdisplaysink", "sink"); 429 if (m_fpsSink) { 430 g_object_set(m_fpsSink.get(), "silent", TRUE , nullptr); 431 432 // Turn off text overlay unless logging is enabled. 433#if LOG_DISABLED 434 g_object_set(m_fpsSink.get(), "text-overlay", FALSE , nullptr); 435#else 436 if (!isLogChannelEnabled("Media")) 437 g_object_set(m_fpsSink.get(), "text-overlay", FALSE , nullptr); 438#endif // LOG_DISABLED 439 440 if (g_object_class_find_property(G_OBJECT_GET_CLASS(m_fpsSink.get()), "video-sink")) { 441 g_object_set(m_fpsSink.get(), "video-sink", m_webkitVideoSink.get(), nullptr); 442 videoSink = m_fpsSink.get(); 443 } else 444 m_fpsSink = nullptr; 445 } 446 447 if (!m_fpsSink) 448 videoSink = m_webkitVideoSink.get(); 449 450 ASSERT(videoSink); 451 452 return videoSink; 453} 454 455void MediaPlayerPrivateGStreamerBase::setStreamVolumeElement(GstStreamVolume* volume) 456{ 457 ASSERT(!m_volumeElement); 458 m_volumeElement = volume; 459 460 // We don't set the initial volume because we trust the sink to keep it for us. See 461 // https://bugs.webkit.org/show_bug.cgi?id=118974 for more information. 
462 if (!m_player->platformVolumeConfigurationRequired()) { 463 LOG_MEDIA_MESSAGE("Setting stream volume to %f", m_player->volume()); 464 g_object_set(m_volumeElement.get(), "volume", m_player->volume(), NULL); 465 } else 466 LOG_MEDIA_MESSAGE("Not setting stream volume, trusting system one"); 467 468 LOG_MEDIA_MESSAGE("Setting stream muted %d", m_player->muted()); 469 g_object_set(m_volumeElement.get(), "mute", m_player->muted(), NULL); 470 471 m_volumeSignalHandler = g_signal_connect(m_volumeElement.get(), "notify::volume", G_CALLBACK(mediaPlayerPrivateVolumeChangedCallback), this); 472 m_muteSignalHandler = g_signal_connect(m_volumeElement.get(), "notify::mute", G_CALLBACK(mediaPlayerPrivateMuteChangedCallback), this); 473} 474 475unsigned MediaPlayerPrivateGStreamerBase::decodedFrameCount() const 476{ 477 guint64 decodedFrames = 0; 478 if (m_fpsSink) 479 g_object_get(m_fpsSink.get(), "frames-rendered", &decodedFrames, NULL); 480 return static_cast<unsigned>(decodedFrames); 481} 482 483unsigned MediaPlayerPrivateGStreamerBase::droppedFrameCount() const 484{ 485 guint64 framesDropped = 0; 486 if (m_fpsSink) 487 g_object_get(m_fpsSink.get(), "frames-dropped", &framesDropped, NULL); 488 return static_cast<unsigned>(framesDropped); 489} 490 491unsigned MediaPlayerPrivateGStreamerBase::audioDecodedByteCount() const 492{ 493 GstQuery* query = gst_query_new_position(GST_FORMAT_BYTES); 494 gint64 position = 0; 495 496 if (audioSink() && gst_element_query(audioSink(), query)) 497 gst_query_parse_position(query, 0, &position); 498 499 gst_query_unref(query); 500 return static_cast<unsigned>(position); 501} 502 503unsigned MediaPlayerPrivateGStreamerBase::videoDecodedByteCount() const 504{ 505 GstQuery* query = gst_query_new_position(GST_FORMAT_BYTES); 506 gint64 position = 0; 507 508 if (gst_element_query(m_webkitVideoSink.get(), query)) 509 gst_query_parse_position(query, 0, &position); 510 511 gst_query_unref(query); 512 return static_cast<unsigned>(position); 513} 514 
515} 516 517#endif // USE(GSTREAMER) 518