1/* 2 * Copyright (C) 2011-2014 Apple Inc. All rights reserved. 3 * 4 * Redistribution and use in source and binary forms, with or without 5 * modification, are permitted provided that the following conditions 6 * are met: 7 * 1. Redistributions of source code must retain the above copyright 8 * notice, this list of conditions and the following disclaimer. 9 * 2. Redistributions in binary form must reproduce the above copyright 10 * notice, this list of conditions and the following disclaimer in the 11 * documentation and/or other materials provided with the distribution. 12 * 13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY 14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR 16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR 17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, 18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, 19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR 20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY 21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24 */ 25 26#include "config.h" 27 28#if PLATFORM(WIN) && ENABLE(VIDEO) 29 30#if USE(AVFOUNDATION) 31 32#include "MediaPlayerPrivateAVFoundationCF.h" 33 34#include "ApplicationCacheResource.h" 35#include "CDMSessionAVFoundationCF.h" 36#include "COMPtr.h" 37#include "FloatConversion.h" 38#include "FrameView.h" 39#include "GraphicsContext.h" 40#if HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT) 41#include "InbandTextTrackPrivateAVCF.h" 42#else 43#include "InbandTextTrackPrivateLegacyAVCF.h" 44#endif 45#include "URL.h" 46#include "Logging.h" 47#include "PlatformCALayerWin.h" 48#include "SoftLinking.h" 49#include "TimeRanges.h" 50#include "WebCoreAVCFResourceLoader.h" 51 52#include <AVFoundationCF/AVCFPlayerItem.h> 53#if HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT) 54#include <AVFoundationCF/AVCFPlayerItemLegibleOutput.h> 55#endif 56#include <AVFoundationCF/AVCFPlayerLayer.h> 57#if HAVE(AVFOUNDATION_LOADER_DELEGATE) || HAVE(ENCRYPTED_MEDIA_V2) 58#include <AVFoundationCF/AVCFAssetResourceLoader.h> 59#endif 60#include <AVFoundationCF/AVFoundationCF.h> 61#include <CoreMedia/CoreMedia.h> 62#include "WebKitQuartzCoreAdditions/WKCACFTypes.h" 63#include <delayimp.h> 64#include <dispatch/dispatch.h> 65#if ENABLE(ENCRYPTED_MEDIA_V2) 66#include <runtime/DataView.h> 67#include <runtime/Uint16Array.h> 68#endif 69#include <wtf/HashMap.h> 70#include <wtf/Threading.h> 71#include <wtf/text/CString.h> 72#include <wtf/text/StringView.h> 73 74// The softlink header files must be included after the AVCF and CoreMedia header files. 75#include "AVFoundationCFSoftLinking.h" 76#include "CoreMediaSoftLinking.h" 77 78// We don't bother softlinking against libdispatch since it's already been loaded by AAS. 
// Link the appropriate flavor of libdispatch directly (see comment above:
// it is already loaded by AAS, so soft-linking is unnecessary).
#ifdef DEBUG_ALL
#pragma comment(lib, "libdispatch_debug.lib")
#else
#pragma comment(lib, "libdispatch.lib")
#endif

using namespace std;

namespace WebCore {

class LayerClient;

// AVFWrapper owns the AVFoundationCF objects (player, asset, player item, video
// layer, image generator, ...) on behalf of MediaPlayerPrivateAVFoundationCF.
// AVCF callbacks arrive asynchronously on a dispatch queue, so callbacks carry an
// integer object ID (resolved through a global map) instead of a raw pointer, and
// the wrapper is torn down via scheduleDisconnectAndDelete() on that same queue so
// that no in-flight callback can touch a deleted wrapper.
class AVFWrapper {
public:
    AVFWrapper(MediaPlayerPrivateAVFoundationCF*);
    ~AVFWrapper();

    // Schedules disconnectAndDeleteAVFWrapper() on the notification queue; the
    // owner must drop its pointer to this object right after calling it.
    void scheduleDisconnectAndDelete();

    void createAVCFVideoLayer();
    void destroyVideoLayer();
    PlatformLayer* platformLayer();

    CACFLayerRef caVideoLayer() { return m_caVideoLayer.get(); }
    PlatformLayer* videoLayerWrapper() { return m_videoLayerWrapper ? m_videoLayerWrapper->platformLayer() : 0; };
    void setVideoLayerNeedsCommit();
    void setVideoLayerHidden(bool);

    void createImageGenerator();
    void destroyImageGenerator();
    RetainPtr<CGImageRef> createImageForTimeInRect(float, const IntRect&);

    void createAssetForURL(const String& url, bool inheritURI);
    void setAsset(AVCFURLAssetRef);

    void createPlayer(IDirect3DDevice9*);
    void createPlayerItem();

    void checkPlayability();
    void beginLoadingMetadata();

    void seekToTime(double, double, double);
    void updateVideoLayerGravity();

    void setCurrentTextTrack(InbandTextTrackPrivateAVF*);
    InbandTextTrackPrivateAVF* currentTextTrack() const { return m_currentTextTrack; }

    // Static callbacks invoked by AVCF/CF on background queues; each receives this
    // wrapper's object ID as its opaque context (see callbackContext()).
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    static void legibleOutputCallback(void* context, AVCFPlayerItemLegibleOutputRef, CFArrayRef attributedString, CFArrayRef nativeSampleBuffers, CMTime itemTime);
    static void processCue(void* context);
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    static Boolean resourceLoaderShouldWaitForLoadingOfRequestedResource(AVCFAssetResourceLoaderRef, AVCFAssetResourceLoadingRequestRef, void* context);
#endif
    static void loadMetadataCompletionCallback(AVCFAssetRef, void*);
    static void loadPlayableCompletionCallback(AVCFAssetRef, void*);
    static void periodicTimeObserverCallback(AVCFPlayerRef, CMTime, void*);
    static void seekCompletedCallback(AVCFPlayerItemRef, Boolean, void*);
    static void notificationCallback(CFNotificationCenterRef, void*, CFStringRef, const void*, CFDictionaryRef);
    static void processNotification(void* context);

    inline AVCFPlayerLayerRef videoLayer() const { return (AVCFPlayerLayerRef)m_avCFVideoLayer.get(); }
    inline AVCFPlayerRef avPlayer() const { return (AVCFPlayerRef)m_avPlayer.get(); }
    inline AVCFURLAssetRef avAsset() const { return (AVCFURLAssetRef)m_avAsset.get(); }
    inline AVCFPlayerItemRef avPlayerItem() const { return (AVCFPlayerItemRef)m_avPlayerItem.get(); }
    inline AVCFPlayerObserverRef timeObserver() const { return (AVCFPlayerObserverRef)m_timeObserver.get(); }
    inline AVCFAssetImageGeneratorRef imageGenerator() const { return m_imageGenerator.get(); }
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    inline AVCFPlayerItemLegibleOutputRef legibleOutput() const { return m_legibleOutput.get(); }
    AVCFMediaSelectionGroupRef safeMediaSelectionGroupForLegibleMedia() const;
#endif
    inline dispatch_queue_t dispatchQueue() const { return m_notificationQueue; }

#if ENABLE(ENCRYPTED_MEDIA_V2)
    RetainPtr<AVCFAssetResourceLoadingRequestRef> takeRequestForKeyURI(const String&);
#endif

private:
    // The per-wrapper object ID doubles as the opaque context passed to AVCF.
    inline void* callbackContext() const { return reinterpret_cast<void*>(m_objectID); }

    // Global ID -> wrapper map used to validate callback contexts; guarded by mapLock().
    static Mutex& mapLock();
    static HashMap<uintptr_t, AVFWrapper*>& map();
    static AVFWrapper* avfWrapperForCallbackContext(void*);
    void addToMap();
    void removeFromMap() const;
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    bool shouldWaitForLoadingOfResource(AVCFAssetResourceLoadingRequestRef avRequest);
    static void processShouldWaitForLoadingOfResource(void* context);
#endif

    static void disconnectAndDeleteAVFWrapper(void*);

    static uintptr_t s_nextAVFWrapperObjectID;
    uintptr_t m_objectID;

    MediaPlayerPrivateAVFoundationCF* m_owner;

    RetainPtr<AVCFPlayerRef> m_avPlayer;
    RetainPtr<AVCFURLAssetRef> m_avAsset;
    RetainPtr<AVCFPlayerItemRef> m_avPlayerItem;
    RetainPtr<AVCFPlayerLayerRef> m_avCFVideoLayer;
    RetainPtr<AVCFPlayerObserverRef> m_timeObserver;
    RetainPtr<AVCFAssetImageGeneratorRef> m_imageGenerator;
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    RetainPtr<AVCFPlayerItemLegibleOutputRef> m_legibleOutput;
    RetainPtr<AVCFMediaSelectionGroupRef> m_selectionGroup;
#endif

    // Serial queue on which AVCF notifications are processed and on which this
    // object is ultimately destroyed (see scheduleDisconnectAndDelete()).
    dispatch_queue_t m_notificationQueue;

    mutable RetainPtr<CACFLayerRef> m_caVideoLayer;
    RefPtr<PlatformCALayer> m_videoLayerWrapper;

    OwnPtr<LayerClient> m_layerClient;
    COMPtr<IDirect3DDevice9Ex> m_d3dDevice;

    InbandTextTrackPrivateAVF* m_currentTextTrack;

#if ENABLE(ENCRYPTED_MEDIA_V2)
    HashMap<String, RetainPtr<AVCFAssetResourceLoadingRequestRef>> m_keyURIToRequestMap;
#endif
};

uintptr_t AVFWrapper::s_nextAVFWrapperObjectID;

// PlatformCALayerClient that relays layout changes of the video layer wrapper
// back to its owning AVFWrapper (layout handler implemented out-of-line).
class LayerClient : public PlatformCALayerClient {
public:
    LayerClient(AVFWrapper* parent) : m_parent(parent) { }
    virtual ~LayerClient() { m_parent = 0; }

private:
    virtual void platformCALayerLayoutSublayersOfLayer(PlatformCALayer*);
    virtual bool platformCALayerRespondsToLayoutChanges() const { return true; }

    virtual void platformCALayerAnimationStarted(CFTimeInterval beginTime) { }
    virtual GraphicsLayer::CompositingCoordinatesOrientation platformCALayerContentsOrientation() const { return GraphicsLayer::CompositingCoordinatesBottomUp; }
    virtual void platformCALayerPaintContents(PlatformCALayer*, GraphicsContext&, const FloatRect&) { }
    virtual bool platformCALayerShowDebugBorders() const { return false; }
    virtual bool platformCALayerShowRepaintCounter(PlatformCALayer*) const { return false; }
    virtual int platformCALayerIncrementRepaintCount(PlatformCALayer*) { return 0; }

    virtual bool platformCALayerContentsOpaque() const { return false; }
    virtual bool platformCALayerDrawsContent() const { return false; }
    virtual float platformCALayerDeviceScaleFactor() const { return 1; }

    AVFWrapper* m_parent;
};

#if !LOG_DISABLED
// Small helper for LOG() output.
static const char* boolString(bool val)
{
    return val ? "true" : "false";
}
#endif

// Builds the list of asset properties whose load status gates readiness (see
// assetStatus()). The array is created once and cached by metadataKeyNames()
// for the lifetime of the process.
static CFArrayRef createMetadataKeyNames()
{
    static const CFStringRef keyNames[] = {
        AVCFAssetPropertyDuration,
        AVCFAssetPropertyNaturalSize,
        AVCFAssetPropertyPreferredTransform,
        AVCFAssetPropertyPreferredRate,
        AVCFAssetPropertyPlayable,
        AVCFAssetPropertyTracks,
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        AVCFAssetPropertyAvailableMediaCharacteristicsWithMediaSelectionOptions,
#endif
    };

    return CFArrayCreate(0, (const void**)keyNames, sizeof(keyNames) / sizeof(keyNames[0]), &kCFTypeArrayCallBacks);
}

static CFArrayRef metadataKeyNames()
{
    static CFArrayRef keys = createMetadataKeyNames();
    return keys;
}

// FIXME: It would be better if AVCFTimedMetadataGroup.h exported this key.
static CFStringRef CMTimeRangeStartKey()
{
    DEPRECATED_DEFINE_STATIC_LOCAL(CFStringRef, key, (CFSTR("start")));
    return key;
}

// FIXME: It would be better if AVCFTimedMetadataGroup.h exported this key.
static CFStringRef CMTimeRangeDurationKey()
{
    DEPRECATED_DEFINE_STATIC_LOCAL(CFStringRef, key, (CFSTR("duration")));
    return key;
}

// FIXME: It would be better if AVCF exported this notification name.
static CFStringRef CACFContextNeedsFlushNotification()
{
    DEPRECATED_DEFINE_STATIC_LOCAL(CFStringRef, name, (CFSTR("kCACFContextNeedsFlushNotification")));
    return name;
}

// Define AVCF object accessors as inline functions here instead of in MediaPlayerPrivateAVFoundationCF so we don't have
// to include the AVCF headers in MediaPlayerPrivateAVFoundationCF.h
// All of these are null-tolerant: a null wrapper yields a null ref.
inline AVCFPlayerLayerRef videoLayer(AVFWrapper* wrapper)
{
    return wrapper ? wrapper->videoLayer() : 0;
}

inline AVCFPlayerRef avPlayer(AVFWrapper* wrapper)
{
    return wrapper ? wrapper->avPlayer() : 0;
}

inline AVCFURLAssetRef avAsset(AVFWrapper* wrapper)
{
    return wrapper ? wrapper->avAsset() : 0;
}

inline AVCFPlayerItemRef avPlayerItem(AVFWrapper* wrapper)
{
    return wrapper ? wrapper->avPlayerItem() : 0;
}

inline AVCFAssetImageGeneratorRef imageGenerator(AVFWrapper* wrapper)
{
    return wrapper ? wrapper->imageGenerator() : 0;
}

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
inline AVCFPlayerItemLegibleOutputRef avLegibleOutput(AVFWrapper* wrapper)
{
    return wrapper ? wrapper->legibleOutput() : 0;
}

inline AVCFMediaSelectionGroupRef safeMediaSelectionGroupForLegibleMedia(AVFWrapper* wrapper)
{
    return wrapper ? wrapper->safeMediaSelectionGroupForLegibleMedia() : 0;
}
#endif

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Serial queue shared by all players for resource-loader delegate work;
// created lazily and exactly once via dispatch_once_f below.
static dispatch_queue_t globalQueue = nullptr;

static void initGlobalLoaderDelegateQueue(void* ctx)
{
    globalQueue = dispatch_queue_create("WebCoreAVFLoaderDelegate queue", DISPATCH_QUEUE_SERIAL);
}

static dispatch_queue_t globalLoaderDelegateQueue()
{
    static dispatch_once_t onceToken;

    dispatch_once_f(&onceToken, nullptr, initGlobalLoaderDelegateQueue);

    return globalQueue;
}
#endif

// Factory registered with the media engine (see registerMediaEngine()).
PassOwnPtr<MediaPlayerPrivateInterface> MediaPlayerPrivateAVFoundationCF::create(MediaPlayer* player)
{
    return adoptPtr(new MediaPlayerPrivateAVFoundationCF(player));
}

void MediaPlayerPrivateAVFoundationCF::registerMediaEngine(MediaEngineRegistrar registrar)
{
    // Only register when the AVFoundationCF and CoreMedia libraries are present.
    if (isAvailable())
        registrar(create, getSupportedTypes, supportsType, 0, 0, 0, 0);
}

MediaPlayerPrivateAVFoundationCF::MediaPlayerPrivateAVFoundationCF(MediaPlayer* player)
    : MediaPlayerPrivateAVFoundation(player)
    , m_avfWrapper(0)
    , m_videoFrameHasDrawn(false)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationCF::MediaPlayerPrivateAVFoundationCF(%p)", this);
}

MediaPlayerPrivateAVFoundationCF::~MediaPlayerPrivateAVFoundationCF()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationCF::~MediaPlayerPrivateAVFoundationCF(%p)", this);
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Invalidate any outstanding resource loaders before the player goes away.
    for (auto& pair : m_resourceLoaderMap)
        pair.value->invalidate();
#endif
    cancelLoad();
}

// Stops any in-progress load and tears down rendering, text tracks and the
// AVFWrapper. Safe to call repeatedly (m_avfWrapper is cleared).
void MediaPlayerPrivateAVFoundationCF::cancelLoad()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationCF::cancelLoad(%p)", this);

    // Do nothing when our cancellation of pending loading calls its completion handler
    setDelayCallbacks(true);
    setIgnoreLoadStateChanges(true);

    tearDownVideoRendering();

    clearTextTracks();

    if (m_avfWrapper) {
        // The AVCF objects have to be destroyed on the same dispatch queue used for notifications, so schedule a call to
        // disconnectAndDeleteAVFWrapper on that queue.
        m_avfWrapper->scheduleDisconnectAndDelete();
        m_avfWrapper = 0;
    }

    setIgnoreLoadStateChanges(false);
    setDelayCallbacks(false);
}

void MediaPlayerPrivateAVFoundationCF::updateVideoLayerGravity()
{
    ASSERT(supportsAcceleratedRendering());

    if (m_avfWrapper)
        m_avfWrapper->updateVideoLayerGravity();
}

bool MediaPlayerPrivateAVFoundationCF::hasLayerRenderer() const
{
    return videoLayer(m_avfWrapper);
}

bool MediaPlayerPrivateAVFoundationCF::hasContextRenderer() const
{
    return imageGenerator(m_avfWrapper);
}

void MediaPlayerPrivateAVFoundationCF::createContextVideoRenderer()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationCF::createContextVideoRenderer(%p)", this);

    // Already have an image generator; nothing to do.
    if (imageGenerator(m_avfWrapper))
        return;

    if (m_avfWrapper)
        m_avfWrapper->createImageGenerator();
}

void MediaPlayerPrivateAVFoundationCF::destroyContextVideoRenderer()
{
    if (m_avfWrapper)
        m_avfWrapper->destroyImageGenerator();
}

void MediaPlayerPrivateAVFoundationCF::createVideoLayer()
{
    ASSERT(supportsAcceleratedRendering());

    if (m_avfWrapper)
        m_avfWrapper->createAVCFVideoLayer();
}

void MediaPlayerPrivateAVFoundationCF::destroyVideoLayer()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationCF::destroyVideoLayer(%p) - destroying %p", this, videoLayer(m_avfWrapper));
    if (m_avfWrapper)
        m_avfWrapper->destroyVideoLayer();
}

bool MediaPlayerPrivateAVFoundationCF::hasAvailableVideoFrame() const
{
    // True once we have painted a frame ourselves, or the player layer reports
    // it is ready to display.
    return (m_videoFrameHasDrawn || (videoLayer(m_avfWrapper) && AVCFPlayerLayerIsReadyForDisplay(videoLayer(m_avfWrapper))));
}

void MediaPlayerPrivateAVFoundationCF::setCurrentTextTrack(InbandTextTrackPrivateAVF* track)
{
    if (m_avfWrapper)
        m_avfWrapper->setCurrentTextTrack(track);
}

InbandTextTrackPrivateAVF* MediaPlayerPrivateAVFoundationCF::currentTextTrack() const
{
    if (m_avfWrapper)
        return m_avfWrapper->currentTextTrack();

    return 0;
}

// Creates the AVFWrapper and kicks off asynchronous asset creation for |url|.
void MediaPlayerPrivateAVFoundationCF::createAVAssetForURL(const String& url)
{
    ASSERT(!m_avfWrapper);

    setDelayCallbacks(true);

    bool inheritURI = player()->doesHaveAttribute("x-itunes-inherit-uri-query-component");

    m_avfWrapper = new AVFWrapper(this);
    m_avfWrapper->createAssetForURL(url, inheritURI);
    setDelayCallbacks(false);
}

void MediaPlayerPrivateAVFoundationCF::createAVPlayer()
{
    ASSERT(m_avfWrapper);

    setDelayCallbacks(true);
    m_avfWrapper->createPlayer(reinterpret_cast<IDirect3DDevice9*>(player()->graphicsDeviceAdapter()));
    setDelayCallbacks(false);
}

void MediaPlayerPrivateAVFoundationCF::createAVPlayerItem()
{
    ASSERT(m_avfWrapper);

    setDelayCallbacks(true);
    m_avfWrapper->createPlayerItem();

    setDelayCallbacks(false);
}

void MediaPlayerPrivateAVFoundationCF::checkPlayability()
{
    ASSERT(m_avfWrapper);
    m_avfWrapper->checkPlayability();
}

void MediaPlayerPrivateAVFoundationCF::beginLoadingMetadata()
{
    ASSERT(m_avfWrapper);
    m_avfWrapper->beginLoadingMetadata();
}

// Maps the AVCF player item status (plus buffering state) onto the
// cross-platform MediaPlayerPrivateAVFoundation::ItemStatus enumeration.
MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationCF::playerItemStatus() const
{
    if (!avPlayerItem(m_avfWrapper))
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusDoesNotExist;

    AVCFPlayerItemStatus status = AVCFPlayerItemGetStatus(avPlayerItem(m_avfWrapper), 0);
    if (status == AVCFPlayerItemStatusUnknown)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;
    if (status == AVCFPlayerItemStatusFailed)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;
    if (AVCFPlayerItemIsPlaybackLikelyToKeepUp(avPlayerItem(m_avfWrapper)))
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;
    if (AVCFPlayerItemIsPlaybackBufferFull(avPlayerItem(m_avfWrapper)))
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;
    if (AVCFPlayerItemIsPlaybackBufferEmpty(avPlayerItem(m_avfWrapper)))
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;
    return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;
}

PlatformMedia MediaPlayerPrivateAVFoundationCF::platformMedia() const
{
    LOG(Media, "MediaPlayerPrivateAVFoundationCF::platformMedia(%p)", this);
    PlatformMedia pm;
    pm.type = PlatformMedia::AVFoundationCFMediaPlayerType;
    pm.media.avcfMediaPlayer = (AVCFPlayer*)avPlayer(m_avfWrapper);
    return pm;
}

PlatformLayer* MediaPlayerPrivateAVFoundationCF::platformLayer() const
{
    if (!m_avfWrapper)
        return 0;

    return m_avfWrapper->platformLayer();
}

void MediaPlayerPrivateAVFoundationCF::platformSetVisible(bool isVisible)
{
    if (!m_avfWrapper)
        return;

    // FIXME: We use a CATransaction here on the Mac, we need to figure out why this was done there and
    // whether we're affected by the same issue.
    setDelayCallbacks(true);
    m_avfWrapper->setVideoLayerHidden(!isVisible);
    // Tear down rendering entirely while invisible; it is rebuilt on demand.
    if (!isVisible)
        tearDownVideoRendering();
    setDelayCallbacks(false);
}

void MediaPlayerPrivateAVFoundationCF::platformPlay()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationCF::play(%p)", this);
    if (!metaDataAvailable() || !avPlayer(m_avfWrapper))
        return;

    setDelayCallbacks(true);
    AVCFPlayerSetRate(avPlayer(m_avfWrapper), requestedRate());
    setDelayCallbacks(false);
}

void MediaPlayerPrivateAVFoundationCF::platformPause()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationCF::pause(%p)", this);
    if (!metaDataAvailable() || !avPlayer(m_avfWrapper))
        return;

    // Pausing is just setting the playback rate to zero.
    setDelayCallbacks(true);
    AVCFPlayerSetRate(avPlayer(m_avfWrapper), 0);
    setDelayCallbacks(false);
}

double MediaPlayerPrivateAVFoundationCF::platformDuration() const
{
    if (!metaDataAvailable() || !avAsset(m_avfWrapper))
        return 0;

    CMTime cmDuration;

    // Check the AVItem if we have one and it has loaded duration, some assets never report duration.
    if (avPlayerItem(m_avfWrapper) && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay)
        cmDuration = AVCFPlayerItemGetDuration(avPlayerItem(m_avfWrapper));
    else
        cmDuration = AVCFAssetGetDuration(avAsset(m_avfWrapper));

    if (CMTIME_IS_NUMERIC(cmDuration))
        return CMTimeGetSeconds(cmDuration);

    // An indefinite duration (e.g. live streams) maps to infinity.
    if (CMTIME_IS_INDEFINITE(cmDuration))
        return numeric_limits<double>::infinity();

    LOG(Media, "MediaPlayerPrivateAVFoundationCF::platformDuration(%p) - invalid duration, returning %.0f", this, static_cast<float>(MediaPlayer::invalidTime()));
    return static_cast<float>(MediaPlayer::invalidTime());
}

double MediaPlayerPrivateAVFoundationCF::currentTimeDouble() const
{
    if (!metaDataAvailable() || !avPlayerItem(m_avfWrapper))
        return 0;

    CMTime itemTime = AVCFPlayerItemGetCurrentTime(avPlayerItem(m_avfWrapper));
    if (CMTIME_IS_NUMERIC(itemTime))
        return std::max(CMTimeGetSeconds(itemTime), 0.0);

    return 0;
}

void MediaPlayerPrivateAVFoundationCF::seekToTime(double time, double negativeTolerance, double positiveTolerance)
{
    if (!m_avfWrapper)
        return;

    // seekToTime generates several event callbacks, update afterwards.
    setDelayCallbacks(true);
    m_avfWrapper->seekToTime(time, negativeTolerance, positiveTolerance);
    setDelayCallbacks(false);
}

void MediaPlayerPrivateAVFoundationCF::setVolume(float volume)
{
    if (!metaDataAvailable() || !avPlayer(m_avfWrapper))
        return;

    AVCFPlayerSetVolume(avPlayer(m_avfWrapper), volume);
}

void MediaPlayerPrivateAVFoundationCF::setClosedCaptionsVisible(bool closedCaptionsVisible)
{
    if (!metaDataAvailable() || !avPlayer(m_avfWrapper))
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationCF::setClosedCaptionsVisible(%p) - setting to %s", this, boolString(closedCaptionsVisible));
    AVCFPlayerSetClosedCaptionDisplayEnabled(avPlayer(m_avfWrapper), closedCaptionsVisible);
}

void MediaPlayerPrivateAVFoundationCF::updateRate()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationCF::updateRate(%p)", this);
    if (!metaDataAvailable() || !avPlayer(m_avfWrapper))
        return;

    setDelayCallbacks(true);
    AVCFPlayerSetRate(avPlayer(m_avfWrapper), requestedRate());
    setDelayCallbacks(false);
}

float MediaPlayerPrivateAVFoundationCF::rate() const
{
    if (!metaDataAvailable() || !avPlayer(m_avfWrapper))
        return 0;

    setDelayCallbacks(true);
    float currentRate = AVCFPlayerGetRate(avPlayer(m_avfWrapper));
    setDelayCallbacks(false);

    return currentRate;
}

// Returns true only for a well-formed, non-empty CMTime range.
static bool timeRangeIsValidAndNotEmpty(CMTime start, CMTime duration)
{
    // Is the range valid?
661 if (!CMTIME_IS_VALID(start) || !CMTIME_IS_VALID(duration) || duration.epoch || duration.value < 0) 662 return false; 663 664 if (CMTIME_COMPARE_INLINE(duration, ==, kCMTimeZero)) 665 return false; 666 667 return true; 668} 669 670std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationCF::platformBufferedTimeRanges() const 671{ 672 auto timeRanges = PlatformTimeRanges::create(); 673 674 if (!avPlayerItem(m_avfWrapper)) 675 return timeRanges; 676 677 RetainPtr<CFArrayRef> loadedRanges = adoptCF(AVCFPlayerItemCopyLoadedTimeRanges(avPlayerItem(m_avfWrapper))); 678 if (!loadedRanges) 679 return timeRanges; 680 681 CFIndex rangeCount = CFArrayGetCount(loadedRanges.get()); 682 for (CFIndex i = 0; i < rangeCount; i++) { 683 CFDictionaryRef range = static_cast<CFDictionaryRef>(CFArrayGetValueAtIndex(loadedRanges.get(), i)); 684 CMTime start = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeStartKey()))); 685 CMTime duration = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeDurationKey()))); 686 687 if (timeRangeIsValidAndNotEmpty(start, duration)) { 688 double rangeStart = CMTimeGetSeconds(start); 689 double rangeEnd = rangeStart + CMTimeGetSeconds(duration); 690 timeRanges->add(MediaTime::createWithDouble(rangeStart), MediaTime::createWithDouble(rangeEnd)); 691 } 692 } 693 694 return timeRanges; 695} 696 697double MediaPlayerPrivateAVFoundationCF::platformMinTimeSeekable() const 698{ 699 RetainPtr<CFArrayRef> seekableRanges = adoptCF(AVCFPlayerItemCopySeekableTimeRanges(avPlayerItem(m_avfWrapper))); 700 if (!seekableRanges) 701 return 0; 702 703 double minTimeSeekable = std::numeric_limits<double>::infinity(); 704 bool hasValidRange = false; 705 CFIndex rangeCount = CFArrayGetCount(seekableRanges.get()); 706 for (CFIndex i = 0; i < rangeCount; i++) { 707 CFDictionaryRef range = static_cast<CFDictionaryRef>(CFArrayGetValueAtIndex(seekableRanges.get(), i)); 708 CMTime 
start = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeStartKey()))); 709 CMTime duration = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeDurationKey()))); 710 if (!timeRangeIsValidAndNotEmpty(start, duration)) 711 continue; 712 713 hasValidRange = true; 714 double startOfRange = CMTimeGetSeconds(start); 715 if (minTimeSeekable > startOfRange) 716 minTimeSeekable = startOfRange; 717 } 718 return hasValidRange ? minTimeSeekable : 0; 719} 720 721double MediaPlayerPrivateAVFoundationCF::platformMaxTimeSeekable() const 722{ 723 if (!avPlayerItem(m_avfWrapper)) 724 return 0; 725 726 RetainPtr<CFArrayRef> seekableRanges = adoptCF(AVCFPlayerItemCopySeekableTimeRanges(avPlayerItem(m_avfWrapper))); 727 if (!seekableRanges) 728 return 0; 729 730 double maxTimeSeekable = 0; 731 CFIndex rangeCount = CFArrayGetCount(seekableRanges.get()); 732 for (CFIndex i = 0; i < rangeCount; i++) { 733 CFDictionaryRef range = static_cast<CFDictionaryRef>(CFArrayGetValueAtIndex(seekableRanges.get(), i)); 734 CMTime start = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeStartKey()))); 735 CMTime duration = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeDurationKey()))); 736 if (!timeRangeIsValidAndNotEmpty(start, duration)) 737 continue; 738 739 double endOfRange = CMTimeGetSeconds(CMTimeAdd(start, duration)); 740 if (maxTimeSeekable < endOfRange) 741 maxTimeSeekable = endOfRange; 742 } 743 744 return maxTimeSeekable; 745} 746 747float MediaPlayerPrivateAVFoundationCF::platformMaxTimeLoaded() const 748{ 749 if (!avPlayerItem(m_avfWrapper)) 750 return 0; 751 752 RetainPtr<CFArrayRef> loadedRanges = adoptCF(AVCFPlayerItemCopyLoadedTimeRanges(avPlayerItem(m_avfWrapper))); 753 if (!loadedRanges) 754 return 0; 755 756 float maxTimeLoaded = 0; 757 CFIndex rangeCount = CFArrayGetCount(loadedRanges.get()); 758 for 
(CFIndex i = 0; i < rangeCount; i++) {
        CFDictionaryRef range = static_cast<CFDictionaryRef>(CFArrayGetValueAtIndex(loadedRanges.get(), i));
        CMTime start = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeStartKey())));
        CMTime duration = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeDurationKey())));
        if (!timeRangeIsValidAndNotEmpty(start, duration))
            continue;

        float endOfRange = narrowPrecisionToFloat(CMTimeGetSeconds(CMTimeAdd(start, duration)));
        if (maxTimeLoaded < endOfRange)
            maxTimeLoaded = endOfRange;
    }

    return maxTimeLoaded;
}

// Sums the sample data length of every track in the asset.
unsigned long long MediaPlayerPrivateAVFoundationCF::totalBytes() const
{
    if (!metaDataAvailable() || !avAsset(m_avfWrapper))
        return 0;

    int64_t totalMediaSize = 0;
    RetainPtr<CFArrayRef> tracks = adoptCF(AVCFAssetCopyAssetTracks(avAsset(m_avfWrapper)));
    CFIndex trackCount = CFArrayGetCount(tracks.get());
    for (CFIndex i = 0; i < trackCount; i++) {
        AVCFAssetTrackRef assetTrack = (AVCFAssetTrackRef)CFArrayGetValueAtIndex(tracks.get(), i);
        totalMediaSize += AVCFAssetTrackGetTotalSampleDataLength(assetTrack);
    }

    return static_cast<unsigned long long>(totalMediaSize);
}

// Derives the asset's load state from the status of each metadata property in
// metadataKeyNames(); certain per-property failures are deliberately tolerated.
MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationCF::assetStatus() const
{
    if (!avAsset(m_avfWrapper))
        return MediaPlayerAVAssetStatusDoesNotExist;

    // First, make sure all metadata properties we rely on are loaded.
    CFArrayRef keys = metadataKeyNames();
    CFIndex keyCount = CFArrayGetCount(keys);
    for (CFIndex i = 0; i < keyCount; i++) {
        CFStringRef keyName = static_cast<CFStringRef>(CFArrayGetValueAtIndex(keys, i));
        AVCFPropertyValueStatus keyStatus = AVCFAssetGetStatusOfValueForProperty(avAsset(m_avfWrapper), keyName, 0);

        if (keyStatus < AVCFPropertyValueStatusLoaded)
            return MediaPlayerAVAssetStatusLoading;
        if (keyStatus == AVCFPropertyValueStatusFailed) {
            if (CFStringCompare(keyName, AVCFAssetPropertyNaturalSize, 0) == kCFCompareEqualTo) {
                // Don't treat a failure to retrieve @"naturalSize" as fatal. We will use @"presentationSize" instead.
                // <rdar://problem/15966685>
                continue;
            }
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
            if (CFStringCompare(keyName, AVCFAssetPropertyAvailableMediaCharacteristicsWithMediaSelectionOptions, 0) == kCFCompareEqualTo) {
                // On Windows, the media selection options are not available when initially interacting with a streaming source.
                // <rdar://problem/16160699>
                continue;
            }
#endif
            return MediaPlayerAVAssetStatusFailed;
        }
        if (keyStatus == AVCFPropertyValueStatusCancelled)
            return MediaPlayerAVAssetStatusCancelled;
    }

    if (AVCFAssetIsPlayable(avAsset(m_avfWrapper)))
        return MediaPlayerAVAssetStatusPlayable;

    return MediaPlayerAVAssetStatusLoaded;
}

void MediaPlayerPrivateAVFoundationCF::paintCurrentFrameInContext(GraphicsContext* context, const IntRect& rect)
{
    if (!metaDataAvailable() || context->paintingDisabled())
        return;

    if (currentRenderingMode() == MediaRenderingToLayer && !imageGenerator(m_avfWrapper)) {
        // We're being told to render into a context, but we already have the
        // video layer, which probably means we've been called from <canvas>.
        createContextVideoRenderer();
    }

    paint(context, rect);
}

// Draws the current video frame into |context| via the image generator; the
// context is flipped vertically because the generated image is bottom-up.
void MediaPlayerPrivateAVFoundationCF::paint(GraphicsContext* context, const IntRect& rect)
{
    if (!metaDataAvailable() || context->paintingDisabled() || !imageGenerator(m_avfWrapper))
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationCF::paint(%p)", this);

    setDelayCallbacks(true);
    RetainPtr<CGImageRef> image = m_avfWrapper->createImageForTimeInRect(currentTime(), rect);
    if (image) {
        context->save();
        context->translate(rect.x(), rect.y() + rect.height());
        context->scale(FloatSize(1.0f, -1.0f));
        context->setImageInterpolationQuality(InterpolationLow);
        IntRect paintRect(IntPoint(0, 0), IntSize(rect.width(), rect.height()));
        CGContextDrawImage(context->platformContext(), CGRectMake(0, 0, paintRect.width(), paintRect.height()), image.get());
        context->restore();
        image = 0;
    }
    setDelayCallbacks(false);

    m_videoFrameHasDrawn = true;
}

// Lazily builds and caches the set of MIME types AVCF reports as playable.
static HashSet<String> mimeTypeCache()
{
    DEPRECATED_DEFINE_STATIC_LOCAL(HashSet<String>, cache, ());
    static bool typeListInitialized = false;

    if (typeListInitialized)
        return cache;
    typeListInitialized = true;

    RetainPtr<CFArrayRef> supportedTypes = adoptCF(AVCFURLAssetCopyAudiovisualMIMETypes());

    ASSERT(supportedTypes);
    if (!supportedTypes)
        return cache;

    CFIndex typeCount = CFArrayGetCount(supportedTypes.get());
    for (CFIndex i = 0; i < typeCount; i++)
        cache.add(static_cast<CFStringRef>(CFArrayGetValueAtIndex(supportedTypes.get(), i)));

    return cache;
}

void MediaPlayerPrivateAVFoundationCF::getSupportedTypes(HashSet<String>& supportedTypes)
{
    supportedTypes = mimeTypeCache();
}

#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
// Only the FairPlay Streaming key systems are recognized.
static bool keySystemIsSupported(const String& keySystem)
{
    if (equalIgnoringCase(keySystem, "com.apple.fps") || equalIgnoringCase(keySystem, "com.apple.fps.1_0"))
        return true;
    return false;
}
#endif

MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationCF::supportsType(const MediaEngineSupportParameters& parameters)
{
    // Only return "IsSupported" if there is no codecs parameter for now as there is no way to ask if it supports an
    // extended MIME type until rdar://8721715 is fixed.
    if (mimeTypeCache().contains(parameters.type))
        return parameters.codecs.isEmpty() ? MediaPlayer::MayBeSupported : MediaPlayer::IsSupported;

    return MediaPlayer::IsNotSupported;
}

bool MediaPlayerPrivateAVFoundationCF::isAvailable()
{
    // Both soft-linked libraries must be present for this engine to work.
    return AVFoundationCFLibrary() && CoreMediaLibrary();
}

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
void MediaPlayerPrivateAVFoundationCF::didCancelLoadingRequest(AVCFAssetResourceLoadingRequestRef avRequest)
{
    WebCoreAVCFResourceLoader* resourceLoader = m_resourceLoaderMap.get(avRequest);

    if (resourceLoader)
        resourceLoader->stopLoading();
}

void MediaPlayerPrivateAVFoundationCF::didStopLoadingRequest(AVCFAssetResourceLoadingRequestRef avRequest)
{
    m_resourceLoaderMap.remove(avRequest);
}
#endif

float MediaPlayerPrivateAVFoundationCF::mediaTimeForTimeValue(float timeValue) const
{
    if (!metaDataAvailable())
        return timeValue;

    // FIXME - can not implement until rdar://8721669 is fixed.
    return timeValue;
}

void MediaPlayerPrivateAVFoundationCF::tracksChanged()
{
    // Remember the previous primary-audio language so a change can be detected.
    String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
    m_languageOfPrimaryAudioTrack = String();

    if (!avAsset(m_avfWrapper))
        return;

    setDelayCharacteristicsChangedNotification(true);

    bool haveCCTrack = false;
    bool hasCaptions = false;

    // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we are
    // asked about those fairly frequently.
957 if (!avPlayerItem(m_avfWrapper)) { 958 // We don't have a player item yet, so check with the asset because some assets support inspection 959 // prior to becoming ready to play. 960 RetainPtr<CFArrayRef> visualTracks = adoptCF(AVCFAssetCopyTracksWithMediaCharacteristic(avAsset(m_avfWrapper), AVCFMediaCharacteristicVisual)); 961 setHasVideo(CFArrayGetCount(visualTracks.get())); 962 963 RetainPtr<CFArrayRef> audioTracks = adoptCF(AVCFAssetCopyTracksWithMediaCharacteristic(avAsset(m_avfWrapper), AVCFMediaCharacteristicAudible)); 964 setHasAudio(CFArrayGetCount(audioTracks.get())); 965 966#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) 967 RetainPtr<CFArrayRef> captionTracks = adoptCF(AVCFAssetCopyTracksWithMediaType(avAsset(m_avfWrapper), AVCFMediaTypeClosedCaption)); 968 hasCaptions = CFArrayGetCount(captionTracks.get()); 969#endif 970 } else { 971 bool hasVideo = false; 972 bool hasAudio = false; 973 974 RetainPtr<CFArrayRef> tracks = adoptCF(AVCFPlayerItemCopyTracks(avPlayerItem(m_avfWrapper))); 975 976 CFIndex trackCount = CFArrayGetCount(tracks.get()); 977 for (CFIndex i = 0; i < trackCount; i++) { 978 AVCFPlayerItemTrackRef track = (AVCFPlayerItemTrackRef)(CFArrayGetValueAtIndex(tracks.get(), i)); 979 980 if (AVCFPlayerItemTrackIsEnabled(track)) { 981 RetainPtr<AVCFAssetTrackRef> assetTrack = adoptCF(AVCFPlayerItemTrackCopyAssetTrack(track)); 982 if (!assetTrack) { 983 // Asset tracks may not be available yet when streaming. 
<rdar://problem/16160699> 984 LOG(Media, "MediaPlayerPrivateAVFoundationCF:tracksChanged(%p) - track = %d is enabled, but has no asset track.", this, track); 985 continue; 986 } 987 CFStringRef mediaType = AVCFAssetTrackGetMediaType(assetTrack.get()); 988 if (!mediaType) 989 continue; 990 991 if (CFStringCompare(mediaType, AVCFMediaTypeVideo, kCFCompareCaseInsensitive) == kCFCompareEqualTo) 992 hasVideo = true; 993 else if (CFStringCompare(mediaType, AVCFMediaTypeAudio, kCFCompareCaseInsensitive) == kCFCompareEqualTo) 994 hasAudio = true; 995 else if (CFStringCompare(mediaType, AVCFMediaTypeClosedCaption, kCFCompareCaseInsensitive) == kCFCompareEqualTo) { 996#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) 997 hasCaptions = true; 998#endif 999 haveCCTrack = true; 1000 } 1001 } 1002 } 1003 1004 setHasVideo(hasVideo); 1005 setHasAudio(hasAudio); 1006 } 1007 1008#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) 1009 AVCFMediaSelectionGroupRef legibleGroup = safeMediaSelectionGroupForLegibleMedia(m_avfWrapper); 1010 if (legibleGroup) { 1011 RetainPtr<CFArrayRef> playableOptions = adoptCF(AVCFMediaSelectionCopyPlayableOptionsFromArray(AVCFMediaSelectionGroupGetOptions(legibleGroup))); 1012 hasCaptions = CFArrayGetCount(playableOptions.get()); 1013 if (hasCaptions) 1014 processMediaSelectionOptions(); 1015 } 1016#endif 1017 1018#if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT) 1019 if (haveCCTrack) 1020 processLegacyClosedCaptionsTracks(); 1021#endif 1022 1023 setHasClosedCaptions(hasCaptions); 1024 1025 LOG(Media, "MediaPlayerPrivateAVFoundationCF:tracksChanged(%p) - hasVideo = %s, hasAudio = %s, hasCaptions = %s", 1026 this, boolString(hasVideo()), boolString(hasAudio()), boolString(hasClosedCaptions())); 1027 1028 sizeChanged(); 1029 1030 if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack()) 1031 characteristicsChanged(); 1032 1033 setDelayCharacteristicsChangedNotification(false); 1034} 1035 1036void MediaPlayerPrivateAVFoundationCF::sizeChanged() 1037{ 1038 if 
(!avAsset(m_avfWrapper)) 1039 return; 1040 1041 // AVAsset's 'naturalSize' property only considers the movie's first video track, so we need to compute 1042 // the union of all visual track rects. 1043 CGRect trackRectUnion = CGRectZero; 1044 RetainPtr<CFArrayRef> tracks = adoptCF(AVCFAssetCopyTracksWithMediaType(avAsset(m_avfWrapper), AVCFMediaCharacteristicVisual)); 1045 CFIndex trackCount = CFArrayGetCount(tracks.get()); 1046 for (CFIndex i = 0; i < trackCount; i++) { 1047 AVCFAssetTrackRef assetTrack = (AVCFAssetTrackRef)(CFArrayGetValueAtIndex(tracks.get(), i)); 1048 1049 CGSize trackSize = AVCFAssetTrackGetNaturalSize(assetTrack); 1050 CGRect trackRect = CGRectMake(0, 0, trackSize.width, trackSize.height); 1051 trackRectUnion = CGRectUnion(trackRectUnion, CGRectApplyAffineTransform(trackRect, AVCFAssetTrackGetPreferredTransform(assetTrack))); 1052 } 1053 // The movie is always displayed at 0,0 so move the track rect to the origin before using width and height. 1054 trackRectUnion = CGRectOffset(trackRectUnion, trackRectUnion.origin.x, trackRectUnion.origin.y); 1055 CGSize naturalSize = trackRectUnion.size; 1056 1057 if (!naturalSize.height && !naturalSize.width && avPlayerItem(m_avfWrapper)) 1058 naturalSize = AVCFPlayerItemGetPresentationSize(avPlayerItem(m_avfWrapper)); 1059 1060 // Also look at the asset's preferred transform so we account for a movie matrix. 
1061 CGSize movieSize = CGSizeApplyAffineTransform(AVCFAssetGetNaturalSize(avAsset(m_avfWrapper)), AVCFAssetGetPreferredTransform(avAsset(m_avfWrapper))); 1062 if (movieSize.width > naturalSize.width) 1063 naturalSize.width = movieSize.width; 1064 if (movieSize.height > naturalSize.height) 1065 naturalSize.height = movieSize.height; 1066 setNaturalSize(IntSize(naturalSize)); 1067} 1068 1069bool MediaPlayerPrivateAVFoundationCF::requiresImmediateCompositing() const 1070{ 1071 // The AVFoundationCF player needs to have the root compositor available at construction time 1072 // so it can attach to the rendering device. Otherwise it falls back to CPU-only mode. 1073 // 1074 // It would be nice if AVCFPlayer had some way to switch to hardware-accelerated mode 1075 // when asked, then we could follow AVFoundation's model and switch to compositing 1076 // mode when beginning to play media. 1077 return true; 1078} 1079 1080#if ENABLE(ENCRYPTED_MEDIA_V2) 1081RetainPtr<AVCFAssetResourceLoadingRequestRef> MediaPlayerPrivateAVFoundationCF::takeRequestForKeyURI(const String& keyURI) 1082{ 1083 if (!m_avfWrapper) 1084 return nullptr; 1085 1086 return m_avfWrapper->takeRequestForKeyURI(keyURI); 1087} 1088 1089std::unique_ptr<CDMSession> MediaPlayerPrivateAVFoundationCF::createSession(const String& keySystem) 1090{ 1091 if (!keySystemIsSupported(keySystem)) 1092 return nullptr; 1093 1094 return std::make_unique<CDMSessionAVFoundationCF>(this); 1095} 1096#endif 1097 1098#if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT) 1099void MediaPlayerPrivateAVFoundationCF::processLegacyClosedCaptionsTracks() 1100{ 1101#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) 1102 AVCFPlayerItemSelectMediaOptionInMediaSelectionGroup(avPlayerItem(m_avfWrapper), 0, safeMediaSelectionGroupForLegibleMedia(m_avfWrapper)); 1103#endif 1104 1105 Vector<RefPtr<InbandTextTrackPrivateAVF> > removedTextTracks = m_textTracks; 1106 RetainPtr<CFArrayRef> tracks = adoptCF(AVCFPlayerItemCopyTracks(avPlayerItem(m_avfWrapper))); 
1107 CFIndex trackCount = CFArrayGetCount(tracks.get()); 1108 for (CFIndex i = 0; i < trackCount; ++i) { 1109 AVCFPlayerItemTrackRef playerItemTrack = (AVCFPlayerItemTrackRef)(CFArrayGetValueAtIndex(tracks.get(), i)); 1110 1111 RetainPtr<AVCFAssetTrackRef> assetTrack = adoptCF(AVCFPlayerItemTrackCopyAssetTrack(playerItemTrack)); 1112 if (!assetTrack) { 1113 // Asset tracks may not be available yet when streaming. <rdar://problem/16160699> 1114 LOG(Media, "MediaPlayerPrivateAVFoundationCF:tracksChanged(%p) - track = %d is enabled, but has no asset track.", this, track); 1115 continue; 1116 } 1117 CFStringRef mediaType = AVCFAssetTrackGetMediaType(assetTrack.get()); 1118 if (!mediaType) 1119 continue; 1120 1121 if (CFStringCompare(mediaType, AVCFMediaTypeClosedCaption, kCFCompareCaseInsensitive) != kCFCompareEqualTo) 1122 continue; 1123 1124 bool newCCTrack = true; 1125 for (unsigned i = removedTextTracks.size(); i > 0; --i) { 1126 if (removedTextTracks[i - 1]->textTrackCategory() != InbandTextTrackPrivateAVF::LegacyClosedCaption) 1127 continue; 1128 1129 RefPtr<InbandTextTrackPrivateLegacyAVCF> track = static_cast<InbandTextTrackPrivateLegacyAVCF*>(m_textTracks[i - 1].get()); 1130 if (track->avPlayerItemTrack() == playerItemTrack) { 1131 removedTextTracks.remove(i - 1); 1132 newCCTrack = false; 1133 break; 1134 } 1135 } 1136 1137 if (!newCCTrack) 1138 continue; 1139 1140 m_textTracks.append(InbandTextTrackPrivateLegacyAVCF::create(this, playerItemTrack)); 1141 } 1142 1143 processNewAndRemovedTextTracks(removedTextTracks); 1144} 1145#endif 1146 1147#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) 1148void MediaPlayerPrivateAVFoundationCF::processMediaSelectionOptions() 1149{ 1150 AVCFMediaSelectionGroupRef legibleGroup = safeMediaSelectionGroupForLegibleMedia(m_avfWrapper); 1151 if (!legibleGroup) { 1152 LOG(Media, "MediaPlayerPrivateAVFoundationCF::processMediaSelectionOptions(%p) - nil mediaSelectionGroup", this); 1153 return; 1154 } 1155 1156 // We enabled automatic 
media selection because we want alternate audio tracks to be enabled/disabled automatically, 1157 // but set the selected legible track to nil so text tracks will not be automatically configured. 1158 if (!m_textTracks.size()) { 1159 ASSERT(AVCFMediaSelectionGroupAllowsEmptySelection(legibleGroup)); 1160 AVCFPlayerItemRef playerItem = avPlayerItem(m_avfWrapper); 1161 1162 if (playerItem) 1163 AVCFPlayerItemSelectMediaOptionInMediaSelectionGroup(playerItem, 0, legibleGroup); 1164 } 1165 1166 Vector<RefPtr<InbandTextTrackPrivateAVF> > removedTextTracks = m_textTracks; 1167 RetainPtr<CFArrayRef> legibleOptions = adoptCF(AVCFMediaSelectionCopyPlayableOptionsFromArray(AVCFMediaSelectionGroupGetOptions(legibleGroup))); 1168 CFIndex legibleOptionsCount = CFArrayGetCount(legibleOptions.get()); 1169 for (CFIndex i = 0; i < legibleOptionsCount; ++i) { 1170 AVCFMediaSelectionOptionRef option = static_cast<AVCFMediaSelectionOptionRef>(CFArrayGetValueAtIndex(legibleOptions.get(), i)); 1171 bool newTrack = true; 1172 for (unsigned i = removedTextTracks.size(); i > 0; --i) { 1173 if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption) 1174 continue; 1175 1176 RefPtr<InbandTextTrackPrivateAVCF> track = static_cast<InbandTextTrackPrivateAVCF*>(removedTextTracks[i - 1].get()); 1177 if (CFEqual(track->mediaSelectionOption(), option)) { 1178 removedTextTracks.remove(i - 1); 1179 newTrack = false; 1180 break; 1181 } 1182 } 1183 if (!newTrack) 1184 continue; 1185 1186 m_textTracks.append(InbandTextTrackPrivateAVCF::create(this, option, InbandTextTrackPrivate::Generic)); 1187 } 1188 1189 processNewAndRemovedTextTracks(removedTextTracks); 1190} 1191 1192#endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) 1193 1194void AVFWrapper::setCurrentTextTrack(InbandTextTrackPrivateAVF* track) 1195{ 1196 if (m_currentTextTrack == track) 1197 return; 1198 1199 LOG(Media, "MediaPlayerPrivateAVFoundationCF::setCurrentTextTrack(%p) - selecting track %p, 
language = %s", this, track, track ? track->language().string().utf8().data() : ""); 1200 1201 m_currentTextTrack = track; 1202 1203 if (track) { 1204 if (track->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption) 1205 AVCFPlayerSetClosedCaptionDisplayEnabled(avPlayer(), TRUE); 1206#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) 1207 else 1208 AVCFPlayerItemSelectMediaOptionInMediaSelectionGroup(avPlayerItem(), static_cast<InbandTextTrackPrivateAVCF*>(track)->mediaSelectionOption(), safeMediaSelectionGroupForLegibleMedia()); 1209#endif 1210 } else { 1211#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) 1212 AVCFPlayerItemSelectMediaOptionInMediaSelectionGroup(avPlayerItem(), 0, safeMediaSelectionGroupForLegibleMedia()); 1213#endif 1214 AVCFPlayerSetClosedCaptionDisplayEnabled(avPlayer(), FALSE); 1215 } 1216} 1217 1218String MediaPlayerPrivateAVFoundationCF::languageOfPrimaryAudioTrack() const 1219{ 1220 if (!m_languageOfPrimaryAudioTrack.isNull()) 1221 return m_languageOfPrimaryAudioTrack; 1222 1223 if (!avPlayerItem(m_avfWrapper)) 1224 return emptyString(); 1225 1226#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) 1227 // If AVFoundation has an audible group, return the language of the currently selected audible option. 
1228 AVCFMediaSelectionGroupRef audibleGroup = AVCFAssetGetSelectionGroupForMediaCharacteristic(avAsset(m_avfWrapper), AVCFMediaCharacteristicAudible); 1229 AVCFMediaSelectionOptionRef currentlySelectedAudibleOption = AVCFPlayerItemGetSelectedMediaOptionInMediaSelectionGroup(avPlayerItem(m_avfWrapper), audibleGroup); 1230 if (currentlySelectedAudibleOption) { 1231 RetainPtr<CFLocaleRef> audibleOptionLocale = adoptCF(AVCFMediaSelectionOptionCopyLocale(currentlySelectedAudibleOption)); 1232 m_languageOfPrimaryAudioTrack = CFLocaleGetIdentifier(audibleOptionLocale.get()); 1233 LOG(Media, "MediaPlayerPrivateAVFoundationCF::languageOfPrimaryAudioTrack(%p) - returning language of selected audible option: %s", this, m_languageOfPrimaryAudioTrack.utf8().data()); 1234 1235 return m_languageOfPrimaryAudioTrack; 1236 } 1237#endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) 1238 1239 // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or 1240 // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track return its language. 1241 RetainPtr<CFArrayRef> tracks = adoptCF(AVCFAssetCopyTracksWithMediaType(avAsset(m_avfWrapper), AVCFMediaTypeAudio)); 1242 CFIndex trackCount = CFArrayGetCount(tracks.get()); 1243 if (!tracks || trackCount != 1) { 1244 m_languageOfPrimaryAudioTrack = emptyString(); 1245 LOG(Media, "MediaPlayerPrivateAVFoundationCF::languageOfPrimaryAudioTrack(%p) - %i audio tracks, returning emptyString()", this, (tracks ? 
trackCount : 0)); 1246 return m_languageOfPrimaryAudioTrack; 1247 } 1248 1249 AVCFAssetTrackRef track = (AVCFAssetTrackRef)CFArrayGetValueAtIndex(tracks.get(), 0); 1250 RetainPtr<CFStringRef> language = adoptCF(AVCFAssetTrackCopyExtendedLanguageTag(track)); 1251 1252 // If the language code is stored as a QuickTime 5-bit packed code there aren't enough bits for a full 1253 // RFC 4646 language tag so extendedLanguageTag returns null. In this case languageCode will return the 1254 // ISO 639-2/T language code so check it. 1255 if (!language) 1256 language = adoptCF(AVCFAssetTrackCopyLanguageCode(track)); 1257 1258 // Some legacy tracks have "und" as a language, treat that the same as no language at all. 1259 if (language && CFStringCompare(language.get(), CFSTR("und"), kCFCompareCaseInsensitive) != kCFCompareEqualTo) { 1260 m_languageOfPrimaryAudioTrack = language.get(); 1261 LOG(Media, "MediaPlayerPrivateAVFoundationCF::languageOfPrimaryAudioTrack(%p) - returning language of single audio track: %s", this, m_languageOfPrimaryAudioTrack.utf8().data()); 1262 return m_languageOfPrimaryAudioTrack; 1263 } 1264 1265 LOG(Media, "MediaPlayerPrivateAVFoundationCF::languageOfPrimaryAudioTrack(%p) - single audio track has no language, returning emptyString()", this); 1266 m_languageOfPrimaryAudioTrack = emptyString(); 1267 return m_languageOfPrimaryAudioTrack; 1268} 1269 1270void MediaPlayerPrivateAVFoundationCF::contentsNeedsDisplay() 1271{ 1272 if (m_avfWrapper) 1273 m_avfWrapper->setVideoLayerNeedsCommit(); 1274} 1275 1276AVFWrapper::AVFWrapper(MediaPlayerPrivateAVFoundationCF* owner) 1277 : m_owner(owner) 1278 , m_objectID(s_nextAVFWrapperObjectID++) 1279 , m_currentTextTrack(0) 1280{ 1281 ASSERT(isMainThread()); 1282 ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue()); 1283 LOG(Media, "AVFWrapper::AVFWrapper(%p)", this); 1284 1285 m_notificationQueue = dispatch_queue_create("MediaPlayerPrivateAVFoundationCF.notificationQueue", 0); 1286 addToMap(); 1287} 1288 
1289AVFWrapper::~AVFWrapper() 1290{ 1291 ASSERT(isMainThread()); 1292 ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue()); 1293 LOG(Media, "AVFWrapper::~AVFWrapper(%p %d)", this, m_objectID); 1294 1295 destroyVideoLayer(); 1296 destroyImageGenerator(); 1297 1298 if (m_notificationQueue) 1299 dispatch_release(m_notificationQueue); 1300 1301 if (avAsset()) { 1302 AVCFAssetCancelLoading(avAsset()); 1303 m_avAsset = 0; 1304 } 1305 1306#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT) 1307 if (legibleOutput()) { 1308 if (avPlayerItem()) 1309 AVCFPlayerItemRemoveOutput(avPlayerItem(), legibleOutput()); 1310 m_legibleOutput = 0; 1311 } 1312#endif 1313 1314 m_avPlayerItem = 0; 1315 m_timeObserver = 0; 1316 m_avPlayer = 0; 1317} 1318 1319Mutex& AVFWrapper::mapLock() 1320{ 1321 static Mutex mapLock; 1322 return mapLock; 1323} 1324 1325HashMap<uintptr_t, AVFWrapper*>& AVFWrapper::map() 1326{ 1327 static HashMap<uintptr_t, AVFWrapper*>& map = *new HashMap<uintptr_t, AVFWrapper*>; 1328 return map; 1329} 1330 1331void AVFWrapper::addToMap() 1332{ 1333 MutexLocker locker(mapLock()); 1334 1335 // HashMap doesn't like a key of 0, and also make sure we aren't 1336 // using an object ID that's already in use. 1337 while (!m_objectID || (map().find(m_objectID) != map().end())) 1338 m_objectID = s_nextAVFWrapperObjectID++; 1339 1340 LOG(Media, "AVFWrapper::addToMap(%p %d)", this, m_objectID); 1341 1342 map().add(m_objectID, this); 1343} 1344 1345void AVFWrapper::removeFromMap() const 1346{ 1347 LOG(Media, "AVFWrapper::removeFromMap(%p %d)", this, m_objectID); 1348 1349 MutexLocker locker(mapLock()); 1350 map().remove(m_objectID); 1351} 1352 1353AVFWrapper* AVFWrapper::avfWrapperForCallbackContext(void* context) 1354{ 1355 // Assumes caller has locked mapLock(). 
1356 HashMap<uintptr_t, AVFWrapper*>::iterator it = map().find(reinterpret_cast<uintptr_t>(context)); 1357 if (it == map().end()) 1358 return 0; 1359 1360 return it->value; 1361} 1362 1363void AVFWrapper::scheduleDisconnectAndDelete() 1364{ 1365 // Ignore any subsequent notifications we might receive in notificationCallback(). 1366 removeFromMap(); 1367 1368 dispatch_async_f(dispatchQueue(), this, disconnectAndDeleteAVFWrapper); 1369} 1370 1371static void destroyAVFWrapper(void* context) 1372{ 1373 ASSERT(isMainThread()); 1374 ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue()); 1375 AVFWrapper* avfWrapper = static_cast<AVFWrapper*>(context); 1376 if (!avfWrapper) 1377 return; 1378 1379 delete avfWrapper; 1380} 1381 1382void AVFWrapper::disconnectAndDeleteAVFWrapper(void* context) 1383{ 1384 AVFWrapper* avfWrapper = static_cast<AVFWrapper*>(context); 1385 1386 LOG(Media, "AVFWrapper::disconnectAndDeleteAVFWrapper(%p)", avfWrapper); 1387 1388 if (avfWrapper->avPlayerItem()) { 1389 CFNotificationCenterRef center = CFNotificationCenterGetLocalCenter(); 1390 CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemDidPlayToEndTimeNotification, avfWrapper->avPlayerItem()); 1391 CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemStatusChangedNotification, avfWrapper->avPlayerItem()); 1392 CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemTracksChangedNotification, avfWrapper->avPlayerItem()); 1393 CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemSeekableTimeRangesChangedNotification, avfWrapper->avPlayerItem()); 1394 CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemLoadedTimeRangesChangedNotification, avfWrapper->avPlayerItem()); 1395 CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemPresentationSizeChangedNotification, 
avfWrapper->avPlayerItem()); 1396 CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemIsPlaybackLikelyToKeepUpChangedNotification, avfWrapper->avPlayerItem()); 1397 CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemIsPlaybackBufferEmptyChangedNotification, avfWrapper->avPlayerItem()); 1398 CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemIsPlaybackBufferFullChangedNotification, avfWrapper->avPlayerItem()); 1399 CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemDurationChangedNotification, avfWrapper->avPlayerItem()); 1400 CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), CACFContextNeedsFlushNotification(), 0); 1401 } 1402 1403 if (avfWrapper->avPlayer()) { 1404 if (avfWrapper->timeObserver()) 1405 AVCFPlayerRemoveObserver(avfWrapper->avPlayer(), avfWrapper->timeObserver()); 1406 1407 CFNotificationCenterRemoveObserver(CFNotificationCenterGetLocalCenter(), avfWrapper->callbackContext(), AVCFPlayerRateChangedNotification, avfWrapper->avPlayer()); 1408 } 1409 1410#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT) 1411 AVCFPlayerItemRemoveOutput(avfWrapper->avPlayerItem(), avfWrapper->legibleOutput()); 1412#endif 1413 1414 // We must release the AVCFPlayer and other items on the same thread that created them. 
1415 dispatch_async_f(dispatch_get_main_queue(), context, destroyAVFWrapper); 1416} 1417 1418void AVFWrapper::createAssetForURL(const String& url, bool inheritURI) 1419{ 1420 ASSERT(!avAsset()); 1421 1422 RetainPtr<CFURLRef> urlRef = URL(ParsedURLString, url).createCFURL(); 1423 1424 RetainPtr<CFMutableDictionaryRef> optionsRef = adoptCF(CFDictionaryCreateMutable(kCFAllocatorDefault, 0, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks)); 1425 1426 if (inheritURI) 1427 CFDictionarySetValue(optionsRef.get(), AVCFURLAssetInheritURIQueryComponentFromReferencingURIKey, kCFBooleanTrue); 1428 1429 m_avAsset = adoptCF(AVCFURLAssetCreateWithURLAndOptions(kCFAllocatorDefault, urlRef.get(), optionsRef.get(), m_notificationQueue)); 1430 1431#if HAVE(AVFOUNDATION_LOADER_DELEGATE) 1432 AVCFAssetResourceLoaderCallbacks loaderCallbacks; 1433 loaderCallbacks.version = kAVCFAssetResourceLoader_CallbacksVersion_1; 1434 ASSERT(callbackContext()); 1435 loaderCallbacks.context = callbackContext(); 1436 loaderCallbacks.resourceLoaderShouldWaitForLoadingOfRequestedResource = AVFWrapper::resourceLoaderShouldWaitForLoadingOfRequestedResource; 1437 1438 RetainPtr<AVCFAssetResourceLoaderRef> resourceLoader = adoptCF(AVCFURLAssetGetResourceLoader(m_avAsset.get())); 1439 AVCFAssetResourceLoaderSetCallbacks(resourceLoader.get(), &loaderCallbacks, globalLoaderDelegateQueue()); 1440#endif 1441} 1442 1443void AVFWrapper::createPlayer(IDirect3DDevice9* d3dDevice) 1444{ 1445 ASSERT(isMainThread()); 1446 ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue()); 1447 ASSERT(avPlayerItem()); 1448 1449 if (avPlayer()) 1450 return; 1451 1452 RetainPtr<CFMutableDictionaryRef> optionsRef = adoptCF(CFDictionaryCreateMutable(kCFAllocatorDefault, 1, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks)); 1453 1454 if (d3dDevice) { 1455 // QI for an IDirect3DDevice9Ex interface, it is required to do HW video decoding. 
1456 COMPtr<IDirect3DDevice9Ex> d3dEx(Query, d3dDevice); 1457 m_d3dDevice = d3dEx; 1458 } else 1459 m_d3dDevice = 0; 1460 1461 if (m_d3dDevice && AVCFPlayerEnableHardwareAcceleratedVideoDecoderKey) 1462 CFDictionarySetValue(optionsRef.get(), AVCFPlayerEnableHardwareAcceleratedVideoDecoderKey, kCFBooleanTrue); 1463 1464#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT) 1465 CFDictionarySetValue(optionsRef.get(), AVCFPlayerAppliesMediaSelectionCriteriaAutomaticallyKey, kCFBooleanTrue); 1466#endif 1467 1468 // FIXME: We need a way to create a AVPlayer without an AVPlayerItem, see <rdar://problem/9877730>. 1469 AVCFPlayerRef playerRef = AVCFPlayerCreateWithPlayerItemAndOptions(kCFAllocatorDefault, avPlayerItem(), optionsRef.get(), m_notificationQueue); 1470 m_avPlayer = adoptCF(playerRef); 1471#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT) 1472 AVCFPlayerSetClosedCaptionDisplayEnabled(playerRef, FALSE); 1473#endif 1474 1475 if (m_d3dDevice && AVCFPlayerSetDirect3DDevicePtr()) 1476 AVCFPlayerSetDirect3DDevicePtr()(playerRef, m_d3dDevice.get()); 1477 1478 CFNotificationCenterRef center = CFNotificationCenterGetLocalCenter(); 1479 ASSERT(center); 1480 1481 CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerRateChangedNotification, playerRef, CFNotificationSuspensionBehaviorDeliverImmediately); 1482 1483 // Add a time observer, ask to be called infrequently because we don't really want periodic callbacks but 1484 // our observer will also be called whenever a seek happens. 
1485 const double veryLongInterval = 60*60*60*24*30; 1486 m_timeObserver = adoptCF(AVCFPlayerCreatePeriodicTimeObserverForInterval(playerRef, CMTimeMake(veryLongInterval, 10), m_notificationQueue, &periodicTimeObserverCallback, callbackContext())); 1487} 1488 1489void AVFWrapper::createPlayerItem() 1490{ 1491 ASSERT(isMainThread()); 1492 ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue()); 1493 ASSERT(avAsset()); 1494 1495 if (avPlayerItem()) 1496 return; 1497 1498 // Create the player item so we begin loading media data. 1499 AVCFPlayerItemRef itemRef = AVCFPlayerItemCreateWithAsset(kCFAllocatorDefault, avAsset(), m_notificationQueue); 1500 m_avPlayerItem = adoptCF(itemRef); 1501 1502 CFNotificationCenterRef center = CFNotificationCenterGetLocalCenter(); 1503 ASSERT(center); 1504 1505 CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemDidPlayToEndTimeNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately); 1506 CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemStatusChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately); 1507 CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemTracksChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately); 1508 CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemSeekableTimeRangesChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately); 1509 CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemLoadedTimeRangesChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately); 1510 CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemPresentationSizeChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately); 1511 
CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemIsPlaybackLikelyToKeepUpChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately); 1512 CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemIsPlaybackBufferEmptyChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately); 1513 CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemIsPlaybackBufferFullChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately); 1514 CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemDurationChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately); 1515 // FIXME: Are there other legible output things we need to register for? asset and hasEnabledAudio are not exposed by AVCF 1516 1517 CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, CACFContextNeedsFlushNotification(), 0, CFNotificationSuspensionBehaviorDeliverImmediately); 1518 1519#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT) 1520 const CFTimeInterval legibleOutputAdvanceInterval = 2; 1521 1522 m_legibleOutput = adoptCF(AVCFPlayerItemLegibleOutputCreateWithMediaSubtypesForNativeRepresentation(kCFAllocatorDefault, 0)); 1523 AVCFPlayerItemOutputSetSuppressPlayerRendering(m_legibleOutput.get(), TRUE); 1524 1525 AVCFPlayerItemLegibleOutputCallbacks callbackInfo; 1526 callbackInfo.version = kAVCFPlayerItemLegibleOutput_CallbacksVersion_1; 1527 ASSERT(callbackContext()); 1528 callbackInfo.context = callbackContext(); 1529 callbackInfo.legibleOutputCallback = AVFWrapper::legibleOutputCallback; 1530 1531 AVCFPlayerItemLegibleOutputSetCallbacks(m_legibleOutput.get(), &callbackInfo, dispatchQueue()); 1532 AVCFPlayerItemLegibleOutputSetAdvanceIntervalForCallbackInvocation(m_legibleOutput.get(), 
legibleOutputAdvanceInterval); 1533 AVCFPlayerItemLegibleOutputSetTextStylingResolution(m_legibleOutput.get(), AVCFPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly); 1534 AVCFPlayerItemAddOutput(m_avPlayerItem.get(), m_legibleOutput.get()); 1535#endif 1536} 1537 1538void AVFWrapper::periodicTimeObserverCallback(AVCFPlayerRef, CMTime cmTime, void* context) 1539{ 1540 MutexLocker locker(mapLock()); 1541 AVFWrapper* self = avfWrapperForCallbackContext(context); 1542 if (!self) { 1543 LOG(Media, "AVFWrapper::periodicTimeObserverCallback invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context)); 1544 return; 1545 } 1546 1547 double time = std::max(0.0, CMTimeGetSeconds(cmTime)); // Clamp to zero, negative values are sometimes reported. 1548 self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::PlayerTimeChanged, time); 1549} 1550 1551struct NotificationCallbackData { 1552 RetainPtr<CFStringRef> m_propertyName; 1553 void* m_context; 1554 1555 NotificationCallbackData(CFStringRef propertyName, void* context) 1556 : m_propertyName(propertyName), m_context(context) 1557 { 1558 } 1559}; 1560 1561void AVFWrapper::processNotification(void* context) 1562{ 1563 ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue()); 1564 ASSERT(context); 1565 1566 if (!context) 1567 return; 1568 1569 OwnPtr<NotificationCallbackData> notificationData = adoptPtr(reinterpret_cast<NotificationCallbackData*>(context)); 1570 1571 MutexLocker locker(mapLock()); 1572 AVFWrapper* self = avfWrapperForCallbackContext(notificationData->m_context); 1573 if (!self) { 1574 LOG(Media, "AVFWrapper::processNotification invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context)); 1575 return; 1576 } 1577 1578 CFStringRef propertyName = notificationData->m_propertyName.get(); 1579 1580 if (CFEqual(propertyName, AVCFPlayerItemDidPlayToEndTimeNotification)) 1581 
        // (continuation of AVFWrapper::processNotification — dispatch on the notification name)
        self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemDidPlayToEndTime);
1582     else if (CFEqual(propertyName, AVCFPlayerItemTracksChangedNotification))
1583         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemTracksChanged);
1584     else if (CFEqual(propertyName, AVCFPlayerItemStatusChangedNotification)) {
        // The item's asset is (re)captured on status changes; it may only become available
        // once the item reaches a ready state.
1585         AVCFURLAssetRef asset = AVCFPlayerItemGetAsset(self->avPlayerItem());
1586         if (asset)
1587             self->setAsset(asset);
1588         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemStatusChanged);
1589     } else if (CFEqual(propertyName, AVCFPlayerItemSeekableTimeRangesChangedNotification))
1590         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemSeekableTimeRangesChanged);
1591     else if (CFEqual(propertyName, AVCFPlayerItemLoadedTimeRangesChangedNotification))
1592         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemLoadedTimeRangesChanged);
1593     else if (CFEqual(propertyName, AVCFPlayerItemPresentationSizeChangedNotification))
1594         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemPresentationSizeChanged);
1595     else if (CFEqual(propertyName, AVCFPlayerItemIsPlaybackLikelyToKeepUpChangedNotification))
1596         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemIsPlaybackLikelyToKeepUpChanged);
1597     else if (CFEqual(propertyName, AVCFPlayerItemIsPlaybackBufferEmptyChangedNotification))
1598         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemIsPlaybackBufferEmptyChanged);
1599     else if (CFEqual(propertyName, AVCFPlayerItemIsPlaybackBufferFullChangedNotification))
1600         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemIsPlaybackBufferFullChanged);
1601     else if (CFEqual(propertyName, AVCFPlayerRateChangedNotification))
1602         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::PlayerRateChanged);
1603     else if (CFEqual(propertyName, CACFContextNeedsFlushNotification()))
1604         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ContentsNeedsDisplay);
1605     else if (CFEqual(propertyName, AVCFPlayerItemDurationChangedNotification))
1606         self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::DurationChanged);
1607     else
1608         ASSERT_NOT_REACHED();
1609 }
1610
// CFNotificationCenter callback (arbitrary thread): packages the notification name and observer
// context, then re-dispatches to the main queue. The NotificationCallbackData is deliberately
// leaked here and re-adopted by processNotification().
1611 void AVFWrapper::notificationCallback(CFNotificationCenterRef, void* observer, CFStringRef propertyName, const void* object, CFDictionaryRef)
1612 {
1613 #if !LOG_DISABLED
1614     char notificationName[256];
1615     CFStringGetCString(propertyName, notificationName, sizeof(notificationName), kCFStringEncodingASCII);
1616     LOG(Media, "AVFWrapper::notificationCallback(if=%d) %s", reinterpret_cast<uintptr_t>(observer), notificationName);
1617 #endif
1618
1619     OwnPtr<NotificationCallbackData> notificationData = adoptPtr(new NotificationCallbackData(propertyName, observer));
1620
1621     dispatch_async_f(dispatch_get_main_queue(), notificationData.leakPtr(), processNotification);
1622 }
1623
// Completion callback for the asynchronous "playable" property load started by checkPlayability().
1624 void AVFWrapper::loadPlayableCompletionCallback(AVCFAssetRef, void* context)
1625 {
1626     MutexLocker locker(mapLock());
1627     AVFWrapper* self = avfWrapperForCallbackContext(context);
1628     if (!self) {
1629         LOG(Media, "AVFWrapper::loadPlayableCompletionCallback invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
1630         return;
1631     }
1632
1633     LOG(Media, "AVFWrapper::loadPlayableCompletionCallback(%p)", self);
1634     self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
1635 }
1636
// Kicks off an asynchronous load of the asset's "playable" property.
1637 void AVFWrapper::checkPlayability()
1638 {
1639     LOG(Media, "AVFWrapper::checkPlayability(%p)", this);
1640
    // The property-name array is built once and intentionally never released.
1641     static CFArrayRef propertyKeyName;
1642
    // (continuation of AVFWrapper::checkPlayability)
    if (!propertyKeyName) {
1643         static const CFStringRef keyNames[] = {
1644             AVCFAssetPropertyPlayable
1645         };
1646         propertyKeyName = CFArrayCreate(0, (const void**)keyNames, sizeof(keyNames) / sizeof(keyNames[0]), &kCFTypeArrayCallBacks);
1647     }
1648
1649     AVCFAssetLoadValuesAsynchronouslyForProperties(avAsset(), propertyKeyName, loadPlayableCompletionCallback, callbackContext());
1650 }
1651
// Completion callback for the asynchronous metadata load started by beginLoadingMetadata().
1652 void AVFWrapper::loadMetadataCompletionCallback(AVCFAssetRef, void* context)
1653 {
1654     MutexLocker locker(mapLock());
1655     AVFWrapper* self = avfWrapperForCallbackContext(context);
1656     if (!self) {
1657         LOG(Media, "AVFWrapper::loadMetadataCompletionCallback invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
1658         return;
1659     }
1660
1661     LOG(Media, "AVFWrapper::loadMetadataCompletionCallback(%p)", self);
1662     self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetMetadataLoaded);
1663 }
1664
// Requests asynchronous loading of the standard metadata properties for the current asset.
1665 void AVFWrapper::beginLoadingMetadata()
1666 {
1667     ASSERT(avAsset());
1668     LOG(Media, "AVFWrapper::beginLoadingMetadata(%p) - requesting metadata loading", this);
1669     AVCFAssetLoadValuesAsynchronouslyForProperties(avAsset(), metadataKeyNames(), loadMetadataCompletionCallback, callbackContext());
1670 }
1671
// Completion callback for seekToTime(); forwards whether the seek actually finished.
1672 void AVFWrapper::seekCompletedCallback(AVCFPlayerItemRef, Boolean finished, void* context)
1673 {
1674     MutexLocker locker(mapLock());
1675     AVFWrapper* self = avfWrapperForCallbackContext(context);
1676     if (!self) {
1677         LOG(Media, "AVFWrapper::seekCompletedCallback invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
1678         return;
1679     }
1680
1681     LOG(Media, "AVFWrapper::seekCompletedCallback(%p)", self);
1682     self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::SeekCompleted, static_cast<bool>(finished));
1683 }
1684
// Asynchronously seeks the player item within the given tolerances; seekCompletedCallback
// fires when the seek settles.
1685 void AVFWrapper::seekToTime(double time, double negativeTolerance, double positiveTolerance)
1686 {
1687     ASSERT(avPlayerItem());
    // Timescale 600 matches the value used for snapshots below; divisible by common frame rates.
1688     CMTime cmTime = CMTimeMakeWithSeconds(time, 600);
1689     CMTime cmBefore = CMTimeMakeWithSeconds(negativeTolerance, 600);
1690     CMTime cmAfter = CMTimeMakeWithSeconds(positiveTolerance, 600);
1691     AVCFPlayerItemSeekToTimeWithToleranceAndCompletionCallback(avPlayerItem(), cmTime, cmBefore, cmAfter, &seekCompletedCallback, callbackContext());
1692 }
1693
1694 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
// Carries one batch of caption cues from the legible-output queue to the main thread.
// Heap-allocated; ownership passes through dispatch_async_f to processCue().
1695 struct LegibleOutputData {
1696     RetainPtr<CFArrayRef> m_attributedStrings;
1697     RetainPtr<CFArrayRef> m_samples;
1698     double m_time;
1699     void* m_context;
1700
1701     LegibleOutputData(CFArrayRef strings, CFArrayRef samples, double time, void* context)
1702         : m_attributedStrings(strings), m_samples(samples), m_time(time), m_context(context)
1703     {
1704     }
1705 };
1706
// Main-thread half of caption delivery: takes ownership of the LegibleOutputData and hands the
// cues to the current text track, if one is set.
1707 void AVFWrapper::processCue(void* context)
1708 {
1709     ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
1710     ASSERT(context);
1711
1712     if (!context)
1713         return;
1714
1715     std::unique_ptr<LegibleOutputData> legibleOutputData(reinterpret_cast<LegibleOutputData*>(context));
1716
1717     MutexLocker locker(mapLock());
1718     AVFWrapper* self = avfWrapperForCallbackContext(legibleOutputData->m_context);
1719     if (!self) {
1720         LOG(Media, "AVFWrapper::processCue invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
1721         return;
1722     }
1723
1724     if (!self->m_currentTextTrack)
1725         return;
1726
1727     self->m_currentTextTrack->processCue(legibleOutputData->m_attributedStrings.get(), legibleOutputData->m_samples.get(), legibleOutputData->m_time);
1728 }
1729
// Legible-output callback (AVCF dispatch queue): repackages the cue data and re-dispatches it
// to the main queue for processCue().
1730 void AVFWrapper::legibleOutputCallback(void* context, AVCFPlayerItemLegibleOutputRef legibleOutput, CFArrayRef attributedStrings, CFArrayRef nativeSampleBuffers, CMTime itemTime)
1731 {
1732     ASSERT(dispatch_get_main_queue() != dispatch_get_current_queue());
1733     MutexLocker locker(mapLock());
1734     AVFWrapper* self = avfWrapperForCallbackContext(context);
1735     if (!self) {
1736
        // (continuation of AVFWrapper::legibleOutputCallback)
        LOG(Media, "AVFWrapper::legibleOutputCallback invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
1737         return;
1738     }
1739
1740     LOG(Media, "AVFWrapper::legibleOutputCallback(%p)", self);
1741
1742     ASSERT(legibleOutput == self->m_legibleOutput);
1743
1744     auto legibleOutputData = std::make_unique<LegibleOutputData>(attributedStrings, nativeSampleBuffers, CMTimeGetSeconds(itemTime), context);
1745
    // Ownership of legibleOutputData passes to processCue() on the main queue.
1746     dispatch_async_f(dispatch_get_main_queue(), legibleOutputData.release(), processCue);
1747 }
1748 #endif
1749
1750 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Carries a resource-loading request from the loader queue to the main thread.
// Heap-allocated; ownership passes through dispatch_async_f.
1751 struct LoadRequestData {
1752     RetainPtr<AVCFAssetResourceLoadingRequestRef> m_request;
1753     void* m_context;
1754
1755     LoadRequestData(AVCFAssetResourceLoadingRequestRef request, void* context)
1756         : m_request(request), m_context(context)
1757     {
1758     }
1759 };
1760
// Main-thread half of resource loading: takes ownership of the LoadRequestData and either
// starts servicing the request or fails it immediately.
1761 void AVFWrapper::processShouldWaitForLoadingOfResource(void* context)
1762 {
1763     ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
1764     ASSERT(context);
1765
1766     if (!context)
1767         return;
1768
1769     std::unique_ptr<LoadRequestData> loadRequestData(reinterpret_cast<LoadRequestData*>(context));
1770
1771     MutexLocker locker(mapLock());
1772     AVFWrapper* self = avfWrapperForCallbackContext(loadRequestData->m_context);
1773     if (!self) {
1774         LOG(Media, "AVFWrapper::processShouldWaitForLoadingOfResource invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
        // The wrapper is gone, so the request can never be serviced; fail it now.
1775         AVCFAssetResourceLoadingRequestFinishLoadingWithError(loadRequestData->m_request.get(), nullptr);
1776         return;
1777     }
1778
1779     if (!self->shouldWaitForLoadingOfResource(loadRequestData->m_request.get()))
1780         AVCFAssetResourceLoadingRequestFinishLoadingWithError(loadRequestData->m_request.get(), nullptr);
1781 }
1782
// Decides whether WebCore will service an AVCF resource-loading request. Returns true when the
// request will be completed asynchronously (key requests under ENCRYPTED_MEDIA_V2, and ordinary
// resource loads handed to WebCoreAVCFResourceLoader); false tells the caller to fail it.
1783 bool AVFWrapper::shouldWaitForLoadingOfResource(AVCFAssetResourceLoadingRequestRef avRequest)
1784 {
1785 #if ENABLE(ENCRYPTED_MEDIA_V2)
    // Get-style CF accessors do not transfer ownership; assigning the result straight into a
    // RetainPtr (which retains on assignment) is the correct pattern here, unlike the Copy
    // calls below which are wrapped in adoptCF().
1786     RetainPtr<CFURLRequestRef> urlRequest = AVCFAssetResourceLoadingRequestGetURLRequest(avRequest);
1787     RetainPtr<CFURLRef> requestURL = CFURLRequestGetURL(urlRequest.get());
1788     RetainPtr<CFStringRef> schemeRef = adoptCF(CFURLCopyScheme(requestURL.get()));
1789     String scheme = schemeRef.get();
1790
1791     if (scheme == "skd") {
1792         RetainPtr<CFURLRef> absoluteURL = adoptCF(CFURLCopyAbsoluteURL(requestURL.get()));
1793         RetainPtr<CFStringRef> keyURIRef = CFURLGetString(absoluteURL.get());
1794         String keyURI = keyURIRef.get();
1795
1796         // Create an initData with the following layout:
1797         // [4 bytes: keyURI size], [keyURI size bytes: keyURI]
1798         unsigned keyURISize = keyURI.length() * sizeof(UChar);
1799         RefPtr<ArrayBuffer> initDataBuffer = ArrayBuffer::create(4 + keyURISize, 1);
1800         RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer, 0, initDataBuffer->byteLength());
1801         initDataView->set<uint32_t>(0, keyURISize, true);
1802
1803         RefPtr<Uint16Array> keyURIArray = Uint16Array::create(initDataBuffer, 4, keyURI.length());
        // NOTE(review): "keyURI.length() / sizeof(unsigned char)" equals keyURI.length();
        // if a different element count was intended the divisor looks wrong — confirm against
        // the Mac port's equivalent code.
1804         keyURIArray->setRange(reinterpret_cast<const uint16_t*>(StringView(keyURI).upconvertedCharacters().get()), keyURI.length() / sizeof(unsigned char), 0);
1805
1806         RefPtr<Uint8Array> initData = Uint8Array::create(initDataBuffer, 0, initDataBuffer->byteLength());
        // Ask the page whether it wants to handle the key request; if not, fail the load.
1807         if (!m_owner->player()->keyNeeded(initData.get()))
1808             return false;
1809
        // Park the request until the session supplies a key (see takeRequestForKeyURI()).
1810         m_keyURIToRequestMap.set(keyURI, avRequest);
1811         return true;
1812     }
1813 #endif
1814
1815     RefPtr<WebCoreAVCFResourceLoader> resourceLoader = WebCoreAVCFResourceLoader::create(m_owner, avRequest);
1816     m_owner->m_resourceLoaderMap.add(avRequest, resourceLoader);
1817     resourceLoader->startLoading();
1818     return true;
1819 }
1820
// Resource-loader delegate callback (loader queue): re-dispatches the request to the main
// thread and reports that the request will be waited on (unless the wrapper is already gone).
1821 Boolean AVFWrapper::resourceLoaderShouldWaitForLoadingOfRequestedResource(AVCFAssetResourceLoaderRef resourceLoader, AVCFAssetResourceLoadingRequestRef loadingRequest, void *context)
1822 {
1823     ASSERT(dispatch_get_main_queue() != dispatch_get_current_queue());
1824     MutexLocker
    // (continuation of AVFWrapper::resourceLoaderShouldWaitForLoadingOfRequestedResource)
    locker(mapLock());
1825     AVFWrapper* self = avfWrapperForCallbackContext(context);
1826     if (!self) {
1827         LOG(Media, "AVFWrapper::resourceLoaderShouldWaitForLoadingOfRequestedResource invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
1828         return false;
1829     }
1830
1831     LOG(Media, "AVFWrapper::resourceLoaderShouldWaitForLoadingOfRequestedResource(%p)", self);
1832
1833     auto loadRequestData = std::make_unique<LoadRequestData>(loadingRequest, context);
1834
    // Ownership of loadRequestData passes to processShouldWaitForLoadingOfResource() on the main queue.
1835     dispatch_async_f(dispatch_get_main_queue(), loadRequestData.release(), processShouldWaitForLoadingOfResource);
1836
1837     return true;
1838 }
1839 #endif
1840
// Replaces the wrapped asset, cancelling any loads outstanding on the old one.
1841 void AVFWrapper::setAsset(AVCFURLAssetRef asset)
1842 {
1843     if (asset == avAsset())
1844         return;
1845
1846     AVCFAssetCancelLoading(avAsset());
    // NOTE(review): adoptCF() takes ownership without retaining, yet processNotification()
    // passes in a Get-style (non-owned) reference — verify the ownership convention expected
    // of callers; an over-release looks possible here.
1847     m_avAsset = adoptCF(asset);
1848 }
1849
// Returns the platform layer hosting the AVCF video layer, lazily building the wrapper layer
// hierarchy (LayerClient + PlatformCALayerWin + CA video layer) on first use.
1850 PlatformLayer* AVFWrapper::platformLayer()
1851 {
1852     if (m_videoLayerWrapper)
1853         return m_videoLayerWrapper->platformLayer();
1854
1855     if (!videoLayer())
1856         return 0;
1857
1858     // Create a PlatformCALayer so we can resize the video layer to match the element size.
1859     m_layerClient = adoptPtr(new LayerClient(this));
1860     if (!m_layerClient)
1861         return 0;
1862
1863     m_videoLayerWrapper = PlatformCALayerWin::create(PlatformCALayer::LayerTypeLayer, m_layerClient.get());
1864     if (!m_videoLayerWrapper)
1865         return 0;
1866
1867     m_caVideoLayer = adoptCF(AVCFPlayerLayerCopyCACFLayer(m_avCFVideoLayer.get()));
1868
1869     CACFLayerInsertSublayer(m_videoLayerWrapper->platformLayer(), m_caVideoLayer.get(), 0);
1870     m_videoLayerWrapper->setAnchorPoint(FloatPoint3D());
1871     m_videoLayerWrapper->setNeedsLayout();
1872     updateVideoLayerGravity();
1873
1874     return m_videoLayerWrapper->platformLayer();
1875 }
1876
// Creates the AVCF player layer for the current player (main thread only; no-op if there is
// no player or the layer already exists).
1877 void AVFWrapper::createAVCFVideoLayer()
1878 {
1879     ASSERT(isMainThread());
1880     ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
1881     if (!avPlayer() || m_avCFVideoLayer)
1882         return;
1883
1884     // The layer will get hooked up via RenderLayerBacking::updateConfiguration().
1885     m_avCFVideoLayer = adoptCF(AVCFPlayerLayerCreateWithAVCFPlayer(kCFAllocatorDefault, avPlayer(), m_notificationQueue));
1886     LOG(Media, "AVFWrapper::createAVCFVideoLayer(%p) - returning %p", this, videoLayer());
1887 }
1888
// Tears down the video layer hierarchy and detaches the AVCF layer from its player.
1889 void AVFWrapper::destroyVideoLayer()
1890 {
1891     ASSERT(isMainThread());
1892     ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
1893     LOG(Media, "AVFWrapper::destroyVideoLayer(%p)", this);
1894     m_layerClient = nullptr;
1895     m_caVideoLayer = 0;
1896     m_videoLayerWrapper = 0;
1897     if (!m_avCFVideoLayer.get())
1898         return;
1899
1900     AVCFPlayerLayerSetPlayer((AVCFPlayerLayerRef)m_avCFVideoLayer.get(), 0);
1901     m_avCFVideoLayer = 0;
1902 }
1903
// Marks the wrapper layer as needing a commit, if it exists.
1904 void AVFWrapper::setVideoLayerNeedsCommit()
1905 {
1906     if (m_videoLayerWrapper)
1907         m_videoLayerWrapper->setNeedsCommit();
1908 }
1909
// Shows or hides the wrapper layer, if it exists.
1910 void AVFWrapper::setVideoLayerHidden(bool value)
1911 {
1912     if (m_videoLayerWrapper)
1913         m_videoLayerWrapper->setHidden(value);
1914 }
1915
// Lazily creates the image generator used for video-frame snapshots (main thread only).
1916 void AVFWrapper::createImageGenerator()
1917 {
1918     ASSERT(isMainThread());
1919
    // (continuation of AVFWrapper::createImageGenerator)
    ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
1920     if (!avAsset() || m_imageGenerator)
1921         return;
1922
1923     m_imageGenerator = adoptCF(AVCFAssetImageGeneratorCreateWithAsset(kCFAllocatorDefault, avAsset()));
1924
1925     AVCFAssetImageGeneratorSetApertureMode(m_imageGenerator.get(), AVCFAssetImageGeneratorApertureModeCleanAperture);
    // Zero tolerance on both sides so snapshots land exactly on the requested time.
1926     AVCFAssetImageGeneratorSetRequestedTimeToleranceBefore(m_imageGenerator.get(), kCMTimeZero);
1927     AVCFAssetImageGeneratorSetRequestedTimeToleranceAfter(m_imageGenerator.get(), kCMTimeZero);
1928     AVCFAssetImageGeneratorSetAppliesPreferredTrackTransform(m_imageGenerator.get(), true);
1929
1930     LOG(Media, "AVFWrapper::createImageGenerator(%p) - returning %p", this, m_imageGenerator.get());
1931 }
1932
// Releases the image generator (main thread only).
1933 void AVFWrapper::destroyImageGenerator()
1934 {
1935     ASSERT(isMainThread());
1936     ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
1937     LOG(Media, "AVFWrapper::destroyImageGenerator(%p)", this);
1938     m_imageGenerator = 0;
1939 }
1940
// Synchronously snapshots the video frame at |time|, scaled to fit |rect|, and converts the
// result to a device-RGB image. Returns null when no generator exists.
1941 RetainPtr<CGImageRef> AVFWrapper::createImageForTimeInRect(float time, const IntRect& rect)
1942 {
1943     if (!m_imageGenerator)
1944         return 0;
1945
1946 #if !LOG_DISABLED
1947     double start = monotonicallyIncreasingTime();
1948 #endif
1949
1950     AVCFAssetImageGeneratorSetMaximumSize(m_imageGenerator.get(), CGSize(rect.size()));
1951     RetainPtr<CGImageRef> rawimage = adoptCF(AVCFAssetImageGeneratorCopyCGImageAtTime(m_imageGenerator.get(), CMTimeMakeWithSeconds(time, 600), 0, 0));
1952     RetainPtr<CGImageRef> image = adoptCF(CGImageCreateCopyWithColorSpace(rawimage.get(), adoptCF(CGColorSpaceCreateDeviceRGB()).get()));
1953
1954 #if !LOG_DISABLED
1955     double duration = monotonicallyIncreasingTime() - start;
1956     LOG(Media, "AVFWrapper::createImageForTimeInRect(%p) - creating image took %.4f", this, narrowPrecisionToFloat(duration));
1957 #endif
1958
1959     return image;
1960 }
1961
1962 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// Returns the legible (caption/subtitle) media selection group, or 0 when the asset or its
// media-selection-options property is not yet loaded.
1963 AVCFMediaSelectionGroupRef AVFWrapper::safeMediaSelectionGroupForLegibleMedia() const
1964 {
1965     if (!avAsset())
1966         return 0;
1967
1968     if (AVCFAssetGetStatusOfValueForProperty(avAsset(), AVCFAssetPropertyAvailableMediaCharacteristicsWithMediaSelectionOptions, 0) != AVCFPropertyValueStatusLoaded)
1969         return 0;
1970
1971     return AVCFAssetGetSelectionGroupForMediaCharacteristic(avAsset(), AVCFMediaCharacteristicLegible);
1972 }
1973 #endif
1974
// Intentionally empty: the needed AVCF API is not implemented yet (see FIXME below).
1975 void AVFWrapper::updateVideoLayerGravity()
1976 {
1977     // We should call AVCFPlayerLayerSetVideoGravity() here, but it is not yet implemented.
1978     // FIXME: <rdar://problem/14884340>
1979 }
1980
1981 #if ENABLE(ENCRYPTED_MEDIA_V2)
// Removes and returns the pending loading request registered for |keyURI|, if any
// (stashed by shouldWaitForLoadingOfResource()).
1982 RetainPtr<AVCFAssetResourceLoadingRequestRef> AVFWrapper::takeRequestForKeyURI(const String& keyURI)
1983 {
1984     return m_keyURIToRequestMap.take(keyURI);
1985 }
1986 #endif
1987
// Keeps the CA video layer positioned and sized to fill the wrapper layer on every layout pass.
1988 void LayerClient::platformCALayerLayoutSublayersOfLayer(PlatformCALayer* wrapperLayer)
1989 {
1990     ASSERT(m_parent);
1991     ASSERT(m_parent->videoLayerWrapper() == wrapperLayer->platformLayer());
1992
1993     CGRect bounds = wrapperLayer->bounds();
1994     CGPoint anchor = CACFLayerGetAnchorPoint(m_parent->caVideoLayer());
1995     FloatPoint position(bounds.size.width * anchor.x, bounds.size.height * anchor.y);
1996
1997     CACFLayerSetPosition(m_parent->caVideoLayer(), position);
1998     CACFLayerSetBounds(m_parent->caVideoLayer(), bounds);
1999
2000     AVCFPlayerLayerSetFrame(m_parent->videoLayer(), CGRectMake(0, 0, bounds.size.width, bounds.size.height));
2001 }
2002
2003 } // namespace WebCore
2004
2005 #else
2006 // AVFoundation should always be enabled for Apple production builds.
2007 #if __PRODUCTION__ && !USE(AVFOUNDATION)
2008 #error AVFoundation is not enabled!
2009 #endif // __PRODUCTION__ && !USE(AVFOUNDATION)
2010 #endif // USE(AVFOUNDATION)
2011 #endif // PLATFORM(WIN) && ENABLE(VIDEO)
2012