1/* 2 * Copyright (C) 2011-2014 Apple Inc. All rights reserved. 3 * 4 * Redistribution and use in source and binary forms, with or without 5 * modification, are permitted provided that the following conditions 6 * are met: 7 * 1. Redistributions of source code must retain the above copyright 8 * notice, this list of conditions and the following disclaimer. 9 * 2. Redistributions in binary form must reproduce the above copyright 10 * notice, this list of conditions and the following disclaimer in the 11 * documentation and/or other materials provided with the distribution. 12 * 13 * THIS SOFTWARE IS PROVIDED BY APPLE COMPUTER, INC. ``AS IS'' AND ANY 14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR 16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE COMPUTER, INC. OR 17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, 18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, 19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR 20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY 21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
 */

#include "config.h"

#if PLATFORM(WIN) && ENABLE(VIDEO)

#if USE(AVFOUNDATION)

#include "MediaPlayerPrivateAVFoundationCF.h"

#include "ApplicationCacheResource.h"
#include "COMPtr.h"
#include "FloatConversion.h"
#include "FrameView.h"
#include "GraphicsContext.h"
#include "InbandTextTrackPrivateAVCF.h"
#include "KURL.h"
#include "Logging.h"
#include "PlatformCALayer.h"
#include "SoftLinking.h"
#include "TimeRanges.h"

#include <AVFoundationCF/AVCFPlayerItem.h>
#include <AVFoundationCF/AVCFPlayerItemLegibleOutput.h>
#include <AVFoundationCF/AVCFPlayerLayer.h>
#include <AVFoundationCF/AVFoundationCF.h>
#include <CoreMedia/CoreMedia.h>
#include <delayimp.h>
#include <dispatch/dispatch.h>
#include <WebKitQuartzCoreAdditions/WKCACFTypes.h>
#include <wtf/HashMap.h>
#include <wtf/Threading.h>
#include <wtf/text/CString.h>

// The softlink header files must be included after the AVCF and CoreMedia header files.
#include "AVFoundationCFSoftLinking.h"
#include "CoreMediaSoftLinking.h"

// We don't bother softlinking against libdispatch since it's already been loaded by AAS.
#ifdef DEBUG_ALL
#pragma comment(lib, "libdispatch_debug.lib")
#else
#pragma comment(lib, "libdispatch.lib")
#endif

using namespace std;

namespace WebCore {

class LayerClient;

// AVFWrapper owns the AVFoundationCF objects (player, asset, player item, video layer,
// image generator, observers) on behalf of MediaPlayerPrivateAVFoundationCF.
// Instances are registered in a static map keyed by an integer m_objectID (see
// callbackContext()/map()/avfWrapperForCallbackContext()); the static AVCF callbacks
// carry that integer rather than a raw pointer, so a callback arriving after
// scheduleDisconnectAndDelete() simply fails the map lookup instead of touching a
// destroyed wrapper. Destruction is scheduled onto m_notificationQueue (see the
// comment in cancelLoad() below).
class AVFWrapper {
public:
    AVFWrapper(MediaPlayerPrivateAVFoundationCF*);
    ~AVFWrapper();

    // Asynchronously tears the wrapper down on the notification dispatch queue.
    void scheduleDisconnectAndDelete();

    void createAVCFVideoLayer();
    void destroyVideoLayer();
    PlatformLayer* platformLayer();

    CACFLayerRef caVideoLayer() { return m_caVideoLayer.get(); }
    PlatformLayer* videoLayerWrapper() { return m_videoLayerWrapper ? m_videoLayerWrapper->platformLayer() : 0; };
    void setVideoLayerNeedsCommit();
    void setVideoLayerHidden(bool);

    void createImageGenerator();
    void destroyImageGenerator();
    // Snapshots the frame at the given media time; used by the software paint path.
    RetainPtr<CGImageRef> createImageForTimeInRect(float, const IntRect&);

    void createAssetForURL(const String& url, bool inheritURI);
    void setAsset(AVCFURLAssetRef);

    void createPlayer(IDirect3DDevice9*);
    void createPlayerItem();

    void checkPlayability();
    void beginLoadingMetadata();

    void seekToTime(float);

    void setCurrentTrack(InbandTextTrackPrivateAVF*);
    InbandTextTrackPrivateAVF* currentTrack() const { return m_currentTrack; }

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Static AVCF callbacks; 'context' is the integer object ID, not a pointer.
    static void legibleOutputCallback(void* context, AVCFPlayerItemLegibleOutputRef, CFArrayRef attributedString, CFArrayRef nativeSampleBuffers, CMTime itemTime);
    static void processCue(void* context);
#endif
    static void loadMetadataCompletionCallback(AVCFAssetRef, void*);
    static void loadPlayableCompletionCallback(AVCFAssetRef, void*);
    static void periodicTimeObserverCallback(AVCFPlayerRef, CMTime, void*);
    static void seekCompletedCallback(AVCFPlayerItemRef, Boolean, void*);
    static void notificationCallback(CFNotificationCenterRef, void*, CFStringRef, const void*, CFDictionaryRef);
    static void processNotification(void* context);

    inline AVCFPlayerLayerRef videoLayer() const { return (AVCFPlayerLayerRef)m_avCFVideoLayer.get(); }
    inline AVCFPlayerRef avPlayer() const { return (AVCFPlayerRef)m_avPlayer.get(); }
    inline AVCFURLAssetRef avAsset() const { return (AVCFURLAssetRef)m_avAsset.get(); }
    inline AVCFPlayerItemRef avPlayerItem() const { return (AVCFPlayerItemRef)m_avPlayerItem.get(); }
    inline AVCFPlayerObserverRef timeObserver() const { return (AVCFPlayerObserverRef)m_timeObserver.get(); }
    inline AVCFAssetImageGeneratorRef imageGenerator() const { return m_imageGenerator.get(); }
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    inline AVCFPlayerItemLegibleOutputRef legibleOutput() const { return m_legibleOutput.get(); }
    AVCFMediaSelectionGroupRef safeMediaSelectionGroupForLegibleMedia() const;
#endif
    inline dispatch_queue_t dispatchQueue() const { return m_notificationQueue; }

private:
    // The callback context is the object ID widened to a pointer, never dereferenced.
    inline void* callbackContext() const { return reinterpret_cast<void*>(m_objectID); }

    static Mutex& mapLock();
    static HashMap<uintptr_t, AVFWrapper*>& map();
    static AVFWrapper* avfWrapperForCallbackContext(void*);
    void addToMap();
    void removeFromMap() const;

    static void disconnectAndDeleteAVFWrapper(void*);

    static uintptr_t s_nextAVFWrapperObjectID;
    uintptr_t m_objectID;

    MediaPlayerPrivateAVFoundationCF* m_owner;

    RetainPtr<AVCFPlayerRef> m_avPlayer;
    RetainPtr<AVCFURLAssetRef> m_avAsset;
    RetainPtr<AVCFPlayerItemRef> m_avPlayerItem;
    RetainPtr<AVCFPlayerLayerRef> m_avCFVideoLayer;
    RetainPtr<AVCFPlayerObserverRef> m_timeObserver;
    RetainPtr<AVCFAssetImageGeneratorRef> m_imageGenerator;
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    RetainPtr<AVCFPlayerItemLegibleOutputRef> m_legibleOutput;
    RetainPtr<AVCFMediaSelectionGroupRef> m_selectionGroup;
#endif
    dispatch_queue_t m_notificationQueue;

    mutable RetainPtr<CACFLayerRef> m_caVideoLayer;
    RefPtr<PlatformCALayer> m_videoLayerWrapper;

    OwnPtr<LayerClient> m_layerClient;
    COMPtr<IDirect3DDevice9Ex> m_d3dDevice;

    // Not owned; presumably managed by the text-track machinery — confirm ownership.
    InbandTextTrackPrivateAVF* m_currentTrack;
};

uintptr_t AVFWrapper::s_nextAVFWrapperObjectID;

// Minimal PlatformCALayerClient whose only active duty is sublayer layout for the
// video layer wrapper; every painting/debug hook is a stub.
class LayerClient : public PlatformCALayerClient {
public:
    LayerClient(AVFWrapper* parent) : m_parent(parent) { }
    virtual ~LayerClient() { m_parent = 0; }

private:
    virtual void platformCALayerLayoutSublayersOfLayer(PlatformCALayer*);
    virtual bool platformCALayerRespondsToLayoutChanges() const { return true; }

    virtual void platformCALayerAnimationStarted(CFTimeInterval beginTime) { }
    virtual GraphicsLayer::CompositingCoordinatesOrientation platformCALayerContentsOrientation() const { return GraphicsLayer::CompositingCoordinatesBottomUp; }
    virtual void platformCALayerPaintContents(GraphicsContext&, const IntRect& inClip) { }
    virtual bool platformCALayerShowDebugBorders() const { return false; }
    virtual bool platformCALayerShowRepaintCounter(PlatformCALayer*) const { return false; }
    virtual int platformCALayerIncrementRepaintCount() { return 0; }

    virtual bool platformCALayerContentsOpaque() const { return false; }
    virtual bool platformCALayerDrawsContent() const { return false; }
    virtual void platformCALayerLayerDidDisplay(PlatformLayer*) { }
    virtual void platformCALayerDidCreateTiles(const Vector<FloatRect>&) { }
    virtual float platformCALayerDeviceScaleFactor() { return 1; }

    AVFWrapper* m_parent;
};

#if !LOG_DISABLED
// Logging helper: renders a bool as "true"/"false" for LOG format strings.
static const char* boolString(bool val)
{
    return val ?
"true" : "false"; 200} 201#endif 202 203static CFArrayRef createMetadataKeyNames() 204{ 205 static const CFStringRef keyNames[] = { 206 AVCFAssetPropertyDuration, 207 AVCFAssetPropertyNaturalSize, 208 AVCFAssetPropertyPreferredTransform, 209 AVCFAssetPropertyPreferredRate, 210 AVCFAssetPropertyPlayable, 211 AVCFAssetPropertyTracks, 212#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) 213 AVCFAssetPropertyAvailableMediaCharacteristicsWithMediaSelectionOptions, 214#endif 215 }; 216 217 return CFArrayCreate(0, (const void**)keyNames, sizeof(keyNames) / sizeof(keyNames[0]), &kCFTypeArrayCallBacks); 218} 219 220static CFArrayRef metadataKeyNames() 221{ 222 static CFArrayRef keys = createMetadataKeyNames(); 223 return keys; 224} 225 226// FIXME: It would be better if AVCFTimedMetadataGroup.h exported this key. 227static CFStringRef CMTimeRangeStartKey() 228{ 229 DEFINE_STATIC_LOCAL(CFStringRef, key, (CFSTR("start"))); 230 return key; 231} 232 233// FIXME: It would be better if AVCFTimedMetadataGroup.h exported this key. 234static CFStringRef CMTimeRangeDurationKey() 235{ 236 DEFINE_STATIC_LOCAL(CFStringRef, key, (CFSTR("duration"))); 237 return key; 238} 239 240// FIXME: It would be better if AVCF exported this notification name. 241static CFStringRef CACFContextNeedsFlushNotification() 242{ 243 DEFINE_STATIC_LOCAL(CFStringRef, name, (CFSTR("kCACFContextNeedsFlushNotification"))); 244 return name; 245} 246 247// Define AVCF object accessors as inline functions here instead of in MediaPlayerPrivateAVFoundationCF so we don't have 248// to include the AVCF headers in MediaPlayerPrivateAVFoundationCF.h 249inline AVCFPlayerLayerRef videoLayer(AVFWrapper* wrapper) 250{ 251 return wrapper ? wrapper->videoLayer() : 0; 252} 253 254inline AVCFPlayerRef avPlayer(AVFWrapper* wrapper) 255{ 256 return wrapper ? wrapper->avPlayer() : 0; 257} 258 259inline AVCFURLAssetRef avAsset(AVFWrapper* wrapper) 260{ 261 return wrapper ? 
wrapper->avAsset() : 0;
}

inline AVCFPlayerItemRef avPlayerItem(AVFWrapper* wrapper)
{
    return wrapper ? wrapper->avPlayerItem() : 0;
}

inline AVCFAssetImageGeneratorRef imageGenerator(AVFWrapper* wrapper)
{
    return wrapper ? wrapper->imageGenerator() : 0;
}

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
inline AVCFPlayerItemLegibleOutputRef avLegibleOutput(AVFWrapper* wrapper)
{
    return wrapper ? wrapper->legibleOutput() : 0;
}

inline AVCFMediaSelectionGroupRef safeMediaSelectionGroupForLegibleMedia(AVFWrapper* wrapper)
{
    return wrapper ? wrapper->safeMediaSelectionGroupForLegibleMedia() : 0;
}
#endif

// Factory used by the media-engine registration below.
PassOwnPtr<MediaPlayerPrivateInterface> MediaPlayerPrivateAVFoundationCF::create(MediaPlayer* player)
{
    return adoptPtr(new MediaPlayerPrivateAVFoundationCF(player));
}

// Registers this engine only when the AVFoundationCF and CoreMedia libraries load.
void MediaPlayerPrivateAVFoundationCF::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (isAvailable())
        registrar(create, getSupportedTypes, supportsType, 0, 0, 0);
}

MediaPlayerPrivateAVFoundationCF::MediaPlayerPrivateAVFoundationCF(MediaPlayer* player)
    : MediaPlayerPrivateAVFoundation(player)
    , m_avfWrapper(0)
    , m_videoFrameHasDrawn(false)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationCF::MediaPlayerPrivateAVFoundationCF(%p)", this);
}

MediaPlayerPrivateAVFoundationCF::~MediaPlayerPrivateAVFoundationCF()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationCF::~MediaPlayerPrivateAVFoundationCF(%p)", this);
    cancelLoad();
}

// Stops any in-flight load, tears down rendering and text tracks, and hands the
// AVCF objects to the wrapper for asynchronous destruction.
void MediaPlayerPrivateAVFoundationCF::cancelLoad()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationCF::cancelLoad(%p)", this);

    // Do nothing when our cancellation of pending loading calls its completion handler
    setDelayCallbacks(true);
    setIgnoreLoadStateChanges(true);

    tearDownVideoRendering();

    clearTextTracks();

    if (m_avfWrapper) {
        // The AVCF objects have to be destroyed on the same dispatch queue used for notifications, so schedule a call to
        // disconnectAndDeleteAVFWrapper on that queue.
        m_avfWrapper->scheduleDisconnectAndDelete();
        m_avfWrapper = 0;
    }

    setIgnoreLoadStateChanges(false);
    setDelayCallbacks(false);
}

// True when an AVCF player layer exists to composite video into.
bool MediaPlayerPrivateAVFoundationCF::hasLayerRenderer() const
{
    return videoLayer(m_avfWrapper);
}

// True when an image generator exists for the software (context) paint path.
bool MediaPlayerPrivateAVFoundationCF::hasContextRenderer() const
{
    return imageGenerator(m_avfWrapper);
}

// Lazily creates the image generator used to draw frames into a GraphicsContext.
void MediaPlayerPrivateAVFoundationCF::createContextVideoRenderer()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationCF::createContextVideoRenderer(%p)", this);

    if (imageGenerator(m_avfWrapper))
        return;

    if (m_avfWrapper)
        m_avfWrapper->createImageGenerator();
}

void MediaPlayerPrivateAVFoundationCF::destroyContextVideoRenderer()
{
    if (m_avfWrapper)
        m_avfWrapper->destroyImageGenerator();
}

void MediaPlayerPrivateAVFoundationCF::createVideoLayer()
{
    ASSERT(supportsAcceleratedRendering());

    if (m_avfWrapper)
        m_avfWrapper->createAVCFVideoLayer();
}

void MediaPlayerPrivateAVFoundationCF::destroyVideoLayer()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationCF::destroyVideoLayer(%p) - destroying %p", this, videoLayer(m_avfWrapper));
    if (m_avfWrapper)
        m_avfWrapper->destroyVideoLayer();
}

// A frame is available once we have painted one ourselves, or the layer reports
// it is ready for display.
bool MediaPlayerPrivateAVFoundationCF::hasAvailableVideoFrame() const
{
    return (m_videoFrameHasDrawn || (videoLayer(m_avfWrapper) && AVCFPlayerLayerIsReadyForDisplay(videoLayer(m_avfWrapper))));
}

void MediaPlayerPrivateAVFoundationCF::setCurrentTrack(InbandTextTrackPrivateAVF* track)
{
    if (m_avfWrapper)
        m_avfWrapper->setCurrentTrack(track);
}

InbandTextTrackPrivateAVF* MediaPlayerPrivateAVFoundationCF::currentTrack() const
{
    if (m_avfWrapper)
        return
            m_avfWrapper->currentTrack();

    return 0;
}

// Creates the AVFWrapper and kicks off asynchronous asset creation for 'url'.
// Callbacks are delayed so creation side effects don't reenter the player.
void MediaPlayerPrivateAVFoundationCF::createAVAssetForURL(const String& url)
{
    ASSERT(!m_avfWrapper);

    setDelayCallbacks(true);

    bool inheritURI = player()->doesHaveAttribute("x-itunes-inherit-uri-query-component");

    m_avfWrapper = new AVFWrapper(this);
    m_avfWrapper->createAssetForURL(url, inheritURI);
    setDelayCallbacks(false);
}

// Creates the AVCF player, handing it the page's D3D device for hardware rendering.
void MediaPlayerPrivateAVFoundationCF::createAVPlayer()
{
    ASSERT(m_avfWrapper);

    setDelayCallbacks(true);
    m_avfWrapper->createPlayer(reinterpret_cast<IDirect3DDevice9*>(player()->graphicsDeviceAdapter()));
    setDelayCallbacks(false);
}

void MediaPlayerPrivateAVFoundationCF::createAVPlayerItem()
{
    ASSERT(m_avfWrapper);

    setDelayCallbacks(true);
    m_avfWrapper->createPlayerItem();

    setDelayCallbacks(false);
}

void MediaPlayerPrivateAVFoundationCF::checkPlayability()
{
    ASSERT(m_avfWrapper);
    m_avfWrapper->checkPlayability();
}

void MediaPlayerPrivateAVFoundationCF::beginLoadingMetadata()
{
    ASSERT(m_avfWrapper);
    m_avfWrapper->beginLoadingMetadata();
}

// Maps the AVCF player item status (plus buffering predicates) onto the
// cross-platform ItemStatus enumeration.
MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationCF::playerItemStatus() const
{
    if (!avPlayerItem(m_avfWrapper))
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusDoesNotExist;

    AVCFPlayerItemStatus status = AVCFPlayerItemGetStatus(avPlayerItem(m_avfWrapper), 0);
    if (status == AVCFPlayerItemStatusUnknown)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;
    if (status == AVCFPlayerItemStatusFailed)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;
    if (AVCFPlayerItemIsPlaybackLikelyToKeepUp(avPlayerItem(m_avfWrapper)))
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;
    if
       (AVCFPlayerItemIsPlaybackBufferFull(avPlayerItem(m_avfWrapper)))
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;
    if (AVCFPlayerItemIsPlaybackBufferEmpty(avPlayerItem(m_avfWrapper)))
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;
    return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;
}

// Exposes the native AVCF player to callers that need the platform media handle.
PlatformMedia MediaPlayerPrivateAVFoundationCF::platformMedia() const
{
    LOG(Media, "MediaPlayerPrivateAVFoundationCF::platformMedia(%p)", this);
    PlatformMedia pm;
    pm.type = PlatformMedia::AVFoundationCFMediaPlayerType;
    pm.media.avcfMediaPlayer = (AVCFPlayer*)avPlayer(m_avfWrapper);
    return pm;
}

PlatformLayer* MediaPlayerPrivateAVFoundationCF::platformLayer() const
{
    if (!m_avfWrapper)
        return 0;

    return m_avfWrapper->platformLayer();
}

// Hides/shows the video layer; rendering is torn down entirely when hidden.
void MediaPlayerPrivateAVFoundationCF::platformSetVisible(bool isVisible)
{
    if (!m_avfWrapper)
        return;

    // FIXME: We use a CATransaction here on the Mac, we need to figure out why this was done there and
    // whether we're affected by the same issue.
    setDelayCallbacks(true);
    m_avfWrapper->setVideoLayerHidden(!isVisible);
    if (!isVisible)
        tearDownVideoRendering();
    setDelayCallbacks(false);
}

// Starts playback by applying the requested rate to the AVCF player.
void MediaPlayerPrivateAVFoundationCF::platformPlay()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationCF::play(%p)", this);
    if (!metaDataAvailable() || !avPlayer(m_avfWrapper))
        return;

    setDelayCallbacks(true);
    AVCFPlayerSetRate(avPlayer(m_avfWrapper), requestedRate());
    setDelayCallbacks(false);
}

// Pauses playback by forcing the rate to zero.
void MediaPlayerPrivateAVFoundationCF::platformPause()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationCF::pause(%p)", this);
    if (!metaDataAvailable() || !avPlayer(m_avfWrapper))
        return;

    setDelayCallbacks(true);
    AVCFPlayerSetRate(avPlayer(m_avfWrapper), 0);
    setDelayCallbacks(false);
}

// Returns the media duration in seconds, infinity for indefinite streams, or
// MediaPlayer::invalidTime() when the duration is not (yet) numeric.
float MediaPlayerPrivateAVFoundationCF::platformDuration() const
{
    if (!metaDataAvailable() || !avAsset(m_avfWrapper))
        return 0;

    CMTime cmDuration;

    // Check the AVItem if we have one and it has loaded duration, some assets never report duration.
    if (avPlayerItem(m_avfWrapper) && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay)
        cmDuration = AVCFPlayerItemGetDuration(avPlayerItem(m_avfWrapper));
    else
        cmDuration = AVCFAssetGetDuration(avAsset(m_avfWrapper));

    if (CMTIME_IS_NUMERIC(cmDuration))
        return narrowPrecisionToFloat(CMTimeGetSeconds(cmDuration));

    // Indefinite duration (e.g. a live stream) maps to infinity.
    if (CMTIME_IS_INDEFINITE(cmDuration))
        return numeric_limits<float>::infinity();

    LOG(Media, "MediaPlayerPrivateAVFoundationCF::platformDuration(%p) - invalid duration, returning %.0f", this, static_cast<float>(MediaPlayer::invalidTime()));
    return static_cast<float>(MediaPlayer::invalidTime());
}

// Current playback position in seconds, clamped to be non-negative; 0 when the
// item time is not numeric.
float MediaPlayerPrivateAVFoundationCF::currentTime() const
{
    if (!metaDataAvailable() || !avPlayerItem(m_avfWrapper))
        return 0;

    CMTime itemTime = AVCFPlayerItemGetCurrentTime(avPlayerItem(m_avfWrapper));
    if (CMTIME_IS_NUMERIC(itemTime))
        return max(narrowPrecisionToFloat(CMTimeGetSeconds(itemTime)), 0.0f);

    return 0;
}

void MediaPlayerPrivateAVFoundationCF::seekToTime(double time)
{
    if (!m_avfWrapper)
        return;

    // seekToTime generates several event callbacks, update afterwards.
    setDelayCallbacks(true);
    m_avfWrapper->seekToTime(time);
    setDelayCallbacks(false);
}

void MediaPlayerPrivateAVFoundationCF::setVolume(float volume)
{
    if (!metaDataAvailable() || !avPlayer(m_avfWrapper))
        return;

    AVCFPlayerSetVolume(avPlayer(m_avfWrapper), volume);
}

// Toggles the player's built-in closed-caption rendering.
void MediaPlayerPrivateAVFoundationCF::setClosedCaptionsVisible(bool closedCaptionsVisible)
{
    if (!metaDataAvailable() || !avPlayer(m_avfWrapper))
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationCF::setClosedCaptionsVisible(%p) - setting to %s", this, boolString(closedCaptionsVisible));
    AVCFPlayerSetClosedCaptionDisplayEnabled(avPlayer(m_avfWrapper), closedCaptionsVisible);
}

// Re-applies the rate requested by the cross-platform layer to the AVCF player.
void MediaPlayerPrivateAVFoundationCF::updateRate()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationCF::updateRate(%p)", this);
    if (!metaDataAvailable() || !avPlayer(m_avfWrapper))
        return;

    setDelayCallbacks(true);
    AVCFPlayerSetRate(avPlayer(m_avfWrapper), requestedRate());
    setDelayCallbacks(false);
}

float MediaPlayerPrivateAVFoundationCF::rate() const
{
    if (!metaDataAvailable() || !avPlayer(m_avfWrapper))
        return 0;

    setDelayCallbacks(true);
    float currentRate = AVCFPlayerGetRate(avPlayer(m_avfWrapper));
    setDelayCallbacks(false);

    return currentRate;
}

// Shared sanity check for (start, duration) pairs decoded from AVCF range
// dictionaries: both times valid, duration non-negative with epoch 0, and non-zero.
static bool timeRangeIsValidAndNotEmpty(CMTime start, CMTime duration)
{
    // Is the range valid?
    if (!CMTIME_IS_VALID(start) || !CMTIME_IS_VALID(duration) || duration.epoch || duration.value < 0)
        return false;

    // A zero-length range is considered empty.
    if (CMTIME_COMPARE_INLINE(duration, ==, kCMTimeZero))
        return false;

    return true;
}

// Converts the player item's loaded time ranges into a TimeRanges object,
// skipping any invalid or empty ranges.
PassRefPtr<TimeRanges> MediaPlayerPrivateAVFoundationCF::platformBufferedTimeRanges() const
{
    RefPtr<TimeRanges> timeRanges = TimeRanges::create();

    if (!avPlayerItem(m_avfWrapper))
        return timeRanges.release();

    RetainPtr<CFArrayRef> loadedRanges = adoptCF(AVCFPlayerItemCopyLoadedTimeRanges(avPlayerItem(m_avfWrapper)));
    if (!loadedRanges)
        return timeRanges.release();

    CFIndex rangeCount = CFArrayGetCount(loadedRanges.get());
    for (CFIndex i = 0; i < rangeCount; i++) {
        CFDictionaryRef range = static_cast<CFDictionaryRef>(CFArrayGetValueAtIndex(loadedRanges.get(), i));
        CMTime start = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeStartKey())));
        CMTime duration = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeDurationKey())));

        if (timeRangeIsValidAndNotEmpty(start, duration)) {
            float rangeStart = narrowPrecisionToFloat(CMTimeGetSeconds(start));
            float rangeEnd = narrowPrecisionToFloat(CMTimeGetSeconds(CMTimeAdd(start, duration)));
            timeRanges->add(rangeStart, rangeEnd);
        }
    }

    return timeRanges.release();
}

// Earliest seekable time across all seekable ranges; 0 when no valid range exists.
// NOTE(review): unlike platformMaxTimeSeekable() below, this does not null-check
// avPlayerItem(m_avfWrapper) before calling AVCFPlayerItemCopySeekableTimeRanges —
// confirm the AVCF call tolerates a null item.
double MediaPlayerPrivateAVFoundationCF::platformMinTimeSeekable() const
{
    RetainPtr<CFArrayRef> seekableRanges = adoptCF(AVCFPlayerItemCopySeekableTimeRanges(avPlayerItem(m_avfWrapper)));
    if (!seekableRanges)
        return 0;

    double minTimeSeekable = std::numeric_limits<double>::infinity();
    bool hasValidRange = false;
    CFIndex rangeCount = CFArrayGetCount(seekableRanges.get());
    for (CFIndex i = 0; i < rangeCount; i++) {
        CFDictionaryRef range = static_cast<CFDictionaryRef>(CFArrayGetValueAtIndex(seekableRanges.get(),
            i));
        CMTime start = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeStartKey())));
        CMTime duration = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeDurationKey())));
        if (!timeRangeIsValidAndNotEmpty(start, duration))
            continue;

        hasValidRange = true;
        double startOfRange = CMTimeGetSeconds(start);
        if (minTimeSeekable > startOfRange)
            minTimeSeekable = startOfRange;
    }
    return hasValidRange ? minTimeSeekable : 0;
}

// Latest seekable time (end of the furthest valid seekable range), or 0.
double MediaPlayerPrivateAVFoundationCF::platformMaxTimeSeekable() const
{
    if (!avPlayerItem(m_avfWrapper))
        return 0;

    RetainPtr<CFArrayRef> seekableRanges = adoptCF(AVCFPlayerItemCopySeekableTimeRanges(avPlayerItem(m_avfWrapper)));
    if (!seekableRanges)
        return 0;

    double maxTimeSeekable = 0;
    CFIndex rangeCount = CFArrayGetCount(seekableRanges.get());
    for (CFIndex i = 0; i < rangeCount; i++) {
        CFDictionaryRef range = static_cast<CFDictionaryRef>(CFArrayGetValueAtIndex(seekableRanges.get(), i));
        CMTime start = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeStartKey())));
        CMTime duration = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeDurationKey())));
        if (!timeRangeIsValidAndNotEmpty(start, duration))
            continue;

        double endOfRange = CMTimeGetSeconds(CMTimeAdd(start, duration));
        if (maxTimeSeekable < endOfRange)
            maxTimeSeekable = endOfRange;
    }

    return maxTimeSeekable;
}

// Furthest buffered (loaded) time, or 0 when no valid loaded range exists.
float MediaPlayerPrivateAVFoundationCF::platformMaxTimeLoaded() const
{
    if (!avPlayerItem(m_avfWrapper))
        return 0;

    RetainPtr<CFArrayRef> loadedRanges = adoptCF(AVCFPlayerItemCopyLoadedTimeRanges(avPlayerItem(m_avfWrapper)));
    if (!loadedRanges)
        return 0;

    float maxTimeLoaded = 0;
    CFIndex rangeCount =
        CFArrayGetCount(loadedRanges.get());
    for (CFIndex i = 0; i < rangeCount; i++) {
        CFDictionaryRef range = static_cast<CFDictionaryRef>(CFArrayGetValueAtIndex(loadedRanges.get(), i));
        CMTime start = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeStartKey())));
        CMTime duration = CMTimeMakeFromDictionary(static_cast<CFDictionaryRef>(CFDictionaryGetValue(range, CMTimeRangeDurationKey())));
        if (!timeRangeIsValidAndNotEmpty(start, duration))
            continue;

        float endOfRange = narrowPrecisionToFloat(CMTimeGetSeconds(CMTimeAdd(start, duration)));
        if (maxTimeLoaded < endOfRange)
            maxTimeLoaded = endOfRange;
    }

    return maxTimeLoaded;
}

// Sums the sample data length of every asset track.
unsigned MediaPlayerPrivateAVFoundationCF::totalBytes() const
{
    if (!metaDataAvailable() || !avAsset(m_avfWrapper))
        return 0;

    int64_t totalMediaSize = 0;
    RetainPtr<CFArrayRef> tracks = adoptCF(AVCFAssetCopyAssetTracks(avAsset(m_avfWrapper)));
    CFIndex trackCount = CFArrayGetCount(tracks.get());
    for (CFIndex i = 0; i < trackCount; i++) {
        AVCFAssetTrackRef assetTrack = (AVCFAssetTrackRef)CFArrayGetValueAtIndex(tracks.get(), i);
        totalMediaSize += AVCFAssetTrackGetTotalSampleDataLength(assetTrack);
    }

    // FIXME: It doesn't seem safe to cast an int64_t to unsigned.
    return static_cast<unsigned>(totalMediaSize);
}

// Reports the asset's load state by checking the status of every metadata key in
// metadataKeyNames(), tolerating a couple of known-nonfatal failures.
MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationCF::assetStatus() const
{
    if (!avAsset(m_avfWrapper))
        return MediaPlayerAVAssetStatusDoesNotExist;

    // First, make sure all metadata properties we rely on are loaded.
    CFArrayRef keys = metadataKeyNames();
    CFIndex keyCount = CFArrayGetCount(keys);
    for (CFIndex i = 0; i < keyCount; i++) {
        CFStringRef keyName = static_cast<CFStringRef>(CFArrayGetValueAtIndex(keys, i));
        AVCFPropertyValueStatus keyStatus = AVCFAssetGetStatusOfValueForProperty(avAsset(m_avfWrapper), keyName, 0);

        // Any key still below "loaded" means metadata is still in flight.
        if (keyStatus < AVCFPropertyValueStatusLoaded)
            return MediaPlayerAVAssetStatusLoading;
        if (keyStatus == AVCFPropertyValueStatusFailed) {
            if (CFStringCompare(keyName, AVCFAssetPropertyNaturalSize, 0) == kCFCompareEqualTo) {
                // Don't treat a failure to retrieve @"naturalSize" as fatal. We will use @"presentationSize" instead.
                // <rdar://problem/15966685>
                continue;
            }
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
            if (CFStringCompare(keyName, AVCFAssetPropertyAvailableMediaCharacteristicsWithMediaSelectionOptions, 0) == kCFCompareEqualTo) {
                // On Windows, the media selection options are not available when initially interacting with a streaming source.
                // <rdar://problem/16160699>
                continue;
            }
#endif
            return MediaPlayerAVAssetStatusFailed;
        }
        if (keyStatus == AVCFPropertyValueStatusCancelled)
            return MediaPlayerAVAssetStatusCancelled;
    }

    if (AVCFAssetIsPlayable(avAsset(m_avfWrapper)))
        return MediaPlayerAVAssetStatusPlayable;

    return MediaPlayerAVAssetStatusLoaded;
}

// Draws the current frame into 'context', creating the software renderer on
// demand if we were previously rendering to a layer.
void MediaPlayerPrivateAVFoundationCF::paintCurrentFrameInContext(GraphicsContext* context, const IntRect& rect)
{
    if (!metaDataAvailable() || context->paintingDisabled())
        return;

    if (currentRenderingMode() == MediaRenderingToLayer && !imageGenerator(m_avfWrapper)) {
        // We're being told to render into a context, but we already have the
        // video layer, which probably means we've been called from <canvas>.
        createContextVideoRenderer();
    }

    paint(context, rect);
}

// Software paint path: snapshots the frame at currentTime() via the image
// generator and blits it (flipped, since the image is bottom-up) into 'context'.
void MediaPlayerPrivateAVFoundationCF::paint(GraphicsContext* context, const IntRect& rect)
{
    if (!metaDataAvailable() || context->paintingDisabled() || !imageGenerator(m_avfWrapper))
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationCF::paint(%p)", this);

    setDelayCallbacks(true);
    RetainPtr<CGImageRef> image = m_avfWrapper->createImageForTimeInRect(currentTime(), rect);
    if (image) {
        context->save();
        // Flip vertically: translate to the rect's bottom edge, then negate Y.
        context->translate(rect.x(), rect.y() + rect.height());
        context->scale(FloatSize(1.0f, -1.0f));
        context->setImageInterpolationQuality(InterpolationLow);
        IntRect paintRect(IntPoint(0, 0), IntSize(rect.width(), rect.height()));
        CGContextDrawImage(context->platformContext(), CGRectMake(0, 0, paintRect.width(), paintRect.height()), image.get());
        context->restore();
        image = 0;
    }
    setDelayCallbacks(false);

    m_videoFrameHasDrawn = true;
}

// Lazily-built set of MIME types AVCF reports as playable audiovisual types.
static HashSet<String> mimeTypeCache()
{
    DEFINE_STATIC_LOCAL(HashSet<String>, cache, ());
    static bool typeListInitialized = false;

    if (typeListInitialized)
        return cache;
    typeListInitialized = true;

    RetainPtr<CFArrayRef> supportedTypes = adoptCF(AVCFURLAssetCopyAudiovisualMIMETypes());

    ASSERT(supportedTypes);
    if (!supportedTypes)
        return cache;

    CFIndex typeCount = CFArrayGetCount(supportedTypes.get());
    for (CFIndex i = 0; i < typeCount; i++)
        cache.add(static_cast<CFStringRef>(CFArrayGetValueAtIndex(supportedTypes.get(), i)));

    return cache;
}

void MediaPlayerPrivateAVFoundationCF::getSupportedTypes(HashSet<String>& supportedTypes)
{
    supportedTypes = mimeTypeCache();
}

MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationCF::supportsType(const String& type, const String& codecs, const KURL&)
{
    // Only return "IsSupported" if there is no codecs parameter for now as there is no way to ask if it supports an
    // extended MIME type until rdar://8721715 is fixed.
    if (mimeTypeCache().contains(type))
        return codecs.isEmpty() ? MediaPlayer::MayBeSupported : MediaPlayer::IsSupported;

    return MediaPlayer::IsNotSupported;
}


// The engine is usable only when both soft-linked libraries load successfully.
bool MediaPlayerPrivateAVFoundationCF::isAvailable()
{
    return AVFoundationCFLibrary() && CoreMediaLibrary();
}

float MediaPlayerPrivateAVFoundationCF::mediaTimeForTimeValue(float timeValue) const
{
    if (!metaDataAvailable())
        return timeValue;

    // FIXME - can not implement until rdar://8721669 is fixed.
    return timeValue;
}

// Recomputes cached hasVideo/hasAudio/hasCaptions whenever the track collection
// changes, then propagates size and characteristic changes.
void MediaPlayerPrivateAVFoundationCF::tracksChanged()
{
    String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
    m_languageOfPrimaryAudioTrack = String();

    if (!avAsset(m_avfWrapper))
        return;

    setDelayCharacteristicsChangedNotification(true);

    bool haveCCTrack = false;
    bool hasCaptions = false;

    // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we are
    // asked about those fairly frequently.
    if (!avPlayerItem(m_avfWrapper)) {
        // We don't have a player item yet, so check with the asset because some assets support inspection
        // prior to becoming ready to play.
        RetainPtr<CFArrayRef> visualTracks = adoptCF(AVCFAssetCopyTracksWithMediaCharacteristic(avAsset(m_avfWrapper), AVCFMediaCharacteristicVisual));
        setHasVideo(CFArrayGetCount(visualTracks.get()));

        RetainPtr<CFArrayRef> audioTracks = adoptCF(AVCFAssetCopyTracksWithMediaCharacteristic(avAsset(m_avfWrapper), AVCFMediaCharacteristicAudible));
        setHasAudio(CFArrayGetCount(audioTracks.get()));

#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        RetainPtr<CFArrayRef> captionTracks = adoptCF(AVCFAssetCopyTracksWithMediaType(avAsset(m_avfWrapper), AVCFMediaTypeClosedCaption));
        hasCaptions = CFArrayGetCount(captionTracks.get());
#endif
    } else {
        bool hasVideo = false;
        bool hasAudio = false;

        // With a player item, classify each enabled track by its asset track's media type.
        RetainPtr<CFArrayRef> tracks = adoptCF(AVCFPlayerItemCopyTracks(avPlayerItem(m_avfWrapper)));

        CFIndex trackCount = CFArrayGetCount(tracks.get());
        for (CFIndex i = 0; i < trackCount; i++) {
            AVCFPlayerItemTrackRef track = (AVCFPlayerItemTrackRef)(CFArrayGetValueAtIndex(tracks.get(), i));

            if (AVCFPlayerItemTrackIsEnabled(track)) {
                RetainPtr<AVCFAssetTrackRef> assetTrack = adoptCF(AVCFPlayerItemTrackCopyAssetTrack(track));
                if (!assetTrack) {
                    // Asset tracks may not be available yet when streaming. <rdar://problem/16160699>
                    // NOTE(review): 'track' is a CF object passed for a %d conversion —
                    // format specifier and argument type look mismatched; confirm intent.
                    LOG(Media, "MediaPlayerPrivateAVFoundationCF:tracksChanged(%p) - track = %d is enabled, but has no asset track.", this, track);
                    continue;
                }
                CFStringRef mediaType = AVCFAssetTrackGetMediaType(assetTrack.get());
                if (!mediaType)
                    continue;

                if (CFStringCompare(mediaType, AVCFMediaTypeVideo, kCFCompareCaseInsensitive) == kCFCompareEqualTo)
                    hasVideo = true;
                else if (CFStringCompare(mediaType, AVCFMediaTypeAudio, kCFCompareCaseInsensitive) == kCFCompareEqualTo)
                    hasAudio = true;
                else if (CFStringCompare(mediaType, AVCFMediaTypeClosedCaption, kCFCompareCaseInsensitive) == kCFCompareEqualTo) {
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
                    hasCaptions = true;
#endif
                    haveCCTrack = true;
                }
            }
        }

        setHasVideo(hasVideo);
        setHasAudio(hasAudio);
    }

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Prefer media-selection-based captions when the legible selection group exists.
    AVCFMediaSelectionGroupRef legibleGroup = safeMediaSelectionGroupForLegibleMedia(m_avfWrapper);
    if (legibleGroup) {
        RetainPtr<CFArrayRef> playableOptions = adoptCF(AVCFMediaSelectionCopyPlayableOptionsFromArray(AVCFMediaSelectionGroupGetOptions(legibleGroup)));
        hasCaptions = CFArrayGetCount(playableOptions.get());
        if (hasCaptions)
            processMediaSelectionOptions();
    }
#endif

#if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    if (haveCCTrack)
        processLegacyClosedCaptionsTracks();
#endif

    setHasClosedCaptions(hasCaptions);

    LOG(Media, "MediaPlayerPrivateAVFoundationCF:tracksChanged(%p) - hasVideo = %s, hasAudio = %s, hasCaptions = %s",
        this, boolString(hasVideo()), boolString(hasAudio()), boolString(hasClosedCaptions()));

    sizeChanged();

    if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
        characteristicsChanged();

    setDelayCharacteristicsChangedNotification(false);
}

// Recomputes the natural size from the union of all visual track rects, falling
// back to the item's presentation size and the transformed asset natural size.
void MediaPlayerPrivateAVFoundationCF::sizeChanged()
{
    if (!avAsset(m_avfWrapper))
        return;

// AVAsset's 'naturalSize' property only considers the movie's first video track, so we need to compute 958 // the union of all visual track rects. 959 CGRect trackRectUnion = CGRectZero; 960 RetainPtr<CFArrayRef> tracks = adoptCF(AVCFAssetCopyTracksWithMediaType(avAsset(m_avfWrapper), AVCFMediaCharacteristicVisual)); 961 CFIndex trackCount = CFArrayGetCount(tracks.get()); 962 for (CFIndex i = 0; i < trackCount; i++) { 963 AVCFAssetTrackRef assetTrack = (AVCFAssetTrackRef)(CFArrayGetValueAtIndex(tracks.get(), i)); 964 965 CGSize trackSize = AVCFAssetTrackGetNaturalSize(assetTrack); 966 CGRect trackRect = CGRectMake(0, 0, trackSize.width, trackSize.height); 967 trackRectUnion = CGRectUnion(trackRectUnion, CGRectApplyAffineTransform(trackRect, AVCFAssetTrackGetPreferredTransform(assetTrack))); 968 } 969 // The movie is always displayed at 0,0 so move the track rect to the origin before using width and height. 970 trackRectUnion = CGRectOffset(trackRectUnion, trackRectUnion.origin.x, trackRectUnion.origin.y); 971 CGSize naturalSize = trackRectUnion.size; 972 973 if (!naturalSize.height && !naturalSize.width && avPlayerItem(m_avfWrapper)) 974 naturalSize = AVCFPlayerItemGetPresentationSize(avPlayerItem(m_avfWrapper)); 975 976 // Also look at the asset's preferred transform so we account for a movie matrix. 977 CGSize movieSize = CGSizeApplyAffineTransform(AVCFAssetGetNaturalSize(avAsset(m_avfWrapper)), AVCFAssetGetPreferredTransform(avAsset(m_avfWrapper))); 978 if (movieSize.width > naturalSize.width) 979 naturalSize.width = movieSize.width; 980 if (movieSize.height > naturalSize.height) 981 naturalSize.height = movieSize.height; 982 setNaturalSize(IntSize(naturalSize)); 983} 984 985bool MediaPlayerPrivateAVFoundationCF::requiresImmediateCompositing() const 986{ 987 // The AVFoundationCF player needs to have the root compositor available at construction time 988 // so it can attach to the rendering device. Otherwise it falls back to CPU-only mode. 
989 // 990 // It would be nice if AVCFPlayer had some way to switch to hardware-accelerated mode 991 // when asked, then we could follow AVFoundation's model and switch to compositing 992 // mode when beginning to play media. 993 return true; 994} 995 996#if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT) 997void MediaPlayerPrivateAVFoundationCF::processLegacyClosedCaptionsTracks() 998{ 999#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) 1000 AVCFPlayerItemSelectMediaOptionInMediaSelectionGroup(avPlayerItem(m_avfWrapper), 0, safeMediaSelectionGroupForLegibleMedia(m_avfWrapper)); 1001#endif 1002 1003 Vector<RefPtr<InbandTextTrackPrivateAVF> > removedTextTracks = m_textTracks; 1004 RetainPtr<CFArrayRef> tracks = adoptCF(AVCFPlayerItemCopyTracks(avPlayerItem(m_avfWrapper))); 1005 CFIndex trackCount = CFArrayGetCount(tracks.get()); 1006 for (CFIndex i = 0; i < trackCount; ++i) { 1007 AVCFPlayerItemTrackRef playerItemTrack = (AVCFPlayerItemTrackRef)(CFArrayGetValueAtIndex(tracks.get(), i)); 1008 1009 RetainPtr<AVCFAssetTrackRef> assetTrack = adoptCF(AVCFPlayerItemTrackCopyAssetTrack(playerItemTrack)); 1010 if (!assetTrack) { 1011 // Asset tracks may not be available yet when streaming. 
<rdar://problem/16160699> 1012 LOG(Media, "MediaPlayerPrivateAVFoundationCF:tracksChanged(%p) - track = %d is enabled, but has no asset track.", this, track); 1013 continue; 1014 } 1015 CFStringRef mediaType = AVCFAssetTrackGetMediaType(assetTrack.get()); 1016 if (!mediaType) 1017 continue; 1018 1019 if (CFStringCompare(mediaType, AVCFMediaTypeClosedCaption, kCFCompareCaseInsensitive) != kCFCompareEqualTo) 1020 continue; 1021 1022 bool newCCTrack = true; 1023 for (unsigned i = removedTextTracks.size(); i > 0; --i) { 1024 if (!removedTextTracks[i - 1]->isLegacyClosedCaptionsTrack()) 1025 continue; 1026 1027 RefPtr<InbandTextTrackPrivateLegacyAVCF> track = static_cast<InbandTextTrackPrivateLegacyAVCF*>(m_textTracks[i - 1].get()); 1028 if (track->avPlayerItemTrack() == playerItemTrack) { 1029 removedTextTracks.remove(i - 1); 1030 newCCTrack = false; 1031 break; 1032 } 1033 } 1034 1035 if (!newCCTrack) 1036 continue; 1037 1038 m_textTracks.append(InbandTextTrackPrivateLegacyAVCF::create(this, playerItemTrack)); 1039 } 1040 1041 processNewAndRemovedTextTracks(removedTextTracks); 1042} 1043#endif 1044 1045#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) 1046void MediaPlayerPrivateAVFoundationCF::processMediaSelectionOptions() 1047{ 1048 AVCFMediaSelectionGroupRef legibleGroup = safeMediaSelectionGroupForLegibleMedia(m_avfWrapper); 1049 if (!legibleGroup) { 1050 LOG(Media, "MediaPlayerPrivateAVFoundationCF::processMediaSelectionOptions(%p) - nil mediaSelectionGroup", this); 1051 return; 1052 } 1053 1054 // We enabled automatic media selection because we want alternate audio tracks to be enabled/disabled automatically, 1055 // but set the selected legible track to nil so text tracks will not be automatically configured. 
1056 if (!m_textTracks.size()) { 1057 ASSERT(AVCFMediaSelectionGroupAllowsEmptySelection(legibleGroup)); 1058 AVCFPlayerItemRef playerItem = avPlayerItem(m_avfWrapper); 1059 1060 if (playerItem) 1061 AVCFPlayerItemSelectMediaOptionInMediaSelectionGroup(playerItem, 0, legibleGroup); 1062 } 1063 1064 Vector<RefPtr<InbandTextTrackPrivateAVF> > removedTextTracks = m_textTracks; 1065 RetainPtr<CFArrayRef> legibleOptions = adoptCF(AVCFMediaSelectionCopyPlayableOptionsFromArray(AVCFMediaSelectionGroupGetOptions(legibleGroup))); 1066 CFIndex legibleOptionsCount = CFArrayGetCount(legibleOptions.get()); 1067 for (CFIndex i = 0; i < legibleOptionsCount; ++i) { 1068 AVCFMediaSelectionOptionRef option = static_cast<AVCFMediaSelectionOptionRef>(CFArrayGetValueAtIndex(legibleOptions.get(), i)); 1069 bool newTrack = true; 1070 for (unsigned i = removedTextTracks.size(); i > 0; --i) { 1071 if (removedTextTracks[i - 1]->isLegacyClosedCaptionsTrack()) 1072 continue; 1073 1074 RefPtr<InbandTextTrackPrivateAVCF> track = static_cast<InbandTextTrackPrivateAVCF*>(removedTextTracks[i - 1].get()); 1075 if (CFEqual(track->mediaSelectionOption(), option)) { 1076 removedTextTracks.remove(i - 1); 1077 newTrack = false; 1078 break; 1079 } 1080 } 1081 if (!newTrack) 1082 continue; 1083 1084 m_textTracks.append(InbandTextTrackPrivateAVCF::create(this, option)); 1085 } 1086 1087 processNewAndRemovedTextTracks(removedTextTracks); 1088} 1089 1090#endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) 1091 1092void AVFWrapper::setCurrentTrack(InbandTextTrackPrivateAVF* track) 1093{ 1094 if (m_currentTrack == track) 1095 return; 1096 1097 LOG(Media, "MediaPlayerPrivateAVFoundationCF::setCurrentTrack(%p) - selecting track %p, language = %s", this, track, track ? 
track->language().string().utf8().data() : ""); 1098 1099 m_currentTrack = track; 1100 1101 if (track) { 1102 if (track->isLegacyClosedCaptionsTrack()) 1103 AVCFPlayerSetClosedCaptionDisplayEnabled(avPlayer(), TRUE); 1104#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) 1105 else 1106 AVCFPlayerItemSelectMediaOptionInMediaSelectionGroup(avPlayerItem(), static_cast<InbandTextTrackPrivateAVCF*>(track)->mediaSelectionOption(), safeMediaSelectionGroupForLegibleMedia()); 1107#endif 1108 } else { 1109#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) 1110 AVCFPlayerItemSelectMediaOptionInMediaSelectionGroup(avPlayerItem(), 0, safeMediaSelectionGroupForLegibleMedia()); 1111#endif 1112 AVCFPlayerSetClosedCaptionDisplayEnabled(avPlayer(), FALSE); 1113 } 1114} 1115 1116String MediaPlayerPrivateAVFoundationCF::languageOfPrimaryAudioTrack() const 1117{ 1118 if (!m_languageOfPrimaryAudioTrack.isNull()) 1119 return m_languageOfPrimaryAudioTrack; 1120 1121 if (!avPlayerItem(m_avfWrapper)) 1122 return emptyString(); 1123 1124#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) 1125 // If AVFoundation has an audible group, return the language of the currently selected audible option. 
1126 AVCFMediaSelectionGroupRef audibleGroup = AVCFAssetGetSelectionGroupForMediaCharacteristic(avAsset(m_avfWrapper), AVCFMediaCharacteristicAudible); 1127 AVCFMediaSelectionOptionRef currentlySelectedAudibleOption = AVCFPlayerItemGetSelectedMediaOptionInMediaSelectionGroup(avPlayerItem(m_avfWrapper), audibleGroup); 1128 if (currentlySelectedAudibleOption) { 1129 RetainPtr<CFLocaleRef> audibleOptionLocale = adoptCF(AVCFMediaSelectionOptionCopyLocale(currentlySelectedAudibleOption)); 1130 m_languageOfPrimaryAudioTrack = CFLocaleGetIdentifier(audibleOptionLocale.get()); 1131 LOG(Media, "MediaPlayerPrivateAVFoundationCF::languageOfPrimaryAudioTrack(%p) - returning language of selected audible option: %s", this, m_languageOfPrimaryAudioTrack.utf8().data()); 1132 1133 return m_languageOfPrimaryAudioTrack; 1134 } 1135#endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) 1136 1137 // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or 1138 // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track return its language. 1139 RetainPtr<CFArrayRef> tracks = adoptCF(AVCFAssetCopyTracksWithMediaType(avAsset(m_avfWrapper), AVCFMediaTypeAudio)); 1140 CFIndex trackCount = CFArrayGetCount(tracks.get()); 1141 if (!tracks || trackCount != 1) { 1142 m_languageOfPrimaryAudioTrack = emptyString(); 1143 LOG(Media, "MediaPlayerPrivateAVFoundationCF::languageOfPrimaryAudioTrack(%p) - %i audio tracks, returning emptyString()", this, (tracks ? 
trackCount : 0)); 1144 return m_languageOfPrimaryAudioTrack; 1145 } 1146 1147 AVCFAssetTrackRef track = (AVCFAssetTrackRef)CFArrayGetValueAtIndex(tracks.get(), 0); 1148 RetainPtr<CFStringRef> language = adoptCF(AVCFAssetTrackCopyExtendedLanguageTag(track)); 1149 1150 // If the language code is stored as a QuickTime 5-bit packed code there aren't enough bits for a full 1151 // RFC 4646 language tag so extendedLanguageTag returns null. In this case languageCode will return the 1152 // ISO 639-2/T language code so check it. 1153 if (!language) 1154 language = adoptCF(AVCFAssetTrackCopyLanguageCode(track)); 1155 1156 // Some legacy tracks have "und" as a language, treat that the same as no language at all. 1157 if (language && CFStringCompare(language.get(), CFSTR("und"), kCFCompareCaseInsensitive) != kCFCompareEqualTo) { 1158 m_languageOfPrimaryAudioTrack = language.get(); 1159 LOG(Media, "MediaPlayerPrivateAVFoundationCF::languageOfPrimaryAudioTrack(%p) - returning language of single audio track: %s", this, m_languageOfPrimaryAudioTrack.utf8().data()); 1160 return m_languageOfPrimaryAudioTrack; 1161 } 1162 1163 LOG(Media, "MediaPlayerPrivateAVFoundationCF::languageOfPrimaryAudioTrack(%p) - single audio track has no language, returning emptyString()", this); 1164 m_languageOfPrimaryAudioTrack = emptyString(); 1165 return m_languageOfPrimaryAudioTrack; 1166} 1167 1168void MediaPlayerPrivateAVFoundationCF::contentsNeedsDisplay() 1169{ 1170 if (m_avfWrapper) 1171 m_avfWrapper->setVideoLayerNeedsCommit(); 1172} 1173 1174AVFWrapper::AVFWrapper(MediaPlayerPrivateAVFoundationCF* owner) 1175 : m_owner(owner) 1176 , m_objectID(s_nextAVFWrapperObjectID++) 1177 , m_currentTrack(0) 1178{ 1179 ASSERT(isMainThread()); 1180 ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue()); 1181 LOG(Media, "AVFWrapper::AVFWrapper(%p)", this); 1182 1183 m_notificationQueue = dispatch_queue_create("MediaPlayerPrivateAVFoundationCF.notificationQueue", 0); 1184 addToMap(); 1185} 1186 
// Tears down the wrapper. Must run on the main thread because the AVCF objects
// (player, player item, time observer) were created there and must be released there.
AVFWrapper::~AVFWrapper()
{
    ASSERT(isMainThread());
    ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
    LOG(Media, "AVFWrapper::~AVFWrapper(%p %d)", this, m_objectID);

    destroyVideoLayer();
    destroyImageGenerator();

    if (m_notificationQueue)
        dispatch_release(m_notificationQueue);

    if (avAsset()) {
        // Cancel any in-flight asynchronous property loading before dropping the asset.
        AVCFAssetCancelLoading(avAsset());
        m_avAsset = 0;
    }

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    if (legibleOutput()) {
        if (avPlayerItem())
            AVCFPlayerItemRemoveOutput(avPlayerItem(), legibleOutput());
        m_legibleOutput = 0;
    }
#endif

    m_avPlayerItem = 0;
    m_timeObserver = 0;
    m_avPlayer = 0;
}

// Guards map(); callbacks arrive on background dispatch queues.
Mutex& AVFWrapper::mapLock()
{
    static Mutex mapLock;
    return mapLock;
}

// Maps object IDs (used as opaque callback contexts) to live wrappers.
// Intentionally leaked so it survives static-destruction order.
HashMap<uintptr_t, AVFWrapper*>& AVFWrapper::map()
{
    static HashMap<uintptr_t, AVFWrapper*>& map = *new HashMap<uintptr_t, AVFWrapper*>;
    return map;
}

// Registers this wrapper under a unique nonzero object ID; the ID (cast to a
// pointer) is handed to AVCF/CF as the callback context.
void AVFWrapper::addToMap()
{
    MutexLocker locker(mapLock());

    // HashMap doesn't like a key of 0, and also make sure we aren't
    // using an object ID that's already in use.
    while (!m_objectID || (map().find(m_objectID) != map().end()))
        m_objectID = s_nextAVFWrapperObjectID++;

    LOG(Media, "AVFWrapper::addToMap(%p %d)", this, m_objectID);

    map().add(m_objectID, this);
}

// After removal, any callback that looks this wrapper up via its context will
// find nothing and bail out.
void AVFWrapper::removeFromMap() const
{
    LOG(Media, "AVFWrapper::removeFromMap(%p %d)", this, m_objectID);

    MutexLocker locker(mapLock());
    map().remove(m_objectID);
}

// Returns the live wrapper for a callback context, or 0 if it was already
// disconnected.
AVFWrapper* AVFWrapper::avfWrapperForCallbackContext(void* context)
{
    // Assumes caller has locked mapLock().
    HashMap<uintptr_t, AVFWrapper*>::iterator it = map().find(reinterpret_cast<uintptr_t>(context));
    if (it == map().end())
        return 0;

    return it->value;
}

// Begins asynchronous teardown: unhook from the callback map immediately, then
// finish disconnect/destruction on the notification queue (and finally the main
// thread — see disconnectAndDeleteAVFWrapper).
void AVFWrapper::scheduleDisconnectAndDelete()
{
    // Ignore any subsequent notifications we might receive in notificationCallback().
    removeFromMap();

    dispatch_async_f(dispatchQueue(), this, disconnectAndDeleteAVFWrapper);
}

// Final step of teardown, dispatched to the main thread: actually deletes the wrapper.
static void destroyAVFWrapper(void* context)
{
    ASSERT(isMainThread());
    ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
    AVFWrapper* avfWrapper = static_cast<AVFWrapper*>(context);
    if (!avfWrapper)
        return;

    delete avfWrapper;
}

// Runs on the notification queue: unregisters every CF notification observer and
// the periodic time observer, then bounces to the main thread for destruction.
void AVFWrapper::disconnectAndDeleteAVFWrapper(void* context)
{
    AVFWrapper* avfWrapper = static_cast<AVFWrapper*>(context);

    LOG(Media, "AVFWrapper::disconnectAndDeleteAVFWrapper(%p)", avfWrapper);

    // Removals must mirror the registrations performed in createPlayerItem()/createPlayer().
    if (avfWrapper->avPlayerItem()) {
        CFNotificationCenterRef center = CFNotificationCenterGetLocalCenter();
        CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemDidPlayToEndTimeNotification, avfWrapper->avPlayerItem());
        CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemStatusChangedNotification, avfWrapper->avPlayerItem());
        CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemTracksChangedNotification, avfWrapper->avPlayerItem());
        CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemSeekableTimeRangesChangedNotification, avfWrapper->avPlayerItem());
        CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemLoadedTimeRangesChangedNotification, avfWrapper->avPlayerItem());
        CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemPresentationSizeChangedNotification, avfWrapper->avPlayerItem());
        CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemIsPlaybackLikelyToKeepUpChangedNotification, avfWrapper->avPlayerItem());
        CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemIsPlaybackBufferEmptyChangedNotification, avfWrapper->avPlayerItem());
        CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemIsPlaybackBufferFullChangedNotification, avfWrapper->avPlayerItem());
        CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), AVCFPlayerItemDurationChangedNotification, avfWrapper->avPlayerItem());
        CFNotificationCenterRemoveObserver(center, avfWrapper->callbackContext(), CACFContextNeedsFlushNotification(), 0);
    }

    if (avfWrapper->avPlayer()) {
        if (avfWrapper->timeObserver())
            AVCFPlayerRemoveObserver(avfWrapper->avPlayer(), avfWrapper->timeObserver());

        CFNotificationCenterRemoveObserver(CFNotificationCenterGetLocalCenter(), avfWrapper->callbackContext(), AVCFPlayerRateChangedNotification, avfWrapper->avPlayer());
    }

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // NOTE(review): unlike the destructor, this call is not guarded by
    // avPlayerItem()/legibleOutput() null checks — confirm both are always
    // non-null on this path.
    AVCFPlayerItemRemoveOutput(avfWrapper->avPlayerItem(), avfWrapper->legibleOutput());
#endif

    // We must release the AVCFPlayer and other items on the same thread that created them.
    dispatch_async_f(dispatch_get_main_queue(), context, destroyAVFWrapper);
}

// Creates the AVCFURLAsset for the given URL. inheritURI propagates the
// referencing URI's query component to media sub-requests.
void AVFWrapper::createAssetForURL(const String& url, bool inheritURI)
{
    ASSERT(!avAsset());

    RetainPtr<CFURLRef> urlRef = KURL(ParsedURLString, url).createCFURL();

    RetainPtr<CFMutableDictionaryRef> optionsRef = adoptCF(CFDictionaryCreateMutable(kCFAllocatorDefault, 0, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks));

    if (inheritURI)
        CFDictionarySetValue(optionsRef.get(), AVCFURLAssetInheritURIQueryComponentFromReferencingURIKey, kCFBooleanTrue);

    AVCFURLAssetRef assetRef = AVCFURLAssetCreateWithURLAndOptions(kCFAllocatorDefault, urlRef.get(), optionsRef.get(), m_notificationQueue);
    m_avAsset = adoptCF(assetRef);
}

// Creates the AVCFPlayer for the existing player item, optionally wiring up a
// Direct3D device for hardware-accelerated decoding. Main-thread only.
void AVFWrapper::createPlayer(IDirect3DDevice9* d3dDevice)
{
    ASSERT(isMainThread());
    ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
    ASSERT(avPlayerItem());

    if (avPlayer())
        return;

    RetainPtr<CFMutableDictionaryRef> optionsRef = adoptCF(CFDictionaryCreateMutable(kCFAllocatorDefault, 1, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks));

    if (d3dDevice) {
        // QI for an IDirect3DDevice9Ex interface, it is required to do HW video decoding.
        COMPtr<IDirect3DDevice9Ex> d3dEx(Query, d3dDevice);
        m_d3dDevice = d3dEx;
    } else
        m_d3dDevice = 0;

    if (m_d3dDevice && AVCFPlayerEnableHardwareAcceleratedVideoDecoderKey)
        CFDictionarySetValue(optionsRef.get(), AVCFPlayerEnableHardwareAcceleratedVideoDecoderKey, kCFBooleanTrue);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Let AVCF pick audio tracks automatically; text-track selection stays manual.
    CFDictionarySetValue(optionsRef.get(), AVCFPlayerAppliesMediaSelectionCriteriaAutomaticallyKey, kCFBooleanTrue);
#endif

    // FIXME: We need a way to create a AVPlayer without an AVPlayerItem, see <rdar://problem/9877730>.
    AVCFPlayerRef playerRef = AVCFPlayerCreateWithPlayerItemAndOptions(kCFAllocatorDefault, avPlayerItem(), optionsRef.get(), m_notificationQueue);
    m_avPlayer = adoptCF(playerRef);
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    AVCFPlayerSetClosedCaptionDisplayEnabled(playerRef, FALSE);
#endif

    if (m_d3dDevice && AVCFPlayerSetDirect3DDevicePtr())
        AVCFPlayerSetDirect3DDevicePtr()(playerRef, m_d3dDevice.get());

    CFNotificationCenterRef center = CFNotificationCenterGetLocalCenter();
    ASSERT(center);

    CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerRateChangedNotification, playerRef, CFNotificationSuspensionBehaviorDeliverImmediately);

    // Add a time observer, ask to be called infrequently because we don't really want periodic callbacks but
    // our observer will also be called whenever a seek happens.
    const double veryLongInterval = 60*60*60*24*30;
    m_timeObserver = adoptCF(AVCFPlayerCreatePeriodicTimeObserverForInterval(playerRef, CMTimeMake(veryLongInterval, 10), m_notificationQueue, &periodicTimeObserverCallback, callbackContext()));
}

// Creates the AVCFPlayerItem from the asset and registers for all of the player-item
// notifications this port cares about. Main-thread only.
void AVFWrapper::createPlayerItem()
{
    ASSERT(isMainThread());
    ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
    ASSERT(avAsset());

    if (avPlayerItem())
        return;

    // Create the player item so we begin loading media data.
    AVCFPlayerItemRef itemRef = AVCFPlayerItemCreateWithAsset(kCFAllocatorDefault, avAsset(), m_notificationQueue);
    m_avPlayerItem = adoptCF(itemRef);

    CFNotificationCenterRef center = CFNotificationCenterGetLocalCenter();
    ASSERT(center);

    // Each registration here must have a matching removal in disconnectAndDeleteAVFWrapper().
    CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemDidPlayToEndTimeNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
    CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemStatusChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
    CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemTracksChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
    CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemSeekableTimeRangesChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
    CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemLoadedTimeRangesChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
    CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemPresentationSizeChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
    CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemIsPlaybackLikelyToKeepUpChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
    CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemIsPlaybackBufferEmptyChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
    CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemIsPlaybackBufferFullChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
    CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, AVCFPlayerItemDurationChangedNotification, itemRef, CFNotificationSuspensionBehaviorDeliverImmediately);
    // FIXME: Are there other legible output things we need to register for? asset and hasEnabledAudio are not exposed by AVCF

    CFNotificationCenterAddObserver(center, callbackContext(), notificationCallback, CACFContextNeedsFlushNotification(), 0, CFNotificationSuspensionBehaviorDeliverImmediately);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    const CFTimeInterval legibleOutputAdvanceInterval = 2;

    m_legibleOutput = adoptCF(AVCFPlayerItemLegibleOutputCreateWithMediaSubtypesForNativeRepresentation(kCFAllocatorDefault, 0));
    // We render cues ourselves; keep the player from drawing them.
    AVCFPlayerItemOutputSetSuppressPlayerRendering(m_legibleOutput.get(), TRUE);

    AVCFPlayerItemLegibleOutputCallbacks callbackInfo;
    callbackInfo.version = kAVCFPlayerItemLegibleOutput_CallbacksVersion_1;
    ASSERT(callbackContext());
    callbackInfo.context = callbackContext();
    callbackInfo.legibleOutputCallback = AVFWrapper::legibleOutputCallback;

    AVCFPlayerItemLegibleOutputSetCallbacks(m_legibleOutput.get(), &callbackInfo, dispatchQueue());
    AVCFPlayerItemLegibleOutputSetAdvanceIntervalForCallbackInvocation(m_legibleOutput.get(), legibleOutputAdvanceInterval);
    AVCFPlayerItemLegibleOutputSetTextStylingResolution(m_legibleOutput.get(), AVCFPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly);
    AVCFPlayerItemAddOutput(m_avPlayerItem.get(), m_legibleOutput.get());
#endif
}

// Invoked on the notification queue whenever the (very long) periodic interval
// elapses or a seek occurs; forwards the current time to the owner on the main thread.
void AVFWrapper::periodicTimeObserverCallback(AVCFPlayerRef, CMTime cmTime, void* context)
{
    MutexLocker locker(mapLock());
    AVFWrapper* self = avfWrapperForCallbackContext(context);
    if (!self) {
        LOG(Media, "AVFWrapper::periodicTimeObserverCallback invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
        return;
    }

    double time = std::max(0.0, CMTimeGetSeconds(cmTime)); // Clamp to zero, negative values are sometimes reported.
    self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::PlayerTimeChanged, time);
}

// Carries a CF notification (name + wrapper callback context) from the
// notification queue to the main thread. Ownership transfers through
// dispatch_async_f and is reclaimed by adoptPtr in processNotification().
struct NotificationCallbackData {
    RetainPtr<CFStringRef> m_propertyName;
    void* m_context;

    NotificationCallbackData(CFStringRef propertyName, void* context)
        : m_propertyName(propertyName), m_context(context)
    {
    }
};

// Runs on the main thread: maps the CF notification name to the corresponding
// MediaPlayerPrivateAVFoundation notification and schedules it on the owner.
void AVFWrapper::processNotification(void* context)
{
    ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
    ASSERT(context);

    if (!context)
        return;

    // Take ownership of the heap-allocated data leaked by notificationCallback().
    OwnPtr<NotificationCallbackData> notificationData = adoptPtr(reinterpret_cast<NotificationCallbackData*>(context));

    MutexLocker locker(mapLock());
    AVFWrapper* self = avfWrapperForCallbackContext(notificationData->m_context);
    if (!self) {
        LOG(Media, "AVFWrapper::processNotification invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
        return;
    }

    CFStringRef propertyName = notificationData->m_propertyName.get();

    if (CFEqual(propertyName, AVCFPlayerItemDidPlayToEndTimeNotification))
        self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemDidPlayToEndTime);
    else if (CFEqual(propertyName, AVCFPlayerItemTracksChangedNotification))
        self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemTracksChanged);
    else if (CFEqual(propertyName, AVCFPlayerItemStatusChangedNotification)) {
        // A status change may be the first time the item's asset is available; cache it.
        AVCFURLAssetRef asset = AVCFPlayerItemGetAsset(self->avPlayerItem());
        if (asset)
            self->setAsset(asset);
        self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemStatusChanged);
    } else if (CFEqual(propertyName, AVCFPlayerItemSeekableTimeRangesChangedNotification))
        self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemSeekableTimeRangesChanged);
    else if (CFEqual(propertyName, AVCFPlayerItemLoadedTimeRangesChangedNotification))
        self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemLoadedTimeRangesChanged);
    else if (CFEqual(propertyName, AVCFPlayerItemPresentationSizeChangedNotification))
        self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemPresentationSizeChanged);
    else if (CFEqual(propertyName, AVCFPlayerItemIsPlaybackLikelyToKeepUpChangedNotification))
        self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemIsPlaybackLikelyToKeepUpChanged);
    else if (CFEqual(propertyName, AVCFPlayerItemIsPlaybackBufferEmptyChangedNotification))
        self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemIsPlaybackBufferEmptyChanged);
    else if (CFEqual(propertyName, AVCFPlayerItemIsPlaybackBufferFullChangedNotification))
        self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemIsPlaybackBufferFullChanged);
    else if (CFEqual(propertyName, AVCFPlayerRateChangedNotification))
        self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::PlayerRateChanged);
    else if (CFEqual(propertyName, CACFContextNeedsFlushNotification()))
        self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ContentsNeedsDisplay);
    else if (CFEqual(propertyName, AVCFPlayerItemDurationChangedNotification))
        self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::DurationChanged);
    else
        ASSERT_NOT_REACHED();
}

// CFNotificationCenter entry point (notification queue): packages the
// notification and hops to the main thread via processNotification().
// 'observer' is the wrapper's opaque callback context, not an object pointer.
void AVFWrapper::notificationCallback(CFNotificationCenterRef, void* observer, CFStringRef propertyName, const void* object, CFDictionaryRef)
{
#if !LOG_DISABLED
    char notificationName[256];
    CFStringGetCString(propertyName, notificationName, sizeof(notificationName), kCFStringEncodingASCII);
    LOG(Media, "AVFWrapper::notificationCallback(if=%d) %s", reinterpret_cast<uintptr_t>(observer), notificationName);
#endif

    // Leaked here; reclaimed by adoptPtr in processNotification() on the main thread.
    OwnPtr<NotificationCallbackData> notificationData = adoptPtr(new NotificationCallbackData(propertyName, observer));

    dispatch_async_f(dispatch_get_main_queue(), notificationData.leakPtr(), processNotification);
}

// Completion callback for checkPlayability(); runs on the notification queue.
void AVFWrapper::loadPlayableCompletionCallback(AVCFAssetRef, void* context)
{
    MutexLocker locker(mapLock());
    AVFWrapper* self = avfWrapperForCallbackContext(context);
    if (!self) {
        LOG(Media, "AVFWrapper::loadPlayableCompletionCallback invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
        return;
    }

    LOG(Media, "AVFWrapper::loadPlayableCompletionCallback(%p)", self);
    self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
}

// Asynchronously loads the asset's 'playable' property.
void AVFWrapper::checkPlayability()
{
    LOG(Media, "AVFWrapper::checkPlayability(%p)", this);

    // Built once and reused for every asset; intentionally never released.
    static CFArrayRef propertyKeyName;
    if (!propertyKeyName) {
        static const CFStringRef keyNames[] = {
            AVCFAssetPropertyPlayable
        };
        propertyKeyName = CFArrayCreate(0, (const void**)keyNames, sizeof(keyNames) / sizeof(keyNames[0]), &kCFTypeArrayCallBacks);
    }

    AVCFAssetLoadValuesAsynchronouslyForProperties(avAsset(), propertyKeyName, loadPlayableCompletionCallback, callbackContext());
}

// Completion callback for beginLoadingMetadata(); runs on the notification queue.
void AVFWrapper::loadMetadataCompletionCallback(AVCFAssetRef, void* context)
{
    MutexLocker locker(mapLock());
    AVFWrapper* self = avfWrapperForCallbackContext(context);
    if (!self) {
        LOG(Media, "AVFWrapper::loadMetadataCompletionCallback invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
        return;
    }

    LOG(Media, "AVFWrapper::loadMetadataCompletionCallback(%p)", self);
    self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetMetadataLoaded);
}

// Kicks off asynchronous loading of the asset properties listed by metadataKeyNames().
void AVFWrapper::beginLoadingMetadata()
{
    ASSERT(avAsset());
    LOG(Media, "AVFWrapper::beginLoadingMetadata(%p) - requesting metadata loading", this);
    AVCFAssetLoadValuesAsynchronouslyForProperties(avAsset(), metadataKeyNames(), loadMetadataCompletionCallback, callbackContext());
}

// Completion callback for seekToTime(); runs on the notification queue.
// 'finished' is false when the seek was interrupted (e.g. by another seek).
void AVFWrapper::seekCompletedCallback(AVCFPlayerItemRef, Boolean finished, void* context)
{
    MutexLocker locker(mapLock());
    AVFWrapper* self = avfWrapperForCallbackContext(context);
    if (!self) {
        LOG(Media, "AVFWrapper::seekCompletedCallback invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
        return;
    }

    LOG(Media, "AVFWrapper::seekCompletedCallback(%p)", self);
    self->m_owner->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::SeekCompleted, static_cast<bool>(finished));
}

// Seeks with zero tolerance (frame-accurate); completion is reported via
// seekCompletedCallback. 600 is the conventional QuickTime timescale.
void AVFWrapper::seekToTime(float time)
{
    ASSERT(avPlayerItem());
    AVCFPlayerItemSeekToTimeWithToleranceAndCompletionCallback(avPlayerItem(), CMTimeMakeWithSeconds(time, 600),
        kCMTimeZero, kCMTimeZero, &seekCompletedCallback, callbackContext());
}

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
// Carries one batch of attributed cue strings (plus their item time and the
// wrapper callback context) from the legible-output queue to the main thread.
struct LegibleOutputData {
    RetainPtr<CFArrayRef> m_attributedStrings;
    double m_time;
    void* m_context;

    LegibleOutputData(CFArrayRef strings, double time, void* context)
        : m_attributedStrings(strings), m_time(time), m_context(context)
    {
    }
};

// Runs on the main thread: delivers cue strings to the currently selected text track.
void AVFWrapper::processCue(void* context)
{
    ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
    ASSERT(context);

    if (!context)
        return;

    // Take ownership of the data leaked by legibleOutputCallback().
    OwnPtr<LegibleOutputData> legibleOutputData = adoptPtr(reinterpret_cast<LegibleOutputData*>(context));

    MutexLocker locker(mapLock());
    AVFWrapper* self = avfWrapperForCallbackContext(legibleOutputData->m_context);
    if (!self) {
        LOG(Media, "AVFWrapper::processCue invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
        return;
    }

    // Cues arriving while no track is selected are dropped.
    if (!self->m_currentTrack)
        return;

    self->m_currentTrack->processCue(legibleOutputData->m_attributedStrings.get(), legibleOutputData->m_time);
}

// Legible-output entry point (dispatch queue set in createPlayerItem()):
// packages the cue batch and hops to the main thread via processCue().
void AVFWrapper::legibleOutputCallback(void* context, AVCFPlayerItemLegibleOutputRef legibleOutput, CFArrayRef attributedStrings, CFArrayRef /*nativeSampleBuffers*/, CMTime itemTime)
{
    ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue());
    MutexLocker locker(mapLock());
    AVFWrapper* self = avfWrapperForCallbackContext(context);
    if (!self) {
        LOG(Media, "AVFWrapper::legibleOutputCallback invoked for deleted AVFWrapper %d", reinterpret_cast<uintptr_t>(context));
        return;
    }

    LOG(Media, "AVFWrapper::legibleOutputCallback(%p)", self);

    ASSERT(legibleOutput == self->m_legibleOutput);

    // Leaked here; reclaimed by adoptPtr in processCue() on the main thread.
    OwnPtr<LegibleOutputData> legibleOutputData = adoptPtr(new LegibleOutputData(attributedStrings, CMTimeGetSeconds(itemTime), context));

    dispatch_async_f(dispatch_get_main_queue(), legibleOutputData.leakPtr(), processCue);
}
#endif

// Replaces the cached asset, cancelling loading on the one being dropped.
// NOTE(review): AVCFAssetCancelLoading is called even when avAsset() is null —
// confirm the AVCF API tolerates a null asset on this path.
void AVFWrapper::setAsset(AVCFURLAssetRef asset)
{
    if (asset == avAsset())
        return;

    AVCFAssetCancelLoading(avAsset());
    m_avAsset = adoptCF(asset);
}

// Returns the platform layer used for compositing, or 0 when no video layer exists.
PlatformLayer* AVFWrapper::platformLayer()
{
    if (m_videoLayerWrapper)
        return m_videoLayerWrapper->platformLayer();

    if (!videoLayer())
        return 0;

    // Create
a PlatformCALayer so we can resize the video layer to match the element size. 1653 m_layerClient = adoptPtr(new LayerClient(this)); 1654 if (!m_layerClient) 1655 return 0; 1656 1657 m_videoLayerWrapper = PlatformCALayer::create(PlatformCALayer::LayerTypeLayer, m_layerClient.get()); 1658 if (!m_videoLayerWrapper) 1659 return 0; 1660 1661 m_caVideoLayer = adoptCF(AVCFPlayerLayerCopyCACFLayer(m_avCFVideoLayer.get())); 1662 1663 CACFLayerInsertSublayer(m_videoLayerWrapper->platformLayer(), m_caVideoLayer.get(), 0); 1664 m_videoLayerWrapper->setAnchorPoint(FloatPoint3D()); 1665 m_videoLayerWrapper->setNeedsLayout(); 1666 1667 return m_videoLayerWrapper->platformLayer(); 1668} 1669 1670void AVFWrapper::createAVCFVideoLayer() 1671{ 1672 ASSERT(isMainThread()); 1673 ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue()); 1674 if (!avPlayer() || m_avCFVideoLayer) 1675 return; 1676 1677 // The layer will get hooked up via RenderLayerBacking::updateGraphicsLayerConfiguration(). 1678 m_avCFVideoLayer = adoptCF(AVCFPlayerLayerCreateWithAVCFPlayer(kCFAllocatorDefault, avPlayer(), m_notificationQueue)); 1679 LOG(Media, "AVFWrapper::createAVCFVideoLayer(%p) - returning %p", this, videoLayer()); 1680} 1681 1682void AVFWrapper::destroyVideoLayer() 1683{ 1684 ASSERT(isMainThread()); 1685 ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue()); 1686 LOG(Media, "AVFWrapper::destroyVideoLayer(%p)", this); 1687 m_layerClient = nullptr; 1688 m_caVideoLayer = 0; 1689 m_videoLayerWrapper = 0; 1690 if (!m_avCFVideoLayer.get()) 1691 return; 1692 1693 AVCFPlayerLayerSetPlayer((AVCFPlayerLayerRef)m_avCFVideoLayer.get(), 0); 1694 m_avCFVideoLayer = 0; 1695} 1696 1697void AVFWrapper::setVideoLayerNeedsCommit() 1698{ 1699 if (m_videoLayerWrapper) 1700 m_videoLayerWrapper->setNeedsCommit(); 1701} 1702 1703void AVFWrapper::setVideoLayerHidden(bool value) 1704{ 1705 if (m_videoLayerWrapper) 1706 m_videoLayerWrapper->setHidden(value); 1707} 1708 1709void 
AVFWrapper::createImageGenerator() 1710{ 1711 ASSERT(isMainThread()); 1712 ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue()); 1713 if (!avAsset() || m_imageGenerator) 1714 return; 1715 1716 m_imageGenerator = adoptCF(AVCFAssetImageGeneratorCreateWithAsset(kCFAllocatorDefault, avAsset())); 1717 1718 AVCFAssetImageGeneratorSetApertureMode(m_imageGenerator.get(), AVCFAssetImageGeneratorApertureModeCleanAperture); 1719 AVCFAssetImageGeneratorSetRequestedTimeToleranceBefore(m_imageGenerator.get(), kCMTimeZero); 1720 AVCFAssetImageGeneratorSetRequestedTimeToleranceAfter(m_imageGenerator.get(), kCMTimeZero); 1721 AVCFAssetImageGeneratorSetAppliesPreferredTrackTransform(m_imageGenerator.get(), true); 1722 1723 LOG(Media, "AVFWrapper::createImageGenerator(%p) - returning %p", this, m_imageGenerator.get()); 1724} 1725 1726void AVFWrapper::destroyImageGenerator() 1727{ 1728 ASSERT(isMainThread()); 1729 ASSERT(dispatch_get_main_queue() == dispatch_get_current_queue()); 1730 LOG(Media, "AVFWrapper::destroyImageGenerator(%p)", this); 1731 m_imageGenerator = 0; 1732} 1733 1734RetainPtr<CGImageRef> AVFWrapper::createImageForTimeInRect(float time, const IntRect& rect) 1735{ 1736 if (!m_imageGenerator) 1737 return 0; 1738 1739#if !LOG_DISABLED 1740 double start = WTF::currentTime(); 1741#endif 1742 1743 AVCFAssetImageGeneratorSetMaximumSize(m_imageGenerator.get(), CGSize(rect.size())); 1744 CGImageRef image = AVCFAssetImageGeneratorCopyCGImageAtTime(m_imageGenerator.get(), CMTimeMakeWithSeconds(time, 600), 0, 0); 1745 1746#if !LOG_DISABLED 1747 double duration = WTF::currentTime() - start; 1748 LOG(Media, "AVFWrapper::createImageForTimeInRect(%p) - creating image took %.4f", this, narrowPrecisionToFloat(duration)); 1749#endif 1750 1751 return image; 1752} 1753 1754#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT) 1755AVCFMediaSelectionGroupRef AVFWrapper::safeMediaSelectionGroupForLegibleMedia() const 1756{ 1757 if (!avAsset()) 
1758 return 0; 1759 1760 if (AVCFAssetGetStatusOfValueForProperty(avAsset(), AVCFAssetPropertyAvailableMediaCharacteristicsWithMediaSelectionOptions, 0) != AVCFPropertyValueStatusLoaded) 1761 return 0; 1762 1763 return AVCFAssetGetSelectionGroupForMediaCharacteristic(avAsset(), AVCFMediaCharacteristicLegible); 1764} 1765#endif 1766 1767void LayerClient::platformCALayerLayoutSublayersOfLayer(PlatformCALayer* wrapperLayer) 1768{ 1769 ASSERT(m_parent); 1770 ASSERT(m_parent->videoLayerWrapper() == wrapperLayer->platformLayer()); 1771 1772 CGRect bounds = wrapperLayer->bounds(); 1773 CGPoint anchor = CACFLayerGetAnchorPoint(m_parent->caVideoLayer()); 1774 FloatPoint position(bounds.size.width * anchor.x, bounds.size.height * anchor.y); 1775 1776 CACFLayerSetPosition(m_parent->caVideoLayer(), position); 1777 CACFLayerSetBounds(m_parent->caVideoLayer(), bounds); 1778 1779 AVCFPlayerLayerSetFrame(m_parent->videoLayer(), CGRectMake(0, 0, bounds.size.width, bounds.size.height)); 1780} 1781 1782} // namespace WebCore 1783 1784#else 1785// AVFoundation should always be enabled for Apple production builds. 1786#if __PRODUCTION__ && !USE(AVFOUNDATION) 1787// #error AVFoundation is not enabled! 1788#endif // __PRODUCTION__ && !USE(AVFOUNDATION) 1789#endif // USE(AVFOUNDATION) 1790#endif // PLATFORM(WIN) && ENABLE(VIDEO) 1791