1/* 2 * Copyright (C) 2013 Apple Inc. All rights reserved. 3 * 4 * Redistribution and use in source and binary forms, with or without 5 * modification, are permitted provided that the following conditions 6 * are met: 7 * 1. Redistributions of source code must retain the above copyright 8 * notice, this list of conditions and the following disclaimer. 9 * 2. Redistributions in binary form must reproduce the above copyright 10 * notice, this list of conditions and the following disclaimer in the 11 * documentation and/or other materials provided with the distribution. 12 * 13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY 14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR 16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR 17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, 18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, 19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR 20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY 21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24 */ 25 26#import "config.h" 27#import "MediaPlayerPrivateMediaSourceAVFObjC.h" 28 29#if ENABLE(MEDIA_SOURCE) && USE(AVFOUNDATION) 30 31#import "CDMSession.h" 32#import "Logging.h" 33#import "MediaSourcePrivateAVFObjC.h" 34#import "MediaSourcePrivateClient.h" 35#import "MediaTimeMac.h" 36#import "PlatformClockCM.h" 37#import "SoftLinking.h" 38#import "WebCoreSystemInterface.h" 39#import <AVFoundation/AVAsset.h> 40#import <AVFoundation/AVTime.h> 41#import <CoreMedia/CMSync.h> 42#import <QuartzCore/CALayer.h> 43#import <objc_runtime.h> 44#import <wtf/Functional.h> 45#import <wtf/MainThread.h> 46#import <wtf/NeverDestroyed.h> 47 48#pragma mark - 49#pragma mark Soft Linking 50 51SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation) 52SOFT_LINK_FRAMEWORK_OPTIONAL(CoreMedia) 53 54SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVAsset) 55SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVURLAsset) 56SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVSampleBufferAudioRenderer) 57SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVSampleBufferDisplayLayer) 58SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVSampleBufferRenderSynchronizer) 59SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVStreamDataParser) 60SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVVideoPerformanceMetrics) 61 62typedef struct opaqueCMNotificationCenter *CMNotificationCenterRef; 63typedef void (*CMNotificationCallback)(CMNotificationCenterRef inCenter, const void *inListener, CFStringRef inNotificationName, const void *inNotifyingObject, CFTypeRef inNotificationPayload); 64 65SOFT_LINK(CoreMedia, CMNotificationCenterGetDefaultLocalCenter, CMNotificationCenterRef, (void), ()); 66SOFT_LINK(CoreMedia, CMNotificationCenterAddListener, OSStatus, (CMNotificationCenterRef center, const void* listener, CMNotificationCallback callback, CFStringRef notification, const void* object, UInt32 flags), (center, listener, callback, notification, object, flags)) 67SOFT_LINK(CoreMedia, CMNotificationCenterRemoveListener, OSStatus, (CMNotificationCenterRef center, const void* listener, 
CMNotificationCallback callback, CFStringRef notification, const void* object), (center, listener, callback, notification, object)) 68SOFT_LINK(CoreMedia, CMTimeGetSeconds, Float64, (CMTime time), (time)) 69SOFT_LINK(CoreMedia, CMTimebaseGetTime, CMTime, (CMTimebaseRef timebase), (timebase)) 70 71SOFT_LINK_CONSTANT(CoreMedia, kCMTimebaseNotification_EffectiveRateChanged, CFStringRef) 72#define kCMTimebaseNotification_EffectiveRateChanged getkCMTimebaseNotification_EffectiveRateChanged() 73 74#pragma mark - 75#pragma mark AVSampleBufferDisplayLayer 76 77@interface AVSampleBufferDisplayLayer : CALayer 78@end 79 80#pragma mark - 81#pragma mark AVVideoPerformanceMetrics 82 83@interface AVVideoPerformanceMetrics : NSObject 84- (unsigned long)totalNumberOfVideoFrames; 85- (unsigned long)numberOfDroppedVideoFrames; 86- (unsigned long)numberOfCorruptedVideoFrames; 87- (double)totalFrameDelay; 88@end 89 90@interface AVSampleBufferDisplayLayer (WebCoreAVSampleBufferDisplayLayerPrivate) 91- (AVVideoPerformanceMetrics *)videoPerformanceMetrics; 92@end 93 94#pragma mark - 95#pragma mark AVSampleBufferAudioRenderer 96 97@interface AVSampleBufferAudioRenderer : NSObject 98- (void)setVolume:(float)volume; 99- (void)setMuted:(BOOL)muted; 100@end 101 102#pragma mark - 103#pragma mark AVSampleBufferRenderSynchronizer 104 105@interface AVSampleBufferRenderSynchronizer : NSObject 106- (CMTimebaseRef)timebase; 107- (float)rate; 108- (void)setRate:(float)rate; 109- (void)setRate:(float)rate time:(CMTime)time; 110- (NSArray *)renderers; 111- (void)addRenderer:(id)renderer; 112- (void)removeRenderer:(id)renderer atTime:(CMTime)time withCompletionHandler:(void (^)(BOOL didRemoveRenderer))completionHandler; 113- (id)addPeriodicTimeObserverForInterval:(CMTime)interval queue:(dispatch_queue_t)queue usingBlock:(void (^)(CMTime time))block; 114- (id)addBoundaryTimeObserverForTimes:(NSArray *)times queue:(dispatch_queue_t)queue usingBlock:(void (^)(void))block; 115- 
(void)removeTimeObserver:(id)observer;
@end

namespace WebCore {

#pragma mark -
#pragma mark MediaPlayerPrivateMediaSourceAVFObjC

// Called by CoreMedia (possibly off the main thread) when the synchronizer's
// timebase changes its effective rate; bounce to the main thread and notify
// the player, guarding against the player having been destroyed in between.
static void CMTimebaseEffectiveRateChangedCallback(CMNotificationCenterRef, const void *listener, CFStringRef, const void *, CFTypeRef)
{
    MediaPlayerPrivateMediaSourceAVFObjC* player = (MediaPlayerPrivateMediaSourceAVFObjC*)listener;
    auto weakThis = player->createWeakPtr();
    callOnMainThread([weakThis]{
        if (!weakThis)
            return;
        weakThis.get()->effectiveRateChanged();
    });
}

MediaPlayerPrivateMediaSourceAVFObjC::MediaPlayerPrivateMediaSourceAVFObjC(MediaPlayer* player)
    : m_player(player)
    , m_weakPtrFactory(this)
    , m_synchronizer(adoptNS([[getAVSampleBufferRenderSynchronizerClass() alloc] init]))
    , m_seekTimer(this, &MediaPlayerPrivateMediaSourceAVFObjC::seekTimerFired)
    , m_networkState(MediaPlayer::Empty)
    , m_readyState(MediaPlayer::HaveNothing)
    , m_rate(1)
    , m_playing(false)
    , m_seeking(false)
    , m_seekCompleted(true)
    , m_loadingProgressed(false)
{
    // Listen for effective-rate changes on the synchronizer's timebase so the
    // MediaPlayer can be told when playback actually speeds up or stalls.
    CMTimebaseRef timebase = [m_synchronizer timebase];
    CMNotificationCenterRef nc = CMNotificationCenterGetDefaultLocalCenter();
    CMNotificationCenterAddListener(nc, this, CMTimebaseEffectiveRateChangedCallback, kCMTimebaseNotification_EffectiveRateChanged, timebase, 0);

    // addPeriodicTimeObserverForInterval: throws an exception if you pass a non-numeric CMTime, so just use
    // an arbitrarily large time value of once an hour:
    __block auto weakThis = createWeakPtr();
    m_timeJumpedObserver = [m_synchronizer addPeriodicTimeObserverForInterval:toCMTime(MediaTime::createWithDouble(3600)) queue:dispatch_get_main_queue() usingBlock:^(CMTime time) {
#if LOG_DISABLED
        UNUSED_PARAM(time);
#endif
        // FIXME: Remove the below once <rdar://problem/15798050> is fixed.
        if (!weakThis)
            return;

        // A time jump with no pending seek means the last seek has landed:
        // clear the flag, resume playback if appropriate, and tell the player.
        if (m_seeking && !m_pendingSeek) {
            LOG(MediaSource, "MediaPlayerPrivateMediaSourceAVFObjC::m_timeJumpedObserver(%p) - time(%s)", weakThis.get(), toString(toMediaTime(time)).utf8().data());
            m_seeking = false;

            if (shouldBePlaying())
                [m_synchronizer setRate:m_rate];
            if (!seeking())
                m_player->timeChanged();
        }

        if (m_pendingSeek)
            seekInternal();
    }];
}

MediaPlayerPrivateMediaSourceAVFObjC::~MediaPlayerPrivateMediaSourceAVFObjC()
{
    // Balance the listener/observer registrations made in the constructor.
    CMTimebaseRef timebase = [m_synchronizer timebase];
    CMNotificationCenterRef nc = CMNotificationCenterGetDefaultLocalCenter();
    CMNotificationCenterRemoveListener(nc, this, CMTimebaseEffectiveRateChangedCallback, kCMTimebaseNotification_EffectiveRateChanged, timebase);

    if (m_timeJumpedObserver)
        [m_synchronizer removeTimeObserver:m_timeJumpedObserver.get()];
    if (m_durationObserver)
        [m_synchronizer removeTimeObserver:m_durationObserver.get()];

    m_seekTimer.stop();
}

#pragma mark -
#pragma mark MediaPlayer Factory Methods

void MediaPlayerPrivateMediaSourceAVFObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (isAvailable())
        registrar(create, getSupportedTypes, supportsType, 0, 0, 0, supportsKeySystem);
}

PassOwnPtr<MediaPlayerPrivateInterface> MediaPlayerPrivateMediaSourceAVFObjC::create(MediaPlayer* player)
{
    return adoptPtr(new MediaPlayerPrivateMediaSourceAVFObjC(player));
}

bool MediaPlayerPrivateMediaSourceAVFObjC::isAvailable()
{
    // Require not just the frameworks but the specific sample-buffer SPI
    // classes, and -setMuted: in particular (it shipped later than the
    // audio renderer class itself).
    return AVFoundationLibrary()
        && CoreMediaLibrary()
        && getAVStreamDataParserClass()
        && getAVSampleBufferAudioRendererClass()
        && getAVSampleBufferRenderSynchronizerClass()
        && class_getInstanceMethod(getAVSampleBufferAudioRendererClass(), @selector(setMuted:));
}

// Lazily-built cache of the audiovisual MIME types AVURLAsset reports as
// supported.
static HashSet<String> mimeTypeCache()
{
    DEPRECATED_DEFINE_STATIC_LOCAL(HashSet<String>, cache, ());
    static
bool typeListInitialized = false;

    if (typeListInitialized)
        return cache;
    typeListInitialized = true;

    NSArray *types = [getAVURLAssetClass() audiovisualMIMETypes];
    for (NSString *mimeType in types)
        cache.add(mimeType);

    return cache;
}

void MediaPlayerPrivateMediaSourceAVFObjC::getSupportedTypes(HashSet<String>& types)
{
    types = mimeTypeCache();
}

#if ENABLE(ENCRYPTED_MEDIA_V2)
// Only FairPlay Streaming 2.0 is supported by this engine.
static bool keySystemIsSupported(const String& keySystem)
{
    return equalIgnoringCase(keySystem, "com.apple.fps.2_0");
}
#endif

MediaPlayer::SupportsType MediaPlayerPrivateMediaSourceAVFObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
#if ENABLE(ENCRYPTED_MEDIA_V2)
    if (!parameters.keySystem.isEmpty() && !keySystemIsSupported(parameters.keySystem))
        return MediaPlayer::IsNotSupported;
#endif

    // This engine does not support non-media-source sources.
    if (!parameters.isMediaSource)
        return MediaPlayer::IsNotSupported;

    if (!mimeTypeCache().contains(parameters.type))
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    if (parameters.codecs.isEmpty())
        return MediaPlayer::MayBeSupported;

    NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)parameters.type, (NSString *)parameters.codecs];
    return [getAVURLAssetClass() isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}

bool MediaPlayerPrivateMediaSourceAVFObjC::supportsKeySystem(const String& keySystem, const String& mimeType)
{
#if ENABLE(ENCRYPTED_MEDIA_V2)
    if (!wkQueryDecoderAvailability())
        return false;

    if (!keySystem.isEmpty()) {
        if (!keySystemIsSupported(keySystem))
            return false;

        if (!mimeType.isEmpty() && !mimeTypeCache().contains(mimeType))
            return false;

        return true;
    }
#else
    UNUSED_PARAM(keySystem);
    UNUSED_PARAM(mimeType);
#endif
    return false;
}

#pragma mark -
#pragma mark MediaPlayerPrivateInterface Overrides

void MediaPlayerPrivateMediaSourceAVFObjC::load(const String&)
{
    // This media engine only supports MediaSource URLs.
    m_networkState = MediaPlayer::FormatError;
    m_player->networkStateChanged();
}

void MediaPlayerPrivateMediaSourceAVFObjC::load(const String& url, MediaSourcePrivateClient* client)
{
    UNUSED_PARAM(url);

    m_mediaSourcePrivate = MediaSourcePrivateAVFObjC::create(this, client);
}

void MediaPlayerPrivateMediaSourceAVFObjC::cancelLoad()
{
}

void MediaPlayerPrivateMediaSourceAVFObjC::prepareToPlay()
{
}

PlatformMedia MediaPlayerPrivateMediaSourceAVFObjC::platformMedia() const
{
    PlatformMedia pm;
    pm.type = PlatformMedia::AVFoundationAssetType;
    pm.media.avfAsset = m_asset.get();
    return pm;
}

PlatformLayer* MediaPlayerPrivateMediaSourceAVFObjC::platformLayer() const
{
    return m_sampleBufferDisplayLayer.get();
}

// play()/pause() may be called re-entrantly from MediaPlayer callbacks, so
// the real work is deferred to the main run loop via playInternal().
void MediaPlayerPrivateMediaSourceAVFObjC::play()
{
    auto weakThis = createWeakPtr();
    callOnMainThread([weakThis]{
        if (!weakThis)
            return;
        weakThis.get()->playInternal();
    });
}

void MediaPlayerPrivateMediaSourceAVFObjC::playInternal()
{
    // Nothing to do if playback is already at (or past) the end.
    if (currentMediaTime() >= m_mediaSourcePrivate->duration())
        return;

    m_playing = true;
if (shouldBePlaying()) 343 [m_synchronizer setRate:m_rate]; 344} 345 346void MediaPlayerPrivateMediaSourceAVFObjC::pause() 347{ 348 auto weakThis = createWeakPtr(); 349 callOnMainThread([weakThis]{ 350 if (!weakThis) 351 return; 352 weakThis.get()->pauseInternal(); 353 }); 354} 355 356void MediaPlayerPrivateMediaSourceAVFObjC::pauseInternal() 357{ 358 m_playing = false; 359 [m_synchronizer setRate:0]; 360} 361 362bool MediaPlayerPrivateMediaSourceAVFObjC::paused() const 363{ 364 return !m_playing; 365} 366 367void MediaPlayerPrivateMediaSourceAVFObjC::setVolume(float volume) 368{ 369 for (auto it = m_sampleBufferAudioRenderers.begin(), end = m_sampleBufferAudioRenderers.end(); it != end; ++it) 370 [*it setVolume:volume]; 371} 372 373bool MediaPlayerPrivateMediaSourceAVFObjC::supportsScanning() const 374{ 375 return true; 376} 377 378void MediaPlayerPrivateMediaSourceAVFObjC::setMuted(bool muted) 379{ 380 for (auto it = m_sampleBufferAudioRenderers.begin(), end = m_sampleBufferAudioRenderers.end(); it != end; ++it) 381 [*it setMuted:muted]; 382} 383 384IntSize MediaPlayerPrivateMediaSourceAVFObjC::naturalSize() const 385{ 386 if (!m_mediaSourcePrivate) 387 return IntSize(); 388 389 return m_mediaSourcePrivate->naturalSize(); 390} 391 392bool MediaPlayerPrivateMediaSourceAVFObjC::hasVideo() const 393{ 394 if (!m_mediaSourcePrivate) 395 return false; 396 397 return m_mediaSourcePrivate->hasVideo(); 398} 399 400bool MediaPlayerPrivateMediaSourceAVFObjC::hasAudio() const 401{ 402 if (!m_mediaSourcePrivate) 403 return false; 404 405 return m_mediaSourcePrivate->hasAudio(); 406} 407 408void MediaPlayerPrivateMediaSourceAVFObjC::setVisible(bool) 409{ 410 // No-op. 411} 412 413double MediaPlayerPrivateMediaSourceAVFObjC::durationDouble() const 414{ 415 return m_mediaSourcePrivate ? 
m_mediaSourcePrivate->duration().toDouble() : 0; 416} 417 418MediaTime MediaPlayerPrivateMediaSourceAVFObjC::currentMediaTime() const 419{ 420 MediaTime synchronizerTime = toMediaTime(CMTimebaseGetTime([m_synchronizer timebase])); 421 if (synchronizerTime < MediaTime::zeroTime()) 422 return MediaTime::zeroTime(); 423 if (synchronizerTime < m_lastSeekTime) 424 return m_lastSeekTime; 425 return synchronizerTime; 426} 427 428double MediaPlayerPrivateMediaSourceAVFObjC::currentTimeDouble() const 429{ 430 return currentMediaTime().toDouble(); 431} 432 433double MediaPlayerPrivateMediaSourceAVFObjC::startTimeDouble() const 434{ 435 return 0; 436} 437 438double MediaPlayerPrivateMediaSourceAVFObjC::initialTime() const 439{ 440 return 0; 441} 442 443void MediaPlayerPrivateMediaSourceAVFObjC::seekWithTolerance(double time, double negativeThreshold, double positiveThreshold) 444{ 445 LOG(MediaSource, "MediaPlayerPrivateMediaSourceAVFObjC::seekWithTolerance(%p) - time(%s), negativeThreshold(%s), positiveThreshold(%s)", this, toString(time).utf8().data(), toString(negativeThreshold).utf8().data(), toString(positiveThreshold).utf8().data()); 446 m_seeking = true; 447 auto weakThis = createWeakPtr(); 448 m_pendingSeek = std::make_unique<PendingSeek>(MediaTime::createWithDouble(time), MediaTime::createWithDouble(negativeThreshold), MediaTime::createWithDouble(positiveThreshold)); 449 450 if (m_seekTimer.isActive()) 451 m_seekTimer.stop(); 452 m_seekTimer.startOneShot(0); 453} 454 455void MediaPlayerPrivateMediaSourceAVFObjC::seekTimerFired(Timer<MediaPlayerPrivateMediaSourceAVFObjC>&) 456{ 457 seekInternal(); 458} 459 460void MediaPlayerPrivateMediaSourceAVFObjC::seekInternal() 461{ 462 std::unique_ptr<PendingSeek> pendingSeek; 463 pendingSeek.swap(m_pendingSeek); 464 465 if (!pendingSeek) 466 return; 467 468 if (!m_mediaSourcePrivate) 469 return; 470 471 if (pendingSeek->negativeThreshold == MediaTime::zeroTime() && pendingSeek->positiveThreshold == MediaTime::zeroTime()) 472 
        m_lastSeekTime = pendingSeek->targetTime;
    else
        m_lastSeekTime = m_mediaSourcePrivate->fastSeekTimeForMediaTime(pendingSeek->targetTime, pendingSeek->positiveThreshold, pendingSeek->negativeThreshold);

    LOG(MediaSource, "MediaPlayerPrivateMediaSourceAVFObjC::seekInternal(%p) - seekTime(%s)", this, toString(m_lastSeekTime).utf8().data());

    // Pause the synchronizer at the seek target; playback resumes once the
    // seek completes (see seekCompleted() and the time-jumped observer).
    [m_synchronizer setRate:0 time:toCMTime(m_lastSeekTime)];
    m_mediaSourcePrivate->seekToTime(m_lastSeekTime);
}

// Marks an in-progress seek as not yet complete; presumably invoked by the
// media source when it cannot service the seek immediately — confirm with caller.
void MediaPlayerPrivateMediaSourceAVFObjC::waitForSeekCompleted()
{
    if (!m_seeking)
        return;
    LOG(MediaSource, "MediaPlayerPrivateMediaSourceAVFObjC::waitForSeekCompleted(%p)", this);
    m_seekCompleted = false;
}

void MediaPlayerPrivateMediaSourceAVFObjC::seekCompleted()
{
    if (m_seekCompleted)
        return;
    LOG(MediaSource, "MediaPlayerPrivateMediaSourceAVFObjC::seekCompleted(%p)", this);
    m_seekCompleted = true;
    // Resume playback that was suspended while the seek was outstanding.
    if (shouldBePlaying())
        [m_synchronizer setRate:m_rate];
    if (!m_seeking)
        m_player->timeChanged();
}

// A seek is observable only while both "seek requested" and "not yet
// completed" hold.
bool MediaPlayerPrivateMediaSourceAVFObjC::seeking() const
{
    return m_seeking && !m_seekCompleted;
}

void MediaPlayerPrivateMediaSourceAVFObjC::setRateDouble(double rate)
{
    m_rate = rate;
    // Only push the new rate to the synchronizer if playback should be live.
    if (shouldBePlaying())
        [m_synchronizer setRate:m_rate];
}

MediaPlayer::NetworkState MediaPlayerPrivateMediaSourceAVFObjC::networkState() const
{
    return m_networkState;
}

MediaPlayer::ReadyState MediaPlayerPrivateMediaSourceAVFObjC::readyState() const
{
    return m_readyState;
}

std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateMediaSourceAVFObjC::seekable() const
{
    return PlatformTimeRanges::create(MediaTime::createWithDouble(minTimeSeekable()), MediaTime::createWithDouble(maxTimeSeekableDouble()));
}

double MediaPlayerPrivateMediaSourceAVFObjC::maxTimeSeekableDouble() const
{
    return durationDouble();
}

double MediaPlayerPrivateMediaSourceAVFObjC::minTimeSeekable() const
{
    return startTimeDouble();
}

std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateMediaSourceAVFObjC::buffered() const
{
    return m_mediaSourcePrivate ? m_mediaSourcePrivate->buffered() : PlatformTimeRanges::create();
}

// Reports and consumes the progress flag (m_loadingProgressed is presumably
// set elsewhere as data arrives — not visible in this file chunk).
bool MediaPlayerPrivateMediaSourceAVFObjC::didLoadingProgress() const
{
    bool loadingProgressed = m_loadingProgressed;
    m_loadingProgressed = false;
    return loadingProgressed;
}

void MediaPlayerPrivateMediaSourceAVFObjC::setSize(const IntSize&)
{
    // No-op.
}

void MediaPlayerPrivateMediaSourceAVFObjC::paint(GraphicsContext*, const IntRect&)
{
    // FIXME(125157): Implement painting.
}

void MediaPlayerPrivateMediaSourceAVFObjC::paintCurrentFrameInContext(GraphicsContext*, const IntRect&)
{
    // FIXME(125157): Implement painting.
}

bool MediaPlayerPrivateMediaSourceAVFObjC::hasAvailableVideoFrame() const
{
    return m_hasAvailableVideoFrame;
}

bool MediaPlayerPrivateMediaSourceAVFObjC::supportsAcceleratedRendering() const
{
    return true;
}

// Create or tear down the sample-buffer display layer depending on whether
// the client can composite an accelerated layer.
void MediaPlayerPrivateMediaSourceAVFObjC::acceleratedRenderingStateChanged()
{
    if (m_player->mediaPlayerClient()->mediaPlayerRenderingCanBeAccelerated(m_player))
        ensureLayer();
    else
        destroyLayer();
}

MediaPlayer::MovieLoadType MediaPlayerPrivateMediaSourceAVFObjC::movieLoadType() const
{
    return MediaPlayer::StoredStream;
}

void MediaPlayerPrivateMediaSourceAVFObjC::prepareForRendering()
{
    // No-op.
}

String MediaPlayerPrivateMediaSourceAVFObjC::engineDescription() const
{
    static NeverDestroyed<String> description(ASCIILiteral("AVFoundation MediaSource Engine"));
    return description;
}

String MediaPlayerPrivateMediaSourceAVFObjC::languageOfPrimaryAudioTrack() const
{
    // FIXME(125158): implement languageOfPrimaryAudioTrack()
    return emptyString();
}

size_t MediaPlayerPrivateMediaSourceAVFObjC::extraMemoryCost() const
{
    return 0;
}

// The video-quality metrics below read AVSampleBufferDisplayLayer's private
// -videoPerformanceMetrics; if no display layer exists, messaging nil yields 0.
unsigned long MediaPlayerPrivateMediaSourceAVFObjC::totalVideoFrames()
{
    return [[m_sampleBufferDisplayLayer videoPerformanceMetrics] totalNumberOfVideoFrames];
}

unsigned long MediaPlayerPrivateMediaSourceAVFObjC::droppedVideoFrames()
{
    return [[m_sampleBufferDisplayLayer videoPerformanceMetrics] numberOfDroppedVideoFrames];
}

unsigned long MediaPlayerPrivateMediaSourceAVFObjC::corruptedVideoFrames()
{
    return [[m_sampleBufferDisplayLayer videoPerformanceMetrics] numberOfCorruptedVideoFrames];
}

double MediaPlayerPrivateMediaSourceAVFObjC::totalFrameDelay()
{
    return [[m_sampleBufferDisplayLayer videoPerformanceMetrics] totalFrameDelay];
}

#pragma mark -
#pragma mark Utility Methods

// Lazily creates the display layer and attaches it to the render synchronizer.
void MediaPlayerPrivateMediaSourceAVFObjC::ensureLayer()
{
    if (m_sampleBufferDisplayLayer)
        return;

    m_sampleBufferDisplayLayer = adoptNS([[getAVSampleBufferDisplayLayerClass() alloc] init]);
    [m_synchronizer addRenderer:m_sampleBufferDisplayLayer.get()];
}

void MediaPlayerPrivateMediaSourceAVFObjC::destroyLayer()
{
    if (!m_sampleBufferDisplayLayer)
        return;

    // Detach at the current time so removal takes effect immediately.
    CMTime currentTime = CMTimebaseGetTime([m_synchronizer timebase]);
    [m_synchronizer removeRenderer:m_sampleBufferDisplayLayer.get() atTime:currentTime withCompletionHandler:^(BOOL){
        // No-op.
    }];
    m_sampleBufferDisplayLayer = nullptr;
}

// Playback is driven only when all of: play() requested, no seek in flight,
// and enough data buffered to make forward progress.
bool MediaPlayerPrivateMediaSourceAVFObjC::shouldBePlaying() const
{
    return m_playing && !seeking() && m_readyState >= MediaPlayer::HaveFutureData;
}

void MediaPlayerPrivateMediaSourceAVFObjC::durationChanged()
{
    m_player->durationChanged();

    // Replace any previously-installed end-of-media boundary observer.
    if (m_durationObserver)
        [m_synchronizer removeTimeObserver:m_durationObserver.get()];

    if (!m_mediaSourcePrivate)
        return;

    // Arrange to pause and notify exactly when playback reaches the new
    // duration; the weak pointer guards against the player being destroyed
    // before the boundary fires.
    MediaTime duration = m_mediaSourcePrivate->duration();
    auto weakThis = createWeakPtr();
    NSArray* times = @[[NSValue valueWithCMTime:toCMTime(duration)]];
    m_durationObserver = [m_synchronizer addBoundaryTimeObserverForTimes:times queue:dispatch_get_main_queue() usingBlock:[weakThis] {
        if (weakThis) {
            weakThis->pauseInternal();
            weakThis->m_player->timeChanged();
        }
    }];

    // If the duration shrank to (or below) the current time, stop right away.
    if (m_playing && duration <= currentMediaTime())
        pauseInternal();
}

void MediaPlayerPrivateMediaSourceAVFObjC::effectiveRateChanged()
{
    m_player->rateChanged();
}

void MediaPlayerPrivateMediaSourceAVFObjC::sizeChanged()
{
    m_player->sizeChanged();
}

#if ENABLE(ENCRYPTED_MEDIA_V2)
std::unique_ptr<CDMSession> MediaPlayerPrivateMediaSourceAVFObjC::createSession(const String& keySystem)
{
    if (!m_mediaSourcePrivate)
        return nullptr;

    return m_mediaSourcePrivate->createSession(keySystem);
}

void MediaPlayerPrivateMediaSourceAVFObjC::keyNeeded(Uint8Array* initData)
{
    m_player->keyNeeded(initData);
}
#endif

void MediaPlayerPrivateMediaSourceAVFObjC::setReadyState(MediaPlayer::ReadyState readyState)
{
    if (m_readyState == readyState)
        return;

    m_readyState = readyState;

    // Ready-state changes can start or stop playback (see shouldBePlaying()).
    if (shouldBePlaying())
        [m_synchronizer setRate:m_rate];
    else
        [m_synchronizer setRate:0];

    m_player->readyStateChanged();
}

void
MediaPlayerPrivateMediaSourceAVFObjC::setNetworkState(MediaPlayer::NetworkState networkState)
{
    if (m_networkState == networkState)
        return;

    m_networkState = networkState;
    m_player->networkStateChanged();
}

// Adopts an externally-created display layer (presumably from a source
// buffer — confirm with caller) and attaches it to the render synchronizer.
void MediaPlayerPrivateMediaSourceAVFObjC::addDisplayLayer(AVSampleBufferDisplayLayer* displayLayer)
{
    ASSERT(displayLayer);
    if (displayLayer == m_sampleBufferDisplayLayer)
        return;

    m_sampleBufferDisplayLayer = displayLayer;
    [m_synchronizer addRenderer:m_sampleBufferDisplayLayer.get()];
    m_player->mediaPlayerClient()->mediaPlayerRenderingModeChanged(m_player);

    // FIXME: move this somewhere appropriate:
    m_player->firstVideoFrameAvailable();
}

void MediaPlayerPrivateMediaSourceAVFObjC::removeDisplayLayer(AVSampleBufferDisplayLayer* displayLayer)
{
    // Ignore layers other than the one currently attached.
    if (displayLayer != m_sampleBufferDisplayLayer)
        return;

    // Detach at the current time so removal takes effect immediately.
    CMTime currentTime = CMTimebaseGetTime([m_synchronizer timebase]);
    [m_synchronizer removeRenderer:m_sampleBufferDisplayLayer.get() atTime:currentTime withCompletionHandler:^(BOOL){
        // No-op.
    }];

    m_sampleBufferDisplayLayer = nullptr;
    m_player->mediaPlayerClient()->mediaPlayerRenderingModeChanged(m_player);
}

void MediaPlayerPrivateMediaSourceAVFObjC::addAudioRenderer(AVSampleBufferAudioRenderer* audioRenderer)
{
    if (m_sampleBufferAudioRenderers.contains(audioRenderer))
        return;

    m_sampleBufferAudioRenderers.append(audioRenderer);

    // A newly-attached renderer inherits the player's current mute/volume state.
    [audioRenderer setMuted:m_player->muted()];
    [audioRenderer setVolume:m_player->volume()];

    [m_synchronizer addRenderer:audioRenderer];
    m_player->mediaPlayerClient()->mediaPlayerRenderingModeChanged(m_player);
}

void MediaPlayerPrivateMediaSourceAVFObjC::removeAudioRenderer(AVSampleBufferAudioRenderer* audioRenderer)
{
    size_t pos = m_sampleBufferAudioRenderers.find(audioRenderer);
    if (pos == notFound)
        return;

    // Detach at the current time so removal takes effect immediately.
    CMTime currentTime = CMTimebaseGetTime([m_synchronizer timebase]);
    [m_synchronizer removeRenderer:audioRenderer atTime:currentTime withCompletionHandler:^(BOOL){
        // No-op.
    }];

    m_sampleBufferAudioRenderers.remove(pos);
    m_player->mediaPlayerClient()->mediaPlayerRenderingModeChanged(m_player);
}

}

#endif