1/*
2 * Copyright (C) 2013-2014 Apple Inc. All rights reserved.
3 *
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
6 * are met:
7 * 1. Redistributions of source code must retain the above copyright
8 *    notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 *    notice, this list of conditions and the following disclaimer in the
11 *    documentation and/or other materials provided with the distribution.
12 *
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24 */
25
26#import "config.h"
27#import "SourceBufferPrivateAVFObjC.h"
28
29#if ENABLE(MEDIA_SOURCE) && USE(AVFOUNDATION)
30
31#import "ExceptionCodePlaceholder.h"
32#import "Logging.h"
33#import "MediaDescription.h"
34#import "MediaPlayerPrivateMediaSourceAVFObjC.h"
35#import "MediaSample.h"
36#import "MediaSourcePrivateAVFObjC.h"
37#import "MediaTimeMac.h"
38#import "NotImplemented.h"
39#import "SoftLinking.h"
40#import "SourceBufferPrivateClient.h"
41#import "TimeRanges.h"
42#import "AudioTrackPrivateMediaSourceAVFObjC.h"
43#import "VideoTrackPrivateMediaSourceAVFObjC.h"
44#import "InbandTextTrackPrivateAVFObjC.h"
45#import <AVFoundation/AVAssetTrack.h>
46#import <CoreMedia/CMSampleBuffer.h>
47#import <QuartzCore/CALayer.h>
48#import <objc/runtime.h>
49#import <wtf/text/AtomicString.h>
50#import <wtf/text/CString.h>
51#import <wtf/HashCountedSet.h>
52#import <wtf/WeakPtr.h>
53#import <map>
54
55#pragma mark -
56#pragma mark Soft Linking
57
58SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
59SOFT_LINK_FRAMEWORK_OPTIONAL(CoreMedia)
60
61SOFT_LINK_CLASS(AVFoundation, AVAssetTrack)
62SOFT_LINK_CLASS(AVFoundation, AVStreamDataParser)
63SOFT_LINK_CLASS(AVFoundation, AVSampleBufferAudioRenderer)
64SOFT_LINK_CLASS(AVFoundation, AVSampleBufferDisplayLayer)
65
66SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMediaTypeVideo, NSString *)
67SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMediaTypeAudio, NSString *)
68SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMediaTypeText, NSString *)
69
70SOFT_LINK_CONSTANT(CoreMedia, kCMTimeZero, CMTime);
71SOFT_LINK_CONSTANT(CoreMedia, kCMTimeInvalid, CMTime);
72SOFT_LINK_CONSTANT(CoreMedia, kCMSampleAttachmentKey_DoNotDisplay, CFStringRef)
73SOFT_LINK_CONSTANT(CoreMedia, kCMSampleAttachmentKey_NotSync, CFStringRef)
74SOFT_LINK_CONSTANT(CoreMedia, kCMSampleBufferAttachmentKey_DrainAfterDecoding, CFStringRef)
75SOFT_LINK_CONSTANT(CoreMedia, kCMSampleBufferAttachmentKey_ResetDecoderBeforeDecoding, CFStringRef)
76SOFT_LINK_CONSTANT(CoreMedia, kCMSampleBufferAttachmentKey_EmptyMedia, CFStringRef)
77SOFT_LINK_CONSTANT(CoreMedia, kCMSampleBufferAttachmentKey_DisplayEmptyMediaImmediately, CFStringRef)
78
79SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicVisual, NSString*)
80SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicAudible, NSString*)
81SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicLegible, NSString*)
82SOFT_LINK_CONSTANT(AVFoundation, AVSampleBufferDisplayLayerFailedToDecodeNotification, NSString*)
83SOFT_LINK_CONSTANT(AVFoundation, AVSampleBufferDisplayLayerFailedToDecodeNotificationErrorKey, NSString*)
84
85SOFT_LINK(CoreMedia, CMFormatDescriptionGetMediaType, CMMediaType, (CMFormatDescriptionRef desc), (desc))
86SOFT_LINK(CoreMedia, CMSampleBufferCreate, OSStatus, (CFAllocatorRef allocator, CMBlockBufferRef dataBuffer, Boolean dataReady, CMSampleBufferMakeDataReadyCallback makeDataReadyCallback, void *makeDataReadyRefcon, CMFormatDescriptionRef formatDescription, CMItemCount numSamples, CMItemCount numSampleTimingEntries, const CMSampleTimingInfo *sampleTimingArray, CMItemCount numSampleSizeEntries, const size_t *sampleSizeArray, CMSampleBufferRef *sBufOut), (allocator, dataBuffer, dataReady, makeDataReadyCallback, makeDataReadyRefcon, formatDescription, numSamples, numSampleTimingEntries, sampleTimingArray, numSampleSizeEntries, sampleSizeArray, sBufOut))
87SOFT_LINK(CoreMedia, CMSampleBufferCreateCopy, OSStatus, (CFAllocatorRef allocator, CMSampleBufferRef sbuf, CMSampleBufferRef *sbufCopyOut), (allocator, sbuf, sbufCopyOut))
88SOFT_LINK(CoreMedia, CMSampleBufferCallForEachSample, OSStatus, (CMSampleBufferRef sbuf, OSStatus (*callback)( CMSampleBufferRef sampleBuffer, CMItemCount index, void *refcon), void *refcon), (sbuf, callback, refcon))
89SOFT_LINK(CoreMedia, CMSampleBufferGetDecodeTimeStamp, CMTime, (CMSampleBufferRef sbuf), (sbuf))
90SOFT_LINK(CoreMedia, CMSampleBufferGetDuration, CMTime, (CMSampleBufferRef sbuf), (sbuf))
91SOFT_LINK(CoreMedia, CMSampleBufferGetFormatDescription, CMFormatDescriptionRef, (CMSampleBufferRef sbuf), (sbuf))
92SOFT_LINK(CoreMedia, CMSampleBufferGetPresentationTimeStamp, CMTime, (CMSampleBufferRef sbuf), (sbuf))
93SOFT_LINK(CoreMedia, CMSampleBufferGetSampleAttachmentsArray, CFArrayRef, (CMSampleBufferRef sbuf, Boolean createIfNecessary), (sbuf, createIfNecessary))
94SOFT_LINK(CoreMedia, CMSampleBufferGetTotalSampleSize, size_t, (CMSampleBufferRef sbuf), (sbuf))
95SOFT_LINK(CoreMedia, CMFormatDescriptionGetMediaSubType, FourCharCode, (CMFormatDescriptionRef desc), (desc))
96SOFT_LINK(CoreMedia, CMSetAttachment, void, (CMAttachmentBearerRef target, CFStringRef key, CFTypeRef value, CMAttachmentMode attachmentMode), (target, key, value, attachmentMode))
97SOFT_LINK(CoreMedia, CMVideoFormatDescriptionGetPresentationDimensions, CGSize, (CMVideoFormatDescriptionRef videoDesc, Boolean usePixelAspectRatio, Boolean useCleanAperture), (videoDesc, usePixelAspectRatio, useCleanAperture))
98
99#define AVMediaTypeVideo getAVMediaTypeVideo()
100#define AVMediaTypeAudio getAVMediaTypeAudio()
101#define AVMediaTypeText getAVMediaTypeText()
102#define AVSampleBufferDisplayLayerFailedToDecodeNotification getAVSampleBufferDisplayLayerFailedToDecodeNotification()
103#define AVSampleBufferDisplayLayerFailedToDecodeNotificationErrorKey getAVSampleBufferDisplayLayerFailedToDecodeNotificationErrorKey()
104#define kCMTimeZero getkCMTimeZero()
105#define kCMTimeInvalid getkCMTimeInvalid()
106#define kCMSampleAttachmentKey_NotSync getkCMSampleAttachmentKey_NotSync()
107#define kCMSampleAttachmentKey_DoNotDisplay getkCMSampleAttachmentKey_DoNotDisplay()
108#define kCMSampleBufferAttachmentKey_ResetDecoderBeforeDecoding getkCMSampleBufferAttachmentKey_ResetDecoderBeforeDecoding()
109#define kCMSampleBufferAttachmentKey_DrainAfterDecoding getkCMSampleBufferAttachmentKey_DrainAfterDecoding()
110#define kCMSampleBufferAttachmentKey_EmptyMedia getkCMSampleBufferAttachmentKey_EmptyMedia()
111#define kCMSampleBufferAttachmentKey_DisplayEmptyMediaImmediately getkCMSampleBufferAttachmentKey_DisplayEmptyMediaImmediately()
112
113#define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
114#define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
115#define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
116
117#pragma mark -
118#pragma mark AVStreamDataParser
119
120@interface AVStreamDataParser : NSObject
121- (void)setDelegate:(id)delegate;
122- (void)appendStreamData:(NSData *)data;
123- (void)setShouldProvideMediaData:(BOOL)shouldProvideMediaData forTrackID:(CMPersistentTrackID)trackID;
124- (BOOL)shouldProvideMediaDataForTrackID:(CMPersistentTrackID)trackID;
125- (void)providePendingMediaData;
126- (void)processContentKeyResponseData:(NSData *)contentKeyResponseData forTrackID:(CMPersistentTrackID)trackID;
127- (void)processContentKeyResponseError:(NSError *)error forTrackID:(CMPersistentTrackID)trackID;
128- (void)renewExpiringContentKeyResponseDataForTrackID:(CMPersistentTrackID)trackID;
129- (NSData *)streamingContentKeyRequestDataForApp:(NSData *)appIdentifier contentIdentifier:(NSData *)contentIdentifier trackID:(CMPersistentTrackID)trackID options:(NSDictionary *)options error:(NSError **)outError;
130@end
131
132#pragma mark -
133#pragma mark AVSampleBufferDisplayLayer
134
135@interface AVSampleBufferDisplayLayer : CALayer
136- (NSInteger)status;
137- (NSError*)error;
138- (void)enqueueSampleBuffer:(CMSampleBufferRef)sampleBuffer;
139- (void)flush;
140- (BOOL)isReadyForMoreMediaData;
141- (void)requestMediaDataWhenReadyOnQueue:(dispatch_queue_t)queue usingBlock:(void (^)(void))block;
142- (void)stopRequestingMediaData;
143@end
144
145#pragma mark -
146#pragma mark AVSampleBufferAudioRenderer
147
148@interface AVSampleBufferAudioRenderer : NSObject
149- (NSInteger)status;
150- (NSError*)error;
151- (void)enqueueSampleBuffer:(CMSampleBufferRef)sampleBuffer;
152- (void)flush;
153- (BOOL)isReadyForMoreMediaData;
154- (void)requestMediaDataWhenReadyOnQueue:(dispatch_queue_t)queue usingBlock:(void (^)(void))block;
155- (void)stopRequestingMediaData;
156@end
157
158#pragma mark -
159#pragma mark WebAVStreamDataParserListener
160
161@interface WebAVStreamDataParserListener : NSObject {
162    WeakPtr<WebCore::SourceBufferPrivateAVFObjC> _parent;
163    AVStreamDataParser* _parser;
164}
165- (id)initWithParser:(AVStreamDataParser*)parser parent:(WeakPtr<WebCore::SourceBufferPrivateAVFObjC>)parent;
166@end
167
168@implementation WebAVStreamDataParserListener
169- (id)initWithParser:(AVStreamDataParser*)parser parent:(WeakPtr<WebCore::SourceBufferPrivateAVFObjC>)parent
170{
171    self = [super init];
172    if (!self)
173        return nil;
174
175    ASSERT(parent);
176    _parent = parent;
177    _parser = parser;
178    [_parser setDelegate:self];
179    return self;
180}
181
182- (void)dealloc
183{
184    [_parser setDelegate:nil];
185    [super dealloc];
186}
187
188- (void)invalidate
189{
190    [_parser setDelegate:nil];
191    _parser = nullptr;
192}
193
194- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didParseStreamDataAsAsset:(AVAsset *)asset
195{
196#if ASSERT_DISABLED
197    UNUSED_PARAM(streamDataParser);
198#endif
199    ASSERT(streamDataParser == _parser);
200    RetainPtr<WebAVStreamDataParserListener> strongSelf = self;
201
202    RetainPtr<AVAsset*> strongAsset = asset;
203    callOnMainThread([strongSelf, strongAsset] {
204        if (strongSelf->_parent)
205            strongSelf->_parent->didParseStreamDataAsAsset(strongAsset.get());
206    });
207}
208
209- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didParseStreamDataAsAsset:(AVAsset *)asset withDiscontinuity:(BOOL)discontinuity
210{
211    UNUSED_PARAM(discontinuity);
212#if ASSERT_DISABLED
213    UNUSED_PARAM(streamDataParser);
214#endif
215    ASSERT(streamDataParser == _parser);
216    RetainPtr<WebAVStreamDataParserListener> strongSelf = self;
217
218    RetainPtr<AVAsset*> strongAsset = asset;
219    callOnMainThread([strongSelf, strongAsset] {
220        if (strongSelf->_parent)
221            strongSelf->_parent->didParseStreamDataAsAsset(strongAsset.get());
222    });
223}
224
225- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didFailToParseStreamDataWithError:(NSError *)error
226{
227#if ASSERT_DISABLED
228    UNUSED_PARAM(streamDataParser);
229#endif
230    ASSERT(streamDataParser == _parser);
231    RetainPtr<WebAVStreamDataParserListener> strongSelf = self;
232
233    RetainPtr<NSError> strongError = error;
234    callOnMainThread([strongSelf, strongError] {
235        if (strongSelf->_parent)
236            strongSelf->_parent->didFailToParseStreamDataWithError(strongError.get());
237    });
238}
239
240- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didProvideMediaData:(CMSampleBufferRef)sample forTrackID:(CMPersistentTrackID)trackID mediaType:(NSString *)nsMediaType flags:(NSUInteger)flags
241{
242#if ASSERT_DISABLED
243    UNUSED_PARAM(streamDataParser);
244#endif
245    ASSERT(streamDataParser == _parser);
246    RetainPtr<WebAVStreamDataParserListener> strongSelf = self;
247
248    RetainPtr<CMSampleBufferRef> strongSample = sample;
249    String mediaType = nsMediaType;
250    callOnMainThread([strongSelf, strongSample, trackID, mediaType, flags] {
251        if (strongSelf->_parent)
252            strongSelf->_parent->didProvideMediaDataForTrackID(trackID, strongSample.get(), mediaType, flags);
253    });
254}
255
256- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didReachEndOfTrackWithTrackID:(CMPersistentTrackID)trackID mediaType:(NSString *)nsMediaType
257{
258#if ASSERT_DISABLED
259    UNUSED_PARAM(streamDataParser);
260#endif
261    ASSERT(streamDataParser == _parser);
262    RetainPtr<WebAVStreamDataParserListener> strongSelf = self;
263
264    String mediaType = nsMediaType;
265    callOnMainThread([strongSelf, trackID, mediaType] {
266        if (strongSelf->_parent)
267            strongSelf->_parent->didReachEndOfTrackWithTrackID(trackID, mediaType);
268    });
269}
270
271- (void)streamDataParser:(AVStreamDataParser *)streamDataParser didProvideContentKeyRequestInitializationData:(NSData *)initData forTrackID:(CMPersistentTrackID)trackID
272{
273#if ASSERT_DISABLED
274    UNUSED_PARAM(streamDataParser);
275#endif
276    ASSERT(streamDataParser == _parser);
277    RetainPtr<WebAVStreamDataParserListener> strongSelf = self;
278
279    RetainPtr<NSData> strongData = initData;
280    callOnMainThread([strongSelf, strongData, trackID] {
281        if (strongSelf->_parent)
282            strongSelf->_parent->didProvideContentKeyRequestInitializationDataForTrackID(strongData.get(), trackID);
283    });
284}
285@end
286
287@interface WebAVSampleBufferErrorListener : NSObject {
288    WebCore::SourceBufferPrivateAVFObjC* _parent;
289    Vector<RetainPtr<AVSampleBufferDisplayLayer>> _layers;
290    Vector<RetainPtr<AVSampleBufferAudioRenderer>> _renderers;
291}
292
293- (id)initWithParent:(WebCore::SourceBufferPrivateAVFObjC*)parent;
294- (void)invalidate;
295- (void)beginObservingLayer:(AVSampleBufferDisplayLayer *)layer;
296- (void)stopObservingLayer:(AVSampleBufferDisplayLayer *)layer;
297- (void)beginObservingRenderer:(AVSampleBufferAudioRenderer *)renderer;
298- (void)stopObservingRenderer:(AVSampleBufferAudioRenderer *)renderer;
299@end
300
301@implementation WebAVSampleBufferErrorListener
302
303- (id)initWithParent:(WebCore::SourceBufferPrivateAVFObjC*)parent
304{
305    if (!(self = [super init]))
306        return nil;
307
308    _parent = parent;
309    return self;
310}
311
312- (void)dealloc
313{
314    [self invalidate];
315    [super dealloc];
316}
317
318- (void)invalidate
319{
320    if (!_parent && !_layers.size() && !_renderers.size())
321        return;
322
323    for (auto& layer : _layers) {
324        [layer removeObserver:self forKeyPath:@"error"];
325        [layer removeObserver:self forKeyPath:@"outputObscuredDueToInsufficientExternalProtection"];
326    }
327    _layers.clear();
328
329    for (auto& renderer : _renderers)
330        [renderer removeObserver:self forKeyPath:@"error"];
331    _renderers.clear();
332
333    [[NSNotificationCenter defaultCenter] removeObserver:self];
334
335    _parent = nullptr;
336}
337
338- (void)beginObservingLayer:(AVSampleBufferDisplayLayer*)layer
339{
340    ASSERT(_parent);
341    ASSERT(!_layers.contains(layer));
342
343    _layers.append(layer);
344    [layer addObserver:self forKeyPath:@"error" options:NSKeyValueObservingOptionNew context:nullptr];
345    [layer addObserver:self forKeyPath:@"outputObscuredDueToInsufficientExternalProtection" options:NSKeyValueObservingOptionNew context:nullptr];
346    [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(layerFailedToDecode:) name:AVSampleBufferDisplayLayerFailedToDecodeNotification object:layer];
347}
348
349- (void)stopObservingLayer:(AVSampleBufferDisplayLayer*)layer
350{
351    ASSERT(_parent);
352    ASSERT(_layers.contains(layer));
353
354    [layer removeObserver:self forKeyPath:@"error"];
355    [layer removeObserver:self forKeyPath:@"outputObscuredDueToInsufficientExternalProtection"];
356    _layers.remove(_layers.find(layer));
357
358    [[NSNotificationCenter defaultCenter] removeObserver:self name:AVSampleBufferDisplayLayerFailedToDecodeNotification object:layer];
359}
360
361- (void)beginObservingRenderer:(AVSampleBufferAudioRenderer*)renderer
362{
363    ASSERT(_parent);
364    ASSERT(!_renderers.contains(renderer));
365
366    _renderers.append(renderer);
367    [renderer addObserver:self forKeyPath:@"error" options:NSKeyValueObservingOptionNew context:nullptr];
368}
369
370- (void)stopObservingRenderer:(AVSampleBufferAudioRenderer*)renderer
371{
372    ASSERT(_parent);
373    ASSERT(_renderers.contains(renderer));
374
375    [renderer removeObserver:self forKeyPath:@"error"];
376    _renderers.remove(_renderers.find(renderer));
377}
378
379- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
380{
381    UNUSED_PARAM(context);
382    UNUSED_PARAM(keyPath);
383    ASSERT(_parent);
384
385    RetainPtr<WebAVSampleBufferErrorListener> strongSelf = self;
386    if ([object isKindOfClass:getAVSampleBufferDisplayLayerClass()]) {
387        RetainPtr<AVSampleBufferDisplayLayer> layer = (AVSampleBufferDisplayLayer *)object;
388        ASSERT(_layers.contains(layer.get()));
389
390        if ([keyPath isEqualTo:@"error"]) {
391            RetainPtr<NSError> error = [change valueForKey:NSKeyValueChangeNewKey];
392            callOnMainThread([strongSelf, layer, error] {
393                strongSelf->_parent->layerDidReceiveError(layer.get(), error.get());
394            });
395        } else if ([keyPath isEqualTo:@"outputObscuredDueToInsufficientExternalProtection"]) {
396            if ([[change valueForKey:NSKeyValueChangeNewKey] boolValue]) {
397                RetainPtr<NSError> error = [NSError errorWithDomain:@"com.apple.WebKit" code:'HDCP' userInfo:nil];
398                callOnMainThread([strongSelf, layer, error] {
399                    strongSelf->_parent->layerDidReceiveError(layer.get(), error.get());
400                });
401            }
402        } else
403            ASSERT_NOT_REACHED();
404
405    } else if ([object isKindOfClass:getAVSampleBufferAudioRendererClass()]) {
406        RetainPtr<AVSampleBufferAudioRenderer> renderer = (AVSampleBufferAudioRenderer *)object;
407        RetainPtr<NSError> error = [change valueForKey:NSKeyValueChangeNewKey];
408
409        ASSERT(_renderers.contains(renderer.get()));
410        ASSERT([keyPath isEqualTo:@"error"]);
411
412        callOnMainThread([strongSelf, renderer, error] {
413            strongSelf->_parent->rendererDidReceiveError(renderer.get(), error.get());
414        });
415    } else
416        ASSERT_NOT_REACHED();
417}
418
419- (void)layerFailedToDecode:(NSNotification*)note
420{
421    RetainPtr<AVSampleBufferDisplayLayer> layer = (AVSampleBufferDisplayLayer *)[note object];
422    ASSERT(_layers.contains(layer.get()));
423
424    RetainPtr<NSError> error = [[note userInfo] valueForKey:AVSampleBufferDisplayLayerFailedToDecodeNotificationErrorKey];
425
426    RetainPtr<WebAVSampleBufferErrorListener> strongSelf = self;
427    callOnMainThread([strongSelf, layer, error] {
428        strongSelf->_parent->layerDidReceiveError(layer.get(), error.get());
429    });
430}
431@end
432
433namespace WebCore {
434
435#pragma mark -
436#pragma mark MediaSampleAVFObjC
437
438class MediaSampleAVFObjC final : public MediaSample {
439public:
440    static RefPtr<MediaSampleAVFObjC> create(CMSampleBufferRef sample, int trackID) { return adoptRef(new MediaSampleAVFObjC(sample, trackID)); }
441    virtual ~MediaSampleAVFObjC() { }
442
443private:
444    MediaSampleAVFObjC(CMSampleBufferRef sample, int trackID)
445        : m_sample(sample)
446        , m_id(String::format("%d", trackID))
447    {
448    }
449
450    virtual MediaTime presentationTime() const override { return toMediaTime(CMSampleBufferGetPresentationTimeStamp(m_sample.get())); }
451    virtual MediaTime decodeTime() const override { return toMediaTime(CMSampleBufferGetDecodeTimeStamp(m_sample.get())); }
452    virtual MediaTime duration() const override { return toMediaTime(CMSampleBufferGetDuration(m_sample.get())); }
453    virtual AtomicString trackID() const override { return m_id; }
454    virtual size_t sizeInBytes() const override { return CMSampleBufferGetTotalSampleSize(m_sample.get()); }
455    virtual FloatSize presentationSize() const override;
456
457    virtual SampleFlags flags() const override;
458    virtual PlatformSample platformSample() override;
459    virtual void dump(PrintStream&) const override;
460
461    RetainPtr<CMSampleBufferRef> m_sample;
462    AtomicString m_id;
463};
464
465PlatformSample MediaSampleAVFObjC::platformSample()
466{
467    PlatformSample sample = { PlatformSample::CMSampleBufferType, { .cmSampleBuffer = m_sample.get() } };
468    return sample;
469}
470
471static bool CMSampleBufferIsRandomAccess(CMSampleBufferRef sample)
472{
473    CFArrayRef attachments = CMSampleBufferGetSampleAttachmentsArray(sample, false);
474    if (!attachments)
475        return true;
476
477    for (CFIndex i = 0, count = CFArrayGetCount(attachments); i < count; ++i) {
478        CFDictionaryRef attachmentDict = (CFDictionaryRef)CFArrayGetValueAtIndex(attachments, i);
479        if (CFDictionaryContainsKey(attachmentDict, kCMSampleAttachmentKey_NotSync))
480            return false;
481    }
482    return true;
483}
484
485MediaSample::SampleFlags MediaSampleAVFObjC::flags() const
486{
487    int returnValue = MediaSample::None;
488
489    if (CMSampleBufferIsRandomAccess(m_sample.get()))
490        returnValue |= MediaSample::IsSync;
491
492    return SampleFlags(returnValue);
493}
494
495FloatSize MediaSampleAVFObjC::presentationSize() const
496{
497    CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(m_sample.get());
498    if (CMFormatDescriptionGetMediaType(formatDescription) != kCMMediaType_Video)
499        return FloatSize();
500
501    return FloatSize(CMVideoFormatDescriptionGetPresentationDimensions(formatDescription, true, true));
502}
503
504void MediaSampleAVFObjC::dump(PrintStream& out) const
505{
506    out.print("{PTS(", presentationTime(), "), DTS(", decodeTime(), "), duration(", duration(), "), flags(", (int)flags(), "), presentationSize(", presentationSize(), ")}");
507}
508
509#pragma mark -
510#pragma mark MediaDescriptionAVFObjC
511
512class MediaDescriptionAVFObjC final : public MediaDescription {
513public:
514    static RefPtr<MediaDescriptionAVFObjC> create(AVAssetTrack* track) { return adoptRef(new MediaDescriptionAVFObjC(track)); }
515    virtual ~MediaDescriptionAVFObjC() { }
516
517    virtual AtomicString codec() const override { return m_codec; }
518    virtual bool isVideo() const override { return m_isVideo; }
519    virtual bool isAudio() const override { return m_isAudio; }
520    virtual bool isText() const override { return m_isText; }
521
522protected:
523    MediaDescriptionAVFObjC(AVAssetTrack* track)
524        : m_isVideo([track hasMediaCharacteristic:AVMediaCharacteristicVisual])
525        , m_isAudio([track hasMediaCharacteristic:AVMediaCharacteristicAudible])
526        , m_isText([track hasMediaCharacteristic:AVMediaCharacteristicLegible])
527    {
528        NSArray* formatDescriptions = [track formatDescriptions];
529        CMFormatDescriptionRef description = [formatDescriptions count] ? (CMFormatDescriptionRef)[formatDescriptions objectAtIndex:0] : 0;
530        if (description) {
531            FourCharCode codec = CMFormatDescriptionGetMediaSubType(description);
532            m_codec = AtomicString(reinterpret_cast<LChar*>(&codec), 4);
533        }
534    }
535
536    AtomicString m_codec;
537    bool m_isVideo;
538    bool m_isAudio;
539    bool m_isText;
540};
541
542#pragma mark -
543#pragma mark SourceBufferPrivateAVFObjC
544
545RefPtr<SourceBufferPrivateAVFObjC> SourceBufferPrivateAVFObjC::create(MediaSourcePrivateAVFObjC* parent)
546{
547    return adoptRef(new SourceBufferPrivateAVFObjC(parent));
548}
549
550SourceBufferPrivateAVFObjC::SourceBufferPrivateAVFObjC(MediaSourcePrivateAVFObjC* parent)
551    : m_weakFactory(this)
552    , m_parser(adoptNS([[getAVStreamDataParserClass() alloc] init]))
553    , m_delegate(adoptNS([[WebAVStreamDataParserListener alloc] initWithParser:m_parser.get() parent:createWeakPtr()]))
554    , m_errorListener(adoptNS([[WebAVSampleBufferErrorListener alloc] initWithParent:this]))
555    , m_mediaSource(parent)
556    , m_client(0)
557    , m_parsingSucceeded(true)
558    , m_enabledVideoTrackID(-1)
559    , m_protectedTrackID(-1)
560{
561}
562
563SourceBufferPrivateAVFObjC::~SourceBufferPrivateAVFObjC()
564{
565    ASSERT(!m_client);
566    destroyParser();
567    destroyRenderers();
568}
569
570void SourceBufferPrivateAVFObjC::didParseStreamDataAsAsset(AVAsset* asset)
571{
572    LOG(MediaSource, "SourceBufferPrivateAVFObjC::didParseStreamDataAsAsset(%p)", this);
573
574    m_asset = asset;
575
576    m_videoTracks.clear();
577    m_audioTracks.clear();
578
579    SourceBufferPrivateClient::InitializationSegment segment;
580    segment.duration = toMediaTime([m_asset duration]);
581
582    for (AVAssetTrack* track in [m_asset tracks]) {
583        if ([track hasMediaCharacteristic:AVMediaCharacteristicVisual]) {
584            SourceBufferPrivateClient::InitializationSegment::VideoTrackInformation info;
585            RefPtr<VideoTrackPrivateMediaSourceAVFObjC> videoTrack = VideoTrackPrivateMediaSourceAVFObjC::create(track, this);
586            info.track = videoTrack;
587            m_videoTracks.append(videoTrack);
588            info.description = MediaDescriptionAVFObjC::create(track);
589            segment.videoTracks.append(info);
590        } else if ([track hasMediaCharacteristic:AVMediaCharacteristicAudible]) {
591            SourceBufferPrivateClient::InitializationSegment::AudioTrackInformation info;
592            RefPtr<AudioTrackPrivateMediaSourceAVFObjC> audioTrack = AudioTrackPrivateMediaSourceAVFObjC::create(track, this);
593            info.track = audioTrack;
594            m_audioTracks.append(audioTrack);
595            info.description = MediaDescriptionAVFObjC::create(track);
596            segment.audioTracks.append(info);
597        }
598
599        // FIXME(125161): Add TextTrack support
600    }
601
602    if (m_client)
603        m_client->sourceBufferPrivateDidReceiveInitializationSegment(this, segment);
604}
605
606void SourceBufferPrivateAVFObjC::didFailToParseStreamDataWithError(NSError* error)
607{
608#if LOG_DISABLED
609    UNUSED_PARAM(error);
610#endif
611    LOG(MediaSource, "SourceBufferPrivateAVFObjC::didFailToParseStreamDataWithError(%p) - error:\"%s\"", this, String([error description]).utf8().data());
612
613    m_parsingSucceeded = false;
614}
615
616struct ProcessCodedFrameInfo {
617    SourceBufferPrivateAVFObjC* sourceBuffer;
618    int trackID;
619    const String& mediaType;
620};
621
622void SourceBufferPrivateAVFObjC::didProvideMediaDataForTrackID(int trackID, CMSampleBufferRef sampleBuffer, const String& mediaType, unsigned flags)
623{
624    UNUSED_PARAM(flags);
625
626    processCodedFrame(trackID, sampleBuffer, mediaType);
627}
628
629bool SourceBufferPrivateAVFObjC::processCodedFrame(int trackID, CMSampleBufferRef sampleBuffer, const String&)
630{
631    if (trackID == m_enabledVideoTrackID) {
632        CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer);
633        FloatSize formatSize = FloatSize(CMVideoFormatDescriptionGetPresentationDimensions(formatDescription, true, true));
634        if (formatSize != m_cachedSize) {
635            LOG(MediaSource, "SourceBufferPrivateAVFObjC::processCodedFrame(%p) - size change detected: {width=%lf, height=%lf}", formatSize.width(), formatSize.height());
636            m_cachedSize = formatSize;
637            if (m_mediaSource)
638                m_mediaSource->player()->sizeChanged();
639        }
640    }
641
642
643    if (m_client) {
644        RefPtr<MediaSample> mediaSample = MediaSampleAVFObjC::create(sampleBuffer, trackID);
645        LOG(MediaSourceSamples, "SourceBufferPrivateAVFObjC::processCodedFrame(%p) - sample(%s)", this, toString(*mediaSample).utf8().data());
646        m_client->sourceBufferPrivateDidReceiveSample(this, mediaSample.release());
647    }
648
649    return true;
650}
651
652void SourceBufferPrivateAVFObjC::didReachEndOfTrackWithTrackID(int trackID, const String& mediaType)
653{
654    UNUSED_PARAM(mediaType);
655    UNUSED_PARAM(trackID);
656    notImplemented();
657}
658
659void SourceBufferPrivateAVFObjC::didProvideContentKeyRequestInitializationDataForTrackID(NSData* initData, int trackID)
660{
661    if (!m_mediaSource)
662        return;
663
664    UNUSED_PARAM(trackID);
665#if ENABLE(ENCRYPTED_MEDIA_V2)
666    LOG(MediaSource, "SourceBufferPrivateAVFObjC::didProvideContentKeyRequestInitializationDataForTrackID(%p) - track:%d", this, trackID);
667    m_protectedTrackID = trackID;
668    RefPtr<Uint8Array> initDataArray = Uint8Array::create([initData length]);
669    [initData getBytes:initDataArray->data() length:initDataArray->length()];
670    m_mediaSource->sourceBufferKeyNeeded(this, initDataArray.get());
671#else
672    UNUSED_PARAM(initData);
673#endif
674}
675
676void SourceBufferPrivateAVFObjC::setClient(SourceBufferPrivateClient* client)
677{
678    m_client = client;
679}
680
681static dispatch_queue_t globalDataParserQueue()
682{
683    static dispatch_queue_t globalQueue;
684    static dispatch_once_t onceToken;
685    dispatch_once(&onceToken, ^{
686        globalQueue = dispatch_queue_create("SourceBufferPrivateAVFObjC data parser queue", DISPATCH_QUEUE_CONCURRENT);
687    });
688    return globalQueue;
689}
690
691void SourceBufferPrivateAVFObjC::append(const unsigned char* data, unsigned length)
692{
693    LOG(MediaSource, "SourceBufferPrivateAVFObjC::append(%p) - data:%p, length:%d", this, data, length);
694
695    RetainPtr<NSData> nsData = adoptNS([[NSData alloc] initWithBytes:data length:length]);
696    WeakPtr<SourceBufferPrivateAVFObjC> weakThis = createWeakPtr();
697    RetainPtr<AVStreamDataParser> parser = m_parser;
698    RetainPtr<WebAVStreamDataParserListener> delegate = m_delegate;
699
700    m_parsingSucceeded = true;
701
702    dispatch_async(globalDataParserQueue(), [nsData, weakThis, parser, delegate] {
703
704        [parser appendStreamData:nsData.get()];
705
706        callOnMainThread([weakThis] {
707            if (weakThis)
708                weakThis->appendCompleted();
709        });
710    });
711}
712
713void SourceBufferPrivateAVFObjC::appendCompleted()
714{
715    if (m_parsingSucceeded && m_mediaSource)
716        m_mediaSource->player()->setLoadingProgresssed(true);
717
718    if (m_client)
719        m_client->sourceBufferPrivateAppendComplete(this, m_parsingSucceeded ? SourceBufferPrivateClient::AppendSucceeded : SourceBufferPrivateClient::ParsingFailed);
720}
721
722void SourceBufferPrivateAVFObjC::abort()
723{
724    // The parser does not have a mechanism for resetting to a clean state, so destroy and re-create it.
725    // FIXME(135164): Support resetting parser to the last appended initialization segment.
726    destroyParser();
727
728    m_parser = adoptNS([[getAVStreamDataParserClass() alloc] init]);
729    m_delegate = adoptNS([[WebAVStreamDataParserListener alloc] initWithParser:m_parser.get() parent:createWeakPtr()]);
730}
731
732void SourceBufferPrivateAVFObjC::destroyParser()
733{
734    [m_delegate invalidate];
735    m_delegate = nullptr;
736    m_parser = nullptr;
737}
738
739void SourceBufferPrivateAVFObjC::destroyRenderers()
740{
741    if (m_displayLayer) {
742        if (m_mediaSource)
743            m_mediaSource->player()->removeDisplayLayer(m_displayLayer.get());
744        [m_displayLayer flush];
745        [m_displayLayer stopRequestingMediaData];
746        [m_errorListener stopObservingLayer:m_displayLayer.get()];
747        m_displayLayer = nullptr;
748    }
749
750    for (auto it = m_audioRenderers.begin(), end = m_audioRenderers.end(); it != end; ++it) {
751        AVSampleBufferAudioRenderer* renderer = it->second.get();
752        if (m_mediaSource)
753            m_mediaSource->player()->removeAudioRenderer(renderer);
754        [renderer flush];
755        [renderer stopRequestingMediaData];
756        [m_errorListener stopObservingRenderer:renderer];
757    }
758
759    m_audioRenderers.clear();
760}
761
762void SourceBufferPrivateAVFObjC::removedFromMediaSource()
763{
764    destroyParser();
765    destroyRenderers();
766
767    if (m_mediaSource)
768        m_mediaSource->removeSourceBuffer(this);
769}
770
771MediaPlayer::ReadyState SourceBufferPrivateAVFObjC::readyState() const
772{
773    return m_mediaSource ? m_mediaSource->player()->readyState() : MediaPlayer::HaveNothing;
774}
775
776void SourceBufferPrivateAVFObjC::setReadyState(MediaPlayer::ReadyState readyState)
777{
778    if (m_mediaSource)
779        m_mediaSource->player()->setReadyState(readyState);
780}
781
782bool SourceBufferPrivateAVFObjC::hasVideo() const
783{
784    if (!m_client)
785        return false;
786
787    return m_client->sourceBufferPrivateHasVideo(this);
788}
789
790bool SourceBufferPrivateAVFObjC::hasAudio() const
791{
792    if (!m_client)
793        return false;
794
795    return m_client->sourceBufferPrivateHasAudio(this);
796}
797
798void SourceBufferPrivateAVFObjC::trackDidChangeEnabled(VideoTrackPrivateMediaSourceAVFObjC* track)
799{
800    int trackID = track->trackID();
801    if (!track->selected() && m_enabledVideoTrackID == trackID) {
802        m_enabledVideoTrackID = -1;
803        [m_parser setShouldProvideMediaData:NO forTrackID:trackID];
804        if (m_mediaSource)
805            m_mediaSource->player()->removeDisplayLayer(m_displayLayer.get());
806    } else if (track->selected()) {
807        m_enabledVideoTrackID = trackID;
808        [m_parser setShouldProvideMediaData:YES forTrackID:trackID];
809        if (!m_displayLayer) {
810            m_displayLayer = adoptNS([[getAVSampleBufferDisplayLayerClass() alloc] init]);
811            [m_displayLayer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
812                didBecomeReadyForMoreSamples(trackID);
813            }];
814            [m_errorListener beginObservingLayer:m_displayLayer.get()];
815        }
816        if (m_mediaSource)
817            m_mediaSource->player()->addDisplayLayer(m_displayLayer.get());
818    }
819}
820
821void SourceBufferPrivateAVFObjC::trackDidChangeEnabled(AudioTrackPrivateMediaSourceAVFObjC* track)
822{
823    int trackID = track->trackID();
824
825    if (!track->enabled()) {
826        AVSampleBufferAudioRenderer* renderer = m_audioRenderers[trackID].get();
827        [m_parser setShouldProvideMediaData:NO forTrackID:trackID];
828        if (m_mediaSource)
829            m_mediaSource->player()->removeAudioRenderer(renderer);
830    } else {
831        [m_parser setShouldProvideMediaData:YES forTrackID:trackID];
832        RetainPtr<AVSampleBufferAudioRenderer> renderer;
833        if (!m_audioRenderers.count(trackID)) {
834            renderer = adoptNS([[getAVSampleBufferAudioRendererClass() alloc] init]);
835            [renderer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
836                didBecomeReadyForMoreSamples(trackID);
837            }];
838            m_audioRenderers[trackID] = renderer;
839            [m_errorListener beginObservingRenderer:renderer.get()];
840        } else
841            renderer = m_audioRenderers[trackID].get();
842
843        if (m_mediaSource)
844            m_mediaSource->player()->addAudioRenderer(renderer.get());
845    }
846}
847
848void SourceBufferPrivateAVFObjC::registerForErrorNotifications(SourceBufferPrivateAVFObjCErrorClient* client)
849{
850    ASSERT(!m_errorClients.contains(client));
851    m_errorClients.append(client);
852}
853
854void SourceBufferPrivateAVFObjC::unregisterForErrorNotifications(SourceBufferPrivateAVFObjCErrorClient* client)
855{
856    ASSERT(m_errorClients.contains(client));
857    m_errorClients.remove(m_errorClients.find(client));
858}
859
860void SourceBufferPrivateAVFObjC::layerDidReceiveError(AVSampleBufferDisplayLayer *layer, NSError *error)
861{
862    LOG(MediaSource, "SourceBufferPrivateAVFObjC::layerDidReceiveError(%p): layer(%p), error(%@)", this, layer, [error description]);
863    for (auto& client : m_errorClients)
864        client->layerDidReceiveError(layer, error);
865
866    int errorCode = [[[error userInfo] valueForKey:@"OSStatus"] intValue];
867
868    if (m_client)
869        m_client->sourceBufferPrivateDidReceiveRenderingError(this, errorCode);
870}
871
872void SourceBufferPrivateAVFObjC::rendererDidReceiveError(AVSampleBufferAudioRenderer *renderer, NSError *error)
873{
874    LOG(MediaSource, "SourceBufferPrivateAVFObjC::rendererDidReceiveError(%p): renderer(%p), error(%@)", this, renderer, [error description]);
875    for (auto& client : m_errorClients)
876        client->rendererDidReceiveError(renderer, error);
877}
878
879static RetainPtr<CMSampleBufferRef> createNonDisplayingCopy(CMSampleBufferRef sampleBuffer)
880{
881    CMSampleBufferRef newSampleBuffer = 0;
882    CMSampleBufferCreateCopy(kCFAllocatorDefault, sampleBuffer, &newSampleBuffer);
883    if (!newSampleBuffer)
884        return sampleBuffer;
885
886    CFArrayRef attachmentsArray = CMSampleBufferGetSampleAttachmentsArray(newSampleBuffer, true);
887    for (CFIndex i = 0; i < CFArrayGetCount(attachmentsArray); ++i) {
888        CFMutableDictionaryRef attachments = (CFMutableDictionaryRef)CFArrayGetValueAtIndex(attachmentsArray, i);
889        CFDictionarySetValue(attachments, kCMSampleAttachmentKey_DoNotDisplay, kCFBooleanTrue);
890    }
891
892    return adoptCF(newSampleBuffer);
893}
894
895void SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples(Vector<RefPtr<MediaSample>> mediaSamples, AtomicString trackIDString)
896{
897    int trackID = trackIDString.toInt();
898    LOG(MediaSource, "SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples(%p) samples: %d samples, trackId: %d", this, mediaSamples.size(), trackID);
899
900    if (trackID == m_enabledVideoTrackID)
901        flushAndEnqueueNonDisplayingSamples(mediaSamples, m_displayLayer.get());
902    else if (m_audioRenderers.count(trackID))
903        flushAndEnqueueNonDisplayingSamples(mediaSamples, m_audioRenderers[trackID].get());
904}
905
906void SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples(Vector<RefPtr<MediaSample>> mediaSamples, AVSampleBufferAudioRenderer* renderer)
907{
908    [renderer flush];
909
910    for (auto it = mediaSamples.begin(), end = mediaSamples.end(); it != end; ++it) {
911        RefPtr<MediaSample>& mediaSample = *it;
912
913        PlatformSample platformSample = mediaSample->platformSample();
914        ASSERT(platformSample.type == PlatformSample::CMSampleBufferType);
915
916        RetainPtr<CMSampleBufferRef> sampleBuffer = createNonDisplayingCopy(platformSample.sample.cmSampleBuffer);
917
918        [renderer enqueueSampleBuffer:sampleBuffer.get()];
919    }
920}
921
922void SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples(Vector<RefPtr<MediaSample>> mediaSamples, AVSampleBufferDisplayLayer* layer)
923{
924    [layer flush];
925
926    for (auto it = mediaSamples.begin(), end = mediaSamples.end(); it != end; ++it) {
927        RefPtr<MediaSample>& mediaSample = *it;
928
929        LOG(MediaSourceSamples, "SourceBufferPrivateAVFObjC::flushAndEnqueueNonDisplayingSamples(%p) - sample(%s)", this, toString(*mediaSample).utf8().data());
930
931        PlatformSample platformSample = mediaSample->platformSample();
932        ASSERT(platformSample.type == PlatformSample::CMSampleBufferType);
933
934        RetainPtr<CMSampleBufferRef> sampleBuffer = createNonDisplayingCopy(platformSample.sample.cmSampleBuffer);
935
936        [layer enqueueSampleBuffer:sampleBuffer.get()];
937    }
938
939    if (m_mediaSource)
940        m_mediaSource->player()->setHasAvailableVideoFrame(false);
941}
942
943void SourceBufferPrivateAVFObjC::enqueueSample(PassRefPtr<MediaSample> prpMediaSample, AtomicString trackIDString)
944{
945    int trackID = trackIDString.toInt();
946    if (trackID != m_enabledVideoTrackID && !m_audioRenderers.count(trackID))
947        return;
948
949    RefPtr<MediaSample> mediaSample = prpMediaSample;
950
951    PlatformSample platformSample = mediaSample->platformSample();
952    if (platformSample.type != PlatformSample::CMSampleBufferType)
953        return;
954
955    LOG(MediaSourceSamples, "SourceBufferPrivateAVFObjC::enqueueSample(%p) - sample(%s)", this, toString(*mediaSample).utf8().data());
956
957    if (trackID == m_enabledVideoTrackID) {
958        [m_displayLayer enqueueSampleBuffer:platformSample.sample.cmSampleBuffer];
959        if (m_mediaSource)
960            m_mediaSource->player()->setHasAvailableVideoFrame(true);
961    } else
962        [m_audioRenderers[trackID] enqueueSampleBuffer:platformSample.sample.cmSampleBuffer];
963}
964
965bool SourceBufferPrivateAVFObjC::isReadyForMoreSamples(AtomicString trackIDString)
966{
967    int trackID = trackIDString.toInt();
968    if (trackID == m_enabledVideoTrackID)
969        return [m_displayLayer isReadyForMoreMediaData];
970    else if (m_audioRenderers.count(trackID))
971        return [m_audioRenderers[trackID] isReadyForMoreMediaData];
972    else
973        ASSERT_NOT_REACHED();
974
975    return false;
976}
977
978void SourceBufferPrivateAVFObjC::setActive(bool isActive)
979{
980    if (m_mediaSource)
981        m_mediaSource->sourceBufferPrivateDidChangeActiveState(this, isActive);
982}
983
984MediaTime SourceBufferPrivateAVFObjC::fastSeekTimeForMediaTime(MediaTime time, MediaTime negativeThreshold, MediaTime positiveThreshold)
985{
986    if (m_client)
987        return m_client->sourceBufferPrivateFastSeekTimeForMediaTime(this, time, negativeThreshold, positiveThreshold);
988    return time;
989}
990
991void SourceBufferPrivateAVFObjC::seekToTime(MediaTime time)
992{
993    if (m_client)
994        m_client->sourceBufferPrivateSeekToTime(this, time);
995}
996
997IntSize SourceBufferPrivateAVFObjC::naturalSize()
998{
999    return roundedIntSize(m_cachedSize);
1000}
1001
1002void SourceBufferPrivateAVFObjC::didBecomeReadyForMoreSamples(int trackID)
1003{
1004    if (trackID == m_enabledVideoTrackID)
1005        [m_displayLayer stopRequestingMediaData];
1006    else if (m_audioRenderers.count(trackID))
1007        [m_audioRenderers[trackID] stopRequestingMediaData];
1008    else {
1009        ASSERT_NOT_REACHED();
1010        return;
1011    }
1012
1013    if (m_client)
1014        m_client->sourceBufferPrivateDidBecomeReadyForMoreSamples(this, AtomicString::number(trackID));
1015}
1016
1017void SourceBufferPrivateAVFObjC::notifyClientWhenReadyForMoreSamples(AtomicString trackIDString)
1018{
1019    int trackID = trackIDString.toInt();
1020    if (trackID == m_enabledVideoTrackID) {
1021        [m_displayLayer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
1022            didBecomeReadyForMoreSamples(trackID);
1023        }];
1024    } else if (m_audioRenderers.count(trackID)) {
1025        [m_audioRenderers[trackID] requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
1026            didBecomeReadyForMoreSamples(trackID);
1027        }];
1028    } else
1029        ASSERT_NOT_REACHED();
1030}
1031
1032}
1033
1034#endif // ENABLE(MEDIA_SOURCE) && USE(AVFOUNDATION)
1035