/*
 * Copyright (C) 2011 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE COMPUTER, INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE COMPUTER, INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#import "config.h"

#if ENABLE(VIDEO) && USE(AVFOUNDATION)

#import "MediaPlayerPrivateAVFoundationObjC.h"

#import "ApplicationCacheResource.h"
#import "BlockExceptions.h"
#import "FloatConversion.h"
#import "FrameView.h"
#import "GraphicsContext.h"
#import "KURL.h"
#import "Logging.h"
#import "SoftLinking.h"
#import "TimeRanges.h"
#import "WebCoreSystemInterface.h"
#import <objc/objc-runtime.h>
#import <wtf/UnusedParam.h>

#import <CoreMedia/CoreMedia.h>
#import <AVFoundation/AVFoundation.h>

SOFT_LINK_FRAMEWORK(AVFoundation)
SOFT_LINK_FRAMEWORK(CoreMedia)

SOFT_LINK(CoreMedia, CMTimeCompare, int32_t, (CMTime time1, CMTime time2), (time1, time2))
SOFT_LINK(CoreMedia, CMTimeMakeWithSeconds, CMTime, (Float64 seconds, int32_t preferredTimeScale), (seconds, preferredTimeScale))
SOFT_LINK(CoreMedia, CMTimeGetSeconds, Float64, (CMTime time), (time))
SOFT_LINK(CoreMedia, CMTimeRangeGetEnd, CMTime, (CMTimeRange range), (range))

SOFT_LINK_CLASS(AVFoundation, AVPlayer)
SOFT_LINK_CLASS(AVFoundation, AVPlayerItem)
SOFT_LINK_CLASS(AVFoundation, AVPlayerLayer)
SOFT_LINK_CLASS(AVFoundation, AVURLAsset)
SOFT_LINK_CLASS(AVFoundation, AVAssetImageGenerator)

SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicVisual, NSString *)
SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicAudible, NSString *)
SOFT_LINK_POINTER(AVFoundation, AVMediaTypeClosedCaption, NSString *)
SOFT_LINK_POINTER(AVFoundation, AVPlayerItemDidPlayToEndTimeNotification, NSString *)
SOFT_LINK_POINTER(AVFoundation, AVAssetImageGeneratorApertureModeCleanAperture, NSString *)

SOFT_LINK_CONSTANT(CoreMedia, kCMTimeZero, CMTime)

#define AVPlayer getAVPlayerClass()
#define AVPlayerItem getAVPlayerItemClass()
#define AVPlayerLayer getAVPlayerLayerClass()
#define AVURLAsset getAVURLAssetClass()
#define AVAssetImageGenerator getAVAssetImageGeneratorClass()

#define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
#define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
#define AVMediaTypeClosedCaption getAVMediaTypeClosedCaption()
#define AVPlayerItemDidPlayToEndTimeNotification getAVPlayerItemDidPlayToEndTimeNotification()
#define AVAssetImageGeneratorApertureModeCleanAperture getAVAssetImageGeneratorApertureModeCleanAperture()

#define kCMTimeZero getkCMTimeZero()

using namespace WebCore;
using namespace std;

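// Context values passed when registering KVO observers so the callback can tell whether a change
// came from the AVPlayerItem or from the AVPlayer itself.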
enum MediaPlayerAVFoundationObservationContext {
    MediaPlayerAVFoundationObservationContextPlayerItem,
    MediaPlayerAVFoundationObservationContextPlayer
};

@interface WebCoreAVFMovieObserver : NSObject
{
    MediaPlayerPrivateAVFoundationObjC* m_callback;
    int m_delayCallbacks;
}
-(id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
-(void)disconnect;
-(void)playableKnown;
-(void)metadataLoaded;
-(void)timeChanged:(double)time;
-(void)seekCompleted:(BOOL)finished;
-(void)didEnd:(NSNotification *)notification;
-(void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context;
@end

namespace WebCore {

static NSArray *assetMetadataKeyNames();
static NSArray *itemKVOProperties();

#if !LOG_DISABLED
static const char *boolString(bool val)
{
    return val ? "true" : "false";
}
#endif

static const float invalidTime = -1.0f;

MediaPlayerPrivateInterface* MediaPlayerPrivateAVFoundationObjC::create(MediaPlayer* player)
{
    return new MediaPlayerPrivateAVFoundationObjC(player);
}

void MediaPlayerPrivateAVFoundationObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (isAvailable())
        registrar(create, getSupportedTypes, supportsType, 0, 0, 0);
}

MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC(MediaPlayer* player)
    : MediaPlayerPrivateAVFoundation(player)
    , m_objcObserver(AdoptNS, [[WebCoreAVFMovieObserver alloc] initWithCallback:this])
    , m_timeObserver(0)
{
}

MediaPlayerPrivateAVFoundationObjC::~MediaPlayerPrivateAVFoundationObjC()
{
    cancelLoad();
    [m_objcObserver.get() disconnect];
}

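// Cancels any in-flight loading and removes every observer registered on the asset, the player item
// and the player, so no further callbacks arrive while the object is being torn down.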
void MediaPlayerPrivateAVFoundationObjC::cancelLoad()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::cancelLoad(%p)", this);
    tearDownVideoRendering();

    [[NSNotificationCenter defaultCenter] removeObserver:m_objcObserver.get()];

    // Tell our observer to do nothing when our cancellation of pending loading calls its completion handler.
    setIgnoreLoadStateChanges(true);
    if (m_avAsset) {
        [m_avAsset.get() cancelLoading];
        m_avAsset = nil;
    }
    if (m_avPlayerItem) {
        for (NSString *keyName in itemKVOProperties())
            [m_avPlayerItem.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];

        m_avPlayerItem = nil;
    }
    if (m_avPlayer) {
        if (m_timeObserver)
            [m_avPlayer.get() removeTimeObserver:m_timeObserver];
        [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"rate"];
        m_avPlayer = nil;
    }
    setIgnoreLoadStateChanges(false);
}

bool MediaPlayerPrivateAVFoundationObjC::hasLayerRenderer() const
{
    return m_videoLayer;
}

bool MediaPlayerPrivateAVFoundationObjC::hasContextRenderer() const
{
    return m_imageGenerator;
}

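// The AVAssetImageGenerator is the context (software painting) renderer; paint() pulls individual
// frames from it via createImageForTimeInRect().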
void MediaPlayerPrivateAVFoundationObjC::createContextVideoRenderer()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createContextVideoRenderer(%p)", this);

    if (!m_avAsset || m_imageGenerator)
        return;

    m_imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:m_avAsset.get()];

    [m_imageGenerator.get() setApertureMode:AVAssetImageGeneratorApertureModeCleanAperture];
    [m_imageGenerator.get() setAppliesPreferredTrackTransform:YES];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createContextVideoRenderer(%p) - returning %p", this, m_imageGenerator.get());
}

void MediaPlayerPrivateAVFoundationObjC::destroyContextVideoRenderer()
{
    if (!m_imageGenerator)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyContextVideoRenderer(%p) - destroying %p", this, m_imageGenerator.get());

    m_imageGenerator = 0;
}

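// The AVPlayerLayer is the layer renderer: once attached to the AVPlayer it displays video directly
// and is exposed to the compositing code through platformLayer().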
void MediaPlayerPrivateAVFoundationObjC::createVideoLayer()
{
    if (!m_avPlayer)
        return;

    if (!m_videoLayer) {
        m_videoLayer.adoptNS([[AVPlayerLayer alloc] init]);
        [m_videoLayer.get() setPlayer:m_avPlayer.get()];
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoLayer(%p) - returning %p", this, m_videoLayer.get());
    }
}

void MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer()
{
    if (!m_videoLayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer(%p) - destroying %p", this, m_videoLayer.get());

    [m_videoLayer.get() setPlayer:nil];

    m_videoLayer = 0;
}

bool MediaPlayerPrivateAVFoundationObjC::videoLayerIsReadyToDisplay() const
{
    return (m_videoLayer && [m_videoLayer.get() isReadyForDisplay]);
}

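// Creates the AVURLAsset for the given URL. Callbacks are delayed while the asset and player are
// set up so load-state changes are not reported mid-construction.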
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerForURL(const String& url)
{
    setDelayCallbacks(true);

    if (!m_avAsset) {
        NSURL *cocoaURL = KURL(ParsedURLString, url);
        m_avAsset.adoptNS([[AVURLAsset alloc] initWithURL:cocoaURL options:nil]);
    }

    createAVPlayer();
}

#if ENABLE(OFFLINE_WEB_APPLICATIONS)
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerForCacheResource(ApplicationCacheResource* resource)
{
    // AVFoundation can't open arbitrary data pointers, so if this ApplicationCacheResource doesn't
    // have a valid local path, just open the resource's original URL.
    if (resource->path().isEmpty()) {
        createAVPlayerForURL(resource->url());
        return;
    }

    setDelayCallbacks(true);

    if (!m_avAsset) {
        NSURL* localURL = [NSURL fileURLWithPath:resource->path()];
        m_avAsset.adoptNS([[AVURLAsset alloc] initWithURL:localURL options:nil]);
    }

    createAVPlayer();
}
#endif

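// Creates the AVPlayer and AVPlayerItem if needed, observes the player's "rate" and every key path
// in itemKVOProperties(), registers for the did-play-to-end notification, and installs a periodic
// time observer whose callbacks are used to notice time jumps such as seeks.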
void MediaPlayerPrivateAVFoundationObjC::createAVPlayer()
{
    if (!m_avPlayer) {
        m_avPlayer.adoptNS([[AVPlayer alloc] init]);

        [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:@"rate" options:0 context:(void *)MediaPlayerAVFoundationObservationContextPlayer];

        // Add a time observer, ask to be called infrequently because we don't really want periodic callbacks but
        // our observer will also be called whenever a seek happens.
        const double veryLongInterval = 60*60*60*24*30;
        WebCoreAVFMovieObserver *observer = m_objcObserver.get();
        m_timeObserver = [m_avPlayer.get() addPeriodicTimeObserverForInterval:CMTimeMakeWithSeconds(veryLongInterval, 10) queue:nil usingBlock:^(CMTime time){
            [observer timeChanged:CMTimeGetSeconds(time)];
        }];
    }

    if (!m_avPlayerItem) {
        // Create the player item so we can load media data.
        m_avPlayerItem.adoptNS([[AVPlayerItem alloc] initWithAsset:m_avAsset.get()]);

        [[NSNotificationCenter defaultCenter] addObserver:m_objcObserver.get() selector:@selector(didEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:m_avPlayerItem.get()];

        for (NSString *keyName in itemKVOProperties())
            [m_avPlayerItem.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:0 context:(void *)MediaPlayerAVFoundationObservationContextPlayerItem];

        [m_avPlayer.get() replaceCurrentItemWithPlayerItem:m_avPlayerItem.get()];
    }

    setDelayCallbacks(false);
}

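// Asks AVFoundation to load the asset's "playable" key asynchronously; the observer forwards the
// completion to the player as an AssetPlayabilityKnown notification.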
void MediaPlayerPrivateAVFoundationObjC::checkPlayability()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::checkPlayability(%p)", this);

    [m_avAsset.get() loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"playable"] completionHandler:^{
        [m_objcObserver.get() playableKnown];
    }];
}

void MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata(%p) - requesting metadata loading", this);
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:[assetMetadataKeyNames() retain] completionHandler:^{
        [m_objcObserver.get() metadataLoaded];
    }];
}

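// Maps the AVPlayerItem's status and buffering state onto the engine-neutral ItemStatus values
// consumed by the shared MediaPlayerPrivateAVFoundation code.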
MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationObjC::playerItemStatus() const
{
    if (!m_avPlayerItem)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;

    AVPlayerItemStatus status = [m_avPlayerItem.get() status];
    if (status == AVPlayerItemStatusUnknown)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;
    if (status == AVPlayerItemStatusFailed)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;
    if ([m_avPlayerItem.get() isPlaybackLikelyToKeepUp])
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;
    if (buffered()->contain(duration()))
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;
    if (buffered()->contain(currentTime()))
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;

    return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;
}

PlatformMedia MediaPlayerPrivateAVFoundationObjC::platformMedia() const
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformMedia(%p)", this);
    PlatformMedia pm;
    pm.type = PlatformMedia::AVFoundationMediaPlayerType;
    pm.media.avfMediaPlayer = m_avPlayer.get();
    return pm;
}

PlatformLayer* MediaPlayerPrivateAVFoundationObjC::platformLayer() const
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformLayer(%p)", this);
    return m_videoLayer.get();
}

void MediaPlayerPrivateAVFoundationObjC::platformPlay()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPlay(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    [m_avPlayer.get() setRate:requestedRate()];
    setDelayCallbacks(false);
}

void MediaPlayerPrivateAVFoundationObjC::platformPause()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPause(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    [m_avPlayer.get() setRate:0];
    setDelayCallbacks(false);
}

float MediaPlayerPrivateAVFoundationObjC::platformDuration() const
{
    if (!metaDataAvailable() || !m_avPlayerItem)
        return 0;

    float duration;
    CMTime cmDuration = [m_avPlayerItem.get() duration];
    if (CMTIME_IS_NUMERIC(cmDuration))
        duration = narrowPrecisionToFloat(CMTimeGetSeconds(cmDuration));
    else if (CMTIME_IS_INDEFINITE(cmDuration))
        duration = numeric_limits<float>::infinity();
    else {
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::duration(%p) - invalid duration, returning 0", this);
        return 0;
    }

    return duration;
}

float MediaPlayerPrivateAVFoundationObjC::currentTime() const
{
    if (!metaDataAvailable() || !m_avPlayerItem)
        return 0;

    CMTime itemTime = [m_avPlayerItem.get() currentTime];
    if (CMTIME_IS_NUMERIC(itemTime))
        return narrowPrecisionToFloat(CMTimeGetSeconds(itemTime));

    return 0;
}

void MediaPlayerPrivateAVFoundationObjC::seekToTime(float time)
{
    // setCurrentTime generates several event callbacks, update afterwards.
    setDelayCallbacks(true);

    WebCoreAVFMovieObserver *observer = m_objcObserver.get();
    [m_avPlayerItem.get() seekToTime:CMTimeMakeWithSeconds(time, 600) toleranceBefore:kCMTimeZero toleranceAfter:kCMTimeZero completionHandler:^(BOOL finished) {
        [observer seekCompleted:finished];
    }];

    setDelayCallbacks(false);
}

void MediaPlayerPrivateAVFoundationObjC::setVolume(float volume)
{
    if (!metaDataAvailable())
        return;

    [m_avPlayer.get() setVolume:volume];
}

void MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(bool closedCaptionsVisible)
{
    if (!metaDataAvailable())
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(%p) - setting to %s", this, boolString(closedCaptionsVisible));
    [m_avPlayer.get() setClosedCaptionDisplayEnabled:closedCaptionsVisible];
}

void MediaPlayerPrivateAVFoundationObjC::updateRate()
{
    setDelayCallbacks(true);
    [m_avPlayer.get() setRate:requestedRate()];
    setDelayCallbacks(false);
}

float MediaPlayerPrivateAVFoundationObjC::rate() const
{
    if (!metaDataAvailable())
        return 0;

    return [m_avPlayer.get() rate];
}

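// Converts the AVPlayerItem's loadedTimeRanges (an NSArray of CMTimeRange values) into the
// WebCore TimeRanges object exposed as the element's buffered attribute.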
PassRefPtr<TimeRanges> MediaPlayerPrivateAVFoundationObjC::platformBufferedTimeRanges() const
{
    RefPtr<TimeRanges> timeRanges = TimeRanges::create();

    if (!m_avPlayerItem)
        return timeRanges.release();

    NSArray *loadedRanges = [m_avPlayerItem.get() loadedTimeRanges];
    for (NSValue *thisRangeValue in loadedRanges) {
        CMTimeRange timeRange = [thisRangeValue CMTimeRangeValue];
        if (CMTIMERANGE_IS_VALID(timeRange) && !CMTIMERANGE_IS_EMPTY(timeRange)) {
            float rangeStart = narrowPrecisionToFloat(CMTimeGetSeconds(timeRange.start));
            float rangeEnd = narrowPrecisionToFloat(CMTimeGetSeconds(CMTimeRangeGetEnd(timeRange)));
            timeRanges->add(rangeStart, rangeEnd);
        }
    }
    return timeRanges.release();
}

float MediaPlayerPrivateAVFoundationObjC::platformMaxTimeSeekable() const
{
    NSArray *seekableRanges = [m_avPlayerItem.get() seekableTimeRanges];
    if (!seekableRanges)
        return 0;

    float maxTimeSeekable = 0;
    for (NSValue *thisRangeValue in seekableRanges) {
        CMTimeRange timeRange = [thisRangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(timeRange) || CMTIMERANGE_IS_EMPTY(timeRange))
            continue;

        float endOfRange = narrowPrecisionToFloat(CMTimeGetSeconds(CMTimeRangeGetEnd(timeRange)));
        if (maxTimeSeekable < endOfRange)
            maxTimeSeekable = endOfRange;
    }
    return maxTimeSeekable;
}

float MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded() const
{
    NSArray *loadedRanges = [m_avPlayerItem.get() loadedTimeRanges];
    if (!loadedRanges)
        return 0;

    float maxTimeLoaded = 0;
    for (NSValue *thisRangeValue in loadedRanges) {
        CMTimeRange timeRange = [thisRangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(timeRange) || CMTIMERANGE_IS_EMPTY(timeRange))
            continue;

        float endOfRange = narrowPrecisionToFloat(CMTimeGetSeconds(CMTimeRangeGetEnd(timeRange)));
        if (maxTimeLoaded < endOfRange)
            maxTimeLoaded = endOfRange;
    }

    return maxTimeLoaded;
}

unsigned MediaPlayerPrivateAVFoundationObjC::totalBytes() const
{
    if (!metaDataAvailable())
        return 0;

    long long totalMediaSize = 0;
    NSArray *tracks = [m_avAsset.get() tracks];
    for (AVAssetTrack *thisTrack in tracks)
        totalMediaSize += [thisTrack totalSampleDataLength];

    return static_cast<unsigned>(totalMediaSize);
}

void MediaPlayerPrivateAVFoundationObjC::setAsset(id asset)
{
    m_avAsset = asset;
}

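// Aggregates the load status of every key requested in beginLoadingMetadata(): a failed or
// cancelled key fails the whole asset, and any still-pending key keeps it in the loading state.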
MediaPlayerPrivateAVFoundation::AVAssetStatus MediaPlayerPrivateAVFoundationObjC::assetStatus() const
{
    if (!m_avAsset)
        return MediaPlayerAVAssetStatusUnknown;

    for (NSString *keyName in assetMetadataKeyNames()) {
        AVKeyValueStatus keyStatus = [m_avAsset.get() statusOfValueForKey:keyName error:nil];
        if (keyStatus < AVKeyValueStatusLoaded)
            return MediaPlayerAVAssetStatusLoading; // At least one key is not loaded yet.

        if (keyStatus == AVKeyValueStatusFailed)
            return MediaPlayerAVAssetStatusFailed; // At least one key could not be loaded.
        if (keyStatus == AVKeyValueStatusCancelled)
            return MediaPlayerAVAssetStatusCancelled; // Loading of at least one key was cancelled.
    }

    if ([[m_avAsset.get() valueForKey:@"playable"] boolValue])
        return MediaPlayerAVAssetStatusPlayable;

    return MediaPlayerAVAssetStatusLoaded;
}

void MediaPlayerPrivateAVFoundationObjC::paintCurrentFrameInContext(GraphicsContext* context, const IntRect& rect)
{
    if (!metaDataAvailable() || context->paintingDisabled())
        return;

    paint(context, rect);
}

void MediaPlayerPrivateAVFoundationObjC::paint(GraphicsContext* context, const IntRect& rect)
{
    if (!metaDataAvailable() || context->paintingDisabled())
        return;

    setDelayCallbacks(true);
    BEGIN_BLOCK_OBJC_EXCEPTIONS;

    RetainPtr<CGImageRef> image = createImageForTimeInRect(currentTime(), rect);
    if (image) {
        context->save();
        context->translate(rect.x(), rect.y() + rect.height());
        context->scale(FloatSize(1.0f, -1.0f));
        context->setImageInterpolationQuality(InterpolationLow);
        IntRect paintRect(IntPoint(0, 0), IntSize(rect.width(), rect.height()));
        CGContextDrawImage(context->platformContext(), CGRectMake(0, 0, paintRect.width(), paintRect.height()), image.get());
        context->restore();
        image = 0;
    }

    END_BLOCK_OBJC_EXCEPTIONS;
    setDelayCallbacks(false);

    MediaPlayerPrivateAVFoundation::paint(context, rect);
}

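// Lazily builds the set of MIME types AVFoundation reports it can open; getSupportedTypes() and
// supportsType() both consult this cache.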
static HashSet<String> mimeTypeCache()
{
    DEFINE_STATIC_LOCAL(HashSet<String>, cache, ());
    static bool typeListInitialized = false;

    if (typeListInitialized)
        return cache;
    typeListInitialized = true;

    NSArray *types = [AVURLAsset audiovisualMIMETypes];
    for (NSString *mimeType in types)
        cache.add(mimeType);

    return cache;
}

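// Synchronously copies a single frame at the requested time from the image generator; the returned
// CGImage is adopted so the +1 reference from copyCGImageAtTime: is not over-retained.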
RetainPtr<CGImageRef> MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(float time, const IntRect& rect)
{
    if (!m_imageGenerator)
        createContextVideoRenderer();
    ASSERT(m_imageGenerator);

#if !LOG_DISABLED
    double start = WTF::currentTime();
#endif

    [m_imageGenerator.get() setMaximumSize:CGSize(rect.size())];
    RetainPtr<CGImageRef> image(AdoptCF, [m_imageGenerator.get() copyCGImageAtTime:CMTimeMakeWithSeconds(time, 600) actualTime:nil error:nil]);

#if !LOG_DISABLED
    double duration = WTF::currentTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(%p) - creating image took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return image;
}

void MediaPlayerPrivateAVFoundationObjC::getSupportedTypes(HashSet<String>& supportedTypes)
{
    supportedTypes = mimeTypeCache();
}

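// Implements the canPlayType() decision: unknown MIME types are rejected, a type without codecs is
// only "maybe" supported, and a full type/codecs string is checked with isPlayableExtendedMIMEType:.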
MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationObjC::supportsType(const String& type, const String& codecs)
{
    if (!mimeTypeCache().contains(type))
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    if (codecs.isEmpty())
        return MediaPlayer::MayBeSupported;

    NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)type, (NSString *)codecs];
    return [AVURLAsset isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}

bool MediaPlayerPrivateAVFoundationObjC::isAvailable()
{
    return true;
}

float MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue(float timeValue) const
{
    if (!metaDataAvailable())
        return timeValue;

    // FIXME - impossible to implement until rdar://8721510 is fixed.
    return timeValue;
}

void MediaPlayerPrivateAVFoundationObjC::tracksChanged()
{
    // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we get
    // asked about those fairly frequently.
    setHasVideo([[m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual] count]);
    setHasAudio([[m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicAudible] count]);
    setHasClosedCaptions([[m_avAsset.get() tracksWithMediaType:AVMediaTypeClosedCaption] count]);

    sizeChanged();
}

void MediaPlayerPrivateAVFoundationObjC::sizeChanged()
{
    NSArray *tracks = [m_avAsset.get() tracks];

    // Some assets don't report track properties until they are completely ready to play, but we
    // want to report a size as early as possible so use presentationSize when an asset has no tracks.
    if (![tracks count]) {
        setNaturalSize(IntSize([m_avPlayerItem.get() presentationSize]));
        return;
    }

    // AVAsset's 'naturalSize' property only considers the movie's first video track, so we need to compute
    // the union of all visual track rects.
    CGRect trackUnionRect = CGRectZero;
    for (AVAssetTrack *track in tracks) {
        CGSize trackSize = [track naturalSize];
        CGRect trackRect = CGRectMake(0, 0, trackSize.width, trackSize.height);
        trackUnionRect = CGRectUnion(trackUnionRect, CGRectApplyAffineTransform(trackRect, [track preferredTransform]));
    }

    // The movie is always displayed at 0,0 so move the track rect to the origin before using width and height.
    trackUnionRect = CGRectOffset(trackUnionRect, -trackUnionRect.origin.x, -trackUnionRect.origin.y);

    // Also look at the asset's preferred transform so we account for a movie matrix.
    CGSize naturalSize = CGSizeApplyAffineTransform(trackUnionRect.size, [m_avAsset.get() preferredTransform]);

    // Cache the natural size (setNaturalSize will notify the player if it has changed).
    setNaturalSize(IntSize(naturalSize));
}

NSArray* assetMetadataKeyNames()
{
    static NSArray* keys;
    if (!keys) {
        keys = [[NSArray alloc] initWithObjects:@"duration",
                    @"naturalSize",
                    @"preferredTransform",
                    @"preferredVolume",
                    @"preferredRate",
                    @"playable",
                    @"tracks",
                    nil];
    }
    return keys;
}

NSArray* itemKVOProperties()
{
    static NSArray* keys;
    if (!keys) {
        keys = [[NSArray alloc] initWithObjects:@"presentationSize",
                @"status",
                @"asset",
                @"tracks",
                @"seekableTimeRanges",
                @"loadedTimeRanges",
                @"playbackLikelyToKeepUp",
                @"playbackBufferFull",
                @"playbackBufferEmpty",
                nil];
    }
    return keys;
}

} // namespace WebCore

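// WebCoreAVFMovieObserver receives KVO changes, notifications and completion-handler callbacks from
// AVFoundation and forwards them to the C++ player as main-thread notifications; disconnect clears
// m_callback so callbacks that arrive after teardown become no-ops.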
@implementation WebCoreAVFMovieObserver

- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    self = [super init];
    if (!self)
        return nil;

    m_callback = callback;
    return self;
}

- (void)disconnect
{
    [NSObject cancelPreviousPerformRequestsWithTarget:self];
    m_callback = 0;
}

- (void)metadataLoaded
{
    if (!m_callback)
        return;

    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetMetadataLoaded);
}

- (void)playableKnown
{
    if (!m_callback)
        return;

    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
}

- (void)timeChanged:(double)time
{
    if (!m_callback)
        return;

    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::PlayerTimeChanged, time);
}

- (void)seekCompleted:(BOOL)finished
{
    if (!m_callback)
        return;

    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::SeekCompleted, static_cast<bool>(finished));
}

- (void)didEnd:(NSNotification *)unusedNotification
{
    UNUSED_PARAM(unusedNotification);
    if (!m_callback)
        return;
    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemDidPlayToEndTime);
}

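// Single KVO entry point: the registration context distinguishes player-item changes from player
// changes, and each observed key path is translated into the matching notification.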
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context
{
    UNUSED_PARAM(change);

    LOG(Media, "WebCoreAVFMovieObserver:observeValueForKeyPath(%p) - keyPath = %s", self, [keyPath UTF8String]);

    if (!m_callback)
        return;

    if (context == MediaPlayerAVFoundationObservationContextPlayerItem) {
        // A value changed for an AVPlayerItem
        if ([keyPath isEqualToString:@"status"])
            m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemStatusChanged);
        else if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
            m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemIsPlaybackLikelyToKeepUpChanged);
        else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
            m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemIsPlaybackBufferEmptyChanged);
        else if ([keyPath isEqualToString:@"playbackBufferFull"])
            m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemIsPlaybackBufferFullChanged);
        else if ([keyPath isEqualToString:@"asset"])
            m_callback->setAsset([object asset]);
        else if ([keyPath isEqualToString:@"loadedTimeRanges"])
            m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemLoadedTimeRangesChanged);
        else if ([keyPath isEqualToString:@"seekableTimeRanges"])
            m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemSeekableTimeRangesChanged);
        else if ([keyPath isEqualToString:@"tracks"])
            m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemTracksChanged);
        else if ([keyPath isEqualToString:@"presentationSize"])
            m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemPresentationSizeChanged);

        return;
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayer) {
        // A value changed for an AVPlayer.
        if ([keyPath isEqualToString:@"rate"])
            m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::PlayerRateChanged);
    }
}

@end

#endif