// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "media/filters/ffmpeg_demuxer.h"

#include <algorithm>
#include <string>

#include "base/base64.h"
#include "base/bind.h"
#include "base/callback.h"
#include "base/callback_helpers.h"
#include "base/command_line.h"
#include "base/memory/scoped_ptr.h"
#include "base/message_loop/message_loop.h"
#include "base/metrics/sparse_histogram.h"
#include "base/stl_util.h"
#include "base/strings/string_util.h"
#include "base/strings/stringprintf.h"
#include "base/task_runner_util.h"
#include "base/time/time.h"
#include "media/base/audio_decoder_config.h"
#include "media/base/bind_to_loop.h"
#include "media/base/decoder_buffer.h"
#include "media/base/decrypt_config.h"
#include "media/base/limits.h"
#include "media/base/media_log.h"
#include "media/base/media_switches.h"
#include "media/base/video_decoder_config.h"
#include "media/ffmpeg/ffmpeg_common.h"
#include "media/filters/ffmpeg_glue.h"
#include "media/filters/ffmpeg_h264_to_annex_b_bitstream_converter.h"
#include "media/webm/webm_crypto_helpers.h"

namespace media {

//
// FFmpegDemuxerStream
//
FFmpegDemuxerStream::FFmpegDemuxerStream(
    FFmpegDemuxer* demuxer,
    AVStream* stream)
    : demuxer_(demuxer),
      message_loop_(base::MessageLoopProxy::current()),
      stream_(stream),
      type_(UNKNOWN),
      end_of_stream_(false),
      last_packet_timestamp_(kNoTimestamp()),
      bitstream_converter_enabled_(false) {
  DCHECK(demuxer_);

  bool is_encrypted = false;

  // Determine our media format.
  switch (stream->codec->codec_type) {
    case AVMEDIA_TYPE_AUDIO:
      type_ = AUDIO;
      AVStreamToAudioDecoderConfig(stream, &audio_config_, true);
      is_encrypted = audio_config_.is_encrypted();
      break;
    case AVMEDIA_TYPE_VIDEO:
      type_ = VIDEO;
      AVStreamToVideoDecoderConfig(stream, &video_config_, true);
      is_encrypted = video_config_.is_encrypted();
      break;
    default:
      NOTREACHED();
      break;
  }

  // Calculate the duration.
  duration_ = ConvertStreamTimestamp(stream->time_base, stream->duration);

  if (stream_->codec->codec_id == AV_CODEC_ID_H264) {
    bitstream_converter_.reset(
        new FFmpegH264ToAnnexBBitstreamConverter(stream_->codec));
  }

  if (is_encrypted) {
    AVDictionaryEntry* key = av_dict_get(stream->metadata, "enc_key_id", NULL,
                                         0);
    DCHECK(key);
    DCHECK(key->value);
    if (!key || !key->value)
      return;
    base::StringPiece base64_key_id(key->value);
    std::string enc_key_id;
    base::Base64Decode(base64_key_id, &enc_key_id);
    DCHECK(!enc_key_id.empty());
    if (enc_key_id.empty())
      return;

    encryption_key_id_.assign(enc_key_id);
    demuxer_->FireNeedKey(kWebMEncryptInitDataType, enc_key_id);
  }
}

void FFmpegDemuxerStream::EnqueuePacket(ScopedAVPacket packet) {
  DCHECK(message_loop_->BelongsToCurrentThread());

  if (!demuxer_ || end_of_stream_) {
    NOTREACHED() << "Attempted to enqueue packet on a stopped stream";
    return;
  }

  // Convert the packet if there is a bitstream filter.
  if (packet->data && bitstream_converter_enabled_ &&
      !bitstream_converter_->ConvertPacket(packet.get())) {
    LOG(ERROR) << "Format conversion failed.";
  }

  // Get side data if any. For now, the only type of side_data is VP8 Alpha. We
  // keep this generic so that other side_data types in the future can be
  // handled the same way as well.
  av_packet_split_side_data(packet.get());
  int side_data_size = 0;
  uint8* side_data = av_packet_get_side_data(
      packet.get(),
      AV_PKT_DATA_MATROSKA_BLOCKADDITIONAL,
      &side_data_size);

  // If a packet is returned by FFmpeg's av_parser_parse2() the packet will
  // reference inner memory of FFmpeg.  As such we should transfer the packet
  // into memory we control.
  scoped_refptr<DecoderBuffer> buffer;
  if (side_data_size > 0) {
    buffer = DecoderBuffer::CopyFrom(packet.get()->data, packet.get()->size,
                                     side_data, side_data_size);
  } else {
    buffer = DecoderBuffer::CopyFrom(packet.get()->data, packet.get()->size);
  }

  if ((type() == DemuxerStream::AUDIO && audio_config_.is_encrypted()) ||
      (type() == DemuxerStream::VIDEO && video_config_.is_encrypted())) {
    scoped_ptr<DecryptConfig> config(WebMCreateDecryptConfig(
        packet->data,  packet->size,
        reinterpret_cast<const uint8*>(encryption_key_id_.data()),
        encryption_key_id_.size()));
    if (!config)
      LOG(ERROR) << "Creation of DecryptConfig failed.";
    buffer->set_decrypt_config(config.Pass());
  }

  buffer->set_timestamp(ConvertStreamTimestamp(
      stream_->time_base, packet->pts));
  buffer->set_duration(ConvertStreamTimestamp(
      stream_->time_base, packet->duration));
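  // Track contiguous buffered media: when both this packet and the previous
  // one have valid, increasing timestamps, the interval between them is added
  // to the buffered ranges and the demuxer is notified.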
  if (buffer->timestamp() != kNoTimestamp() &&
      last_packet_timestamp_ != kNoTimestamp() &&
      last_packet_timestamp_ < buffer->timestamp()) {
    buffered_ranges_.Add(last_packet_timestamp_, buffer->timestamp());
    demuxer_->NotifyBufferingChanged();
  }
  last_packet_timestamp_ = buffer->timestamp();

  buffer_queue_.Push(buffer);
  SatisfyPendingRead();
}

void FFmpegDemuxerStream::SetEndOfStream() {
  DCHECK(message_loop_->BelongsToCurrentThread());
  end_of_stream_ = true;
  SatisfyPendingRead();
}

void FFmpegDemuxerStream::FlushBuffers() {
  DCHECK(message_loop_->BelongsToCurrentThread());
  DCHECK(read_cb_.is_null()) << "There should be no pending read";
  buffer_queue_.Clear();
  end_of_stream_ = false;
  last_packet_timestamp_ = kNoTimestamp();
}

void FFmpegDemuxerStream::Stop() {
  DCHECK(message_loop_->BelongsToCurrentThread());
  buffer_queue_.Clear();
  if (!read_cb_.is_null()) {
    base::ResetAndReturn(&read_cb_).Run(
        DemuxerStream::kOk, DecoderBuffer::CreateEOSBuffer());
  }
  demuxer_ = NULL;
  stream_ = NULL;
  end_of_stream_ = true;
}

base::TimeDelta FFmpegDemuxerStream::duration() {
  return duration_;
}

DemuxerStream::Type FFmpegDemuxerStream::type() {
  DCHECK(message_loop_->BelongsToCurrentThread());
  return type_;
}

void FFmpegDemuxerStream::Read(const ReadCB& read_cb) {
  DCHECK(message_loop_->BelongsToCurrentThread());
  CHECK(read_cb_.is_null()) << "Overlapping reads are not supported";
  read_cb_ = BindToCurrentLoop(read_cb);

  // Don't accept any additional reads if we've been told to stop.
  // The |demuxer_| may have been destroyed in the pipeline thread.
  //
  // TODO(scherkus): it would be cleaner to reply with an error message.
  if (!demuxer_) {
    base::ResetAndReturn(&read_cb_).Run(
        DemuxerStream::kOk, DecoderBuffer::CreateEOSBuffer());
    return;
  }

  SatisfyPendingRead();
}

void FFmpegDemuxerStream::EnableBitstreamConverter() {
  DCHECK(message_loop_->BelongsToCurrentThread());
  CHECK(bitstream_converter_.get());
  bitstream_converter_enabled_ = true;
}

AudioDecoderConfig FFmpegDemuxerStream::audio_decoder_config() {
  DCHECK(message_loop_->BelongsToCurrentThread());
  CHECK_EQ(type_, AUDIO);
  return audio_config_;
}

VideoDecoderConfig FFmpegDemuxerStream::video_decoder_config() {
  DCHECK(message_loop_->BelongsToCurrentThread());
  CHECK_EQ(type_, VIDEO);
  return video_config_;
}

FFmpegDemuxerStream::~FFmpegDemuxerStream() {
  DCHECK(!demuxer_);
  DCHECK(read_cb_.is_null());
  DCHECK(buffer_queue_.IsEmpty());
}

base::TimeDelta FFmpegDemuxerStream::GetElapsedTime() const {
  return ConvertStreamTimestamp(stream_->time_base, stream_->cur_dts);
}

Ranges<base::TimeDelta> FFmpegDemuxerStream::GetBufferedRanges() const {
  return buffered_ranges_;
}

void FFmpegDemuxerStream::SatisfyPendingRead() {
  DCHECK(message_loop_->BelongsToCurrentThread());
  if (!read_cb_.is_null()) {
    if (!buffer_queue_.IsEmpty()) {
      base::ResetAndReturn(&read_cb_).Run(
          DemuxerStream::kOk, buffer_queue_.Pop());
    } else if (end_of_stream_) {
      base::ResetAndReturn(&read_cb_).Run(
          DemuxerStream::kOk, DecoderBuffer::CreateEOSBuffer());
    }
  }

  // Have capacity? Ask for more!
  if (HasAvailableCapacity() && !end_of_stream_) {
    demuxer_->NotifyCapacityAvailable();
  }
}

bool FFmpegDemuxerStream::HasAvailableCapacity() {
  // TODO(scherkus): Remove early return and reenable time-based capacity
  // after our data sources support canceling/concurrent reads, see
  // http://crbug.com/165762 for details.
  return !read_cb_.is_null();
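  // NOTE: the early return above makes the code below unreachable; it is kept
  // so the time-based capacity heuristic can be restored once the TODO is
  // resolved.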

  // Try to have one second's worth of encoded data per stream.
  const base::TimeDelta kCapacity = base::TimeDelta::FromSeconds(1);
  return buffer_queue_.IsEmpty() || buffer_queue_.Duration() < kCapacity;
}

// static
base::TimeDelta FFmpegDemuxerStream::ConvertStreamTimestamp(
    const AVRational& time_base, int64 timestamp) {
  if (timestamp == static_cast<int64>(AV_NOPTS_VALUE))
    return kNoTimestamp();

  return ConvertFromTimeBase(time_base, timestamp);
}

//
// FFmpegDemuxer
//
FFmpegDemuxer::FFmpegDemuxer(
    const scoped_refptr<base::MessageLoopProxy>& message_loop,
    DataSource* data_source,
    const FFmpegNeedKeyCB& need_key_cb,
    const scoped_refptr<MediaLog>& media_log)
    : host_(NULL),
      message_loop_(message_loop),
      weak_factory_(this),
      blocking_thread_("FFmpegDemuxer"),
      pending_read_(false),
      pending_seek_(false),
      data_source_(data_source),
      media_log_(media_log),
      bitrate_(0),
      start_time_(kNoTimestamp()),
      audio_disabled_(false),
      duration_known_(false),
      url_protocol_(data_source, BindToLoop(message_loop_, base::Bind(
          &FFmpegDemuxer::OnDataSourceError, base::Unretained(this)))),
      need_key_cb_(need_key_cb) {
  DCHECK(message_loop_.get());
  DCHECK(data_source_);
}

FFmpegDemuxer::~FFmpegDemuxer() {}

void FFmpegDemuxer::Stop(const base::Closure& callback) {
  DCHECK(message_loop_->BelongsToCurrentThread());
  url_protocol_.Abort();
  data_source_->Stop(BindToCurrentLoop(base::Bind(
      &FFmpegDemuxer::OnDataSourceStopped, weak_this_,
      BindToCurrentLoop(callback))));

  // TODO(scherkus): Reenable after figuring why Stop() gets called multiple
  // times, see http://crbug.com/235933
#if 0
  data_source_ = NULL;
#endif
}

void FFmpegDemuxer::Seek(base::TimeDelta time, const PipelineStatusCB& cb) {
  DCHECK(message_loop_->BelongsToCurrentThread());
  CHECK(!pending_seek_);

  // TODO(scherkus): Inspect |pending_read_| and cancel IO via |blocking_url_|,
  // otherwise we can end up waiting for a pre-seek read to complete even though
  // we know we're going to drop it on the floor.

  // Always seek to a timestamp less than or equal to the desired timestamp.
  int flags = AVSEEK_FLAG_BACKWARD;

  // Passing -1 as our stream index lets FFmpeg pick a default stream.  FFmpeg
  // will attempt to use the lowest-index video stream, if present, followed by
  // the lowest-index audio stream.
  pending_seek_ = true;
  base::PostTaskAndReplyWithResult(
      blocking_thread_.message_loop_proxy().get(),
      FROM_HERE,
      base::Bind(&av_seek_frame,
                 glue_->format_context(),
                 -1,
                 time.InMicroseconds(),
                 flags),
      base::Bind(&FFmpegDemuxer::OnSeekFrameDone, weak_this_, cb));
}

void FFmpegDemuxer::SetPlaybackRate(float playback_rate) {
  DCHECK(message_loop_->BelongsToCurrentThread());
  data_source_->SetPlaybackRate(playback_rate);
}

void FFmpegDemuxer::OnAudioRendererDisabled() {
  DCHECK(message_loop_->BelongsToCurrentThread());
  audio_disabled_ = true;
  StreamVector::iterator iter;
  for (iter = streams_.begin(); iter != streams_.end(); ++iter) {
    if (*iter && (*iter)->type() == DemuxerStream::AUDIO) {
      (*iter)->Stop();
    }
  }
}

void FFmpegDemuxer::Initialize(DemuxerHost* host,
                               const PipelineStatusCB& status_cb) {
  DCHECK(message_loop_->BelongsToCurrentThread());
  host_ = host;
  weak_this_ = weak_factory_.GetWeakPtr();

  // TODO(scherkus): DataSource should have a host by this point,
  // see http://crbug.com/122071
  data_source_->set_host(host);

  glue_.reset(new FFmpegGlue(&url_protocol_));
  AVFormatContext* format_context = glue_->format_context();

  // Disable ID3v1 tag reading to avoid costly seeks to end of file for data we
  // don't use.  FFmpeg will only read ID3v1 tags if no other metadata is
  // available, so add a metadata entry to ensure some is always present.
  av_dict_set(&format_context->metadata, "skip_id3v1_tags", "", 0);

  // Open the AVFormatContext using our glue layer.
  CHECK(blocking_thread_.Start());
  base::PostTaskAndReplyWithResult(
      blocking_thread_.message_loop_proxy().get(),
      FROM_HERE,
      base::Bind(&FFmpegGlue::OpenContext, base::Unretained(glue_.get())),
      base::Bind(&FFmpegDemuxer::OnOpenContextDone, weak_this_, status_cb));
}

DemuxerStream* FFmpegDemuxer::GetStream(DemuxerStream::Type type) {
  DCHECK(message_loop_->BelongsToCurrentThread());
  return GetFFmpegStream(type);
}

FFmpegDemuxerStream* FFmpegDemuxer::GetFFmpegStream(
    DemuxerStream::Type type) const {
  StreamVector::const_iterator iter;
  for (iter = streams_.begin(); iter != streams_.end(); ++iter) {
    if (*iter && (*iter)->type() == type) {
      return *iter;
    }
  }
  return NULL;
}

base::TimeDelta FFmpegDemuxer::GetStartTime() const {
  DCHECK(message_loop_->BelongsToCurrentThread());
  return start_time_;
}

// Helper for calculating the bitrate of the media based on information stored
// in |format_context| or, failing that, the size and duration of the media.
//
// Returns 0 if a bitrate could not be determined.
static int CalculateBitrate(
    AVFormatContext* format_context,
    const base::TimeDelta& duration,
    int64 filesize_in_bytes) {
  // If there is a bitrate set on the container, use it.
  if (format_context->bit_rate > 0)
    return format_context->bit_rate;

  // Then try to sum the bitrates individually per stream.
  int bitrate = 0;
  for (size_t i = 0; i < format_context->nb_streams; ++i) {
    AVCodecContext* codec_context = format_context->streams[i]->codec;
    bitrate += codec_context->bit_rate;
  }
  if (bitrate > 0)
    return bitrate;

  // See if we can approximate the bitrate as long as we have a filesize and
  // valid duration.
  if (duration.InMicroseconds() <= 0 ||
      duration == kInfiniteDuration() ||
      filesize_in_bytes == 0) {
    return 0;
  }

  // Do math in floating point as we'd overflow an int64 if the filesize was
  // larger than ~1073GB.
  double bytes = filesize_in_bytes;
  double duration_us = duration.InMicroseconds();
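  // 8,000,000 = 8 bits per byte * 1,000,000 microseconds per second, so this
  // converts bytes over a duration in microseconds into bits per second.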
  return bytes * 8000000.0 / duration_us;
}

void FFmpegDemuxer::OnOpenContextDone(const PipelineStatusCB& status_cb,
                                      bool result) {
  DCHECK(message_loop_->BelongsToCurrentThread());
  if (!blocking_thread_.IsRunning()) {
    status_cb.Run(PIPELINE_ERROR_ABORT);
    return;
  }

  if (!result) {
    status_cb.Run(DEMUXER_ERROR_COULD_NOT_OPEN);
    return;
  }

  // Fully initialize AVFormatContext by parsing the stream a little.
  base::PostTaskAndReplyWithResult(
      blocking_thread_.message_loop_proxy().get(),
      FROM_HERE,
      base::Bind(&avformat_find_stream_info,
                 glue_->format_context(),
                 static_cast<AVDictionary**>(NULL)),
      base::Bind(&FFmpegDemuxer::OnFindStreamInfoDone, weak_this_, status_cb));
}

void FFmpegDemuxer::OnFindStreamInfoDone(const PipelineStatusCB& status_cb,
                                         int result) {
  DCHECK(message_loop_->BelongsToCurrentThread());
  if (!blocking_thread_.IsRunning()) {
    status_cb.Run(PIPELINE_ERROR_ABORT);
    return;
  }

  if (result < 0) {
    status_cb.Run(DEMUXER_ERROR_COULD_NOT_PARSE);
    return;
  }

  // Create demuxer stream entries for each possible AVStream. Each stream
  // is examined to determine if it is supported or not (is the codec enabled
  // for it in this release?). Unsupported streams are skipped, allowing for
  // partial playback. At least one audio or video stream must be playable.
  AVFormatContext* format_context = glue_->format_context();
  streams_.resize(format_context->nb_streams);

  AVStream* audio_stream = NULL;
  AudioDecoderConfig audio_config;

  AVStream* video_stream = NULL;
  VideoDecoderConfig video_config;

  base::TimeDelta max_duration;
  for (size_t i = 0; i < format_context->nb_streams; ++i) {
    AVStream* stream = format_context->streams[i];
    AVCodecContext* codec_context = stream->codec;
    AVMediaType codec_type = codec_context->codec_type;

    if (codec_type == AVMEDIA_TYPE_AUDIO) {
      if (audio_stream)
        continue;

      // Log the codec detected, whether it is supported or not.
      UMA_HISTOGRAM_SPARSE_SLOWLY("Media.DetectedAudioCodec",
                                  codec_context->codec_id);
      // Ensure the codec is supported. IsValidConfig() also checks that the
      // channel layout and sample format are valid.
      AVStreamToAudioDecoderConfig(stream, &audio_config, false);
      if (!audio_config.IsValidConfig())
        continue;
      audio_stream = stream;
    } else if (codec_type == AVMEDIA_TYPE_VIDEO) {
      if (video_stream)
        continue;

      // Log the codec detected, whether it is supported or not.
      UMA_HISTOGRAM_SPARSE_SLOWLY("Media.DetectedVideoCodec",
                                  codec_context->codec_id);
      // Ensure the codec is supported. IsValidConfig() also checks that the
      // frame size and visible size are valid.
      AVStreamToVideoDecoderConfig(stream, &video_config, false);

      if (!video_config.IsValidConfig())
        continue;
      video_stream = stream;
    } else {
      continue;
    }

    streams_[i] = new FFmpegDemuxerStream(this, stream);
    max_duration = std::max(max_duration, streams_[i]->duration());

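    // Record the earliest first_dts across the supported streams; it becomes
    // the demuxer's reported start time.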
    if (stream->first_dts != static_cast<int64_t>(AV_NOPTS_VALUE)) {
      const base::TimeDelta first_dts = ConvertFromTimeBase(
          stream->time_base, stream->first_dts);
      if (start_time_ == kNoTimestamp() || first_dts < start_time_)
        start_time_ = first_dts;
    }
  }

  if (!audio_stream && !video_stream) {
    status_cb.Run(DEMUXER_ERROR_NO_SUPPORTED_STREAMS);
    return;
  }

  if (format_context->duration != static_cast<int64_t>(AV_NOPTS_VALUE)) {
    // If there is a duration value in the container use that to find the
    // maximum between it and the duration from A/V streams.
    const AVRational av_time_base = {1, AV_TIME_BASE};
    max_duration =
        std::max(max_duration,
                 ConvertFromTimeBase(av_time_base, format_context->duration));
  } else {
    // The duration is unknown, in which case this is likely a live stream.
    max_duration = kInfiniteDuration();
  }

  // Some demuxers, like WAV, do not put timestamps on their frames. We
  // assume the start time is 0.
  if (start_time_ == kNoTimestamp())
    start_time_ = base::TimeDelta();

  // MPEG-4 B-frames cause grief for a simple container like AVI. Enable PTS
  // generation so we always get timestamps, see http://crbug.com/169570
  if (strcmp(format_context->iformat->name, "avi") == 0)
    format_context->flags |= AVFMT_FLAG_GENPTS;

  // Good to go: set the duration and bitrate and notify we're done
  // initializing.
  host_->SetDuration(max_duration);
  duration_known_ = (max_duration != kInfiniteDuration());

  int64 filesize_in_bytes = 0;
  url_protocol_.GetSize(&filesize_in_bytes);
  bitrate_ = CalculateBitrate(format_context, max_duration, filesize_in_bytes);
  if (bitrate_ > 0)
    data_source_->SetBitrate(bitrate_);

  // Audio logging
  if (audio_stream) {
    AVCodecContext* audio_codec = audio_stream->codec;
    media_log_->SetBooleanProperty("found_audio_stream", true);

    SampleFormat sample_format = audio_config.sample_format();
    std::string sample_name = SampleFormatToString(sample_format);

    media_log_->SetStringProperty("audio_sample_format", sample_name);

    media_log_->SetStringProperty("audio_codec_name",
                                  audio_codec->codec_name);
    media_log_->SetIntegerProperty("audio_sample_rate",
                                   audio_codec->sample_rate);
    media_log_->SetIntegerProperty("audio_channels_count",
                                   audio_codec->channels);
    media_log_->SetIntegerProperty("audio_samples_per_second",
                                   audio_config.samples_per_second());
  } else {
    media_log_->SetBooleanProperty("found_audio_stream", false);
  }

  // Video logging
  if (video_stream) {
    AVCodecContext* video_codec = video_stream->codec;
    media_log_->SetBooleanProperty("found_video_stream", true);
    media_log_->SetStringProperty("video_codec_name", video_codec->codec_name);
    media_log_->SetIntegerProperty("width", video_codec->width);
    media_log_->SetIntegerProperty("height", video_codec->height);
    media_log_->SetIntegerProperty("coded_width",
                                   video_codec->coded_width);
    media_log_->SetIntegerProperty("coded_height",
                                   video_codec->coded_height);
    media_log_->SetStringProperty(
        "time_base",
        base::StringPrintf("%d/%d",
                           video_codec->time_base.num,
                           video_codec->time_base.den));
    media_log_->SetStringProperty(
        "video_format", VideoFrame::FormatToString(video_config.format()));
    media_log_->SetBooleanProperty("video_is_encrypted",
                                   video_config.is_encrypted());
  } else {
    media_log_->SetBooleanProperty("found_video_stream", false);
  }

  media_log_->SetDoubleProperty("max_duration", max_duration.InSecondsF());
  media_log_->SetDoubleProperty("start_time", start_time_.InSecondsF());
  media_log_->SetDoubleProperty("filesize_in_bytes",
                                static_cast<double>(filesize_in_bytes));
  media_log_->SetIntegerProperty("bitrate", bitrate_);

  status_cb.Run(PIPELINE_OK);
}

void FFmpegDemuxer::OnSeekFrameDone(const PipelineStatusCB& cb, int result) {
  DCHECK(message_loop_->BelongsToCurrentThread());
  CHECK(pending_seek_);
  pending_seek_ = false;

  if (!blocking_thread_.IsRunning()) {
    cb.Run(PIPELINE_ERROR_ABORT);
    return;
  }

  if (result < 0) {
    // Use VLOG(1) instead of NOTIMPLEMENTED() to keep the message out of
    // stdout, where it would contaminate test output.
    // TODO(scherkus): Implement this properly and signal error (BUG=23447).
    VLOG(1) << "Not implemented";
  }

  // Tell streams to flush buffers due to seeking.
  StreamVector::iterator iter;
  for (iter = streams_.begin(); iter != streams_.end(); ++iter) {
    if (*iter)
      (*iter)->FlushBuffers();
  }

  // Resume reading until capacity.
  ReadFrameIfNeeded();

  // Notify we're finished seeking.
  cb.Run(PIPELINE_OK);
}

void FFmpegDemuxer::ReadFrameIfNeeded() {
  DCHECK(message_loop_->BelongsToCurrentThread());

  // Make sure we have work to do before reading.
  if (!blocking_thread_.IsRunning() || !StreamsHaveAvailableCapacity() ||
      pending_read_ || pending_seek_) {
    return;
  }

  // Allocate and read an AVPacket from the media. Save |packet_ptr| since
  // evaluation order of packet.get() and base::Passed(&packet) is
  // undefined.
  ScopedAVPacket packet(new AVPacket());
  AVPacket* packet_ptr = packet.get();

  pending_read_ = true;
  base::PostTaskAndReplyWithResult(
      blocking_thread_.message_loop_proxy().get(),
      FROM_HERE,
      base::Bind(&av_read_frame, glue_->format_context(), packet_ptr),
      base::Bind(
          &FFmpegDemuxer::OnReadFrameDone, weak_this_, base::Passed(&packet)));
}

void FFmpegDemuxer::OnReadFrameDone(ScopedAVPacket packet, int result) {
  DCHECK(message_loop_->BelongsToCurrentThread());
  DCHECK(pending_read_);
  pending_read_ = false;

  if (!blocking_thread_.IsRunning() || pending_seek_) {
    return;
  }

  if (result < 0) {
    // Update the duration based on the audio stream if
    // it was previously unknown http://crbug.com/86830
    if (!duration_known_) {
      // Search streams for AUDIO one.
      for (StreamVector::iterator iter = streams_.begin();
           iter != streams_.end();
           ++iter) {
        if (*iter && (*iter)->type() == DemuxerStream::AUDIO) {
          base::TimeDelta duration = (*iter)->GetElapsedTime();
          if (duration != kNoTimestamp() && duration > base::TimeDelta()) {
            host_->SetDuration(duration);
            duration_known_ = true;
          }
          break;
        }
      }
    }
    // If we have reached the end of stream, tell the downstream filters about
    // the event.
    StreamHasEnded();
    return;
  }

  // Queue the packet with the appropriate stream.
  DCHECK_GE(packet->stream_index, 0);
  DCHECK_LT(packet->stream_index, static_cast<int>(streams_.size()));

  // Defend against ffmpeg giving us a bad stream index.
  if (packet->stream_index >= 0 &&
      packet->stream_index < static_cast<int>(streams_.size()) &&
      streams_[packet->stream_index] &&
      (!audio_disabled_ ||
       streams_[packet->stream_index]->type() != DemuxerStream::AUDIO)) {

    // TODO(scherkus): Fix demuxing upstream to never return packets w/o data
    // when av_read_frame() returns success code. See bug comment for ideas:
    //
    // https://code.google.com/p/chromium/issues/detail?id=169133#c10
    if (!packet->data) {
      ScopedAVPacket new_packet(new AVPacket());
      av_new_packet(new_packet.get(), 0);

      new_packet->pts = packet->pts;
      new_packet->dts = packet->dts;
      new_packet->pos = packet->pos;
      new_packet->duration = packet->duration;
      new_packet->convergence_duration = packet->convergence_duration;
      new_packet->flags = packet->flags;
      new_packet->stream_index = packet->stream_index;

      packet.swap(new_packet);
    }

    FFmpegDemuxerStream* demuxer_stream = streams_[packet->stream_index];
    demuxer_stream->EnqueuePacket(packet.Pass());
  }

  // Keep reading until we've reached capacity.
  ReadFrameIfNeeded();
}

void FFmpegDemuxer::OnDataSourceStopped(const base::Closure& callback) {
  // This will block until all tasks complete. Note that after this returns it's
  // possible for reply tasks (e.g., OnReadFrameDone()) to be queued on this
  // thread. Each of the reply task methods must check whether we've stopped the
  // thread and drop their results on the floor.
  DCHECK(message_loop_->BelongsToCurrentThread());
  blocking_thread_.Stop();

  StreamVector::iterator iter;
  for (iter = streams_.begin(); iter != streams_.end(); ++iter) {
    if (*iter)
      (*iter)->Stop();
  }

  callback.Run();
}

bool FFmpegDemuxer::StreamsHaveAvailableCapacity() {
  DCHECK(message_loop_->BelongsToCurrentThread());
  StreamVector::iterator iter;
  for (iter = streams_.begin(); iter != streams_.end(); ++iter) {
    if (*iter && (*iter)->HasAvailableCapacity()) {
      return true;
    }
  }
  return false;
}

void FFmpegDemuxer::StreamHasEnded() {
  DCHECK(message_loop_->BelongsToCurrentThread());
  StreamVector::iterator iter;
  for (iter = streams_.begin(); iter != streams_.end(); ++iter) {
    if (!*iter ||
        (audio_disabled_ && (*iter)->type() == DemuxerStream::AUDIO)) {
      continue;
    }
    (*iter)->SetEndOfStream();
  }
}

void FFmpegDemuxer::FireNeedKey(const std::string& init_data_type,
                                const std::string& encryption_key_id) {
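  // Copy the key ID into a heap-allocated buffer so ownership of the bytes can
  // be handed to the callback along with their size.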
  int key_id_size = encryption_key_id.size();
  scoped_ptr<uint8[]> key_id_local(new uint8[key_id_size]);
  memcpy(key_id_local.get(), encryption_key_id.data(), key_id_size);
  need_key_cb_.Run(init_data_type, key_id_local.Pass(), key_id_size);
}

void FFmpegDemuxer::NotifyCapacityAvailable() {
  DCHECK(message_loop_->BelongsToCurrentThread());
  ReadFrameIfNeeded();
}

void FFmpegDemuxer::NotifyBufferingChanged() {
  DCHECK(message_loop_->BelongsToCurrentThread());
  Ranges<base::TimeDelta> buffered;
  FFmpegDemuxerStream* audio =
      audio_disabled_ ? NULL : GetFFmpegStream(DemuxerStream::AUDIO);
  FFmpegDemuxerStream* video = GetFFmpegStream(DemuxerStream::VIDEO);
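  // When both audio and video are present, only the intersection of their
  // buffered ranges is reported; otherwise use whichever stream exists.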
  if (audio && video) {
    buffered = audio->GetBufferedRanges().IntersectionWith(
        video->GetBufferedRanges());
  } else if (audio) {
    buffered = audio->GetBufferedRanges();
  } else if (video) {
    buffered = video->GetBufferedRanges();
  }
  for (size_t i = 0; i < buffered.size(); ++i)
    host_->AddBufferedTimeRange(buffered.start(i), buffered.end(i));
}

void FFmpegDemuxer::OnDataSourceError() {
  host_->OnDemuxerError(PIPELINE_ERROR_READ);
}

}  // namespace media