// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/renderer/media/rtc_video_decoder.h"

#include "base/bind.h"
#include "base/logging.h"
#include "base/memory/ref_counted.h"
#include "base/message_loop/message_loop_proxy.h"
#include "base/metrics/histogram.h"
#include "base/safe_numerics.h"
#include "base/stl_util.h"
#include "base/task_runner_util.h"
#include "content/child/child_thread.h"
#include "content/renderer/media/native_handle_impl.h"
#include "media/base/bind_to_loop.h"
#include "media/filters/gpu_video_accelerator_factories.h"
#include "third_party/webrtc/common_video/interface/texture_video_frame.h"
#include "third_party/webrtc/system_wrappers/interface/ref_count.h"

namespace content {

const int32 RTCVideoDecoder::ID_LAST = 0x3FFFFFFF;
const int32 RTCVideoDecoder::ID_HALF = 0x20000000;
const int32 RTCVideoDecoder::ID_INVALID = -1;

// Maximum number of concurrent VDA::Decode() operations RVD will maintain.
// Higher values allow better pipelining in the GPU, but also require more
// resources.
static const size_t kMaxInFlightDecodes = 8;

// Size of shared-memory segments we allocate.  Since we reuse them we let them
// be on the beefy side.
static const size_t kSharedMemorySegmentBytes = 100 << 10;

// Maximum number of allocated shared-memory segments.
static const int kMaxNumSharedMemorySegments = 16;

// Maximum number of pending WebRTC buffers that are waiting for shared memory
// to become available. 300 buffers is 10 seconds of video at 30 fps.
static const size_t kMaxNumOfPendingBuffers = 300;

// A shared memory segment and its allocated size. This class owns |shm|.
class RTCVideoDecoder::SHMBuffer {
 public:
  SHMBuffer(base::SharedMemory* shm, size_t size);
  ~SHMBuffer();
  base::SharedMemory* const shm;
  const size_t size;
};

RTCVideoDecoder::SHMBuffer::SHMBuffer(base::SharedMemory* shm, size_t size)
    : shm(shm), size(size) {}

RTCVideoDecoder::SHMBuffer::~SHMBuffer() { shm->Close(); }

RTCVideoDecoder::BufferData::BufferData(int32 bitstream_buffer_id,
                                        uint32_t timestamp,
                                        int width,
                                        int height,
                                        size_t size)
    : bitstream_buffer_id(bitstream_buffer_id),
      timestamp(timestamp),
      width(width),
      height(height),
      size(size) {}

RTCVideoDecoder::BufferData::BufferData() {}

RTCVideoDecoder::BufferData::~BufferData() {}

RTCVideoDecoder::RTCVideoDecoder(
    const scoped_refptr<media::GpuVideoAcceleratorFactories>& factories)
    : factories_(factories),
      vda_loop_proxy_(factories->GetMessageLoop()),
      decoder_texture_target_(0),
      next_picture_buffer_id_(0),
      state_(UNINITIALIZED),
      decode_complete_callback_(NULL),
      num_shm_buffers_(0),
      next_bitstream_buffer_id_(0),
      reset_bitstream_buffer_id_(ID_INVALID),
      weak_factory_(this) {
  DCHECK(!vda_loop_proxy_->BelongsToCurrentThread());

  weak_this_ = weak_factory_.GetWeakPtr();

  base::WaitableEvent message_loop_async_waiter(false, false);
  // Waiting here is safe. The media thread is stopped in the child thread and
  // the child thread is blocked when VideoDecoderFactory::CreateVideoDecoder
  // runs.
  vda_loop_proxy_->PostTask(FROM_HERE,
                            base::Bind(&RTCVideoDecoder::Initialize,
                                       base::Unretained(this),
                                       &message_loop_async_waiter));
  message_loop_async_waiter.Wait();
}

RTCVideoDecoder::~RTCVideoDecoder() {
  DVLOG(2) << "~RTCVideoDecoder";
  // Destroy the VDA and remove |this| as a destruction observer if this is
  // the VDA thread.
  if (vda_loop_proxy_->BelongsToCurrentThread()) {
    base::MessageLoop::current()->RemoveDestructionObserver(this);
    DestroyVDA();
  } else {
    // VDA should have been destroyed in WillDestroyCurrentMessageLoop.
    DCHECK(!vda_);
  }

  // Delete all shared memories.
  STLDeleteElements(&available_shm_segments_);
  STLDeleteValues(&bitstream_buffers_in_decoder_);
  STLDeleteContainerPairFirstPointers(decode_buffers_.begin(),
                                      decode_buffers_.end());
  decode_buffers_.clear();

  // Delete WebRTC input buffers.
  for (std::deque<std::pair<webrtc::EncodedImage, BufferData> >::iterator it =
           pending_buffers_.begin();
       it != pending_buffers_.end();
       ++it) {
    delete[] it->first._buffer;
  }
}

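// Creates an RTCVideoDecoder and its VDA. The WebRTC codec type is mapped to
// a media::VideoCodecProfile; an unsupported codec yields a NULL scoped_ptr,
// and if the VDA cannot be created the half-constructed decoder is deleted on
// the VDA message loop and NULL is returned as well.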
scoped_ptr<RTCVideoDecoder> RTCVideoDecoder::Create(
    webrtc::VideoCodecType type,
    const scoped_refptr<media::GpuVideoAcceleratorFactories>& factories) {
  scoped_ptr<RTCVideoDecoder> decoder;
  // Convert WebRTC codec type to media codec profile.
  media::VideoCodecProfile profile;
  switch (type) {
    case webrtc::kVideoCodecVP8:
      profile = media::VP8PROFILE_MAIN;
      break;
    default:
      DVLOG(2) << "Video codec not supported: " << type;
      return decoder.Pass();
  }

  decoder.reset(new RTCVideoDecoder(factories));
  decoder->vda_ =
      factories->CreateVideoDecodeAccelerator(profile, decoder.get()).Pass();
  // vda can be NULL if VP8 is not supported.
  if (decoder->vda_ != NULL) {
    decoder->state_ = INITIALIZED;
  } else {
    factories->GetMessageLoop()->DeleteSoon(FROM_HERE, decoder.release());
  }
  return decoder.Pass();
}

int32_t RTCVideoDecoder::InitDecode(const webrtc::VideoCodec* codecSettings,
                                    int32_t /*numberOfCores*/) {
  DVLOG(2) << "InitDecode";
  DCHECK_EQ(codecSettings->codecType, webrtc::kVideoCodecVP8);
  if (codecSettings->codecSpecific.VP8.feedbackModeOn) {
    LOG(ERROR) << "Feedback mode not supported";
    return RecordInitDecodeUMA(WEBRTC_VIDEO_CODEC_ERROR);
  }

  base::AutoLock auto_lock(lock_);
  if (state_ == UNINITIALIZED || state_ == DECODE_ERROR) {
    LOG(ERROR) << "VDA is not initialized. state=" << state_;
    return RecordInitDecodeUMA(WEBRTC_VIDEO_CODEC_UNINITIALIZED);
  }
  // Create some shared memory if the queue is empty.
  if (available_shm_segments_.size() == 0) {
    vda_loop_proxy_->PostTask(FROM_HERE,
                              base::Bind(&RTCVideoDecoder::CreateSHM,
                                         weak_this_,
                                         kMaxInFlightDecodes,
                                         kSharedMemorySegmentBytes));
  }
  return RecordInitDecodeUMA(WEBRTC_VIDEO_CODEC_OK);
}

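// Called on a WebRTC decoding thread. Checks that the decoder is initialized
// and error free, tracks key-frame resolution changes, and then either copies
// the frame into shared memory for immediate decode or queues it in
// |pending_buffers_| when no SHM segment is available (or when a reset is
// needed for a mid-stream resize on platforms whose VDA cannot handle it).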
int32_t RTCVideoDecoder::Decode(
    const webrtc::EncodedImage& inputImage,
    bool missingFrames,
    const webrtc::RTPFragmentationHeader* /*fragmentation*/,
    const webrtc::CodecSpecificInfo* /*codecSpecificInfo*/,
    int64_t /*renderTimeMs*/) {
  DVLOG(3) << "Decode";

  base::AutoLock auto_lock(lock_);

  if (state_ == UNINITIALIZED || decode_complete_callback_ == NULL) {
    LOG(ERROR) << "The decoder has not been initialized.";
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }

  if (state_ == DECODE_ERROR) {
    LOG(ERROR) << "Decoding error occurred.";
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  if (missingFrames || !inputImage._completeFrame) {
    DLOG(ERROR) << "Missing or incomplete frames.";
    // Unlike the SW decoder in libvpx, the HW decoder cannot handle broken
    // frames.
    // Return an error to request a key frame.
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  // Most platforms' VDA implementations support mid-stream resolution change
  // internally.  Platforms whose VDAs fail to support mid-stream resolution
  // change gracefully need to have their clients cover for them, and we do that
  // here.
#ifdef ANDROID
  const bool kVDACanHandleMidstreamResize = false;
#else
  const bool kVDACanHandleMidstreamResize = true;
#endif

  bool need_to_reset_for_midstream_resize = false;
  if (inputImage._frameType == webrtc::kKeyFrame) {
    DVLOG(2) << "Got key frame. size=" << inputImage._encodedWidth << "x"
             << inputImage._encodedHeight;
    gfx::Size prev_frame_size = frame_size_;
    frame_size_.SetSize(inputImage._encodedWidth, inputImage._encodedHeight);
    if (!kVDACanHandleMidstreamResize && !prev_frame_size.IsEmpty() &&
        prev_frame_size != frame_size_) {
      need_to_reset_for_midstream_resize = true;
    }
  } else if (IsFirstBufferAfterReset(next_bitstream_buffer_id_,
                                     reset_bitstream_buffer_id_)) {
    // TODO(wuchengli): VDA should handle it. Remove this when
    // http://crosbug.com/p/21913 is fixed.
    DVLOG(1) << "The first frame should be a key frame. Drop this frame.";
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  // Create buffer metadata.
  BufferData buffer_data(next_bitstream_buffer_id_,
                         inputImage._timeStamp,
                         frame_size_.width(),
                         frame_size_.height(),
                         inputImage._length);
  // Mask against 30 bits to avoid (undefined) wraparound of a signed integer.
  next_bitstream_buffer_id_ = (next_bitstream_buffer_id_ + 1) & ID_LAST;

  // If a shared memory segment is available, there are no pending buffers, and
  // this isn't a mid-stream resolution change, then send the buffer for decode
  // immediately. Otherwise, save the buffer in the queue for later decode.
  scoped_ptr<SHMBuffer> shm_buffer;
  if (!need_to_reset_for_midstream_resize && pending_buffers_.size() == 0)
    shm_buffer = GetSHM_Locked(inputImage._length);
  if (!shm_buffer) {
    if (!SaveToPendingBuffers_Locked(inputImage, buffer_data))
      return WEBRTC_VIDEO_CODEC_ERROR;
    if (need_to_reset_for_midstream_resize) {
      base::AutoUnlock auto_unlock(lock_);
      Reset();
    }
    return WEBRTC_VIDEO_CODEC_OK;
  }

  SaveToDecodeBuffers_Locked(inputImage, shm_buffer.Pass(), buffer_data);
  vda_loop_proxy_->PostTask(
      FROM_HERE, base::Bind(&RTCVideoDecoder::RequestBufferDecode, weak_this_));
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t RTCVideoDecoder::RegisterDecodeCompleteCallback(
    webrtc::DecodedImageCallback* callback) {
  DVLOG(2) << "RegisterDecodeCompleteCallback";
  base::AutoLock auto_lock(lock_);
  decode_complete_callback_ = callback;
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t RTCVideoDecoder::Release() {
  DVLOG(2) << "Release";
  // Do not destroy VDA because WebRTC can call InitDecode and start decoding
  // again.
  return Reset();
}

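// Remembers the ID of the last bitstream buffer issued before the reset so
// that frames and pictures belonging to the old stream can be dropped, then
// asks the VDA (on its own message loop) to reset.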
int32_t RTCVideoDecoder::Reset() {
  DVLOG(2) << "Reset";
  base::AutoLock auto_lock(lock_);
  if (state_ == UNINITIALIZED) {
    LOG(ERROR) << "Decoder not initialized.";
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  if (next_bitstream_buffer_id_ != 0)
    reset_bitstream_buffer_id_ = next_bitstream_buffer_id_ - 1;
  else
    reset_bitstream_buffer_id_ = ID_LAST;
  // If VDA is already resetting, no need to request the reset again.
  if (state_ != RESETTING) {
    state_ = RESETTING;
    vda_loop_proxy_->PostTask(
        FROM_HERE, base::Bind(&RTCVideoDecoder::ResetInternal, weak_this_));
  }
  return WEBRTC_VIDEO_CODEC_OK;
}

void RTCVideoDecoder::NotifyInitializeDone() {
  DVLOG(2) << "NotifyInitializeDone";
  NOTREACHED();
}

void RTCVideoDecoder::ProvidePictureBuffers(uint32 count,
                                            const gfx::Size& size,
                                            uint32 texture_target) {
  DCHECK(vda_loop_proxy_->BelongsToCurrentThread());
  DVLOG(3) << "ProvidePictureBuffers. texture_target=" << texture_target;

  if (!vda_)
    return;

  std::vector<uint32> texture_ids;
  std::vector<gpu::Mailbox> texture_mailboxes;
  decoder_texture_target_ = texture_target;
  // Discard the sync point returned here, since PictureReady implies that the
  // produce operation has already happened and the texture is ready for use.
  if (!factories_->CreateTextures(count,
                                  size,
                                  &texture_ids,
                                  &texture_mailboxes,
                                  decoder_texture_target_)) {
    NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE);
    return;
  }
  DCHECK_EQ(count, texture_ids.size());
  DCHECK_EQ(count, texture_mailboxes.size());

  std::vector<media::PictureBuffer> picture_buffers;
  for (size_t i = 0; i < texture_ids.size(); ++i) {
    picture_buffers.push_back(media::PictureBuffer(
        next_picture_buffer_id_++, size, texture_ids[i], texture_mailboxes[i]));
    bool inserted = assigned_picture_buffers_.insert(std::make_pair(
        picture_buffers.back().id(), picture_buffers.back())).second;
    DCHECK(inserted);
  }
  vda_->AssignPictureBuffers(picture_buffers);
}

void RTCVideoDecoder::DismissPictureBuffer(int32 id) {
  DVLOG(3) << "DismissPictureBuffer. id=" << id;
  DCHECK(vda_loop_proxy_->BelongsToCurrentThread());

  std::map<int32, media::PictureBuffer>::iterator it =
      assigned_picture_buffers_.find(id);
  if (it == assigned_picture_buffers_.end()) {
    NOTREACHED() << "Missing picture buffer: " << id;
    return;
  }

  media::PictureBuffer buffer_to_dismiss = it->second;
  assigned_picture_buffers_.erase(it);

  std::set<int32>::iterator at_display_it =
      picture_buffers_at_display_.find(id);

  if (at_display_it == picture_buffers_at_display_.end()) {
    // We can delete the texture immediately as it's not being displayed.
    factories_->DeleteTexture(buffer_to_dismiss.texture_id());
  } else {
    // The texture is still being displayed. Postpone deletion until it is
    // returned to us.
    bool inserted = dismissed_picture_buffers_
        .insert(std::make_pair(id, buffer_to_dismiss)).second;
    DCHECK(inserted);
  }
}

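// Called on the VDA thread for every decoded picture. Wraps the texture in a
// media::VideoFrame / webrtc::TextureVideoFrame and delivers it through the
// registered DecodedImageCallback, unless the picture belongs to a frame that
// was submitted before the last Reset or Release.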
void RTCVideoDecoder::PictureReady(const media::Picture& picture) {
  DVLOG(3) << "PictureReady";
  DCHECK(vda_loop_proxy_->BelongsToCurrentThread());

  std::map<int32, media::PictureBuffer>::iterator it =
      assigned_picture_buffers_.find(picture.picture_buffer_id());
  if (it == assigned_picture_buffers_.end()) {
    NOTREACHED() << "Missing picture buffer: " << picture.picture_buffer_id();
    NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE);
    return;
  }
  const media::PictureBuffer& pb = it->second;

  // Create a media::VideoFrame.
  uint32_t timestamp = 0, width = 0, height = 0;
  size_t size = 0;
  GetBufferData(
      picture.bitstream_buffer_id(), &timestamp, &width, &height, &size);
  scoped_refptr<media::VideoFrame> frame =
      CreateVideoFrame(picture, pb, timestamp, width, height, size);
  bool inserted =
      picture_buffers_at_display_.insert(picture.picture_buffer_id()).second;
  DCHECK(inserted);

  // Create a WebRTC video frame.
  webrtc::RefCountImpl<NativeHandleImpl>* handle =
      new webrtc::RefCountImpl<NativeHandleImpl>(frame);
  webrtc::TextureVideoFrame decoded_image(handle, width, height, timestamp, 0);

  // Invoke decode callback. WebRTC expects no callback after Reset or Release.
  {
    base::AutoLock auto_lock(lock_);
    DCHECK(decode_complete_callback_ != NULL);
    if (IsBufferAfterReset(picture.bitstream_buffer_id(),
                           reset_bitstream_buffer_id_)) {
      decode_complete_callback_->Decoded(decoded_image);
    }
  }
}

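// Wraps the decoded texture in a media::VideoFrame. The MailboxHolder release
// callback is bound to the current (VDA) message loop, so ReusePictureBuffer
// runs there once the renderer is done with the texture and the picture
// buffer can be recycled or its texture deleted.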
scoped_refptr<media::VideoFrame> RTCVideoDecoder::CreateVideoFrame(
    const media::Picture& picture,
    const media::PictureBuffer& pb,
    uint32_t timestamp,
    uint32_t width,
    uint32_t height,
    size_t size) {
  gfx::Rect visible_rect(width, height);
  gfx::Size natural_size(width, height);
  DCHECK(decoder_texture_target_);
  // Convert the timestamp from the 90 kHz RTP clock to a base::TimeDelta.
  base::TimeDelta timestamp_ms = base::TimeDelta::FromInternalValue(
      base::checked_numeric_cast<uint64_t>(timestamp) * 1000 / 90);
  return media::VideoFrame::WrapNativeTexture(
      make_scoped_ptr(new media::VideoFrame::MailboxHolder(
          pb.texture_mailbox(),
          0,  // sync_point
          media::BindToCurrentLoop(
              base::Bind(&RTCVideoDecoder::ReusePictureBuffer,
                         weak_this_,
                         picture.picture_buffer_id())))),
      decoder_texture_target_,
      pb.size(),
      visible_rect,
      natural_size,
      timestamp_ms,
      base::Bind(&media::GpuVideoAcceleratorFactories::ReadPixels,
                 factories_,
                 pb.texture_id(),
                 natural_size),
      base::Closure());
}

void RTCVideoDecoder::NotifyEndOfBitstreamBuffer(int32 id) {
  DVLOG(3) << "NotifyEndOfBitstreamBuffer. id=" << id;
  DCHECK(vda_loop_proxy_->BelongsToCurrentThread());

  std::map<int32, SHMBuffer*>::iterator it =
      bitstream_buffers_in_decoder_.find(id);
  if (it == bitstream_buffers_in_decoder_.end()) {
    NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE);
    NOTREACHED() << "Missing bitstream buffer: " << id;
    return;
  }

  {
    base::AutoLock auto_lock(lock_);
    PutSHM_Locked(scoped_ptr<SHMBuffer>(it->second));
  }
  bitstream_buffers_in_decoder_.erase(it);

  RequestBufferDecode();
}

void RTCVideoDecoder::NotifyFlushDone() {
  DVLOG(3) << "NotifyFlushDone";
  NOTREACHED() << "Unexpected flush done notification.";
}

void RTCVideoDecoder::NotifyResetDone() {
  DCHECK(vda_loop_proxy_->BelongsToCurrentThread());
  DVLOG(3) << "NotifyResetDone";

  if (!vda_)
    return;

  input_buffer_data_.clear();
  {
    base::AutoLock auto_lock(lock_);
    state_ = INITIALIZED;
  }
  // Send the pending buffers for decoding.
  RequestBufferDecode();
}

void RTCVideoDecoder::NotifyError(media::VideoDecodeAccelerator::Error error) {
  DCHECK(vda_loop_proxy_->BelongsToCurrentThread());
  if (!vda_)
    return;

  LOG(ERROR) << "VDA Error: " << error;
  UMA_HISTOGRAM_ENUMERATION("Media.RTCVideoDecoderError",
                            error,
                            media::VideoDecodeAccelerator::LARGEST_ERROR_ENUM);
  DestroyVDA();

  base::AutoLock auto_lock(lock_);
  state_ = DECODE_ERROR;
}

void RTCVideoDecoder::WillDestroyCurrentMessageLoop() {
  DVLOG(2) << "WillDestroyCurrentMessageLoop";
  DCHECK(vda_loop_proxy_->BelongsToCurrentThread());
  factories_->Abort();
  weak_factory_.InvalidateWeakPtrs();
  DestroyVDA();
}

void RTCVideoDecoder::Initialize(base::WaitableEvent* waiter) {
  DVLOG(2) << "Initialize";
  DCHECK(vda_loop_proxy_->BelongsToCurrentThread());
  base::MessageLoop::current()->AddDestructionObserver(this);
  waiter->Signal();
}

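// Runs on the VDA message loop. Moves any pending WebRTC buffers into the
// SHM-backed decode queue, then feeds bitstream buffers to the VDA until
// kMaxInFlightDecodes decodes are outstanding, dropping buffers that were
// submitted before the last Reset or Release.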
void RTCVideoDecoder::RequestBufferDecode() {
  DCHECK(vda_loop_proxy_->BelongsToCurrentThread());
  if (!vda_)
    return;

  MovePendingBuffersToDecodeBuffers();

  while (CanMoreDecodeWorkBeDone()) {
    // Get a buffer and data from the queue.
    SHMBuffer* shm_buffer = NULL;
    BufferData buffer_data;
    {
      base::AutoLock auto_lock(lock_);
      // Do not request decode if VDA is resetting.
      if (decode_buffers_.size() == 0 || state_ == RESETTING)
        return;
      shm_buffer = decode_buffers_.front().first;
      buffer_data = decode_buffers_.front().second;
      decode_buffers_.pop_front();
      // Drop buffers that arrived before Reset or Release was called.
      if (!IsBufferAfterReset(buffer_data.bitstream_buffer_id,
                              reset_bitstream_buffer_id_)) {
        PutSHM_Locked(scoped_ptr<SHMBuffer>(shm_buffer));
        continue;
      }
    }

    // Create a BitstreamBuffer and send to VDA to decode.
    media::BitstreamBuffer bitstream_buffer(buffer_data.bitstream_buffer_id,
                                            shm_buffer->shm->handle(),
                                            buffer_data.size);
    bool inserted = bitstream_buffers_in_decoder_
        .insert(std::make_pair(bitstream_buffer.id(), shm_buffer)).second;
    DCHECK(inserted);
    RecordBufferData(buffer_data);
    vda_->Decode(bitstream_buffer);
  }
}

bool RTCVideoDecoder::CanMoreDecodeWorkBeDone() {
  return bitstream_buffers_in_decoder_.size() < kMaxInFlightDecodes;
}

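// Returns true if |id_buffer| was submitted after the reset marked by
// |id_reset|. IDs live in a 30-bit space that wraps at ID_LAST, so the buffer
// counts as "after" when its forward distance from |id_reset| (modulo
// ID_LAST + 1) is nonzero and less than half the ID space (ID_HALF).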
bool RTCVideoDecoder::IsBufferAfterReset(int32 id_buffer, int32 id_reset) {
  if (id_reset == ID_INVALID)
    return true;
  int32 diff = id_buffer - id_reset;
  if (diff <= 0)
    diff += ID_LAST + 1;
  return diff < ID_HALF;
}

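// Returns true if |id_buffer| is exactly the next bitstream buffer ID after
// |id_reset|, with wraparound at ID_LAST. Before any reset has happened
// (|id_reset| == ID_INVALID), only buffer 0 qualifies.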
bool RTCVideoDecoder::IsFirstBufferAfterReset(int32 id_buffer, int32 id_reset) {
  if (id_reset == ID_INVALID)
    return id_buffer == 0;
  return id_buffer == ((id_reset + 1) & ID_LAST);
}

void RTCVideoDecoder::SaveToDecodeBuffers_Locked(
    const webrtc::EncodedImage& input_image,
    scoped_ptr<SHMBuffer> shm_buffer,
    const BufferData& buffer_data) {
  memcpy(shm_buffer->shm->memory(), input_image._buffer, input_image._length);
  std::pair<SHMBuffer*, BufferData> buffer_pair =
      std::make_pair(shm_buffer.release(), buffer_data);

  // Store the buffer and the metadata to the queue.
  decode_buffers_.push_back(buffer_pair);
}

bool RTCVideoDecoder::SaveToPendingBuffers_Locked(
    const webrtc::EncodedImage& input_image,
    const BufferData& buffer_data) {
  DVLOG(2) << "SaveToPendingBuffers_Locked"
           << ". pending_buffers size=" << pending_buffers_.size()
           << ". decode_buffers_ size=" << decode_buffers_.size()
           << ". available_shm size=" << available_shm_segments_.size();
  // Too many buffers are queued. Something has gone wrong.
  if (pending_buffers_.size() >= kMaxNumOfPendingBuffers) {
    LOG(WARNING) << "Too many pending buffers!";
    return false;
  }

  // Clone the input image and save it to the queue.
  uint8_t* buffer = new uint8_t[input_image._length];
  // TODO(wuchengli): avoid memcpy. Extend webrtc::VideoDecoder::Decode()
  // interface to take a non-const ptr to the frame and add a method to the
  // frame that will swap buffers with another.
  memcpy(buffer, input_image._buffer, input_image._length);
  webrtc::EncodedImage encoded_image(
      buffer, input_image._length, input_image._length);
  std::pair<webrtc::EncodedImage, BufferData> buffer_pair =
      std::make_pair(encoded_image, buffer_data);

  pending_buffers_.push_back(buffer_pair);
  return true;
}

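// Drains |pending_buffers_| into |decode_buffers_|, copying each frame into a
// shared memory segment. Frames that predate the last Reset or Release are
// dropped, and draining stops as soon as no SHM segment is available.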
void RTCVideoDecoder::MovePendingBuffersToDecodeBuffers() {
  base::AutoLock auto_lock(lock_);
  while (pending_buffers_.size() > 0) {
    // Get a pending buffer from the queue.
    const webrtc::EncodedImage& input_image = pending_buffers_.front().first;
    const BufferData& buffer_data = pending_buffers_.front().second;

    // Drop the frame if it comes before Reset or Release.
    if (!IsBufferAfterReset(buffer_data.bitstream_buffer_id,
                            reset_bitstream_buffer_id_)) {
      delete[] input_image._buffer;
      pending_buffers_.pop_front();
      continue;
    }
    // Get shared memory and save it to decode buffers.
    scoped_ptr<SHMBuffer> shm_buffer = GetSHM_Locked(input_image._length);
    if (!shm_buffer)
      return;
    SaveToDecodeBuffers_Locked(input_image, shm_buffer.Pass(), buffer_data);
    delete[] input_image._buffer;
    pending_buffers_.pop_front();
  }
}

void RTCVideoDecoder::ResetInternal() {
  DCHECK(vda_loop_proxy_->BelongsToCurrentThread());
  DVLOG(2) << "ResetInternal";
  if (vda_)
    vda_->Reset();
}

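// Invoked via the MailboxHolder release callback once the texture is no
// longer in use by the renderer. Waits on the sync point and returns the
// picture buffer to the VDA for reuse, or deletes the texture if the buffer
// was dismissed while it was on display.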
void RTCVideoDecoder::ReusePictureBuffer(int64 picture_buffer_id,
                                         uint32 sync_point) {
  DCHECK(vda_loop_proxy_->BelongsToCurrentThread());
  DVLOG(3) << "ReusePictureBuffer. id=" << picture_buffer_id;

  if (!vda_)
    return;

  CHECK(!picture_buffers_at_display_.empty());

  size_t num_erased = picture_buffers_at_display_.erase(picture_buffer_id);
  DCHECK(num_erased);

  std::map<int32, media::PictureBuffer>::iterator it =
      assigned_picture_buffers_.find(picture_buffer_id);

  if (it == assigned_picture_buffers_.end()) {
    // This picture was dismissed while it was being displayed, so its deletion
    // was postponed.
    it = dismissed_picture_buffers_.find(picture_buffer_id);
    DCHECK(it != dismissed_picture_buffers_.end());
    factories_->DeleteTexture(it->second.texture_id());
    dismissed_picture_buffers_.erase(it);
    return;
  }

  factories_->WaitSyncPoint(sync_point);

  vda_->ReusePictureBuffer(picture_buffer_id);
}

void RTCVideoDecoder::DestroyTextures() {
  DCHECK(vda_loop_proxy_->BelongsToCurrentThread());
  std::map<int32, media::PictureBuffer>::iterator it;

  for (it = assigned_picture_buffers_.begin();
       it != assigned_picture_buffers_.end();
       ++it) {
    factories_->DeleteTexture(it->second.texture_id());
  }
  assigned_picture_buffers_.clear();

  for (it = dismissed_picture_buffers_.begin();
       it != dismissed_picture_buffers_.end();
       ++it) {
    factories_->DeleteTexture(it->second.texture_id());
  }
  dismissed_picture_buffers_.clear();
}

void RTCVideoDecoder::DestroyVDA() {
  DVLOG(2) << "DestroyVDA";
  DCHECK(vda_loop_proxy_->BelongsToCurrentThread());
  if (vda_)
    vda_.release()->Destroy();
  DestroyTextures();
  base::AutoLock auto_lock(lock_);
  state_ = UNINITIALIZED;
}

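// Returns an available shared-memory segment of at least |min_size| bytes, or
// NULL if none is available right now. When the pool is running low (and the
// kMaxNumSharedMemorySegments cap has not been reached), a task is posted to
// the VDA thread to allocate more; callers respond to a NULL return by
// falling back to |pending_buffers_| until memory arrives.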
scoped_ptr<RTCVideoDecoder::SHMBuffer> RTCVideoDecoder::GetSHM_Locked(
    size_t min_size) {
  // Reuse an already allocated SHM segment if possible.
  SHMBuffer* ret = NULL;
  if (!available_shm_segments_.empty() &&
      available_shm_segments_.back()->size >= min_size) {
    ret = available_shm_segments_.back();
    available_shm_segments_.pop_back();
  }
  // Post a task to the VDA thread to create more shared memory if no segment
  // can be reused or the queue is almost empty.
  if (num_shm_buffers_ < kMaxNumSharedMemorySegments &&
      (ret == NULL || available_shm_segments_.size() <= 1)) {
    vda_loop_proxy_->PostTask(
        FROM_HERE,
        base::Bind(&RTCVideoDecoder::CreateSHM, weak_this_, 1, min_size));
  }
  return scoped_ptr<SHMBuffer>(ret);
}

void RTCVideoDecoder::PutSHM_Locked(scoped_ptr<SHMBuffer> shm_buffer) {
  available_shm_segments_.push_back(shm_buffer.release());
}

void RTCVideoDecoder::CreateSHM(int number, size_t min_size) {
  DCHECK(vda_loop_proxy_->BelongsToCurrentThread());
  DVLOG(2) << "CreateSHM. size=" << min_size;
  int number_to_allocate;
  {
    base::AutoLock auto_lock(lock_);
    number_to_allocate =
        std::min(kMaxNumSharedMemorySegments - num_shm_buffers_, number);
  }
  size_t size_to_allocate = std::max(min_size, kSharedMemorySegmentBytes);
  for (int i = 0; i < number_to_allocate; i++) {
    base::SharedMemory* shm = factories_->CreateSharedMemory(size_to_allocate);
    if (shm != NULL) {
      base::AutoLock auto_lock(lock_);
      num_shm_buffers_++;
      PutSHM_Locked(
          scoped_ptr<SHMBuffer>(new SHMBuffer(shm, size_to_allocate)));
    }
  }
  // Kick off the decoding.
  RequestBufferDecode();
}

void RTCVideoDecoder::RecordBufferData(const BufferData& buffer_data) {
  input_buffer_data_.push_front(buffer_data);
  // Why this value?  Because why not.  avformat.h:MAX_REORDER_DELAY is 16, but
  // that's too small for some pathological B-frame test videos.  The cost of
  // using too-high a value is low (192 bits per extra slot).
  static const size_t kMaxInputBufferDataSize = 128;
  // Pop from the back of the list, because that is the oldest data and the
  // least likely to be useful in the future.
  if (input_buffer_data_.size() > kMaxInputBufferDataSize)
    input_buffer_data_.pop_back();
}

void RTCVideoDecoder::GetBufferData(int32 bitstream_buffer_id,
                                    uint32_t* timestamp,
                                    uint32_t* width,
                                    uint32_t* height,
                                    size_t* size) {
  for (std::list<BufferData>::iterator it = input_buffer_data_.begin();
       it != input_buffer_data_.end();
       ++it) {
    if (it->bitstream_buffer_id != bitstream_buffer_id)
      continue;
    *timestamp = it->timestamp;
    *width = it->width;
    *height = it->height;
    return;
  }
  NOTREACHED() << "Missing bitstream buffer id: " << bitstream_buffer_id;
}

int32_t RTCVideoDecoder::RecordInitDecodeUMA(int32_t status) {
  // Logging a boolean is enough to know whether HW decoding has been used.
  // InitDecode is unlikely to return an error, so an enum is not used here.
  bool sample = (status == WEBRTC_VIDEO_CODEC_OK);
  UMA_HISTOGRAM_BOOLEAN("Media.RTCVideoDecoderInitDecodeSuccess", sample);
  return status;
}

}  // namespace content