Home | History | Annotate | Download | only in media
      1 // Copyright (c) 2013 The Chromium Authors. All rights reserved.
      2 // Use of this source code is governed by a BSD-style license that can be
      3 // found in the LICENSE file.
      4 
      5 #include "content/common/gpu/media/android_video_decode_accelerator.h"
      6 
      7 #include "base/bind.h"
      8 #include "base/logging.h"
      9 #include "base/message_loop/message_loop.h"
     10 #include "base/metrics/histogram.h"
     11 #include "content/common/gpu/gpu_channel.h"
     12 #include "gpu/command_buffer/service/gles2_cmd_decoder.h"
     13 #include "media/base/bitstream_buffer.h"
     14 #include "media/base/limits.h"
     15 #include "media/video/picture.h"
     16 #include "ui/gl/android/scoped_java_surface.h"
     17 #include "ui/gl/android/surface_texture.h"
     18 #include "ui/gl/gl_bindings.h"
     19 
     20 namespace content {
     21 
// Helper macro for dealing with failure.  If |result| evaluates false, emit
// |log| at ERROR level, post NotifyError(|error|) to the client (via weak
// pointer, so a destroyed decoder drops it), put the decoder into the ERROR
// state, and return from the current function.  Because it expands to a bare
// |return|, it is only usable inside AndroidVideoDecodeAccelerator member
// functions returning void.
#define RETURN_ON_FAILURE(result, log, error)                     \
  do {                                                            \
    if (!(result)) {                                              \
      DLOG(ERROR) << log;                                         \
      base::MessageLoop::current()->PostTask(                     \
          FROM_HERE,                                              \
          base::Bind(&AndroidVideoDecodeAccelerator::NotifyError, \
                     weak_this_factory_.GetWeakPtr(),             \
                     error));                                     \
      state_ = ERROR;                                             \
      return;                                                     \
    }                                                             \
  } while (0)
     37 
// Number of picture buffers requested from the client (AssignPictureBuffers
// enforces exactly this many).
// TODO(dwkang): We only need kMaxVideoFrames to pass media stack's prerolling
// phase, but 1 is added due to crbug.com/176036. This should be tuned when we
// have actual use case.
enum { kNumPictureBuffers = media::limits::kMaxVideoFrames + 1 };

// Max number of bitstreams notified to the client with
// NotifyEndOfBitstreamBuffer() before getting output from the bitstream.
// Used by QueueInput() to throttle how far ahead of decode we run.
enum { kMaxBitstreamsNotifiedInAdvance = 32 };
     46 
     47 // Because MediaCodec is thread-hostile (must be poked on a single thread) and
     48 // has no callback mechanism (b/11990118), we must drive it by polling for
     49 // complete frames (and available input buffers, when the codec is fully
     50 // saturated).  This function defines the polling delay.  The value used is an
     51 // arbitrary choice that trades off CPU utilization (spinning) against latency.
     52 // Mirrors android_video_encode_accelerator.cc:EncodePollDelay().
     53 static inline const base::TimeDelta DecodePollDelay() {
     54   // An alternative to this polling scheme could be to dedicate a new thread
     55   // (instead of using the ChildThread) to run the MediaCodec, and make that
     56   // thread use the timeout-based flavor of MediaCodec's dequeue methods when it
     57   // believes the codec should complete "soon" (e.g. waiting for an input
     58   // buffer, or waiting for a picture when it knows enough complete input
     59   // pictures have been fed to saturate any internal buffering).  This is
     60   // speculative and it's unclear that this would be a win (nor that there's a
     61   // reasonably device-agnostic way to fill in the "believes" above).
     62   return base::TimeDelta::FromMilliseconds(10);
     63 }
     64 
     65 static inline const base::TimeDelta NoWaitTimeOut() {
     66   return base::TimeDelta::FromMicroseconds(0);
     67 }
     68 
// Constructs the accelerator in the NO_ERROR state with no codec configured.
// |decoder| is the command-buffer GLES2 decoder later used for the texture
// copy in SendCurrentSurfaceToClient(); |make_context_current| makes that
// decoder's GL context current.  Real setup (GL texture, SurfaceTexture,
// MediaCodec) happens in Initialize().
AndroidVideoDecodeAccelerator::AndroidVideoDecodeAccelerator(
    const base::WeakPtr<gpu::gles2::GLES2Decoder> decoder,
    const base::Callback<bool(void)>& make_context_current)
    : client_(NULL),
      make_context_current_(make_context_current),
      codec_(media::kCodecH264),  // Placeholder; overwritten in Initialize().
      state_(NO_ERROR),
      surface_texture_id_(0),
      picturebuffers_requested_(false),
      gl_decoder_(decoder),
      weak_this_factory_(this) {}
     80 
// Destruction happens via Destroy() ("delete this"), on the same thread the
// instance was created on; teardown work lives in Destroy(), not here.
AndroidVideoDecodeAccelerator::~AndroidVideoDecodeAccelerator() {
  DCHECK(thread_checker_.CalledOnValidThread());
}
     84 
// One-time setup: validates |profile| (currently VP8 only), creates the
// GL_TEXTURE_EXTERNAL_OES texture and SurfaceTexture the codec renders into,
// and configures the MediaCodec.  Returns false on any failure; failures
// here are reported synchronously via the return value, not NotifyError().
bool AndroidVideoDecodeAccelerator::Initialize(media::VideoCodecProfile profile,
                                               Client* client) {
  DCHECK(!media_codec_);
  DCHECK(thread_checker_.CalledOnValidThread());

  client_ = client;

  if (profile == media::VP8PROFILE_ANY) {
    codec_ = media::kCodecVP8;
  } else {
    // TODO(dwkang): enable H264 once b/8125974 is fixed.
    LOG(ERROR) << "Unsupported profile: " << profile;
    return false;
  }

  // Only consider using MediaCodec if it's likely backed by hardware.
  if (media::VideoCodecBridge::IsKnownUnaccelerated(
          codec_, media::MEDIA_CODEC_DECODER)) {
    return false;
  }

  if (!make_context_current_.Run()) {
    LOG(ERROR) << "Failed to make this decoder's GL context current.";
    return false;
  }

  if (!gl_decoder_) {
    LOG(ERROR) << "Failed to get gles2 decoder instance.";
    return false;
  }
  // Create the external texture that backs the SurfaceTexture below, with
  // NEAREST filtering and CLAMP_TO_EDGE wrap set up front.
  glGenTextures(1, &surface_texture_id_);
  glActiveTexture(GL_TEXTURE0);
  glBindTexture(GL_TEXTURE_EXTERNAL_OES, surface_texture_id_);

  glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
  glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
  glTexParameteri(GL_TEXTURE_EXTERNAL_OES,
                  GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
  glTexParameteri(GL_TEXTURE_EXTERNAL_OES,
                  GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
  // The raw GL calls above bypassed the command-buffer decoder's state
  // tracking; restore its texture unit bindings and active texture.
  gl_decoder_->RestoreTextureUnitBindings(0);
  gl_decoder_->RestoreActiveTexture();

  surface_texture_ = gfx::SurfaceTexture::Create(surface_texture_id_);

  if (!ConfigureMediaCodec()) {
    LOG(ERROR) << "Failed to create MediaCodec instance.";
    return false;
  }

  return true;
}
    137 
    138 void AndroidVideoDecodeAccelerator::DoIOTask() {
    139   DCHECK(thread_checker_.CalledOnValidThread());
    140   if (state_ == ERROR) {
    141     return;
    142   }
    143 
    144   QueueInput();
    145   DequeueOutput();
    146 }
    147 
// Feeds at most one pending bitstream buffer into a free MediaCodec input
// buffer.  Returns early (without error) when nothing is pending, when the
// codec has no free input buffer, or when too many buffers have already been
// acked in advance (throttling; see the comment near the bottom).
void AndroidVideoDecodeAccelerator::QueueInput() {
  DCHECK(thread_checker_.CalledOnValidThread());
  if (bitstreams_notified_in_advance_.size() > kMaxBitstreamsNotifiedInAdvance)
    return;
  if (pending_bitstream_buffers_.empty())
    return;

  int input_buf_index = 0;
  media::MediaCodecStatus status = media_codec_->DequeueInputBuffer(
      NoWaitTimeOut(), &input_buf_index);
  if (status != media::MEDIA_CODEC_OK) {
    // NOTE(review): MEDIA_CODEC_ERROR is treated the same as "try again
    // later" here — the error is not surfaced to the client at this point.
    DCHECK(status == media::MEDIA_CODEC_DEQUEUE_INPUT_AGAIN_LATER ||
           status == media::MEDIA_CODEC_ERROR);
    return;
  }

  // Record how long the buffer sat in our queue before reaching the codec.
  base::Time queued_time = pending_bitstream_buffers_.front().second;
  UMA_HISTOGRAM_TIMES("Media.AVDA.InputQueueTime",
                      base::Time::Now() - queued_time);
  media::BitstreamBuffer bitstream_buffer =
      pending_bitstream_buffers_.front().first;
  pending_bitstream_buffers_.pop();

  // id == -1 is the end-of-stream sentinel queued by Flush().
  if (bitstream_buffer.id() == -1) {
    media_codec_->QueueEOS(input_buf_index);
    return;
  }

  // Abuse the presentation time argument to propagate the bitstream
  // buffer ID to the output, so we can report it back to the client in
  // PictureReady().
  base::TimeDelta timestamp =
      base::TimeDelta::FromMicroseconds(bitstream_buffer.id());

  // Map the client's shared memory read-only; it is unmapped when |shm|
  // goes out of scope at the end of this function.
  scoped_ptr<base::SharedMemory> shm(
      new base::SharedMemory(bitstream_buffer.handle(), true));

  RETURN_ON_FAILURE(shm->Map(bitstream_buffer.size()),
                    "Failed to SharedMemory::Map()",
                    UNREADABLE_INPUT);

  status =
      media_codec_->QueueInputBuffer(input_buf_index,
                                     static_cast<const uint8*>(shm->memory()),
                                     bitstream_buffer.size(),
                                     timestamp);
  RETURN_ON_FAILURE(status == media::MEDIA_CODEC_OK,
                    "Failed to QueueInputBuffer: " << status,
                    PLATFORM_FAILURE);

  // We should call NotifyEndOfBitstreamBuffer(), when no more decoded output
  // will be returned from the bitstream buffer. However, MediaCodec API is
  // not enough to guarantee it.
  // So, here, we calls NotifyEndOfBitstreamBuffer() in advance in order to
  // keep getting more bitstreams from the client, and throttle them by using
  // |bitstreams_notified_in_advance_|.
  // TODO(dwkang): check if there is a way to remove this workaround.
  base::MessageLoop::current()->PostTask(
      FROM_HERE,
      base::Bind(&AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer,
                 weak_this_factory_.GetWeakPtr(),
                 bitstream_buffer.id()));
  bitstreams_notified_in_advance_.push_back(bitstream_buffer.id());
}
    212 
// Drains MediaCodec output.  Handles format/buffer-change signals, then
// renders the next decoded frame to the SurfaceTexture and forwards it to
// the client.  Bails out early when no client picture buffer is available to
// receive a frame.
void AndroidVideoDecodeAccelerator::DequeueOutput() {
  DCHECK(thread_checker_.CalledOnValidThread());
  // Picture buffers were requested but the client hasn't delivered them yet.
  if (picturebuffers_requested_ && output_picture_buffers_.empty())
    return;

  if (!output_picture_buffers_.empty() && free_picture_ids_.empty()) {
    // Don't have any picture buffer to send. Need to wait more.
    return;
  }

  bool eos = false;
  base::TimeDelta timestamp;
  int32 buf_index = 0;
  // Loop until an actual output buffer (buf_index >= 0) is dequeued;
  // format/buffer-change statuses are consumed along the way.
  do {
    size_t offset = 0;
    size_t size = 0;

    media::MediaCodecStatus status = media_codec_->DequeueOutputBuffer(
        NoWaitTimeOut(), &buf_index, &offset, &size, &timestamp, &eos, NULL);
    switch (status) {
      case media::MEDIA_CODEC_DEQUEUE_OUTPUT_AGAIN_LATER:
      case media::MEDIA_CODEC_ERROR:
        // NOTE(review): codec errors are silently treated like "no output
        // yet" here; no NotifyError is raised at this point.
        return;

      case media::MEDIA_CODEC_OUTPUT_FORMAT_CHANGED: {
        int32 width, height;
        media_codec_->GetOutputFormat(&width, &height);

        if (!picturebuffers_requested_) {
          // First format signal: latch the coded size and ask the client for
          // picture buffers (posted so the callback runs outside DoIOTask()).
          picturebuffers_requested_ = true;
          size_ = gfx::Size(width, height);
          base::MessageLoop::current()->PostTask(
              FROM_HERE,
              base::Bind(&AndroidVideoDecodeAccelerator::RequestPictureBuffers,
                         weak_this_factory_.GetWeakPtr()));
        } else {
          // Dynamic resolution change support is not specified by the Android
          // platform at and before JB-MR1, so it's not possible to smoothly
          // continue playback at this point.  Instead, error out immediately,
          // expecting clients to Reset() as appropriate to avoid this.
          // b/7093648
          RETURN_ON_FAILURE(size_ == gfx::Size(width, height),
                            "Dynamic resolution change is not supported.",
                            PLATFORM_FAILURE);
        }
        return;
      }

      case media::MEDIA_CODEC_OUTPUT_BUFFERS_CHANGED:
        RETURN_ON_FAILURE(media_codec_->GetOutputBuffers(),
                          "Cannot get output buffer from MediaCodec.",
                          PLATFORM_FAILURE);
        break;

      case media::MEDIA_CODEC_OK:
        DCHECK_GE(buf_index, 0);
        break;

      default:
        NOTREACHED();
        break;
    }
  } while (buf_index < 0);

  // This ignores the emitted ByteBuffer and instead relies on rendering to the
  // codec's SurfaceTexture and then copying from that texture to the client's
  // PictureBuffer's texture.  This means that each picture's data is written
  // three times: once to the ByteBuffer, once to the SurfaceTexture, and once
  // to the client's texture.  It would be nicer to either:
  // 1) Render directly to the client's texture from MediaCodec (one write); or
  // 2) Upload the ByteBuffer to the client's texture (two writes).
  // Unfortunately neither is possible:
  // 1) MediaCodec's use of SurfaceTexture is a singleton, and the texture
  //    written to can't change during the codec's lifetime.  b/11990461
  // 2) The ByteBuffer is likely to contain the pixels in a vendor-specific,
  //    opaque/non-standard format.  It's not possible to negotiate the decoder
  //    to emit a specific colorspace, even using HW CSC.  b/10706245
  // So, we live with these two extra copies per picture :(
  media_codec_->ReleaseOutputBuffer(buf_index, true);

  if (eos) {
    base::MessageLoop::current()->PostTask(
        FROM_HERE,
        base::Bind(&AndroidVideoDecodeAccelerator::NotifyFlushDone,
                   weak_this_factory_.GetWeakPtr()));
  } else {
    // The timestamp carries the originating bitstream buffer id; see the
    // "abuse the presentation time" comment in QueueInput().
    int64 bitstream_buffer_id = timestamp.InMicroseconds();
    SendCurrentSurfaceToClient(static_cast<int32>(bitstream_buffer_id));

    // Removes ids former or equal than the id from decoder. Note that
    // |bitstreams_notified_in_advance_| does not mean bitstream ids in decoder
    // because of frame reordering issue. We just maintain this roughly and use
    // for the throttling purpose.
    std::list<int32>::iterator it;
    for (it = bitstreams_notified_in_advance_.begin();
        it != bitstreams_notified_in_advance_.end();
        ++it) {
      if (*it == bitstream_buffer_id) {
        bitstreams_notified_in_advance_.erase(
            bitstreams_notified_in_advance_.begin(), ++it);
        break;
      }
    }
  }
}
    318 
    319 void AndroidVideoDecodeAccelerator::SendCurrentSurfaceToClient(
    320     int32 bitstream_id) {
    321   DCHECK(thread_checker_.CalledOnValidThread());
    322   DCHECK_NE(bitstream_id, -1);
    323   DCHECK(!free_picture_ids_.empty());
    324 
    325   RETURN_ON_FAILURE(make_context_current_.Run(),
    326                     "Failed to make this decoder's GL context current.",
    327                     PLATFORM_FAILURE);
    328 
    329   int32 picture_buffer_id = free_picture_ids_.front();
    330   free_picture_ids_.pop();
    331 
    332   float transfrom_matrix[16];
    333   surface_texture_->UpdateTexImage();
    334   surface_texture_->GetTransformMatrix(transfrom_matrix);
    335 
    336   OutputBufferMap::const_iterator i =
    337       output_picture_buffers_.find(picture_buffer_id);
    338   RETURN_ON_FAILURE(i != output_picture_buffers_.end(),
    339                     "Can't find a PictureBuffer for " << picture_buffer_id,
    340                     PLATFORM_FAILURE);
    341   uint32 picture_buffer_texture_id = i->second.texture_id();
    342 
    343   RETURN_ON_FAILURE(gl_decoder_.get(),
    344                     "Failed to get gles2 decoder instance.",
    345                     ILLEGAL_STATE);
    346   // Defer initializing the CopyTextureCHROMIUMResourceManager until it is
    347   // needed because it takes 10s of milliseconds to initialize.
    348   if (!copier_) {
    349     copier_.reset(new gpu::CopyTextureCHROMIUMResourceManager());
    350     copier_->Initialize(gl_decoder_.get());
    351   }
    352 
    353   // Here, we copy |surface_texture_id_| to the picture buffer instead of
    354   // setting new texture to |surface_texture_| by calling attachToGLContext()
    355   // because:
    356   // 1. Once we call detachFrameGLContext(), it deletes the texture previous
    357   //    attached.
    358   // 2. SurfaceTexture requires us to apply a transform matrix when we show
    359   //    the texture.
    360   // TODO(hkuang): get the StreamTexture transform matrix in GPU process
    361   // instead of using default matrix crbug.com/226218.
    362   const static GLfloat default_matrix[16] = {1.0f, 0.0f, 0.0f, 0.0f,
    363                                              0.0f, 1.0f, 0.0f, 0.0f,
    364                                              0.0f, 0.0f, 1.0f, 0.0f,
    365                                              0.0f, 0.0f, 0.0f, 1.0f};
    366   copier_->DoCopyTextureWithTransform(gl_decoder_.get(),
    367                                       GL_TEXTURE_EXTERNAL_OES,
    368                                       surface_texture_id_,
    369                                       picture_buffer_texture_id,
    370                                       0,
    371                                       size_.width(),
    372                                       size_.height(),
    373                                       false,
    374                                       false,
    375                                       false,
    376                                       default_matrix);
    377 
    378   base::MessageLoop::current()->PostTask(
    379       FROM_HERE,
    380       base::Bind(
    381           &AndroidVideoDecodeAccelerator::NotifyPictureReady,
    382           weak_this_factory_.GetWeakPtr(),
    383           media::Picture(picture_buffer_id, bitstream_id, gfx::Rect(size_))));
    384 }
    385 
    386 void AndroidVideoDecodeAccelerator::Decode(
    387     const media::BitstreamBuffer& bitstream_buffer) {
    388   DCHECK(thread_checker_.CalledOnValidThread());
    389   if (bitstream_buffer.id() != -1 && bitstream_buffer.size() == 0) {
    390     base::MessageLoop::current()->PostTask(
    391         FROM_HERE,
    392         base::Bind(&AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer,
    393                    weak_this_factory_.GetWeakPtr(),
    394                    bitstream_buffer.id()));
    395     return;
    396   }
    397 
    398   pending_bitstream_buffers_.push(
    399       std::make_pair(bitstream_buffer, base::Time::Now()));
    400 
    401   DoIOTask();
    402 }
    403 
    404 void AndroidVideoDecodeAccelerator::AssignPictureBuffers(
    405     const std::vector<media::PictureBuffer>& buffers) {
    406   DCHECK(thread_checker_.CalledOnValidThread());
    407   DCHECK(output_picture_buffers_.empty());
    408   DCHECK(free_picture_ids_.empty());
    409 
    410   for (size_t i = 0; i < buffers.size(); ++i) {
    411     RETURN_ON_FAILURE(buffers[i].size() == size_,
    412                       "Invalid picture buffer size was passed.",
    413                       INVALID_ARGUMENT);
    414     int32 id = buffers[i].id();
    415     output_picture_buffers_.insert(std::make_pair(id, buffers[i]));
    416     free_picture_ids_.push(id);
    417     // Since the client might be re-using |picture_buffer_id| values, forget
    418     // about previously-dismissed IDs now.  See ReusePictureBuffer() comment
    419     // about "zombies" for why we maintain this set in the first place.
    420     dismissed_picture_ids_.erase(id);
    421   }
    422 
    423   RETURN_ON_FAILURE(output_picture_buffers_.size() == kNumPictureBuffers,
    424                     "Invalid picture buffers were passed.",
    425                     INVALID_ARGUMENT);
    426 
    427   DoIOTask();
    428 }
    429 
    430 void AndroidVideoDecodeAccelerator::ReusePictureBuffer(
    431     int32 picture_buffer_id) {
    432   DCHECK(thread_checker_.CalledOnValidThread());
    433 
    434   // This ReusePictureBuffer() might have been in a pipe somewhere (queued in
    435   // IPC, or in a PostTask either at the sender or receiver) when we sent a
    436   // DismissPictureBuffer() for this |picture_buffer_id|.  Account for such
    437   // potential "zombie" IDs here.
    438   if (dismissed_picture_ids_.erase(picture_buffer_id))
    439     return;
    440 
    441   free_picture_ids_.push(picture_buffer_id);
    442 
    443   DoIOTask();
    444 }
    445 
// Signals end-of-stream by queueing a sentinel bitstream buffer with
// id == -1.  QueueInput() turns the sentinel into MediaCodec's EOS, and
// DequeueOutput() posts NotifyFlushDone() once the EOS emerges from the
// codec.
void AndroidVideoDecodeAccelerator::Flush() {
  DCHECK(thread_checker_.CalledOnValidThread());

  Decode(media::BitstreamBuffer(-1, base::SharedMemoryHandle(), 0));
}
    451 
// (Re)creates the MediaCodec decoder rendering into |surface_texture_| and
// starts the polling timer that drives DoIOTask().  Returns false if the
// codec could not be created.  Requires |surface_texture_| to exist (set up
// by Initialize()).
bool AndroidVideoDecodeAccelerator::ConfigureMediaCodec() {
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(surface_texture_.get());

  gfx::ScopedJavaSurface surface(surface_texture_.get());

  // Pass a dummy 320x240 canvas size and let the codec signal the real size
  // when it's known from the bitstream.
  media_codec_.reset(media::VideoCodecBridge::CreateDecoder(
      codec_, false, gfx::Size(320, 240), surface.j_surface().obj(), NULL));
  if (!media_codec_)
    return false;

  // MediaCodec has no completion callback, so poll it; see DecodePollDelay().
  io_timer_.Start(FROM_HERE,
                  DecodePollDelay(),
                  this,
                  &AndroidVideoDecodeAccelerator::DoIOTask);
  return true;
}
    471 
    472 void AndroidVideoDecodeAccelerator::Reset() {
    473   DCHECK(thread_checker_.CalledOnValidThread());
    474 
    475   while (!pending_bitstream_buffers_.empty()) {
    476     int32 bitstream_buffer_id = pending_bitstream_buffers_.front().first.id();
    477     pending_bitstream_buffers_.pop();
    478 
    479     if (bitstream_buffer_id != -1) {
    480       base::MessageLoop::current()->PostTask(
    481           FROM_HERE,
    482           base::Bind(&AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer,
    483                      weak_this_factory_.GetWeakPtr(),
    484                      bitstream_buffer_id));
    485     }
    486   }
    487   bitstreams_notified_in_advance_.clear();
    488 
    489   for (OutputBufferMap::iterator it = output_picture_buffers_.begin();
    490        it != output_picture_buffers_.end();
    491        ++it) {
    492     client_->DismissPictureBuffer(it->first);
    493     dismissed_picture_ids_.insert(it->first);
    494   }
    495   output_picture_buffers_.clear();
    496   std::queue<int32> empty;
    497   std::swap(free_picture_ids_, empty);
    498   CHECK(free_picture_ids_.empty());
    499   picturebuffers_requested_ = false;
    500 
    501   // On some devices, and up to at least JB-MR1,
    502   // - flush() can fail after EOS (b/8125974); and
    503   // - mid-stream resolution change is unsupported (b/7093648).
    504   // To cope with these facts, we always stop & restart the codec on Reset().
    505   io_timer_.Stop();
    506   media_codec_->Stop();
    507   ConfigureMediaCodec();
    508   state_ = NO_ERROR;
    509 
    510   base::MessageLoop::current()->PostTask(
    511       FROM_HERE,
    512       base::Bind(&AndroidVideoDecodeAccelerator::NotifyResetDone,
    513                  weak_this_factory_.GetWeakPtr()));
    514 }
    515 
// Tears the decoder down and deletes it; must be the last call made on this
// instance.  Invalidating the weak pointers first cancels every callback
// posted via |weak_this_factory_| so none can fire on the freed object.
void AndroidVideoDecodeAccelerator::Destroy() {
  DCHECK(thread_checker_.CalledOnValidThread());

  weak_this_factory_.InvalidateWeakPtrs();
  if (media_codec_) {
    io_timer_.Stop();
    media_codec_->Stop();
  }
  // Delete the external texture created in Initialize(), if any.
  if (surface_texture_id_)
    glDeleteTextures(1, &surface_texture_id_);
  // The copier is lazily created in SendCurrentSurfaceToClient() and needs
  // explicit teardown.
  if (copier_)
    copier_->Destroy();
  delete this;
}
    530 
// Decode() must run on the thread this instance was created on (where
// MediaCodec is driven), never on the GPU IO thread.
bool AndroidVideoDecodeAccelerator::CanDecodeOnIOThread() {
  return false;
}
    534 
// Asks the client for kNumPictureBuffers GL_TEXTURE_2D-backed picture
// buffers of the latched coded size.  Posted (not called directly) from
// DequeueOutput() so the client callback runs outside the decode loop.
void AndroidVideoDecodeAccelerator::RequestPictureBuffers() {
  client_->ProvidePictureBuffers(kNumPictureBuffers, size_, GL_TEXTURE_2D);
}
    538 
// Weak-pointer trampoline: forwards a decoded picture to the client.  Posted
// so it is dropped if the decoder was destroyed in the meantime.
void AndroidVideoDecodeAccelerator::NotifyPictureReady(
    const media::Picture& picture) {
  client_->PictureReady(picture);
}
    543 
// Weak-pointer trampoline: tells the client we are done with a bitstream
// buffer.  Posted so it is dropped if the decoder was destroyed meanwhile.
void AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer(
    int input_buffer_id) {
  client_->NotifyEndOfBitstreamBuffer(input_buffer_id);
}
    548 
// Weak-pointer trampoline: signals flush completion to the client.
void AndroidVideoDecodeAccelerator::NotifyFlushDone() {
  client_->NotifyFlushDone();
}
    552 
// Weak-pointer trampoline: signals reset completion to the client.
void AndroidVideoDecodeAccelerator::NotifyResetDone() {
  client_->NotifyResetDone();
}
    556 
// Weak-pointer trampoline: reports a fatal decoder error to the client.
// Posted by RETURN_ON_FAILURE so it is dropped if the decoder was destroyed.
void AndroidVideoDecodeAccelerator::NotifyError(
    media::VideoDecodeAccelerator::Error error) {
  client_->NotifyError(error);
}
    561 
    562 }  // namespace content
    563