// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/renderer/media/rtc_video_encoder.h"

#include "base/bind.h"
#include "base/location.h"
#include "base/logging.h"
#include "base/memory/scoped_vector.h"
#include "base/message_loop/message_loop_proxy.h"
#include "base/metrics/histogram.h"
#include "base/synchronization/waitable_event.h"
#include "content/renderer/media/renderer_gpu_video_accelerator_factories.h"
#include "media/base/bitstream_buffer.h"
#include "media/base/video_frame.h"
#include "media/base/video_util.h"
#include "media/filters/gpu_video_accelerator_factories.h"
#include "media/video/video_encode_accelerator.h"
#include "third_party/webrtc/system_wrappers/interface/tick_util.h"

#define NOTIFY_ERROR(x)                             \
  do {                                              \
    DLOG(ERROR) << "calling NotifyError(): " << x;  \
    NotifyError(x);                                 \
  } while (0)
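// NOTIFY_ERROR() logs the failure and forwards it to Impl::NotifyError(),
// which tears down the VEA and either unblocks a waiting RTCVideoEncoder
// call or posts the error back to the encoder thread.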

namespace content {

// This private class of RTCVideoEncoder does the actual work of communicating
// with a media::VideoEncodeAccelerator for handling video encoding.  It can
// be created on any thread, but should subsequently be posted to (and Destroy()
// called on) a single thread.  Callbacks to RTCVideoEncoder are posted to the
// thread on which the instance was constructed.
//
// This class separates state related to the thread that RTCVideoEncoder
// operates on (presently the libjingle worker thread) from the thread that
// |gpu_factories_| provides for accelerator operations (presently the media
// thread).  The RTCVideoEncoder class can be deleted directly by WebRTC, while
// RTCVideoEncoder::Impl stays around long enough to properly shut down the VEA.
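//
// Rough call flow (as implemented below): InitEncode() and Encode() post
// CreateAndInitializeVEA() and Enqueue() to the accelerator thread and block
// on a base::WaitableEvent until the Impl signals completion or an error;
// VEA::Client callbacks arrive on the accelerator thread, and encoded output
// is posted back to the encoder thread via
// RTCVideoEncoder::ReturnEncodedImage().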
class RTCVideoEncoder::Impl
    : public media::VideoEncodeAccelerator::Client,
      public base::RefCountedThreadSafe<RTCVideoEncoder::Impl> {
 public:
  Impl(
      const base::WeakPtr<RTCVideoEncoder>& weak_encoder,
      const scoped_refptr<RendererGpuVideoAcceleratorFactories>& gpu_factories);

  // Create the VEA and call Initialize() on it.  Called once per instantiation,
  // and then the instance is bound forevermore to whichever thread made the
  // call.
  // RTCVideoEncoder expects to be able to call this function synchronously from
  // its own thread, hence the |async_waiter| and |async_retval| arguments.
  void CreateAndInitializeVEA(const gfx::Size& input_visible_size,
                              uint32 bitrate,
                              media::VideoCodecProfile profile,
                              base::WaitableEvent* async_waiter,
                              int32_t* async_retval);
  // Enqueue a frame from WebRTC for encoding.
  // RTCVideoEncoder expects to be able to call this function synchronously from
  // its own thread, hence the |async_waiter| and |async_retval| arguments.
  void Enqueue(const webrtc::I420VideoFrame* input_frame,
               bool force_keyframe,
               base::WaitableEvent* async_waiter,
               int32_t* async_retval);

  // RTCVideoEncoder is given a buffer to be passed to WebRTC through the
  // RTCVideoEncoder::ReturnEncodedImage() function.  When that is complete,
  // the buffer is returned to Impl by its index using this function.
  void UseOutputBitstreamBufferId(int32 bitstream_buffer_id);

  // Request encoding parameter change for the underlying encoder.
  void RequestEncodingParametersChange(uint32 bitrate, uint32 framerate);

  // Destroy this Impl's encoder.  The destructor is not explicitly called, as
  // Impl is a base::RefCountedThreadSafe.
  void Destroy();

  // media::VideoEncodeAccelerator::Client implementation.
  virtual void NotifyInitializeDone() OVERRIDE;
  virtual void RequireBitstreamBuffers(unsigned int input_count,
                                       const gfx::Size& input_coded_size,
                                       size_t output_buffer_size) OVERRIDE;
  virtual void BitstreamBufferReady(int32 bitstream_buffer_id,
                                    size_t payload_size,
                                    bool key_frame) OVERRIDE;
  virtual void NotifyError(media::VideoEncodeAccelerator::Error error) OVERRIDE;

 private:
  friend class base::RefCountedThreadSafe<Impl>;

  enum {
    kInputBufferExtraCount = 1,  // Number of input buffers to allocate on top
                                 // of the count requested by
                                 // VEA::RequireBitstreamBuffers().
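    // Fixed number of output bitstream buffers; all of them are handed to the
    // VEA up front in RequireBitstreamBuffers().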
    kOutputBufferCount = 3,
  };

  virtual ~Impl();

  // Perform encoding on an input frame from the input queue.
  void EncodeOneFrame();

  // Notification that an input frame has finished encoding and its buffer can
  // be reused.  |index| is the index of the completed frame in
  // |input_buffers_|.
  void EncodeFrameFinished(int index);

  // Set up/signal |async_waiter_| and |async_retval_|; see declarations below.
  void RegisterAsyncWaiter(base::WaitableEvent* waiter, int32_t* retval);
  void SignalAsyncWaiter(int32_t retval);

  base::ThreadChecker thread_checker_;

  // Weak pointer to the parent RTCVideoEncoder, for posting back VEA::Client
  // notifications.
  const base::WeakPtr<RTCVideoEncoder> weak_encoder_;

  // The message loop on which to post callbacks to |weak_encoder_|.
  const scoped_refptr<base::MessageLoopProxy> encoder_message_loop_proxy_;

  // Factory for creating VEAs, shared memory buffers, etc.
  const scoped_refptr<RendererGpuVideoAcceleratorFactories> gpu_factories_;

  // webrtc::VideoEncoder expects InitEncode() and Encode() to be synchronous.
  // Do this by waiting on the |async_waiter_| and returning the return value in
  // |async_retval_| when initialization completes, encoding completes, or
  // an error occurs.
  base::WaitableEvent* async_waiter_;
  int32_t* async_retval_;

  // The underlying VEA to perform encoding on.
  scoped_ptr<media::VideoEncodeAccelerator> video_encoder_;

  // Next input frame.  Since there is at most one next frame, a single-element
  // queue is sufficient.
  const webrtc::I420VideoFrame* input_next_frame_;

  // Whether to encode a keyframe next.
  bool input_next_frame_keyframe_;

  // Frame sizes.
  gfx::Size input_frame_coded_size_;
  gfx::Size input_visible_size_;

  // Shared memory buffers for input/output with the VEA.
  ScopedVector<base::SharedMemory> input_buffers_;
  ScopedVector<base::SharedMemory> output_buffers_;

  // Input buffers ready to be filled with input from Encode().  As a LIFO since
  // we don't care about ordering.
  std::vector<int> input_buffers_free_;

  DISALLOW_COPY_AND_ASSIGN(Impl);
};

RTCVideoEncoder::Impl::Impl(
    const base::WeakPtr<RTCVideoEncoder>& weak_encoder,
    const scoped_refptr<RendererGpuVideoAcceleratorFactories>& gpu_factories)
    : weak_encoder_(weak_encoder),
      encoder_message_loop_proxy_(base::MessageLoopProxy::current()),
      gpu_factories_(gpu_factories),
      async_waiter_(NULL),
      async_retval_(NULL),
      input_next_frame_(NULL),
      input_next_frame_keyframe_(false) {
  thread_checker_.DetachFromThread();
}

void RTCVideoEncoder::Impl::CreateAndInitializeVEA(
    const gfx::Size& input_visible_size,
    uint32 bitrate,
    media::VideoCodecProfile profile,
    base::WaitableEvent* async_waiter,
    int32_t* async_retval) {
  DVLOG(3) << "Impl::CreateAndInitializeVEA()";
  DCHECK(thread_checker_.CalledOnValidThread());

  RegisterAsyncWaiter(async_waiter, async_retval);

  // Check for overflow converting bitrate (kilobits/sec) to bits/sec.
  if (bitrate > kuint32max / 1000) {
    NOTIFY_ERROR(media::VideoEncodeAccelerator::kInvalidArgumentError);
    return;
  }

  video_encoder_ = gpu_factories_->CreateVideoEncodeAccelerator(this).Pass();
  if (!video_encoder_) {
    NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
    return;
  }
  input_visible_size_ = input_visible_size;
  video_encoder_->Initialize(
      media::VideoFrame::I420, input_visible_size_, profile, bitrate * 1000);
}

void RTCVideoEncoder::Impl::Enqueue(const webrtc::I420VideoFrame* input_frame,
                                    bool force_keyframe,
                                    base::WaitableEvent* async_waiter,
                                    int32_t* async_retval) {
  DVLOG(3) << "Impl::Enqueue()";
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(!input_next_frame_);

  RegisterAsyncWaiter(async_waiter, async_retval);
  input_next_frame_ = input_frame;
  input_next_frame_keyframe_ = force_keyframe;

  if (!input_buffers_free_.empty())
    EncodeOneFrame();
}

void RTCVideoEncoder::Impl::UseOutputBitstreamBufferId(
    int32 bitstream_buffer_id) {
  DVLOG(3) << "Impl::UseOutputBitstreamBufferId(): "
              "bitstream_buffer_id=" << bitstream_buffer_id;
  DCHECK(thread_checker_.CalledOnValidThread());
  if (video_encoder_) {
    video_encoder_->UseOutputBitstreamBuffer(media::BitstreamBuffer(
        bitstream_buffer_id,
        output_buffers_[bitstream_buffer_id]->handle(),
        output_buffers_[bitstream_buffer_id]->mapped_size()));
  }
}

void RTCVideoEncoder::Impl::RequestEncodingParametersChange(uint32 bitrate,
                                                            uint32 framerate) {
  DVLOG(3) << "Impl::RequestEncodingParametersChange(): bitrate=" << bitrate
           << ", framerate=" << framerate;
  DCHECK(thread_checker_.CalledOnValidThread());

  // Check for overflow converting bitrate (kilobits/sec) to bits/sec.
  if (bitrate > kuint32max / 1000) {
    NOTIFY_ERROR(media::VideoEncodeAccelerator::kInvalidArgumentError);
    return;
  }

  if (video_encoder_)
    video_encoder_->RequestEncodingParametersChange(bitrate * 1000, framerate);
}

void RTCVideoEncoder::Impl::Destroy() {
  DVLOG(3) << "Impl::Destroy()";
  DCHECK(thread_checker_.CalledOnValidThread());
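  // VEA::Destroy() is expected to delete the encoder itself, so release
  // ownership from the scoped_ptr first to avoid a double delete.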
  if (video_encoder_)
    video_encoder_.release()->Destroy();
}

void RTCVideoEncoder::Impl::NotifyInitializeDone() {
  DVLOG(3) << "Impl::NotifyInitializeDone()";
  DCHECK(thread_checker_.CalledOnValidThread());
}

void RTCVideoEncoder::Impl::RequireBitstreamBuffers(
    unsigned int input_count,
    const gfx::Size& input_coded_size,
    size_t output_buffer_size) {
  DVLOG(3) << "Impl::RequireBitstreamBuffers(): input_count=" << input_count
           << ", input_coded_size=" << input_coded_size.ToString()
           << ", output_buffer_size=" << output_buffer_size;
  DCHECK(thread_checker_.CalledOnValidThread());

  if (!video_encoder_)
    return;

  input_frame_coded_size_ = input_coded_size;

  for (unsigned int i = 0; i < input_count + kInputBufferExtraCount; ++i) {
    base::SharedMemory* shm =
        gpu_factories_->CreateSharedMemory(media::VideoFrame::AllocationSize(
            media::VideoFrame::I420, input_coded_size));
    if (!shm) {
      DLOG(ERROR) << "Impl::RequireBitstreamBuffers(): "
                     "failed to create input buffer " << i;
      NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
      return;
    }
    input_buffers_.push_back(shm);
    input_buffers_free_.push_back(i);
  }

  for (int i = 0; i < kOutputBufferCount; ++i) {
    base::SharedMemory* shm =
        gpu_factories_->CreateSharedMemory(output_buffer_size);
    if (!shm) {
      DLOG(ERROR) << "Impl::RequireBitstreamBuffers(): "
                     "failed to create output buffer " << i;
      NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
      return;
    }
    output_buffers_.push_back(shm);
  }

  // Immediately provide all output buffers to the VEA.
  for (size_t i = 0; i < output_buffers_.size(); ++i) {
    video_encoder_->UseOutputBitstreamBuffer(media::BitstreamBuffer(
        i, output_buffers_[i]->handle(), output_buffers_[i]->mapped_size()));
  }
  SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_OK);
}

void RTCVideoEncoder::Impl::BitstreamBufferReady(int32 bitstream_buffer_id,
                                                 size_t payload_size,
                                                 bool key_frame) {
  DVLOG(3) << "Impl::BitstreamBufferReady(): "
              "bitstream_buffer_id=" << bitstream_buffer_id
           << ", payload_size=" << payload_size
           << ", key_frame=" << key_frame;
  DCHECK(thread_checker_.CalledOnValidThread());

  if (bitstream_buffer_id < 0 ||
      bitstream_buffer_id >= static_cast<int>(output_buffers_.size())) {
    DLOG(ERROR) << "Impl::BitstreamBufferReady(): invalid bitstream_buffer_id="
                << bitstream_buffer_id;
    NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
    return;
  }
  base::SharedMemory* output_buffer = output_buffers_[bitstream_buffer_id];
  if (payload_size > output_buffer->mapped_size()) {
    DLOG(ERROR) << "Impl::BitstreamBufferReady(): invalid payload_size="
                << payload_size;
    NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
    return;
  }

  // Use webrtc timestamps to ensure correct RTP sender behavior.
  // TODO(hshi): obtain timestamp from the capturer, see crbug.com/284783.
  const int64 capture_time_ms = webrtc::TickTime::MillisecondTimestamp();

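  // Wrap the shared-memory output buffer in an EncodedImage without copying.
  // The buffer is recycled (via UseOutputBitstreamBufferId()) only after
  // RTCVideoEncoder::ReturnEncodedImage() has delivered the image to WebRTC.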
  scoped_ptr<webrtc::EncodedImage> image(new webrtc::EncodedImage(
      reinterpret_cast<uint8_t*>(output_buffer->memory()),
      payload_size,
      output_buffer->mapped_size()));
  image->_encodedWidth = input_visible_size_.width();
  image->_encodedHeight = input_visible_size_.height();
  // Convert capture time to 90 kHz RTP timestamp.
  image->_timeStamp = static_cast<uint32_t>(90 * capture_time_ms);
  image->capture_time_ms_ = capture_time_ms;
  image->_frameType = (key_frame ? webrtc::kKeyFrame : webrtc::kDeltaFrame);
  image->_completeFrame = true;

  encoder_message_loop_proxy_->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoEncoder::ReturnEncodedImage,
                 weak_encoder_,
                 base::Passed(&image),
                 bitstream_buffer_id));
}

void RTCVideoEncoder::Impl::NotifyError(
    media::VideoEncodeAccelerator::Error error) {
  DVLOG(3) << "Impl::NotifyError(): error=" << error;
  DCHECK(thread_checker_.CalledOnValidThread());
  int32_t retval;
  switch (error) {
    case media::VideoEncodeAccelerator::kInvalidArgumentError:
      retval = WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
      break;
    default:
      retval = WEBRTC_VIDEO_CODEC_ERROR;
  }

  if (video_encoder_)
    video_encoder_.release()->Destroy();

  if (async_waiter_) {
    SignalAsyncWaiter(retval);
  } else {
    encoder_message_loop_proxy_->PostTask(
        FROM_HERE,
        base::Bind(&RTCVideoEncoder::NotifyError, weak_encoder_, retval));
  }
}

RTCVideoEncoder::Impl::~Impl() { DCHECK(!video_encoder_); }

void RTCVideoEncoder::Impl::EncodeOneFrame() {
  DVLOG(3) << "Impl::EncodeOneFrame()";
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(input_next_frame_);
  DCHECK(!input_buffers_free_.empty());

  // EncodeOneFrame() may re-enter EncodeFrameFinished() if VEA::Encode() fails,
  // we receive a VEA::NotifyError(), and the media::VideoFrame we pass to
  // Encode() gets destroyed early.  Handle this by resetting our
  // input_next_frame_* state before we hand off the VideoFrame to the VEA.
  const webrtc::I420VideoFrame* next_frame = input_next_frame_;
  bool next_frame_keyframe = input_next_frame_keyframe_;
  input_next_frame_ = NULL;
  input_next_frame_keyframe_ = false;

  if (!video_encoder_) {
    SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_ERROR);
    return;
  }

  const int index = input_buffers_free_.back();
  base::SharedMemory* input_buffer = input_buffers_[index];
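  // Wrap the free shared-memory input buffer in a media::VideoFrame.  When
  // the VEA is done with the frame, its destruction callback runs
  // EncodeFrameFinished(), returning |index| to |input_buffers_free_|.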
  scoped_refptr<media::VideoFrame> frame =
      media::VideoFrame::WrapExternalPackedMemory(
          media::VideoFrame::I420,
          input_frame_coded_size_,
          gfx::Rect(input_visible_size_),
          input_visible_size_,
          reinterpret_cast<uint8*>(input_buffer->memory()),
          input_buffer->mapped_size(),
          input_buffer->handle(),
          base::TimeDelta(),
          base::Bind(&RTCVideoEncoder::Impl::EncodeFrameFinished, this, index));
  if (!frame) {
    DLOG(ERROR) << "Impl::EncodeOneFrame(): failed to create frame";
    NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
    return;
  }

  // Do a strided copy of the input frame to match the input requirements for
  // the encoder.
  // TODO(sheu): support zero-copy from WebRTC.  http://crbug.com/269312
  media::CopyYPlane(next_frame->buffer(webrtc::kYPlane),
                    next_frame->stride(webrtc::kYPlane),
                    next_frame->height(),
                    frame.get());
  media::CopyUPlane(next_frame->buffer(webrtc::kUPlane),
                    next_frame->stride(webrtc::kUPlane),
                    next_frame->height(),
                    frame.get());
  media::CopyVPlane(next_frame->buffer(webrtc::kVPlane),
                    next_frame->stride(webrtc::kVPlane),
                    next_frame->height(),
                    frame.get());

  video_encoder_->Encode(frame, next_frame_keyframe);
  input_buffers_free_.pop_back();
  SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_OK);
}

void RTCVideoEncoder::Impl::EncodeFrameFinished(int index) {
  DVLOG(3) << "Impl::EncodeFrameFinished(): index=" << index;
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK_GE(index, 0);
  DCHECK_LT(index, static_cast<int>(input_buffers_.size()));
  input_buffers_free_.push_back(index);
  if (input_next_frame_)
    EncodeOneFrame();
}

void RTCVideoEncoder::Impl::RegisterAsyncWaiter(base::WaitableEvent* waiter,
                                                int32_t* retval) {
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(!async_waiter_);
  DCHECK(!async_retval_);
  async_waiter_ = waiter;
  async_retval_ = retval;
}

void RTCVideoEncoder::Impl::SignalAsyncWaiter(int32_t retval) {
  DCHECK(thread_checker_.CalledOnValidThread());
  *async_retval_ = retval;
  async_waiter_->Signal();
  async_retval_ = NULL;
  async_waiter_ = NULL;
}

#undef NOTIFY_ERROR

////////////////////////////////////////////////////////////////////////////////
//
// RTCVideoEncoder
//
////////////////////////////////////////////////////////////////////////////////
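//
// Rough usage sketch (hypothetical caller, for illustration only; the
// |codec_settings|, |frame|, |frame_types| and |callback| objects are assumed
// to be set up elsewhere by WebRTC):
//
//   RTCVideoEncoder encoder(webrtc::kVideoCodecVP8, profile, gpu_factories);
//   encoder.RegisterEncodeCompleteCallback(&callback);
//   encoder.InitEncode(&codec_settings, number_of_cores, max_payload_size);
//   encoder.Encode(frame, NULL, &frame_types);
//   encoder.Release();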

RTCVideoEncoder::RTCVideoEncoder(
    webrtc::VideoCodecType type,
    media::VideoCodecProfile profile,
    const scoped_refptr<RendererGpuVideoAcceleratorFactories>& gpu_factories)
    : video_codec_type_(type),
      video_codec_profile_(profile),
      gpu_factories_(gpu_factories),
      encoded_image_callback_(NULL),
      impl_status_(WEBRTC_VIDEO_CODEC_UNINITIALIZED),
      weak_this_factory_(this) {
  DVLOG(1) << "RTCVideoEncoder(): profile=" << profile;
}

RTCVideoEncoder::~RTCVideoEncoder() {
  DCHECK(thread_checker_.CalledOnValidThread());
  Release();
  DCHECK(!impl_);
}

int32_t RTCVideoEncoder::InitEncode(const webrtc::VideoCodec* codec_settings,
                                    int32_t number_of_cores,
                                    uint32_t max_payload_size) {
  DVLOG(1) << "InitEncode(): codecType=" << codec_settings->codecType
           << ", width=" << codec_settings->width
           << ", height=" << codec_settings->height
           << ", startBitrate=" << codec_settings->startBitrate;
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(!impl_);

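  // Invalidate weak pointers handed to any previous Impl so that stale
  // callbacks posted from the accelerator thread are dropped.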
  weak_this_factory_.InvalidateWeakPtrs();
  impl_ = new Impl(weak_this_factory_.GetWeakPtr(), gpu_factories_);
  base::WaitableEvent initialization_waiter(true, false);
  int32_t initialization_retval = WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  gpu_factories_->GetMessageLoop()->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoEncoder::Impl::CreateAndInitializeVEA,
                 impl_,
                 gfx::Size(codec_settings->width, codec_settings->height),
                 codec_settings->startBitrate,
                 video_codec_profile_,
                 &initialization_waiter,
                 &initialization_retval));

  // webrtc::VideoEncoder expects this call to be synchronous.
  initialization_waiter.Wait();
  RecordInitEncodeUMA(initialization_retval);
  return initialization_retval;
}

int32_t RTCVideoEncoder::Encode(
    const webrtc::I420VideoFrame& input_image,
    const webrtc::CodecSpecificInfo* codec_specific_info,
    const std::vector<webrtc::VideoFrameType>* frame_types) {
  DVLOG(3) << "Encode()";
  // TODO(sheu): figure out why this check fails.
  // DCHECK(thread_checker_.CalledOnValidThread());
  if (!impl_) {
    DVLOG(3) << "Encode(): returning impl_status_=" << impl_status_;
    return impl_status_;
  }

  base::WaitableEvent encode_waiter(true, false);
  int32_t encode_retval = WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  gpu_factories_->GetMessageLoop()->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoEncoder::Impl::Enqueue,
                 impl_,
                 &input_image,
                 (frame_types->front() == webrtc::kKeyFrame),
                 &encode_waiter,
                 &encode_retval));

  // webrtc::VideoEncoder expects this call to be synchronous.
  encode_waiter.Wait();
  DVLOG(3) << "Encode(): returning encode_retval=" << encode_retval;
  return encode_retval;
}

int32_t RTCVideoEncoder::RegisterEncodeCompleteCallback(
    webrtc::EncodedImageCallback* callback) {
  DVLOG(3) << "RegisterEncodeCompleteCallback()";
  DCHECK(thread_checker_.CalledOnValidThread());
  if (!impl_) {
    DVLOG(3) << "RegisterEncodeCompleteCallback(): returning " << impl_status_;
    return impl_status_;
  }

  encoded_image_callback_ = callback;
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t RTCVideoEncoder::Release() {
  DVLOG(3) << "Release()";
  DCHECK(thread_checker_.CalledOnValidThread());

  // Reset the gpu_factories_, in case we reuse this encoder.
  gpu_factories_->Abort();
  gpu_factories_ = gpu_factories_->Clone();
  if (impl_) {
    gpu_factories_->GetMessageLoop()->PostTask(
        FROM_HERE, base::Bind(&RTCVideoEncoder::Impl::Destroy, impl_));
    impl_ = NULL;
    weak_this_factory_.InvalidateWeakPtrs();
    impl_status_ = WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t RTCVideoEncoder::SetChannelParameters(uint32_t packet_loss, int rtt) {
  DVLOG(3) << "SetChannelParameters(): packet_loss=" << packet_loss
           << ", rtt=" << rtt;
  DCHECK(thread_checker_.CalledOnValidThread());
  // Ignored.
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t RTCVideoEncoder::SetRates(uint32_t new_bit_rate, uint32_t frame_rate) {
  DVLOG(3) << "SetRates(): new_bit_rate=" << new_bit_rate
           << ", frame_rate=" << frame_rate;
  DCHECK(thread_checker_.CalledOnValidThread());
  if (!impl_) {
    DVLOG(3) << "SetRates(): returning " << impl_status_;
    return impl_status_;
  }

  gpu_factories_->GetMessageLoop()->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoEncoder::Impl::RequestEncodingParametersChange,
                 impl_,
                 new_bit_rate,
                 frame_rate));
  return WEBRTC_VIDEO_CODEC_OK;
}

void RTCVideoEncoder::ReturnEncodedImage(scoped_ptr<webrtc::EncodedImage> image,
                                         int32 bitstream_buffer_id) {
  DCHECK(thread_checker_.CalledOnValidThread());
  DVLOG(3) << "ReturnEncodedImage(): "
              "bitstream_buffer_id=" << bitstream_buffer_id;

  if (!encoded_image_callback_)
    return;

  webrtc::CodecSpecificInfo info;
  memset(&info, 0, sizeof(info));
  info.codecType = video_codec_type_;
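  // A value of -1 leaves the VP8-specific RTP fields (picture ID, TL0PICIDX
  // and key frame index) unset.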
  if (video_codec_type_ == webrtc::kVideoCodecVP8) {
    info.codecSpecific.VP8.pictureId = -1;
    info.codecSpecific.VP8.tl0PicIdx = -1;
    info.codecSpecific.VP8.keyIdx = -1;
  }

  // Generate a header describing a single fragment.
  webrtc::RTPFragmentationHeader header;
  memset(&header, 0, sizeof(header));
  header.VerifyAndAllocateFragmentationHeader(1);
  header.fragmentationOffset[0] = 0;
  header.fragmentationLength[0] = image->_length;
  header.fragmentationPlType[0] = 0;
  header.fragmentationTimeDiff[0] = 0;

  int32_t retval = encoded_image_callback_->Encoded(*image, &info, &header);
  if (retval < 0) {
    DVLOG(2) << "ReturnEncodedImage(): encoded_image_callback_ returned "
             << retval;
  }

  // The call through webrtc::EncodedImageCallback is synchronous, so we can
  // immediately recycle the output buffer back to the Impl.
  gpu_factories_->GetMessageLoop()->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoEncoder::Impl::UseOutputBitstreamBufferId,
                 impl_,
                 bitstream_buffer_id));
}

void RTCVideoEncoder::NotifyError(int32_t error) {
  DCHECK(thread_checker_.CalledOnValidThread());
  DVLOG(1) << "NotifyError(): error=" << error;

  impl_status_ = error;
  gpu_factories_->GetMessageLoop()->PostTask(
      FROM_HERE, base::Bind(&RTCVideoEncoder::Impl::Destroy, impl_));
  impl_ = NULL;
}

void RTCVideoEncoder::RecordInitEncodeUMA(int32_t init_retval) {
  UMA_HISTOGRAM_BOOLEAN("Media.RTCVideoEncoderInitEncodeSuccess",
                        init_retval == WEBRTC_VIDEO_CODEC_OK);
  if (init_retval == WEBRTC_VIDEO_CODEC_OK) {
    UMA_HISTOGRAM_ENUMERATION("Media.RTCVideoEncoderProfile",
                              video_codec_profile_,
                              media::VIDEO_CODEC_PROFILE_MAX);
  }
}

}  // namespace content