// Home | History | Annotate | Download | only in media
      1 // Copyright 2013 The Chromium Authors. All rights reserved.
      2 // Use of this source code is governed by a BSD-style license that can be
      3 // found in the LICENSE file.
      4 
      5 #include "content/common/gpu/media/android_video_encode_accelerator.h"
      6 
      7 #include "base/bind.h"
      8 #include "base/command_line.h"
      9 #include "base/logging.h"
     10 #include "base/message_loop/message_loop.h"
     11 #include "base/metrics/histogram.h"
     12 #include "content/common/gpu/gpu_channel.h"
     13 #include "content/public/common/content_switches.h"
     14 #include "gpu/command_buffer/service/gles2_cmd_decoder.h"
     15 #include "media/base/android/media_codec_bridge.h"
     16 #include "media/base/bitstream_buffer.h"
     17 #include "media/base/limits.h"
     18 #include "media/video/picture.h"
     19 #include "third_party/libyuv/include/libyuv/convert_from.h"
     20 #include "ui/gl/android/scoped_java_surface.h"
     21 #include "ui/gl/gl_bindings.h"
     22 
     23 using media::MediaCodecBridge;
     24 using media::VideoCodecBridge;
     25 using media::VideoFrame;
     26 
     27 namespace content {
     28 
enum {
  // Subset of MediaCodecInfo.CodecCapabilities.  The value 21 matches
  // Android's COLOR_FormatYUV420SemiPlanar (NV12-style layout) — the input
  // color format requested from the encoder below.
  COLOR_FORMAT_YUV420_SEMIPLANAR = 21,
};
     33 
// Helper macros for dealing with failure.  If |result| evaluates false, emit
// |log| to DLOG(ERROR), register |error| with the client, and return.
// The client is notified at most once: the weak pointers are invalidated
// right after the first NotifyError(), so later failures only log and return.
// Only usable inside AndroidVideoEncodeAccelerator methods (relies on
// |client_ptr_factory_| and on the enclosing function returning void).
#define RETURN_ON_FAILURE(result, log, error)                 \
  do {                                                        \
    if (!(result)) {                                          \
      DLOG(ERROR) << log;                                     \
      if (client_ptr_factory_.GetWeakPtr()) {                 \
        client_ptr_factory_.GetWeakPtr()->NotifyError(error); \
        client_ptr_factory_.InvalidateWeakPtrs();             \
      }                                                       \
      return;                                                 \
    }                                                         \
  } while (0)
     47 
     48 // Because MediaCodec is thread-hostile (must be poked on a single thread) and
     49 // has no callback mechanism (b/11990118), we must drive it by polling for
     50 // complete frames (and available input buffers, when the codec is fully
     51 // saturated).  This function defines the polling delay.  The value used is an
     52 // arbitrary choice that trades off CPU utilization (spinning) against latency.
     53 // Mirrors android_video_decode_accelerator.cc::DecodePollDelay().
     54 static inline const base::TimeDelta EncodePollDelay() {
     55   // An alternative to this polling scheme could be to dedicate a new thread
     56   // (instead of using the ChildThread) to run the MediaCodec, and make that
     57   // thread use the timeout-based flavor of MediaCodec's dequeue methods when it
     58   // believes the codec should complete "soon" (e.g. waiting for an input
     59   // buffer, or waiting for a picture when it knows enough complete input
     60   // pictures have been fed to saturate any internal buffering).  This is
     61   // speculative and it's unclear that this would be a win (nor that there's a
     62   // reasonably device-agnostic way to fill in the "believes" above).
     63   return base::TimeDelta::FromMilliseconds(10);
     64 }
     65 
     66 static inline const base::TimeDelta NoWaitTimeOut() {
     67   return base::TimeDelta::FromMicroseconds(0);
     68 }
     69 
AndroidVideoEncodeAccelerator::AndroidVideoEncodeAccelerator(
    media::VideoEncodeAccelerator::Client* client)
    : client_ptr_factory_(client),  // All client calls go through weak ptrs.
      num_buffers_at_codec_(0),     // Nothing submitted to MediaCodec yet.
      num_output_buffers_(-1),      // Unknown until Initialize() queries codec.
      output_buffers_capacity_(0),  // Ditto.
      last_set_bitrate_(0) {}
     77 
AndroidVideoEncodeAccelerator::~AndroidVideoEncodeAccelerator() {
  // Like every other entry point, destruction must happen on the thread this
  // object was created on.
  DCHECK(thread_checker_.CalledOnValidThread());
}
     81 
     82 // static
     83 std::vector<media::VideoEncodeAccelerator::SupportedProfile>
     84 AndroidVideoEncodeAccelerator::GetSupportedProfiles() {
     85   std::vector<MediaCodecBridge::CodecsInfo> codecs_info =
     86       MediaCodecBridge::GetCodecsInfo();
     87 
     88   std::vector<SupportedProfile> profiles;
     89 
     90   const CommandLine* cmd_line = CommandLine::ForCurrentProcess();
     91   if (cmd_line->HasSwitch(switches::kDisableWebRtcHWEncoding))
     92     return profiles;
     93 
     94   for (size_t i = 0; i < codecs_info.size(); ++i) {
     95     const MediaCodecBridge::CodecsInfo& info = codecs_info[i];
     96     if (info.direction != media::MEDIA_CODEC_ENCODER || info.codecs != "vp8" ||
     97         VideoCodecBridge::IsKnownUnaccelerated(media::kCodecVP8,
     98                                                media::MEDIA_CODEC_ENCODER)) {
     99       // We're only looking for a HW VP8 encoder.
    100       continue;
    101     }
    102     SupportedProfile profile;
    103     profile.profile = media::VP8PROFILE_MAIN;
    104     // Wouldn't it be nice if MediaCodec exposed the maximum capabilities of the
    105     // encoder?  Sure would be.  Too bad it doesn't.  So we hard-code some
    106     // reasonable defaults.
    107     profile.max_resolution.SetSize(1920, 1088);
    108     profile.max_framerate.numerator = 30;
    109     profile.max_framerate.denominator = 1;
    110     profiles.push_back(profile);
    111   }
    112   return profiles;
    113 }
    114 
// Validates the requested configuration, creates and starts the
// MediaCodec-backed VP8 encoder, then asynchronously notifies the client of
// completion and of the required bitstream-buffer count/size.  Failures are
// reported via RETURN_ON_FAILURE (which calls Client::NotifyError).
void AndroidVideoEncodeAccelerator::Initialize(
    VideoFrame::Format format,
    const gfx::Size& input_visible_size,
    media::VideoCodecProfile output_profile,
    uint32 initial_bitrate) {
  DVLOG(3) << __PRETTY_FUNCTION__ << " format: " << format
           << ", input_visible_size: " << input_visible_size.ToString()
           << ", output_profile: " << output_profile
           << ", initial_bitrate: " << initial_bitrate;
  DCHECK(!media_codec_);
  DCHECK(thread_checker_.CalledOnValidThread());

  // Only I420 input and VP8 output are supported, and MediaCodec must be
  // available with support for dynamic parameter (bitrate) updates.
  RETURN_ON_FAILURE(media::MediaCodecBridge::IsAvailable() &&
                        media::MediaCodecBridge::SupportsSetParameters() &&
                        format == VideoFrame::I420 &&
                        output_profile == media::VP8PROFILE_MAIN,
                    "Unexpected combo: " << format << ", " << output_profile,
                    kInvalidArgumentError);

  last_set_bitrate_ = initial_bitrate;

  // Only consider using MediaCodec if it's likely backed by hardware.
  RETURN_ON_FAILURE(!media::VideoCodecBridge::IsKnownUnaccelerated(
                         media::kCodecVP8, media::MEDIA_CODEC_ENCODER),
                    "No HW support",
                    kPlatformFailureError);

  // TODO(fischman): when there is more HW out there with different color-space
  // support, this should turn into a negotiation with the codec for supported
  // formats.  For now we use the only format supported by the only available
  // HW.
  media_codec_.reset(
      media::VideoCodecBridge::CreateEncoder(media::kCodecVP8,
                                             input_visible_size,
                                             initial_bitrate,
                                             INITIAL_FRAMERATE,
                                             IFRAME_INTERVAL,
                                             COLOR_FORMAT_YUV420_SEMIPLANAR));

  RETURN_ON_FAILURE(
      media_codec_,
      "Failed to create/start the codec: " << input_visible_size.ToString(),
      kPlatformFailureError);

  // Notify the client on a fresh stack via the message loop rather than
  // calling back synchronously from Initialize().
  base::MessageLoop::current()->PostTask(
      FROM_HERE,
      base::Bind(&VideoEncodeAccelerator::Client::NotifyInitializeDone,
                 client_ptr_factory_.GetWeakPtr()));

  // Record the codec's output-buffer geometry; DoOutputBuffersSuffice() later
  // checks that the codec never exceeds what the client was told here.
  num_output_buffers_ = media_codec_->GetOutputBuffersCount();
  output_buffers_capacity_ = media_codec_->GetOutputBuffersCapacity();
  base::MessageLoop::current()->PostTask(
      FROM_HERE,
      base::Bind(&VideoEncodeAccelerator::Client::RequireBitstreamBuffers,
                 client_ptr_factory_.GetWeakPtr(),
                 num_output_buffers_,
                 input_visible_size,
                 output_buffers_capacity_));
}
    174 
    175 void AndroidVideoEncodeAccelerator::MaybeStartIOTimer() {
    176   if (!io_timer_.IsRunning() &&
    177       (num_buffers_at_codec_ > 0 || !pending_frames_.empty())) {
    178     io_timer_.Start(FROM_HERE,
    179                     EncodePollDelay(),
    180                     this,
    181                     &AndroidVideoEncodeAccelerator::DoIOTask);
    182   }
    183 }
    184 
    185 void AndroidVideoEncodeAccelerator::MaybeStopIOTimer() {
    186   if (io_timer_.IsRunning() &&
    187       (num_buffers_at_codec_ == 0 && pending_frames_.empty())) {
    188     io_timer_.Stop();
    189   }
    190 }
    191 
    192 void AndroidVideoEncodeAccelerator::Encode(
    193     const scoped_refptr<VideoFrame>& frame,
    194     bool force_keyframe) {
    195   DVLOG(3) << __PRETTY_FUNCTION__ << ": " << force_keyframe;
    196   DCHECK(thread_checker_.CalledOnValidThread());
    197   RETURN_ON_FAILURE(frame->format() == VideoFrame::I420,
    198                     "Unexpected format",
    199                     kInvalidArgumentError);
    200 
    201   // MediaCodec doesn't have a way to specify stride for non-Packed formats, so
    202   // we insist on being called with packed frames and no cropping :(
    203   RETURN_ON_FAILURE(frame->row_bytes(VideoFrame::kYPlane) ==
    204                             frame->stride(VideoFrame::kYPlane) &&
    205                         frame->row_bytes(VideoFrame::kUPlane) ==
    206                             frame->stride(VideoFrame::kUPlane) &&
    207                         frame->row_bytes(VideoFrame::kVPlane) ==
    208                             frame->stride(VideoFrame::kVPlane) &&
    209                         gfx::Rect(frame->coded_size()) == frame->visible_rect(),
    210                     "Non-packed frame, or visible rect != coded size",
    211                     kInvalidArgumentError);
    212 
    213   pending_frames_.push(MakeTuple(frame, force_keyframe, base::Time::Now()));
    214   DoIOTask();
    215 }
    216 
    217 void AndroidVideoEncodeAccelerator::UseOutputBitstreamBuffer(
    218     const media::BitstreamBuffer& buffer) {
    219   DVLOG(3) << __PRETTY_FUNCTION__ << ": bitstream_buffer_id=" << buffer.id();
    220   DCHECK(thread_checker_.CalledOnValidThread());
    221   RETURN_ON_FAILURE(buffer.size() >= media_codec_->GetOutputBuffersCapacity(),
    222                     "Output buffers too small!",
    223                     kInvalidArgumentError);
    224   available_bitstream_buffers_.push_back(buffer);
    225   DoIOTask();
    226 }
    227 
    228 void AndroidVideoEncodeAccelerator::RequestEncodingParametersChange(
    229     uint32 bitrate,
    230     uint32 framerate) {
    231   DVLOG(3) << __PRETTY_FUNCTION__ << ": bitrate: " << bitrate
    232            << ", framerate: " << framerate;
    233   DCHECK(thread_checker_.CalledOnValidThread());
    234   if (bitrate != last_set_bitrate_) {
    235     last_set_bitrate_ = bitrate;
    236     media_codec_->SetVideoBitrate(bitrate);
    237   }
    238   // Note: Android's MediaCodec doesn't allow mid-stream adjustments to
    239   // framerate, so we ignore that here.  This is OK because Android only uses
    240   // the framerate value from MediaFormat during configure() as a proxy for
    241   // bitrate, and we set that explicitly.
    242 }
    243 
void AndroidVideoEncodeAccelerator::Destroy() {
  DVLOG(3) << __PRETTY_FUNCTION__;
  DCHECK(thread_checker_.CalledOnValidThread());
  // Sever the client link first so no further notifications are delivered
  // during teardown.
  client_ptr_factory_.InvalidateWeakPtrs();
  if (media_codec_) {
    if (io_timer_.IsRunning())
      io_timer_.Stop();
    media_codec_->Stop();
  }
  // Destroy() is the terminal call on this object; it deletes itself.
  delete this;
}
    255 
void AndroidVideoEncodeAccelerator::DoIOTask() {
  // Push pending input toward the codec, pull finished output from it, then
  // reconcile the polling timer with the resulting amount of in-flight work.
  QueueInput();
  DequeueOutput();
  MaybeStartIOTimer();
  MaybeStopIOTimer();
}
    262 
// Feeds at most one pending frame to the codec: dequeues a codec input
// buffer, converts the I420 frame into it as NV12, and queues it for
// encoding.  Returns silently (no error) when the client is gone, nothing is
// pending, or no input buffer is available yet.
void AndroidVideoEncodeAccelerator::QueueInput() {
  if (!client_ptr_factory_.GetWeakPtr() || pending_frames_.empty())
    return;

  int input_buf_index = 0;
  media::MediaCodecStatus status =
      media_codec_->DequeueInputBuffer(NoWaitTimeOut(), &input_buf_index);
  if (status != media::MEDIA_CODEC_OK) {
    // "Try again later" is normal under saturation; anything else besides an
    // outright codec error is unexpected.
    DCHECK(status == media::MEDIA_CODEC_DEQUEUE_INPUT_AGAIN_LATER ||
           status == media::MEDIA_CODEC_ERROR);
    RETURN_ON_FAILURE(status != media::MEDIA_CODEC_ERROR,
                      "MediaCodec error",
                      kPlatformFailureError);
    return;
  }

  // Tuple layout (see Encode()): a = frame, b = force_keyframe, c = enqueue
  // time.
  const PendingFrames::value_type& input = pending_frames_.front();
  bool is_key_frame = input.b;
  if (is_key_frame) {
    // Ideally MediaCodec would honor BUFFER_FLAG_SYNC_FRAME so we could
    // indicate this in the QueueInputBuffer() call below and guarantee _this_
    // frame be encoded as a key frame, but sadly that flag is ignored.
    // Instead, we request a key frame "soon".
    media_codec_->RequestKeyFrameSoon();
  }
  scoped_refptr<VideoFrame> frame = input.a;

  uint8* buffer = NULL;
  size_t capacity = 0;
  media_codec_->GetInputBuffer(input_buf_index, &buffer, &capacity);

  size_t queued_size =
      VideoFrame::AllocationSize(VideoFrame::I420, frame->coded_size());
  RETURN_ON_FAILURE(capacity >= queued_size,
                    "Failed to get input buffer: " << input_buf_index,
                    kPlatformFailureError);

  // Lay out the destination NV12 image inside the codec's input buffer: Y
  // plane first, then interleaved UV.  Frames were validated as packed (stride
  // == row_bytes) in Encode(), so strides describe the source exactly.
  uint8* dst_y = buffer;
  int dst_stride_y = frame->stride(VideoFrame::kYPlane);
  uint8* dst_uv = buffer + frame->stride(VideoFrame::kYPlane) *
                               frame->rows(VideoFrame::kYPlane);
  int dst_stride_uv = frame->stride(VideoFrame::kUPlane) * 2;
  // Why NV12?  Because COLOR_FORMAT_YUV420_SEMIPLANAR.  See comment at other
  // mention of that constant.  (libyuv returns 0 on success, hence the '!'.)
  bool converted = !libyuv::I420ToNV12(frame->data(VideoFrame::kYPlane),
                                       frame->stride(VideoFrame::kYPlane),
                                       frame->data(VideoFrame::kUPlane),
                                       frame->stride(VideoFrame::kUPlane),
                                       frame->data(VideoFrame::kVPlane),
                                       frame->stride(VideoFrame::kVPlane),
                                       dst_y,
                                       dst_stride_y,
                                       dst_uv,
                                       dst_stride_uv,
                                       frame->coded_size().width(),
                                       frame->coded_size().height());
  RETURN_ON_FAILURE(converted, "Failed to I420ToNV12!", kPlatformFailureError);

  // A fabricated strictly-increasing timestamp is used instead of the frame's
  // real time — presumably MediaCodec only needs monotonicity here; confirm
  // against MediaCodecBridge if timestamps ever become client-visible.
  fake_input_timestamp_ += base::TimeDelta::FromMicroseconds(1);
  status = media_codec_->QueueInputBuffer(
      input_buf_index, NULL, queued_size, fake_input_timestamp_);
  UMA_HISTOGRAM_TIMES("Media.AVEA.InputQueueTime", base::Time::Now() - input.c);
  RETURN_ON_FAILURE(status == media::MEDIA_CODEC_OK,
                    "Failed to QueueInputBuffer: " << status,
                    kPlatformFailureError);
  ++num_buffers_at_codec_;
  pending_frames_.pop();
}
    331 
// Returns true iff the codec's current output-buffer count and capacity still
// fit within what the client was told at Initialize() time.
bool AndroidVideoEncodeAccelerator::DoOutputBuffersSuffice() {
  // If this returns false ever, then the VEA::Client interface will need to
  // grow a DismissBitstreamBuffer() call, and VEA::Client impls will have to be
  // prepared to field multiple requests to RequireBitstreamBuffers().
  int count = media_codec_->GetOutputBuffersCount();
  size_t capacity = media_codec_->GetOutputBuffersCapacity();
  // GetOutputBuffers() is deliberately first in the && chain so it always
  // runs — NOTE(review): it appears to refresh the bridge's cached output
  // buffer references; confirm against MediaCodecBridge.
  bool ret = media_codec_->GetOutputBuffers() && count <= num_output_buffers_ &&
             capacity <= output_buffers_capacity_;
  LOG_IF(ERROR, !ret) << "Need more/bigger buffers; before: "
                      << num_output_buffers_ << "x" << output_buffers_capacity_
                      << ", now: " << count << "x" << capacity;
  UMA_HISTOGRAM_BOOLEAN("Media.AVEA.OutputBuffersSuffice", ret);
  return ret;
}
    346 
// Drains one encoded buffer from the codec (if available), copies it into a
// client-supplied bitstream buffer via shared memory, and posts
// BitstreamBufferReady to the client.  Returns silently when the client is
// gone, no client buffers are available, or nothing is pending in the codec.
void AndroidVideoEncodeAccelerator::DequeueOutput() {
  if (!client_ptr_factory_.GetWeakPtr() ||
      available_bitstream_buffers_.empty() || num_buffers_at_codec_ == 0) {
    return;
  }

  int32 buf_index = 0;
  size_t offset = 0;
  size_t size = 0;
  bool key_frame = false;
  // Re-poll until a real output buffer index is produced; format/buffer
  // changes are validated in-line, and "try again later" exits entirely.
  do {
    media::MediaCodecStatus status = media_codec_->DequeueOutputBuffer(
        NoWaitTimeOut(), &buf_index, &offset, &size, NULL, NULL, &key_frame);
    switch (status) {
      case media::MEDIA_CODEC_DEQUEUE_OUTPUT_AGAIN_LATER:
        return;

      case media::MEDIA_CODEC_ERROR:
        RETURN_ON_FAILURE(false, "Codec error", kPlatformFailureError);
        // Unreachable because of previous statement, but included for clarity.
        return;

      case media::MEDIA_CODEC_OUTPUT_FORMAT_CHANGED:  // Fall-through.
      case media::MEDIA_CODEC_OUTPUT_BUFFERS_CHANGED:
        // Tolerable only if the client's existing buffers still suffice.
        RETURN_ON_FAILURE(DoOutputBuffersSuffice(),
                          "Bitstream now requires more/larger buffers",
                          kPlatformFailureError);
        break;

      case media::MEDIA_CODEC_OK:
        DCHECK_GE(buf_index, 0);
        break;

      default:
        NOTREACHED();
        break;
    }
  } while (buf_index < 0);

  // Map the client's shared-memory-backed buffer and copy the encoded bytes
  // into it.
  media::BitstreamBuffer bitstream_buffer = available_bitstream_buffers_.back();
  available_bitstream_buffers_.pop_back();
  scoped_ptr<base::SharedMemory> shm(
      new base::SharedMemory(bitstream_buffer.handle(), false));
  RETURN_ON_FAILURE(shm->Map(bitstream_buffer.size()),
                    "Failed to map SHM",
                    kPlatformFailureError);
  RETURN_ON_FAILURE(size <= shm->mapped_size(),
                    "Encoded buffer too large: " << size << ">"
                                                 << shm->mapped_size(),
                    kPlatformFailureError);

  media_codec_->CopyFromOutputBuffer(buf_index, offset, shm->memory(), size);
  media_codec_->ReleaseOutputBuffer(buf_index, false);
  --num_buffers_at_codec_;

  UMA_HISTOGRAM_COUNTS_10000("Media.AVEA.EncodedBufferSizeKB", size / 1024);
  // Notify the client on a fresh stack via the message loop.
  base::MessageLoop::current()->PostTask(
      FROM_HERE,
      base::Bind(&VideoEncodeAccelerator::Client::BitstreamBufferReady,
                 client_ptr_factory_.GetWeakPtr(),
                 bitstream_buffer.id(),
                 size,
                 key_frame));
}
    411 
    412 }  // namespace content
    413