      1 // Copyright 2015 The Chromium Authors. All rights reserved.
      2 // Use of this source code is governed by a BSD-style license that can be
      3 // found in the LICENSE file.
      4 
      5 #include "v4l2_slice_video_decode_accelerator.h"
      6 
      7 #include <errno.h>
      8 #include <fcntl.h>
      9 #include <poll.h>
     10 #include <string.h>
     11 #include <sys/eventfd.h>
     12 #include <sys/ioctl.h>
     13 #include <sys/mman.h>
     14 
     15 #include <memory>
     16 
     17 #include "base/bind.h"
     18 #include "base/bind_helpers.h"
     19 #include "base/callback.h"
     20 #include "base/callback_helpers.h"
     21 #include "base/command_line.h"
     22 #include "base/macros.h"
     23 #include "base/memory/ptr_util.h"
     24 #include "base/numerics/safe_conversions.h"
     25 #include "base/single_thread_task_runner.h"
     26 #include "base/strings/stringprintf.h"
     27 #include "base/threading/thread_task_runner_handle.h"
     28 #include "shared_memory_region.h"
     29 
     30 #define LOGF(level) LOG(level) << __func__ << "(): "
     31 #define DLOGF(level) DLOG(level) << __func__ << "(): "
     32 #define DVLOGF(level) DVLOG(level) << __func__ << "(): "
     33 #define PLOGF(level) PLOG(level) << __func__ << "(): "
     34 
     35 #define NOTIFY_ERROR(x)                         \
     36   do {                                          \
      37     LOGF(ERROR) << "Setting error state: " << x; \
     38     SetErrorState(x);                           \
     39   } while (0)
     40 
     41 #define IOCTL_OR_ERROR_RETURN_VALUE(type, arg, value, type_str) \
     42   do {                                                          \
     43     if (device_->Ioctl(type, arg) != 0) {                       \
     44       PLOGF(ERROR) << "ioctl() failed: " << type_str;           \
     45       return value;                                             \
     46     }                                                           \
     47   } while (0)
     48 
     49 #define IOCTL_OR_ERROR_RETURN(type, arg) \
     50   IOCTL_OR_ERROR_RETURN_VALUE(type, arg, ((void)0), #type)
     51 
     52 #define IOCTL_OR_ERROR_RETURN_FALSE(type, arg) \
     53   IOCTL_OR_ERROR_RETURN_VALUE(type, arg, false, #type)
     54 
     55 #define IOCTL_OR_LOG_ERROR(type, arg)              \
     56   do {                                             \
     57     if (device_->Ioctl(type, arg) != 0)            \
     58       PLOGF(ERROR) << "ioctl() failed: " << #type; \
     59   } while (0)
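         // Example (illustrative): callers that must abort on ioctl failure write
         //   IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYCAP, &caps);
         // which logs the failing request name and errno via PLOG and returns false
         // from the enclosing function, keeping the happy path free of error plumbing.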
     60 
     61 namespace media {
     62 
     63 // static
     64 const uint32_t V4L2SliceVideoDecodeAccelerator::supported_input_fourccs_[] = {
     65     V4L2_PIX_FMT_H264_SLICE, V4L2_PIX_FMT_VP8_FRAME, V4L2_PIX_FMT_VP9_FRAME,
     66 };
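         // These *_SLICE/*_FRAME fourccs are the stateless ("slice-based") decode
         // formats: the bitstream is parsed in software by H264Decoder/VP8Decoder/
         // VP9Decoder, and the V4L2*Accelerator classes below pass the parsed headers
         // and reference lists to the driver, while only the slice/frame payload is
         // queued on the OUTPUT (input bitstream) queue.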
     67 
     68 class V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface
     69     : public base::RefCounted<V4L2DecodeSurface> {
     70  public:
     71   using ReleaseCB = base::Callback<void(int)>;
     72 
     73   V4L2DecodeSurface(int32_t bitstream_id,
     74                     int input_record,
     75                     int output_record,
     76                     const ReleaseCB& release_cb);
     77 
      78   // Mark the surface as decoded. This also releases all reference surfaces,
      79   // as they are no longer needed, and runs the done callback if one is set.
     80   void SetDecoded();
     81   bool decoded() const { return decoded_; }
     82 
     83   int32_t bitstream_id() const { return bitstream_id_; }
     84   int input_record() const { return input_record_; }
     85   int output_record() const { return output_record_; }
     86   uint32_t config_store() const { return config_store_; }
     87 
     88   // Take references to each reference surface and keep them until the
     89   // target surface is decoded.
     90   void SetReferenceSurfaces(
     91       const std::vector<scoped_refptr<V4L2DecodeSurface>>& ref_surfaces);
     92 
      93   // If provided via this method, the |done_cb| callback will be executed
      94   // after decoding into this surface finishes. The callback is reset
      95   // afterwards, so it needs to be set again before each decode operation.
     96   void SetDecodeDoneCallback(const base::Closure& done_cb) {
     97     DCHECK(done_cb_.is_null());
     98     done_cb_ = done_cb;
     99   }
    100 
    101   std::string ToString() const;
    102 
    103  private:
    104   friend class base::RefCounted<V4L2DecodeSurface>;
    105   ~V4L2DecodeSurface();
    106 
    107   int32_t bitstream_id_;
    108   int input_record_;
    109   int output_record_;
    110   uint32_t config_store_;
    111 
    112   bool decoded_;
    113   ReleaseCB release_cb_;
    114   base::Closure done_cb_;
    115 
    116   std::vector<scoped_refptr<V4L2DecodeSurface>> reference_surfaces_;
    117 
    118   DISALLOW_COPY_AND_ASSIGN(V4L2DecodeSurface);
    119 };
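         // A V4L2DecodeSurface pairs one input (bitstream) buffer with one output
         // (picture) buffer for a single decode operation. It is reference counted so
         // that a surface used as a reference frame stays alive until every surface
         // depending on it has been decoded; only when the last reference is dropped
         // does the destructor hand the output record back via |release_cb_|.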
    120 
    121 V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface::V4L2DecodeSurface(
    122     int32_t bitstream_id,
    123     int input_record,
    124     int output_record,
    125     const ReleaseCB& release_cb)
    126     : bitstream_id_(bitstream_id),
    127       input_record_(input_record),
    128       output_record_(output_record),
    129       config_store_(input_record + 1),
    130       decoded_(false),
    131       release_cb_(release_cb) {}
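         // Note the config_store_ value: it is simply |input_record| + 1, keeping it
         // non-zero (EnqueueInputRecord() DCHECKs config_store > 0) and giving each
         // input buffer its own config store ID, so that per-buffer codec controls
         // can be tied to the corresponding queued buffer.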
    132 
    133 V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface::~V4L2DecodeSurface() {
    134   DVLOGF(5) << "Releasing output record id=" << output_record_;
    135   release_cb_.Run(output_record_);
    136 }
    137 
    138 void V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface::SetReferenceSurfaces(
    139     const std::vector<scoped_refptr<V4L2DecodeSurface>>& ref_surfaces) {
    140   DCHECK(reference_surfaces_.empty());
    141   reference_surfaces_ = ref_surfaces;
    142 }
    143 
    144 void V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface::SetDecoded() {
    145   DCHECK(!decoded_);
    146   decoded_ = true;
    147 
    148   // We can now drop references to all reference surfaces for this surface
    149   // as we are done with decoding.
    150   reference_surfaces_.clear();
    151 
    152   // And finally execute and drop the decode done callback, if set.
    153   if (!done_cb_.is_null())
    154     base::ResetAndReturn(&done_cb_).Run();
    155 }
    156 
    157 std::string V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface::ToString()
    158     const {
    159   std::string out;
    160   base::StringAppendF(&out, "Buffer %d -> %d. ", input_record_, output_record_);
    161   base::StringAppendF(&out, "Reference surfaces:");
    162   for (const auto& ref : reference_surfaces_) {
    163     DCHECK_NE(ref->output_record(), output_record_);
    164     base::StringAppendF(&out, " %d", ref->output_record());
    165   }
    166   return out;
    167 }
    168 
    169 V4L2SliceVideoDecodeAccelerator::InputRecord::InputRecord()
    170     : input_id(-1),
    171       address(nullptr),
    172       length(0),
    173       bytes_used(0),
    174       at_device(false) {}
    175 
    176 V4L2SliceVideoDecodeAccelerator::OutputRecord::OutputRecord()
    177     : at_device(false),
    178       at_client(false),
    179       picture_id(-1),
    180       cleared(false) {}
    181 
    182 struct V4L2SliceVideoDecodeAccelerator::BitstreamBufferRef {
    183   BitstreamBufferRef(
    184       base::WeakPtr<VideoDecodeAccelerator::Client>& client,
    185       const scoped_refptr<base::SingleThreadTaskRunner>& client_task_runner,
    186       SharedMemoryRegion* shm,
    187       int32_t input_id);
    188   ~BitstreamBufferRef();
    189   const base::WeakPtr<VideoDecodeAccelerator::Client> client;
    190   const scoped_refptr<base::SingleThreadTaskRunner> client_task_runner;
    191   const std::unique_ptr<SharedMemoryRegion> shm;
    192   off_t bytes_used;
    193   const int32_t input_id;
    194 };
    195 
    196 V4L2SliceVideoDecodeAccelerator::BitstreamBufferRef::BitstreamBufferRef(
    197     base::WeakPtr<VideoDecodeAccelerator::Client>& client,
    198     const scoped_refptr<base::SingleThreadTaskRunner>& client_task_runner,
    199     SharedMemoryRegion* shm,
    200     int32_t input_id)
    201     : client(client),
    202       client_task_runner(client_task_runner),
    203       shm(shm),
    204       bytes_used(0),
    205       input_id(input_id) {}
    206 
    207 V4L2SliceVideoDecodeAccelerator::BitstreamBufferRef::~BitstreamBufferRef() {
    208   if (input_id >= 0) {
    209     DVLOGF(5) << "returning input_id: " << input_id;
    210     client_task_runner->PostTask(
    211         FROM_HERE,
    212         base::Bind(&VideoDecodeAccelerator::Client::NotifyEndOfBitstreamBuffer,
    213                    client, input_id));
    214   }
    215 }
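         // Destroying a BitstreamBufferRef is what returns the bitstream buffer to
         // the client: NotifyEndOfBitstreamBuffer() is posted from the destructor, so
         // simply dropping the ref (as DecodeTask() does for empty buffers) is enough
         // to recycle it.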
    216 
    217 V4L2SliceVideoDecodeAccelerator::PictureRecord::PictureRecord(
    218     bool cleared,
    219     const Picture& picture)
    220     : cleared(cleared), picture(picture) {}
    221 
    222 V4L2SliceVideoDecodeAccelerator::PictureRecord::~PictureRecord() {}
    223 
    224 class V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator
    225     : public H264Decoder::H264Accelerator {
    226  public:
    227   explicit V4L2H264Accelerator(V4L2SliceVideoDecodeAccelerator* v4l2_dec);
    228   ~V4L2H264Accelerator() override;
    229 
    230   // H264Decoder::H264Accelerator implementation.
    231   scoped_refptr<H264Picture> CreateH264Picture() override;
    232 
    233   bool SubmitFrameMetadata(const H264SPS* sps,
    234                            const H264PPS* pps,
    235                            const H264DPB& dpb,
    236                            const H264Picture::Vector& ref_pic_listp0,
    237                            const H264Picture::Vector& ref_pic_listb0,
    238                            const H264Picture::Vector& ref_pic_listb1,
    239                            const scoped_refptr<H264Picture>& pic) override;
    240 
    241   bool SubmitSlice(const H264PPS* pps,
    242                    const H264SliceHeader* slice_hdr,
    243                    const H264Picture::Vector& ref_pic_list0,
    244                    const H264Picture::Vector& ref_pic_list1,
    245                    const scoped_refptr<H264Picture>& pic,
    246                    const uint8_t* data,
    247                    size_t size) override;
    248 
    249   bool SubmitDecode(const scoped_refptr<H264Picture>& pic) override;
    250   bool OutputPicture(const scoped_refptr<H264Picture>& pic) override;
    251 
    252   void Reset() override;
    253 
    254  private:
    255   // Max size of reference list.
    256   static const size_t kDPBIndicesListSize = 32;
    257   void H264PictureListToDPBIndicesList(const H264Picture::Vector& src_pic_list,
    258                                        uint8_t dst_list[kDPBIndicesListSize]);
    259 
    260   void H264DPBToV4L2DPB(
    261       const H264DPB& dpb,
    262       std::vector<scoped_refptr<V4L2DecodeSurface>>* ref_surfaces);
    263 
    264   scoped_refptr<V4L2DecodeSurface> H264PictureToV4L2DecodeSurface(
    265       const scoped_refptr<H264Picture>& pic);
    266 
    267   size_t num_slices_;
    268   V4L2SliceVideoDecodeAccelerator* v4l2_dec_;
    269 
    270   // TODO(posciak): This should be queried from hardware once supported.
    271   static const size_t kMaxSlices = 16;
    272   struct v4l2_ctrl_h264_slice_param v4l2_slice_params_[kMaxSlices];
    273   struct v4l2_ctrl_h264_decode_param v4l2_decode_param_;
    274 
    275   DISALLOW_COPY_AND_ASSIGN(V4L2H264Accelerator);
    276 };
    277 
    278 class V4L2SliceVideoDecodeAccelerator::V4L2VP8Accelerator
    279     : public VP8Decoder::VP8Accelerator {
    280  public:
    281   explicit V4L2VP8Accelerator(V4L2SliceVideoDecodeAccelerator* v4l2_dec);
    282   ~V4L2VP8Accelerator() override;
    283 
    284   // VP8Decoder::VP8Accelerator implementation.
    285   scoped_refptr<VP8Picture> CreateVP8Picture() override;
    286 
    287   bool SubmitDecode(const scoped_refptr<VP8Picture>& pic,
    288                     const Vp8FrameHeader* frame_hdr,
    289                     const scoped_refptr<VP8Picture>& last_frame,
    290                     const scoped_refptr<VP8Picture>& golden_frame,
    291                     const scoped_refptr<VP8Picture>& alt_frame) override;
    292 
    293   bool OutputPicture(const scoped_refptr<VP8Picture>& pic) override;
    294 
    295  private:
    296   scoped_refptr<V4L2DecodeSurface> VP8PictureToV4L2DecodeSurface(
    297       const scoped_refptr<VP8Picture>& pic);
    298 
    299   V4L2SliceVideoDecodeAccelerator* v4l2_dec_;
    300 
    301   DISALLOW_COPY_AND_ASSIGN(V4L2VP8Accelerator);
    302 };
    303 
    304 class V4L2SliceVideoDecodeAccelerator::V4L2VP9Accelerator
    305     : public VP9Decoder::VP9Accelerator {
    306  public:
    307   explicit V4L2VP9Accelerator(V4L2SliceVideoDecodeAccelerator* v4l2_dec);
    308   ~V4L2VP9Accelerator() override;
    309 
    310   // VP9Decoder::VP9Accelerator implementation.
    311   scoped_refptr<VP9Picture> CreateVP9Picture() override;
    312 
    313   bool SubmitDecode(const scoped_refptr<VP9Picture>& pic,
    314                     const Vp9SegmentationParams& segm_params,
    315                     const Vp9LoopFilterParams& lf_params,
    316                     const std::vector<scoped_refptr<VP9Picture>>& ref_pictures,
    317                     const base::Closure& done_cb) override;
    318 
    319   bool OutputPicture(const scoped_refptr<VP9Picture>& pic) override;
    320 
    321   bool GetFrameContext(const scoped_refptr<VP9Picture>& pic,
    322                        Vp9FrameContext* frame_ctx) override;
    323 
    324   bool IsFrameContextRequired() const override {
    325     return device_needs_frame_context_;
    326   }
    327 
    328  private:
    329   scoped_refptr<V4L2DecodeSurface> VP9PictureToV4L2DecodeSurface(
    330       const scoped_refptr<VP9Picture>& pic);
    331 
    332   bool device_needs_frame_context_;
    333 
    334   V4L2SliceVideoDecodeAccelerator* v4l2_dec_;
    335 
    336   DISALLOW_COPY_AND_ASSIGN(V4L2VP9Accelerator);
    337 };
    338 
    339 // Codec-specific subclasses of software decoder picture classes.
    340 // This allows us to keep decoders oblivious of our implementation details.
    341 class V4L2H264Picture : public H264Picture {
    342  public:
    343   explicit V4L2H264Picture(
    344       const scoped_refptr<V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>&
    345           dec_surface);
    346 
    347   V4L2H264Picture* AsV4L2H264Picture() override { return this; }
    348   scoped_refptr<V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>
    349   dec_surface() {
    350     return dec_surface_;
    351   }
    352 
    353  private:
    354   ~V4L2H264Picture() override;
    355 
    356   scoped_refptr<V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>
    357       dec_surface_;
    358 
    359   DISALLOW_COPY_AND_ASSIGN(V4L2H264Picture);
    360 };
    361 
    362 V4L2H264Picture::V4L2H264Picture(
    363     const scoped_refptr<V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>&
    364         dec_surface)
    365     : dec_surface_(dec_surface) {}
    366 
    367 V4L2H264Picture::~V4L2H264Picture() {}
    368 
    369 class V4L2VP8Picture : public VP8Picture {
    370  public:
    371   explicit V4L2VP8Picture(
    372       const scoped_refptr<V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>&
    373           dec_surface);
    374 
    375   V4L2VP8Picture* AsV4L2VP8Picture() override { return this; }
    376   scoped_refptr<V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>
    377   dec_surface() {
    378     return dec_surface_;
    379   }
    380 
    381  private:
    382   ~V4L2VP8Picture() override;
    383 
    384   scoped_refptr<V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>
    385       dec_surface_;
    386 
    387   DISALLOW_COPY_AND_ASSIGN(V4L2VP8Picture);
    388 };
    389 
    390 V4L2VP8Picture::V4L2VP8Picture(
    391     const scoped_refptr<V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>&
    392         dec_surface)
    393     : dec_surface_(dec_surface) {}
    394 
    395 V4L2VP8Picture::~V4L2VP8Picture() {}
    396 
    397 class V4L2VP9Picture : public VP9Picture {
    398  public:
    399   explicit V4L2VP9Picture(
    400       const scoped_refptr<V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>&
    401           dec_surface);
    402 
    403   V4L2VP9Picture* AsV4L2VP9Picture() override { return this; }
    404   scoped_refptr<V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>
    405   dec_surface() {
    406     return dec_surface_;
    407   }
    408 
    409  private:
    410   ~V4L2VP9Picture() override;
    411 
    412   scoped_refptr<V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>
    413       dec_surface_;
    414 
    415   DISALLOW_COPY_AND_ASSIGN(V4L2VP9Picture);
    416 };
    417 
    418 V4L2VP9Picture::V4L2VP9Picture(
    419     const scoped_refptr<V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>&
    420         dec_surface)
    421     : dec_surface_(dec_surface) {}
    422 
    423 V4L2VP9Picture::~V4L2VP9Picture() {}
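         // The As*Picture() overrides above let the V4L2*Accelerator implementations
         // downcast the generic H264Picture/VP8Picture/VP9Picture objects they get
         // from the software decoders back to these wrappers and recover the
         // underlying V4L2DecodeSurface (see *PictureToV4L2DecodeSurface()).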
    424 
    425 V4L2SliceVideoDecodeAccelerator::V4L2SliceVideoDecodeAccelerator(
    426     const scoped_refptr<V4L2Device>& device)
    427     : input_planes_count_(0),
    428       output_planes_count_(0),
    429       child_task_runner_(base::ThreadTaskRunnerHandle::Get()),
    430       device_(device),
    431       decoder_thread_("V4L2SliceVideoDecodeAcceleratorThread"),
    432       device_poll_thread_("V4L2SliceVideoDecodeAcceleratorDevicePollThread"),
    433       input_streamon_(false),
    434       input_buffer_queued_count_(0),
    435       output_streamon_(false),
    436       output_buffer_queued_count_(0),
    437       video_profile_(VIDEO_CODEC_PROFILE_UNKNOWN),
    438       input_format_fourcc_(0),
    439       output_format_fourcc_(0),
    440       state_(kUninitialized),
    441       output_mode_(Config::OutputMode::ALLOCATE),
    442       decoder_flushing_(false),
    443       decoder_resetting_(false),
    444       surface_set_change_pending_(false),
    445       picture_clearing_count_(0),
    446       weak_this_factory_(this) {
    447   weak_this_ = weak_this_factory_.GetWeakPtr();
    448 }
    449 
    450 V4L2SliceVideoDecodeAccelerator::~V4L2SliceVideoDecodeAccelerator() {
    451   DVLOGF(2);
    452 
    453   DCHECK(child_task_runner_->BelongsToCurrentThread());
    454   DCHECK(!decoder_thread_.IsRunning());
    455   DCHECK(!device_poll_thread_.IsRunning());
    456 
    457   DCHECK(input_buffer_map_.empty());
    458   DCHECK(output_buffer_map_.empty());
    459 }
    460 
    461 void V4L2SliceVideoDecodeAccelerator::NotifyError(Error error) {
    462   if (!child_task_runner_->BelongsToCurrentThread()) {
    463     child_task_runner_->PostTask(
    464         FROM_HERE, base::Bind(&V4L2SliceVideoDecodeAccelerator::NotifyError,
    465                               weak_this_, error));
    466     return;
    467   }
    468 
    469   if (client_) {
    470     client_->NotifyError(error);
    471     client_ptr_factory_.reset();
    472   }
    473 }
    474 
    475 bool V4L2SliceVideoDecodeAccelerator::Initialize(const Config& config,
    476                                                  Client* client) {
    477   DVLOGF(3) << "profile: " << config.profile;
    478   DCHECK(child_task_runner_->BelongsToCurrentThread());
    479   DCHECK_EQ(state_, kUninitialized);
    480 
    481   if (config.output_mode != Config::OutputMode::ALLOCATE &&
    482       config.output_mode != Config::OutputMode::IMPORT) {
    483     NOTREACHED() << "Only ALLOCATE and IMPORT OutputModes are supported";
    484     return false;
    485   }
    486 
    487   client_ptr_factory_.reset(
    488       new base::WeakPtrFactory<VideoDecodeAccelerator::Client>(client));
    489   client_ = client_ptr_factory_->GetWeakPtr();
     490   // If we haven't been set up to decode on a separate thread via
     491   // TryToSetupDecodeOnSeparateThread(), use the main thread/client for
     492   // decode tasks.
    493   if (!decode_task_runner_) {
    494     decode_task_runner_ = child_task_runner_;
    495     DCHECK(!decode_client_);
    496     decode_client_ = client_;
    497   }
    498 
    499   video_profile_ = config.profile;
    500 
    501   // TODO(posciak): This needs to be queried once supported.
    502   input_planes_count_ = 1;
    503   output_planes_count_ = 1;
    504 
    505   input_format_fourcc_ =
    506       V4L2Device::VideoCodecProfileToV4L2PixFmt(video_profile_, true);
    507 
    508   if (!device_->Open(V4L2Device::Type::kDecoder, input_format_fourcc_)) {
    509     DVLOGF(1) << "Failed to open device for profile: " << config.profile
    510               << " fourcc: " << std::hex << "0x" << input_format_fourcc_;
    511     return false;
    512   }
    513 
    514   if (video_profile_ >= H264PROFILE_MIN && video_profile_ <= H264PROFILE_MAX) {
    515     h264_accelerator_.reset(new V4L2H264Accelerator(this));
    516     decoder_.reset(new H264Decoder(h264_accelerator_.get()));
    517   } else if (video_profile_ >= VP8PROFILE_MIN &&
    518              video_profile_ <= VP8PROFILE_MAX) {
    519     vp8_accelerator_.reset(new V4L2VP8Accelerator(this));
    520     decoder_.reset(new VP8Decoder(vp8_accelerator_.get()));
    521   } else if (video_profile_ >= VP9PROFILE_MIN &&
    522              video_profile_ <= VP9PROFILE_MAX) {
    523     vp9_accelerator_.reset(new V4L2VP9Accelerator(this));
    524     decoder_.reset(new VP9Decoder(vp9_accelerator_.get()));
    525   } else {
    526     NOTREACHED() << "Unsupported profile " << video_profile_;
    527     return false;
    528   }
    529 
    530   // Capabilities check.
    531   struct v4l2_capability caps;
    532   const __u32 kCapsRequired = V4L2_CAP_VIDEO_M2M_MPLANE | V4L2_CAP_STREAMING;
    533   IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYCAP, &caps);
    534   if ((caps.capabilities & kCapsRequired) != kCapsRequired) {
     535     LOGF(ERROR) << "VIDIOC_QUERYCAP: missing required capabilities;"
     536                 << " device caps: 0x" << std::hex << caps.capabilities;
    537     return false;
    538   }
    539 
    540   if (!SetupFormats())
    541     return false;
    542 
    543   if (!decoder_thread_.Start()) {
     544     DLOGF(ERROR) << "decoder thread failed to start";
    545     return false;
    546   }
    547   decoder_thread_task_runner_ = decoder_thread_.task_runner();
    548 
    549   state_ = kInitialized;
    550   output_mode_ = config.output_mode;
    551 
    552   // InitializeTask will NOTIFY_ERROR on failure.
    553   decoder_thread_task_runner_->PostTask(
    554       FROM_HERE, base::Bind(&V4L2SliceVideoDecodeAccelerator::InitializeTask,
    555                             base::Unretained(this)));
    556 
    557   DVLOGF(1) << "V4L2SliceVideoDecodeAccelerator initialized";
    558   return true;
    559 }
    560 
    561 void V4L2SliceVideoDecodeAccelerator::InitializeTask() {
    562   DVLOGF(3);
    563   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
    564   DCHECK_EQ(state_, kInitialized);
    565 
    566   if (!CreateInputBuffers())
    567     NOTIFY_ERROR(PLATFORM_FAILURE);
    568 
     569   // Output buffers will be created once the decoder gives us information
     570   // about their size and required count.
    571   state_ = kDecoding;
    572 }
    573 
    574 void V4L2SliceVideoDecodeAccelerator::Destroy() {
    575   DVLOGF(3);
    576   DCHECK(child_task_runner_->BelongsToCurrentThread());
    577 
    578   if (decoder_thread_.IsRunning()) {
    579     decoder_thread_task_runner_->PostTask(
    580         FROM_HERE, base::Bind(&V4L2SliceVideoDecodeAccelerator::DestroyTask,
    581                               base::Unretained(this)));
    582 
    583     // Wait for tasks to finish/early-exit.
    584     decoder_thread_.Stop();
    585   }
    586 
    587   delete this;
    588   DVLOGF(3) << "Destroyed";
    589 }
    590 
    591 void V4L2SliceVideoDecodeAccelerator::DestroyTask() {
    592   DVLOGF(3);
    593   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
    594 
    595   state_ = kError;
    596 
    597   decoder_->Reset();
    598 
    599   decoder_current_bitstream_buffer_.reset();
    600   while (!decoder_input_queue_.empty())
    601     decoder_input_queue_.pop();
    602 
    603   // Stop streaming and the device_poll_thread_.
    604   StopDevicePoll(false);
    605 
    606   DestroyInputBuffers();
    607   DestroyOutputs(false);
    608 
    609   DCHECK(surfaces_at_device_.empty());
    610   DCHECK(surfaces_at_display_.empty());
    611   DCHECK(decoder_display_queue_.empty());
    612 }
    613 
    614 static bool IsSupportedOutputFormat(uint32_t v4l2_format) {
     615   // Only the V4L2_PIX_FMT_NV12 output format is supported for now.
     616   // TODO(johnylin): add more supported formats if necessary.
    617   uint32_t kSupportedOutputFmtFourcc[] = { V4L2_PIX_FMT_NV12 };
    618   return std::find(
    619       kSupportedOutputFmtFourcc,
    620       kSupportedOutputFmtFourcc + arraysize(kSupportedOutputFmtFourcc),
    621       v4l2_format) !=
    622           kSupportedOutputFmtFourcc + arraysize(kSupportedOutputFmtFourcc);
    623 }
    624 
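         // V4L2 memory-to-memory naming reminder for the code below: the OUTPUT
         // (_MPLANE) queue carries buffers we feed to the device (the encoded
         // bitstream), while the CAPTURE (_MPLANE) queue carries buffers the device
         // fills with decoded frames.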
    625 bool V4L2SliceVideoDecodeAccelerator::SetupFormats() {
    626   DCHECK_EQ(state_, kUninitialized);
    627 
    628   size_t input_size;
    629   Size max_resolution, min_resolution;
    630   device_->GetSupportedResolution(input_format_fourcc_, &min_resolution,
    631                                   &max_resolution);
    632   if (max_resolution.width() > 1920 && max_resolution.height() > 1088)
    633     input_size = kInputBufferMaxSizeFor4k;
    634   else
    635     input_size = kInputBufferMaxSizeFor1080p;
    636 
    637   struct v4l2_fmtdesc fmtdesc;
    638   memset(&fmtdesc, 0, sizeof(fmtdesc));
    639   fmtdesc.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    640   bool is_format_supported = false;
    641   while (device_->Ioctl(VIDIOC_ENUM_FMT, &fmtdesc) == 0) {
    642     if (fmtdesc.pixelformat == input_format_fourcc_) {
    643       is_format_supported = true;
    644       break;
    645     }
    646     ++fmtdesc.index;
    647   }
    648 
    649   if (!is_format_supported) {
    650     DVLOGF(1) << "Input fourcc " << input_format_fourcc_
    651               << " not supported by device.";
    652     return false;
    653   }
    654 
    655   struct v4l2_format format;
    656   memset(&format, 0, sizeof(format));
    657   format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    658   format.fmt.pix_mp.pixelformat = input_format_fourcc_;
    659   format.fmt.pix_mp.plane_fmt[0].sizeimage = input_size;
    660   format.fmt.pix_mp.num_planes = input_planes_count_;
    661   IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format);
    662 
    663   // We have to set up the format for output, because the driver may not allow
    664   // changing it once we start streaming; whether it can support our chosen
    665   // output format or not may depend on the input format.
    666   memset(&fmtdesc, 0, sizeof(fmtdesc));
    667   fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    668   output_format_fourcc_ = 0;
    669   while (device_->Ioctl(VIDIOC_ENUM_FMT, &fmtdesc) == 0) {
    670     if (IsSupportedOutputFormat(fmtdesc.pixelformat)) {
    671       output_format_fourcc_ = fmtdesc.pixelformat;
    672       break;
    673     }
    674     ++fmtdesc.index;
    675   }
    676 
    677   if (output_format_fourcc_ == 0) {
    678     LOGF(ERROR) << "Could not find a usable output format";
    679     return false;
    680   }
    681 
    682   // Only set fourcc for output; resolution, etc., will come from the
    683   // driver once it extracts it from the stream.
    684   memset(&format, 0, sizeof(format));
    685   format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    686   format.fmt.pix_mp.pixelformat = output_format_fourcc_;
    687   format.fmt.pix_mp.num_planes = output_planes_count_;
    688   IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format);
    689 
    690   return true;
    691 }
    692 
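         // Allocates kNumInputBuffers MMAP buffers on the OUTPUT (bitstream) queue:
         // VIDIOC_REQBUFS requests them, VIDIOC_QUERYBUF reports each buffer's plane
         // size and mmap offset, and Mmap() maps the plane so bitstream data can be
         // copied into it before it is queued.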
    693 bool V4L2SliceVideoDecodeAccelerator::CreateInputBuffers() {
    694   DVLOGF(3);
    695   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
    696   DCHECK(!input_streamon_);
    697   DCHECK(input_buffer_map_.empty());
    698 
    699   struct v4l2_requestbuffers reqbufs;
    700   memset(&reqbufs, 0, sizeof(reqbufs));
    701   reqbufs.count = kNumInputBuffers;
    702   reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    703   reqbufs.memory = V4L2_MEMORY_MMAP;
    704   IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs);
    705   if (reqbufs.count < kNumInputBuffers) {
     706     PLOGF(ERROR) << "Could not allocate enough input (OUTPUT queue) buffers";
    707     return false;
    708   }
    709   input_buffer_map_.resize(reqbufs.count);
    710   for (size_t i = 0; i < input_buffer_map_.size(); ++i) {
    711     free_input_buffers_.push_back(i);
    712 
    713     // Query for the MEMORY_MMAP pointer.
    714     struct v4l2_plane planes[VIDEO_MAX_PLANES];
    715     struct v4l2_buffer buffer;
    716     memset(&buffer, 0, sizeof(buffer));
    717     memset(planes, 0, sizeof(planes));
    718     buffer.index = i;
    719     buffer.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    720     buffer.memory = V4L2_MEMORY_MMAP;
    721     buffer.m.planes = planes;
    722     buffer.length = input_planes_count_;
    723     IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYBUF, &buffer);
    724     void* address = device_->Mmap(nullptr,
    725                                   buffer.m.planes[0].length,
    726                                   PROT_READ | PROT_WRITE,
    727                                   MAP_SHARED,
    728                                   buffer.m.planes[0].m.mem_offset);
    729     if (address == MAP_FAILED) {
    730       PLOGF(ERROR) << "mmap() failed";
    731       return false;
    732     }
    733     input_buffer_map_[i].address = address;
    734     input_buffer_map_[i].length = buffer.m.planes[0].length;
    735   }
    736 
    737   return true;
    738 }
    739 
    740 bool V4L2SliceVideoDecodeAccelerator::CreateOutputBuffers() {
    741   DVLOGF(3);
    742   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
    743   DCHECK(!output_streamon_);
    744   DCHECK(output_buffer_map_.empty());
    745   DCHECK(surfaces_at_display_.empty());
    746   DCHECK(surfaces_at_device_.empty());
    747 
    748   visible_size_ = decoder_->GetPicSize();
    749   size_t num_pictures = decoder_->GetRequiredNumOfPictures();
    750 
    751   DCHECK_GT(num_pictures, 0u);
    752   DCHECK(!visible_size_.IsEmpty());
    753 
    754   struct v4l2_format format;
    755   memset(&format, 0, sizeof(format));
    756   format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    757   format.fmt.pix_mp.pixelformat = output_format_fourcc_;
    758   format.fmt.pix_mp.width = visible_size_.width();
    759   format.fmt.pix_mp.height = visible_size_.height();
    760   format.fmt.pix_mp.num_planes = input_planes_count_;
    761 
    762   if (device_->Ioctl(VIDIOC_S_FMT, &format) != 0) {
    763     PLOGF(ERROR) << "Failed setting format to: " << output_format_fourcc_;
    764     NOTIFY_ERROR(PLATFORM_FAILURE);
    765     return false;
    766   }
    767 
    768   coded_size_.SetSize(base::checked_cast<int>(format.fmt.pix_mp.width),
    769                       base::checked_cast<int>(format.fmt.pix_mp.height));
    770   DCHECK_EQ(coded_size_.width() % 16, 0);
    771   DCHECK_EQ(coded_size_.height() % 16, 0);
    772 
    773   if (!Rect(coded_size_).Contains(Rect(visible_size_))) {
    774     LOGF(ERROR) << "Got invalid adjusted coded size: "
    775                 << coded_size_.ToString();
    776     return false;
    777   }
    778 
    779   DVLOGF(3) << "buffer_count=" << num_pictures
    780             << ", visible size=" << visible_size_.ToString()
    781             << ", coded size=" << coded_size_.ToString();
    782 
     783   // In ALLOCATE mode the client can sample the output as RGB and doesn't
     784   // need to know the precise format.
    785   VideoPixelFormat pixel_format =
    786       (output_mode_ == Config::OutputMode::IMPORT)
    787           ? V4L2Device::V4L2PixFmtToVideoPixelFormat(output_format_fourcc_)
    788           : PIXEL_FORMAT_UNKNOWN;
    789 
    790   child_task_runner_->PostTask(
    791       FROM_HERE,
    792       base::Bind(&VideoDecodeAccelerator::Client::ProvidePictureBuffers,
    793                  client_, num_pictures, pixel_format, coded_size_));
    794 
     795   // Go into kAwaitingPictureBuffers to prevent us from doing any more decoding
     796   // or event handling while we are waiting for AssignPictureBuffers(). Without
     797   // this, a lack of available Pictures would not entirely stop decoding
     798   // progress; in the H.264 case, for example, we could keep decoding non-slice
     799   // NALUs and even hit another resolution change before being done with this
     800   // one. Once we get the buffers, we'll go back into kIdle, kick off further
     801   // event processing, and eventually return to kDecoding once no more events
     802   // are pending (if any).
    803   state_ = kAwaitingPictureBuffers;
    804   return true;
    805 }
    806 
    807 void V4L2SliceVideoDecodeAccelerator::DestroyInputBuffers() {
    808   DVLOGF(3);
    809   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread() ||
    810          !decoder_thread_.IsRunning());
    811   DCHECK(!input_streamon_);
    812 
    813   if (input_buffer_map_.empty())
    814     return;
    815 
    816   for (auto& input_record : input_buffer_map_) {
    817     if (input_record.address != nullptr)
    818       device_->Munmap(input_record.address, input_record.length);
    819   }
    820 
    821   struct v4l2_requestbuffers reqbufs;
    822   memset(&reqbufs, 0, sizeof(reqbufs));
    823   reqbufs.count = 0;
    824   reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    825   reqbufs.memory = V4L2_MEMORY_MMAP;
    826   IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs);
    827 
    828   input_buffer_map_.clear();
    829   free_input_buffers_.clear();
    830 }
    831 
    832 void V4L2SliceVideoDecodeAccelerator::DismissPictures(
    833     const std::vector<int32_t>& picture_buffer_ids,
    834     base::WaitableEvent* done) {
    835   DVLOGF(3);
    836   DCHECK(child_task_runner_->BelongsToCurrentThread());
    837 
    838   for (auto picture_buffer_id : picture_buffer_ids) {
    839     DVLOGF(1) << "dismissing PictureBuffer id=" << picture_buffer_id;
    840     client_->DismissPictureBuffer(picture_buffer_id);
    841   }
    842 
    843   done->Signal();
    844 }
    845 
    846 void V4L2SliceVideoDecodeAccelerator::DevicePollTask(bool poll_device) {
    847   DVLOGF(4);
    848   DCHECK(device_poll_thread_.task_runner()->BelongsToCurrentThread());
    849 
    850   bool event_pending;
    851   if (!device_->Poll(poll_device, &event_pending)) {
    852     NOTIFY_ERROR(PLATFORM_FAILURE);
    853     return;
    854   }
    855 
     856   // All processing should happen on ServiceDeviceTask(), since we shouldn't
     857   // touch decoder state from this thread.
    858   decoder_thread_task_runner_->PostTask(
    859       FROM_HERE, base::Bind(&V4L2SliceVideoDecodeAccelerator::ServiceDeviceTask,
    860                             base::Unretained(this)));
    861 }
    862 
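         // The device poll thread and the decoder thread ping-pong: DevicePollTask()
         // blocks in Poll() until the device has work (or is interrupted), posts
         // ServiceDeviceTask() to the decoder thread, which dequeues completed
         // buffers and, via SchedulePollIfNeeded(), posts the next DevicePollTask().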
    863 void V4L2SliceVideoDecodeAccelerator::ServiceDeviceTask() {
    864   DVLOGF(4);
    865   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
    866 
    867   // ServiceDeviceTask() should only ever be scheduled from DevicePollTask().
    868 
    869   Dequeue();
    870   SchedulePollIfNeeded();
    871 }
    872 
    873 void V4L2SliceVideoDecodeAccelerator::SchedulePollIfNeeded() {
    874   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
    875 
    876   if (!device_poll_thread_.IsRunning()) {
    877     DVLOGF(2) << "Device poll thread stopped, will not schedule poll";
    878     return;
    879   }
    880 
    881   DCHECK(input_streamon_ || output_streamon_);
    882 
    883   if (input_buffer_queued_count_ + output_buffer_queued_count_ == 0) {
    884     DVLOGF(4) << "No buffers queued, will not schedule poll";
    885     return;
    886   }
    887 
    888   DVLOGF(4) << "Scheduling device poll task";
    889 
    890   device_poll_thread_.task_runner()->PostTask(
    891       FROM_HERE, base::Bind(&V4L2SliceVideoDecodeAccelerator::DevicePollTask,
    892                             base::Unretained(this), true));
    893 
    894   DVLOGF(2) << "buffer counts: "
    895             << "INPUT[" << decoder_input_queue_.size() << "]"
    896             << " => DEVICE["
    897             << free_input_buffers_.size() << "+"
    898             << input_buffer_queued_count_ << "/"
    899             << input_buffer_map_.size() << "]->["
    900             << free_output_buffers_.size() << "+"
    901             << output_buffer_queued_count_ << "/"
    902             << output_buffer_map_.size() << "]"
    903             << " => DISPLAYQ[" << decoder_display_queue_.size() << "]"
    904             << " => CLIENT[" << surfaces_at_display_.size() << "]";
    905 }
    906 
    907 void V4L2SliceVideoDecodeAccelerator::Enqueue(
    908     const scoped_refptr<V4L2DecodeSurface>& dec_surface) {
    909   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
    910 
    911   const int old_inputs_queued = input_buffer_queued_count_;
    912   const int old_outputs_queued = output_buffer_queued_count_;
    913 
    914   if (!EnqueueInputRecord(dec_surface->input_record(),
    915                           dec_surface->config_store())) {
    916     DVLOGF(1) << "Failed queueing an input buffer";
    917     NOTIFY_ERROR(PLATFORM_FAILURE);
    918     return;
    919   }
    920 
    921   if (!EnqueueOutputRecord(dec_surface->output_record())) {
    922     DVLOGF(1) << "Failed queueing an output buffer";
    923     NOTIFY_ERROR(PLATFORM_FAILURE);
    924     return;
    925   }
    926 
    927   bool inserted =
    928       surfaces_at_device_
    929           .insert(std::make_pair(dec_surface->output_record(), dec_surface))
    930           .second;
    931   DCHECK(inserted);
    932 
    933   if (old_inputs_queued == 0 && old_outputs_queued == 0)
    934     SchedulePollIfNeeded();
    935 }
    936 
    937 void V4L2SliceVideoDecodeAccelerator::Dequeue() {
    938   DVLOGF(3);
    939   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
    940 
    941   struct v4l2_buffer dqbuf;
    942   struct v4l2_plane planes[VIDEO_MAX_PLANES];
    943   while (input_buffer_queued_count_ > 0) {
    944     DCHECK(input_streamon_);
    945     memset(&dqbuf, 0, sizeof(dqbuf));
    946     memset(&planes, 0, sizeof(planes));
    947     dqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    948     dqbuf.memory = V4L2_MEMORY_MMAP;
    949     dqbuf.m.planes = planes;
    950     dqbuf.length = input_planes_count_;
    951     if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) {
    952       if (errno == EAGAIN) {
    953         // EAGAIN if we're just out of buffers to dequeue.
    954         break;
    955       }
    956       PLOGF(ERROR) << "ioctl() failed: VIDIOC_DQBUF";
    957       NOTIFY_ERROR(PLATFORM_FAILURE);
    958       return;
    959     }
    960     InputRecord& input_record = input_buffer_map_[dqbuf.index];
    961     DCHECK(input_record.at_device);
    962     input_record.at_device = false;
    963     ReuseInputBuffer(dqbuf.index);
    964     input_buffer_queued_count_--;
    965     DVLOGF(4) << "Dequeued input=" << dqbuf.index
    966               << " count: " << input_buffer_queued_count_;
    967   }
    968 
    969   while (output_buffer_queued_count_ > 0) {
    970     DCHECK(output_streamon_);
    971     memset(&dqbuf, 0, sizeof(dqbuf));
    972     memset(&planes, 0, sizeof(planes));
    973     dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    974     dqbuf.memory =
    975         (output_mode_ == Config::OutputMode::ALLOCATE ? V4L2_MEMORY_MMAP
    976                                                       : V4L2_MEMORY_DMABUF);
    977     dqbuf.m.planes = planes;
    978     dqbuf.length = output_planes_count_;
    979     if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) {
    980       if (errno == EAGAIN) {
    981         // EAGAIN if we're just out of buffers to dequeue.
    982         break;
    983       }
    984       PLOGF(ERROR) << "ioctl() failed: VIDIOC_DQBUF";
    985       NOTIFY_ERROR(PLATFORM_FAILURE);
    986       return;
    987     }
    988     OutputRecord& output_record = output_buffer_map_[dqbuf.index];
    989     DCHECK(output_record.at_device);
    990     output_record.at_device = false;
    991     output_buffer_queued_count_--;
    992     DVLOGF(3) << "Dequeued output=" << dqbuf.index
    993               << " count " << output_buffer_queued_count_;
    994 
    995     V4L2DecodeSurfaceByOutputId::iterator it =
    996         surfaces_at_device_.find(dqbuf.index);
    997     if (it == surfaces_at_device_.end()) {
    998       DLOGF(ERROR) << "Got invalid surface from device.";
     999       NOTIFY_ERROR(PLATFORM_FAILURE);
               // Bail out here to avoid dereferencing an end() iterator below.
               return;
   1000     }
   1001 
   1002     it->second->SetDecoded();
   1003     surfaces_at_device_.erase(it);
   1004   }
   1005 
   1006   // A frame was decoded, see if we can output it.
   1007   TryOutputSurfaces();
   1008 
   1009   ProcessPendingEventsIfNeeded();
   1010   ScheduleDecodeBufferTaskIfNeeded();
   1011 }
   1012 
   1013 void V4L2SliceVideoDecodeAccelerator::NewEventPending() {
   1014   // Switch to event processing mode if we are decoding. Otherwise we are either
   1015   // already in it, or we will potentially switch to it later, after finishing
   1016   // other tasks.
   1017   if (state_ == kDecoding)
   1018     state_ = kIdle;
   1019 
   1020   ProcessPendingEventsIfNeeded();
   1021 }
   1022 
   1023 bool V4L2SliceVideoDecodeAccelerator::FinishEventProcessing() {
   1024   DCHECK_EQ(state_, kIdle);
   1025 
   1026   state_ = kDecoding;
   1027   ScheduleDecodeBufferTaskIfNeeded();
   1028 
   1029   return true;
   1030 }
   1031 
   1032 void V4L2SliceVideoDecodeAccelerator::ProcessPendingEventsIfNeeded() {
   1033   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
   1034 
   1035   // Process pending events, if any, in the correct order.
   1036   // We always first process the surface set change, as it is an internal
   1037   // event from the decoder and interleaving it with external requests would
   1038   // put the decoder in an undefined state.
   1039   using ProcessFunc = bool (V4L2SliceVideoDecodeAccelerator::*)();
   1040   const ProcessFunc process_functions[] = {
   1041       &V4L2SliceVideoDecodeAccelerator::FinishSurfaceSetChange,
   1042       &V4L2SliceVideoDecodeAccelerator::FinishFlush,
   1043       &V4L2SliceVideoDecodeAccelerator::FinishReset,
   1044       &V4L2SliceVideoDecodeAccelerator::FinishEventProcessing,
   1045   };
   1046 
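           // Each handler returns false if its event cannot be completed yet (e.g.
           // surfaces are still at the device), which stops the chain until the next
           // call; FinishEventProcessing() runs last and flips us back to kDecoding
           // once nothing else is pending.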
   1047   for (const auto& fn : process_functions) {
   1048     if (state_ != kIdle)
   1049       return;
   1050 
   1051     if (!(this->*fn)())
   1052       return;
   1053   }
   1054 }
   1055 
   1056 void V4L2SliceVideoDecodeAccelerator::ReuseInputBuffer(int index) {
   1057   DVLOGF(4) << "Reusing input buffer, index=" << index;
   1058   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
   1059 
   1060   DCHECK_LT(index, static_cast<int>(input_buffer_map_.size()));
   1061   InputRecord& input_record = input_buffer_map_[index];
   1062 
   1063   DCHECK(!input_record.at_device);
   1064   input_record.input_id = -1;
   1065   input_record.bytes_used = 0;
   1066 
   1067   DCHECK_EQ(
   1068       std::count(free_input_buffers_.begin(), free_input_buffers_.end(), index),
   1069       0);
   1070   free_input_buffers_.push_back(index);
   1071 }
   1072 
   1073 void V4L2SliceVideoDecodeAccelerator::ReuseOutputBuffer(int index) {
   1074   DVLOGF(4) << "Reusing output buffer, index=" << index;
   1075   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
   1076 
   1077   DCHECK_LT(index, static_cast<int>(output_buffer_map_.size()));
   1078   OutputRecord& output_record = output_buffer_map_[index];
   1079   DCHECK(!output_record.at_device);
   1080   DCHECK(!output_record.at_client);
   1081 
   1082   DCHECK_EQ(std::count(free_output_buffers_.begin(), free_output_buffers_.end(),
   1083                        index),
   1084             0);
   1085   free_output_buffers_.push_back(index);
   1086 
   1087   ScheduleDecodeBufferTaskIfNeeded();
   1088 }
   1089 
   1090 bool V4L2SliceVideoDecodeAccelerator::EnqueueInputRecord(
   1091     int index,
   1092     uint32_t config_store) {
   1093   DVLOGF(3);
   1094   DCHECK_LT(index, static_cast<int>(input_buffer_map_.size()));
   1095   DCHECK_GT(config_store, 0u);
   1096 
   1097   // Enqueue an input (VIDEO_OUTPUT) buffer for an input video frame.
   1098   InputRecord& input_record = input_buffer_map_[index];
   1099   DCHECK(!input_record.at_device);
   1100   struct v4l2_buffer qbuf;
   1101   struct v4l2_plane qbuf_planes[VIDEO_MAX_PLANES];
   1102   memset(&qbuf, 0, sizeof(qbuf));
   1103   memset(qbuf_planes, 0, sizeof(qbuf_planes));
   1104   qbuf.index = index;
   1105   qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
   1106   qbuf.memory = V4L2_MEMORY_MMAP;
   1107   qbuf.m.planes = qbuf_planes;
   1108   qbuf.m.planes[0].bytesused = input_record.bytes_used;
   1109   qbuf.length = input_planes_count_;
   1110   qbuf.config_store = config_store;
   1111   IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf);
   1112   input_record.at_device = true;
   1113   input_buffer_queued_count_++;
   1114   DVLOGF(4) << "Enqueued input=" << qbuf.index
   1115             << " count: " << input_buffer_queued_count_;
   1116 
   1117   return true;
   1118 }
   1119 
   1120 bool V4L2SliceVideoDecodeAccelerator::EnqueueOutputRecord(int index) {
   1121   DVLOGF(3);
   1122   DCHECK_LT(index, static_cast<int>(output_buffer_map_.size()));
   1123 
   1124   // Enqueue an output (VIDEO_CAPTURE) buffer.
   1125   OutputRecord& output_record = output_buffer_map_[index];
   1126   DCHECK(!output_record.at_device);
   1127   DCHECK(!output_record.at_client);
   1128   DCHECK_NE(output_record.picture_id, -1);
   1129 
   1130   struct v4l2_buffer qbuf;
   1131   struct v4l2_plane qbuf_planes[VIDEO_MAX_PLANES];
   1132   memset(&qbuf, 0, sizeof(qbuf));
   1133   memset(qbuf_planes, 0, sizeof(qbuf_planes));
   1134   qbuf.index = index;
   1135   qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
   1136   if (output_mode_ == Config::OutputMode::ALLOCATE) {
   1137     qbuf.memory = V4L2_MEMORY_MMAP;
   1138   } else {
   1139     qbuf.memory = V4L2_MEMORY_DMABUF;
   1140     DCHECK_EQ(output_planes_count_, output_record.dmabuf_fds.size());
   1141     for (size_t i = 0; i < output_record.dmabuf_fds.size(); ++i) {
   1142       DCHECK(output_record.dmabuf_fds[i].is_valid());
   1143       qbuf_planes[i].m.fd = output_record.dmabuf_fds[i].get();
   1144     }
   1145   }
   1146   qbuf.m.planes = qbuf_planes;
   1147   qbuf.length = output_planes_count_;
   1148   IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf);
   1149   output_record.at_device = true;
   1150   output_buffer_queued_count_++;
   1151   DVLOGF(4) << "Enqueued output=" << qbuf.index
   1152             << " count: " << output_buffer_queued_count_;
   1153 
   1154   return true;
   1155 }
   1156 
   1157 bool V4L2SliceVideoDecodeAccelerator::StartDevicePoll() {
   1158   DVLOGF(3) << "Starting device poll";
   1159   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
   1160   DCHECK(!device_poll_thread_.IsRunning());
   1161 
   1162   // Start up the device poll thread and schedule its first DevicePollTask().
   1163   if (!device_poll_thread_.Start()) {
    1164     DLOGF(ERROR) << "Device poll thread failed to start";
   1165     NOTIFY_ERROR(PLATFORM_FAILURE);
   1166     return false;
   1167   }
   1168   if (!input_streamon_) {
   1169     __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
   1170     IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMON, &type);
   1171     input_streamon_ = true;
   1172   }
   1173 
   1174   if (!output_streamon_) {
   1175     __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
   1176     IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMON, &type);
   1177     output_streamon_ = true;
   1178   }
   1179 
   1180   device_poll_thread_.task_runner()->PostTask(
   1181       FROM_HERE, base::Bind(&V4L2SliceVideoDecodeAccelerator::DevicePollTask,
   1182                             base::Unretained(this), true));
   1183 
   1184   return true;
   1185 }
   1186 
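         // |keep_input_state| leaves the input (OUTPUT) queue streaming and its
         // buffers queued; it is used during a surface set change (see
         // FinishSurfaceSetChange()), where only the CAPTURE side needs to be torn
         // down and rebuilt.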
   1187 bool V4L2SliceVideoDecodeAccelerator::StopDevicePoll(bool keep_input_state) {
   1188   DVLOGF(3) << "Stopping device poll";
   1189   if (decoder_thread_.IsRunning())
   1190     DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
   1191 
   1192   // Signal the DevicePollTask() to stop, and stop the device poll thread.
   1193   if (!device_->SetDevicePollInterrupt()) {
   1194     PLOGF(ERROR) << "SetDevicePollInterrupt(): failed";
   1195     NOTIFY_ERROR(PLATFORM_FAILURE);
   1196     return false;
   1197   }
   1198   device_poll_thread_.Stop();
   1199   DVLOGF(3) << "Device poll thread stopped";
   1200 
   1201   // Clear the interrupt now, to be sure.
   1202   if (!device_->ClearDevicePollInterrupt()) {
   1203     NOTIFY_ERROR(PLATFORM_FAILURE);
   1204     return false;
   1205   }
   1206 
   1207   if (!keep_input_state) {
   1208     if (input_streamon_) {
   1209       __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
   1210       IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMOFF, &type);
   1211     }
   1212     input_streamon_ = false;
   1213   }
   1214 
   1215   if (output_streamon_) {
   1216     __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
   1217     IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMOFF, &type);
   1218   }
   1219   output_streamon_ = false;
   1220 
   1221   if (!keep_input_state) {
   1222     for (size_t i = 0; i < input_buffer_map_.size(); ++i) {
   1223       InputRecord& input_record = input_buffer_map_[i];
   1224       if (input_record.at_device) {
   1225         input_record.at_device = false;
   1226         ReuseInputBuffer(i);
   1227         input_buffer_queued_count_--;
   1228       }
   1229     }
   1230     DCHECK_EQ(input_buffer_queued_count_, 0);
   1231   }
   1232 
   1233   // STREAMOFF makes the driver drop all buffers without decoding and DQBUFing,
   1234   // so we mark them all as at_device = false and clear surfaces_at_device_.
   1235   for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
   1236     OutputRecord& output_record = output_buffer_map_[i];
   1237     if (output_record.at_device) {
   1238       output_record.at_device = false;
   1239       output_buffer_queued_count_--;
   1240     }
   1241   }
   1242   surfaces_at_device_.clear();
   1243   DCHECK_EQ(output_buffer_queued_count_, 0);
   1244 
   1245   // Drop all surfaces that were awaiting decode before being displayed,
   1246   // since we've just cancelled all outstanding decodes.
   1247   while (!decoder_display_queue_.empty())
   1248     decoder_display_queue_.pop();
   1249 
   1250   DVLOGF(3) << "Device poll stopped";
   1251   return true;
   1252 }
   1253 
   1254 void V4L2SliceVideoDecodeAccelerator::Decode(
   1255     const BitstreamBuffer& bitstream_buffer) {
   1256   DVLOGF(3) << "input_id=" << bitstream_buffer.id()
   1257             << ", size=" << bitstream_buffer.size();
   1258   DCHECK(decode_task_runner_->BelongsToCurrentThread());
   1259 
   1260   if (bitstream_buffer.id() < 0) {
   1261     LOGF(ERROR) << "Invalid bitstream_buffer, id: " << bitstream_buffer.id();
   1262     if (base::SharedMemory::IsHandleValid(bitstream_buffer.handle()))
   1263       base::SharedMemory::CloseHandle(bitstream_buffer.handle());
   1264     NOTIFY_ERROR(INVALID_ARGUMENT);
   1265     return;
   1266   }
   1267 
   1268   decoder_thread_task_runner_->PostTask(
   1269       FROM_HERE, base::Bind(&V4L2SliceVideoDecodeAccelerator::DecodeTask,
   1270                             base::Unretained(this), bitstream_buffer));
   1271 }
   1272 
   1273 void V4L2SliceVideoDecodeAccelerator::DecodeTask(
   1274     const BitstreamBuffer& bitstream_buffer) {
   1275   DVLOGF(3) << "input_id=" << bitstream_buffer.id()
   1276             << " size=" << bitstream_buffer.size();
   1277   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
   1278 
   1279   std::unique_ptr<BitstreamBufferRef> bitstream_record(new BitstreamBufferRef(
   1280       decode_client_, decode_task_runner_,
   1281       new SharedMemoryRegion(bitstream_buffer, true), bitstream_buffer.id()));
   1282 
   1283   // Skip empty buffer.
   1284   if (bitstream_buffer.size() == 0)
   1285     return;
   1286 
   1287   if (!bitstream_record->shm->Map()) {
   1288     LOGF(ERROR) << "Could not map bitstream_buffer";
   1289     NOTIFY_ERROR(UNREADABLE_INPUT);
   1290     return;
   1291   }
   1292   DVLOGF(3) << "mapped at=" << bitstream_record->shm->memory();
   1293 
   1294   decoder_input_queue_.push(
   1295       linked_ptr<BitstreamBufferRef>(bitstream_record.release()));
   1296 
   1297   ScheduleDecodeBufferTaskIfNeeded();
   1298 }
   1299 
   1300 bool V4L2SliceVideoDecodeAccelerator::TrySetNewBistreamBuffer() {
   1301   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
   1302   DCHECK(!decoder_current_bitstream_buffer_);
   1303 
   1304   if (decoder_input_queue_.empty())
   1305     return false;
   1306 
   1307   decoder_current_bitstream_buffer_.reset(
   1308       decoder_input_queue_.front().release());
   1309   decoder_input_queue_.pop();
   1310 
   1311   if (decoder_current_bitstream_buffer_->input_id == kFlushBufferId) {
   1312     // This is a buffer we queued for ourselves to trigger flush at this time.
   1313     InitiateFlush();
   1314     return false;
   1315   }
   1316 
   1317   const uint8_t* const data = reinterpret_cast<const uint8_t*>(
   1318       decoder_current_bitstream_buffer_->shm->memory());
   1319   const size_t data_size = decoder_current_bitstream_buffer_->shm->size();
   1320   decoder_->SetStream(data, data_size);
   1321 
   1322   return true;
   1323 }
   1324 
   1325 void V4L2SliceVideoDecodeAccelerator::ScheduleDecodeBufferTaskIfNeeded() {
   1326   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
   1327   if (state_ == kDecoding) {
   1328     decoder_thread_task_runner_->PostTask(
   1329         FROM_HERE,
   1330         base::Bind(&V4L2SliceVideoDecodeAccelerator::DecodeBufferTask,
   1331                    base::Unretained(this)));
   1332   }
   1333 }
   1334 
   1335 void V4L2SliceVideoDecodeAccelerator::DecodeBufferTask() {
   1336   DVLOGF(3);
   1337   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
   1338 
   1339   if (state_ != kDecoding) {
   1340     DVLOGF(3) << "Early exit, not in kDecoding";
   1341     return;
   1342   }
   1343 
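             // Run the software decoder until it gives control back: it returns when
             // it needs more input data, more output surfaces, a new surface set, a
             // context update, or when it hits an error; each case is handled below.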
   1344   while (true) {
   1345     AcceleratedVideoDecoder::DecodeResult res;
   1346     res = decoder_->Decode();
   1347     switch (res) {
   1348       case AcceleratedVideoDecoder::kAllocateNewSurfaces:
   1349         DVLOGF(2) << "Decoder requesting a new set of surfaces";
   1350         InitiateSurfaceSetChange();
   1351         return;
   1352 
   1353       case AcceleratedVideoDecoder::kRanOutOfStreamData:
   1354         decoder_current_bitstream_buffer_.reset();
   1355         if (!TrySetNewBistreamBuffer())
   1356           return;
   1357 
   1358         break;
   1359 
   1360       case AcceleratedVideoDecoder::kRanOutOfSurfaces:
   1361         // No more surfaces for the decoder, we'll come back once we have more.
   1362         DVLOGF(4) << "Ran out of surfaces";
   1363         return;
   1364 
   1365       case AcceleratedVideoDecoder::kNeedContextUpdate:
   1366         DVLOGF(4) << "Awaiting context update";
   1367         return;
   1368 
   1369       case AcceleratedVideoDecoder::kDecodeError:
   1370         DVLOGF(1) << "Error decoding stream";
   1371         NOTIFY_ERROR(PLATFORM_FAILURE);
   1372         return;
   1373     }
   1374   }
   1375 }
   1376 
   1377 void V4L2SliceVideoDecodeAccelerator::InitiateSurfaceSetChange() {
   1378   DVLOGF(2);
   1379   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
   1380   DCHECK_EQ(state_, kDecoding);
   1381 
   1382   DCHECK(!surface_set_change_pending_);
   1383   surface_set_change_pending_ = true;
   1384   NewEventPending();
   1385 }
   1386 
   1387 bool V4L2SliceVideoDecodeAccelerator::FinishSurfaceSetChange() {
   1388   DVLOGF(2);
   1389   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
   1390 
   1391   if (!surface_set_change_pending_)
   1392     return true;
   1393 
   1394   if (!surfaces_at_device_.empty())
   1395     return false;
   1396 
   1397   DCHECK_EQ(state_, kIdle);
   1398   DCHECK(decoder_display_queue_.empty());
   1399   // All output buffers should've been returned from decoder and device by now.
   1400   // The only remaining owner of surfaces may be display (client), and we will
   1401   // dismiss them when destroying output buffers below.
   1402   DCHECK_EQ(free_output_buffers_.size() + surfaces_at_display_.size(),
   1403             output_buffer_map_.size());
   1404 
   1405   // Keep input queue running while we switch outputs.
   1406   if (!StopDevicePoll(true)) {
   1407     NOTIFY_ERROR(PLATFORM_FAILURE);
   1408     return false;
   1409   }
   1410 
    1411   // This will return only once all buffers are dismissed and destroyed.
    1412   // It does not, however, wait until they are displayed, as the display (client)
    1413   // retains references to the buffers bound to textures and will release them
    1414   // after displaying.
   1415   if (!DestroyOutputs(true)) {
   1416     NOTIFY_ERROR(PLATFORM_FAILURE);
   1417     return false;
   1418   }
   1419 
   1420   if (!CreateOutputBuffers()) {
   1421     NOTIFY_ERROR(PLATFORM_FAILURE);
   1422     return false;
   1423   }
   1424 
   1425   surface_set_change_pending_ = false;
   1426   DVLOGF(3) << "Surface set change finished";
   1427   return true;
   1428 }
   1429 
   1430 bool V4L2SliceVideoDecodeAccelerator::DestroyOutputs(bool dismiss) {
   1431   DVLOGF(3);
   1432   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
   1433   std::vector<int32_t> picture_buffers_to_dismiss;
   1434 
   1435   if (output_buffer_map_.empty())
   1436     return true;
   1437 
   1438   for (const auto& output_record : output_buffer_map_) {
   1439     DCHECK(!output_record.at_device);
   1440     picture_buffers_to_dismiss.push_back(output_record.picture_id);
   1441   }
   1442 
   1443   if (dismiss) {
   1444     DVLOGF(2) << "Scheduling picture dismissal";
   1445     base::WaitableEvent done(base::WaitableEvent::ResetPolicy::AUTOMATIC,
   1446                              base::WaitableEvent::InitialState::NOT_SIGNALED);
   1447     child_task_runner_->PostTask(
   1448         FROM_HERE, base::Bind(&V4L2SliceVideoDecodeAccelerator::DismissPictures,
   1449                               weak_this_, picture_buffers_to_dismiss, &done));
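             // Wait for the dismissal task to run on the child thread before we go on
             // to destroy the buffers.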
   1450     done.Wait();
   1451   }
   1452 
    1453   // At this point the client can't call ReusePictureBuffer() on any of the
    1454   // pictures anymore, so it's safe to destroy them.
   1455   return DestroyOutputBuffers();
   1456 }
   1457 
   1458 bool V4L2SliceVideoDecodeAccelerator::DestroyOutputBuffers() {
   1459   DVLOGF(3);
   1460   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread() ||
   1461          !decoder_thread_.IsRunning());
   1462   DCHECK(!output_streamon_);
   1463   DCHECK(surfaces_at_device_.empty());
   1464   DCHECK(decoder_display_queue_.empty());
   1465   DCHECK_EQ(surfaces_at_display_.size() + free_output_buffers_.size(),
   1466             output_buffer_map_.size());
   1467 
   1468   if (output_buffer_map_.empty())
   1469     return true;
   1470 
    1471   // It's ok to do this; the client will retain references to the textures, but
    1472   // we are not interested in reusing the surfaces anymore.
    1473   // This prevents us from reusing old surfaces in case some ReusePictureBuffer()
    1474   // calls are already pending on the ChildThread. It's ok to ignore them,
    1475   // because we have already dismissed them (in DestroyOutputs()).
   1476   for (const auto& surface_at_display : surfaces_at_display_) {
   1477     size_t index = surface_at_display.second->output_record();
   1478     DCHECK_LT(index, output_buffer_map_.size());
   1479     OutputRecord& output_record = output_buffer_map_[index];
   1480     DCHECK(output_record.at_client);
   1481     output_record.at_client = false;
   1482   }
   1483   surfaces_at_display_.clear();
   1484   DCHECK_EQ(free_output_buffers_.size(), output_buffer_map_.size());
   1485 
   1486   free_output_buffers_.clear();
   1487   output_buffer_map_.clear();
   1488 
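           // Asking for 0 buffers with VIDIOC_REQBUFS frees the CAPTURE buffers on the
           // device side.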
   1489   struct v4l2_requestbuffers reqbufs;
   1490   memset(&reqbufs, 0, sizeof(reqbufs));
   1491   reqbufs.count = 0;
   1492   reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
   1493   reqbufs.memory = V4L2_MEMORY_MMAP;
   1494   IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs);
   1495 
   1496   return true;
   1497 }
   1498 
   1499 void V4L2SliceVideoDecodeAccelerator::AssignPictureBuffers(
   1500     const std::vector<PictureBuffer>& buffers) {
   1501   DVLOGF(3);
   1502   DCHECK(child_task_runner_->BelongsToCurrentThread());
   1503 
   1504   decoder_thread_task_runner_->PostTask(
   1505       FROM_HERE,
   1506       base::Bind(&V4L2SliceVideoDecodeAccelerator::AssignPictureBuffersTask,
   1507                  base::Unretained(this), buffers));
   1508 }
   1509 
   1510 void V4L2SliceVideoDecodeAccelerator::AssignPictureBuffersTask(
   1511     const std::vector<PictureBuffer>& buffers) {
   1512   DVLOGF(3);
   1513   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
   1514   DCHECK_EQ(state_, kAwaitingPictureBuffers);
   1515 
   1516   const uint32_t req_buffer_count = decoder_->GetRequiredNumOfPictures();
   1517 
   1518   if (buffers.size() < req_buffer_count) {
   1519     DLOG(ERROR) << "Failed to provide requested picture buffers. "
   1520                 << "(Got " << buffers.size()
   1521                 << ", requested " << req_buffer_count << ")";
   1522     NOTIFY_ERROR(INVALID_ARGUMENT);
   1523     return;
   1524   }
   1525 
   1526   // Allocate the output buffers.
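           // MMAP memory is used when we allocate the buffers ourselves, DMABUF when
           // the client imports them.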
   1527   struct v4l2_requestbuffers reqbufs;
   1528   memset(&reqbufs, 0, sizeof(reqbufs));
   1529   reqbufs.count = buffers.size();
   1530   reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
   1531   reqbufs.memory =
   1532       (output_mode_ == Config::OutputMode::ALLOCATE ? V4L2_MEMORY_MMAP
   1533                                                     : V4L2_MEMORY_DMABUF);
   1534   IOCTL_OR_ERROR_RETURN(VIDIOC_REQBUFS, &reqbufs);
   1535 
   1536   if (reqbufs.count != buffers.size()) {
   1537     DLOGF(ERROR) << "Could not allocate enough output buffers";
   1538     NOTIFY_ERROR(PLATFORM_FAILURE);
   1539     return;
   1540   }
   1541 
   1542   DCHECK(free_output_buffers_.empty());
   1543   DCHECK(output_buffer_map_.empty());
   1544   output_buffer_map_.resize(buffers.size());
   1545   for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
   1546     DCHECK(buffers[i].size() == coded_size_);
   1547 
   1548     OutputRecord& output_record = output_buffer_map_[i];
   1549     DCHECK(!output_record.at_device);
   1550     DCHECK(!output_record.at_client);
   1551     DCHECK_EQ(output_record.picture_id, -1);
   1552     DCHECK(output_record.dmabuf_fds.empty());
   1553     DCHECK_EQ(output_record.cleared, false);
   1554 
   1555     output_record.picture_id = buffers[i].id();
   1556 
   1557     // This will remain true until ImportBufferForPicture is called, either by
   1558     // the client, or by ourselves, if we are allocating.
   1559     output_record.at_client = true;
   1560     if (output_mode_ == Config::OutputMode::ALLOCATE) {
   1561       std::vector<base::ScopedFD> dmabuf_fds = device_->GetDmabufsForV4L2Buffer(
   1562           i, output_planes_count_, V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE);
   1563       if (dmabuf_fds.empty()) {
   1564         NOTIFY_ERROR(PLATFORM_FAILURE);
   1565         return;
   1566       }
   1567 
   1568       auto passed_dmabuf_fds(base::WrapUnique(
   1569           new std::vector<base::ScopedFD>(std::move(dmabuf_fds))));
   1570       ImportBufferForPictureTask(output_record.picture_id,
   1571                                  std::move(passed_dmabuf_fds));
   1572     }  // else we'll get triggered via ImportBufferForPicture() from client.
   1573     DVLOGF(3) << "buffer[" << i << "]: picture_id=" << output_record.picture_id;
   1574   }
   1575 
   1576   if (!StartDevicePoll()) {
   1577     NOTIFY_ERROR(PLATFORM_FAILURE);
   1578     return;
   1579   }
   1580 
   1581   // Put us in kIdle to allow further event processing.
   1582   // ProcessPendingEventsIfNeeded() will put us back into kDecoding after all
   1583   // other pending events are processed successfully.
   1584   state_ = kIdle;
   1585   ProcessPendingEventsIfNeeded();
   1586 }
   1587 
   1588 void V4L2SliceVideoDecodeAccelerator::ImportBufferForPicture(
   1589     int32_t picture_buffer_id,
   1590     const std::vector<base::FileDescriptor>& dmabuf_fds) {
   1591   DVLOGF(3) << "picture_buffer_id=" << picture_buffer_id;
   1592   DCHECK(child_task_runner_->BelongsToCurrentThread());
   1593 
   1594   auto passed_dmabuf_fds(base::WrapUnique(new std::vector<base::ScopedFD>()));
   1595   for (const auto& fd : dmabuf_fds) {
   1596     DCHECK_NE(fd.fd, -1);
   1597     passed_dmabuf_fds->push_back(base::ScopedFD(fd.fd));
   1598   }
   1599 
   1600   if (output_mode_ != Config::OutputMode::IMPORT) {
   1601     LOGF(ERROR) << "Cannot import in non-import mode";
   1602     NOTIFY_ERROR(INVALID_ARGUMENT);
   1603     return;
   1604   }
   1605 
   1606   decoder_thread_task_runner_->PostTask(
   1607       FROM_HERE,
   1608       base::Bind(&V4L2SliceVideoDecodeAccelerator::ImportBufferForPictureTask,
   1609                  base::Unretained(this), picture_buffer_id,
   1610                  base::Passed(&passed_dmabuf_fds)));
   1611 }
   1612 
   1613 void V4L2SliceVideoDecodeAccelerator::ImportBufferForPictureTask(
   1614     int32_t picture_buffer_id,
   1615     std::unique_ptr<std::vector<base::ScopedFD>> passed_dmabuf_fds) {
   1616   DVLOGF(3) << "picture_buffer_id=" << picture_buffer_id;
   1617   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
   1618 
   1619   const auto iter =
   1620       std::find_if(output_buffer_map_.begin(), output_buffer_map_.end(),
   1621                    [picture_buffer_id](const OutputRecord& output_record) {
   1622                      return output_record.picture_id == picture_buffer_id;
   1623                    });
   1624   if (iter == output_buffer_map_.end()) {
   1625     // It's possible that we've already posted a DismissPictureBuffer for this
   1626     // picture, but it has not yet executed when this ImportBufferForPicture was
   1627     // posted to us by the client. In that case just ignore this (we've already
   1628     // dismissed it and accounted for that).
   1629     DVLOGF(3) << "got picture id=" << picture_buffer_id
   1630               << " not in use (anymore?).";
   1631     return;
   1632   }
   1633 
   1634   if (!iter->at_client) {
    1635     LOGF(ERROR) << "Cannot import a buffer not owned by the client";
   1636     NOTIFY_ERROR(INVALID_ARGUMENT);
   1637     return;
   1638   }
   1639 
   1640   size_t index = iter - output_buffer_map_.begin();
   1641   DCHECK_EQ(std::count(free_output_buffers_.begin(), free_output_buffers_.end(),
   1642                        index),
   1643             0);
   1644 
   1645   DCHECK(!iter->at_device);
   1646   iter->at_client = false;
   1647 
   1648   DCHECK_EQ(output_planes_count_, passed_dmabuf_fds->size());
   1649   iter->dmabuf_fds.swap(*passed_dmabuf_fds);
   1650   free_output_buffers_.push_back(index);
   1651   ScheduleDecodeBufferTaskIfNeeded();
   1652 }
   1653 
   1654 void V4L2SliceVideoDecodeAccelerator::ReusePictureBuffer(
   1655     int32_t picture_buffer_id) {
   1656   DCHECK(child_task_runner_->BelongsToCurrentThread());
   1657   DVLOGF(4) << "picture_buffer_id=" << picture_buffer_id;
   1658 
   1659   decoder_thread_task_runner_->PostTask(
   1660       FROM_HERE,
   1661       base::Bind(&V4L2SliceVideoDecodeAccelerator::ReusePictureBufferTask,
   1662                  base::Unretained(this), picture_buffer_id));
   1663 }
   1664 
   1665 void V4L2SliceVideoDecodeAccelerator::ReusePictureBufferTask(
   1666     int32_t picture_buffer_id) {
   1667   DVLOGF(3) << "picture_buffer_id=" << picture_buffer_id;
   1668   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
   1669 
   1670   V4L2DecodeSurfaceByPictureBufferId::iterator it =
   1671       surfaces_at_display_.find(picture_buffer_id);
   1672   if (it == surfaces_at_display_.end()) {
   1673     // It's possible that we've already posted a DismissPictureBuffer for this
   1674     // picture, but it has not yet executed when this ReusePictureBuffer was
   1675     // posted to us by the client. In that case just ignore this (we've already
   1676     // dismissed it and accounted for that) and let the sync object get
   1677     // destroyed.
   1678     DVLOGF(3) << "got picture id=" << picture_buffer_id
   1679               << " not in use (anymore?).";
   1680     return;
   1681   }
   1682 
   1683   OutputRecord& output_record = output_buffer_map_[it->second->output_record()];
   1684   if (output_record.at_device || !output_record.at_client) {
   1685     DVLOGF(1) << "picture_buffer_id not reusable";
   1686     NOTIFY_ERROR(INVALID_ARGUMENT);
   1687     return;
   1688   }
   1689 
   1690   DCHECK(!output_record.at_device);
   1691   output_record.at_client = false;
   1692 
   1693   surfaces_at_display_.erase(it);
   1694 }
   1695 
   1696 void V4L2SliceVideoDecodeAccelerator::Flush() {
   1697   DVLOGF(3);
   1698   DCHECK(child_task_runner_->BelongsToCurrentThread());
   1699 
   1700   decoder_thread_task_runner_->PostTask(
   1701       FROM_HERE, base::Bind(&V4L2SliceVideoDecodeAccelerator::FlushTask,
   1702                             base::Unretained(this)));
   1703 }
   1704 
   1705 void V4L2SliceVideoDecodeAccelerator::FlushTask() {
   1706   DVLOGF(3);
   1707   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
   1708 
    1709   // Queue an empty buffer which, when reached, will trigger the flush sequence.
   1710   decoder_input_queue_.push(
   1711       linked_ptr<BitstreamBufferRef>(new BitstreamBufferRef(
   1712           decode_client_, decode_task_runner_, nullptr, kFlushBufferId)));
   1713 
   1714   ScheduleDecodeBufferTaskIfNeeded();
   1715 }
   1716 
   1717 void V4L2SliceVideoDecodeAccelerator::InitiateFlush() {
   1718   DVLOGF(3);
   1719   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
   1720 
    1721   // This will trigger output for all remaining surfaces in the decoder.
    1722   // However, not all of them may be decoded yet (those would still be queued
    1723   // in hardware).
   1724   if (!decoder_->Flush()) {
   1725     DVLOGF(1) << "Failed flushing the decoder.";
   1726     NOTIFY_ERROR(PLATFORM_FAILURE);
   1727     return;
   1728   }
   1729 
   1730   // Put the decoder in an idle state, ready to resume.
   1731   decoder_->Reset();
   1732 
   1733   DCHECK(!decoder_flushing_);
   1734   decoder_flushing_ = true;
   1735   NewEventPending();
   1736 }
   1737 
   1738 bool V4L2SliceVideoDecodeAccelerator::FinishFlush() {
   1739   DVLOGF(3);
   1740   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
   1741 
   1742   if (!decoder_flushing_)
   1743     return true;
   1744 
   1745   if (!surfaces_at_device_.empty())
   1746     return false;
   1747 
   1748   DCHECK_EQ(state_, kIdle);
   1749 
   1750   // At this point, all remaining surfaces are decoded and dequeued, and since
   1751   // we have already scheduled output for them in InitiateFlush(), their
   1752   // respective PictureReady calls have been posted (or they have been queued on
   1753   // pending_picture_ready_). So at this time, once we SendPictureReady(),
   1754   // we will have all remaining PictureReady() posted to the client and we
   1755   // can post NotifyFlushDone().
   1756   DCHECK(decoder_display_queue_.empty());
   1757 
   1758   // Decoder should have already returned all surfaces and all surfaces are
   1759   // out of hardware. There can be no other owners of input buffers.
   1760   DCHECK_EQ(free_input_buffers_.size(), input_buffer_map_.size());
   1761 
   1762   SendPictureReady();
   1763 
   1764   decoder_flushing_ = false;
   1765   DVLOGF(3) << "Flush finished";
   1766 
   1767   child_task_runner_->PostTask(FROM_HERE,
   1768                                base::Bind(&Client::NotifyFlushDone, client_));
   1769 
   1770   return true;
   1771 }
   1772 
   1773 void V4L2SliceVideoDecodeAccelerator::Reset() {
   1774   DVLOGF(3);
   1775   DCHECK(child_task_runner_->BelongsToCurrentThread());
   1776 
   1777   decoder_thread_task_runner_->PostTask(
   1778       FROM_HERE, base::Bind(&V4L2SliceVideoDecodeAccelerator::ResetTask,
   1779                             base::Unretained(this)));
   1780 }
   1781 
   1782 void V4L2SliceVideoDecodeAccelerator::ResetTask() {
   1783   DVLOGF(3);
   1784   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
   1785 
   1786   if (decoder_resetting_) {
    1787     // This is a bug in the client; multiple Reset()s before NotifyResetDone()
    1788     // are not allowed.
   1789     NOTREACHED() << "Client should not be requesting multiple Reset()s";
   1790     return;
   1791   }
   1792 
   1793   // Put the decoder in an idle state, ready to resume.
   1794   decoder_->Reset();
   1795 
   1796   // Drop all remaining inputs.
   1797   decoder_current_bitstream_buffer_.reset();
   1798   while (!decoder_input_queue_.empty())
   1799     decoder_input_queue_.pop();
   1800 
   1801   decoder_resetting_ = true;
   1802   NewEventPending();
   1803 }
   1804 
   1805 bool V4L2SliceVideoDecodeAccelerator::FinishReset() {
   1806   DVLOGF(3);
   1807   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
   1808 
   1809   if (!decoder_resetting_)
   1810     return true;
   1811 
   1812   if (!surfaces_at_device_.empty())
   1813     return false;
   1814 
   1815   DCHECK_EQ(state_, kIdle);
   1816   DCHECK(!decoder_flushing_);
   1817   SendPictureReady();
   1818 
   1819   // Drop any pending outputs.
   1820   while (!decoder_display_queue_.empty())
   1821     decoder_display_queue_.pop();
   1822 
    1823   // At this point we can have no input buffers in the decoder: we Reset() it
    1824   // in ResetTask() and, having been in kIdle since, have not scheduled any
    1825   // new Decode()s. We don't have any surfaces in the HW either; we just
    1826   // checked that surfaces_at_device_ is empty, and inputs are tied to
    1827   // surfaces. Since there can be no other owners of input buffers, we can
    1828   // simply mark them all as available.
   1829   DCHECK_EQ(input_buffer_queued_count_, 0);
   1830   free_input_buffers_.clear();
   1831   for (size_t i = 0; i < input_buffer_map_.size(); ++i) {
   1832     DCHECK(!input_buffer_map_[i].at_device);
   1833     ReuseInputBuffer(i);
   1834   }
   1835 
   1836   decoder_resetting_ = false;
   1837   DVLOGF(3) << "Reset finished";
   1838 
   1839   child_task_runner_->PostTask(FROM_HERE,
   1840                                base::Bind(&Client::NotifyResetDone, client_));
   1841 
   1842   return true;
   1843 }
   1844 
   1845 void V4L2SliceVideoDecodeAccelerator::SetErrorState(Error error) {
    1846   // We can touch state_ only if this is the decoder thread or the
    1847   // decoder thread isn't running.
   1848   if (decoder_thread_.IsRunning() &&
   1849       !decoder_thread_task_runner_->BelongsToCurrentThread()) {
   1850     decoder_thread_task_runner_->PostTask(
   1851         FROM_HERE, base::Bind(&V4L2SliceVideoDecodeAccelerator::SetErrorState,
   1852                               base::Unretained(this), error));
   1853     return;
   1854   }
   1855 
   1856   // Post NotifyError only if we are already initialized, as the API does
   1857   // not allow doing so before that.
   1858   if (state_ != kError && state_ != kUninitialized)
   1859     NotifyError(error);
   1860 
   1861   state_ = kError;
   1862 }
   1863 
   1864 V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::V4L2H264Accelerator(
   1865     V4L2SliceVideoDecodeAccelerator* v4l2_dec)
   1866     : num_slices_(0), v4l2_dec_(v4l2_dec) {
   1867   DCHECK(v4l2_dec_);
   1868 }
   1869 
   1870 V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::~V4L2H264Accelerator() {}
   1871 
   1872 scoped_refptr<H264Picture>
   1873 V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::CreateH264Picture() {
   1874   scoped_refptr<V4L2DecodeSurface> dec_surface = v4l2_dec_->CreateSurface();
   1875   if (!dec_surface)
   1876     return nullptr;
   1877 
   1878   return new V4L2H264Picture(dec_surface);
   1879 }
   1880 
   1881 void V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::
   1882     H264PictureListToDPBIndicesList(const H264Picture::Vector& src_pic_list,
   1883                                     uint8_t dst_list[kDPBIndicesListSize]) {
   1884   size_t i;
   1885   for (i = 0; i < src_pic_list.size() && i < kDPBIndicesListSize; ++i) {
   1886     const scoped_refptr<H264Picture>& pic = src_pic_list[i];
   1887     dst_list[i] = pic ? pic->dpb_position : VIDEO_MAX_FRAME;
   1888   }
   1889 
   1890   while (i < kDPBIndicesListSize)
   1891     dst_list[i++] = VIDEO_MAX_FRAME;
   1892 }
   1893 
   1894 void V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::H264DPBToV4L2DPB(
   1895     const H264DPB& dpb,
   1896     std::vector<scoped_refptr<V4L2DecodeSurface>>* ref_surfaces) {
   1897   memset(v4l2_decode_param_.dpb, 0, sizeof(v4l2_decode_param_.dpb));
   1898   size_t i = 0;
   1899   for (const auto& pic : dpb) {
   1900     if (i >= arraysize(v4l2_decode_param_.dpb)) {
   1901       DVLOGF(1) << "Invalid DPB size";
   1902       break;
   1903     }
   1904 
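             // VIDEO_MAX_FRAME marks a DPB entry that does not refer to a real surface.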
   1905     int index = VIDEO_MAX_FRAME;
   1906     if (!pic->nonexisting) {
   1907       scoped_refptr<V4L2DecodeSurface> dec_surface =
   1908           H264PictureToV4L2DecodeSurface(pic);
   1909       index = dec_surface->output_record();
   1910       ref_surfaces->push_back(dec_surface);
   1911     }
   1912 
   1913     struct v4l2_h264_dpb_entry& entry = v4l2_decode_param_.dpb[i++];
   1914     entry.buf_index = index;
   1915     entry.frame_num = pic->frame_num;
   1916     entry.pic_num = pic->pic_num;
   1917     entry.top_field_order_cnt = pic->top_field_order_cnt;
   1918     entry.bottom_field_order_cnt = pic->bottom_field_order_cnt;
   1919     entry.flags = (pic->ref ? V4L2_H264_DPB_ENTRY_FLAG_ACTIVE : 0) |
   1920                   (pic->long_term ? V4L2_H264_DPB_ENTRY_FLAG_LONG_TERM : 0);
   1921   }
   1922 }
   1923 
   1924 bool V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::SubmitFrameMetadata(
   1925     const H264SPS* sps,
   1926     const H264PPS* pps,
   1927     const H264DPB& dpb,
   1928     const H264Picture::Vector& ref_pic_listp0,
   1929     const H264Picture::Vector& ref_pic_listb0,
   1930     const H264Picture::Vector& ref_pic_listb1,
   1931     const scoped_refptr<H264Picture>& pic) {
   1932   struct v4l2_ext_control ctrl;
   1933   std::vector<struct v4l2_ext_control> ctrls;
   1934 
   1935   struct v4l2_ctrl_h264_sps v4l2_sps;
   1936   memset(&v4l2_sps, 0, sizeof(v4l2_sps));
   1937   v4l2_sps.constraint_set_flags =
   1938       (sps->constraint_set0_flag ? V4L2_H264_SPS_CONSTRAINT_SET0_FLAG : 0) |
   1939       (sps->constraint_set1_flag ? V4L2_H264_SPS_CONSTRAINT_SET1_FLAG : 0) |
   1940       (sps->constraint_set2_flag ? V4L2_H264_SPS_CONSTRAINT_SET2_FLAG : 0) |
   1941       (sps->constraint_set3_flag ? V4L2_H264_SPS_CONSTRAINT_SET3_FLAG : 0) |
   1942       (sps->constraint_set4_flag ? V4L2_H264_SPS_CONSTRAINT_SET4_FLAG : 0) |
   1943       (sps->constraint_set5_flag ? V4L2_H264_SPS_CONSTRAINT_SET5_FLAG : 0);
   1944 #define SPS_TO_V4L2SPS(a) v4l2_sps.a = sps->a
   1945   SPS_TO_V4L2SPS(profile_idc);
   1946   SPS_TO_V4L2SPS(level_idc);
   1947   SPS_TO_V4L2SPS(seq_parameter_set_id);
   1948   SPS_TO_V4L2SPS(chroma_format_idc);
   1949   SPS_TO_V4L2SPS(bit_depth_luma_minus8);
   1950   SPS_TO_V4L2SPS(bit_depth_chroma_minus8);
   1951   SPS_TO_V4L2SPS(log2_max_frame_num_minus4);
   1952   SPS_TO_V4L2SPS(pic_order_cnt_type);
   1953   SPS_TO_V4L2SPS(log2_max_pic_order_cnt_lsb_minus4);
   1954   SPS_TO_V4L2SPS(offset_for_non_ref_pic);
   1955   SPS_TO_V4L2SPS(offset_for_top_to_bottom_field);
   1956   SPS_TO_V4L2SPS(num_ref_frames_in_pic_order_cnt_cycle);
   1957 
   1958   static_assert(arraysize(v4l2_sps.offset_for_ref_frame) ==
   1959                     arraysize(sps->offset_for_ref_frame),
   1960                 "offset_for_ref_frame arrays must be same size");
   1961   for (size_t i = 0; i < arraysize(v4l2_sps.offset_for_ref_frame); ++i)
   1962     v4l2_sps.offset_for_ref_frame[i] = sps->offset_for_ref_frame[i];
   1963   SPS_TO_V4L2SPS(max_num_ref_frames);
   1964   SPS_TO_V4L2SPS(pic_width_in_mbs_minus1);
   1965   SPS_TO_V4L2SPS(pic_height_in_map_units_minus1);
   1966 #undef SPS_TO_V4L2SPS
   1967 
   1968 #define SET_V4L2_SPS_FLAG_IF(cond, flag) \
   1969   v4l2_sps.flags |= ((sps->cond) ? (flag) : 0)
   1970   SET_V4L2_SPS_FLAG_IF(separate_colour_plane_flag,
   1971                        V4L2_H264_SPS_FLAG_SEPARATE_COLOUR_PLANE);
   1972   SET_V4L2_SPS_FLAG_IF(qpprime_y_zero_transform_bypass_flag,
   1973                        V4L2_H264_SPS_FLAG_QPPRIME_Y_ZERO_TRANSFORM_BYPASS);
   1974   SET_V4L2_SPS_FLAG_IF(delta_pic_order_always_zero_flag,
   1975                        V4L2_H264_SPS_FLAG_DELTA_PIC_ORDER_ALWAYS_ZERO);
   1976   SET_V4L2_SPS_FLAG_IF(gaps_in_frame_num_value_allowed_flag,
   1977                        V4L2_H264_SPS_FLAG_GAPS_IN_FRAME_NUM_VALUE_ALLOWED);
   1978   SET_V4L2_SPS_FLAG_IF(frame_mbs_only_flag, V4L2_H264_SPS_FLAG_FRAME_MBS_ONLY);
   1979   SET_V4L2_SPS_FLAG_IF(mb_adaptive_frame_field_flag,
   1980                        V4L2_H264_SPS_FLAG_MB_ADAPTIVE_FRAME_FIELD);
   1981   SET_V4L2_SPS_FLAG_IF(direct_8x8_inference_flag,
   1982                        V4L2_H264_SPS_FLAG_DIRECT_8X8_INFERENCE);
   1983 #undef SET_V4L2_SPS_FLAG_IF
   1984   memset(&ctrl, 0, sizeof(ctrl));
   1985   ctrl.id = V4L2_CID_MPEG_VIDEO_H264_SPS;
   1986   ctrl.size = sizeof(v4l2_sps);
   1987   ctrl.p_h264_sps = &v4l2_sps;
   1988   ctrls.push_back(ctrl);
   1989 
   1990   struct v4l2_ctrl_h264_pps v4l2_pps;
   1991   memset(&v4l2_pps, 0, sizeof(v4l2_pps));
   1992 #define PPS_TO_V4L2PPS(a) v4l2_pps.a = pps->a
   1993   PPS_TO_V4L2PPS(pic_parameter_set_id);
   1994   PPS_TO_V4L2PPS(seq_parameter_set_id);
   1995   PPS_TO_V4L2PPS(num_slice_groups_minus1);
   1996   PPS_TO_V4L2PPS(num_ref_idx_l0_default_active_minus1);
   1997   PPS_TO_V4L2PPS(num_ref_idx_l1_default_active_minus1);
   1998   PPS_TO_V4L2PPS(weighted_bipred_idc);
   1999   PPS_TO_V4L2PPS(pic_init_qp_minus26);
   2000   PPS_TO_V4L2PPS(pic_init_qs_minus26);
   2001   PPS_TO_V4L2PPS(chroma_qp_index_offset);
   2002   PPS_TO_V4L2PPS(second_chroma_qp_index_offset);
   2003 #undef PPS_TO_V4L2PPS
   2004 
   2005 #define SET_V4L2_PPS_FLAG_IF(cond, flag) \
   2006   v4l2_pps.flags |= ((pps->cond) ? (flag) : 0)
   2007   SET_V4L2_PPS_FLAG_IF(entropy_coding_mode_flag,
   2008                        V4L2_H264_PPS_FLAG_ENTROPY_CODING_MODE);
   2009   SET_V4L2_PPS_FLAG_IF(
   2010       bottom_field_pic_order_in_frame_present_flag,
   2011       V4L2_H264_PPS_FLAG_BOTTOM_FIELD_PIC_ORDER_IN_FRAME_PRESENT);
   2012   SET_V4L2_PPS_FLAG_IF(weighted_pred_flag, V4L2_H264_PPS_FLAG_WEIGHTED_PRED);
   2013   SET_V4L2_PPS_FLAG_IF(deblocking_filter_control_present_flag,
   2014                        V4L2_H264_PPS_FLAG_DEBLOCKING_FILTER_CONTROL_PRESENT);
   2015   SET_V4L2_PPS_FLAG_IF(constrained_intra_pred_flag,
   2016                        V4L2_H264_PPS_FLAG_CONSTRAINED_INTRA_PRED);
   2017   SET_V4L2_PPS_FLAG_IF(redundant_pic_cnt_present_flag,
   2018                        V4L2_H264_PPS_FLAG_REDUNDANT_PIC_CNT_PRESENT);
   2019   SET_V4L2_PPS_FLAG_IF(transform_8x8_mode_flag,
   2020                        V4L2_H264_PPS_FLAG_TRANSFORM_8X8_MODE);
   2021   SET_V4L2_PPS_FLAG_IF(pic_scaling_matrix_present_flag,
   2022                        V4L2_H264_PPS_FLAG_PIC_SCALING_MATRIX_PRESENT);
   2023 #undef SET_V4L2_PPS_FLAG_IF
   2024   memset(&ctrl, 0, sizeof(ctrl));
   2025   ctrl.id = V4L2_CID_MPEG_VIDEO_H264_PPS;
   2026   ctrl.size = sizeof(v4l2_pps);
   2027   ctrl.p_h264_pps = &v4l2_pps;
   2028   ctrls.push_back(ctrl);
   2029 
   2030   struct v4l2_ctrl_h264_scaling_matrix v4l2_scaling_matrix;
   2031   memset(&v4l2_scaling_matrix, 0, sizeof(v4l2_scaling_matrix));
   2032 
   2033   static_assert(arraysize(v4l2_scaling_matrix.scaling_list_4x4) <=
   2034                         arraysize(pps->scaling_list4x4) &&
   2035                     arraysize(v4l2_scaling_matrix.scaling_list_4x4[0]) <=
   2036                         arraysize(pps->scaling_list4x4[0]) &&
   2037                     arraysize(v4l2_scaling_matrix.scaling_list_8x8) <=
   2038                         arraysize(pps->scaling_list8x8) &&
   2039                     arraysize(v4l2_scaling_matrix.scaling_list_8x8[0]) <=
   2040                         arraysize(pps->scaling_list8x8[0]),
   2041                 "scaling_lists must be of correct size");
   2042   static_assert(arraysize(v4l2_scaling_matrix.scaling_list_4x4) <=
   2043                         arraysize(sps->scaling_list4x4) &&
   2044                     arraysize(v4l2_scaling_matrix.scaling_list_4x4[0]) <=
   2045                         arraysize(sps->scaling_list4x4[0]) &&
   2046                     arraysize(v4l2_scaling_matrix.scaling_list_8x8) <=
   2047                         arraysize(sps->scaling_list8x8) &&
   2048                     arraysize(v4l2_scaling_matrix.scaling_list_8x8[0]) <=
   2049                         arraysize(sps->scaling_list8x8[0]),
   2050                 "scaling_lists must be of correct size");
   2051 
   2052   const auto* scaling_list4x4 = &sps->scaling_list4x4[0];
   2053   const auto* scaling_list8x8 = &sps->scaling_list8x8[0];
   2054   if (pps->pic_scaling_matrix_present_flag) {
   2055     scaling_list4x4 = &pps->scaling_list4x4[0];
   2056     scaling_list8x8 = &pps->scaling_list8x8[0];
   2057   }
   2058 
   2059   for (size_t i = 0; i < arraysize(v4l2_scaling_matrix.scaling_list_4x4); ++i) {
   2060     for (size_t j = 0; j < arraysize(v4l2_scaling_matrix.scaling_list_4x4[i]);
   2061          ++j) {
   2062       v4l2_scaling_matrix.scaling_list_4x4[i][j] = scaling_list4x4[i][j];
   2063     }
   2064   }
   2065   for (size_t i = 0; i < arraysize(v4l2_scaling_matrix.scaling_list_8x8); ++i) {
   2066     for (size_t j = 0; j < arraysize(v4l2_scaling_matrix.scaling_list_8x8[i]);
   2067          ++j) {
   2068       v4l2_scaling_matrix.scaling_list_8x8[i][j] = scaling_list8x8[i][j];
   2069     }
   2070   }
   2071 
   2072   memset(&ctrl, 0, sizeof(ctrl));
   2073   ctrl.id = V4L2_CID_MPEG_VIDEO_H264_SCALING_MATRIX;
   2074   ctrl.size = sizeof(v4l2_scaling_matrix);
   2075   ctrl.p_h264_scal_mtrx = &v4l2_scaling_matrix;
   2076   ctrls.push_back(ctrl);
   2077 
   2078   scoped_refptr<V4L2DecodeSurface> dec_surface =
   2079       H264PictureToV4L2DecodeSurface(pic);
   2080 
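           // Tag the SPS/PPS/scaling matrix controls with this surface's config store
           // so the driver applies them to the matching buffer.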
   2081   struct v4l2_ext_controls ext_ctrls;
   2082   memset(&ext_ctrls, 0, sizeof(ext_ctrls));
   2083   ext_ctrls.count = ctrls.size();
   2084   ext_ctrls.controls = &ctrls[0];
   2085   ext_ctrls.config_store = dec_surface->config_store();
   2086   v4l2_dec_->SubmitExtControls(&ext_ctrls);
   2087 
   2088   H264PictureListToDPBIndicesList(ref_pic_listp0,
   2089                                   v4l2_decode_param_.ref_pic_list_p0);
   2090   H264PictureListToDPBIndicesList(ref_pic_listb0,
   2091                                   v4l2_decode_param_.ref_pic_list_b0);
   2092   H264PictureListToDPBIndicesList(ref_pic_listb1,
   2093                                   v4l2_decode_param_.ref_pic_list_b1);
   2094 
   2095   std::vector<scoped_refptr<V4L2DecodeSurface>> ref_surfaces;
   2096   H264DPBToV4L2DPB(dpb, &ref_surfaces);
   2097   dec_surface->SetReferenceSurfaces(ref_surfaces);
   2098 
   2099   return true;
   2100 }
   2101 
   2102 bool V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::SubmitSlice(
   2103     const H264PPS* pps,
   2104     const H264SliceHeader* slice_hdr,
   2105     const H264Picture::Vector& ref_pic_list0,
   2106     const H264Picture::Vector& ref_pic_list1,
   2107     const scoped_refptr<H264Picture>& pic,
   2108     const uint8_t* data,
   2109     size_t size) {
   2110   if (num_slices_ == kMaxSlices) {
    2111     LOGF(ERROR) << "Over the limit of supported slices per frame";
   2112     return false;
   2113   }
   2114 
   2115   struct v4l2_ctrl_h264_slice_param& v4l2_slice_param =
   2116       v4l2_slice_params_[num_slices_++];
   2117   memset(&v4l2_slice_param, 0, sizeof(v4l2_slice_param));
   2118 
   2119   v4l2_slice_param.size = size;
   2120 #define SHDR_TO_V4L2SPARM(a) v4l2_slice_param.a = slice_hdr->a
   2121   SHDR_TO_V4L2SPARM(header_bit_size);
   2122   SHDR_TO_V4L2SPARM(first_mb_in_slice);
   2123   SHDR_TO_V4L2SPARM(slice_type);
   2124   SHDR_TO_V4L2SPARM(pic_parameter_set_id);
   2125   SHDR_TO_V4L2SPARM(colour_plane_id);
   2126   SHDR_TO_V4L2SPARM(frame_num);
   2127   SHDR_TO_V4L2SPARM(idr_pic_id);
   2128   SHDR_TO_V4L2SPARM(pic_order_cnt_lsb);
   2129   SHDR_TO_V4L2SPARM(delta_pic_order_cnt_bottom);
   2130   SHDR_TO_V4L2SPARM(delta_pic_order_cnt0);
   2131   SHDR_TO_V4L2SPARM(delta_pic_order_cnt1);
   2132   SHDR_TO_V4L2SPARM(redundant_pic_cnt);
   2133   SHDR_TO_V4L2SPARM(dec_ref_pic_marking_bit_size);
   2134   SHDR_TO_V4L2SPARM(cabac_init_idc);
   2135   SHDR_TO_V4L2SPARM(slice_qp_delta);
   2136   SHDR_TO_V4L2SPARM(slice_qs_delta);
   2137   SHDR_TO_V4L2SPARM(disable_deblocking_filter_idc);
   2138   SHDR_TO_V4L2SPARM(slice_alpha_c0_offset_div2);
   2139   SHDR_TO_V4L2SPARM(slice_beta_offset_div2);
   2140   SHDR_TO_V4L2SPARM(num_ref_idx_l0_active_minus1);
   2141   SHDR_TO_V4L2SPARM(num_ref_idx_l1_active_minus1);
   2142   SHDR_TO_V4L2SPARM(pic_order_cnt_bit_size);
   2143 #undef SHDR_TO_V4L2SPARM
   2144 
   2145 #define SET_V4L2_SPARM_FLAG_IF(cond, flag) \
   2146   v4l2_slice_param.flags |= ((slice_hdr->cond) ? (flag) : 0)
   2147   SET_V4L2_SPARM_FLAG_IF(field_pic_flag, V4L2_SLICE_FLAG_FIELD_PIC);
   2148   SET_V4L2_SPARM_FLAG_IF(bottom_field_flag, V4L2_SLICE_FLAG_BOTTOM_FIELD);
   2149   SET_V4L2_SPARM_FLAG_IF(direct_spatial_mv_pred_flag,
   2150                          V4L2_SLICE_FLAG_DIRECT_SPATIAL_MV_PRED);
   2151   SET_V4L2_SPARM_FLAG_IF(sp_for_switch_flag, V4L2_SLICE_FLAG_SP_FOR_SWITCH);
   2152 #undef SET_V4L2_SPARM_FLAG_IF
   2153 
   2154   struct v4l2_h264_pred_weight_table* pred_weight_table =
   2155       &v4l2_slice_param.pred_weight_table;
   2156 
   2157   if (((slice_hdr->IsPSlice() || slice_hdr->IsSPSlice()) &&
   2158        pps->weighted_pred_flag) ||
   2159       (slice_hdr->IsBSlice() && pps->weighted_bipred_idc == 1)) {
   2160     pred_weight_table->luma_log2_weight_denom =
   2161         slice_hdr->luma_log2_weight_denom;
   2162     pred_weight_table->chroma_log2_weight_denom =
   2163         slice_hdr->chroma_log2_weight_denom;
   2164 
   2165     struct v4l2_h264_weight_factors* factorsl0 =
   2166         &pred_weight_table->weight_factors[0];
   2167 
   2168     for (int i = 0; i < 32; ++i) {
   2169       factorsl0->luma_weight[i] =
   2170           slice_hdr->pred_weight_table_l0.luma_weight[i];
   2171       factorsl0->luma_offset[i] =
   2172           slice_hdr->pred_weight_table_l0.luma_offset[i];
   2173 
   2174       for (int j = 0; j < 2; ++j) {
   2175         factorsl0->chroma_weight[i][j] =
   2176             slice_hdr->pred_weight_table_l0.chroma_weight[i][j];
   2177         factorsl0->chroma_offset[i][j] =
   2178             slice_hdr->pred_weight_table_l0.chroma_offset[i][j];
   2179       }
   2180     }
   2181 
   2182     if (slice_hdr->IsBSlice()) {
   2183       struct v4l2_h264_weight_factors* factorsl1 =
   2184           &pred_weight_table->weight_factors[1];
   2185 
   2186       for (int i = 0; i < 32; ++i) {
   2187         factorsl1->luma_weight[i] =
   2188             slice_hdr->pred_weight_table_l1.luma_weight[i];
   2189         factorsl1->luma_offset[i] =
   2190             slice_hdr->pred_weight_table_l1.luma_offset[i];
   2191 
   2192         for (int j = 0; j < 2; ++j) {
   2193           factorsl1->chroma_weight[i][j] =
   2194               slice_hdr->pred_weight_table_l1.chroma_weight[i][j];
   2195           factorsl1->chroma_offset[i][j] =
   2196               slice_hdr->pred_weight_table_l1.chroma_offset[i][j];
   2197         }
   2198       }
   2199     }
   2200   }
   2201 
   2202   H264PictureListToDPBIndicesList(ref_pic_list0,
   2203                                   v4l2_slice_param.ref_pic_list0);
   2204   H264PictureListToDPBIndicesList(ref_pic_list1,
   2205                                   v4l2_slice_param.ref_pic_list1);
   2206 
   2207   scoped_refptr<V4L2DecodeSurface> dec_surface =
   2208       H264PictureToV4L2DecodeSurface(pic);
   2209 
   2210   v4l2_decode_param_.nal_ref_idc = slice_hdr->nal_ref_idc;
   2211 
   2212   // TODO(posciak): Don't add start code back here, but have it passed from
   2213   // the parser.
   2214   size_t data_copy_size = size + 3;
   2215   std::unique_ptr<uint8_t[]> data_copy(new uint8_t[data_copy_size]);
   2216   memset(data_copy.get(), 0, data_copy_size);
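           // Prepend an Annex-B start code (00 00 01) before the slice data.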
   2217   data_copy[2] = 0x01;
   2218   memcpy(data_copy.get() + 3, data, size);
   2219   return v4l2_dec_->SubmitSlice(dec_surface->input_record(), data_copy.get(),
   2220                                 data_copy_size);
   2221 }
   2222 
   2223 bool V4L2SliceVideoDecodeAccelerator::SubmitSlice(int index,
   2224                                                   const uint8_t* data,
   2225                                                   size_t size) {
   2226   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
   2227 
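           // Slices belonging to the same frame are appended into a single input
           // buffer, so bytes_used accumulates across SubmitSlice() calls.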
   2228   InputRecord& input_record = input_buffer_map_[index];
   2229 
   2230   if (input_record.bytes_used + size > input_record.length) {
   2231     DVLOGF(1) << "Input buffer too small";
   2232     return false;
   2233   }
   2234 
   2235   memcpy(static_cast<uint8_t*>(input_record.address) + input_record.bytes_used,
   2236          data, size);
   2237   input_record.bytes_used += size;
   2238 
   2239   return true;
   2240 }
   2241 
   2242 bool V4L2SliceVideoDecodeAccelerator::SubmitExtControls(
   2243     struct v4l2_ext_controls* ext_ctrls) {
   2244   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
   2245   DCHECK_GT(ext_ctrls->config_store, 0u);
   2246   IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_EXT_CTRLS, ext_ctrls);
   2247   return true;
   2248 }
   2249 
   2250 bool V4L2SliceVideoDecodeAccelerator::GetExtControls(
   2251     struct v4l2_ext_controls* ext_ctrls) {
   2252   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
   2253   DCHECK_GT(ext_ctrls->config_store, 0u);
   2254   IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_G_EXT_CTRLS, ext_ctrls);
   2255   return true;
   2256 }
   2257 
   2258 bool V4L2SliceVideoDecodeAccelerator::IsCtrlExposed(uint32_t ctrl_id) {
   2259   struct v4l2_queryctrl query_ctrl;
   2260   memset(&query_ctrl, 0, sizeof(query_ctrl));
   2261   query_ctrl.id = ctrl_id;
   2262 
   2263   return (device_->Ioctl(VIDIOC_QUERYCTRL, &query_ctrl) == 0);
   2264 }
   2265 
   2266 bool V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::SubmitDecode(
   2267     const scoped_refptr<H264Picture>& pic) {
   2268   scoped_refptr<V4L2DecodeSurface> dec_surface =
   2269       H264PictureToV4L2DecodeSurface(pic);
   2270 
   2271   v4l2_decode_param_.num_slices = num_slices_;
   2272   v4l2_decode_param_.idr_pic_flag = pic->idr;
   2273   v4l2_decode_param_.top_field_order_cnt = pic->top_field_order_cnt;
   2274   v4l2_decode_param_.bottom_field_order_cnt = pic->bottom_field_order_cnt;
   2275 
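           // Submit the accumulated per-slice parameters and the per-frame decode
           // parameters, tagged with this surface's config store.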
   2276   struct v4l2_ext_control ctrl;
   2277   std::vector<struct v4l2_ext_control> ctrls;
   2278 
   2279   memset(&ctrl, 0, sizeof(ctrl));
   2280   ctrl.id = V4L2_CID_MPEG_VIDEO_H264_SLICE_PARAM;
   2281   ctrl.size = sizeof(v4l2_slice_params_);
   2282   ctrl.p_h264_slice_param = v4l2_slice_params_;
   2283   ctrls.push_back(ctrl);
   2284 
   2285   memset(&ctrl, 0, sizeof(ctrl));
   2286   ctrl.id = V4L2_CID_MPEG_VIDEO_H264_DECODE_PARAM;
   2287   ctrl.size = sizeof(v4l2_decode_param_);
   2288   ctrl.p_h264_decode_param = &v4l2_decode_param_;
   2289   ctrls.push_back(ctrl);
   2290 
   2291   struct v4l2_ext_controls ext_ctrls;
   2292   memset(&ext_ctrls, 0, sizeof(ext_ctrls));
   2293   ext_ctrls.count = ctrls.size();
   2294   ext_ctrls.controls = &ctrls[0];
   2295   ext_ctrls.config_store = dec_surface->config_store();
   2296   if (!v4l2_dec_->SubmitExtControls(&ext_ctrls))
   2297     return false;
   2298 
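           // Clear the per-frame state (slice count and parameter structs) now that
           // the controls for this frame have been submitted.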
   2299   Reset();
   2300 
   2301   v4l2_dec_->DecodeSurface(dec_surface);
   2302   return true;
   2303 }
   2304 
   2305 bool V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::OutputPicture(
   2306     const scoped_refptr<H264Picture>& pic) {
   2307   scoped_refptr<V4L2DecodeSurface> dec_surface =
   2308       H264PictureToV4L2DecodeSurface(pic);
   2309   v4l2_dec_->SurfaceReady(dec_surface);
   2310   return true;
   2311 }
   2312 
   2313 void V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::Reset() {
   2314   num_slices_ = 0;
   2315   memset(&v4l2_decode_param_, 0, sizeof(v4l2_decode_param_));
   2316   memset(&v4l2_slice_params_, 0, sizeof(v4l2_slice_params_));
   2317 }
   2318 
   2319 scoped_refptr<V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>
   2320 V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::
   2321     H264PictureToV4L2DecodeSurface(const scoped_refptr<H264Picture>& pic) {
   2322   V4L2H264Picture* v4l2_pic = pic->AsV4L2H264Picture();
   2323   CHECK(v4l2_pic);
   2324   return v4l2_pic->dec_surface();
   2325 }
   2326 
   2327 V4L2SliceVideoDecodeAccelerator::V4L2VP8Accelerator::V4L2VP8Accelerator(
   2328     V4L2SliceVideoDecodeAccelerator* v4l2_dec)
   2329     : v4l2_dec_(v4l2_dec) {
   2330   DCHECK(v4l2_dec_);
   2331 }
   2332 
   2333 V4L2SliceVideoDecodeAccelerator::V4L2VP8Accelerator::~V4L2VP8Accelerator() {}
   2334 
   2335 scoped_refptr<VP8Picture>
   2336 V4L2SliceVideoDecodeAccelerator::V4L2VP8Accelerator::CreateVP8Picture() {
   2337   scoped_refptr<V4L2DecodeSurface> dec_surface = v4l2_dec_->CreateSurface();
   2338   if (!dec_surface)
   2339     return nullptr;
   2340 
   2341   return new V4L2VP8Picture(dec_surface);
   2342 }
   2343 
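         // Copy an entire array, statically asserting that source and destination are
         // of the same size.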
   2344 #define ARRAY_MEMCPY_CHECKED(to, from)                               \
   2345   do {                                                               \
   2346     static_assert(sizeof(to) == sizeof(from),                        \
   2347                   #from " and " #to " arrays must be of same size"); \
   2348     memcpy(to, from, sizeof(to));                                    \
   2349   } while (0)
   2350 
   2351 static void FillV4L2SegmentationHeader(
   2352     const Vp8SegmentationHeader& vp8_sgmnt_hdr,
   2353     struct v4l2_vp8_sgmnt_hdr* v4l2_sgmnt_hdr) {
   2354 #define SET_V4L2_SGMNT_HDR_FLAG_IF(cond, flag) \
   2355   v4l2_sgmnt_hdr->flags |= ((vp8_sgmnt_hdr.cond) ? (flag) : 0)
   2356   SET_V4L2_SGMNT_HDR_FLAG_IF(segmentation_enabled,
   2357                              V4L2_VP8_SEGMNT_HDR_FLAG_ENABLED);
   2358   SET_V4L2_SGMNT_HDR_FLAG_IF(update_mb_segmentation_map,
   2359                              V4L2_VP8_SEGMNT_HDR_FLAG_UPDATE_MAP);
   2360   SET_V4L2_SGMNT_HDR_FLAG_IF(update_segment_feature_data,
   2361                              V4L2_VP8_SEGMNT_HDR_FLAG_UPDATE_FEATURE_DATA);
    2362 #undef SET_V4L2_SGMNT_HDR_FLAG_IF
   2363   v4l2_sgmnt_hdr->segment_feature_mode = vp8_sgmnt_hdr.segment_feature_mode;
   2364 
   2365   ARRAY_MEMCPY_CHECKED(v4l2_sgmnt_hdr->quant_update,
   2366                        vp8_sgmnt_hdr.quantizer_update_value);
   2367   ARRAY_MEMCPY_CHECKED(v4l2_sgmnt_hdr->lf_update,
   2368                        vp8_sgmnt_hdr.lf_update_value);
   2369   ARRAY_MEMCPY_CHECKED(v4l2_sgmnt_hdr->segment_probs,
   2370                        vp8_sgmnt_hdr.segment_prob);
   2371 }
   2372 
   2373 static void FillV4L2LoopfilterHeader(
   2374     const Vp8LoopFilterHeader& vp8_loopfilter_hdr,
   2375     struct v4l2_vp8_loopfilter_hdr* v4l2_lf_hdr) {
   2376 #define SET_V4L2_LF_HDR_FLAG_IF(cond, flag) \
   2377   v4l2_lf_hdr->flags |= ((vp8_loopfilter_hdr.cond) ? (flag) : 0)
   2378   SET_V4L2_LF_HDR_FLAG_IF(loop_filter_adj_enable, V4L2_VP8_LF_HDR_ADJ_ENABLE);
   2379   SET_V4L2_LF_HDR_FLAG_IF(mode_ref_lf_delta_update,
   2380                           V4L2_VP8_LF_HDR_DELTA_UPDATE);
    2381 #undef SET_V4L2_LF_HDR_FLAG_IF
   2382 
   2383 #define LF_HDR_TO_V4L2_LF_HDR(a) v4l2_lf_hdr->a = vp8_loopfilter_hdr.a;
   2384   LF_HDR_TO_V4L2_LF_HDR(type);
   2385   LF_HDR_TO_V4L2_LF_HDR(level);
   2386   LF_HDR_TO_V4L2_LF_HDR(sharpness_level);
   2387 #undef LF_HDR_TO_V4L2_LF_HDR
   2388 
   2389   ARRAY_MEMCPY_CHECKED(v4l2_lf_hdr->ref_frm_delta_magnitude,
   2390                        vp8_loopfilter_hdr.ref_frame_delta);
   2391   ARRAY_MEMCPY_CHECKED(v4l2_lf_hdr->mb_mode_delta_magnitude,
   2392                        vp8_loopfilter_hdr.mb_mode_delta);
   2393 }
   2394 
   2395 static void FillV4L2QuantizationHeader(
   2396     const Vp8QuantizationHeader& vp8_quant_hdr,
   2397     struct v4l2_vp8_quantization_hdr* v4l2_quant_hdr) {
   2398   v4l2_quant_hdr->y_ac_qi = vp8_quant_hdr.y_ac_qi;
   2399   v4l2_quant_hdr->y_dc_delta = vp8_quant_hdr.y_dc_delta;
   2400   v4l2_quant_hdr->y2_dc_delta = vp8_quant_hdr.y2_dc_delta;
   2401   v4l2_quant_hdr->y2_ac_delta = vp8_quant_hdr.y2_ac_delta;
   2402   v4l2_quant_hdr->uv_dc_delta = vp8_quant_hdr.uv_dc_delta;
   2403   v4l2_quant_hdr->uv_ac_delta = vp8_quant_hdr.uv_ac_delta;
   2404 }
   2405 
   2406 static void FillV4L2Vp8EntropyHeader(
   2407     const Vp8EntropyHeader& vp8_entropy_hdr,
   2408     struct v4l2_vp8_entropy_hdr* v4l2_entropy_hdr) {
   2409   ARRAY_MEMCPY_CHECKED(v4l2_entropy_hdr->coeff_probs,
   2410                        vp8_entropy_hdr.coeff_probs);
   2411   ARRAY_MEMCPY_CHECKED(v4l2_entropy_hdr->y_mode_probs,
   2412                        vp8_entropy_hdr.y_mode_probs);
   2413   ARRAY_MEMCPY_CHECKED(v4l2_entropy_hdr->uv_mode_probs,
   2414                        vp8_entropy_hdr.uv_mode_probs);
   2415   ARRAY_MEMCPY_CHECKED(v4l2_entropy_hdr->mv_probs, vp8_entropy_hdr.mv_probs);
   2416 }
   2417 
   2418 bool V4L2SliceVideoDecodeAccelerator::V4L2VP8Accelerator::SubmitDecode(
   2419     const scoped_refptr<VP8Picture>& pic,
   2420     const Vp8FrameHeader* frame_hdr,
   2421     const scoped_refptr<VP8Picture>& last_frame,
   2422     const scoped_refptr<VP8Picture>& golden_frame,
   2423     const scoped_refptr<VP8Picture>& alt_frame) {
   2424   struct v4l2_ctrl_vp8_frame_hdr v4l2_frame_hdr;
   2425   memset(&v4l2_frame_hdr, 0, sizeof(v4l2_frame_hdr));
   2426 
   2427 #define FHDR_TO_V4L2_FHDR(a) v4l2_frame_hdr.a = frame_hdr->a
   2428   FHDR_TO_V4L2_FHDR(key_frame);
   2429   FHDR_TO_V4L2_FHDR(version);
   2430   FHDR_TO_V4L2_FHDR(width);
   2431   FHDR_TO_V4L2_FHDR(horizontal_scale);
   2432   FHDR_TO_V4L2_FHDR(height);
   2433   FHDR_TO_V4L2_FHDR(vertical_scale);
   2434   FHDR_TO_V4L2_FHDR(sign_bias_golden);
   2435   FHDR_TO_V4L2_FHDR(sign_bias_alternate);
   2436   FHDR_TO_V4L2_FHDR(prob_skip_false);
   2437   FHDR_TO_V4L2_FHDR(prob_intra);
   2438   FHDR_TO_V4L2_FHDR(prob_last);
   2439   FHDR_TO_V4L2_FHDR(prob_gf);
   2440   FHDR_TO_V4L2_FHDR(bool_dec_range);
   2441   FHDR_TO_V4L2_FHDR(bool_dec_value);
   2442   FHDR_TO_V4L2_FHDR(bool_dec_count);
   2443 #undef FHDR_TO_V4L2_FHDR
   2444 
   2445 #define SET_V4L2_FRM_HDR_FLAG_IF(cond, flag) \
   2446   v4l2_frame_hdr.flags |= ((frame_hdr->cond) ? (flag) : 0)
   2447   SET_V4L2_FRM_HDR_FLAG_IF(is_experimental,
   2448                            V4L2_VP8_FRAME_HDR_FLAG_EXPERIMENTAL);
   2449   SET_V4L2_FRM_HDR_FLAG_IF(show_frame, V4L2_VP8_FRAME_HDR_FLAG_SHOW_FRAME);
   2450   SET_V4L2_FRM_HDR_FLAG_IF(mb_no_skip_coeff,
   2451                            V4L2_VP8_FRAME_HDR_FLAG_MB_NO_SKIP_COEFF);
   2452 #undef SET_V4L2_FRM_HDR_FLAG_IF
   2453 
   2454   FillV4L2SegmentationHeader(frame_hdr->segmentation_hdr,
   2455                              &v4l2_frame_hdr.sgmnt_hdr);
   2456 
   2457   FillV4L2LoopfilterHeader(frame_hdr->loopfilter_hdr, &v4l2_frame_hdr.lf_hdr);
   2458 
   2459   FillV4L2QuantizationHeader(frame_hdr->quantization_hdr,
   2460                              &v4l2_frame_hdr.quant_hdr);
   2461 
   2462   FillV4L2Vp8EntropyHeader(frame_hdr->entropy_hdr, &v4l2_frame_hdr.entropy_hdr);
   2463 
   2464   v4l2_frame_hdr.first_part_size =
   2465       base::checked_cast<__u32>(frame_hdr->first_part_size);
   2466   v4l2_frame_hdr.first_part_offset =
   2467       base::checked_cast<__u32>(frame_hdr->first_part_offset);
   2468   v4l2_frame_hdr.macroblock_bit_offset =
   2469       base::checked_cast<__u32>(frame_hdr->macroblock_bit_offset);
   2470   v4l2_frame_hdr.num_dct_parts = frame_hdr->num_of_dct_partitions;
   2471 
   2472   static_assert(arraysize(v4l2_frame_hdr.dct_part_sizes) ==
   2473                     arraysize(frame_hdr->dct_partition_sizes),
   2474                 "DCT partition size arrays must have equal number of elements");
   2475   for (size_t i = 0; i < frame_hdr->num_of_dct_partitions &&
   2476                      i < arraysize(v4l2_frame_hdr.dct_part_sizes);
   2477        ++i)
   2478     v4l2_frame_hdr.dct_part_sizes[i] = frame_hdr->dct_partition_sizes[i];
   2479 
   2480   scoped_refptr<V4L2DecodeSurface> dec_surface =
   2481       VP8PictureToV4L2DecodeSurface(pic);
   2482   std::vector<scoped_refptr<V4L2DecodeSurface>> ref_surfaces;
   2483 
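           // For each reference frame, pass the output record of its surface, or
           // VIDEO_MAX_FRAME if that reference is not available.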
   2484   if (last_frame) {
   2485     scoped_refptr<V4L2DecodeSurface> last_frame_surface =
   2486         VP8PictureToV4L2DecodeSurface(last_frame);
   2487     v4l2_frame_hdr.last_frame = last_frame_surface->output_record();
   2488     ref_surfaces.push_back(last_frame_surface);
   2489   } else {
   2490     v4l2_frame_hdr.last_frame = VIDEO_MAX_FRAME;
   2491   }
   2492 
   2493   if (golden_frame) {
   2494     scoped_refptr<V4L2DecodeSurface> golden_frame_surface =
   2495         VP8PictureToV4L2DecodeSurface(golden_frame);
   2496     v4l2_frame_hdr.golden_frame = golden_frame_surface->output_record();
   2497     ref_surfaces.push_back(golden_frame_surface);
   2498   } else {
   2499     v4l2_frame_hdr.golden_frame = VIDEO_MAX_FRAME;
   2500   }
   2501 
   2502   if (alt_frame) {
   2503     scoped_refptr<V4L2DecodeSurface> alt_frame_surface =
   2504         VP8PictureToV4L2DecodeSurface(alt_frame);
   2505     v4l2_frame_hdr.alt_frame = alt_frame_surface->output_record();
   2506     ref_surfaces.push_back(alt_frame_surface);
   2507   } else {
   2508     v4l2_frame_hdr.alt_frame = VIDEO_MAX_FRAME;
   2509   }
   2510 
   2511   struct v4l2_ext_control ctrl;
   2512   memset(&ctrl, 0, sizeof(ctrl));
   2513   ctrl.id = V4L2_CID_MPEG_VIDEO_VP8_FRAME_HDR;
   2514   ctrl.size = sizeof(v4l2_frame_hdr);
   2515   ctrl.p_vp8_frame_hdr = &v4l2_frame_hdr;
   2516 
   2517   struct v4l2_ext_controls ext_ctrls;
   2518   memset(&ext_ctrls, 0, sizeof(ext_ctrls));
   2519   ext_ctrls.count = 1;
   2520   ext_ctrls.controls = &ctrl;
   2521   ext_ctrls.config_store = dec_surface->config_store();
   2522 
   2523   if (!v4l2_dec_->SubmitExtControls(&ext_ctrls))
   2524     return false;
   2525 
   2526   dec_surface->SetReferenceSurfaces(ref_surfaces);
   2527 
   2528   if (!v4l2_dec_->SubmitSlice(dec_surface->input_record(), frame_hdr->data,
   2529                               frame_hdr->frame_size))
   2530     return false;
   2531 
   2532   v4l2_dec_->DecodeSurface(dec_surface);
   2533   return true;
   2534 }
   2535 
   2536 bool V4L2SliceVideoDecodeAccelerator::V4L2VP8Accelerator::OutputPicture(
   2537     const scoped_refptr<VP8Picture>& pic) {
   2538   scoped_refptr<V4L2DecodeSurface> dec_surface =
   2539       VP8PictureToV4L2DecodeSurface(pic);
   2540 
   2541   v4l2_dec_->SurfaceReady(dec_surface);
   2542   return true;
   2543 }
   2544 
   2545 scoped_refptr<V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>
   2546 V4L2SliceVideoDecodeAccelerator::V4L2VP8Accelerator::
   2547     VP8PictureToV4L2DecodeSurface(const scoped_refptr<VP8Picture>& pic) {
   2548   V4L2VP8Picture* v4l2_pic = pic->AsV4L2VP8Picture();
   2549   CHECK(v4l2_pic);
   2550   return v4l2_pic->dec_surface();
   2551 }
   2552 
   2553 V4L2SliceVideoDecodeAccelerator::V4L2VP9Accelerator::V4L2VP9Accelerator(
   2554     V4L2SliceVideoDecodeAccelerator* v4l2_dec)
   2555     : v4l2_dec_(v4l2_dec) {
   2556   DCHECK(v4l2_dec_);
   2557 
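           // If the device exposes the VP9 entropy control, it expects userspace to
           // parse and provide the frame context (entropy data) for each frame.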
   2558   device_needs_frame_context_ =
   2559       v4l2_dec_->IsCtrlExposed(V4L2_CID_MPEG_VIDEO_VP9_ENTROPY);
   2560   DVLOG_IF(1, device_needs_frame_context_)
   2561       << "Device requires frame context parsing";
   2562 }
   2563 
   2564 V4L2SliceVideoDecodeAccelerator::V4L2VP9Accelerator::~V4L2VP9Accelerator() {}
   2565 
   2566 scoped_refptr<VP9Picture>
   2567 V4L2SliceVideoDecodeAccelerator::V4L2VP9Accelerator::CreateVP9Picture() {
   2568   scoped_refptr<V4L2DecodeSurface> dec_surface = v4l2_dec_->CreateSurface();
   2569   if (!dec_surface)
   2570     return nullptr;
   2571 
   2572   return new V4L2VP9Picture(dec_surface);
   2573 }
   2574 
   2575 static void FillV4L2VP9LoopFilterParams(
   2576     const Vp9LoopFilterParams& vp9_lf_params,
   2577     struct v4l2_vp9_loop_filter_params* v4l2_lf_params) {
   2578 #define SET_LF_PARAMS_FLAG_IF(cond, flag) \
   2579   v4l2_lf_params->flags |= ((vp9_lf_params.cond) ? (flag) : 0)
   2580   SET_LF_PARAMS_FLAG_IF(delta_enabled, V4L2_VP9_LOOP_FLTR_FLAG_DELTA_ENABLED);
   2581   SET_LF_PARAMS_FLAG_IF(delta_update, V4L2_VP9_LOOP_FLTR_FLAG_DELTA_UPDATE);
   2582 #undef SET_LF_PARAMS_FLAG_IF
   2583 
   2584   v4l2_lf_params->level = vp9_lf_params.level;
   2585   v4l2_lf_params->sharpness = vp9_lf_params.sharpness;
   2586 
   2587   ARRAY_MEMCPY_CHECKED(v4l2_lf_params->deltas, vp9_lf_params.ref_deltas);
   2588   ARRAY_MEMCPY_CHECKED(v4l2_lf_params->mode_deltas, vp9_lf_params.mode_deltas);
   2589   ARRAY_MEMCPY_CHECKED(v4l2_lf_params->lvl_lookup, vp9_lf_params.lvl);
   2590 }
   2591 
   2592 static void FillV4L2VP9QuantizationParams(
   2593     const Vp9QuantizationParams& vp9_quant_params,
   2594     struct v4l2_vp9_quantization_params* v4l2_q_params) {
   2595 #define SET_Q_PARAMS_FLAG_IF(cond, flag) \
   2596   v4l2_q_params->flags |= ((vp9_quant_params.cond) ? (flag) : 0)
   2597   SET_Q_PARAMS_FLAG_IF(IsLossless(), V4L2_VP9_QUANT_PARAMS_FLAG_LOSSLESS);
   2598 #undef SET_Q_PARAMS_FLAG_IF
   2599 
   2600 #define Q_PARAMS_TO_V4L2_Q_PARAMS(a) v4l2_q_params->a = vp9_quant_params.a
   2601   Q_PARAMS_TO_V4L2_Q_PARAMS(base_q_idx);
   2602   Q_PARAMS_TO_V4L2_Q_PARAMS(delta_q_y_dc);
   2603   Q_PARAMS_TO_V4L2_Q_PARAMS(delta_q_uv_dc);
   2604   Q_PARAMS_TO_V4L2_Q_PARAMS(delta_q_uv_ac);
   2605 #undef Q_PARAMS_TO_V4L2_Q_PARAMS
   2606 }
   2607 
   2608 static void FillV4L2VP9SegmentationParams(
   2609     const Vp9SegmentationParams& vp9_segm_params,
   2610     struct v4l2_vp9_segmentation_params* v4l2_segm_params) {
   2611 #define SET_SEG_PARAMS_FLAG_IF(cond, flag) \
   2612   v4l2_segm_params->flags |= ((vp9_segm_params.cond) ? (flag) : 0)
   2613   SET_SEG_PARAMS_FLAG_IF(enabled, V4L2_VP9_SGMNT_PARAM_FLAG_ENABLED);
   2614   SET_SEG_PARAMS_FLAG_IF(update_map, V4L2_VP9_SGMNT_PARAM_FLAG_UPDATE_MAP);
   2615   SET_SEG_PARAMS_FLAG_IF(temporal_update,
   2616                          V4L2_VP9_SGMNT_PARAM_FLAG_TEMPORAL_UPDATE);
   2617   SET_SEG_PARAMS_FLAG_IF(update_data, V4L2_VP9_SGMNT_PARAM_FLAG_UPDATE_DATA);
   2618   SET_SEG_PARAMS_FLAG_IF(abs_or_delta_update,
   2619                          V4L2_VP9_SGMNT_PARAM_FLAG_ABS_OR_DELTA_UPDATE);
   2620 #undef SET_SEG_PARAMS_FLAG_IF
   2621 
   2622   ARRAY_MEMCPY_CHECKED(v4l2_segm_params->tree_probs,
   2623                        vp9_segm_params.tree_probs);
   2624   ARRAY_MEMCPY_CHECKED(v4l2_segm_params->pred_probs,
   2625                        vp9_segm_params.pred_probs);
   2626   ARRAY_MEMCPY_CHECKED(v4l2_segm_params->feature_data,
   2627                        vp9_segm_params.feature_data);
   2628 
   2629   static_assert(arraysize(v4l2_segm_params->feature_enabled) ==
   2630                         arraysize(vp9_segm_params.feature_enabled) &&
   2631                     arraysize(v4l2_segm_params->feature_enabled[0]) ==
   2632                         arraysize(vp9_segm_params.feature_enabled[0]),
   2633                 "feature_enabled arrays must be of same size");
   2634   for (size_t i = 0; i < arraysize(v4l2_segm_params->feature_enabled); ++i) {
   2635     for (size_t j = 0; j < arraysize(v4l2_segm_params->feature_enabled[i]);
   2636          ++j) {
   2637       v4l2_segm_params->feature_enabled[i][j] =
   2638           vp9_segm_params.feature_enabled[i][j];
   2639     }
   2640   }
   2641 }
   2642 
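        // Copy a full set of VP9 probability tables from a Vp9FrameContext
        // into the v4l2_vp9_entropy_ctx structure, field by field.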
   2643 static void FillV4L2Vp9EntropyContext(
   2644     const Vp9FrameContext& vp9_frame_ctx,
   2645     struct v4l2_vp9_entropy_ctx* v4l2_entropy_ctx) {
   2646 #define ARRAY_MEMCPY_CHECKED_FRM_CTX_TO_V4L2_ENTR(a) \
   2647   ARRAY_MEMCPY_CHECKED(v4l2_entropy_ctx->a, vp9_frame_ctx.a)
   2648   ARRAY_MEMCPY_CHECKED_FRM_CTX_TO_V4L2_ENTR(tx_probs_8x8);
   2649   ARRAY_MEMCPY_CHECKED_FRM_CTX_TO_V4L2_ENTR(tx_probs_16x16);
   2650   ARRAY_MEMCPY_CHECKED_FRM_CTX_TO_V4L2_ENTR(tx_probs_32x32);
   2651 
   2652   ARRAY_MEMCPY_CHECKED_FRM_CTX_TO_V4L2_ENTR(coef_probs);
   2653   ARRAY_MEMCPY_CHECKED_FRM_CTX_TO_V4L2_ENTR(skip_prob);
   2654   ARRAY_MEMCPY_CHECKED_FRM_CTX_TO_V4L2_ENTR(inter_mode_probs);
   2655   ARRAY_MEMCPY_CHECKED_FRM_CTX_TO_V4L2_ENTR(interp_filter_probs);
   2656   ARRAY_MEMCPY_CHECKED_FRM_CTX_TO_V4L2_ENTR(is_inter_prob);
   2657 
   2658   ARRAY_MEMCPY_CHECKED_FRM_CTX_TO_V4L2_ENTR(comp_mode_prob);
   2659   ARRAY_MEMCPY_CHECKED_FRM_CTX_TO_V4L2_ENTR(single_ref_prob);
   2660   ARRAY_MEMCPY_CHECKED_FRM_CTX_TO_V4L2_ENTR(comp_ref_prob);
   2661 
   2662   ARRAY_MEMCPY_CHECKED_FRM_CTX_TO_V4L2_ENTR(y_mode_probs);
   2663   ARRAY_MEMCPY_CHECKED_FRM_CTX_TO_V4L2_ENTR(uv_mode_probs);
   2664 
   2665   ARRAY_MEMCPY_CHECKED_FRM_CTX_TO_V4L2_ENTR(partition_probs);
   2666 
   2667   ARRAY_MEMCPY_CHECKED_FRM_CTX_TO_V4L2_ENTR(mv_joint_probs);
   2668   ARRAY_MEMCPY_CHECKED_FRM_CTX_TO_V4L2_ENTR(mv_sign_prob);
   2669   ARRAY_MEMCPY_CHECKED_FRM_CTX_TO_V4L2_ENTR(mv_class_probs);
   2670   ARRAY_MEMCPY_CHECKED_FRM_CTX_TO_V4L2_ENTR(mv_class0_bit_prob);
   2671   ARRAY_MEMCPY_CHECKED_FRM_CTX_TO_V4L2_ENTR(mv_bits_prob);
   2672   ARRAY_MEMCPY_CHECKED_FRM_CTX_TO_V4L2_ENTR(mv_class0_fr_probs);
   2673   ARRAY_MEMCPY_CHECKED_FRM_CTX_TO_V4L2_ENTR(mv_fr_probs);
   2674   ARRAY_MEMCPY_CHECKED_FRM_CTX_TO_V4L2_ENTR(mv_class0_hp_prob);
   2675   ARRAY_MEMCPY_CHECKED_FRM_CTX_TO_V4L2_ENTR(mv_hp_prob);
   2676 #undef ARRAY_MEMCPY_CHECKED_FRM_CTX_TO_V4L2_ENTR
   2677 }
   2678 
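        // Build and submit all per-frame controls for |pic| (frame header,
        // decode parameters with the reference frame mapping and, if the
        // device requires it, the entropy context), then submit the frame's
        // bitstream data and queue the surface for decoding.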
   2679 bool V4L2SliceVideoDecodeAccelerator::V4L2VP9Accelerator::SubmitDecode(
   2680     const scoped_refptr<VP9Picture>& pic,
   2681     const Vp9SegmentationParams& segm_params,
   2682     const Vp9LoopFilterParams& lf_params,
   2683     const std::vector<scoped_refptr<VP9Picture>>& ref_pictures,
   2684     const base::Closure& done_cb) {
   2685   const Vp9FrameHeader* frame_hdr = pic->frame_hdr.get();
   2686   DCHECK(frame_hdr);
   2687 
   2688   struct v4l2_ctrl_vp9_frame_hdr v4l2_frame_hdr;
   2689   memset(&v4l2_frame_hdr, 0, sizeof(v4l2_frame_hdr));
   2690 
   2691 #define FHDR_TO_V4L2_FHDR(a) v4l2_frame_hdr.a = frame_hdr->a
   2692   FHDR_TO_V4L2_FHDR(profile);
   2693   FHDR_TO_V4L2_FHDR(frame_type);
   2694 
   2695   FHDR_TO_V4L2_FHDR(bit_depth);
   2696   FHDR_TO_V4L2_FHDR(color_range);
   2697   FHDR_TO_V4L2_FHDR(subsampling_x);
   2698   FHDR_TO_V4L2_FHDR(subsampling_y);
   2699 
   2700   FHDR_TO_V4L2_FHDR(frame_width);
   2701   FHDR_TO_V4L2_FHDR(frame_height);
   2702   FHDR_TO_V4L2_FHDR(render_width);
   2703   FHDR_TO_V4L2_FHDR(render_height);
   2704 
   2705   FHDR_TO_V4L2_FHDR(reset_frame_context);
   2706 
   2707   FHDR_TO_V4L2_FHDR(interpolation_filter);
   2708   FHDR_TO_V4L2_FHDR(frame_context_idx);
   2709 
   2710   FHDR_TO_V4L2_FHDR(tile_cols_log2);
   2711   FHDR_TO_V4L2_FHDR(tile_rows_log2);
   2712 
   2713   FHDR_TO_V4L2_FHDR(header_size_in_bytes);
   2714 #undef FHDR_TO_V4L2_FHDR
   2715   v4l2_frame_hdr.color_space = static_cast<uint8_t>(frame_hdr->color_space);
   2716 
   2717   FillV4L2VP9QuantizationParams(frame_hdr->quant_params,
   2718                                 &v4l2_frame_hdr.quant_params);
   2719 
   2720 #define SET_V4L2_FRM_HDR_FLAG_IF(cond, flag) \
   2721   v4l2_frame_hdr.flags |= ((frame_hdr->cond) ? (flag) : 0)
   2722   SET_V4L2_FRM_HDR_FLAG_IF(show_frame, V4L2_VP9_FRAME_HDR_FLAG_SHOW_FRAME);
   2723   SET_V4L2_FRM_HDR_FLAG_IF(error_resilient_mode,
   2724                            V4L2_VP9_FRAME_HDR_FLAG_ERR_RES);
   2725   SET_V4L2_FRM_HDR_FLAG_IF(intra_only, V4L2_VP9_FRAME_HDR_FLAG_FRAME_INTRA);
   2726   SET_V4L2_FRM_HDR_FLAG_IF(allow_high_precision_mv,
   2727                            V4L2_VP9_FRAME_HDR_ALLOW_HIGH_PREC_MV);
   2728   SET_V4L2_FRM_HDR_FLAG_IF(refresh_frame_context,
   2729                            V4L2_VP9_FRAME_HDR_REFRESH_FRAME_CTX);
   2730   SET_V4L2_FRM_HDR_FLAG_IF(frame_parallel_decoding_mode,
   2731                            V4L2_VP9_FRAME_HDR_PARALLEL_DEC_MODE);
   2732 #undef SET_V4L2_FRM_HDR_FLAG_IF
   2733 
   2734   FillV4L2VP9LoopFilterParams(lf_params, &v4l2_frame_hdr.lf_params);
   2735   FillV4L2VP9SegmentationParams(segm_params, &v4l2_frame_hdr.sgmnt_params);
   2736 
   2737   std::vector<struct v4l2_ext_control> ctrls;
   2738 
   2739   struct v4l2_ext_control ctrl;
   2740   memset(&ctrl, 0, sizeof(ctrl));
   2741   ctrl.id = V4L2_CID_MPEG_VIDEO_VP9_FRAME_HDR;
   2742   ctrl.size = sizeof(v4l2_frame_hdr);
   2743   ctrl.p_vp9_frame_hdr = &v4l2_frame_hdr;
   2744   ctrls.push_back(ctrl);
   2745 
   2746   struct v4l2_ctrl_vp9_decode_param v4l2_decode_param;
   2747   memset(&v4l2_decode_param, 0, sizeof(v4l2_decode_param));
   2748   DCHECK_EQ(ref_pictures.size(), arraysize(v4l2_decode_param.ref_frames));
   2749 
   2750   std::vector<scoped_refptr<V4L2DecodeSurface>> ref_surfaces;
   2751   for (size_t i = 0; i < ref_pictures.size(); ++i) {
   2752     if (ref_pictures[i]) {
   2753       scoped_refptr<V4L2DecodeSurface> ref_surface =
   2754           VP9PictureToV4L2DecodeSurface(ref_pictures[i]);
   2755 
   2756       v4l2_decode_param.ref_frames[i] = ref_surface->output_record();
   2757       ref_surfaces.push_back(ref_surface);
   2758     } else {
   2759       v4l2_decode_param.ref_frames[i] = VIDEO_MAX_FRAME;
   2760     }
   2761   }
   2762 
   2763   static_assert(arraysize(v4l2_decode_param.active_ref_frames) ==
   2764                     arraysize(frame_hdr->ref_frame_idx),
   2765                 "active reference frame array sizes mismatch");
   2766 
   2767   for (size_t i = 0; i < arraysize(frame_hdr->ref_frame_idx); ++i) {
   2768     uint8_t idx = frame_hdr->ref_frame_idx[i];
   2769     if (idx >= ref_pictures.size())
   2770       return false;
   2771 
   2772     struct v4l2_vp9_reference_frame* v4l2_ref_frame =
   2773         &v4l2_decode_param.active_ref_frames[i];
   2774 
   2775     scoped_refptr<VP9Picture> ref_pic = ref_pictures[idx];
   2776     if (ref_pic) {
   2777       scoped_refptr<V4L2DecodeSurface> ref_surface =
   2778           VP9PictureToV4L2DecodeSurface(ref_pic);
   2779       v4l2_ref_frame->buf_index = ref_surface->output_record();
   2780 #define REF_TO_V4L2_REF(a) v4l2_ref_frame->a = ref_pic->frame_hdr->a
   2781       REF_TO_V4L2_REF(frame_width);
   2782       REF_TO_V4L2_REF(frame_height);
   2783       REF_TO_V4L2_REF(bit_depth);
   2784       REF_TO_V4L2_REF(subsampling_x);
   2785       REF_TO_V4L2_REF(subsampling_y);
   2786 #undef REF_TO_V4L2_REF
   2787     } else {
   2788       v4l2_ref_frame->buf_index = VIDEO_MAX_FRAME;
   2789     }
   2790   }
   2791 
   2792   memset(&ctrl, 0, sizeof(ctrl));
   2793   ctrl.id = V4L2_CID_MPEG_VIDEO_VP9_DECODE_PARAM;
   2794   ctrl.size = sizeof(v4l2_decode_param);
   2795   ctrl.p_vp9_decode_param = &v4l2_decode_param;
   2796   ctrls.push_back(ctrl);
   2797 
   2798   // Defined outside of the if() clause below as it must remain valid until
   2799   // the call to SubmitExtControls().
   2800   struct v4l2_ctrl_vp9_entropy v4l2_entropy;
   2801   if (device_needs_frame_context_) {
   2802     memset(&v4l2_entropy, 0, sizeof(v4l2_entropy));
   2803     FillV4L2Vp9EntropyContext(frame_hdr->initial_frame_context,
   2804                               &v4l2_entropy.initial_entropy_ctx);
   2805     FillV4L2Vp9EntropyContext(frame_hdr->frame_context,
   2806                               &v4l2_entropy.current_entropy_ctx);
   2807     v4l2_entropy.tx_mode = frame_hdr->compressed_header.tx_mode;
   2808     v4l2_entropy.reference_mode = frame_hdr->compressed_header.reference_mode;
   2809 
   2810     memset(&ctrl, 0, sizeof(ctrl));
   2811     ctrl.id = V4L2_CID_MPEG_VIDEO_VP9_ENTROPY;
   2812     ctrl.size = sizeof(v4l2_entropy);
   2813     ctrl.p_vp9_entropy = &v4l2_entropy;
   2814     ctrls.push_back(ctrl);
   2815   }
   2816 
   2817   scoped_refptr<V4L2DecodeSurface> dec_surface =
   2818       VP9PictureToV4L2DecodeSurface(pic);
   2819 
   2820   struct v4l2_ext_controls ext_ctrls;
   2821   memset(&ext_ctrls, 0, sizeof(ext_ctrls));
   2822   ext_ctrls.count = ctrls.size();
   2823   ext_ctrls.controls = &ctrls[0];
   2824   ext_ctrls.config_store = dec_surface->config_store();
   2825   if (!v4l2_dec_->SubmitExtControls(&ext_ctrls))
   2826     return false;
   2827 
   2828   dec_surface->SetReferenceSurfaces(ref_surfaces);
   2829   dec_surface->SetDecodeDoneCallback(done_cb);
   2830 
   2831   if (!v4l2_dec_->SubmitSlice(dec_surface->input_record(), frame_hdr->data,
   2832                               frame_hdr->frame_size))
   2833     return false;
   2834 
   2835   v4l2_dec_->DecodeSurface(dec_surface);
   2836   return true;
   2837 }
   2838 
   2839 bool V4L2SliceVideoDecodeAccelerator::V4L2VP9Accelerator::OutputPicture(
   2840     const scoped_refptr<VP9Picture>& pic) {
   2841   scoped_refptr<V4L2DecodeSurface> dec_surface =
   2842       VP9PictureToV4L2DecodeSurface(pic);
   2843 
   2844   v4l2_dec_->SurfaceReady(dec_surface);
   2845   return true;
   2846 }
   2847 
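        // Reverse of FillV4L2Vp9EntropyContext(): copy the probability tables
        // returned by the driver back into a Vp9FrameContext.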
   2848 static void FillVp9FrameContext(struct v4l2_vp9_entropy_ctx& v4l2_entropy_ctx,
   2849                                 Vp9FrameContext* vp9_frame_ctx) {
   2850 #define ARRAY_MEMCPY_CHECKED_V4L2_ENTR_TO_FRM_CTX(a) \
   2851   ARRAY_MEMCPY_CHECKED(vp9_frame_ctx->a, v4l2_entropy_ctx.a)
   2852   ARRAY_MEMCPY_CHECKED_V4L2_ENTR_TO_FRM_CTX(tx_probs_8x8);
   2853   ARRAY_MEMCPY_CHECKED_V4L2_ENTR_TO_FRM_CTX(tx_probs_16x16);
   2854   ARRAY_MEMCPY_CHECKED_V4L2_ENTR_TO_FRM_CTX(tx_probs_32x32);
   2855 
   2856   ARRAY_MEMCPY_CHECKED_V4L2_ENTR_TO_FRM_CTX(coef_probs);
   2857   ARRAY_MEMCPY_CHECKED_V4L2_ENTR_TO_FRM_CTX(skip_prob);
   2858   ARRAY_MEMCPY_CHECKED_V4L2_ENTR_TO_FRM_CTX(inter_mode_probs);
   2859   ARRAY_MEMCPY_CHECKED_V4L2_ENTR_TO_FRM_CTX(interp_filter_probs);
   2860   ARRAY_MEMCPY_CHECKED_V4L2_ENTR_TO_FRM_CTX(is_inter_prob);
   2861 
   2862   ARRAY_MEMCPY_CHECKED_V4L2_ENTR_TO_FRM_CTX(comp_mode_prob);
   2863   ARRAY_MEMCPY_CHECKED_V4L2_ENTR_TO_FRM_CTX(single_ref_prob);
   2864   ARRAY_MEMCPY_CHECKED_V4L2_ENTR_TO_FRM_CTX(comp_ref_prob);
   2865 
   2866   ARRAY_MEMCPY_CHECKED_V4L2_ENTR_TO_FRM_CTX(y_mode_probs);
   2867   ARRAY_MEMCPY_CHECKED_V4L2_ENTR_TO_FRM_CTX(uv_mode_probs);
   2868 
   2869   ARRAY_MEMCPY_CHECKED_V4L2_ENTR_TO_FRM_CTX(partition_probs);
   2870 
   2871   ARRAY_MEMCPY_CHECKED_V4L2_ENTR_TO_FRM_CTX(mv_joint_probs);
   2872   ARRAY_MEMCPY_CHECKED_V4L2_ENTR_TO_FRM_CTX(mv_sign_prob);
   2873   ARRAY_MEMCPY_CHECKED_V4L2_ENTR_TO_FRM_CTX(mv_class_probs);
   2874   ARRAY_MEMCPY_CHECKED_V4L2_ENTR_TO_FRM_CTX(mv_class0_bit_prob);
   2875   ARRAY_MEMCPY_CHECKED_V4L2_ENTR_TO_FRM_CTX(mv_bits_prob);
   2876   ARRAY_MEMCPY_CHECKED_V4L2_ENTR_TO_FRM_CTX(mv_class0_fr_probs);
   2877   ARRAY_MEMCPY_CHECKED_V4L2_ENTR_TO_FRM_CTX(mv_fr_probs);
   2878   ARRAY_MEMCPY_CHECKED_V4L2_ENTR_TO_FRM_CTX(mv_class0_hp_prob);
   2879   ARRAY_MEMCPY_CHECKED_V4L2_ENTR_TO_FRM_CTX(mv_hp_prob);
   2880 #undef ARRAY_MEMCPY_CHECKED_V4L2_ENTR_TO_FRM_CTX
   2881 }
   2882 
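        // Read the updated entropy context for |pic| back from the driver via
        // V4L2_CID_MPEG_VIDEO_VP9_ENTROPY and convert it into |frame_ctx|.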
   2883 bool V4L2SliceVideoDecodeAccelerator::V4L2VP9Accelerator::GetFrameContext(
   2884     const scoped_refptr<VP9Picture>& pic,
   2885     Vp9FrameContext* frame_ctx) {
   2886   struct v4l2_ctrl_vp9_entropy v4l2_entropy;
   2887   memset(&v4l2_entropy, 0, sizeof(v4l2_entropy));
   2888 
   2889   struct v4l2_ext_control ctrl;
   2890   memset(&ctrl, 0, sizeof(ctrl));
   2891   ctrl.id = V4L2_CID_MPEG_VIDEO_VP9_ENTROPY;
   2892   ctrl.size = sizeof(v4l2_entropy);
   2893   ctrl.p_vp9_entropy = &v4l2_entropy;
   2894 
   2895   scoped_refptr<V4L2DecodeSurface> dec_surface =
   2896       VP9PictureToV4L2DecodeSurface(pic);
   2897 
   2898   struct v4l2_ext_controls ext_ctrls;
   2899   memset(&ext_ctrls, 0, sizeof(ext_ctrls));
   2900   ext_ctrls.count = 1;
   2901   ext_ctrls.controls = &ctrl;
   2902   ext_ctrls.config_store = dec_surface->config_store();
   2903 
   2904   if (!v4l2_dec_->GetExtControls(&ext_ctrls))
   2905     return false;
   2906 
   2907   FillVp9FrameContext(v4l2_entropy.current_entropy_ctx, frame_ctx);
   2908   return true;
   2909 }
   2910 
   2911 scoped_refptr<V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>
   2912 V4L2SliceVideoDecodeAccelerator::V4L2VP9Accelerator::
   2913     VP9PictureToV4L2DecodeSurface(const scoped_refptr<VP9Picture>& pic) {
   2914   V4L2VP9Picture* v4l2_pic = pic->AsV4L2VP9Picture();
   2915   CHECK(v4l2_pic);
   2916   return v4l2_pic->dec_surface();
   2917 }
   2918 
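        // Queue |dec_surface| to the device for decoding.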
   2919 void V4L2SliceVideoDecodeAccelerator::DecodeSurface(
   2920     const scoped_refptr<V4L2DecodeSurface>& dec_surface) {
   2921   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
   2922 
   2923   DVLOGF(3) << "Submitting decode for surface: " << dec_surface->ToString();
   2924   Enqueue(dec_surface);
   2925 }
   2926 
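        // Called when a surface is ready to be output in display order; queue
        // it and try to output any surfaces whose decode has already finished.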
   2927 void V4L2SliceVideoDecodeAccelerator::SurfaceReady(
   2928     const scoped_refptr<V4L2DecodeSurface>& dec_surface) {
   2929   DVLOGF(3);
   2930   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
   2931 
   2932   decoder_display_queue_.push(dec_surface);
   2933   TryOutputSurfaces();
   2934 }
   2935 
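        // Output surfaces from the front of the display queue, stopping at the
        // first one that has not finished decoding yet, so that output order
        // is preserved.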
   2936 void V4L2SliceVideoDecodeAccelerator::TryOutputSurfaces() {
   2937   while (!decoder_display_queue_.empty()) {
   2938     scoped_refptr<V4L2DecodeSurface> dec_surface =
   2939         decoder_display_queue_.front();
   2940 
   2941     if (!dec_surface->decoded())
   2942       break;
   2943 
   2944     decoder_display_queue_.pop();
   2945     OutputSurface(dec_surface);
   2946   }
   2947 }
   2948 
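        // Hand the output buffer backing |dec_surface| to the client as a
        // Picture and queue a PictureReady notification for it.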
   2949 void V4L2SliceVideoDecodeAccelerator::OutputSurface(
   2950     const scoped_refptr<V4L2DecodeSurface>& dec_surface) {
   2951   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
   2952 
   2953   OutputRecord& output_record =
   2954       output_buffer_map_[dec_surface->output_record()];
   2955 
   2956   bool inserted =
   2957       surfaces_at_display_
   2958           .insert(std::make_pair(output_record.picture_id, dec_surface))
   2959           .second;
   2960   DCHECK(inserted);
   2961 
   2962   DCHECK(!output_record.at_client);
   2963   DCHECK(!output_record.at_device);
   2964   DCHECK_NE(output_record.picture_id, -1);
   2965   output_record.at_client = true;
   2966 
   2967   // TODO(posciak): Use visible size from decoder here instead
   2968   // (crbug.com/402760). Passing (0, 0) results in the client using the
   2969   // visible size extracted from the container instead.
   2970   Picture picture(output_record.picture_id, dec_surface->bitstream_id(),
   2971                   Rect(0, 0), false);
   2972   DVLOGF(3) << dec_surface->ToString()
   2973             << ", bitstream_id: " << picture.bitstream_buffer_id()
   2974             << ", picture_id: " << picture.picture_buffer_id();
   2975   pending_picture_ready_.push(PictureRecord(output_record.cleared, picture));
   2976   SendPictureReady();
   2977   output_record.cleared = true;
   2978 }
   2979 
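        // Allocate a decode surface from the free input and output buffer
        // pools for the current bitstream buffer. Returns nullptr if either
        // pool is empty.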
   2980 scoped_refptr<V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>
   2981 V4L2SliceVideoDecodeAccelerator::CreateSurface() {
   2982   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
   2983   DCHECK_EQ(state_, kDecoding);
   2984 
   2985   if (free_input_buffers_.empty() || free_output_buffers_.empty())
   2986     return nullptr;
   2987 
   2988   int input = free_input_buffers_.front();
   2989   free_input_buffers_.pop_front();
   2990   int output = free_output_buffers_.front();
   2991   free_output_buffers_.pop_front();
   2992 
   2993   InputRecord& input_record = input_buffer_map_[input];
   2994   DCHECK_EQ(input_record.bytes_used, 0u);
   2995   DCHECK_EQ(input_record.input_id, -1);
   2996   DCHECK(decoder_current_bitstream_buffer_ != nullptr);
   2997   input_record.input_id = decoder_current_bitstream_buffer_->input_id;
   2998 
   2999   scoped_refptr<V4L2DecodeSurface> dec_surface = new V4L2DecodeSurface(
   3000       decoder_current_bitstream_buffer_->input_id, input, output,
   3001       base::Bind(&V4L2SliceVideoDecodeAccelerator::ReuseOutputBuffer,
   3002                  base::Unretained(this)));
   3003 
   3004   DVLOGF(4) << "Created surface " << input << " -> " << output;
   3005   return dec_surface;
   3006 }
   3007 
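        // Drain |pending_picture_ready_|, posting already-cleared pictures to
        // the decode task runner and not-yet-cleared ones to the child thread,
        // which clears them first.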
   3008 void V4L2SliceVideoDecodeAccelerator::SendPictureReady() {
   3009   DVLOGF(3);
   3010   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
   3011   bool resetting_or_flushing = (decoder_resetting_ || decoder_flushing_);
   3012   while (!pending_picture_ready_.empty()) {
   3013     bool cleared = pending_picture_ready_.front().cleared;
   3014     const Picture& picture = pending_picture_ready_.front().picture;
   3015     if (cleared && picture_clearing_count_ == 0) {
   3016       DVLOGF(4) << "Posting picture ready to decode task runner for: "
   3017                 << picture.picture_buffer_id();
   3018       // This picture is already cleared. It can be posted to a thread other
   3019       // than the main GPU thread to reduce latency. This should be the common
   3020       // case once the initial pictures have all been cleared.
   3021       decode_task_runner_->PostTask(
   3022           FROM_HERE,
   3023           base::Bind(&Client::PictureReady, decode_client_, picture));
   3024       pending_picture_ready_.pop();
   3025     } else if (!cleared || resetting_or_flushing) {
   3026       DVLOGF(3) << "cleared=" << pending_picture_ready_.front().cleared
   3027                 << ", decoder_resetting_=" << decoder_resetting_
   3028                 << ", decoder_flushing_=" << decoder_flushing_
   3029                 << ", picture_clearing_count_=" << picture_clearing_count_;
   3030       DVLOGF(4) << "Posting picture ready to GPU for: "
   3031                 << picture.picture_buffer_id();
   3032       // If the picture has not been cleared yet, post it to the child thread,
   3033       // because clearing has to happen there. A picture only needs to be
   3034       // cleared once. If the decoder is resetting or flushing, send all
   3035       // pictures so that PictureReady arrives before the reset or flush is done.
   3036       child_task_runner_->PostTaskAndReply(
   3037           FROM_HERE, base::Bind(&Client::PictureReady, client_, picture),
   3038           // Unretained is safe. If Client::PictureReady gets to run, |this| is
   3039           // alive. Destroy() will wait for the decode thread to finish.
   3040           base::Bind(&V4L2SliceVideoDecodeAccelerator::PictureCleared,
   3041                      base::Unretained(this)));
   3042       picture_clearing_count_++;
   3043       pending_picture_ready_.pop();
   3044     } else {
   3045       // This picture is cleared. But some pictures are about to be cleared on
   3046       // the child thread. To preserve the order, do not send this until those
   3047       // pictures are cleared.
   3048       break;
   3049     }
   3050   }
   3051 }
   3052 
   3053 void V4L2SliceVideoDecodeAccelerator::PictureCleared() {
   3054   DVLOGF(3) << "clearing count=" << picture_clearing_count_;
   3055   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
   3056   DCHECK_GT(picture_clearing_count_, 0);
   3057   picture_clearing_count_--;
   3058   SendPictureReady();
   3059 }
   3060 
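        // Record the client and task runner used for decode-side callbacks;
        // SendPictureReady() posts cleared pictures to this task runner.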
   3061 bool V4L2SliceVideoDecodeAccelerator::TryToSetupDecodeOnSeparateThread(
   3062     const base::WeakPtr<Client>& decode_client,
   3063     const scoped_refptr<base::SingleThreadTaskRunner>& decode_task_runner) {
   3064   decode_client_ = decode_client;
   3065   decode_task_runner_ = decode_task_runner;
   3066   return true;
   3067 }
   3068 
   3069 // static
   3070 VideoDecodeAccelerator::SupportedProfiles
   3071 V4L2SliceVideoDecodeAccelerator::GetSupportedProfiles() {
   3072   scoped_refptr<V4L2Device> device(new V4L2Device());
   3073   if (!device)
   3074     return SupportedProfiles();
   3075 
   3076   return device->GetSupportedDecodeProfiles(arraysize(supported_input_fourccs_),
   3077                                             supported_input_fourccs_);
   3078 }
   3079 
   3080 }  // namespace media
   3081