Home | History | Annotate | Download | only in vda
      1 // Copyright 2015 The Chromium Authors. All rights reserved.
      2 // Use of this source code is governed by a BSD-style license that can be
      3 // found in the LICENSE file.
      4 // Note: ported from Chromium commit head: 85fdf90
      5 
      6 #include "v4l2_slice_video_decode_accelerator.h"
      7 
      8 #include <errno.h>
      9 #include <fcntl.h>
     10 #include <poll.h>
     11 #include <string.h>
     12 #include <sys/eventfd.h>
     13 #include <sys/ioctl.h>
     14 #include <sys/mman.h>
     15 
     16 #include <memory>
     17 
     18 #include "base/bind.h"
     19 #include "base/bind_helpers.h"
     20 #include "base/callback.h"
     21 #include "base/callback_helpers.h"
     22 #include "base/command_line.h"
     23 #include "base/macros.h"
     24 #include "base/memory/ptr_util.h"
     25 #include "base/numerics/safe_conversions.h"
     26 #include "base/single_thread_task_runner.h"
     27 #include "base/strings/stringprintf.h"
     28 #include "base/threading/thread_task_runner_handle.h"
     29 #include "shared_memory_region.h"
     30 
// Logging helpers that prefix every message with the current function name.
#define DVLOGF(level) DVLOG(level) << __func__ << "(): "
#define VLOGF(level) VLOG(level) << __func__ << "(): "
#define VPLOGF(level) VPLOG(level) << __func__ << "(): "

// Logs the error and records it via SetErrorState(). Does NOT return from
// the calling function; callers must handle control flow themselves.
#define NOTIFY_ERROR(x)                       \
  do {                                        \
    VLOGF(1) << "Setting error state: " << x; \
    SetErrorState(x);                         \
  } while (0)

// Issues ioctl |type| on |device_|; on failure logs |type_str| (with errno,
// via VPLOGF) and returns |value| from the enclosing function.
#define IOCTL_OR_ERROR_RETURN_VALUE(type, arg, value, type_str) \
  do {                                                          \
    if (device_->Ioctl(type, arg) != 0) {                       \
      VPLOGF(1) << "ioctl() failed: " << type_str;              \
      return value;                                             \
    }                                                           \
  } while (0)

// Variant for void-returning functions.
#define IOCTL_OR_ERROR_RETURN(type, arg) \
  IOCTL_OR_ERROR_RETURN_VALUE(type, arg, ((void)0), #type)

// Variant for bool-returning functions; returns false on ioctl failure.
#define IOCTL_OR_ERROR_RETURN_FALSE(type, arg) \
  IOCTL_OR_ERROR_RETURN_VALUE(type, arg, false, #type)

// Issues the ioctl and logs failures, but never returns early.
#define IOCTL_OR_LOG_ERROR(type, arg)           \
  do {                                          \
    if (device_->Ioctl(type, arg) != 0)         \
      VPLOGF(1) << "ioctl() failed: " << #type; \
  } while (0)
     60 
     61 namespace media {
     62 
// static
// Bitstream fourccs accepted on the input (OUTPUT) queue: the V4L2 slice API
// variants of H.264, VP8 and VP9.
const uint32_t V4L2SliceVideoDecodeAccelerator::supported_input_fourccs_[] = {
    V4L2_PIX_FMT_H264_SLICE, V4L2_PIX_FMT_VP8_FRAME, V4L2_PIX_FMT_VP9_FRAME,
};
     67 
// A decode surface ties together one input (bitstream) buffer, the output
// buffer the frame is decoded into, and the config store id used to associate
// per-frame controls with that buffer. It is ref-counted so codec-specific
// picture wrappers and reference lists can share it; the release callback
// runs from the destructor to recycle the output buffer.
class V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface
    : public base::RefCounted<V4L2DecodeSurface> {
 public:
  // Invoked from the destructor with |output_record_| so the owner can
  // return the output buffer to its free pool.
  using ReleaseCB = base::Callback<void(int)>;

  V4L2DecodeSurface(int32_t bitstream_id,
                    int input_record,
                    int output_record,
                    const ReleaseCB& release_cb);

  // Mark the surface as decoded. This will also release all references, as
  // they are not needed anymore and execute the done callback, if not null.
  void SetDecoded();
  bool decoded() const { return decoded_; }

  int32_t bitstream_id() const { return bitstream_id_; }
  int input_record() const { return input_record_; }
  int output_record() const { return output_record_; }
  uint32_t config_store() const { return config_store_; }
  Rect visible_rect() const { return visible_rect_; }

  void set_visible_rect(const Rect& visible_rect) {
    visible_rect_ = visible_rect;
  }

  // Take references to each reference surface and keep them until the
  // target surface is decoded.
  void SetReferenceSurfaces(
      const std::vector<scoped_refptr<V4L2DecodeSurface>>& ref_surfaces);

  // If provided via this method, |done_cb| callback will be executed after
  // decoding into this surface is finished. The callback is reset afterwards,
  // so it needs to be set again before each decode operation.
  void SetDecodeDoneCallback(const base::Closure& done_cb) {
    DCHECK(done_cb_.is_null());
    done_cb_ = done_cb;
  }

  std::string ToString() const;

 private:
  friend class base::RefCounted<V4L2DecodeSurface>;
  ~V4L2DecodeSurface();

  int32_t bitstream_id_;
  int input_record_;
  int output_record_;
  // Derived from |input_record_| + 1 in the constructor.
  uint32_t config_store_;
  Rect visible_rect_;

  bool decoded_;
  ReleaseCB release_cb_;
  // One-shot callback run by SetDecoded(); cleared after each run.
  base::Closure done_cb_;

  // Surfaces this frame references; held until SetDecoded().
  std::vector<scoped_refptr<V4L2DecodeSurface>> reference_surfaces_;

  DISALLOW_COPY_AND_ASSIGN(V4L2DecodeSurface);
};
    126 
V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface::V4L2DecodeSurface(
    int32_t bitstream_id,
    int input_record,
    int output_record,
    const ReleaseCB& release_cb)
    : bitstream_id_(bitstream_id),
      input_record_(input_record),
      output_record_(output_record),
      // Offset by one — presumably so 0 can mean "no config store" on the
      // device side; confirm against the driver's config store API.
      config_store_(input_record + 1),
      decoded_(false),
      release_cb_(release_cb) {}

V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface::~V4L2DecodeSurface() {
  DVLOGF(5) << "Releasing output record id=" << output_record_;
  // Recycle the output buffer. |release_cb_| is run unconditionally, so it
  // must be non-null for every constructed surface.
  release_cb_.Run(output_record_);
}
    143 
    144 void V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface::SetReferenceSurfaces(
    145     const std::vector<scoped_refptr<V4L2DecodeSurface>>& ref_surfaces) {
    146   DCHECK(reference_surfaces_.empty());
    147   reference_surfaces_ = ref_surfaces;
    148 }
    149 
    150 void V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface::SetDecoded() {
    151   DCHECK(!decoded_);
    152   decoded_ = true;
    153 
    154   // We can now drop references to all reference surfaces for this surface
    155   // as we are done with decoding.
    156   reference_surfaces_.clear();
    157 
    158   // And finally execute and drop the decode done callback, if set.
    159   if (!done_cb_.is_null())
    160     base::ResetAndReturn(&done_cb_).Run();
    161 }
    162 
    163 std::string V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface::ToString()
    164     const {
    165   std::string out;
    166   base::StringAppendF(&out, "Buffer %d -> %d. ", input_record_, output_record_);
    167   base::StringAppendF(&out, "Reference surfaces:");
    168   for (const auto& ref : reference_surfaces_) {
    169     DCHECK_NE(ref->output_record(), output_record_);
    170     base::StringAppendF(&out, " %d", ref->output_record());
    171   }
    172   return out;
    173 }
    174 
// State of one input (OUTPUT queue) buffer: bitstream id, mmapped
// address/length, bytes filled, and whether it is queued on the device.
V4L2SliceVideoDecodeAccelerator::InputRecord::InputRecord()
    : input_id(-1),
      address(nullptr),
      length(0),
      bytes_used(0),
      at_device(false) {}

// State of one output (CAPTURE queue) buffer: current owner (device/client),
// the picture buffer id assigned to it, and whether it has been cleared.
V4L2SliceVideoDecodeAccelerator::OutputRecord::OutputRecord()
    : at_device(false),
      at_client(false),
      picture_id(-1),
      cleared(false) {}
    187 
// Holds one client-provided bitstream buffer while it is being decoded.
// The destructor notifies the client (on its task runner) that the buffer
// has been consumed, unless |input_id| is negative.
struct V4L2SliceVideoDecodeAccelerator::BitstreamBufferRef {
  BitstreamBufferRef(
      base::WeakPtr<VideoDecodeAccelerator::Client>& client,
      const scoped_refptr<base::SingleThreadTaskRunner>& client_task_runner,
      SharedMemoryRegion* shm,
      int32_t input_id);
  ~BitstreamBufferRef();
  const base::WeakPtr<VideoDecodeAccelerator::Client> client;
  const scoped_refptr<base::SingleThreadTaskRunner> client_task_runner;
  // Shared memory backing the bitstream; owned by this ref.
  const std::unique_ptr<SharedMemoryRegion> shm;
  // Bytes of |shm| consumed by the decoder so far.
  off_t bytes_used;
  const int32_t input_id;
};
    201 
V4L2SliceVideoDecodeAccelerator::BitstreamBufferRef::BitstreamBufferRef(
    base::WeakPtr<VideoDecodeAccelerator::Client>& client,
    const scoped_refptr<base::SingleThreadTaskRunner>& client_task_runner,
    SharedMemoryRegion* shm,
    int32_t input_id)
    : client(client),
      client_task_runner(client_task_runner),
      shm(shm),  // Takes ownership of the raw |shm| pointer.
      bytes_used(0),
      input_id(input_id) {}
    212 
    213 V4L2SliceVideoDecodeAccelerator::BitstreamBufferRef::~BitstreamBufferRef() {
    214   if (input_id >= 0) {
    215     DVLOGF(5) << "returning input_id: " << input_id;
    216     client_task_runner->PostTask(
    217         FROM_HERE,
    218         base::Bind(&VideoDecodeAccelerator::Client::NotifyEndOfBitstreamBuffer,
    219                    client, input_id));
    220   }
    221 }
    222 
// Pairs an output Picture with the |cleared| state recorded when it was
// queued for display.
V4L2SliceVideoDecodeAccelerator::PictureRecord::PictureRecord(
    bool cleared,
    const Picture& picture)
    : cleared(cleared), picture(picture) {}

V4L2SliceVideoDecodeAccelerator::PictureRecord::~PictureRecord() {}
    229 
// H264Decoder::H264Accelerator implementation: translates parsed H.264
// metadata (SPS/PPS/DPB/slice headers) into V4L2 controls and slice data
// into input buffers, submitting them through the owning |v4l2_dec_|.
class V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator
    : public H264Decoder::H264Accelerator {
 public:
  explicit V4L2H264Accelerator(V4L2SliceVideoDecodeAccelerator* v4l2_dec);
  ~V4L2H264Accelerator() override;

  // H264Decoder::H264Accelerator implementation.
  scoped_refptr<H264Picture> CreateH264Picture() override;

  bool SubmitFrameMetadata(const H264SPS* sps,
                           const H264PPS* pps,
                           const H264DPB& dpb,
                           const H264Picture::Vector& ref_pic_listp0,
                           const H264Picture::Vector& ref_pic_listb0,
                           const H264Picture::Vector& ref_pic_listb1,
                           const scoped_refptr<H264Picture>& pic) override;

  bool SubmitSlice(const H264PPS* pps,
                   const H264SliceHeader* slice_hdr,
                   const H264Picture::Vector& ref_pic_list0,
                   const H264Picture::Vector& ref_pic_list1,
                   const scoped_refptr<H264Picture>& pic,
                   const uint8_t* data,
                   size_t size) override;

  bool SubmitDecode(const scoped_refptr<H264Picture>& pic) override;
  bool OutputPicture(const scoped_refptr<H264Picture>& pic) override;

  void Reset() override;

 private:
  // Max size of reference list.
  static const size_t kDPBIndicesListSize = 32;
  // Converts |src_pic_list| into the fixed-size DPB-index list form used by
  // the V4L2 H.264 controls.
  void H264PictureListToDPBIndicesList(const H264Picture::Vector& src_pic_list,
                                       uint8_t dst_list[kDPBIndicesListSize]);

  // Builds the V4L2 DPB from |dpb| and collects the decode surfaces that
  // back its entries into |ref_surfaces|.
  void H264DPBToV4L2DPB(
      const H264DPB& dpb,
      std::vector<scoped_refptr<V4L2DecodeSurface>>* ref_surfaces);

  // Extracts the V4L2DecodeSurface backing |pic|.
  scoped_refptr<V4L2DecodeSurface> H264PictureToV4L2DecodeSurface(
      const scoped_refptr<H264Picture>& pic);

  // Number of entries of |v4l2_slice_params_| in use for the current frame
  // (managed by the Submit*/Reset methods, not visible in this chunk).
  size_t num_slices_;
  // Back-pointer to the owning decoder; not owned, assumed to outlive us.
  V4L2SliceVideoDecodeAccelerator* v4l2_dec_;

  // TODO(posciak): This should be queried from hardware once supported.
  static const size_t kMaxSlices = 16;
  struct v4l2_ctrl_h264_slice_param v4l2_slice_params_[kMaxSlices];
  struct v4l2_ctrl_h264_decode_param v4l2_decode_param_;

  DISALLOW_COPY_AND_ASSIGN(V4L2H264Accelerator);
};
    283 
// VP8Decoder::VP8Accelerator implementation: submits VP8 frame headers and
// reference frames to the device through the owning |v4l2_dec_|.
class V4L2SliceVideoDecodeAccelerator::V4L2VP8Accelerator
    : public VP8Decoder::VP8Accelerator {
 public:
  explicit V4L2VP8Accelerator(V4L2SliceVideoDecodeAccelerator* v4l2_dec);
  ~V4L2VP8Accelerator() override;

  // VP8Decoder::VP8Accelerator implementation.
  scoped_refptr<VP8Picture> CreateVP8Picture() override;

  bool SubmitDecode(const scoped_refptr<VP8Picture>& pic,
                    const Vp8FrameHeader* frame_hdr,
                    const scoped_refptr<VP8Picture>& last_frame,
                    const scoped_refptr<VP8Picture>& golden_frame,
                    const scoped_refptr<VP8Picture>& alt_frame) override;

  bool OutputPicture(const scoped_refptr<VP8Picture>& pic) override;

 private:
  // Extracts the V4L2DecodeSurface backing |pic|.
  scoped_refptr<V4L2DecodeSurface> VP8PictureToV4L2DecodeSurface(
      const scoped_refptr<VP8Picture>& pic);

  // Back-pointer to the owning decoder; not owned, assumed to outlive us.
  V4L2SliceVideoDecodeAccelerator* v4l2_dec_;

  DISALLOW_COPY_AND_ASSIGN(V4L2VP8Accelerator);
};
    309 
// VP9Decoder::VP9Accelerator implementation: submits VP9 frame parameters,
// segmentation/loop-filter state and reference frames to the device through
// the owning |v4l2_dec_|.
class V4L2SliceVideoDecodeAccelerator::V4L2VP9Accelerator
    : public VP9Decoder::VP9Accelerator {
 public:
  explicit V4L2VP9Accelerator(V4L2SliceVideoDecodeAccelerator* v4l2_dec);
  ~V4L2VP9Accelerator() override;

  // VP9Decoder::VP9Accelerator implementation.
  scoped_refptr<VP9Picture> CreateVP9Picture() override;

  bool SubmitDecode(const scoped_refptr<VP9Picture>& pic,
                    const Vp9SegmentationParams& segm_params,
                    const Vp9LoopFilterParams& lf_params,
                    const std::vector<scoped_refptr<VP9Picture>>& ref_pictures,
                    const base::Closure& done_cb) override;

  bool OutputPicture(const scoped_refptr<VP9Picture>& pic) override;

  bool GetFrameContext(const scoped_refptr<VP9Picture>& pic,
                       Vp9FrameContext* frame_ctx) override;

  bool IsFrameContextRequired() const override {
    return device_needs_frame_context_;
  }

 private:
  // Extracts the V4L2DecodeSurface backing |pic|.
  scoped_refptr<V4L2DecodeSurface> VP9PictureToV4L2DecodeSurface(
      const scoped_refptr<VP9Picture>& pic);

  // Whether the driver requires userspace to maintain the VP9 frame context
  // (reported via IsFrameContextRequired()).
  bool device_needs_frame_context_;

  // Back-pointer to the owning decoder; not owned, assumed to outlive us.
  V4L2SliceVideoDecodeAccelerator* v4l2_dec_;

  DISALLOW_COPY_AND_ASSIGN(V4L2VP9Accelerator);
};
    344 
    345 // Codec-specific subclasses of software decoder picture classes.
    346 // This allows us to keep decoders oblivious of our implementation details.
// Codec-specific subclasses of software decoder picture classes.
// This allows us to keep decoders oblivious of our implementation details.
//
// H264Picture subclass that binds the software decoder's picture to the
// V4L2 decode surface it is decoded into.
class V4L2H264Picture : public H264Picture {
 public:
  explicit V4L2H264Picture(
      const scoped_refptr<V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>&
          dec_surface);

  V4L2H264Picture* AsV4L2H264Picture() override { return this; }
  scoped_refptr<V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>
  dec_surface() {
    return dec_surface_;
  }

 private:
  ~V4L2H264Picture() override;

  // The surface holding this picture's buffers; released with the picture.
  scoped_refptr<V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>
      dec_surface_;

  DISALLOW_COPY_AND_ASSIGN(V4L2H264Picture);
};
    367 
V4L2H264Picture::V4L2H264Picture(
    const scoped_refptr<V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>&
        dec_surface)
    : dec_surface_(dec_surface) {}

V4L2H264Picture::~V4L2H264Picture() {}
    374 
// VP8Picture subclass that binds the software decoder's picture to the
// V4L2 decode surface it is decoded into.
class V4L2VP8Picture : public VP8Picture {
 public:
  explicit V4L2VP8Picture(
      const scoped_refptr<V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>&
          dec_surface);

  V4L2VP8Picture* AsV4L2VP8Picture() override { return this; }
  scoped_refptr<V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>
  dec_surface() {
    return dec_surface_;
  }

 private:
  ~V4L2VP8Picture() override;

  // The surface holding this picture's buffers; released with the picture.
  scoped_refptr<V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>
      dec_surface_;

  DISALLOW_COPY_AND_ASSIGN(V4L2VP8Picture);
};
    395 
V4L2VP8Picture::V4L2VP8Picture(
    const scoped_refptr<V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>&
        dec_surface)
    : dec_surface_(dec_surface) {}

V4L2VP8Picture::~V4L2VP8Picture() {}
    402 
// VP9Picture subclass that binds the software decoder's picture to the
// V4L2 decode surface it is decoded into.
class V4L2VP9Picture : public VP9Picture {
 public:
  explicit V4L2VP9Picture(
      const scoped_refptr<V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>&
          dec_surface);

  V4L2VP9Picture* AsV4L2VP9Picture() override { return this; }
  scoped_refptr<V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>
  dec_surface() {
    return dec_surface_;
  }

 private:
  ~V4L2VP9Picture() override;

  // The surface holding this picture's buffers; released with the picture.
  scoped_refptr<V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>
      dec_surface_;

  DISALLOW_COPY_AND_ASSIGN(V4L2VP9Picture);
};
    423 
V4L2VP9Picture::V4L2VP9Picture(
    const scoped_refptr<V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>&
        dec_surface)
    : dec_surface_(dec_surface) {}

V4L2VP9Picture::~V4L2VP9Picture() {}
    430 
V4L2SliceVideoDecodeAccelerator::V4L2SliceVideoDecodeAccelerator(
    const scoped_refptr<V4L2Device>& device)
    : input_planes_count_(0),
      output_planes_count_(0),
      // Constructed on the child (client) thread; client callbacks such as
      // NotifyError() are posted back to this task runner.
      child_task_runner_(base::ThreadTaskRunnerHandle::Get()),
      device_(device),
      decoder_thread_("V4L2SliceVideoDecodeAcceleratorThread"),
      device_poll_thread_("V4L2SliceVideoDecodeAcceleratorDevicePollThread"),
      input_streamon_(false),
      input_buffer_queued_count_(0),
      output_streamon_(false),
      output_buffer_queued_count_(0),
      video_profile_(VIDEO_CODEC_PROFILE_UNKNOWN),
      input_format_fourcc_(0),
      output_format_fourcc_(0),
      state_(kUninitialized),
      output_mode_(Config::OutputMode::ALLOCATE),
      decoder_flushing_(false),
      decoder_resetting_(false),
      surface_set_change_pending_(false),
      picture_clearing_count_(0),
      weak_this_factory_(this) {
  // Cache a weak pointer here so tasks posted from other threads can bind it
  // without touching the factory off-thread.
  weak_this_ = weak_this_factory_.GetWeakPtr();
}
    455 
V4L2SliceVideoDecodeAccelerator::~V4L2SliceVideoDecodeAccelerator() {
  DVLOGF(2);

  // Destruction must happen on the child thread, after Destroy() has stopped
  // both worker threads and freed all buffers.
  DCHECK(child_task_runner_->BelongsToCurrentThread());
  DCHECK(!decoder_thread_.IsRunning());
  DCHECK(!device_poll_thread_.IsRunning());

  DCHECK(input_buffer_map_.empty());
  DCHECK(output_buffer_map_.empty());
}
    466 
    467 void V4L2SliceVideoDecodeAccelerator::NotifyError(Error error) {
    468   if (!child_task_runner_->BelongsToCurrentThread()) {
    469     child_task_runner_->PostTask(
    470         FROM_HERE, base::Bind(&V4L2SliceVideoDecodeAccelerator::NotifyError,
    471                               weak_this_, error));
    472     return;
    473   }
    474 
    475   if (client_) {
    476     client_->NotifyError(error);
    477     client_ptr_factory_.reset();
    478   }
    479 }
    480 
// Synchronous part of initialization, run on the child thread: validates the
// config, opens the device for the requested profile, instantiates the
// codec-specific accelerator/decoder pair, checks device capabilities, sets
// up queue formats and starts the decoder thread. Input buffer allocation is
// finished asynchronously by InitializeTask(). Returns false on any failure.
bool V4L2SliceVideoDecodeAccelerator::Initialize(const Config& config,
                                                 Client* client) {
  VLOGF(3) << "profile: " << config.profile;
  DCHECK(child_task_runner_->BelongsToCurrentThread());
  DCHECK_EQ(state_, kUninitialized);

  if (config.output_mode != Config::OutputMode::ALLOCATE &&
      config.output_mode != Config::OutputMode::IMPORT) {
    NOTREACHED() << "Only ALLOCATE and IMPORT OutputModes are supported";
    return false;
  }

  client_ptr_factory_.reset(
      new base::WeakPtrFactory<VideoDecodeAccelerator::Client>(client));
  client_ = client_ptr_factory_->GetWeakPtr();
  // If we haven't been set up to decode on separate thread via
  // TryToSetupDecodeOnSeparateThread(), use the main thread/client for
  // decode tasks.
  if (!decode_task_runner_) {
    decode_task_runner_ = child_task_runner_;
    DCHECK(!decode_client_);
    decode_client_ = client_;
  }

  video_profile_ = config.profile;

  // TODO(posciak): This needs to be queried once supported.
  input_planes_count_ = 1;
  output_planes_count_ = 1;

  input_format_fourcc_ =
      V4L2Device::VideoCodecProfileToV4L2PixFmt(video_profile_, true);

  if (!device_->Open(V4L2Device::Type::kDecoder, input_format_fourcc_)) {
    VLOGF(1) << "Failed to open device for profile: " << config.profile
             << " fourcc: " << std::hex << "0x" << input_format_fourcc_;
    return false;
  }

  // Pick the software parser + V4L2 accelerator pair matching the profile.
  if (video_profile_ >= H264PROFILE_MIN && video_profile_ <= H264PROFILE_MAX) {
    h264_accelerator_.reset(new V4L2H264Accelerator(this));
    decoder_.reset(new H264Decoder(h264_accelerator_.get()));
  } else if (video_profile_ >= VP8PROFILE_MIN &&
             video_profile_ <= VP8PROFILE_MAX) {
    vp8_accelerator_.reset(new V4L2VP8Accelerator(this));
    decoder_.reset(new VP8Decoder(vp8_accelerator_.get()));
  } else if (video_profile_ >= VP9PROFILE_MIN &&
             video_profile_ <= VP9PROFILE_MAX) {
    vp9_accelerator_.reset(new V4L2VP9Accelerator(this));
    decoder_.reset(new VP9Decoder(vp9_accelerator_.get()));
  } else {
    NOTREACHED() << "Unsupported profile " << video_profile_;
    return false;
  }

  // Capabilities check: we need a multi-planar mem-to-mem streaming device.
  struct v4l2_capability caps;
  const __u32 kCapsRequired = V4L2_CAP_VIDEO_M2M_MPLANE | V4L2_CAP_STREAMING;
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYCAP, &caps);
  if ((caps.capabilities & kCapsRequired) != kCapsRequired) {
    VLOGF(1) << "ioctl() failed: VIDIOC_QUERYCAP"
             << ", caps check failed: 0x" << std::hex << caps.capabilities;
    return false;
  }

  if (!SetupFormats())
    return false;

  if (!decoder_thread_.Start()) {
    VLOGF(1) << "device thread failed to start";
    return false;
  }
  decoder_thread_task_runner_ = decoder_thread_.task_runner();

  state_ = kInitialized;
  output_mode_ = config.output_mode;

  // InitializeTask will NOTIFY_ERROR on failure.
  decoder_thread_task_runner_->PostTask(
      FROM_HERE, base::Bind(&V4L2SliceVideoDecodeAccelerator::InitializeTask,
                            base::Unretained(this)));

  VLOGF(2) << "V4L2SliceVideoDecodeAccelerator initialized";
  return true;
}
    566 
    567 void V4L2SliceVideoDecodeAccelerator::InitializeTask() {
    568   VLOGF(2);
    569   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
    570   DCHECK_EQ(state_, kInitialized);
    571 
    572   if (!CreateInputBuffers())
    573     NOTIFY_ERROR(PLATFORM_FAILURE);
    574 
    575   // Output buffers will be created once decoder gives us information
    576   // about their size and required count.
    577   state_ = kDecoding;
    578 }
    579 
// Tears the decoder down and deletes it. Must be called on the child thread.
void V4L2SliceVideoDecodeAccelerator::Destroy() {
  VLOGF(2);
  DCHECK(child_task_runner_->BelongsToCurrentThread());

  if (decoder_thread_.IsRunning()) {
    decoder_thread_task_runner_->PostTask(
        FROM_HERE, base::Bind(&V4L2SliceVideoDecodeAccelerator::DestroyTask,
                              base::Unretained(this)));

    // Wait for tasks to finish/early-exit.
    decoder_thread_.Stop();
  }

  // This object is self-owned; the VDA contract is that Destroy() frees it.
  // The VLOGF below is safe after |delete this| as it touches no members.
  delete this;
  VLOGF(2) << "Destroyed";
}
    596 
// Decoder-thread half of Destroy(): flushes all pending work, stops
// streaming/polling and releases all buffers.
void V4L2SliceVideoDecodeAccelerator::DestroyTask() {
  DVLOGF(2);
  DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());

  // Move to error state so no other queued tasks do any further work.
  state_ = kError;

  decoder_->Reset();

  // Drop any input that has not been decoded yet.
  decoder_current_bitstream_buffer_.reset();
  while (!decoder_input_queue_.empty())
    decoder_input_queue_.pop();

  // Stop streaming and the device_poll_thread_.
  StopDevicePoll(false);

  DestroyInputBuffers();
  DestroyOutputs(false);

  // After teardown, no surface may still be held anywhere.
  DCHECK(surfaces_at_device_.empty());
  DCHECK(surfaces_at_display_.empty());
  DCHECK(decoder_display_queue_.empty());
}
    619 
    620 static bool IsSupportedOutputFormat(uint32_t v4l2_format) {
    621   // Only support V4L2_PIX_FMT_NV12 output format for now.
    622   // TODO(johnylin): add more supported format if necessary.
    623   uint32_t kSupportedOutputFmtFourcc[] = { V4L2_PIX_FMT_NV12 };
    624   return std::find(
    625       kSupportedOutputFmtFourcc,
    626       kSupportedOutputFmtFourcc + arraysize(kSupportedOutputFmtFourcc),
    627       v4l2_format) !=
    628           kSupportedOutputFmtFourcc + arraysize(kSupportedOutputFmtFourcc);
    629 }
    630 
// Configures the input (OUTPUT) and output (CAPTURE) queue formats. The
// input fourcc must be supported by the driver; the output fourcc is picked
// from the driver's enumeration, filtered by IsSupportedOutputFormat().
// Returns false if either format cannot be negotiated.
bool V4L2SliceVideoDecodeAccelerator::SetupFormats() {
  DCHECK_EQ(state_, kUninitialized);

  // Size input buffers by the largest resolution the device supports:
  // 4K-sized buffers when both dimensions exceed 1920x1088, else 1080p-sized.
  size_t input_size;
  Size max_resolution, min_resolution;
  device_->GetSupportedResolution(input_format_fourcc_, &min_resolution,
                                  &max_resolution);
  if (max_resolution.width() > 1920 && max_resolution.height() > 1088)
    input_size = kInputBufferMaxSizeFor4k;
  else
    input_size = kInputBufferMaxSizeFor1080p;

  // Verify the driver actually enumerates our input fourcc.
  struct v4l2_fmtdesc fmtdesc;
  memset(&fmtdesc, 0, sizeof(fmtdesc));
  fmtdesc.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
  bool is_format_supported = false;
  while (device_->Ioctl(VIDIOC_ENUM_FMT, &fmtdesc) == 0) {
    if (fmtdesc.pixelformat == input_format_fourcc_) {
      is_format_supported = true;
      break;
    }
    ++fmtdesc.index;
  }

  if (!is_format_supported) {
    DVLOGF(1) << "Input fourcc " << input_format_fourcc_
              << " not supported by device.";
    return false;
  }

  struct v4l2_format format;
  memset(&format, 0, sizeof(format));
  format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
  format.fmt.pix_mp.pixelformat = input_format_fourcc_;
  format.fmt.pix_mp.plane_fmt[0].sizeimage = input_size;
  format.fmt.pix_mp.num_planes = input_planes_count_;
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format);

  // We have to set up the format for output, because the driver may not allow
  // changing it once we start streaming; whether it can support our chosen
  // output format or not may depend on the input format.
  memset(&fmtdesc, 0, sizeof(fmtdesc));
  fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  output_format_fourcc_ = 0;
  while (device_->Ioctl(VIDIOC_ENUM_FMT, &fmtdesc) == 0) {
    if (IsSupportedOutputFormat(fmtdesc.pixelformat)) {
      output_format_fourcc_ = fmtdesc.pixelformat;
      break;
    }
    ++fmtdesc.index;
  }

  if (output_format_fourcc_ == 0) {
    VLOGF(1) << "Could not find a usable output format";
    return false;
  }

  // Only set fourcc for output; resolution, etc., will come from the
  // driver once it extracts it from the stream.
  memset(&format, 0, sizeof(format));
  format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  format.fmt.pix_mp.pixelformat = output_format_fourcc_;
  format.fmt.pix_mp.num_planes = output_planes_count_;
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format);

  return true;
}
    698 
    699 bool V4L2SliceVideoDecodeAccelerator::CreateInputBuffers() {
    700   VLOGF(2);
    701   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
    702   DCHECK(!input_streamon_);
    703   DCHECK(input_buffer_map_.empty());
    704 
    705   struct v4l2_requestbuffers reqbufs;
    706   memset(&reqbufs, 0, sizeof(reqbufs));
    707   reqbufs.count = kNumInputBuffers;
    708   reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    709   reqbufs.memory = V4L2_MEMORY_MMAP;
    710   IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs);
    711   if (reqbufs.count < kNumInputBuffers) {
    712     VLOGF(1) << "Could not allocate enough output buffers";
    713     return false;
    714   }
    715   input_buffer_map_.resize(reqbufs.count);
    716   for (size_t i = 0; i < input_buffer_map_.size(); ++i) {
    717     free_input_buffers_.push_back(i);
    718 
    719     // Query for the MEMORY_MMAP pointer.
    720     struct v4l2_plane planes[VIDEO_MAX_PLANES];
    721     struct v4l2_buffer buffer;
    722     memset(&buffer, 0, sizeof(buffer));
    723     memset(planes, 0, sizeof(planes));
    724     buffer.index = i;
    725     buffer.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    726     buffer.memory = V4L2_MEMORY_MMAP;
    727     buffer.m.planes = planes;
    728     buffer.length = input_planes_count_;
    729     IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYBUF, &buffer);
    730     void* address = device_->Mmap(nullptr,
    731                                   buffer.m.planes[0].length,
    732                                   PROT_READ | PROT_WRITE,
    733                                   MAP_SHARED,
    734                                   buffer.m.planes[0].m.mem_offset);
    735     if (address == MAP_FAILED) {
    736       VLOGF(1) << "mmap() failed";
    737       return false;
    738     }
    739     input_buffer_map_[i].address = address;
    740     input_buffer_map_[i].length = buffer.m.planes[0].length;
    741   }
    742 
    743   return true;
    744 }
    745 
// Negotiates the CAPTURE (decoded picture) format with the driver and asks
// the client for picture buffers. On success, the decoder enters
// kAwaitingPictureBuffers until AssignPictureBuffers() is called.
// Returns false on failure (platform failures also raise NOTIFY_ERROR).
bool V4L2SliceVideoDecodeAccelerator::CreateOutputBuffers() {
  VLOGF(2);
  DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
  DCHECK(!output_streamon_);
  DCHECK(output_buffer_map_.empty());
  DCHECK(surfaces_at_display_.empty());
  DCHECK(surfaces_at_device_.empty());

  // Picture dimensions and required buffer count come from the stream parser.
  Size pic_size = decoder_->GetPicSize();
  size_t num_pictures = decoder_->GetRequiredNumOfPictures();

  DCHECK_GT(num_pictures, 0u);
  DCHECK(!pic_size.IsEmpty());

  struct v4l2_format format;
  memset(&format, 0, sizeof(format));
  format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  format.fmt.pix_mp.pixelformat = output_format_fourcc_;
  format.fmt.pix_mp.width = pic_size.width();
  format.fmt.pix_mp.height = pic_size.height();
  // NOTE(review): plane count for the CAPTURE queue is taken from
  // |input_planes_count_|; confirm this should not be |output_planes_count_|.
  format.fmt.pix_mp.num_planes = input_planes_count_;

  if (device_->Ioctl(VIDIOC_S_FMT, &format) != 0) {
    VPLOGF(1) << "Failed setting format to: " << output_format_fourcc_;
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return false;
  }

  // The driver may adjust the coded size (e.g. for hardware alignment);
  // adopt whatever it chose.
  coded_size_.SetSize(base::checked_cast<int>(format.fmt.pix_mp.width),
                      base::checked_cast<int>(format.fmt.pix_mp.height));
  DCHECK_EQ(coded_size_.width() % 16, 0);
  DCHECK_EQ(coded_size_.height() % 16, 0);

  // The adjusted coded size must still contain the full visible picture.
  if (!Rect(coded_size_).Contains(Rect(pic_size))) {
    VLOGF(1) << "Got invalid adjusted coded size: " << coded_size_.ToString();
    return false;
  }

  DVLOGF(3) << "buffer_count=" << num_pictures
            << ", pic size=" << pic_size.ToString()
            << ", coded size=" << coded_size_.ToString();

  VideoPixelFormat pixel_format =
      V4L2Device::V4L2PixFmtToVideoPixelFormat(output_format_fourcc_);

  // Ask the client (on its own thread) to allocate the picture buffers.
  child_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(&VideoDecodeAccelerator::Client::ProvidePictureBuffers,
                 client_, num_pictures, pixel_format, coded_size_));

  // Go into kAwaitingPictureBuffers to prevent us from doing any more decoding
  // or event handling while we are waiting for AssignPictureBuffers(). Not
  // having Pictures available would not have prevented us from making decoding
  // progress entirely e.g. in the case of H.264 where we could further decode
  // non-slice NALUs and could even get another resolution change before we were
  // done with this one. After we get the buffers, we'll go back into kIdle and
  // kick off further event processing, and eventually go back into kDecoding
  // once no more events are pending (if any).
  state_ = kAwaitingPictureBuffers;
  return true;
}
    807 
    808 void V4L2SliceVideoDecodeAccelerator::DestroyInputBuffers() {
    809   VLOGF(2);
    810   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread() ||
    811          !decoder_thread_.IsRunning());
    812   DCHECK(!input_streamon_);
    813 
    814   if (input_buffer_map_.empty())
    815     return;
    816 
    817   for (auto& input_record : input_buffer_map_) {
    818     if (input_record.address != nullptr)
    819       device_->Munmap(input_record.address, input_record.length);
    820   }
    821 
    822   struct v4l2_requestbuffers reqbufs;
    823   memset(&reqbufs, 0, sizeof(reqbufs));
    824   reqbufs.count = 0;
    825   reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    826   reqbufs.memory = V4L2_MEMORY_MMAP;
    827   IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs);
    828 
    829   input_buffer_map_.clear();
    830   free_input_buffers_.clear();
    831 }
    832 
    833 void V4L2SliceVideoDecodeAccelerator::DismissPictures(
    834     const std::vector<int32_t>& picture_buffer_ids,
    835     base::WaitableEvent* done) {
    836   DVLOGF(3);
    837   DCHECK(child_task_runner_->BelongsToCurrentThread());
    838 
    839   for (auto picture_buffer_id : picture_buffer_ids) {
    840     DVLOGF(4) << "dismissing PictureBuffer id=" << picture_buffer_id;
    841     client_->DismissPictureBuffer(picture_buffer_id);
    842   }
    843 
    844   done->Signal();
    845 }
    846 
// Runs on |device_poll_thread_|: blocks in V4L2Device::Poll() until the
// device has activity (or the poll is interrupted), then hands processing
// back to the decoder thread.
void V4L2SliceVideoDecodeAccelerator::DevicePollTask(bool poll_device) {
  DVLOGF(3);
  DCHECK(device_poll_thread_.task_runner()->BelongsToCurrentThread());

  bool event_pending;
  if (!device_->Poll(poll_device, &event_pending)) {
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return;
  }

  // All processing should happen on ServiceDeviceTask(), since we shouldn't
  // touch decoder state from this thread.
  decoder_thread_task_runner_->PostTask(
      FROM_HERE, base::Bind(&V4L2SliceVideoDecodeAccelerator::ServiceDeviceTask,
                            base::Unretained(this)));
}
    863 
// Decoder-thread response to DevicePollTask() noticing device activity:
// dequeues any completed buffers, then re-arms the poll if work remains.
void V4L2SliceVideoDecodeAccelerator::ServiceDeviceTask() {
  DVLOGF(4);
  DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());

  // ServiceDeviceTask() should only ever be scheduled from DevicePollTask().

  Dequeue();
  SchedulePollIfNeeded();
}
    873 
// Schedules another DevicePollTask() on the poll thread, but only when there
// is something for the device to make progress on (at least one buffer
// queued on either queue) and the poll thread is still running.
void V4L2SliceVideoDecodeAccelerator::SchedulePollIfNeeded() {
  DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());

  if (!device_poll_thread_.IsRunning()) {
    DVLOGF(4) << "Device poll thread stopped, will not schedule poll";
    return;
  }

  // Polling only makes sense while at least one queue is streaming.
  DCHECK(input_streamon_ || output_streamon_);

  if (input_buffer_queued_count_ + output_buffer_queued_count_ == 0) {
    DVLOGF(4) << "No buffers queued, will not schedule poll";
    return;
  }

  DVLOGF(4) << "Scheduling device poll task";

  device_poll_thread_.task_runner()->PostTask(
      FROM_HERE, base::Bind(&V4L2SliceVideoDecodeAccelerator::DevicePollTask,
                            base::Unretained(this), true));

  // Snapshot of where buffers currently live, for debugging pipelines stalls.
  DVLOGF(3) << "buffer counts: "
            << "INPUT[" << decoder_input_queue_.size() << "]"
            << " => DEVICE["
            << free_input_buffers_.size() << "+"
            << input_buffer_queued_count_ << "/"
            << input_buffer_map_.size() << "]->["
            << free_output_buffers_.size() << "+"
            << output_buffer_queued_count_ << "/"
            << output_buffer_map_.size() << "]"
            << " => DISPLAYQ[" << decoder_display_queue_.size() << "]"
            << " => CLIENT[" << surfaces_at_display_.size() << "]";
}
    907 
    908 void V4L2SliceVideoDecodeAccelerator::Enqueue(
    909     const scoped_refptr<V4L2DecodeSurface>& dec_surface) {
    910   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
    911 
    912   const int old_inputs_queued = input_buffer_queued_count_;
    913   const int old_outputs_queued = output_buffer_queued_count_;
    914 
    915   if (!EnqueueInputRecord(dec_surface->input_record(),
    916                           dec_surface->config_store())) {
    917     VLOGF(1) << "Failed queueing an input buffer";
    918     NOTIFY_ERROR(PLATFORM_FAILURE);
    919     return;
    920   }
    921 
    922   if (!EnqueueOutputRecord(dec_surface->output_record())) {
    923     VLOGF(1) << "Failed queueing an output buffer";
    924     NOTIFY_ERROR(PLATFORM_FAILURE);
    925     return;
    926   }
    927 
    928   bool inserted =
    929       surfaces_at_device_
    930           .insert(std::make_pair(dec_surface->output_record(), dec_surface))
    931           .second;
    932   DCHECK(inserted);
    933 
    934   if (old_inputs_queued == 0 && old_outputs_queued == 0)
    935     SchedulePollIfNeeded();
    936 }
    937 
    938 void V4L2SliceVideoDecodeAccelerator::Dequeue() {
    939   DVLOGF(4);
    940   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
    941 
    942   struct v4l2_buffer dqbuf;
    943   struct v4l2_plane planes[VIDEO_MAX_PLANES];
    944   while (input_buffer_queued_count_ > 0) {
    945     DCHECK(input_streamon_);
    946     memset(&dqbuf, 0, sizeof(dqbuf));
    947     memset(&planes, 0, sizeof(planes));
    948     dqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    949     dqbuf.memory = V4L2_MEMORY_MMAP;
    950     dqbuf.m.planes = planes;
    951     dqbuf.length = input_planes_count_;
    952     if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) {
    953       if (errno == EAGAIN) {
    954         // EAGAIN if we're just out of buffers to dequeue.
    955         break;
    956       }
    957       VPLOGF(1) << "ioctl() failed: VIDIOC_DQBUF";
    958       NOTIFY_ERROR(PLATFORM_FAILURE);
    959       return;
    960     }
    961     InputRecord& input_record = input_buffer_map_[dqbuf.index];
    962     DCHECK(input_record.at_device);
    963     input_record.at_device = false;
    964     ReuseInputBuffer(dqbuf.index);
    965     input_buffer_queued_count_--;
    966     DVLOGF(4) << "Dequeued input=" << dqbuf.index
    967               << " count: " << input_buffer_queued_count_;
    968   }
    969 
    970   while (output_buffer_queued_count_ > 0) {
    971     DCHECK(output_streamon_);
    972     memset(&dqbuf, 0, sizeof(dqbuf));
    973     memset(&planes, 0, sizeof(planes));
    974     dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    975     dqbuf.memory =
    976         (output_mode_ == Config::OutputMode::ALLOCATE ? V4L2_MEMORY_MMAP
    977                                                       : V4L2_MEMORY_DMABUF);
    978     dqbuf.m.planes = planes;
    979     dqbuf.length = output_planes_count_;
    980     if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) {
    981       if (errno == EAGAIN) {
    982         // EAGAIN if we're just out of buffers to dequeue.
    983         break;
    984       }
    985       VPLOGF(1) << "ioctl() failed: VIDIOC_DQBUF";
    986       NOTIFY_ERROR(PLATFORM_FAILURE);
    987       return;
    988     }
    989     OutputRecord& output_record = output_buffer_map_[dqbuf.index];
    990     DCHECK(output_record.at_device);
    991     output_record.at_device = false;
    992     output_buffer_queued_count_--;
    993     DVLOGF(4) << "Dequeued output=" << dqbuf.index << " count "
    994               << output_buffer_queued_count_;
    995 
    996     V4L2DecodeSurfaceByOutputId::iterator it =
    997         surfaces_at_device_.find(dqbuf.index);
    998     if (it == surfaces_at_device_.end()) {
    999       VLOGF(1) << "Got invalid surface from device.";
   1000       NOTIFY_ERROR(PLATFORM_FAILURE);
   1001     }
   1002 
   1003     it->second->SetDecoded();
   1004     surfaces_at_device_.erase(it);
   1005   }
   1006 
   1007   // A frame was decoded, see if we can output it.
   1008   TryOutputSurfaces();
   1009 
   1010   ProcessPendingEventsIfNeeded();
   1011   ScheduleDecodeBufferTaskIfNeeded();
   1012 }
   1013 
// Called whenever a new pending event (flush, reset, surface set change) is
// registered; moves the decoder into event-processing mode and tries to
// process the event queue immediately.
void V4L2SliceVideoDecodeAccelerator::NewEventPending() {
  // Switch to event processing mode if we are decoding. Otherwise we are either
  // already in it, or we will potentially switch to it later, after finishing
  // other tasks.
  if (state_ == kDecoding)
    state_ = kIdle;

  ProcessPendingEventsIfNeeded();
}
   1023 
// Final step of the pending-event chain: leaves kIdle, resumes decoding and
// kicks off the next decode task. Always reports success (returns true).
bool V4L2SliceVideoDecodeAccelerator::FinishEventProcessing() {
  DCHECK_EQ(state_, kIdle);

  state_ = kDecoding;
  ScheduleDecodeBufferTaskIfNeeded();

  return true;
}
   1032 
   1033 void V4L2SliceVideoDecodeAccelerator::ProcessPendingEventsIfNeeded() {
   1034   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
   1035 
   1036   // Process pending events, if any, in the correct order.
   1037   // We always first process the surface set change, as it is an internal
   1038   // event from the decoder and interleaving it with external requests would
   1039   // put the decoder in an undefined state.
   1040   using ProcessFunc = bool (V4L2SliceVideoDecodeAccelerator::*)();
   1041   const ProcessFunc process_functions[] = {
   1042       &V4L2SliceVideoDecodeAccelerator::FinishSurfaceSetChange,
   1043       &V4L2SliceVideoDecodeAccelerator::FinishFlush,
   1044       &V4L2SliceVideoDecodeAccelerator::FinishReset,
   1045       &V4L2SliceVideoDecodeAccelerator::FinishEventProcessing,
   1046   };
   1047 
   1048   for (const auto& fn : process_functions) {
   1049     if (state_ != kIdle)
   1050       return;
   1051 
   1052     if (!(this->*fn)())
   1053       return;
   1054   }
   1055 }
   1056 
   1057 void V4L2SliceVideoDecodeAccelerator::ReuseInputBuffer(int index) {
   1058   DVLOGF(4) << "Reusing input buffer, index=" << index;
   1059   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
   1060 
   1061   DCHECK_LT(index, static_cast<int>(input_buffer_map_.size()));
   1062   InputRecord& input_record = input_buffer_map_[index];
   1063 
   1064   DCHECK(!input_record.at_device);
   1065   input_record.input_id = -1;
   1066   input_record.bytes_used = 0;
   1067 
   1068   DCHECK_EQ(
   1069       std::count(free_input_buffers_.begin(), free_input_buffers_.end(), index),
   1070       0);
   1071   free_input_buffers_.push_back(index);
   1072 }
   1073 
   1074 void V4L2SliceVideoDecodeAccelerator::ReuseOutputBuffer(int index) {
   1075   DVLOGF(4) << "Reusing output buffer, index=" << index;
   1076   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
   1077 
   1078   DCHECK_LT(index, static_cast<int>(output_buffer_map_.size()));
   1079   OutputRecord& output_record = output_buffer_map_[index];
   1080   DCHECK(!output_record.at_device);
   1081   DCHECK(!output_record.at_client);
   1082 
   1083   DCHECK_EQ(std::count(free_output_buffers_.begin(), free_output_buffers_.end(),
   1084                        index),
   1085             0);
   1086   free_output_buffers_.push_back(index);
   1087 
   1088   ScheduleDecodeBufferTaskIfNeeded();
   1089 }
   1090 
   1091 bool V4L2SliceVideoDecodeAccelerator::EnqueueInputRecord(
   1092     int index,
   1093     uint32_t config_store) {
   1094   DVLOGF(4);
   1095   DCHECK_LT(index, static_cast<int>(input_buffer_map_.size()));
   1096   DCHECK_GT(config_store, 0u);
   1097 
   1098   // Enqueue an input (VIDEO_OUTPUT) buffer for an input video frame.
   1099   InputRecord& input_record = input_buffer_map_[index];
   1100   DCHECK(!input_record.at_device);
   1101   struct v4l2_buffer qbuf;
   1102   struct v4l2_plane qbuf_planes[VIDEO_MAX_PLANES];
   1103   memset(&qbuf, 0, sizeof(qbuf));
   1104   memset(qbuf_planes, 0, sizeof(qbuf_planes));
   1105   qbuf.index = index;
   1106   qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
   1107   qbuf.memory = V4L2_MEMORY_MMAP;
   1108   qbuf.m.planes = qbuf_planes;
   1109   qbuf.m.planes[0].bytesused = input_record.bytes_used;
   1110   qbuf.length = input_planes_count_;
   1111   qbuf.config_store = config_store;
   1112   IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf);
   1113   input_record.at_device = true;
   1114   input_buffer_queued_count_++;
   1115   DVLOGF(4) << "Enqueued input=" << qbuf.index
   1116             << " count: " << input_buffer_queued_count_;
   1117 
   1118   return true;
   1119 }
   1120 
   1121 bool V4L2SliceVideoDecodeAccelerator::EnqueueOutputRecord(int index) {
   1122   DVLOGF(4);
   1123   DCHECK_LT(index, static_cast<int>(output_buffer_map_.size()));
   1124 
   1125   // Enqueue an output (VIDEO_CAPTURE) buffer.
   1126   OutputRecord& output_record = output_buffer_map_[index];
   1127   DCHECK(!output_record.at_device);
   1128   DCHECK(!output_record.at_client);
   1129   DCHECK_NE(output_record.picture_id, -1);
   1130 
   1131   struct v4l2_buffer qbuf;
   1132   struct v4l2_plane qbuf_planes[VIDEO_MAX_PLANES];
   1133   memset(&qbuf, 0, sizeof(qbuf));
   1134   memset(qbuf_planes, 0, sizeof(qbuf_planes));
   1135   qbuf.index = index;
   1136   qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
   1137   if (output_mode_ == Config::OutputMode::ALLOCATE) {
   1138     qbuf.memory = V4L2_MEMORY_MMAP;
   1139   } else {
   1140     qbuf.memory = V4L2_MEMORY_DMABUF;
   1141     DCHECK_EQ(output_planes_count_, output_record.dmabuf_fds.size());
   1142     for (size_t i = 0; i < output_record.dmabuf_fds.size(); ++i) {
   1143       DCHECK(output_record.dmabuf_fds[i].is_valid());
   1144       qbuf_planes[i].m.fd = output_record.dmabuf_fds[i].get();
   1145     }
   1146   }
   1147   qbuf.m.planes = qbuf_planes;
   1148   qbuf.length = output_planes_count_;
   1149   IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf);
   1150   output_record.at_device = true;
   1151   output_buffer_queued_count_++;
   1152   DVLOGF(4) << "Enqueued output=" << qbuf.index
   1153             << " count: " << output_buffer_queued_count_;
   1154 
   1155   return true;
   1156 }
   1157 
   1158 bool V4L2SliceVideoDecodeAccelerator::StartDevicePoll() {
   1159   DVLOGF(3) << "Starting device poll";
   1160   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
   1161   DCHECK(!device_poll_thread_.IsRunning());
   1162 
   1163   // Start up the device poll thread and schedule its first DevicePollTask().
   1164   if (!device_poll_thread_.Start()) {
   1165     VLOGF(1) << "Device thread failed to start";
   1166     NOTIFY_ERROR(PLATFORM_FAILURE);
   1167     return false;
   1168   }
   1169   if (!input_streamon_) {
   1170     __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
   1171     IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMON, &type);
   1172     input_streamon_ = true;
   1173   }
   1174 
   1175   if (!output_streamon_) {
   1176     __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
   1177     IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMON, &type);
   1178     output_streamon_ = true;
   1179   }
   1180 
   1181   device_poll_thread_.task_runner()->PostTask(
   1182       FROM_HERE, base::Bind(&V4L2SliceVideoDecodeAccelerator::DevicePollTask,
   1183                             base::Unretained(this), true));
   1184 
   1185   return true;
   1186 }
   1187 
// Stops the device poll thread and streaming. When |keep_input_state| is
// true, the input (VIDEO_OUTPUT) queue keeps streaming and its queued
// buffers are preserved, so decoding can continue across an output-only
// reconfiguration; the output (VIDEO_CAPTURE) queue is always stopped.
// Returns false on failure (which also raises NOTIFY_ERROR).
bool V4L2SliceVideoDecodeAccelerator::StopDevicePoll(bool keep_input_state) {
  DVLOGF(3) << "Stopping device poll";
  if (decoder_thread_.IsRunning())
    DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());

  // Signal the DevicePollTask() to stop, and stop the device poll thread.
  if (!device_->SetDevicePollInterrupt()) {
    VPLOGF(1) << "SetDevicePollInterrupt(): failed";
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return false;
  }
  device_poll_thread_.Stop();
  DVLOGF(3) << "Device poll thread stopped";

  // Clear the interrupt now, to be sure.
  if (!device_->ClearDevicePollInterrupt()) {
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return false;
  }

  // Stop the input queue only when not asked to preserve it.
  if (!keep_input_state) {
    if (input_streamon_) {
      __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
      IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMOFF, &type);
    }
    input_streamon_ = false;
  }

  // The output queue is stopped unconditionally.
  if (output_streamon_) {
    __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMOFF, &type);
  }
  output_streamon_ = false;

  if (!keep_input_state) {
    // STREAMOFF dropped all queued input buffers without dequeuing them;
    // reclaim them into the free list ourselves.
    for (size_t i = 0; i < input_buffer_map_.size(); ++i) {
      InputRecord& input_record = input_buffer_map_[i];
      if (input_record.at_device) {
        input_record.at_device = false;
        ReuseInputBuffer(i);
        input_buffer_queued_count_--;
      }
    }
    DCHECK_EQ(input_buffer_queued_count_, 0);
  }

  // STREAMOFF makes the driver drop all buffers without decoding and DQBUFing,
  // so we mark them all as at_device = false and clear surfaces_at_device_.
  for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
    OutputRecord& output_record = output_buffer_map_[i];
    if (output_record.at_device) {
      output_record.at_device = false;
      output_buffer_queued_count_--;
    }
  }
  surfaces_at_device_.clear();
  DCHECK_EQ(output_buffer_queued_count_, 0);

  // Drop all surfaces that were awaiting decode before being displayed,
  // since we've just cancelled all outstanding decodes.
  while (!decoder_display_queue_.empty())
    decoder_display_queue_.pop();

  DVLOGF(3) << "Device poll stopped";
  return true;
}
   1254 
// VideoDecodeAccelerator impl: client entry point for new bitstream buffers.
// Runs on the decode task runner; validates the id and forwards the buffer
// to the decoder thread.
void V4L2SliceVideoDecodeAccelerator::Decode(
    const BitstreamBuffer& bitstream_buffer) {
  DVLOGF(4) << "input_id=" << bitstream_buffer.id()
            << ", size=" << bitstream_buffer.size();
  DCHECK(decode_task_runner_->BelongsToCurrentThread());

  if (bitstream_buffer.id() < 0) {
    VLOGF(1) << "Invalid bitstream_buffer, id: " << bitstream_buffer.id();
    // We were handed the shm handle; close it so it does not leak on the
    // error path.
    if (base::SharedMemory::IsHandleValid(bitstream_buffer.handle()))
      base::SharedMemory::CloseHandle(bitstream_buffer.handle());
    NOTIFY_ERROR(INVALID_ARGUMENT);
    return;
  }

  decoder_thread_task_runner_->PostTask(
      FROM_HERE, base::Bind(&V4L2SliceVideoDecodeAccelerator::DecodeTask,
                            base::Unretained(this), bitstream_buffer));
}
   1273 
// Decoder-thread half of Decode(): wraps the bitstream buffer in a
// BitstreamBufferRef, maps it, and queues it for the decoder.
void V4L2SliceVideoDecodeAccelerator::DecodeTask(
    const BitstreamBuffer& bitstream_buffer) {
  DVLOGF(4) << "input_id=" << bitstream_buffer.id()
            << " size=" << bitstream_buffer.size();
  DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());

  // The ref wraps the shm region before any early return below — presumably
  // so the region/handle is cleaned up by the ref's destructor even for
  // skipped buffers; confirm before reordering.
  std::unique_ptr<BitstreamBufferRef> bitstream_record(new BitstreamBufferRef(
      decode_client_, decode_task_runner_,
      new SharedMemoryRegion(bitstream_buffer, true), bitstream_buffer.id()));

  // Skip empty buffer.
  if (bitstream_buffer.size() == 0)
    return;

  if (!bitstream_record->shm->Map()) {
    VLOGF(1) << "Could not map bitstream_buffer";
    NOTIFY_ERROR(UNREADABLE_INPUT);
    return;
  }
  DVLOGF(4) << "mapped at=" << bitstream_record->shm->memory();

  decoder_input_queue_.push(
      linked_ptr<BitstreamBufferRef>(bitstream_record.release()));

  ScheduleDecodeBufferTaskIfNeeded();
}
   1300 
   1301 bool V4L2SliceVideoDecodeAccelerator::TrySetNewBistreamBuffer() {
   1302   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
   1303   DCHECK(!decoder_current_bitstream_buffer_);
   1304 
   1305   if (decoder_input_queue_.empty())
   1306     return false;
   1307 
   1308   decoder_current_bitstream_buffer_.reset(
   1309       decoder_input_queue_.front().release());
   1310   decoder_input_queue_.pop();
   1311 
   1312   if (decoder_current_bitstream_buffer_->input_id == kFlushBufferId) {
   1313     // This is a buffer we queued for ourselves to trigger flush at this time.
   1314     InitiateFlush();
   1315     return false;
   1316   }
   1317 
   1318   const uint8_t* const data = reinterpret_cast<const uint8_t*>(
   1319       decoder_current_bitstream_buffer_->shm->memory());
   1320   const size_t data_size = decoder_current_bitstream_buffer_->shm->size();
   1321   decoder_->SetStream(data, data_size);
   1322 
   1323   return true;
   1324 }
   1325 
   1326 void V4L2SliceVideoDecodeAccelerator::ScheduleDecodeBufferTaskIfNeeded() {
   1327   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
   1328   if (state_ == kDecoding) {
   1329     decoder_thread_task_runner_->PostTask(
   1330         FROM_HERE,
   1331         base::Bind(&V4L2SliceVideoDecodeAccelerator::DecodeBufferTask,
   1332                    base::Unretained(this)));
   1333   }
   1334 }
   1335 
   1336 void V4L2SliceVideoDecodeAccelerator::DecodeBufferTask() {
   1337   DVLOGF(4);
   1338   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
   1339 
   1340   if (state_ != kDecoding) {
   1341     DVLOGF(3) << "Early exit, not in kDecoding";
   1342     return;
   1343   }
   1344 
   1345   while (true) {
   1346     AcceleratedVideoDecoder::DecodeResult res;
   1347     res = decoder_->Decode();
   1348     switch (res) {
   1349       case AcceleratedVideoDecoder::kAllocateNewSurfaces:
   1350         VLOGF(2) << "Decoder requesting a new set of surfaces";
   1351         InitiateSurfaceSetChange();
   1352         return;
   1353 
   1354       case AcceleratedVideoDecoder::kRanOutOfStreamData:
   1355         decoder_current_bitstream_buffer_.reset();
   1356         if (!TrySetNewBistreamBuffer())
   1357           return;
   1358 
   1359         break;
   1360 
   1361       case AcceleratedVideoDecoder::kRanOutOfSurfaces:
   1362         // No more surfaces for the decoder, we'll come back once we have more.
   1363         DVLOGF(4) << "Ran out of surfaces";
   1364         return;
   1365 
   1366       case AcceleratedVideoDecoder::kNeedContextUpdate:
   1367         DVLOGF(4) << "Awaiting context update";
   1368         return;
   1369 
   1370       case AcceleratedVideoDecoder::kDecodeError:
   1371         VLOGF(1) << "Error decoding stream";
   1372         NOTIFY_ERROR(PLATFORM_FAILURE);
   1373         return;
   1374     }
   1375   }
   1376 }
   1377 
// Starts a surface (output buffer) set change, requested by the decoder
// (e.g. on a resolution change). The heavy lifting happens later in
// FinishSurfaceSetChange(), once the device has returned all surfaces.
void V4L2SliceVideoDecodeAccelerator::InitiateSurfaceSetChange() {
  VLOGF(2);
  DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
  DCHECK_EQ(state_, kDecoding);

  DCHECK(!surface_set_change_pending_);
  surface_set_change_pending_ = true;
  NewEventPending();
}
   1387 
// Completes a pending surface set change: once the device holds no more
// surfaces, stops the output queue (keeping input), flushes pending ready
// pictures, dismisses/destroys the old output buffers and creates new ones.
// Returns true when finished (or nothing pending); false to retry later or
// on error (errors also raise NOTIFY_ERROR).
bool V4L2SliceVideoDecodeAccelerator::FinishSurfaceSetChange() {
  VLOGF(2);
  DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());

  if (!surface_set_change_pending_)
    return true;

  // Not ready yet: the device still owns surfaces; retry on a later pass.
  if (!surfaces_at_device_.empty())
    return false;

  DCHECK_EQ(state_, kIdle);
  DCHECK(decoder_display_queue_.empty());
  // All output buffers should've been returned from decoder and device by now.
  // The only remaining owner of surfaces may be display (client), and we will
  // dismiss them when destroying output buffers below.
  DCHECK_EQ(free_output_buffers_.size() + surfaces_at_display_.size(),
            output_buffer_map_.size());

  // Keep input queue running while we switch outputs.
  if (!StopDevicePoll(true)) {
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return false;
  }

  // Dequeued decoded surfaces may be pended in pending_picture_ready_ if they
  // are waiting for some pictures to be cleared. We should post them right away
  // because they are about to be dismissed and destroyed for surface set
  // change.
  SendPictureReady();

  // This will return only once all buffers are dismissed and destroyed.
  // This does not wait until they are displayed however, as display retains
  // references to the buffers bound to textures and will release them
  // after displaying.
  if (!DestroyOutputs(true)) {
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return false;
  }

  if (!CreateOutputBuffers()) {
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return false;
  }

  surface_set_change_pending_ = false;
  VLOGF(2) << "Surface set change finished";
  return true;
}
   1436 
// Dismisses (when |dismiss| is set) and destroys all output picture buffers.
// Dismissal runs on the child thread; the decoder thread blocks on a
// WaitableEvent until it completes, so afterwards the client can no longer
// call ReusePictureBuffer() for these pictures.
bool V4L2SliceVideoDecodeAccelerator::DestroyOutputs(bool dismiss) {
  VLOGF(2);
  DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
  std::vector<int32_t> picture_buffers_to_dismiss;

  if (output_buffer_map_.empty())
    return true;

  // Collect all picture ids; no buffer may still be owned by the device.
  for (const auto& output_record : output_buffer_map_) {
    DCHECK(!output_record.at_device);
    picture_buffers_to_dismiss.push_back(output_record.picture_id);
  }

  if (dismiss) {
    VLOGF(2) << "Scheduling picture dismissal";
    base::WaitableEvent done(base::WaitableEvent::ResetPolicy::AUTOMATIC,
                             base::WaitableEvent::InitialState::NOT_SIGNALED);
    child_task_runner_->PostTask(
        FROM_HERE, base::Bind(&V4L2SliceVideoDecodeAccelerator::DismissPictures,
                              weak_this_, picture_buffers_to_dismiss, &done));
    // Block until DismissPictures() has signalled from the child thread.
    done.Wait();
  }

  // At this point client can't call ReusePictureBuffer on any of the pictures
  // anymore, so it's safe to destroy.
  return DestroyOutputBuffers();
}
   1464 
// Releases all output-buffer state on our side and frees the driver-side
// buffers (REQBUFS with count = 0). Surfaces still held by the display are
// forcibly reclaimed — the caller has already dismissed them.
bool V4L2SliceVideoDecodeAccelerator::DestroyOutputBuffers() {
  VLOGF(2);
  DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread() ||
         !decoder_thread_.IsRunning());
  DCHECK(!output_streamon_);
  DCHECK(surfaces_at_device_.empty());
  DCHECK(decoder_display_queue_.empty());
  DCHECK_EQ(surfaces_at_display_.size() + free_output_buffers_.size(),
            output_buffer_map_.size());

  if (output_buffer_map_.empty())
    return true;

  // It's ok to do this, client will retain references to textures, but we are
  // not interested in reusing the surfaces anymore.
  // This will prevent us from reusing old surfaces in case we have some
  // ReusePictureBuffer() pending on ChildThread already. It's ok to ignore
  // them, because we have already dismissed them (in DestroyOutputs()).
  for (const auto& surface_at_display : surfaces_at_display_) {
    size_t index = surface_at_display.second->output_record();
    DCHECK_LT(index, output_buffer_map_.size());
    OutputRecord& output_record = output_buffer_map_[index];
    DCHECK(output_record.at_client);
    output_record.at_client = false;
  }
  surfaces_at_display_.clear();
  DCHECK_EQ(free_output_buffers_.size(), output_buffer_map_.size());

  free_output_buffers_.clear();
  output_buffer_map_.clear();

  // Ask the driver to release its buffers by requesting a count of zero.
  struct v4l2_requestbuffers reqbufs;
  memset(&reqbufs, 0, sizeof(reqbufs));
  reqbufs.count = 0;
  reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  reqbufs.memory = V4L2_MEMORY_MMAP;
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs);

  return true;
}
   1505 
// VideoDecodeAccelerator impl: receives the picture buffers the client
// allocated in response to ProvidePictureBuffers(). Runs on the child
// thread and re-posts the actual work to the decoder thread.
void V4L2SliceVideoDecodeAccelerator::AssignPictureBuffers(
    const std::vector<PictureBuffer>& buffers) {
  VLOGF(2);
  DCHECK(child_task_runner_->BelongsToCurrentThread());

  decoder_thread_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(&V4L2SliceVideoDecodeAccelerator::AssignPictureBuffersTask,
                 base::Unretained(this), buffers));
}
   1516 
// Decoder-thread half of AssignPictureBuffers(): requests V4L2 CAPTURE
// buffers matching the client-provided PictureBuffers and initializes the
// per-buffer bookkeeping in |output_buffer_map_|.
void V4L2SliceVideoDecodeAccelerator::AssignPictureBuffersTask(
    const std::vector<PictureBuffer>& buffers) {
  VLOGF(2);
  DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
  DCHECK_EQ(state_, kAwaitingPictureBuffers);

  const uint32_t req_buffer_count = decoder_->GetRequiredNumOfPictures();

  // The client must provide at least as many buffers as the codec-level
  // decoder requires; fewer would stall decoding.
  if (buffers.size() < req_buffer_count) {
    VLOGF(1) << "Failed to provide requested picture buffers. "
             << "(Got " << buffers.size() << ", requested " << req_buffer_count
             << ")";
    NOTIFY_ERROR(INVALID_ARGUMENT);
    return;
  }

  // Allocate the output buffers.
  // MMAP when we allocate backing memory ourselves, DMABUF when the client
  // will import its own buffers.
  struct v4l2_requestbuffers reqbufs;
  memset(&reqbufs, 0, sizeof(reqbufs));
  reqbufs.count = buffers.size();
  reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  reqbufs.memory =
      (output_mode_ == Config::OutputMode::ALLOCATE ? V4L2_MEMORY_MMAP
                                                    : V4L2_MEMORY_DMABUF);
  IOCTL_OR_ERROR_RETURN(VIDIOC_REQBUFS, &reqbufs);

  // The driver is allowed to return fewer buffers than requested; we cannot
  // continue in that case since the client was told how many to provide.
  if (reqbufs.count != buffers.size()) {
    VLOGF(1) << "Could not allocate enough output buffers";
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return;
  }

  DCHECK(free_output_buffers_.empty());
  DCHECK(output_buffer_map_.empty());
  output_buffer_map_.resize(buffers.size());
  for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
    DCHECK(buffers[i].size() == coded_size_);

    OutputRecord& output_record = output_buffer_map_[i];
    DCHECK(!output_record.at_device);
    DCHECK(!output_record.at_client);
    DCHECK_EQ(output_record.picture_id, -1);
    DCHECK(output_record.dmabuf_fds.empty());
    DCHECK_EQ(output_record.cleared, false);

    output_record.picture_id = buffers[i].id();

    // This will remain true until ImportBufferForPicture is called, either by
    // the client, or by ourselves, if we are allocating.
    output_record.at_client = true;
    if (output_mode_ == Config::OutputMode::ALLOCATE) {
      // In ALLOCATE mode, export dmabufs for our own MMAP buffers and
      // immediately run them through the same import path the client would
      // use, so both modes share one code path.
      std::vector<base::ScopedFD> dmabuf_fds = device_->GetDmabufsForV4L2Buffer(
          i, output_planes_count_, V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE);
      if (dmabuf_fds.empty()) {
        NOTIFY_ERROR(PLATFORM_FAILURE);
        return;
      }

      auto passed_dmabuf_fds(base::WrapUnique(
          new std::vector<base::ScopedFD>(std::move(dmabuf_fds))));
      ImportBufferForPictureTask(output_record.picture_id,
                                 std::move(passed_dmabuf_fds));
    }  // else we'll get triggered via ImportBufferForPicture() from client.
    DVLOGF(3) << "buffer[" << i << "]: picture_id=" << output_record.picture_id;
  }

  if (!StartDevicePoll()) {
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return;
  }

  // Put us in kIdle to allow further event processing.
  // ProcessPendingEventsIfNeeded() will put us back into kDecoding after all
  // other pending events are processed successfully.
  state_ = kIdle;
  ProcessPendingEventsIfNeeded();
}
   1594 
// Client-thread entry point for attaching external dmabuf-backed storage to
// the picture buffer |picture_buffer_id| (IMPORT output mode only). The
// actual attach happens on the decoder thread in ImportBufferForPictureTask().
void V4L2SliceVideoDecodeAccelerator::ImportBufferForPicture(
    int32_t picture_buffer_id,
    VideoPixelFormat pixel_format,
    const NativePixmapHandle& native_pixmap_handle) {
  DVLOGF(3) << "picture_buffer_id=" << picture_buffer_id;
  DCHECK(child_task_runner_->BelongsToCurrentThread());

  // Wrap the incoming fds in ScopedFDs before any validation, so they get
  // closed on every path including the error returns below.
  auto passed_dmabuf_fds(base::WrapUnique(new std::vector<base::ScopedFD>()));
  for (const auto& fd : native_pixmap_handle.fds) {
    DCHECK_NE(fd.fd, -1);
    passed_dmabuf_fds->push_back(base::ScopedFD(fd.fd));
  }

  if (output_mode_ != Config::OutputMode::IMPORT) {
    VLOGF(1) << "Cannot import in non-import mode";
    NOTIFY_ERROR(INVALID_ARGUMENT);
    return;
  }

  // Imported buffers must match the output format negotiated with the driver.
  if (pixel_format !=
      V4L2Device::V4L2PixFmtToVideoPixelFormat(output_format_fourcc_)) {
    VLOGF(1) << "Unsupported import format: " << pixel_format;
    NOTIFY_ERROR(INVALID_ARGUMENT);
    return;
  }

  decoder_thread_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(&V4L2SliceVideoDecodeAccelerator::ImportBufferForPictureTask,
                 base::Unretained(this), picture_buffer_id,
                 base::Passed(&passed_dmabuf_fds)));
}
   1627 
// Decoder-thread half of ImportBufferForPicture(): attaches
// |passed_dmabuf_fds| to the output record for |picture_buffer_id| and makes
// the buffer available for decoding.
void V4L2SliceVideoDecodeAccelerator::ImportBufferForPictureTask(
    int32_t picture_buffer_id,
    std::unique_ptr<std::vector<base::ScopedFD>> passed_dmabuf_fds) {
  DVLOGF(3) << "picture_buffer_id=" << picture_buffer_id;
  DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());

  // Locate the output record whose picture id matches.
  const auto iter =
      std::find_if(output_buffer_map_.begin(), output_buffer_map_.end(),
                   [picture_buffer_id](const OutputRecord& output_record) {
                     return output_record.picture_id == picture_buffer_id;
                   });
  if (iter == output_buffer_map_.end()) {
    // It's possible that we've already posted a DismissPictureBuffer for this
    // picture, but it has not yet executed when this ImportBufferForPicture was
    // posted to us by the client. In that case just ignore this (we've already
    // dismissed it and accounted for that).
    DVLOGF(3) << "got picture id=" << picture_buffer_id
              << " not in use (anymore?).";
    return;
  }

  if (!iter->at_client) {
    VLOGF(1) << "Cannot import buffer that not owned by client";
    NOTIFY_ERROR(INVALID_ARGUMENT);
    return;
  }

  size_t index = iter - output_buffer_map_.begin();
  // A buffer owned by the client must not already be on the free list.
  DCHECK_EQ(std::count(free_output_buffers_.begin(), free_output_buffers_.end(),
                       index),
            0);

  DCHECK(!iter->at_device);
  iter->at_client = false;

  // Take ownership of the dmabuf fds, mark the buffer free, and kick the
  // decode loop so it can pick the buffer up.
  DCHECK_EQ(output_planes_count_, passed_dmabuf_fds->size());
  iter->dmabuf_fds.swap(*passed_dmabuf_fds);
  free_output_buffers_.push_back(index);
  ScheduleDecodeBufferTaskIfNeeded();
}
   1668 
   1669 void V4L2SliceVideoDecodeAccelerator::ReusePictureBuffer(
   1670     int32_t picture_buffer_id) {
   1671   DCHECK(child_task_runner_->BelongsToCurrentThread());
   1672   DVLOGF(4) << "picture_buffer_id=" << picture_buffer_id;
   1673 
   1674   decoder_thread_task_runner_->PostTask(
   1675       FROM_HERE,
   1676       base::Bind(&V4L2SliceVideoDecodeAccelerator::ReusePictureBufferTask,
   1677                  base::Unretained(this), picture_buffer_id));
   1678 }
   1679 
// Decoder-thread half of ReusePictureBuffer(): drops the display reference
// held for |picture_buffer_id| so the underlying output buffer can be
// recycled once all other references to its surface are released.
void V4L2SliceVideoDecodeAccelerator::ReusePictureBufferTask(
    int32_t picture_buffer_id) {
  DVLOGF(4) << "picture_buffer_id=" << picture_buffer_id;
  DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());

  V4L2DecodeSurfaceByPictureBufferId::iterator it =
      surfaces_at_display_.find(picture_buffer_id);
  if (it == surfaces_at_display_.end()) {
    // It's possible that we've already posted a DismissPictureBuffer for this
    // picture, but it has not yet executed when this ReusePictureBuffer was
    // posted to us by the client. In that case just ignore this (we've already
    // dismissed it and accounted for that) and let the sync object get
    // destroyed.
    DVLOGF(3) << "got picture id=" << picture_buffer_id
              << " not in use (anymore?).";
    return;
  }

  // A buffer the client hands back must currently be owned by the client and
  // must not be queued in the device.
  OutputRecord& output_record = output_buffer_map_[it->second->output_record()];
  if (output_record.at_device || !output_record.at_client) {
    VLOGF(1) << "picture_buffer_id not reusable";
    NOTIFY_ERROR(INVALID_ARGUMENT);
    return;
  }

  // NOTE(review): redundant with the at_device check above; kept as a
  // belt-and-braces invariant.
  DCHECK(!output_record.at_device);
  output_record.at_client = false;

  // Dropping the map entry releases our reference to the decode surface.
  surfaces_at_display_.erase(it);
}
   1710 
   1711 void V4L2SliceVideoDecodeAccelerator::Flush() {
   1712   VLOGF(2);
   1713   DCHECK(child_task_runner_->BelongsToCurrentThread());
   1714 
   1715   decoder_thread_task_runner_->PostTask(
   1716       FROM_HERE, base::Bind(&V4L2SliceVideoDecodeAccelerator::FlushTask,
   1717                             base::Unretained(this)));
   1718 }
   1719 
// Decoder-thread half of Flush(): enqueues a marker (kFlushBufferId) so the
// decode loop starts the flush sequence when it reaches it, after all
// previously queued inputs.
void V4L2SliceVideoDecodeAccelerator::FlushTask() {
  VLOGF(2);
  DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());

  // Queue an empty buffer which - when reached - will trigger flush sequence.
  decoder_input_queue_.push(
      linked_ptr<BitstreamBufferRef>(new BitstreamBufferRef(
          decode_client_, decode_task_runner_, nullptr, kFlushBufferId)));

  ScheduleDecodeBufferTaskIfNeeded();
}
   1731 
// Starts the flush sequence once the decode loop reaches the flush marker:
// asks the codec-level decoder to output everything it holds, then marks the
// flush as pending so FinishFlush() can complete it when the HW drains.
void V4L2SliceVideoDecodeAccelerator::InitiateFlush() {
  VLOGF(2);
  DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());

  // This will trigger output for all remaining surfaces in the decoder.
  // However, not all of them may be decoded yet (they would be queued
  // in hardware then).
  if (!decoder_->Flush()) {
    DVLOGF(1) << "Failed flushing the decoder.";
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return;
  }

  // Put the decoder in an idle state, ready to resume.
  decoder_->Reset();

  DCHECK(!decoder_flushing_);
  decoder_flushing_ = true;
  NewEventPending();
}
   1752 
// Completes a pending flush if possible. Returns true when there is no flush
// in progress (or it just finished); false when surfaces are still queued in
// the device and the caller should retry later.
bool V4L2SliceVideoDecodeAccelerator::FinishFlush() {
  VLOGF(4);
  DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());

  if (!decoder_flushing_)
    return true;

  // Cannot finish while the hardware still holds surfaces.
  if (!surfaces_at_device_.empty())
    return false;

  DCHECK_EQ(state_, kIdle);

  // At this point, all remaining surfaces are decoded and dequeued, and since
  // we have already scheduled output for them in InitiateFlush(), their
  // respective PictureReady calls have been posted (or they have been queued on
  // pending_picture_ready_). So at this time, once we SendPictureReady(),
  // we will have all remaining PictureReady() posted to the client and we
  // can post NotifyFlushDone().
  DCHECK(decoder_display_queue_.empty());

  // Decoder should have already returned all surfaces and all surfaces are
  // out of hardware. There can be no other owners of input buffers.
  DCHECK_EQ(free_input_buffers_.size(), input_buffer_map_.size());

  SendPictureReady();

  decoder_flushing_ = false;
  VLOGF(2) << "Flush finished";

  // NotifyFlushDone() must run on the child (client) thread.
  child_task_runner_->PostTask(FROM_HERE,
                               base::Bind(&Client::NotifyFlushDone, client_));

  return true;
}
   1787 
   1788 void V4L2SliceVideoDecodeAccelerator::Reset() {
   1789   VLOGF(2);
   1790   DCHECK(child_task_runner_->BelongsToCurrentThread());
   1791 
   1792   decoder_thread_task_runner_->PostTask(
   1793       FROM_HERE, base::Bind(&V4L2SliceVideoDecodeAccelerator::ResetTask,
   1794                             base::Unretained(this)));
   1795 }
   1796 
// Decoder-thread half of Reset(): drops all queued inputs and marks the reset
// as pending; FinishReset() completes it once the hardware has drained.
void V4L2SliceVideoDecodeAccelerator::ResetTask() {
  VLOGF(2);
  DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());

  if (decoder_resetting_) {
    // This is a bug in the client, multiple Reset()s before NotifyResetDone()
    // are not allowed.
    NOTREACHED() << "Client should not be requesting multiple Reset()s";
    return;
  }

  // Put the decoder in an idle state, ready to resume.
  decoder_->Reset();

  // Drop all remaining inputs.
  decoder_current_bitstream_buffer_.reset();
  while (!decoder_input_queue_.empty())
    decoder_input_queue_.pop();

  decoder_resetting_ = true;
  NewEventPending();
}
   1819 
// Completes a pending reset if possible. Returns true when there is no reset
// in progress (or it just finished); false when surfaces are still queued in
// the device and the caller should retry later.
bool V4L2SliceVideoDecodeAccelerator::FinishReset() {
  VLOGF(4);
  DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());

  if (!decoder_resetting_)
    return true;

  // Cannot finish while the hardware still holds surfaces.
  if (!surfaces_at_device_.empty())
    return false;

  DCHECK_EQ(state_, kIdle);
  DCHECK(!decoder_flushing_);
  SendPictureReady();

  // Drop any pending outputs.
  while (!decoder_display_queue_.empty())
    decoder_display_queue_.pop();

  // At this point we can have no input buffers in the decoder, because we
  // Reset()ed it in ResetTask(), and have not scheduled any new Decode()s
  // having been in kIdle since. We don't have any surfaces in the HW either -
  // we just checked that surfaces_at_device_.empty(), and inputs are tied
  // to surfaces. Since there can be no other owners of input buffers, we can
  // simply mark them all as available.
  DCHECK_EQ(input_buffer_queued_count_, 0);
  free_input_buffers_.clear();
  for (size_t i = 0; i < input_buffer_map_.size(); ++i) {
    DCHECK(!input_buffer_map_[i].at_device);
    ReuseInputBuffer(i);
  }

  decoder_resetting_ = false;
  VLOGF(2) << "Reset finished";

  // NotifyResetDone() must run on the child (client) thread.
  child_task_runner_->PostTask(FROM_HERE,
                               base::Bind(&Client::NotifyResetDone, client_));

  return true;
}
   1859 
// Puts the decoder into the error state and notifies the client. Safe to call
// from any thread: if the decoder thread is running and this is not it, the
// call re-posts itself there so |state_| is only touched on one thread.
void V4L2SliceVideoDecodeAccelerator::SetErrorState(Error error) {
  // We can touch decoder_state_ only if this is the decoder thread or the
  // decoder thread isn't running.
  if (decoder_thread_.IsRunning() &&
      !decoder_thread_task_runner_->BelongsToCurrentThread()) {
    decoder_thread_task_runner_->PostTask(
        FROM_HERE, base::Bind(&V4L2SliceVideoDecodeAccelerator::SetErrorState,
                              base::Unretained(this), error));
    return;
  }

  // Post NotifyError only if we are already initialized, as the API does
  // not allow doing so before that.
  if (state_ != kError && state_ != kUninitialized)
    NotifyError(error);

  state_ = kError;
}
   1878 
// |v4l2_dec| is the owning decoder that performs the actual V4L2 work on our
// behalf; it must be non-null and outlive this accelerator.
V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::V4L2H264Accelerator(
    V4L2SliceVideoDecodeAccelerator* v4l2_dec)
    : num_slices_(0), v4l2_dec_(v4l2_dec) {
  DCHECK(v4l2_dec_);
}
   1884 
// Nothing to release: |v4l2_dec_| is not owned.
V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::~V4L2H264Accelerator() {}
   1886 
   1887 scoped_refptr<H264Picture>
   1888 V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::CreateH264Picture() {
   1889   scoped_refptr<V4L2DecodeSurface> dec_surface = v4l2_dec_->CreateSurface();
   1890   if (!dec_surface)
   1891     return nullptr;
   1892 
   1893   return new V4L2H264Picture(dec_surface);
   1894 }
   1895 
   1896 void V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::
   1897     H264PictureListToDPBIndicesList(const H264Picture::Vector& src_pic_list,
   1898                                     uint8_t dst_list[kDPBIndicesListSize]) {
   1899   size_t i;
   1900   for (i = 0; i < src_pic_list.size() && i < kDPBIndicesListSize; ++i) {
   1901     const scoped_refptr<H264Picture>& pic = src_pic_list[i];
   1902     dst_list[i] = pic ? pic->dpb_position : VIDEO_MAX_FRAME;
   1903   }
   1904 
   1905   while (i < kDPBIndicesListSize)
   1906     dst_list[i++] = VIDEO_MAX_FRAME;
   1907 }
   1908 
// Fills the V4L2 decode parameters' DPB array from |dpb| and collects the
// V4L2 surfaces backing existing reference pictures into |ref_surfaces|.
void V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::H264DPBToV4L2DPB(
    const H264DPB& dpb,
    std::vector<scoped_refptr<V4L2DecodeSurface>>* ref_surfaces) {
  memset(v4l2_decode_param_.dpb, 0, sizeof(v4l2_decode_param_.dpb));
  size_t i = 0;
  for (const auto& pic : dpb) {
    if (i >= arraysize(v4l2_decode_param_.dpb)) {
      VLOGF(1) << "Invalid DPB size";
      break;
    }

    // Nonexisting pictures have no backing surface; mark their entry with
    // VIDEO_MAX_FRAME so the driver ignores the buf_index.
    int index = VIDEO_MAX_FRAME;
    if (!pic->nonexisting) {
      scoped_refptr<V4L2DecodeSurface> dec_surface =
          H264PictureToV4L2DecodeSurface(pic);
      index = dec_surface->output_record();
      ref_surfaces->push_back(dec_surface);
    }

    struct v4l2_h264_dpb_entry& entry = v4l2_decode_param_.dpb[i++];
    entry.buf_index = index;
    entry.frame_num = pic->frame_num;
    entry.pic_num = pic->pic_num;
    entry.top_field_order_cnt = pic->top_field_order_cnt;
    entry.bottom_field_order_cnt = pic->bottom_field_order_cnt;
    entry.flags = (pic->ref ? V4L2_H264_DPB_ENTRY_FLAG_ACTIVE : 0) |
                  (pic->long_term ? V4L2_H264_DPB_ENTRY_FLAG_LONG_TERM : 0);
  }
}
   1938 
   1939 bool V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::SubmitFrameMetadata(
   1940     const H264SPS* sps,
   1941     const H264PPS* pps,
   1942     const H264DPB& dpb,
   1943     const H264Picture::Vector& ref_pic_listp0,
   1944     const H264Picture::Vector& ref_pic_listb0,
   1945     const H264Picture::Vector& ref_pic_listb1,
   1946     const scoped_refptr<H264Picture>& pic) {
   1947   struct v4l2_ext_control ctrl;
   1948   std::vector<struct v4l2_ext_control> ctrls;
   1949 
   1950   struct v4l2_ctrl_h264_sps v4l2_sps;
   1951   memset(&v4l2_sps, 0, sizeof(v4l2_sps));
   1952   v4l2_sps.constraint_set_flags =
   1953       (sps->constraint_set0_flag ? V4L2_H264_SPS_CONSTRAINT_SET0_FLAG : 0) |
   1954       (sps->constraint_set1_flag ? V4L2_H264_SPS_CONSTRAINT_SET1_FLAG : 0) |
   1955       (sps->constraint_set2_flag ? V4L2_H264_SPS_CONSTRAINT_SET2_FLAG : 0) |
   1956       (sps->constraint_set3_flag ? V4L2_H264_SPS_CONSTRAINT_SET3_FLAG : 0) |
   1957       (sps->constraint_set4_flag ? V4L2_H264_SPS_CONSTRAINT_SET4_FLAG : 0) |
   1958       (sps->constraint_set5_flag ? V4L2_H264_SPS_CONSTRAINT_SET5_FLAG : 0);
   1959 #define SPS_TO_V4L2SPS(a) v4l2_sps.a = sps->a
   1960   SPS_TO_V4L2SPS(profile_idc);
   1961   SPS_TO_V4L2SPS(level_idc);
   1962   SPS_TO_V4L2SPS(seq_parameter_set_id);
   1963   SPS_TO_V4L2SPS(chroma_format_idc);
   1964   SPS_TO_V4L2SPS(bit_depth_luma_minus8);
   1965   SPS_TO_V4L2SPS(bit_depth_chroma_minus8);
   1966   SPS_TO_V4L2SPS(log2_max_frame_num_minus4);
   1967   SPS_TO_V4L2SPS(pic_order_cnt_type);
   1968   SPS_TO_V4L2SPS(log2_max_pic_order_cnt_lsb_minus4);
   1969   SPS_TO_V4L2SPS(offset_for_non_ref_pic);
   1970   SPS_TO_V4L2SPS(offset_for_top_to_bottom_field);
   1971   SPS_TO_V4L2SPS(num_ref_frames_in_pic_order_cnt_cycle);
   1972 
   1973   static_assert(arraysize(v4l2_sps.offset_for_ref_frame) ==
   1974                     arraysize(sps->offset_for_ref_frame),
   1975                 "offset_for_ref_frame arrays must be same size");
   1976   for (size_t i = 0; i < arraysize(v4l2_sps.offset_for_ref_frame); ++i)
   1977     v4l2_sps.offset_for_ref_frame[i] = sps->offset_for_ref_frame[i];
   1978   SPS_TO_V4L2SPS(max_num_ref_frames);
   1979   SPS_TO_V4L2SPS(pic_width_in_mbs_minus1);
   1980   SPS_TO_V4L2SPS(pic_height_in_map_units_minus1);
   1981 #undef SPS_TO_V4L2SPS
   1982 
   1983 #define SET_V4L2_SPS_FLAG_IF(cond, flag) \
   1984   v4l2_sps.flags |= ((sps->cond) ? (flag) : 0)
   1985   SET_V4L2_SPS_FLAG_IF(separate_colour_plane_flag,
   1986                        V4L2_H264_SPS_FLAG_SEPARATE_COLOUR_PLANE);
   1987   SET_V4L2_SPS_FLAG_IF(qpprime_y_zero_transform_bypass_flag,
   1988                        V4L2_H264_SPS_FLAG_QPPRIME_Y_ZERO_TRANSFORM_BYPASS);
   1989   SET_V4L2_SPS_FLAG_IF(delta_pic_order_always_zero_flag,
   1990                        V4L2_H264_SPS_FLAG_DELTA_PIC_ORDER_ALWAYS_ZERO);
   1991   SET_V4L2_SPS_FLAG_IF(gaps_in_frame_num_value_allowed_flag,
   1992                        V4L2_H264_SPS_FLAG_GAPS_IN_FRAME_NUM_VALUE_ALLOWED);
   1993   SET_V4L2_SPS_FLAG_IF(frame_mbs_only_flag, V4L2_H264_SPS_FLAG_FRAME_MBS_ONLY);
   1994   SET_V4L2_SPS_FLAG_IF(mb_adaptive_frame_field_flag,
   1995                        V4L2_H264_SPS_FLAG_MB_ADAPTIVE_FRAME_FIELD);
   1996   SET_V4L2_SPS_FLAG_IF(direct_8x8_inference_flag,
   1997                        V4L2_H264_SPS_FLAG_DIRECT_8X8_INFERENCE);
   1998 #undef SET_V4L2_SPS_FLAG_IF
   1999   memset(&ctrl, 0, sizeof(ctrl));
   2000   ctrl.id = V4L2_CID_MPEG_VIDEO_H264_SPS;
   2001   ctrl.size = sizeof(v4l2_sps);
   2002   ctrl.p_h264_sps = &v4l2_sps;
   2003   ctrls.push_back(ctrl);
   2004 
   2005   struct v4l2_ctrl_h264_pps v4l2_pps;
   2006   memset(&v4l2_pps, 0, sizeof(v4l2_pps));
   2007 #define PPS_TO_V4L2PPS(a) v4l2_pps.a = pps->a
   2008   PPS_TO_V4L2PPS(pic_parameter_set_id);
   2009   PPS_TO_V4L2PPS(seq_parameter_set_id);
   2010   PPS_TO_V4L2PPS(num_slice_groups_minus1);
   2011   PPS_TO_V4L2PPS(num_ref_idx_l0_default_active_minus1);
   2012   PPS_TO_V4L2PPS(num_ref_idx_l1_default_active_minus1);
   2013   PPS_TO_V4L2PPS(weighted_bipred_idc);
   2014   PPS_TO_V4L2PPS(pic_init_qp_minus26);
   2015   PPS_TO_V4L2PPS(pic_init_qs_minus26);
   2016   PPS_TO_V4L2PPS(chroma_qp_index_offset);
   2017   PPS_TO_V4L2PPS(second_chroma_qp_index_offset);
   2018 #undef PPS_TO_V4L2PPS
   2019 
   2020 #define SET_V4L2_PPS_FLAG_IF(cond, flag) \
   2021   v4l2_pps.flags |= ((pps->cond) ? (flag) : 0)
   2022   SET_V4L2_PPS_FLAG_IF(entropy_coding_mode_flag,
   2023                        V4L2_H264_PPS_FLAG_ENTROPY_CODING_MODE);
   2024   SET_V4L2_PPS_FLAG_IF(
   2025       bottom_field_pic_order_in_frame_present_flag,
   2026       V4L2_H264_PPS_FLAG_BOTTOM_FIELD_PIC_ORDER_IN_FRAME_PRESENT);
   2027   SET_V4L2_PPS_FLAG_IF(weighted_pred_flag, V4L2_H264_PPS_FLAG_WEIGHTED_PRED);
   2028   SET_V4L2_PPS_FLAG_IF(deblocking_filter_control_present_flag,
   2029                        V4L2_H264_PPS_FLAG_DEBLOCKING_FILTER_CONTROL_PRESENT);
   2030   SET_V4L2_PPS_FLAG_IF(constrained_intra_pred_flag,
   2031                        V4L2_H264_PPS_FLAG_CONSTRAINED_INTRA_PRED);
   2032   SET_V4L2_PPS_FLAG_IF(redundant_pic_cnt_present_flag,
   2033                        V4L2_H264_PPS_FLAG_REDUNDANT_PIC_CNT_PRESENT);
   2034   SET_V4L2_PPS_FLAG_IF(transform_8x8_mode_flag,
   2035                        V4L2_H264_PPS_FLAG_TRANSFORM_8X8_MODE);
   2036   SET_V4L2_PPS_FLAG_IF(pic_scaling_matrix_present_flag,
   2037                        V4L2_H264_PPS_FLAG_PIC_SCALING_MATRIX_PRESENT);
   2038 #undef SET_V4L2_PPS_FLAG_IF
   2039   memset(&ctrl, 0, sizeof(ctrl));
   2040   ctrl.id = V4L2_CID_MPEG_VIDEO_H264_PPS;
   2041   ctrl.size = sizeof(v4l2_pps);
   2042   ctrl.p_h264_pps = &v4l2_pps;
   2043   ctrls.push_back(ctrl);
   2044 
   2045   struct v4l2_ctrl_h264_scaling_matrix v4l2_scaling_matrix;
   2046   memset(&v4l2_scaling_matrix, 0, sizeof(v4l2_scaling_matrix));
   2047 
   2048   static_assert(arraysize(v4l2_scaling_matrix.scaling_list_4x4) <=
   2049                         arraysize(pps->scaling_list4x4) &&
   2050                     arraysize(v4l2_scaling_matrix.scaling_list_4x4[0]) <=
   2051                         arraysize(pps->scaling_list4x4[0]) &&
   2052                     arraysize(v4l2_scaling_matrix.scaling_list_8x8) <=
   2053                         arraysize(pps->scaling_list8x8) &&
   2054                     arraysize(v4l2_scaling_matrix.scaling_list_8x8[0]) <=
   2055                         arraysize(pps->scaling_list8x8[0]),
   2056                 "scaling_lists must be of correct size");
   2057   static_assert(arraysize(v4l2_scaling_matrix.scaling_list_4x4) <=
   2058                         arraysize(sps->scaling_list4x4) &&
   2059                     arraysize(v4l2_scaling_matrix.scaling_list_4x4[0]) <=
   2060                         arraysize(sps->scaling_list4x4[0]) &&
   2061                     arraysize(v4l2_scaling_matrix.scaling_list_8x8) <=
   2062                         arraysize(sps->scaling_list8x8) &&
   2063                     arraysize(v4l2_scaling_matrix.scaling_list_8x8[0]) <=
   2064                         arraysize(sps->scaling_list8x8[0]),
   2065                 "scaling_lists must be of correct size");
   2066 
   2067   const auto* scaling_list4x4 = &sps->scaling_list4x4[0];
   2068   const auto* scaling_list8x8 = &sps->scaling_list8x8[0];
   2069   if (pps->pic_scaling_matrix_present_flag) {
   2070     scaling_list4x4 = &pps->scaling_list4x4[0];
   2071     scaling_list8x8 = &pps->scaling_list8x8[0];
   2072   }
   2073 
   2074   for (size_t i = 0; i < arraysize(v4l2_scaling_matrix.scaling_list_4x4); ++i) {
   2075     for (size_t j = 0; j < arraysize(v4l2_scaling_matrix.scaling_list_4x4[i]);
   2076          ++j) {
   2077       v4l2_scaling_matrix.scaling_list_4x4[i][j] = scaling_list4x4[i][j];
   2078     }
   2079   }
   2080   for (size_t i = 0; i < arraysize(v4l2_scaling_matrix.scaling_list_8x8); ++i) {
   2081     for (size_t j = 0; j < arraysize(v4l2_scaling_matrix.scaling_list_8x8[i]);
   2082          ++j) {
   2083       v4l2_scaling_matrix.scaling_list_8x8[i][j] = scaling_list8x8[i][j];
   2084     }
   2085   }
   2086 
   2087   memset(&ctrl, 0, sizeof(ctrl));
   2088   ctrl.id = V4L2_CID_MPEG_VIDEO_H264_SCALING_MATRIX;
   2089   ctrl.size = sizeof(v4l2_scaling_matrix);
   2090   ctrl.p_h264_scal_mtrx = &v4l2_scaling_matrix;
   2091   ctrls.push_back(ctrl);
   2092 
   2093   scoped_refptr<V4L2DecodeSurface> dec_surface =
   2094       H264PictureToV4L2DecodeSurface(pic);
   2095 
   2096   struct v4l2_ext_controls ext_ctrls;
   2097   memset(&ext_ctrls, 0, sizeof(ext_ctrls));
   2098   ext_ctrls.count = ctrls.size();
   2099   ext_ctrls.controls = &ctrls[0];
   2100   ext_ctrls.config_store = dec_surface->config_store();
   2101   v4l2_dec_->SubmitExtControls(&ext_ctrls);
   2102 
   2103   H264PictureListToDPBIndicesList(ref_pic_listp0,
   2104                                   v4l2_decode_param_.ref_pic_list_p0);
   2105   H264PictureListToDPBIndicesList(ref_pic_listb0,
   2106                                   v4l2_decode_param_.ref_pic_list_b0);
   2107   H264PictureListToDPBIndicesList(ref_pic_listb1,
   2108                                   v4l2_decode_param_.ref_pic_list_b1);
   2109 
   2110   std::vector<scoped_refptr<V4L2DecodeSurface>> ref_surfaces;
   2111   H264DPBToV4L2DPB(dpb, &ref_surfaces);
   2112   dec_surface->SetReferenceSurfaces(ref_surfaces);
   2113 
   2114   return true;
   2115 }
   2116 
// Translates one parsed slice header into a v4l2_ctrl_h264_slice_param entry
// and appends the slice data (with a re-added Annex-B start code) to the
// surface's input buffer. Returns false when the per-frame slice limit is
// exceeded or the data does not fit.
bool V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::SubmitSlice(
    const H264PPS* pps,
    const H264SliceHeader* slice_hdr,
    const H264Picture::Vector& ref_pic_list0,
    const H264Picture::Vector& ref_pic_list1,
    const scoped_refptr<H264Picture>& pic,
    const uint8_t* data,
    size_t size) {
  // The slice param array is fixed-size; refuse frames with too many slices.
  if (num_slices_ == kMaxSlices) {
    VLOGF(1) << "Over limit of supported slices per frame";
    return false;
  }

  struct v4l2_ctrl_h264_slice_param& v4l2_slice_param =
      v4l2_slice_params_[num_slices_++];
  memset(&v4l2_slice_param, 0, sizeof(v4l2_slice_param));

  v4l2_slice_param.size = size;
#define SHDR_TO_V4L2SPARM(a) v4l2_slice_param.a = slice_hdr->a
  SHDR_TO_V4L2SPARM(header_bit_size);
  SHDR_TO_V4L2SPARM(first_mb_in_slice);
  SHDR_TO_V4L2SPARM(slice_type);
  SHDR_TO_V4L2SPARM(pic_parameter_set_id);
  SHDR_TO_V4L2SPARM(colour_plane_id);
  SHDR_TO_V4L2SPARM(frame_num);
  SHDR_TO_V4L2SPARM(idr_pic_id);
  SHDR_TO_V4L2SPARM(pic_order_cnt_lsb);
  SHDR_TO_V4L2SPARM(delta_pic_order_cnt_bottom);
  SHDR_TO_V4L2SPARM(delta_pic_order_cnt0);
  SHDR_TO_V4L2SPARM(delta_pic_order_cnt1);
  SHDR_TO_V4L2SPARM(redundant_pic_cnt);
  SHDR_TO_V4L2SPARM(dec_ref_pic_marking_bit_size);
  SHDR_TO_V4L2SPARM(cabac_init_idc);
  SHDR_TO_V4L2SPARM(slice_qp_delta);
  SHDR_TO_V4L2SPARM(slice_qs_delta);
  SHDR_TO_V4L2SPARM(disable_deblocking_filter_idc);
  SHDR_TO_V4L2SPARM(slice_alpha_c0_offset_div2);
  SHDR_TO_V4L2SPARM(slice_beta_offset_div2);
  SHDR_TO_V4L2SPARM(num_ref_idx_l0_active_minus1);
  SHDR_TO_V4L2SPARM(num_ref_idx_l1_active_minus1);
  SHDR_TO_V4L2SPARM(pic_order_cnt_bit_size);
#undef SHDR_TO_V4L2SPARM

#define SET_V4L2_SPARM_FLAG_IF(cond, flag) \
  v4l2_slice_param.flags |= ((slice_hdr->cond) ? (flag) : 0)
  SET_V4L2_SPARM_FLAG_IF(field_pic_flag, V4L2_SLICE_FLAG_FIELD_PIC);
  SET_V4L2_SPARM_FLAG_IF(bottom_field_flag, V4L2_SLICE_FLAG_BOTTOM_FIELD);
  SET_V4L2_SPARM_FLAG_IF(direct_spatial_mv_pred_flag,
                         V4L2_SLICE_FLAG_DIRECT_SPATIAL_MV_PRED);
  SET_V4L2_SPARM_FLAG_IF(sp_for_switch_flag, V4L2_SLICE_FLAG_SP_FOR_SWITCH);
#undef SET_V4L2_SPARM_FLAG_IF

  struct v4l2_h264_pred_weight_table* pred_weight_table =
      &v4l2_slice_param.pred_weight_table;

  // Explicit weighted prediction tables are present only for P/SP slices with
  // weighted_pred_flag, or B slices with weighted_bipred_idc == 1.
  if (((slice_hdr->IsPSlice() || slice_hdr->IsSPSlice()) &&
       pps->weighted_pred_flag) ||
      (slice_hdr->IsBSlice() && pps->weighted_bipred_idc == 1)) {
    pred_weight_table->luma_log2_weight_denom =
        slice_hdr->luma_log2_weight_denom;
    pred_weight_table->chroma_log2_weight_denom =
        slice_hdr->chroma_log2_weight_denom;

    // Copy the L0 weight factors.
    struct v4l2_h264_weight_factors* factorsl0 =
        &pred_weight_table->weight_factors[0];

    for (int i = 0; i < 32; ++i) {
      factorsl0->luma_weight[i] =
          slice_hdr->pred_weight_table_l0.luma_weight[i];
      factorsl0->luma_offset[i] =
          slice_hdr->pred_weight_table_l0.luma_offset[i];

      for (int j = 0; j < 2; ++j) {
        factorsl0->chroma_weight[i][j] =
            slice_hdr->pred_weight_table_l0.chroma_weight[i][j];
        factorsl0->chroma_offset[i][j] =
            slice_hdr->pred_weight_table_l0.chroma_offset[i][j];
      }
    }

    // B slices additionally carry L1 weight factors.
    if (slice_hdr->IsBSlice()) {
      struct v4l2_h264_weight_factors* factorsl1 =
          &pred_weight_table->weight_factors[1];

      for (int i = 0; i < 32; ++i) {
        factorsl1->luma_weight[i] =
            slice_hdr->pred_weight_table_l1.luma_weight[i];
        factorsl1->luma_offset[i] =
            slice_hdr->pred_weight_table_l1.luma_offset[i];

        for (int j = 0; j < 2; ++j) {
          factorsl1->chroma_weight[i][j] =
              slice_hdr->pred_weight_table_l1.chroma_weight[i][j];
          factorsl1->chroma_offset[i][j] =
              slice_hdr->pred_weight_table_l1.chroma_offset[i][j];
        }
      }
    }
  }

  H264PictureListToDPBIndicesList(ref_pic_list0,
                                  v4l2_slice_param.ref_pic_list0);
  H264PictureListToDPBIndicesList(ref_pic_list1,
                                  v4l2_slice_param.ref_pic_list1);

  scoped_refptr<V4L2DecodeSurface> dec_surface =
      H264PictureToV4L2DecodeSurface(pic);

  v4l2_decode_param_.nal_ref_idc = slice_hdr->nal_ref_idc;

  // TODO(posciak): Don't add start code back here, but have it passed from
  // the parser.
  size_t data_copy_size = size + 3;
  std::unique_ptr<uint8_t[]> data_copy(new uint8_t[data_copy_size]);
  memset(data_copy.get(), 0, data_copy_size);
  data_copy[2] = 0x01;
  memcpy(data_copy.get() + 3, data, size);
  return v4l2_dec_->SubmitSlice(dec_surface->input_record(), data_copy.get(),
                                data_copy_size);
}
   2237 
   2238 bool V4L2SliceVideoDecodeAccelerator::SubmitSlice(int index,
   2239                                                   const uint8_t* data,
   2240                                                   size_t size) {
   2241   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
   2242 
   2243   InputRecord& input_record = input_buffer_map_[index];
   2244 
   2245   if (input_record.bytes_used + size > input_record.length) {
   2246     VLOGF(1) << "Input buffer too small";
   2247     return false;
   2248   }
   2249 
   2250   memcpy(static_cast<uint8_t*>(input_record.address) + input_record.bytes_used,
   2251          data, size);
   2252   input_record.bytes_used += size;
   2253 
   2254   return true;
   2255 }
   2256 
// Submits a set of codec-specific extended controls to the driver via
// VIDIOC_S_EXT_CTRLS. Returns false (after logging via the ioctl macro) on
// failure. Must run on the decoder thread.
bool V4L2SliceVideoDecodeAccelerator::SubmitExtControls(
    struct v4l2_ext_controls* ext_ctrls) {
  DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
  // Callers must always target a specific surface's config store; a zero
  // config_store would not be tied to any queued buffer.
  DCHECK_GT(ext_ctrls->config_store, 0u);
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_EXT_CTRLS, ext_ctrls);
  return true;
}
   2264 
// Reads back extended controls from the driver via VIDIOC_G_EXT_CTRLS,
// e.g. the updated VP9 entropy context after a decode. Returns false
// (after logging via the ioctl macro) on failure.
bool V4L2SliceVideoDecodeAccelerator::GetExtControls(
    struct v4l2_ext_controls* ext_ctrls) {
  DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
  // As with SubmitExtControls(), a specific surface's config store must be
  // targeted.
  DCHECK_GT(ext_ctrls->config_store, 0u);
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_G_EXT_CTRLS, ext_ctrls);
  return true;
}
   2272 
   2273 bool V4L2SliceVideoDecodeAccelerator::IsCtrlExposed(uint32_t ctrl_id) {
   2274   struct v4l2_queryctrl query_ctrl;
   2275   memset(&query_ctrl, 0, sizeof(query_ctrl));
   2276   query_ctrl.id = ctrl_id;
   2277 
   2278   return (device_->Ioctl(VIDIOC_QUERYCTRL, &query_ctrl) == 0);
   2279 }
   2280 
// Submits the accumulated per-frame H.264 state (slice parameters gathered
// over SubmitSlice() calls plus frame-level decode parameters) to the
// driver, then queues the surface for hardware decode.
bool V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::SubmitDecode(
    const scoped_refptr<H264Picture>& pic) {
  scoped_refptr<V4L2DecodeSurface> dec_surface =
      H264PictureToV4L2DecodeSurface(pic);

  // Finalize the frame-level decode parameters for this picture.
  v4l2_decode_param_.num_slices = num_slices_;
  v4l2_decode_param_.idr_pic_flag = pic->idr;
  v4l2_decode_param_.top_field_order_cnt = pic->top_field_order_cnt;
  v4l2_decode_param_.bottom_field_order_cnt = pic->bottom_field_order_cnt;

  struct v4l2_ext_control ctrl;
  std::vector<struct v4l2_ext_control> ctrls;

  // Control 1: per-slice parameters for all slices of this frame.
  memset(&ctrl, 0, sizeof(ctrl));
  ctrl.id = V4L2_CID_MPEG_VIDEO_H264_SLICE_PARAM;
  ctrl.size = sizeof(v4l2_slice_params_);
  ctrl.p_h264_slice_param = v4l2_slice_params_;
  ctrls.push_back(ctrl);

  // Control 2: frame-level decode parameters.
  memset(&ctrl, 0, sizeof(ctrl));
  ctrl.id = V4L2_CID_MPEG_VIDEO_H264_DECODE_PARAM;
  ctrl.size = sizeof(v4l2_decode_param_);
  ctrl.p_h264_decode_param = &v4l2_decode_param_;
  ctrls.push_back(ctrl);

  // Tie the controls to this surface's config store so the driver applies
  // them to the matching queued buffer.
  struct v4l2_ext_controls ext_ctrls;
  memset(&ext_ctrls, 0, sizeof(ext_ctrls));
  ext_ctrls.count = ctrls.size();
  ext_ctrls.controls = &ctrls[0];
  ext_ctrls.config_store = dec_surface->config_store();
  if (!v4l2_dec_->SubmitExtControls(&ext_ctrls))
    return false;

  // Clear accumulated slice/decode state so the next frame starts fresh.
  Reset();

  v4l2_dec_->DecodeSurface(dec_surface);
  return true;
}
   2319 
   2320 bool V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::OutputPicture(
   2321     const scoped_refptr<H264Picture>& pic) {
   2322   scoped_refptr<V4L2DecodeSurface> dec_surface =
   2323       H264PictureToV4L2DecodeSurface(pic);
   2324   dec_surface->set_visible_rect(pic->visible_rect);
   2325   v4l2_dec_->SurfaceReady(dec_surface);
   2326   return true;
   2327 }
   2328 
// Clears all per-frame state (slice count, decode parameters and slice
// parameter array) accumulated while building the current frame.
void V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::Reset() {
  num_slices_ = 0;
  memset(&v4l2_decode_param_, 0, sizeof(v4l2_decode_param_));
  memset(&v4l2_slice_params_, 0, sizeof(v4l2_slice_params_));
}
   2334 
   2335 scoped_refptr<V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>
   2336 V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::
   2337     H264PictureToV4L2DecodeSurface(const scoped_refptr<H264Picture>& pic) {
   2338   V4L2H264Picture* v4l2_pic = pic->AsV4L2H264Picture();
   2339   CHECK(v4l2_pic);
   2340   return v4l2_pic->dec_surface();
   2341 }
   2342 
// Constructs the VP8 accelerator delegate; |v4l2_dec| is the owning decoder
// and must be non-null (not owned, must outlive this object).
V4L2SliceVideoDecodeAccelerator::V4L2VP8Accelerator::V4L2VP8Accelerator(
    V4L2SliceVideoDecodeAccelerator* v4l2_dec)
    : v4l2_dec_(v4l2_dec) {
  DCHECK(v4l2_dec_);
}
   2348 
   2349 V4L2SliceVideoDecodeAccelerator::V4L2VP8Accelerator::~V4L2VP8Accelerator() {}
   2350 
   2351 scoped_refptr<VP8Picture>
   2352 V4L2SliceVideoDecodeAccelerator::V4L2VP8Accelerator::CreateVP8Picture() {
   2353   scoped_refptr<V4L2DecodeSurface> dec_surface = v4l2_dec_->CreateSurface();
   2354   if (!dec_surface)
   2355     return nullptr;
   2356 
   2357   return new V4L2VP8Picture(dec_surface);
   2358 }
   2359 
// memcpy between two arrays, statically asserting that source and
// destination have exactly the same byte size to prevent over/under-copy.
#define ARRAY_MEMCPY_CHECKED(to, from)                               \
  do {                                                               \
    static_assert(sizeof(to) == sizeof(from),                        \
                  #from " and " #to " arrays must be of same size"); \
    memcpy(to, from, sizeof(to));                                    \
  } while (0)
   2366 
   2367 static void FillV4L2SegmentationHeader(
   2368     const Vp8SegmentationHeader& vp8_sgmnt_hdr,
   2369     struct v4l2_vp8_sgmnt_hdr* v4l2_sgmnt_hdr) {
   2370 #define SET_V4L2_SGMNT_HDR_FLAG_IF(cond, flag) \
   2371   v4l2_sgmnt_hdr->flags |= ((vp8_sgmnt_hdr.cond) ? (flag) : 0)
   2372   SET_V4L2_SGMNT_HDR_FLAG_IF(segmentation_enabled,
   2373                              V4L2_VP8_SEGMNT_HDR_FLAG_ENABLED);
   2374   SET_V4L2_SGMNT_HDR_FLAG_IF(update_mb_segmentation_map,
   2375                              V4L2_VP8_SEGMNT_HDR_FLAG_UPDATE_MAP);
   2376   SET_V4L2_SGMNT_HDR_FLAG_IF(update_segment_feature_data,
   2377                              V4L2_VP8_SEGMNT_HDR_FLAG_UPDATE_FEATURE_DATA);
   2378 #undef SET_V4L2_SPARM_FLAG_IF
   2379   v4l2_sgmnt_hdr->segment_feature_mode = vp8_sgmnt_hdr.segment_feature_mode;
   2380 
   2381   ARRAY_MEMCPY_CHECKED(v4l2_sgmnt_hdr->quant_update,
   2382                        vp8_sgmnt_hdr.quantizer_update_value);
   2383   ARRAY_MEMCPY_CHECKED(v4l2_sgmnt_hdr->lf_update,
   2384                        vp8_sgmnt_hdr.lf_update_value);
   2385   ARRAY_MEMCPY_CHECKED(v4l2_sgmnt_hdr->segment_probs,
   2386                        vp8_sgmnt_hdr.segment_prob);
   2387 }
   2388 
   2389 static void FillV4L2LoopfilterHeader(
   2390     const Vp8LoopFilterHeader& vp8_loopfilter_hdr,
   2391     struct v4l2_vp8_loopfilter_hdr* v4l2_lf_hdr) {
   2392 #define SET_V4L2_LF_HDR_FLAG_IF(cond, flag) \
   2393   v4l2_lf_hdr->flags |= ((vp8_loopfilter_hdr.cond) ? (flag) : 0)
   2394   SET_V4L2_LF_HDR_FLAG_IF(loop_filter_adj_enable, V4L2_VP8_LF_HDR_ADJ_ENABLE);
   2395   SET_V4L2_LF_HDR_FLAG_IF(mode_ref_lf_delta_update,
   2396                           V4L2_VP8_LF_HDR_DELTA_UPDATE);
   2397 #undef SET_V4L2_SGMNT_HDR_FLAG_IF
   2398 
   2399 #define LF_HDR_TO_V4L2_LF_HDR(a) v4l2_lf_hdr->a = vp8_loopfilter_hdr.a;
   2400   LF_HDR_TO_V4L2_LF_HDR(type);
   2401   LF_HDR_TO_V4L2_LF_HDR(level);
   2402   LF_HDR_TO_V4L2_LF_HDR(sharpness_level);
   2403 #undef LF_HDR_TO_V4L2_LF_HDR
   2404 
   2405   ARRAY_MEMCPY_CHECKED(v4l2_lf_hdr->ref_frm_delta_magnitude,
   2406                        vp8_loopfilter_hdr.ref_frame_delta);
   2407   ARRAY_MEMCPY_CHECKED(v4l2_lf_hdr->mb_mode_delta_magnitude,
   2408                        vp8_loopfilter_hdr.mb_mode_delta);
   2409 }
   2410 
   2411 static void FillV4L2QuantizationHeader(
   2412     const Vp8QuantizationHeader& vp8_quant_hdr,
   2413     struct v4l2_vp8_quantization_hdr* v4l2_quant_hdr) {
   2414   v4l2_quant_hdr->y_ac_qi = vp8_quant_hdr.y_ac_qi;
   2415   v4l2_quant_hdr->y_dc_delta = vp8_quant_hdr.y_dc_delta;
   2416   v4l2_quant_hdr->y2_dc_delta = vp8_quant_hdr.y2_dc_delta;
   2417   v4l2_quant_hdr->y2_ac_delta = vp8_quant_hdr.y2_ac_delta;
   2418   v4l2_quant_hdr->uv_dc_delta = vp8_quant_hdr.uv_dc_delta;
   2419   v4l2_quant_hdr->uv_ac_delta = vp8_quant_hdr.uv_ac_delta;
   2420 }
   2421 
// Copies the VP8 entropy (probability) tables from the parsed header into
// the V4L2 control structure; array sizes are checked at compile time.
static void FillV4L2Vp8EntropyHeader(
    const Vp8EntropyHeader& vp8_entropy_hdr,
    struct v4l2_vp8_entropy_hdr* v4l2_entropy_hdr) {
  ARRAY_MEMCPY_CHECKED(v4l2_entropy_hdr->coeff_probs,
                       vp8_entropy_hdr.coeff_probs);
  ARRAY_MEMCPY_CHECKED(v4l2_entropy_hdr->y_mode_probs,
                       vp8_entropy_hdr.y_mode_probs);
  ARRAY_MEMCPY_CHECKED(v4l2_entropy_hdr->uv_mode_probs,
                       vp8_entropy_hdr.uv_mode_probs);
  ARRAY_MEMCPY_CHECKED(v4l2_entropy_hdr->mv_probs, vp8_entropy_hdr.mv_probs);
}
   2433 
// Builds the complete V4L2 VP8 frame header control from the parsed frame
// header and reference pictures, submits it to the driver, copies the
// frame's bitstream into the input buffer, and queues the surface for
// decode. Null reference pictures are signalled as VIDEO_MAX_FRAME.
bool V4L2SliceVideoDecodeAccelerator::V4L2VP8Accelerator::SubmitDecode(
    const scoped_refptr<VP8Picture>& pic,
    const Vp8FrameHeader* frame_hdr,
    const scoped_refptr<VP8Picture>& last_frame,
    const scoped_refptr<VP8Picture>& golden_frame,
    const scoped_refptr<VP8Picture>& alt_frame) {
  struct v4l2_ctrl_vp8_frame_hdr v4l2_frame_hdr;
  memset(&v4l2_frame_hdr, 0, sizeof(v4l2_frame_hdr));

// Copy same-named scalar fields from the parsed header.
#define FHDR_TO_V4L2_FHDR(a) v4l2_frame_hdr.a = frame_hdr->a
  FHDR_TO_V4L2_FHDR(key_frame);
  FHDR_TO_V4L2_FHDR(version);
  FHDR_TO_V4L2_FHDR(width);
  FHDR_TO_V4L2_FHDR(horizontal_scale);
  FHDR_TO_V4L2_FHDR(height);
  FHDR_TO_V4L2_FHDR(vertical_scale);
  FHDR_TO_V4L2_FHDR(sign_bias_golden);
  FHDR_TO_V4L2_FHDR(sign_bias_alternate);
  FHDR_TO_V4L2_FHDR(prob_skip_false);
  FHDR_TO_V4L2_FHDR(prob_intra);
  FHDR_TO_V4L2_FHDR(prob_last);
  FHDR_TO_V4L2_FHDR(prob_gf);
  FHDR_TO_V4L2_FHDR(bool_dec_range);
  FHDR_TO_V4L2_FHDR(bool_dec_value);
  FHDR_TO_V4L2_FHDR(bool_dec_count);
#undef FHDR_TO_V4L2_FHDR

// Translate boolean header fields into V4L2 flag bits.
#define SET_V4L2_FRM_HDR_FLAG_IF(cond, flag) \
  v4l2_frame_hdr.flags |= ((frame_hdr->cond) ? (flag) : 0)
  SET_V4L2_FRM_HDR_FLAG_IF(is_experimental,
                           V4L2_VP8_FRAME_HDR_FLAG_EXPERIMENTAL);
  SET_V4L2_FRM_HDR_FLAG_IF(show_frame, V4L2_VP8_FRAME_HDR_FLAG_SHOW_FRAME);
  SET_V4L2_FRM_HDR_FLAG_IF(mb_no_skip_coeff,
                           V4L2_VP8_FRAME_HDR_FLAG_MB_NO_SKIP_COEFF);
#undef SET_V4L2_FRM_HDR_FLAG_IF

  // Fill the embedded sub-headers (segmentation, loop filter, quantization,
  // entropy tables).
  FillV4L2SegmentationHeader(frame_hdr->segmentation_hdr,
                             &v4l2_frame_hdr.sgmnt_hdr);

  FillV4L2LoopfilterHeader(frame_hdr->loopfilter_hdr, &v4l2_frame_hdr.lf_hdr);

  FillV4L2QuantizationHeader(frame_hdr->quantization_hdr,
                             &v4l2_frame_hdr.quant_hdr);

  FillV4L2Vp8EntropyHeader(frame_hdr->entropy_hdr, &v4l2_frame_hdr.entropy_hdr);

  // Partition layout within the bitstream; checked_cast traps on overflow.
  v4l2_frame_hdr.first_part_size =
      base::checked_cast<__u32>(frame_hdr->first_part_size);
  v4l2_frame_hdr.first_part_offset =
      base::checked_cast<__u32>(frame_hdr->first_part_offset);
  v4l2_frame_hdr.macroblock_bit_offset =
      base::checked_cast<__u32>(frame_hdr->macroblock_bit_offset);
  v4l2_frame_hdr.num_dct_parts = frame_hdr->num_of_dct_partitions;

  static_assert(arraysize(v4l2_frame_hdr.dct_part_sizes) ==
                    arraysize(frame_hdr->dct_partition_sizes),
                "DCT partition size arrays must have equal number of elements");
  for (size_t i = 0; i < frame_hdr->num_of_dct_partitions &&
                     i < arraysize(v4l2_frame_hdr.dct_part_sizes);
       ++i)
    v4l2_frame_hdr.dct_part_sizes[i] = frame_hdr->dct_partition_sizes[i];

  scoped_refptr<V4L2DecodeSurface> dec_surface =
      VP8PictureToV4L2DecodeSurface(pic);
  std::vector<scoped_refptr<V4L2DecodeSurface>> ref_surfaces;

  // Resolve the three VP8 reference frames to output buffer indices;
  // VIDEO_MAX_FRAME means "no such reference".
  if (last_frame) {
    scoped_refptr<V4L2DecodeSurface> last_frame_surface =
        VP8PictureToV4L2DecodeSurface(last_frame);
    v4l2_frame_hdr.last_frame = last_frame_surface->output_record();
    ref_surfaces.push_back(last_frame_surface);
  } else {
    v4l2_frame_hdr.last_frame = VIDEO_MAX_FRAME;
  }

  if (golden_frame) {
    scoped_refptr<V4L2DecodeSurface> golden_frame_surface =
        VP8PictureToV4L2DecodeSurface(golden_frame);
    v4l2_frame_hdr.golden_frame = golden_frame_surface->output_record();
    ref_surfaces.push_back(golden_frame_surface);
  } else {
    v4l2_frame_hdr.golden_frame = VIDEO_MAX_FRAME;
  }

  if (alt_frame) {
    scoped_refptr<V4L2DecodeSurface> alt_frame_surface =
        VP8PictureToV4L2DecodeSurface(alt_frame);
    v4l2_frame_hdr.alt_frame = alt_frame_surface->output_record();
    ref_surfaces.push_back(alt_frame_surface);
  } else {
    v4l2_frame_hdr.alt_frame = VIDEO_MAX_FRAME;
  }

  // Submit the frame header as a single extended control, tied to this
  // surface's config store.
  struct v4l2_ext_control ctrl;
  memset(&ctrl, 0, sizeof(ctrl));
  ctrl.id = V4L2_CID_MPEG_VIDEO_VP8_FRAME_HDR;
  ctrl.size = sizeof(v4l2_frame_hdr);
  ctrl.p_vp8_frame_hdr = &v4l2_frame_hdr;

  struct v4l2_ext_controls ext_ctrls;
  memset(&ext_ctrls, 0, sizeof(ext_ctrls));
  ext_ctrls.count = 1;
  ext_ctrls.controls = &ctrl;
  ext_ctrls.config_store = dec_surface->config_store();

  if (!v4l2_dec_->SubmitExtControls(&ext_ctrls))
    return false;

  // Keep the reference surfaces alive until this surface is decoded.
  dec_surface->SetReferenceSurfaces(ref_surfaces);

  if (!v4l2_dec_->SubmitSlice(dec_surface->input_record(), frame_hdr->data,
                              frame_hdr->frame_size))
    return false;

  v4l2_dec_->DecodeSurface(dec_surface);
  return true;
}
   2551 
   2552 bool V4L2SliceVideoDecodeAccelerator::V4L2VP8Accelerator::OutputPicture(
   2553     const scoped_refptr<VP8Picture>& pic) {
   2554   scoped_refptr<V4L2DecodeSurface> dec_surface =
   2555       VP8PictureToV4L2DecodeSurface(pic);
   2556   dec_surface->set_visible_rect(pic->visible_rect);
   2557   v4l2_dec_->SurfaceReady(dec_surface);
   2558   return true;
   2559 }
   2560 
   2561 scoped_refptr<V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>
   2562 V4L2SliceVideoDecodeAccelerator::V4L2VP8Accelerator::
   2563     VP8PictureToV4L2DecodeSurface(const scoped_refptr<VP8Picture>& pic) {
   2564   V4L2VP8Picture* v4l2_pic = pic->AsV4L2VP8Picture();
   2565   CHECK(v4l2_pic);
   2566   return v4l2_pic->dec_surface();
   2567 }
   2568 
// Constructs the VP9 accelerator delegate; |v4l2_dec| is the owning decoder
// and must be non-null (not owned, must outlive this object).
V4L2SliceVideoDecodeAccelerator::V4L2VP9Accelerator::V4L2VP9Accelerator(
    V4L2SliceVideoDecodeAccelerator* v4l2_dec)
    : v4l2_dec_(v4l2_dec) {
  DCHECK(v4l2_dec_);

  // Drivers that expose the VP9 entropy control need userspace to maintain
  // and supply frame-context data: SubmitDecode() then attaches the entropy
  // control and GetFrameContext() reads back the updated context.
  device_needs_frame_context_ =
      v4l2_dec_->IsCtrlExposed(V4L2_CID_MPEG_VIDEO_VP9_ENTROPY);
  DVLOG_IF(1, device_needs_frame_context_)
      << "Device requires frame context parsing";
}
   2579 
   2580 V4L2SliceVideoDecodeAccelerator::V4L2VP9Accelerator::~V4L2VP9Accelerator() {}
   2581 
   2582 scoped_refptr<VP9Picture>
   2583 V4L2SliceVideoDecodeAccelerator::V4L2VP9Accelerator::CreateVP9Picture() {
   2584   scoped_refptr<V4L2DecodeSurface> dec_surface = v4l2_dec_->CreateSurface();
   2585   if (!dec_surface)
   2586     return nullptr;
   2587 
   2588   return new V4L2VP9Picture(dec_surface);
   2589 }
   2590 
// Translates parsed VP9 loop filter parameters into their V4L2 control
// representation: flags, filter level/sharpness, and the delta/level
// lookup tables (sizes checked at compile time).
static void FillV4L2VP9LoopFilterParams(
    const Vp9LoopFilterParams& vp9_lf_params,
    struct v4l2_vp9_loop_filter_params* v4l2_lf_params) {
#define SET_LF_PARAMS_FLAG_IF(cond, flag) \
  v4l2_lf_params->flags |= ((vp9_lf_params.cond) ? (flag) : 0)
  SET_LF_PARAMS_FLAG_IF(delta_enabled, V4L2_VP9_LOOP_FLTR_FLAG_DELTA_ENABLED);
  SET_LF_PARAMS_FLAG_IF(delta_update, V4L2_VP9_LOOP_FLTR_FLAG_DELTA_UPDATE);
#undef SET_LF_PARAMS_FLAG_IF

  v4l2_lf_params->level = vp9_lf_params.level;
  v4l2_lf_params->sharpness = vp9_lf_params.sharpness;

  ARRAY_MEMCPY_CHECKED(v4l2_lf_params->deltas, vp9_lf_params.ref_deltas);
  ARRAY_MEMCPY_CHECKED(v4l2_lf_params->mode_deltas, vp9_lf_params.mode_deltas);
  ARRAY_MEMCPY_CHECKED(v4l2_lf_params->lvl_lookup, vp9_lf_params.lvl);
}
   2607 
// Translates parsed VP9 quantization parameters into their V4L2 control
// representation: the lossless flag plus the base/delta quantizer indices.
static void FillV4L2VP9QuantizationParams(
    const Vp9QuantizationParams& vp9_quant_params,
    struct v4l2_vp9_quantization_params* v4l2_q_params) {
#define SET_Q_PARAMS_FLAG_IF(cond, flag) \
  v4l2_q_params->flags |= ((vp9_quant_params.cond) ? (flag) : 0)
  SET_Q_PARAMS_FLAG_IF(IsLossless(), V4L2_VP9_QUANT_PARAMS_FLAG_LOSSLESS);
#undef SET_Q_PARAMS_FLAG_IF

#define Q_PARAMS_TO_V4L2_Q_PARAMS(a) v4l2_q_params->a = vp9_quant_params.a
  Q_PARAMS_TO_V4L2_Q_PARAMS(base_q_idx);
  Q_PARAMS_TO_V4L2_Q_PARAMS(delta_q_y_dc);
  Q_PARAMS_TO_V4L2_Q_PARAMS(delta_q_uv_dc);
  Q_PARAMS_TO_V4L2_Q_PARAMS(delta_q_uv_ac);
#undef Q_PARAMS_TO_V4L2_Q_PARAMS
}
   2623 
// Translates parsed VP9 segmentation parameters into their V4L2 control
// representation: flags, probability tables, per-segment feature data and
// the 2-D feature-enabled matrix.
static void FillV4L2VP9SegmentationParams(
    const Vp9SegmentationParams& vp9_segm_params,
    struct v4l2_vp9_segmentation_params* v4l2_segm_params) {
#define SET_SEG_PARAMS_FLAG_IF(cond, flag) \
  v4l2_segm_params->flags |= ((vp9_segm_params.cond) ? (flag) : 0)
  SET_SEG_PARAMS_FLAG_IF(enabled, V4L2_VP9_SGMNT_PARAM_FLAG_ENABLED);
  SET_SEG_PARAMS_FLAG_IF(update_map, V4L2_VP9_SGMNT_PARAM_FLAG_UPDATE_MAP);
  SET_SEG_PARAMS_FLAG_IF(temporal_update,
                         V4L2_VP9_SGMNT_PARAM_FLAG_TEMPORAL_UPDATE);
  SET_SEG_PARAMS_FLAG_IF(update_data, V4L2_VP9_SGMNT_PARAM_FLAG_UPDATE_DATA);
  SET_SEG_PARAMS_FLAG_IF(abs_or_delta_update,
                         V4L2_VP9_SGMNT_PARAM_FLAG_ABS_OR_DELTA_UPDATE);
#undef SET_SEG_PARAMS_FLAG_IF

  ARRAY_MEMCPY_CHECKED(v4l2_segm_params->tree_probs,
                       vp9_segm_params.tree_probs);
  ARRAY_MEMCPY_CHECKED(v4l2_segm_params->pred_probs,
                       vp9_segm_params.pred_probs);
  ARRAY_MEMCPY_CHECKED(v4l2_segm_params->feature_data,
                       vp9_segm_params.feature_data);

  // feature_enabled is copied element-wise because the element types may
  // differ between the two structs; the shape is verified at compile time.
  static_assert(arraysize(v4l2_segm_params->feature_enabled) ==
                        arraysize(vp9_segm_params.feature_enabled) &&
                    arraysize(v4l2_segm_params->feature_enabled[0]) ==
                        arraysize(vp9_segm_params.feature_enabled[0]),
                "feature_enabled arrays must be of same size");
  for (size_t i = 0; i < arraysize(v4l2_segm_params->feature_enabled); ++i) {
    for (size_t j = 0; j < arraysize(v4l2_segm_params->feature_enabled[i]);
         ++j) {
      v4l2_segm_params->feature_enabled[i][j] =
          vp9_segm_params.feature_enabled[i][j];
    }
  }
}
   2658 
// Copies a parsed VP9 frame (entropy) context into the V4L2 entropy context
// structure, table by table; sizes are checked at compile time. The inverse
// of FillVp9FrameContext() below.
static void FillV4L2Vp9EntropyContext(
    const Vp9FrameContext& vp9_frame_ctx,
    struct v4l2_vp9_entropy_ctx* v4l2_entropy_ctx) {
#define ARRAY_MEMCPY_CHECKED_FRM_CTX_TO_V4L2_ENTR(a) \
  ARRAY_MEMCPY_CHECKED(v4l2_entropy_ctx->a, vp9_frame_ctx.a)
  ARRAY_MEMCPY_CHECKED_FRM_CTX_TO_V4L2_ENTR(tx_probs_8x8);
  ARRAY_MEMCPY_CHECKED_FRM_CTX_TO_V4L2_ENTR(tx_probs_16x16);
  ARRAY_MEMCPY_CHECKED_FRM_CTX_TO_V4L2_ENTR(tx_probs_32x32);

  ARRAY_MEMCPY_CHECKED_FRM_CTX_TO_V4L2_ENTR(coef_probs);
  ARRAY_MEMCPY_CHECKED_FRM_CTX_TO_V4L2_ENTR(skip_prob);
  ARRAY_MEMCPY_CHECKED_FRM_CTX_TO_V4L2_ENTR(inter_mode_probs);
  ARRAY_MEMCPY_CHECKED_FRM_CTX_TO_V4L2_ENTR(interp_filter_probs);
  ARRAY_MEMCPY_CHECKED_FRM_CTX_TO_V4L2_ENTR(is_inter_prob);

  ARRAY_MEMCPY_CHECKED_FRM_CTX_TO_V4L2_ENTR(comp_mode_prob);
  ARRAY_MEMCPY_CHECKED_FRM_CTX_TO_V4L2_ENTR(single_ref_prob);
  ARRAY_MEMCPY_CHECKED_FRM_CTX_TO_V4L2_ENTR(comp_ref_prob);

  ARRAY_MEMCPY_CHECKED_FRM_CTX_TO_V4L2_ENTR(y_mode_probs);
  ARRAY_MEMCPY_CHECKED_FRM_CTX_TO_V4L2_ENTR(uv_mode_probs);

  ARRAY_MEMCPY_CHECKED_FRM_CTX_TO_V4L2_ENTR(partition_probs);

  ARRAY_MEMCPY_CHECKED_FRM_CTX_TO_V4L2_ENTR(mv_joint_probs);
  ARRAY_MEMCPY_CHECKED_FRM_CTX_TO_V4L2_ENTR(mv_sign_prob);
  ARRAY_MEMCPY_CHECKED_FRM_CTX_TO_V4L2_ENTR(mv_class_probs);
  ARRAY_MEMCPY_CHECKED_FRM_CTX_TO_V4L2_ENTR(mv_class0_bit_prob);
  ARRAY_MEMCPY_CHECKED_FRM_CTX_TO_V4L2_ENTR(mv_bits_prob);
  ARRAY_MEMCPY_CHECKED_FRM_CTX_TO_V4L2_ENTR(mv_class0_fr_probs);
  ARRAY_MEMCPY_CHECKED_FRM_CTX_TO_V4L2_ENTR(mv_fr_probs);
  ARRAY_MEMCPY_CHECKED_FRM_CTX_TO_V4L2_ENTR(mv_class0_hp_prob);
  ARRAY_MEMCPY_CHECKED_FRM_CTX_TO_V4L2_ENTR(mv_hp_prob);
#undef ARRAY_MEMCPY_CHECKED_FRM_CTX_TO_V4L2_ENTR
}
   2694 
// Builds the V4L2 VP9 frame header and decode-parameter controls (plus the
// entropy control when the driver requires frame-context parsing), submits
// them, copies the frame's bitstream into the input buffer, and queues the
// surface for decode. |done_cb| is invoked when the surface's decode is
// done. Returns false on invalid reference indices or submission failure.
bool V4L2SliceVideoDecodeAccelerator::V4L2VP9Accelerator::SubmitDecode(
    const scoped_refptr<VP9Picture>& pic,
    const Vp9SegmentationParams& segm_params,
    const Vp9LoopFilterParams& lf_params,
    const std::vector<scoped_refptr<VP9Picture>>& ref_pictures,
    const base::Closure& done_cb) {
  const Vp9FrameHeader* frame_hdr = pic->frame_hdr.get();
  DCHECK(frame_hdr);

  struct v4l2_ctrl_vp9_frame_hdr v4l2_frame_hdr;
  memset(&v4l2_frame_hdr, 0, sizeof(v4l2_frame_hdr));

// Copy same-named scalar fields from the parsed header.
#define FHDR_TO_V4L2_FHDR(a) v4l2_frame_hdr.a = frame_hdr->a
  FHDR_TO_V4L2_FHDR(profile);
  FHDR_TO_V4L2_FHDR(frame_type);

  FHDR_TO_V4L2_FHDR(bit_depth);
  FHDR_TO_V4L2_FHDR(color_range);
  FHDR_TO_V4L2_FHDR(subsampling_x);
  FHDR_TO_V4L2_FHDR(subsampling_y);

  FHDR_TO_V4L2_FHDR(frame_width);
  FHDR_TO_V4L2_FHDR(frame_height);
  FHDR_TO_V4L2_FHDR(render_width);
  FHDR_TO_V4L2_FHDR(render_height);

  FHDR_TO_V4L2_FHDR(reset_frame_context);

  FHDR_TO_V4L2_FHDR(interpolation_filter);
  FHDR_TO_V4L2_FHDR(frame_context_idx);

  FHDR_TO_V4L2_FHDR(tile_cols_log2);
  FHDR_TO_V4L2_FHDR(tile_rows_log2);

  FHDR_TO_V4L2_FHDR(header_size_in_bytes);
#undef FHDR_TO_V4L2_FHDR
  // color_space is an enum on the parser side; convert explicitly.
  v4l2_frame_hdr.color_space = static_cast<uint8_t>(frame_hdr->color_space);

  FillV4L2VP9QuantizationParams(frame_hdr->quant_params,
                                &v4l2_frame_hdr.quant_params);

// Translate boolean header fields into V4L2 flag bits.
#define SET_V4L2_FRM_HDR_FLAG_IF(cond, flag) \
  v4l2_frame_hdr.flags |= ((frame_hdr->cond) ? (flag) : 0)
  SET_V4L2_FRM_HDR_FLAG_IF(show_frame, V4L2_VP9_FRAME_HDR_FLAG_SHOW_FRAME);
  SET_V4L2_FRM_HDR_FLAG_IF(error_resilient_mode,
                           V4L2_VP9_FRAME_HDR_FLAG_ERR_RES);
  SET_V4L2_FRM_HDR_FLAG_IF(intra_only, V4L2_VP9_FRAME_HDR_FLAG_FRAME_INTRA);
  SET_V4L2_FRM_HDR_FLAG_IF(allow_high_precision_mv,
                           V4L2_VP9_FRAME_HDR_ALLOW_HIGH_PREC_MV);
  SET_V4L2_FRM_HDR_FLAG_IF(refresh_frame_context,
                           V4L2_VP9_FRAME_HDR_REFRESH_FRAME_CTX);
  SET_V4L2_FRM_HDR_FLAG_IF(frame_parallel_decoding_mode,
                           V4L2_VP9_FRAME_HDR_PARALLEL_DEC_MODE);
#undef SET_V4L2_FRM_HDR_FLAG_IF

  FillV4L2VP9LoopFilterParams(lf_params, &v4l2_frame_hdr.lf_params);
  FillV4L2VP9SegmentationParams(segm_params, &v4l2_frame_hdr.sgmnt_params);

  std::vector<struct v4l2_ext_control> ctrls;

  // Control 1: the frame header.
  struct v4l2_ext_control ctrl;
  memset(&ctrl, 0, sizeof(ctrl));
  ctrl.id = V4L2_CID_MPEG_VIDEO_VP9_FRAME_HDR;
  ctrl.size = sizeof(v4l2_frame_hdr);
  ctrl.p_vp9_frame_hdr = &v4l2_frame_hdr;
  ctrls.push_back(ctrl);

  struct v4l2_ctrl_vp9_decode_param v4l2_decode_param;
  memset(&v4l2_decode_param, 0, sizeof(v4l2_decode_param));
  DCHECK_EQ(ref_pictures.size(), arraysize(v4l2_decode_param.ref_frames));

  // Map every slot of the reference picture pool to an output buffer index;
  // empty slots are signalled as VIDEO_MAX_FRAME.
  std::vector<scoped_refptr<V4L2DecodeSurface>> ref_surfaces;
  for (size_t i = 0; i < ref_pictures.size(); ++i) {
    if (ref_pictures[i]) {
      scoped_refptr<V4L2DecodeSurface> ref_surface =
          VP9PictureToV4L2DecodeSurface(ref_pictures[i]);

      v4l2_decode_param.ref_frames[i] = ref_surface->output_record();
      ref_surfaces.push_back(ref_surface);
    } else {
      v4l2_decode_param.ref_frames[i] = VIDEO_MAX_FRAME;
    }
  }

  static_assert(arraysize(v4l2_decode_param.active_ref_frames) ==
                    arraysize(frame_hdr->ref_frame_idx),
                "active reference frame array sizes mismatch");

  // Fill the frame's active references (LAST/GOLDEN/ALT) from the pool,
  // including the per-reference geometry the driver needs.
  for (size_t i = 0; i < arraysize(frame_hdr->ref_frame_idx); ++i) {
    uint8_t idx = frame_hdr->ref_frame_idx[i];
    // Reject out-of-range reference indices from the bitstream.
    if (idx >= ref_pictures.size())
      return false;

    struct v4l2_vp9_reference_frame* v4l2_ref_frame =
        &v4l2_decode_param.active_ref_frames[i];

    scoped_refptr<VP9Picture> ref_pic = ref_pictures[idx];
    if (ref_pic) {
      scoped_refptr<V4L2DecodeSurface> ref_surface =
          VP9PictureToV4L2DecodeSurface(ref_pic);
      v4l2_ref_frame->buf_index = ref_surface->output_record();
#define REF_TO_V4L2_REF(a) v4l2_ref_frame->a = ref_pic->frame_hdr->a
      REF_TO_V4L2_REF(frame_width);
      REF_TO_V4L2_REF(frame_height);
      REF_TO_V4L2_REF(bit_depth);
      REF_TO_V4L2_REF(subsampling_x);
      REF_TO_V4L2_REF(subsampling_y);
#undef REF_TO_V4L2_REF
    } else {
      v4l2_ref_frame->buf_index = VIDEO_MAX_FRAME;
    }
  }

  // Control 2: the decode parameters.
  memset(&ctrl, 0, sizeof(ctrl));
  ctrl.id = V4L2_CID_MPEG_VIDEO_VP9_DECODE_PARAM;
  ctrl.size = sizeof(v4l2_decode_param);
  ctrl.p_vp9_decode_param = &v4l2_decode_param;
  ctrls.push_back(ctrl);

  // Defined outside of the if() clause below as it must remain valid until
  // the call to SubmitExtControls().
  struct v4l2_ctrl_vp9_entropy v4l2_entropy;
  if (device_needs_frame_context_) {
    // Control 3 (optional): initial and current entropy contexts for
    // drivers that rely on userspace frame-context handling.
    memset(&v4l2_entropy, 0, sizeof(v4l2_entropy));
    FillV4L2Vp9EntropyContext(frame_hdr->initial_frame_context,
                              &v4l2_entropy.initial_entropy_ctx);
    FillV4L2Vp9EntropyContext(frame_hdr->frame_context,
                              &v4l2_entropy.current_entropy_ctx);
    v4l2_entropy.tx_mode = frame_hdr->compressed_header.tx_mode;
    v4l2_entropy.reference_mode = frame_hdr->compressed_header.reference_mode;

    memset(&ctrl, 0, sizeof(ctrl));
    ctrl.id = V4L2_CID_MPEG_VIDEO_VP9_ENTROPY;
    ctrl.size = sizeof(v4l2_entropy);
    ctrl.p_vp9_entropy = &v4l2_entropy;
    ctrls.push_back(ctrl);
  }

  scoped_refptr<V4L2DecodeSurface> dec_surface =
      VP9PictureToV4L2DecodeSurface(pic);

  // Submit all controls at once, tied to this surface's config store.
  struct v4l2_ext_controls ext_ctrls;
  memset(&ext_ctrls, 0, sizeof(ext_ctrls));
  ext_ctrls.count = ctrls.size();
  ext_ctrls.controls = &ctrls[0];
  ext_ctrls.config_store = dec_surface->config_store();
  if (!v4l2_dec_->SubmitExtControls(&ext_ctrls))
    return false;

  // Keep the reference surfaces alive until this surface is decoded, and
  // arrange for |done_cb| to run on decode completion.
  dec_surface->SetReferenceSurfaces(ref_surfaces);
  dec_surface->SetDecodeDoneCallback(done_cb);

  if (!v4l2_dec_->SubmitSlice(dec_surface->input_record(), frame_hdr->data,
                              frame_hdr->frame_size))
    return false;

  v4l2_dec_->DecodeSurface(dec_surface);
  return true;
}
   2854 
   2855 bool V4L2SliceVideoDecodeAccelerator::V4L2VP9Accelerator::OutputPicture(
   2856     const scoped_refptr<VP9Picture>& pic) {
   2857   scoped_refptr<V4L2DecodeSurface> dec_surface =
   2858       VP9PictureToV4L2DecodeSurface(pic);
   2859   dec_surface->set_visible_rect(pic->visible_rect);
   2860   v4l2_dec_->SurfaceReady(dec_surface);
   2861   return true;
   2862 }
   2863 
// Copies a V4L2 entropy context (as read back from the driver) into a
// parser-side VP9 frame context, table by table; sizes are checked at
// compile time. The inverse of FillV4L2Vp9EntropyContext() above.
static void FillVp9FrameContext(struct v4l2_vp9_entropy_ctx& v4l2_entropy_ctx,
                                Vp9FrameContext* vp9_frame_ctx) {
#define ARRAY_MEMCPY_CHECKED_V4L2_ENTR_TO_FRM_CTX(a) \
  ARRAY_MEMCPY_CHECKED(vp9_frame_ctx->a, v4l2_entropy_ctx.a)
  ARRAY_MEMCPY_CHECKED_V4L2_ENTR_TO_FRM_CTX(tx_probs_8x8);
  ARRAY_MEMCPY_CHECKED_V4L2_ENTR_TO_FRM_CTX(tx_probs_16x16);
  ARRAY_MEMCPY_CHECKED_V4L2_ENTR_TO_FRM_CTX(tx_probs_32x32);

  ARRAY_MEMCPY_CHECKED_V4L2_ENTR_TO_FRM_CTX(coef_probs);
  ARRAY_MEMCPY_CHECKED_V4L2_ENTR_TO_FRM_CTX(skip_prob);
  ARRAY_MEMCPY_CHECKED_V4L2_ENTR_TO_FRM_CTX(inter_mode_probs);
  ARRAY_MEMCPY_CHECKED_V4L2_ENTR_TO_FRM_CTX(interp_filter_probs);
  ARRAY_MEMCPY_CHECKED_V4L2_ENTR_TO_FRM_CTX(is_inter_prob);

  ARRAY_MEMCPY_CHECKED_V4L2_ENTR_TO_FRM_CTX(comp_mode_prob);
  ARRAY_MEMCPY_CHECKED_V4L2_ENTR_TO_FRM_CTX(single_ref_prob);
  ARRAY_MEMCPY_CHECKED_V4L2_ENTR_TO_FRM_CTX(comp_ref_prob);

  ARRAY_MEMCPY_CHECKED_V4L2_ENTR_TO_FRM_CTX(y_mode_probs);
  ARRAY_MEMCPY_CHECKED_V4L2_ENTR_TO_FRM_CTX(uv_mode_probs);

  ARRAY_MEMCPY_CHECKED_V4L2_ENTR_TO_FRM_CTX(partition_probs);

  ARRAY_MEMCPY_CHECKED_V4L2_ENTR_TO_FRM_CTX(mv_joint_probs);
  ARRAY_MEMCPY_CHECKED_V4L2_ENTR_TO_FRM_CTX(mv_sign_prob);
  ARRAY_MEMCPY_CHECKED_V4L2_ENTR_TO_FRM_CTX(mv_class_probs);
  ARRAY_MEMCPY_CHECKED_V4L2_ENTR_TO_FRM_CTX(mv_class0_bit_prob);
  ARRAY_MEMCPY_CHECKED_V4L2_ENTR_TO_FRM_CTX(mv_bits_prob);
  ARRAY_MEMCPY_CHECKED_V4L2_ENTR_TO_FRM_CTX(mv_class0_fr_probs);
  ARRAY_MEMCPY_CHECKED_V4L2_ENTR_TO_FRM_CTX(mv_fr_probs);
  ARRAY_MEMCPY_CHECKED_V4L2_ENTR_TO_FRM_CTX(mv_class0_hp_prob);
  ARRAY_MEMCPY_CHECKED_V4L2_ENTR_TO_FRM_CTX(mv_hp_prob);
#undef ARRAY_MEMCPY_CHECKED_V4L2_ENTR_TO_FRM_CTX
}
   2898 
// Reads back the current entropy context for |pic| from the driver via
// VIDIOC_G_EXT_CTRLS and converts it into |frame_ctx|. Used when the
// device requires userspace frame-context handling (see constructor).
bool V4L2SliceVideoDecodeAccelerator::V4L2VP9Accelerator::GetFrameContext(
    const scoped_refptr<VP9Picture>& pic,
    Vp9FrameContext* frame_ctx) {
  struct v4l2_ctrl_vp9_entropy v4l2_entropy;
  memset(&v4l2_entropy, 0, sizeof(v4l2_entropy));

  struct v4l2_ext_control ctrl;
  memset(&ctrl, 0, sizeof(ctrl));
  ctrl.id = V4L2_CID_MPEG_VIDEO_VP9_ENTROPY;
  ctrl.size = sizeof(v4l2_entropy);
  ctrl.p_vp9_entropy = &v4l2_entropy;

  scoped_refptr<V4L2DecodeSurface> dec_surface =
      VP9PictureToV4L2DecodeSurface(pic);

  // Target the surface's config store so we get the context associated with
  // this particular decoded frame.
  struct v4l2_ext_controls ext_ctrls;
  memset(&ext_ctrls, 0, sizeof(ext_ctrls));
  ext_ctrls.count = 1;
  ext_ctrls.controls = &ctrl;
  ext_ctrls.config_store = dec_surface->config_store();

  if (!v4l2_dec_->GetExtControls(&ext_ctrls))
    return false;

  FillVp9FrameContext(v4l2_entropy.current_entropy_ctx, frame_ctx);
  return true;
}
   2926 
   2927 scoped_refptr<V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>
   2928 V4L2SliceVideoDecodeAccelerator::V4L2VP9Accelerator::
   2929     VP9PictureToV4L2DecodeSurface(const scoped_refptr<VP9Picture>& pic) {
   2930   V4L2VP9Picture* v4l2_pic = pic->AsV4L2VP9Picture();
   2931   CHECK(v4l2_pic);
   2932   return v4l2_pic->dec_surface();
   2933 }
   2934 
   2935 void V4L2SliceVideoDecodeAccelerator::DecodeSurface(
   2936     const scoped_refptr<V4L2DecodeSurface>& dec_surface) {
   2937   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
   2938 
   2939   DVLOGF(4) << "Submitting decode for surface: " << dec_surface->ToString();
   2940   Enqueue(dec_surface);
   2941 }
   2942 
   2943 void V4L2SliceVideoDecodeAccelerator::SurfaceReady(
   2944     const scoped_refptr<V4L2DecodeSurface>& dec_surface) {
   2945   DVLOGF(4);
   2946   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
   2947 
   2948   decoder_display_queue_.push(dec_surface);
   2949   TryOutputSurfaces();
   2950 }
   2951 
   2952 void V4L2SliceVideoDecodeAccelerator::TryOutputSurfaces() {
   2953   while (!decoder_display_queue_.empty()) {
   2954     scoped_refptr<V4L2DecodeSurface> dec_surface =
   2955         decoder_display_queue_.front();
   2956 
   2957     if (!dec_surface->decoded())
   2958       break;
   2959 
   2960     decoder_display_queue_.pop();
   2961     OutputSurface(dec_surface);
   2962   }
   2963 }
   2964 
   2965 void V4L2SliceVideoDecodeAccelerator::OutputSurface(
   2966     const scoped_refptr<V4L2DecodeSurface>& dec_surface) {
   2967   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
   2968 
   2969   OutputRecord& output_record =
   2970       output_buffer_map_[dec_surface->output_record()];
   2971 
   2972   bool inserted =
   2973       surfaces_at_display_
   2974           .insert(std::make_pair(output_record.picture_id, dec_surface))
   2975           .second;
   2976   DCHECK(inserted);
   2977 
   2978   DCHECK(!output_record.at_client);
   2979   DCHECK(!output_record.at_device);
   2980   DCHECK_NE(output_record.picture_id, -1);
   2981   output_record.at_client = true;
   2982 
   2983   Picture picture(output_record.picture_id, dec_surface->bitstream_id(),
   2984                   dec_surface->visible_rect(), true /* allow_overlay */);
   2985   DVLOGF(4) << dec_surface->ToString()
   2986             << ", bitstream_id: " << picture.bitstream_buffer_id()
   2987             << ", picture_id: " << picture.picture_buffer_id()
   2988             << ", visible_rect: " << picture.visible_rect().ToString();
   2989   pending_picture_ready_.push(PictureRecord(output_record.cleared, picture));
   2990   SendPictureReady();
   2991   output_record.cleared = true;
   2992 }
   2993 
   2994 scoped_refptr<V4L2SliceVideoDecodeAccelerator::V4L2DecodeSurface>
   2995 V4L2SliceVideoDecodeAccelerator::CreateSurface() {
   2996   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
   2997   DCHECK_EQ(state_, kDecoding);
   2998 
   2999   if (free_input_buffers_.empty() || free_output_buffers_.empty())
   3000     return nullptr;
   3001 
   3002   int input = free_input_buffers_.front();
   3003   free_input_buffers_.pop_front();
   3004   int output = free_output_buffers_.front();
   3005   free_output_buffers_.pop_front();
   3006 
   3007   InputRecord& input_record = input_buffer_map_[input];
   3008   DCHECK_EQ(input_record.bytes_used, 0u);
   3009   DCHECK_EQ(input_record.input_id, -1);
   3010   DCHECK(decoder_current_bitstream_buffer_ != nullptr);
   3011   input_record.input_id = decoder_current_bitstream_buffer_->input_id;
   3012 
   3013   scoped_refptr<V4L2DecodeSurface> dec_surface = new V4L2DecodeSurface(
   3014       decoder_current_bitstream_buffer_->input_id, input, output,
   3015       base::Bind(&V4L2SliceVideoDecodeAccelerator::ReuseOutputBuffer,
   3016                  base::Unretained(this)));
   3017 
   3018   DVLOGF(4) << "Created surface " << input << " -> " << output;
   3019   return dec_surface;
   3020 }
   3021 
// Drains |pending_picture_ready_| by posting PictureReady callbacks. Cleared
// pictures go to |decode_task_runner_| (lower latency); uncleared pictures —
// or all pictures while resetting/flushing/changing surface sets — go to the
// child thread, which tracks completion via PictureCleared().
void V4L2SliceVideoDecodeAccelerator::SendPictureReady() {
  DVLOGF(4);
  DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
  // During reset/flush/surface-set change every picture must go out now so
  // PictureReady precedes the corresponding done/dismissed notification.
  bool send_now =
      (decoder_resetting_ || decoder_flushing_ || surface_set_change_pending_);
  while (!pending_picture_ready_.empty()) {
    bool cleared = pending_picture_ready_.front().cleared;
    const Picture& picture = pending_picture_ready_.front().picture;
    if (cleared && picture_clearing_count_ == 0) {
      DVLOGF(4) << "Posting picture ready to decode task runner for: "
                << picture.picture_buffer_id();
      // This picture is cleared. It can be posted to a thread different than
      // the main GPU thread to reduce latency. This should be the case after
      // all pictures are cleared at the beginning.
      decode_task_runner_->PostTask(
          FROM_HERE,
          base::Bind(&Client::PictureReady, decode_client_, picture));
      pending_picture_ready_.pop();
    } else if (!cleared || send_now) {
      DVLOGF(4) << "cleared=" << pending_picture_ready_.front().cleared
                << ", decoder_resetting_=" << decoder_resetting_
                << ", decoder_flushing_=" << decoder_flushing_
                << ", surface_set_change_pending_="
                << surface_set_change_pending_
                << ", picture_clearing_count_=" << picture_clearing_count_;
      DVLOGF(4) << "Posting picture ready to GPU for: "
                << picture.picture_buffer_id();
      // If the picture is not cleared, post it to the child thread because it
      // has to be cleared in the child thread. A picture only needs to be
      // cleared once. If the decoder is resetting or flushing or changing
      // resolution, send all pictures to ensure PictureReady arrive before
      // reset done, flush done, or picture dismissed.
      child_task_runner_->PostTaskAndReply(
          FROM_HERE, base::Bind(&Client::PictureReady, client_, picture),
          // Unretained is safe. If Client::PictureReady gets to run, |this| is
          // alive. Destroy() will wait the decode thread to finish.
          base::Bind(&V4L2SliceVideoDecodeAccelerator::PictureCleared,
                     base::Unretained(this)));
      // Count this picture as clearing; PictureCleared() decrements and
      // re-invokes SendPictureReady() once the reply runs.
      picture_clearing_count_++;
      pending_picture_ready_.pop();
    } else {
      // This picture is cleared. But some pictures are about to be cleared on
      // the child thread. To preserve the order, do not send this until those
      // pictures are cleared.
      break;
    }
  }
}
   3070 
   3071 void V4L2SliceVideoDecodeAccelerator::PictureCleared() {
   3072   DVLOGF(4) << "clearing count=" << picture_clearing_count_;
   3073   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
   3074   DCHECK_GT(picture_clearing_count_, 0);
   3075   picture_clearing_count_--;
   3076   SendPictureReady();
   3077 }
   3078 
   3079 bool V4L2SliceVideoDecodeAccelerator::TryToSetupDecodeOnSeparateThread(
   3080     const base::WeakPtr<Client>& decode_client,
   3081     const scoped_refptr<base::SingleThreadTaskRunner>& decode_task_runner) {
   3082   decode_client_ = decode_client;
   3083   decode_task_runner_ = decode_task_runner;
   3084   return true;
   3085 }
   3086 
   3087 // static
   3088 VideoDecodeAccelerator::SupportedProfiles
   3089 V4L2SliceVideoDecodeAccelerator::GetSupportedProfiles() {
   3090   scoped_refptr<V4L2Device> device(new V4L2Device());
   3091   if (!device)
   3092     return SupportedProfiles();
   3093 
   3094   return device->GetSupportedDecodeProfiles(arraysize(supported_input_fourccs_),
   3095                                             supported_input_fourccs_);
   3096 }
   3097 
   3098 }  // namespace media
   3099