// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include <dlfcn.h>
#include <errno.h>
#include <fcntl.h>
#include <linux/videodev2.h>
#include <poll.h>
#include <sys/eventfd.h>
#include <sys/ioctl.h>
#include <sys/mman.h>

#include "base/bind.h"
#include "base/debug/trace_event.h"
#include "base/memory/shared_memory.h"
#include "base/message_loop/message_loop.h"
#include "base/message_loop/message_loop_proxy.h"
#include "base/posix/eintr_wrapper.h"
#include "content/common/gpu/media/exynos_video_decode_accelerator.h"
#include "content/common/gpu/media/h264_parser.h"
#include "ui/gl/scoped_binders.h"

namespace content {

#define NOTIFY_ERROR(x)                            \
  do {                                             \
    SetDecoderState(kError);                       \
    DLOG(ERROR) << "calling NotifyError(): " << x; \
    NotifyError(x);                                \
  } while (0)

#define IOCTL_OR_ERROR_RETURN(fd, type, arg)                       \
  do {                                                             \
    if (HANDLE_EINTR(ioctl(fd, type, arg)) != 0) {                 \
      DPLOG(ERROR) << __func__ << "(): ioctl() failed: " << #type; \
      NOTIFY_ERROR(PLATFORM_FAILURE);                              \
      return;                                                      \
    }                                                              \
  } while (0)

#define IOCTL_OR_ERROR_RETURN_FALSE(fd, type, arg)                 \
  do {                                                             \
    if (HANDLE_EINTR(ioctl(fd, type, arg)) != 0) {                 \
      DPLOG(ERROR) << __func__ << "(): ioctl() failed: " << #type; \
      NOTIFY_ERROR(PLATFORM_FAILURE);                              \
      return false;                                                \
    }                                                              \
  } while (0)

namespace {

// TODO(posciak): remove once we update linux-headers.
#ifndef V4L2_EVENT_RESOLUTION_CHANGE
#define V4L2_EVENT_RESOLUTION_CHANGE 5
#endif

const char kExynosMfcDevice[] = "/dev/mfc-dec";
const char kExynosGscDevice[] = "/dev/gsc1";
const char kMaliDriver[] = "libmali.so";

typedef EGLBoolean (*MaliEglImageGetBufferExtPhandleFunc)(EGLImageKHR, EGLint*,
                                                          void*);

void* libmali_handle = NULL;
MaliEglImageGetBufferExtPhandleFunc
    mali_egl_image_get_buffer_ext_phandle = NULL;
}  // anonymous namespace

struct ExynosVideoDecodeAccelerator::BitstreamBufferRef {
  BitstreamBufferRef(
      base::WeakPtr<Client>& client,
      scoped_refptr<base::MessageLoopProxy>& client_message_loop_proxy,
      base::SharedMemory* shm,
      size_t size,
      int32 input_id);
  ~BitstreamBufferRef();
  const base::WeakPtr<Client> client;
  const scoped_refptr<base::MessageLoopProxy> client_message_loop_proxy;
  const scoped_ptr<base::SharedMemory> shm;
  const size_t size;
  off_t bytes_used;
  const int32 input_id;
};

struct ExynosVideoDecodeAccelerator::PictureBufferArrayRef {
  PictureBufferArrayRef(EGLDisplay egl_display, size_t count);
  ~PictureBufferArrayRef();

  struct PictureBufferRef {
    EGLImageKHR egl_image;
    int egl_image_fd;
    int32 client_id;
  };

  EGLDisplay const egl_display;
  std::vector<PictureBufferRef> picture_buffers;
};

struct ExynosVideoDecodeAccelerator::EGLSyncKHRRef {
  EGLSyncKHRRef(EGLDisplay egl_display, EGLSyncKHR egl_sync);
  ~EGLSyncKHRRef();
  EGLDisplay const egl_display;
  EGLSyncKHR egl_sync;
};

ExynosVideoDecodeAccelerator::BitstreamBufferRef::BitstreamBufferRef(
    base::WeakPtr<Client>& client,
    scoped_refptr<base::MessageLoopProxy>& client_message_loop_proxy,
    base::SharedMemory* shm, size_t size, int32 input_id)
    : client(client),
      client_message_loop_proxy(client_message_loop_proxy),
      shm(shm),
      size(size),
      bytes_used(0),
      input_id(input_id) {
}

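// Dropping the last reference to a BitstreamBufferRef that came from the
// client (input_id >= 0) is what returns the bitstream buffer: the destructor
// below posts NotifyEndOfBitstreamBuffer() back to the client message loop.
// Illustrative sketch (not code from this file):
//
//   scoped_ptr<BitstreamBufferRef> ref(new BitstreamBufferRef(...));
//   ...  // decode from ref->shm
//   ref.reset();  // client is notified that this bitstream buffer is done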
ExynosVideoDecodeAccelerator::BitstreamBufferRef::~BitstreamBufferRef() {
  if (input_id >= 0) {
    client_message_loop_proxy->PostTask(FROM_HERE, base::Bind(
        &Client::NotifyEndOfBitstreamBuffer, client, input_id));
  }
}

ExynosVideoDecodeAccelerator::PictureBufferArrayRef::PictureBufferArrayRef(
    EGLDisplay egl_display, size_t count)
    : egl_display(egl_display),
      picture_buffers(count) {
  for (size_t i = 0; i < picture_buffers.size(); ++i) {
    PictureBufferRef& buffer = picture_buffers[i];
    buffer.egl_image = EGL_NO_IMAGE_KHR;
    buffer.egl_image_fd = -1;
    buffer.client_id = -1;
  }
}

ExynosVideoDecodeAccelerator::PictureBufferArrayRef::~PictureBufferArrayRef() {
  for (size_t i = 0; i < picture_buffers.size(); ++i) {
    PictureBufferRef& buffer = picture_buffers[i];
    if (buffer.egl_image != EGL_NO_IMAGE_KHR)
      eglDestroyImageKHR(egl_display, buffer.egl_image);
    if (buffer.egl_image_fd != -1)
      HANDLE_EINTR(close(buffer.egl_image_fd));
  }
}

ExynosVideoDecodeAccelerator::EGLSyncKHRRef::EGLSyncKHRRef(
    EGLDisplay egl_display, EGLSyncKHR egl_sync)
    : egl_display(egl_display),
      egl_sync(egl_sync) {
}

ExynosVideoDecodeAccelerator::EGLSyncKHRRef::~EGLSyncKHRRef() {
  if (egl_sync != EGL_NO_SYNC_KHR)
    eglDestroySyncKHR(egl_display, egl_sync);
}

ExynosVideoDecodeAccelerator::MfcInputRecord::MfcInputRecord()
    : at_device(false),
      address(NULL),
      length(0),
      bytes_used(0),
      input_id(-1) {
}

ExynosVideoDecodeAccelerator::MfcInputRecord::~MfcInputRecord() {
}

ExynosVideoDecodeAccelerator::MfcOutputRecord::MfcOutputRecord()
    : at_device(false),
      input_id(-1) {
  bytes_used[0] = 0;
  bytes_used[1] = 0;
  address[0] = NULL;
  address[1] = NULL;
  length[0] = 0;
  length[1] = 0;
}

ExynosVideoDecodeAccelerator::MfcOutputRecord::~MfcOutputRecord() {
}

ExynosVideoDecodeAccelerator::GscInputRecord::GscInputRecord()
    : at_device(false),
      mfc_output(-1) {
}

ExynosVideoDecodeAccelerator::GscInputRecord::~GscInputRecord() {
}

ExynosVideoDecodeAccelerator::GscOutputRecord::GscOutputRecord()
    : at_device(false),
      at_client(false),
      fd(-1),
      egl_image(EGL_NO_IMAGE_KHR),
      egl_sync(EGL_NO_SYNC_KHR),
      picture_id(-1) {
}

ExynosVideoDecodeAccelerator::GscOutputRecord::~GscOutputRecord() {
}

ExynosVideoDecodeAccelerator::ExynosVideoDecodeAccelerator(
    EGLDisplay egl_display,
    EGLContext egl_context,
    Client* client,
    const base::Callback<bool(void)>& make_context_current)
    : child_message_loop_proxy_(base::MessageLoopProxy::current()),
      weak_this_(base::AsWeakPtr(this)),
      client_ptr_factory_(client),
      client_(client_ptr_factory_.GetWeakPtr()),
      decoder_thread_("ExynosDecoderThread"),
      decoder_state_(kUninitialized),
      decoder_delay_bitstream_buffer_id_(-1),
      decoder_current_input_buffer_(-1),
      decoder_decode_buffer_tasks_scheduled_(0),
      decoder_frames_at_client_(0),
      decoder_flushing_(false),
      resolution_change_pending_(false),
      resolution_change_reset_pending_(false),
      decoder_partial_frame_pending_(false),
      mfc_fd_(-1),
      mfc_input_streamon_(false),
      mfc_input_buffer_queued_count_(0),
      mfc_output_streamon_(false),
      mfc_output_buffer_queued_count_(0),
      mfc_output_buffer_pixelformat_(0),
      mfc_output_dpb_size_(0),
      gsc_fd_(-1),
      gsc_input_streamon_(false),
      gsc_input_buffer_queued_count_(0),
      gsc_output_streamon_(false),
      gsc_output_buffer_queued_count_(0),
      device_poll_thread_("ExynosDevicePollThread"),
      device_poll_interrupt_fd_(-1),
      make_context_current_(make_context_current),
      egl_display_(egl_display),
      egl_context_(egl_context),
      video_profile_(media::VIDEO_CODEC_PROFILE_UNKNOWN) {
}

ExynosVideoDecodeAccelerator::~ExynosVideoDecodeAccelerator() {
  DCHECK(!decoder_thread_.IsRunning());
  DCHECK(!device_poll_thread_.IsRunning());

  if (device_poll_interrupt_fd_ != -1) {
    HANDLE_EINTR(close(device_poll_interrupt_fd_));
    device_poll_interrupt_fd_ = -1;
  }
  if (gsc_fd_ != -1) {
    DestroyGscInputBuffers();
    DestroyGscOutputBuffers();
    HANDLE_EINTR(close(gsc_fd_));
    gsc_fd_ = -1;
  }
  if (mfc_fd_ != -1) {
    DestroyMfcInputBuffers();
    DestroyMfcOutputBuffers();
    HANDLE_EINTR(close(mfc_fd_));
    mfc_fd_ = -1;
  }

  // These maps have members that should be manually destroyed, e.g. file
  // descriptors, mmap() segments, etc.
  DCHECK(mfc_input_buffer_map_.empty());
  DCHECK(mfc_output_buffer_map_.empty());
  DCHECK(gsc_input_buffer_map_.empty());
  DCHECK(gsc_output_buffer_map_.empty());
}

bool ExynosVideoDecodeAccelerator::Initialize(
    media::VideoCodecProfile profile) {
  DVLOG(3) << "Initialize()";
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
  DCHECK_EQ(decoder_state_, kUninitialized);

  switch (profile) {
    case media::H264PROFILE_BASELINE:
      DVLOG(2) << "Initialize(): profile H264PROFILE_BASELINE";
      break;
    case media::H264PROFILE_MAIN:
      DVLOG(2) << "Initialize(): profile H264PROFILE_MAIN";
      break;
    case media::H264PROFILE_HIGH:
      DVLOG(2) << "Initialize(): profile H264PROFILE_HIGH";
      break;
    case media::VP8PROFILE_MAIN:
      DVLOG(2) << "Initialize(): profile VP8PROFILE_MAIN";
      break;
    default:
      DLOG(ERROR) << "Initialize(): unsupported profile=" << profile;
      return false;
  };
  video_profile_ = profile;

  static bool sandbox_initialized = PostSandboxInitialization();
  if (!sandbox_initialized) {
    DLOG(ERROR) << "Initialize(): PostSandboxInitialization() failed";
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return false;
  }

  if (egl_display_ == EGL_NO_DISPLAY) {
    DLOG(ERROR) << "Initialize(): could not get EGLDisplay";
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return false;
  }

  if (egl_context_ == EGL_NO_CONTEXT) {
    DLOG(ERROR) << "Initialize(): could not get EGLContext";
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return false;
  }

  // We need the context to be initialized to query extensions.
  if (!make_context_current_.Run()) {
    DLOG(ERROR) << "Initialize(): could not make context current";
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return false;
  }

  if (!gfx::g_driver_egl.ext.b_EGL_KHR_fence_sync) {
    DLOG(ERROR) << "Initialize(): context does not have EGL_KHR_fence_sync";
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return false;
  }

  // Open the video devices.
  DVLOG(2) << "Initialize(): opening MFC device: " << kExynosMfcDevice;
  mfc_fd_ = HANDLE_EINTR(open(kExynosMfcDevice,
                              O_RDWR | O_NONBLOCK | O_CLOEXEC));
  if (mfc_fd_ == -1) {
    DPLOG(ERROR) << "Initialize(): could not open MFC device: "
                 << kExynosMfcDevice;
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return false;
  }
  DVLOG(2) << "Initialize(): opening GSC device: " << kExynosGscDevice;
  gsc_fd_ = HANDLE_EINTR(open(kExynosGscDevice,
                              O_RDWR | O_NONBLOCK | O_CLOEXEC));
  if (gsc_fd_ == -1) {
    DPLOG(ERROR) << "Initialize(): could not open GSC device: "
                 << kExynosGscDevice;
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return false;
  }

  // Create the interrupt fd.
  DCHECK_EQ(device_poll_interrupt_fd_, -1);
  device_poll_interrupt_fd_ = eventfd(0, EFD_NONBLOCK | EFD_CLOEXEC);
  if (device_poll_interrupt_fd_ == -1) {
    DPLOG(ERROR) << "Initialize(): eventfd() failed";
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return false;
  }

  // Capabilities check.
  struct v4l2_capability caps;
  const __u32 kCapsRequired =
      V4L2_CAP_VIDEO_CAPTURE_MPLANE |
      V4L2_CAP_VIDEO_OUTPUT_MPLANE |
      V4L2_CAP_STREAMING;
  IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_QUERYCAP, &caps);
  if ((caps.capabilities & kCapsRequired) != kCapsRequired) {
    DLOG(ERROR) << "Initialize(): ioctl() failed: VIDIOC_QUERYCAP"
        ", caps check failed: 0x" << std::hex << caps.capabilities;
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return false;
  }
  IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_QUERYCAP, &caps);
  if ((caps.capabilities & kCapsRequired) != kCapsRequired) {
    DLOG(ERROR) << "Initialize(): ioctl() failed: VIDIOC_QUERYCAP"
        ", caps check failed: 0x" << std::hex << caps.capabilities;
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return false;
  }

  if (!CreateMfcInputBuffers())
    return false;

  // MFC output format has to be set up before streaming starts.
  struct v4l2_format format;
  memset(&format, 0, sizeof(format));
  format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  format.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_NV12MT_16X16;
  IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_S_FMT, &format);

  // Subscribe to the resolution change event.
  struct v4l2_event_subscription sub;
  memset(&sub, 0, sizeof(sub));
  sub.type = V4L2_EVENT_RESOLUTION_CHANGE;
  IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_SUBSCRIBE_EVENT, &sub);

  // Initialize format-specific bits.
  if (video_profile_ >= media::H264PROFILE_MIN &&
      video_profile_ <= media::H264PROFILE_MAX) {
    decoder_h264_parser_.reset(new content::H264Parser());
  }

  if (!decoder_thread_.Start()) {
    DLOG(ERROR) << "Initialize(): decoder thread failed to start";
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return false;
  }

  SetDecoderState(kInitialized);

  child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind(
      &Client::NotifyInitializeDone, client_));
  return true;
}

void ExynosVideoDecodeAccelerator::Decode(
    const media::BitstreamBuffer& bitstream_buffer) {
  DVLOG(1) << "Decode(): input_id=" << bitstream_buffer.id()
           << ", size=" << bitstream_buffer.size();
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());

  scoped_ptr<BitstreamBufferRef> bitstream_record(new BitstreamBufferRef(
      client_, child_message_loop_proxy_,
      new base::SharedMemory(bitstream_buffer.handle(), true),
      bitstream_buffer.size(), bitstream_buffer.id()));
  if (!bitstream_record->shm->Map(bitstream_buffer.size())) {
    DLOG(ERROR) << "Decode(): could not map bitstream_buffer";
    NOTIFY_ERROR(UNREADABLE_INPUT);
    return;
  }
  DVLOG(3) << "Decode(): mapped to addr=" << bitstream_record->shm->memory();

  // DecodeTask() will take care of running a DecodeBufferTask().
  decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
      &ExynosVideoDecodeAccelerator::DecodeTask, base::Unretained(this),
      base::Passed(&bitstream_record)));
}

void ExynosVideoDecodeAccelerator::AssignPictureBuffers(
    const std::vector<media::PictureBuffer>& buffers) {
  DVLOG(3) << "AssignPictureBuffers(): buffer_count=" << buffers.size();
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());

  if (buffers.size() != gsc_output_buffer_map_.size()) {
    DLOG(ERROR) << "AssignPictureBuffers(): Failed to provide requested picture"
        " buffers. (Got " << buffers.size() << ", requested " <<
        gsc_output_buffer_map_.size() << ")";
    NOTIFY_ERROR(INVALID_ARGUMENT);
    return;
  }

  for (size_t i = 0; i < buffers.size(); ++i) {
    if (buffers[i].size() != frame_buffer_size_) {
      DLOG(ERROR) << "AssignPictureBuffers(): invalid buffer size";
      NOTIFY_ERROR(INVALID_ARGUMENT);
      return;
    }
  }

  if (!make_context_current_.Run()) {
    DLOG(ERROR) << "AssignPictureBuffers(): could not make context current";
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return;
  }

  scoped_ptr<PictureBufferArrayRef> pic_buffers_ref(
      new PictureBufferArrayRef(egl_display_, buffers.size()));

  const static EGLint kImageAttrs[] = {
    EGL_IMAGE_PRESERVED_KHR, 0,
    EGL_NONE,
  };
  Display* x_display = base::MessagePumpForUI::GetDefaultXDisplay();
  gfx::ScopedTextureBinder bind_restore(GL_TEXTURE_2D, 0);
  for (size_t i = 0; i < pic_buffers_ref->picture_buffers.size(); ++i) {
    PictureBufferArrayRef::PictureBufferRef& buffer =
        pic_buffers_ref->picture_buffers[i];
    // Create the X pixmap and then create an EGLImageKHR from it, so we can
    // get dma_buf backing.
    Pixmap pixmap = XCreatePixmap(x_display, RootWindow(x_display, 0),
        buffers[i].size().width(), buffers[i].size().height(), 32);
    if (!pixmap) {
      DLOG(ERROR) << "AssignPictureBuffers(): could not create X pixmap";
      NOTIFY_ERROR(PLATFORM_FAILURE);
      return;
    }
    glBindTexture(GL_TEXTURE_2D, buffers[i].texture_id());
    EGLImageKHR egl_image = eglCreateImageKHR(
        egl_display_, EGL_NO_CONTEXT, EGL_NATIVE_PIXMAP_KHR,
        (EGLClientBuffer)pixmap, kImageAttrs);
    // We can free the X pixmap immediately -- according to the
    // EGL_KHR_image_base spec, the backing storage does not go away until the
    // last referencing EGLImage is destroyed.
    XFreePixmap(x_display, pixmap);
    if (egl_image == EGL_NO_IMAGE_KHR) {
      DLOG(ERROR) << "AssignPictureBuffers(): could not create EGLImageKHR";
      NOTIFY_ERROR(PLATFORM_FAILURE);
      return;
    }
    buffer.egl_image = egl_image;
    int fd;
    if (!mali_egl_image_get_buffer_ext_phandle(buffer.egl_image, NULL, &fd)) {
      DLOG(ERROR) << "AssignPictureBuffers(): "
                  << "could not get EGLImageKHR dmabuf fd";
      NOTIFY_ERROR(PLATFORM_FAILURE);
      return;
    }
    buffer.egl_image_fd = fd;
    glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, egl_image);
    buffer.client_id = buffers[i].id();
  }
  decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
      &ExynosVideoDecodeAccelerator::AssignPictureBuffersTask,
      base::Unretained(this), base::Passed(&pic_buffers_ref)));
}

void ExynosVideoDecodeAccelerator::ReusePictureBuffer(int32 picture_buffer_id) {
  DVLOG(3) << "ReusePictureBuffer(): picture_buffer_id=" << picture_buffer_id;
  // Must be run on child thread, as we'll insert a sync in the EGL context.
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());

  if (!make_context_current_.Run()) {
    DLOG(ERROR) << "ReusePictureBuffer(): could not make context current";
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return;
  }

  EGLSyncKHR egl_sync =
      eglCreateSyncKHR(egl_display_, EGL_SYNC_FENCE_KHR, NULL);
  if (egl_sync == EGL_NO_SYNC_KHR) {
    DLOG(ERROR) << "ReusePictureBuffer(): eglCreateSyncKHR() failed";
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return;
  }

  scoped_ptr<EGLSyncKHRRef> egl_sync_ref(new EGLSyncKHRRef(
      egl_display_, egl_sync));
  decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
      &ExynosVideoDecodeAccelerator::ReusePictureBufferTask,
      base::Unretained(this), picture_buffer_id, base::Passed(&egl_sync_ref)));
}

void ExynosVideoDecodeAccelerator::Flush() {
  DVLOG(3) << "Flush()";
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
  decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
      &ExynosVideoDecodeAccelerator::FlushTask, base::Unretained(this)));
}

void ExynosVideoDecodeAccelerator::Reset() {
  DVLOG(3) << "Reset()";
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
  decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
      &ExynosVideoDecodeAccelerator::ResetTask, base::Unretained(this)));
}

void ExynosVideoDecodeAccelerator::Destroy() {
  DVLOG(3) << "Destroy()";
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());

  // We're destroying; cancel all callbacks.
  client_ptr_factory_.InvalidateWeakPtrs();

  // If the decoder thread is running, destroy using posted task.
  if (decoder_thread_.IsRunning()) {
    decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
        &ExynosVideoDecodeAccelerator::DestroyTask, base::Unretained(this)));
    // DestroyTask() will cause the decoder_thread_ to flush all tasks.
    decoder_thread_.Stop();
  } else {
    // Otherwise, call the destroy task directly.
    DestroyTask();
  }

  // Set to kError state just in case.
  SetDecoderState(kError);

  delete this;
}

// static
void ExynosVideoDecodeAccelerator::PreSandboxInitialization() {
  DVLOG(3) << "PreSandboxInitialization()";
  dlerror();

  libmali_handle = dlopen(kMaliDriver, RTLD_LAZY | RTLD_LOCAL);
  if (libmali_handle == NULL) {
    DPLOG(ERROR) << "failed to dlopen() " << kMaliDriver << ": " << dlerror();
  }
}

// static
bool ExynosVideoDecodeAccelerator::PostSandboxInitialization() {
  DVLOG(3) << "PostSandboxInitialization()";
  if (libmali_handle == NULL) {
    DLOG(ERROR) << "PostSandboxInitialization(): no " << kMaliDriver
                << " driver handle";
    return false;
  }

  dlerror();
  mali_egl_image_get_buffer_ext_phandle =
      reinterpret_cast<MaliEglImageGetBufferExtPhandleFunc>(
          dlsym(libmali_handle, "mali_egl_image_get_buffer_ext_phandle"));
  if (mali_egl_image_get_buffer_ext_phandle == NULL) {
    DPLOG(ERROR) << "PostSandboxInitialization(): failed to dlsym() "
                 << "mali_egl_image_get_buffer_ext_phandle: " << dlerror();
    return false;
  }

  return true;
}

void ExynosVideoDecodeAccelerator::DecodeTask(
    scoped_ptr<BitstreamBufferRef> bitstream_record) {
  DVLOG(3) << "DecodeTask(): input_id=" << bitstream_record->input_id;
  DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
  DCHECK_NE(decoder_state_, kUninitialized);
  TRACE_EVENT1("Video Decoder", "EVDA::DecodeTask", "input_id",
               bitstream_record->input_id);

  if (decoder_state_ == kResetting || decoder_flushing_) {
    // In the case that we're resetting or flushing, we need to delay decoding
    // the BitstreamBuffers that come after the Reset() or Flush() call. When
    // we're here, we know that this DecodeTask() was scheduled by a Decode()
    // call that came after (in the client thread) the Reset() or Flush() call;
    // thus set up the delay if necessary.
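    // Illustrative ordering (not from a real trace): if the client issues
    // Decode(1), Decode(2), Reset(), Decode(3), Decode(4), the tasks for 3 and
    // 4 may land here while we are still in kResetting; we record 3 as
    // decoder_delay_bitstream_buffer_id_ so that DecodeBufferTask() holds off
    // on 3 and everything after it until the reset completes and clears the
    // delay id.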
    if (decoder_delay_bitstream_buffer_id_ == -1)
      decoder_delay_bitstream_buffer_id_ = bitstream_record->input_id;
  } else if (decoder_state_ == kError) {
    DVLOG(2) << "DecodeTask(): early out: kError state";
    return;
  }

  decoder_input_queue_.push_back(
      linked_ptr<BitstreamBufferRef>(bitstream_record.release()));
  decoder_decode_buffer_tasks_scheduled_++;
  DecodeBufferTask();
}

void ExynosVideoDecodeAccelerator::DecodeBufferTask() {
  DVLOG(3) << "DecodeBufferTask()";
  DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
  DCHECK_NE(decoder_state_, kUninitialized);
  TRACE_EVENT0("Video Decoder", "EVDA::DecodeBufferTask");

  decoder_decode_buffer_tasks_scheduled_--;

  if (decoder_state_ == kResetting) {
    DVLOG(2) << "DecodeBufferTask(): early out: kResetting state";
    return;
  } else if (decoder_state_ == kError) {
    DVLOG(2) << "DecodeBufferTask(): early out: kError state";
    return;
  } else if (decoder_state_ == kChangingResolution) {
    DVLOG(2) << "DecodeBufferTask(): early out: resolution change pending";
    return;
  }

  if (decoder_current_bitstream_buffer_ == NULL) {
    if (decoder_input_queue_.empty()) {
      // We're waiting for a new buffer -- exit without scheduling a new task.
      return;
    }
    linked_ptr<BitstreamBufferRef>& buffer_ref = decoder_input_queue_.front();
    if (decoder_delay_bitstream_buffer_id_ == buffer_ref->input_id) {
      // We're asked to delay decoding on this and subsequent buffers.
      return;
    }

    // Set up to use the next buffer.
    decoder_current_bitstream_buffer_.reset(buffer_ref.release());
    decoder_input_queue_.pop_front();
    DVLOG(3) << "DecodeBufferTask(): reading input_id="
             << decoder_current_bitstream_buffer_->input_id
             << ", addr=" << (decoder_current_bitstream_buffer_->shm ?
                 decoder_current_bitstream_buffer_->shm->memory() :
                 NULL)
             << ", size=" << decoder_current_bitstream_buffer_->size;
  }
  bool schedule_task = false;
  const size_t size = decoder_current_bitstream_buffer_->size;
  size_t decoded_size = 0;
  if (size == 0) {
    const int32 input_id = decoder_current_bitstream_buffer_->input_id;
    if (input_id >= 0) {
      // This is a buffer queued from the client that has zero size. Skip.
      schedule_task = true;
    } else {
      // This is a buffer of zero size, queued to flush the pipe. Flush.
      DCHECK_EQ(decoder_current_bitstream_buffer_->shm.get(),
                static_cast<base::SharedMemory*>(NULL));
      // Enqueue a buffer guaranteed to be empty. To do that, we flush the
      // current input, enqueue no data to the next frame, then flush that down.
      schedule_task = true;
      if (decoder_current_input_buffer_ != -1 &&
          mfc_input_buffer_map_[decoder_current_input_buffer_].input_id !=
              kFlushBufferId)
        schedule_task = FlushInputFrame();

      if (schedule_task && AppendToInputFrame(NULL, 0) && FlushInputFrame()) {
        DVLOG(2) << "DecodeBufferTask(): enqueued flush buffer";
        decoder_partial_frame_pending_ = false;
        schedule_task = true;
      } else {
        // If we failed to enqueue the empty buffer (due to pipeline
        // backpressure), don't advance the bitstream buffer queue, and don't
        // schedule the next task. This bitstream buffer queue entry will get
        // reprocessed when the pipeline frees up.
        schedule_task = false;
      }
    }
  } else {
    // This is a buffer queued from the client, with actual contents. Decode.
    const uint8* const data =
        reinterpret_cast<const uint8*>(
            decoder_current_bitstream_buffer_->shm->memory()) +
        decoder_current_bitstream_buffer_->bytes_used;
    const size_t data_size =
        decoder_current_bitstream_buffer_->size -
        decoder_current_bitstream_buffer_->bytes_used;
    if (!AdvanceFrameFragment(data, data_size, &decoded_size)) {
      NOTIFY_ERROR(UNREADABLE_INPUT);
      return;
    }
    // AdvanceFrameFragment should not return a size larger than the buffer
    // size, even on invalid data.
    CHECK_LE(decoded_size, data_size);

    switch (decoder_state_) {
      case kInitialized:
      case kAfterReset:
        schedule_task = DecodeBufferInitial(data, decoded_size, &decoded_size);
        break;
      case kDecoding:
        schedule_task = DecodeBufferContinue(data, decoded_size);
        break;
      default:
        NOTIFY_ERROR(ILLEGAL_STATE);
        return;
    }
  }
  if (decoder_state_ == kError) {
    // Failed during decode.
    return;
  }

  if (schedule_task) {
    decoder_current_bitstream_buffer_->bytes_used += decoded_size;
    if (decoder_current_bitstream_buffer_->bytes_used ==
        decoder_current_bitstream_buffer_->size) {
      // Our current bitstream buffer is done; return it.
      int32 input_id = decoder_current_bitstream_buffer_->input_id;
      DVLOG(3) << "DecodeBufferTask(): finished input_id=" << input_id;
      // BitstreamBufferRef destructor calls NotifyEndOfBitstreamBuffer().
      decoder_current_bitstream_buffer_.reset();
    }
    ScheduleDecodeBufferTaskIfNeeded();
  }
}

bool ExynosVideoDecodeAccelerator::AdvanceFrameFragment(
    const uint8* data,
    size_t size,
    size_t* endpos) {
  if (video_profile_ >= media::H264PROFILE_MIN &&
      video_profile_ <= media::H264PROFILE_MAX) {
    // For H264, we need to feed HW one frame at a time. This is going to take
    // some parsing of our input stream.
    decoder_h264_parser_->SetStream(data, size);
    content::H264NALU nalu;
    content::H264Parser::Result result;
    *endpos = 0;

    // Keep on peeking the next NALs while they don't indicate a frame
    // boundary.
    for (;;) {
      bool end_of_frame = false;
      result = decoder_h264_parser_->AdvanceToNextNALU(&nalu);
      if (result == content::H264Parser::kInvalidStream ||
          result == content::H264Parser::kUnsupportedStream)
        return false;
      if (result == content::H264Parser::kEOStream) {
        // We've reached the end of the buffer before finding a frame boundary.
        decoder_partial_frame_pending_ = true;
        return true;
      }
      switch (nalu.nal_unit_type) {
        case content::H264NALU::kNonIDRSlice:
        case content::H264NALU::kIDRSlice:
          if (nalu.size < 1)
            return false;
          // For these two, if the "first_mb_in_slice" field is zero, start a
          // new frame and return. This field is Exp-Golomb coded starting on
          // the eighth data bit of the NAL; a zero value is encoded with a
          // leading '1' bit in the byte, which we can detect as the byte being
          // (unsigned) greater than or equal to 0x80.
          if (nalu.data[1] >= 0x80) {
            end_of_frame = true;
            break;
          }
          break;
        case content::H264NALU::kSPS:
        case content::H264NALU::kPPS:
        case content::H264NALU::kEOSeq:
        case content::H264NALU::kEOStream:
          // These unconditionally signal a frame boundary.
          end_of_frame = true;
          break;
        default:
          // For all others, keep going.
          break;
      }
      if (end_of_frame) {
        if (!decoder_partial_frame_pending_ && *endpos == 0) {
          // The frame was previously restarted, and we haven't filled the
          // current frame with any contents yet. Start the new frame here and
          // continue parsing NALs.
        } else {
          // The frame wasn't previously restarted and/or we have contents for
          // the current frame; signal the start of a new frame here: we don't
          // have a partial frame anymore.
          decoder_partial_frame_pending_ = false;
          return true;
        }
      }
      *endpos = (nalu.data + nalu.size) - data;
    }
    NOTREACHED();
    return false;
  } else {
    DCHECK_GE(video_profile_, media::VP8PROFILE_MIN);
    DCHECK_LE(video_profile_, media::VP8PROFILE_MAX);
    // For VP8, we can just dump the entire buffer. No fragmentation needed,
    // and we never return a partial frame.
    *endpos = size;
    decoder_partial_frame_pending_ = false;
    return true;
  }
}

void ExynosVideoDecodeAccelerator::ScheduleDecodeBufferTaskIfNeeded() {
  DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());

  // If we're behind on tasks, schedule another one.
  int buffers_to_decode = decoder_input_queue_.size();
  if (decoder_current_bitstream_buffer_ != NULL)
    buffers_to_decode++;
  if (decoder_decode_buffer_tasks_scheduled_ < buffers_to_decode) {
    decoder_decode_buffer_tasks_scheduled_++;
    decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
        &ExynosVideoDecodeAccelerator::DecodeBufferTask,
        base::Unretained(this)));
  }
}

bool ExynosVideoDecodeAccelerator::DecodeBufferInitial(
    const void* data, size_t size, size_t* endpos) {
  DVLOG(3) << "DecodeBufferInitial(): data=" << data << ", size=" << size;
  DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
  DCHECK_NE(decoder_state_, kUninitialized);
  DCHECK_NE(decoder_state_, kDecoding);
  DCHECK(!device_poll_thread_.IsRunning());
  // Initial decode. We haven't been able to get output stream format info yet.
  // Get it, and start decoding.

  // Copy in and send to HW.
  if (!AppendToInputFrame(data, size))
    return false;

  // If we only have a partial frame, don't flush and process yet.
  if (decoder_partial_frame_pending_)
    return true;

  if (!FlushInputFrame())
    return false;

  // Recycle buffers.
  DequeueMfc();

  // Check and see if we have format info yet.
  struct v4l2_format format;
  bool again = false;
  if (!GetFormatInfo(&format, &again))
    return false;

  if (again) {
    // Need more stream to decode format, return true and schedule next buffer.
    *endpos = size;
    return true;
  }

  // Run this initialization only on first startup.
  if (decoder_state_ == kInitialized) {
    DVLOG(3) << "DecodeBufferInitial(): running initialization";
    // Success! Set up our parameters.
    if (!CreateBuffersForFormat(format))
      return false;

    // MFC expects to process the initial buffer once during stream init to
    // configure stream parameters, but will not consume the stream data on
    // that iteration. Subsequent iterations (including after reset) do not
    // require the stream init step.
    *endpos = 0;
  } else {
    *endpos = size;
  }

  // StartDevicePoll will raise the error if there is one.
  if (!StartDevicePoll())
    return false;

  decoder_state_ = kDecoding;
  ScheduleDecodeBufferTaskIfNeeded();
  return true;
}

bool ExynosVideoDecodeAccelerator::DecodeBufferContinue(
    const void* data, size_t size) {
  DVLOG(3) << "DecodeBufferContinue(): data=" << data << ", size=" << size;
  DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
  DCHECK_EQ(decoder_state_, kDecoding);

  // Both of these calls will set kError state if they fail.
  // Only flush the frame if it's complete.
  return (AppendToInputFrame(data, size) &&
          (decoder_partial_frame_pending_ || FlushInputFrame()));
}

bool ExynosVideoDecodeAccelerator::AppendToInputFrame(
    const void* data, size_t size) {
  DVLOG(3) << "AppendToInputFrame()";
  DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
  DCHECK_NE(decoder_state_, kUninitialized);
  DCHECK_NE(decoder_state_, kResetting);
  DCHECK_NE(decoder_state_, kError);
  // This routine can handle data == NULL and size == 0, which occurs when
  // we queue an empty buffer for the purposes of flushing the pipe.

  // Flush if we're too big
  if (decoder_current_input_buffer_ != -1) {
    MfcInputRecord& input_record =
        mfc_input_buffer_map_[decoder_current_input_buffer_];
    if (input_record.bytes_used + size > input_record.length) {
      if (!FlushInputFrame())
        return false;
      decoder_current_input_buffer_ = -1;
    }
  }

  // Try to get an available input buffer
  if (decoder_current_input_buffer_ == -1) {
    if (mfc_free_input_buffers_.empty()) {
      // See if we can get more free buffers from HW
      DequeueMfc();
      if (mfc_free_input_buffers_.empty()) {
        // Nope!
        DVLOG(2) << "AppendToInputFrame(): stalled for input buffers";
        return false;
      }
    }
    decoder_current_input_buffer_ = mfc_free_input_buffers_.back();
    mfc_free_input_buffers_.pop_back();
    MfcInputRecord& input_record =
        mfc_input_buffer_map_[decoder_current_input_buffer_];
    DCHECK_EQ(input_record.bytes_used, 0);
    DCHECK_EQ(input_record.input_id, -1);
    DCHECK(decoder_current_bitstream_buffer_ != NULL);
    input_record.input_id = decoder_current_bitstream_buffer_->input_id;
  }

  DCHECK(data != NULL || size == 0);
  if (size == 0) {
    // If we asked for an empty buffer, return now. We return only after
    // getting the next input buffer, since we might actually want an empty
    // input buffer for flushing purposes.
    return true;
  }

  // Copy in to the buffer.
  MfcInputRecord& input_record =
      mfc_input_buffer_map_[decoder_current_input_buffer_];
  if (size > input_record.length - input_record.bytes_used) {
    LOG(ERROR) << "AppendToInputFrame(): over-size frame, erroring";
    NOTIFY_ERROR(UNREADABLE_INPUT);
    return false;
  }
  memcpy(
      reinterpret_cast<uint8*>(input_record.address) + input_record.bytes_used,
      data,
      size);
  input_record.bytes_used += size;

  return true;
}

bool ExynosVideoDecodeAccelerator::FlushInputFrame() {
  DVLOG(3) << "FlushInputFrame()";
  DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
  DCHECK_NE(decoder_state_, kUninitialized);
  DCHECK_NE(decoder_state_, kResetting);
  DCHECK_NE(decoder_state_, kError);

  if (decoder_current_input_buffer_ == -1)
    return true;

  MfcInputRecord& input_record =
      mfc_input_buffer_map_[decoder_current_input_buffer_];
  DCHECK_NE(input_record.input_id, -1);
  DCHECK(input_record.input_id != kFlushBufferId ||
         input_record.bytes_used == 0);
  // * if input_id >= 0, this input buffer was prompted by a bitstream buffer we
  //   got from the client. We can skip it if it is empty.
  // * if input_id < 0 (should be kFlushBufferId in this case), this input
  //   buffer was prompted by a flush buffer, and should be queued even when
  //   empty.
  if (input_record.input_id >= 0 && input_record.bytes_used == 0) {
    input_record.input_id = -1;
    mfc_free_input_buffers_.push_back(decoder_current_input_buffer_);
    decoder_current_input_buffer_ = -1;
    return true;
  }

  // Queue it to MFC.
  mfc_input_ready_queue_.push_back(decoder_current_input_buffer_);
  decoder_current_input_buffer_ = -1;
  DVLOG(3) << "FlushInputFrame(): submitting input_id="
           << input_record.input_id;
  // Kick the MFC once since there's new available input for it.
  EnqueueMfc();

  return (decoder_state_ != kError);
}

void ExynosVideoDecodeAccelerator::AssignPictureBuffersTask(
    scoped_ptr<PictureBufferArrayRef> pic_buffers) {
  DVLOG(3) << "AssignPictureBuffersTask()";
  DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
  DCHECK_NE(decoder_state_, kUninitialized);
  TRACE_EVENT0("Video Decoder", "EVDA::AssignPictureBuffersTask");

  // We run AssignPictureBuffersTask even if we're in kResetting.
  if (decoder_state_ == kError) {
    DVLOG(2) << "AssignPictureBuffersTask(): early out: kError state";
    return;
  }

  DCHECK_EQ(pic_buffers->picture_buffers.size(), gsc_output_buffer_map_.size());
  for (size_t i = 0; i < gsc_output_buffer_map_.size(); ++i) {
    // We should be blank right now.
    GscOutputRecord& output_record = gsc_output_buffer_map_[i];
    DCHECK_EQ(output_record.fd, -1);
    DCHECK_EQ(output_record.egl_image, EGL_NO_IMAGE_KHR);
    DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR);
    DCHECK_EQ(output_record.picture_id, -1);
    PictureBufferArrayRef::PictureBufferRef& buffer =
        pic_buffers->picture_buffers[i];
    output_record.fd = buffer.egl_image_fd;
    output_record.egl_image = buffer.egl_image;
    output_record.picture_id = buffer.client_id;

    // Take ownership of the EGLImage and fd.
    buffer.egl_image = EGL_NO_IMAGE_KHR;
    buffer.egl_image_fd = -1;
    // And add this buffer to the free list.
    gsc_free_output_buffers_.push_back(i);
  }

  // We got buffers! Kick the GSC.
  EnqueueGsc();

  if (decoder_state_ == kChangingResolution)
    ResumeAfterResolutionChange();
}

void ExynosVideoDecodeAccelerator::ServiceDeviceTask(bool mfc_event_pending) {
  DVLOG(3) << "ServiceDeviceTask()";
  DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
  DCHECK_NE(decoder_state_, kUninitialized);
  DCHECK_NE(decoder_state_, kInitialized);
  DCHECK_NE(decoder_state_, kAfterReset);
  TRACE_EVENT0("Video Decoder", "EVDA::ServiceDeviceTask");

  if (decoder_state_ == kResetting) {
    DVLOG(2) << "ServiceDeviceTask(): early out: kResetting state";
    return;
  } else if (decoder_state_ == kError) {
    DVLOG(2) << "ServiceDeviceTask(): early out: kError state";
    return;
  } else if (decoder_state_ == kChangingResolution) {
    DVLOG(2) << "ServiceDeviceTask(): early out: kChangingResolution state";
    return;
  }

  if (mfc_event_pending)
    DequeueMfcEvents();
  DequeueMfc();
  DequeueGsc();
  EnqueueMfc();
  EnqueueGsc();

  // Clear the interrupt fd.
  if (!ClearDevicePollInterrupt())
    return;

  unsigned int poll_fds = 0;
  // Add MFC fd, if we should poll on it.
  // MFC can be polled as soon as either input or output buffers are queued.
  if (mfc_input_buffer_queued_count_ + mfc_output_buffer_queued_count_ > 0)
    poll_fds |= kPollMfc;
  // Add GSC fd, if we should poll on it.
  // GSC has to wait until both input and output buffers are queued.
  if (gsc_input_buffer_queued_count_ > 0 && gsc_output_buffer_queued_count_ > 0)
    poll_fds |= kPollGsc;

  // ServiceDeviceTask() should only ever be scheduled from DevicePollTask(),
  // so either:
  // * device_poll_thread_ is running normally
  // * device_poll_thread_ scheduled us, but then a ResetTask() or DestroyTask()
  //   shut it down, in which case we're either in kResetting or kError states
  //   respectively, and we should have early-outed already.
  DCHECK(device_poll_thread_.message_loop());
  // Queue the DevicePollTask() now.
  device_poll_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
      &ExynosVideoDecodeAccelerator::DevicePollTask,
      base::Unretained(this),
      poll_fds));

  DVLOG(1) << "ServiceDeviceTask(): buffer counts: DEC["
           << decoder_input_queue_.size() << "->"
           << mfc_input_ready_queue_.size() << "] => MFC["
           << mfc_free_input_buffers_.size() << "+"
           << mfc_input_buffer_queued_count_ << "/"
           << mfc_input_buffer_map_.size() << "->"
           << mfc_free_output_buffers_.size() << "+"
           << mfc_output_buffer_queued_count_ << "/"
           << mfc_output_buffer_map_.size() << "] => "
           << mfc_output_gsc_input_queue_.size() << " => GSC["
           << gsc_free_input_buffers_.size() << "+"
           << gsc_input_buffer_queued_count_ << "/"
           << gsc_input_buffer_map_.size() << "->"
           << gsc_free_output_buffers_.size() << "+"
           << gsc_output_buffer_queued_count_ << "/"
           << gsc_output_buffer_map_.size() << "] => VDA["
           << decoder_frames_at_client_ << "]";

  ScheduleDecodeBufferTaskIfNeeded();
  StartResolutionChangeIfNeeded();
}

void ExynosVideoDecodeAccelerator::EnqueueMfc() {
  DVLOG(3) << "EnqueueMfc()";
  DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
  DCHECK_NE(decoder_state_, kUninitialized);
  TRACE_EVENT0("Video Decoder", "EVDA::EnqueueMfc");

  // Drain the pipe of completed decode buffers.
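  // The pattern used below (and for the other queues in this file) is: queue
  // everything we can with VIDIOC_QBUF, and only when a previously empty queue
  // becomes non-empty do we signal the poll interrupt and, if needed, issue
  // VIDIOC_STREAMON for that queue type. A minimal illustrative sketch of the
  // same idea (not code from this file):
  //
  //   __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
  //   if (ioctl(fd, VIDIOC_QBUF, &qbuf) == 0 && !streamon) {
  //     ioctl(fd, VIDIOC_STREAMON, &type);  // start streaming once queued
  //     streamon = true;
  //   }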
  const int old_mfc_inputs_queued = mfc_input_buffer_queued_count_;
  while (!mfc_input_ready_queue_.empty()) {
    if (!EnqueueMfcInputRecord())
      return;
  }
  if (old_mfc_inputs_queued == 0 && mfc_input_buffer_queued_count_ != 0) {
    // We just started up a previously empty queue.
    // Queue state changed; signal interrupt.
    if (!SetDevicePollInterrupt())
      return;
    // Start VIDIOC_STREAMON if we haven't yet.
    if (!mfc_input_streamon_) {
      __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
      IOCTL_OR_ERROR_RETURN(mfc_fd_, VIDIOC_STREAMON, &type);
      mfc_input_streamon_ = true;
    }
  }

  // Enqueue all the MFC outputs we can.
  const int old_mfc_outputs_queued = mfc_output_buffer_queued_count_;
  while (!mfc_free_output_buffers_.empty()) {
    if (!EnqueueMfcOutputRecord())
      return;
  }
  if (old_mfc_outputs_queued == 0 && mfc_output_buffer_queued_count_ != 0) {
    // We just started up a previously empty queue.
    // Queue state changed; signal interrupt.
    if (!SetDevicePollInterrupt())
      return;
    // Start VIDIOC_STREAMON if we haven't yet.
    if (!mfc_output_streamon_) {
      __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
      IOCTL_OR_ERROR_RETURN(mfc_fd_, VIDIOC_STREAMON, &type);
      mfc_output_streamon_ = true;
    }
  }
}

void ExynosVideoDecodeAccelerator::DequeueMfcEvents() {
  DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
  DCHECK_EQ(decoder_state_, kDecoding);
  DVLOG(3) << "DequeueMfcEvents()";

  struct v4l2_event ev;
  memset(&ev, 0, sizeof(ev));

  while (ioctl(mfc_fd_, VIDIOC_DQEVENT, &ev) == 0) {
    if (ev.type == V4L2_EVENT_RESOLUTION_CHANGE) {
      DVLOG(3) << "DequeueMfcEvents(): got resolution change event.";
      DCHECK(!resolution_change_pending_);
      resolution_change_pending_ = true;
    } else {
      DLOG(FATAL) << "DequeueMfcEvents(): got an event (" << ev.type
                  << ") we haven't subscribed to.";
    }
  }
}

void ExynosVideoDecodeAccelerator::DequeueMfc() {
  DVLOG(3) << "DequeueMfc()";
  DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
  DCHECK_NE(decoder_state_, kUninitialized);
  TRACE_EVENT0("Video Decoder", "EVDA::DequeueMfc");

  // Dequeue completed MFC input (VIDEO_OUTPUT) buffers, and recycle to the free
  // list.
  struct v4l2_buffer dqbuf;
  struct v4l2_plane planes[2];
  while (mfc_input_buffer_queued_count_ > 0) {
    DCHECK(mfc_input_streamon_);
    memset(&dqbuf, 0, sizeof(dqbuf));
    memset(planes, 0, sizeof(planes));
    dqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    dqbuf.memory = V4L2_MEMORY_MMAP;
    dqbuf.m.planes = planes;
    dqbuf.length = 1;
    if (ioctl(mfc_fd_, VIDIOC_DQBUF, &dqbuf) != 0) {
      if (errno == EAGAIN) {
        // EAGAIN if we're just out of buffers to dequeue.
        break;
      }
      DPLOG(ERROR) << "DequeueMfc(): ioctl() failed: VIDIOC_DQBUF";
      NOTIFY_ERROR(PLATFORM_FAILURE);
      return;
    }
    MfcInputRecord& input_record = mfc_input_buffer_map_[dqbuf.index];
    DCHECK(input_record.at_device);
    mfc_free_input_buffers_.push_back(dqbuf.index);
    input_record.at_device = false;
    input_record.bytes_used = 0;
    input_record.input_id = -1;
    mfc_input_buffer_queued_count_--;
  }

  // Dequeue completed MFC output (VIDEO_CAPTURE) buffers, and queue to the
  // completed queue.
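  // Note: the bitstream id rides through the hardware in the v4l2_buffer
  // timestamp field. EnqueueMfcInputRecord() stores input_id in
  // qbuf.timestamp.tv_sec, and the code below relies on the driver propagating
  // that timestamp from the input (OUTPUT) buffer to the decoded (CAPTURE)
  // buffer, reading it back here (and again after GSC) to label the resulting
  // picture.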
  while (mfc_output_buffer_queued_count_ > 0) {
    DCHECK(mfc_output_streamon_);
    memset(&dqbuf, 0, sizeof(dqbuf));
    memset(planes, 0, sizeof(planes));
    dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    dqbuf.memory = V4L2_MEMORY_MMAP;
    dqbuf.m.planes = planes;
    dqbuf.length = 2;
    if (ioctl(mfc_fd_, VIDIOC_DQBUF, &dqbuf) != 0) {
      if (errno == EAGAIN) {
        // EAGAIN if we're just out of buffers to dequeue.
        break;
      }
      DPLOG(ERROR) << "DequeueMfc(): ioctl() failed: VIDIOC_DQBUF";
      NOTIFY_ERROR(PLATFORM_FAILURE);
      return;
    }
    MfcOutputRecord& output_record = mfc_output_buffer_map_[dqbuf.index];
    DCHECK(output_record.at_device);
    output_record.at_device = false;
    output_record.bytes_used[0] = dqbuf.m.planes[0].bytesused;
    output_record.bytes_used[1] = dqbuf.m.planes[1].bytesused;
    if (output_record.bytes_used[0] + output_record.bytes_used[1] == 0) {
      // This is an empty output buffer returned as part of a flush.
      mfc_free_output_buffers_.push_back(dqbuf.index);
      output_record.input_id = -1;
    } else {
      // This is an output buffer with contents to pass down the pipe.
      mfc_output_gsc_input_queue_.push_back(dqbuf.index);
      output_record.input_id = dqbuf.timestamp.tv_sec;
      DCHECK(output_record.input_id >= 0);
      DVLOG(3) << "DequeueMfc(): dequeued input_id=" << output_record.input_id;
      // We don't count this output buffer dequeued yet, or add it to the free
      // list, as it has data GSC needs to process.

      // We have new frames in mfc_output_gsc_input_queue_. Kick the pipe.
      SetDevicePollInterrupt();
    }
    mfc_output_buffer_queued_count_--;
  }

  NotifyFlushDoneIfNeeded();
}

void ExynosVideoDecodeAccelerator::EnqueueGsc() {
  DVLOG(3) << "EnqueueGsc()";
  DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
  DCHECK_NE(decoder_state_, kUninitialized);
  DCHECK_NE(decoder_state_, kInitialized);
  TRACE_EVENT0("Video Decoder", "EVDA::EnqueueGsc");

  // Drain the pipe of completed MFC output buffers.
  const int old_gsc_inputs_queued = gsc_input_buffer_queued_count_;
  while (!mfc_output_gsc_input_queue_.empty() &&
         !gsc_free_input_buffers_.empty()) {
    if (!EnqueueGscInputRecord())
      return;
  }
  if (old_gsc_inputs_queued == 0 && gsc_input_buffer_queued_count_ != 0) {
    // We just started up a previously empty queue.
    // Queue state changed; signal interrupt.
    if (!SetDevicePollInterrupt())
      return;
    // Start VIDIOC_STREAMON if we haven't yet.
    if (!gsc_input_streamon_) {
      __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
      IOCTL_OR_ERROR_RETURN(gsc_fd_, VIDIOC_STREAMON, &type);
      gsc_input_streamon_ = true;
    }
  }

  if (gsc_input_buffer_queued_count_ != 0 &&
      gsc_output_buffer_queued_count_ == 0 &&
      !gsc_free_output_buffers_.empty()) {
    const int old_gsc_outputs_queued = gsc_output_buffer_queued_count_;
    if (!EnqueueGscOutputRecord())
      return;
    if (old_gsc_outputs_queued == 0 && gsc_output_buffer_queued_count_ != 0) {
      // We just started up a previously empty queue.
      // Queue state changed; signal interrupt.
      if (!SetDevicePollInterrupt())
        return;
      // Start VIDIOC_STREAMON if we haven't yet.
      if (!gsc_output_streamon_) {
        __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
        IOCTL_OR_ERROR_RETURN(gsc_fd_, VIDIOC_STREAMON, &type);
        gsc_output_streamon_ = true;
      }
    }
  }
  // Bug check: GSC is liable to race conditions if more than one buffer is
  // simultaneously queued.
  DCHECK_GE(1, gsc_output_buffer_queued_count_);
}

void ExynosVideoDecodeAccelerator::DequeueGsc() {
  DVLOG(3) << "DequeueGsc()";
  DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
  DCHECK_NE(decoder_state_, kUninitialized);
  DCHECK_NE(decoder_state_, kInitialized);
  DCHECK_NE(decoder_state_, kAfterReset);
  TRACE_EVENT0("Video Decoder", "EVDA::DequeueGsc");

  // Dequeue completed GSC input (VIDEO_OUTPUT) buffers, and recycle to the free
  // list. Also recycle the corresponding MFC output buffers at this time.
  struct v4l2_buffer dqbuf;
  struct v4l2_plane planes[2];
  while (gsc_input_buffer_queued_count_ > 0) {
    DCHECK(gsc_input_streamon_);
    memset(&dqbuf, 0, sizeof(dqbuf));
    memset(planes, 0, sizeof(planes));
    dqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    dqbuf.memory = V4L2_MEMORY_DMABUF;
    dqbuf.m.planes = planes;
    dqbuf.length = 2;
    if (ioctl(gsc_fd_, VIDIOC_DQBUF, &dqbuf) != 0) {
      if (errno == EAGAIN) {
        // EAGAIN if we're just out of buffers to dequeue.
        break;
      }
      DPLOG(ERROR) << "DequeueGsc(): ioctl() failed: VIDIOC_DQBUF";
      NOTIFY_ERROR(PLATFORM_FAILURE);
      return;
    }
    GscInputRecord& input_record = gsc_input_buffer_map_[dqbuf.index];
    MfcOutputRecord& output_record =
        mfc_output_buffer_map_[input_record.mfc_output];
    DCHECK(input_record.at_device);
    gsc_free_input_buffers_.push_back(dqbuf.index);
    mfc_free_output_buffers_.push_back(input_record.mfc_output);
    input_record.at_device = false;
    input_record.mfc_output = -1;
    output_record.input_id = -1;
    gsc_input_buffer_queued_count_--;
  }

  // Dequeue completed GSC output (VIDEO_CAPTURE) buffers, and send them off to
  // the client. Don't recycle to its free list yet -- we can't do that until
  // ReusePictureBuffer() returns it to us.
  while (gsc_output_buffer_queued_count_ > 0) {
    DCHECK(gsc_output_streamon_);
    memset(&dqbuf, 0, sizeof(dqbuf));
    memset(planes, 0, sizeof(planes));
    dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    dqbuf.memory = V4L2_MEMORY_DMABUF;
    dqbuf.m.planes = planes;
    dqbuf.length = 1;
    if (ioctl(gsc_fd_, VIDIOC_DQBUF, &dqbuf) != 0) {
      if (errno == EAGAIN) {
        // EAGAIN if we're just out of buffers to dequeue.
        break;
      }
      DPLOG(ERROR) << "DequeueGsc(): ioctl() failed: VIDIOC_DQBUF";
      NOTIFY_ERROR(PLATFORM_FAILURE);
      return;
    }
    GscOutputRecord& output_record = gsc_output_buffer_map_[dqbuf.index];
    DCHECK(output_record.at_device);
    DCHECK(!output_record.at_client);
    DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR);
    output_record.at_device = false;
    output_record.at_client = true;
    gsc_output_buffer_queued_count_--;
    DVLOG(3) << "DequeueGsc(): returning input_id=" << dqbuf.timestamp.tv_sec
             << " as picture_id=" << output_record.picture_id;
    child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind(
        &Client::PictureReady, client_, media::Picture(
            output_record.picture_id, dqbuf.timestamp.tv_sec)));
    decoder_frames_at_client_++;
  }

  NotifyFlushDoneIfNeeded();
}

bool ExynosVideoDecodeAccelerator::EnqueueMfcInputRecord() {
  DVLOG(3) << "EnqueueMfcInputRecord()";
  DCHECK(!mfc_input_ready_queue_.empty());

  // Enqueue a MFC input (VIDEO_OUTPUT) buffer.
  const int buffer = mfc_input_ready_queue_.back();
  MfcInputRecord& input_record = mfc_input_buffer_map_[buffer];
  DCHECK(!input_record.at_device);
  struct v4l2_buffer qbuf;
  struct v4l2_plane qbuf_plane;
  memset(&qbuf, 0, sizeof(qbuf));
  memset(&qbuf_plane, 0, sizeof(qbuf_plane));
  qbuf.index = buffer;
  qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
  qbuf.timestamp.tv_sec = input_record.input_id;
  qbuf.memory = V4L2_MEMORY_MMAP;
  qbuf.m.planes = &qbuf_plane;
  qbuf.m.planes[0].bytesused = input_record.bytes_used;
  qbuf.length = 1;
  IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_QBUF, &qbuf);
  mfc_input_ready_queue_.pop_back();
  input_record.at_device = true;
  mfc_input_buffer_queued_count_++;
  DVLOG(3) << "EnqueueMfcInputRecord(): enqueued input_id="
           << input_record.input_id;
  return true;
}

bool ExynosVideoDecodeAccelerator::EnqueueMfcOutputRecord() {
  DVLOG(3) << "EnqueueMfcOutputRecord()";
  DCHECK(!mfc_free_output_buffers_.empty());

  // Enqueue a MFC output (VIDEO_CAPTURE) buffer.
  const int buffer = mfc_free_output_buffers_.back();
  MfcOutputRecord& output_record = mfc_output_buffer_map_[buffer];
  DCHECK(!output_record.at_device);
  DCHECK_EQ(output_record.input_id, -1);
  struct v4l2_buffer qbuf;
  struct v4l2_plane qbuf_planes[2];
  memset(&qbuf, 0, sizeof(qbuf));
  memset(qbuf_planes, 0, sizeof(qbuf_planes));
  qbuf.index = buffer;
  qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  qbuf.memory = V4L2_MEMORY_MMAP;
  qbuf.m.planes = qbuf_planes;
  qbuf.length = 2;
  IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_QBUF, &qbuf);
  mfc_free_output_buffers_.pop_back();
  output_record.at_device = true;
  mfc_output_buffer_queued_count_++;
  return true;
}

bool ExynosVideoDecodeAccelerator::EnqueueGscInputRecord() {
  DVLOG(3) << "EnqueueGscInputRecord()";
  DCHECK(!gsc_free_input_buffers_.empty());

  // Enqueue a GSC input (VIDEO_OUTPUT) buffer for a complete MFC output
  // (VIDEO_CAPTURE) buffer.
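  // The handoff below is zero-copy: the GSC input buffer is queued as
  // V4L2_MEMORY_USERPTR with its plane pointers set to MFC's output plane
  // addresses (output_record.address[0..1]), so GSC reads the decoded
  // NV12MT data directly out of MFC's output buffers.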
  const int mfc_buffer = mfc_output_gsc_input_queue_.front();
  const int gsc_buffer = gsc_free_input_buffers_.back();
  MfcOutputRecord& output_record = mfc_output_buffer_map_[mfc_buffer];
  DCHECK(!output_record.at_device);
  GscInputRecord& input_record = gsc_input_buffer_map_[gsc_buffer];
  DCHECK(!input_record.at_device);
  DCHECK_EQ(input_record.mfc_output, -1);
  struct v4l2_buffer qbuf;
  struct v4l2_plane qbuf_planes[2];
  memset(&qbuf, 0, sizeof(qbuf));
  memset(qbuf_planes, 0, sizeof(qbuf_planes));
  qbuf.index = gsc_buffer;
  qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
  qbuf.timestamp.tv_sec = output_record.input_id;
  qbuf.memory = V4L2_MEMORY_USERPTR;
  qbuf.m.planes = qbuf_planes;
  qbuf.m.planes[0].bytesused = output_record.bytes_used[0];
  qbuf.m.planes[0].length = mfc_output_buffer_size_[0];
  qbuf.m.planes[0].m.userptr = (unsigned long)output_record.address[0];
  qbuf.m.planes[1].bytesused = output_record.bytes_used[1];
  qbuf.m.planes[1].length = mfc_output_buffer_size_[1];
  qbuf.m.planes[1].m.userptr = (unsigned long)output_record.address[1];
  qbuf.length = 2;
  IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_QBUF, &qbuf);
  mfc_output_gsc_input_queue_.pop_front();
  gsc_free_input_buffers_.pop_back();
  input_record.at_device = true;
  input_record.mfc_output = mfc_buffer;
  output_record.bytes_used[0] = 0;
  output_record.bytes_used[1] = 0;
  gsc_input_buffer_queued_count_++;
  DVLOG(3) << "EnqueueGscInputRecord(): enqueued input_id="
           << output_record.input_id;
  return true;
}

bool ExynosVideoDecodeAccelerator::EnqueueGscOutputRecord() {
  DVLOG(3) << "EnqueueGscOutputRecord()";
  DCHECK(!gsc_free_output_buffers_.empty());

  // Enqueue a GSC output (VIDEO_CAPTURE) buffer.
  const int buffer = gsc_free_output_buffers_.front();
  GscOutputRecord& output_record = gsc_output_buffer_map_[buffer];
  DCHECK(!output_record.at_device);
  DCHECK(!output_record.at_client);
  if (output_record.egl_sync != EGL_NO_SYNC_KHR) {
    TRACE_EVENT0(
        "Video Decoder",
        "EVDA::EnqueueGscOutputRecord: eglClientWaitSyncKHR");
    // If we have to wait for completion, wait. Note that
    // gsc_free_output_buffers_ is a FIFO queue, so we always wait on the
    // buffer that has been in the queue the longest.
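    // The fence was created in ReusePictureBuffer() on the child thread when
    // the client returned this picture buffer; waiting on it here is meant to
    // keep GSC from writing into a buffer the GPU may still be reading from.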
    eglClientWaitSyncKHR(egl_display_, output_record.egl_sync, 0,
                         EGL_FOREVER_KHR);
    eglDestroySyncKHR(egl_display_, output_record.egl_sync);
    output_record.egl_sync = EGL_NO_SYNC_KHR;
  }
  struct v4l2_buffer qbuf;
  struct v4l2_plane qbuf_plane;
  memset(&qbuf, 0, sizeof(qbuf));
  memset(&qbuf_plane, 0, sizeof(qbuf_plane));
  qbuf.index = buffer;
  qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  qbuf.memory = V4L2_MEMORY_DMABUF;
  qbuf.m.planes = &qbuf_plane;
  qbuf.m.planes[0].m.fd = output_record.fd;
  qbuf.length = 1;
  IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_QBUF, &qbuf);
  gsc_free_output_buffers_.pop_front();
  output_record.at_device = true;
  gsc_output_buffer_queued_count_++;
  return true;
}

void ExynosVideoDecodeAccelerator::ReusePictureBufferTask(
    int32 picture_buffer_id, scoped_ptr<EGLSyncKHRRef> egl_sync_ref) {
  DVLOG(3) << "ReusePictureBufferTask(): picture_buffer_id="
           << picture_buffer_id;
  DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
  TRACE_EVENT0("Video Decoder", "EVDA::ReusePictureBufferTask");

  // We run ReusePictureBufferTask even if we're in kResetting.
  if (decoder_state_ == kError) {
    DVLOG(2) << "ReusePictureBufferTask(): early out: kError state";
    return;
  }

  if (decoder_state_ == kChangingResolution) {
    DVLOG(2) << "ReusePictureBufferTask(): early out: kChangingResolution";
    return;
  }

  size_t index;
  for (index = 0; index < gsc_output_buffer_map_.size(); ++index)
    if (gsc_output_buffer_map_[index].picture_id == picture_buffer_id)
      break;

  if (index >= gsc_output_buffer_map_.size()) {
    DLOG(ERROR) << "ReusePictureBufferTask(): picture_buffer_id not found";
    NOTIFY_ERROR(INVALID_ARGUMENT);
    return;
  }

  GscOutputRecord& output_record = gsc_output_buffer_map_[index];
  if (output_record.at_device || !output_record.at_client) {
    DLOG(ERROR) << "ReusePictureBufferTask(): picture_buffer_id not reusable";
    NOTIFY_ERROR(INVALID_ARGUMENT);
    return;
  }

  DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR);
  output_record.at_client = false;
  output_record.egl_sync = egl_sync_ref->egl_sync;
  gsc_free_output_buffers_.push_back(index);
  decoder_frames_at_client_--;
  // Take ownership of the EGLSync.
  egl_sync_ref->egl_sync = EGL_NO_SYNC_KHR;
  // We got a buffer back, so kick the GSC.
  EnqueueGsc();
}

void ExynosVideoDecodeAccelerator::FlushTask() {
  DVLOG(3) << "FlushTask()";
  DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
  TRACE_EVENT0("Video Decoder", "EVDA::FlushTask");

  // Flush outstanding buffers.
  if (decoder_state_ == kInitialized || decoder_state_ == kAfterReset) {
    // There's nothing in the pipe, so return done immediately.
    DVLOG(3) << "FlushTask(): returning flush";
    child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind(
        &Client::NotifyFlushDone, client_));
    return;
  } else if (decoder_state_ == kError) {
    DVLOG(2) << "FlushTask(): early out: kError state";
    return;
  }

  // We don't support stacked flushing.
  DCHECK(!decoder_flushing_);

  // Queue up an empty buffer -- this triggers the flush.
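  // The flush marker is a BitstreamBufferRef with no shared memory, zero size
  // and input_id == kFlushBufferId; DecodeBufferTask() recognizes it by the
  // negative id and pushes an empty input frame through MFC instead of
  // decoding.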
1627 decoder_input_queue_.push_back(linked_ptr<BitstreamBufferRef>(
1628 new BitstreamBufferRef(client_, child_message_loop_proxy_, NULL, 0,
1629 kFlushBufferId)));
1630 decoder_flushing_ = true;
1631
1632 ScheduleDecodeBufferTaskIfNeeded();
1633 }
1634
1635 void ExynosVideoDecodeAccelerator::NotifyFlushDoneIfNeeded() {
1636 if (!decoder_flushing_)
1637 return;
1638
1639 // Pipeline is empty when:
1640 // * Decoder input queue is empty of non-delayed buffers.
1641 // * There is no currently filling input buffer.
1642 // * MFC input holding queue is empty.
1643 // * All MFC input (VIDEO_OUTPUT) buffers are returned.
1644 // * MFC -> GSC holding queue is empty.
1645 // * All GSC input (VIDEO_OUTPUT) buffers are returned.
1646 if (!decoder_input_queue_.empty()) {
1647 if (decoder_input_queue_.front()->input_id !=
1648 decoder_delay_bitstream_buffer_id_)
1649 return;
1650 }
1651 if (decoder_current_input_buffer_ != -1)
1652 return;
1653 if ((mfc_input_ready_queue_.size() +
1654 mfc_input_buffer_queued_count_ + mfc_output_gsc_input_queue_.size() +
1655 gsc_input_buffer_queued_count_ + gsc_output_buffer_queued_count_) != 0)
1656 return;
1657
1658 // TODO(posciak): crbug.com/270039. MFC requires a streamoff-streamon
1659 // sequence after a flush before it will continue, even if we are not
1660 // resetting. This makes some sense, because we don't really want to resume
1661 // from a non-resume point (e.g. not from an IDR) after a flush.
1662 // The MSE player, however, triggers a Flush() at the end of each chunk, but
1663 // never a Reset(). One could argue either way, or even say that Flush() is
1664 // unnecessary or harmful when transitioning to the next chunk.
1665 // For now, do the streamoff-streamon cycle to satisfy MFC and avoid freezing
1666 // during MSE playback. This should be harmless otherwise.
1667 if (!StopDevicePoll(false))
1668 return;
1669
1670 if (!StartDevicePoll())
1671 return;
1672
1673 decoder_delay_bitstream_buffer_id_ = -1;
1674 decoder_flushing_ = false;
1675 DVLOG(3) << "NotifyFlushDoneIfNeeded(): returning flush";
1676 child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind(
1677 &Client::NotifyFlushDone, client_));
1678
1679 // While we were flushing, we early-outed DecodeBufferTask()s.
1680 ScheduleDecodeBufferTaskIfNeeded();
1681 }
1682
1683 void ExynosVideoDecodeAccelerator::ResetTask() {
1684 DVLOG(3) << "ResetTask()";
1685 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1686 TRACE_EVENT0("Video Decoder", "EVDA::ResetTask");
1687
1688 if (decoder_state_ == kError) {
1689 DVLOG(2) << "ResetTask(): early out: kError state";
1690 return;
1691 }
1692
1693 // If we are in the middle of switching resolutions, postpone the reset until
1694 // it's done. We don't have to worry about timing of this with respect to
1695 // decoding, because the MFC input pipe is already stopped if we are changing
1696 // resolution. We will come back here after the resolution change completes.
1697 DCHECK(!resolution_change_reset_pending_);
1698 if (resolution_change_pending_ || decoder_state_ == kChangingResolution) {
1699 resolution_change_reset_pending_ = true;
1700 return;
1701 }
1702
1703 // We stop streaming and clear buffer tracking info (not preserving
1704 // MFC inputs). Note that StopDevicePoll() does _not_ destroy the buffers
1705 // themselves in either case.
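// Stop the device poll thread first so no further ServiceDeviceTask() gets
// posted while we tear down the queues, then drain any pending MFC events
// before clearing our bookkeeping.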
1706 if (!StopDevicePoll(false)) 1707 return; 1708 1709 DequeueMfcEvents(); 1710 1711 resolution_change_pending_ = false; 1712 decoder_current_bitstream_buffer_.reset(); 1713 decoder_input_queue_.clear(); 1714 1715 decoder_current_input_buffer_ = -1; 1716 1717 // If we were flushing, we'll never return any more BitstreamBuffers or 1718 // PictureBuffers; they have all been dropped and returned by now. 1719 NotifyFlushDoneIfNeeded(); 1720 1721 // Mark that we're resetting, then enqueue a ResetDoneTask(). All intervening 1722 // jobs will early-out in the kResetting state. 1723 decoder_state_ = kResetting; 1724 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind( 1725 &ExynosVideoDecodeAccelerator::ResetDoneTask, base::Unretained(this))); 1726 } 1727 1728 void ExynosVideoDecodeAccelerator::ResetDoneTask() { 1729 DVLOG(3) << "ResetDoneTask()"; 1730 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current()); 1731 TRACE_EVENT0("Video Decoder", "EVDA::ResetDoneTask"); 1732 1733 if (decoder_state_ == kError) { 1734 DVLOG(2) << "ResetDoneTask(): early out: kError state"; 1735 return; 1736 } 1737 1738 // Reset format-specific bits. 1739 if (video_profile_ >= media::H264PROFILE_MIN && 1740 video_profile_ <= media::H264PROFILE_MAX) { 1741 decoder_h264_parser_.reset(new content::H264Parser()); 1742 } 1743 1744 // Jobs drained, we're finished resetting. 1745 DCHECK_EQ(decoder_state_, kResetting); 1746 decoder_state_ = kAfterReset; 1747 decoder_partial_frame_pending_ = false; 1748 decoder_delay_bitstream_buffer_id_ = -1; 1749 child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind( 1750 &Client::NotifyResetDone, client_)); 1751 1752 // While we were resetting, we early-outed DecodeBufferTask()s. 1753 ScheduleDecodeBufferTaskIfNeeded(); 1754 } 1755 1756 void ExynosVideoDecodeAccelerator::DestroyTask() { 1757 DVLOG(3) << "DestroyTask()"; 1758 TRACE_EVENT0("Video Decoder", "EVDA::DestroyTask"); 1759 1760 // DestroyTask() should run regardless of decoder_state_. 1761 1762 // Stop streaming and the device_poll_thread_. 1763 StopDevicePoll(false); 1764 1765 decoder_current_bitstream_buffer_.reset(); 1766 decoder_current_input_buffer_ = -1; 1767 decoder_decode_buffer_tasks_scheduled_ = 0; 1768 decoder_frames_at_client_ = 0; 1769 decoder_input_queue_.clear(); 1770 decoder_flushing_ = false; 1771 1772 // Set our state to kError. Just in case. 1773 decoder_state_ = kError; 1774 } 1775 1776 bool ExynosVideoDecodeAccelerator::StartDevicePoll() { 1777 DVLOG(3) << "StartDevicePoll()"; 1778 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current()); 1779 DCHECK(!device_poll_thread_.IsRunning()); 1780 1781 // Start up the device poll thread and schedule its first DevicePollTask(). 1782 if (!device_poll_thread_.Start()) { 1783 DLOG(ERROR) << "StartDevicePoll(): Device thread failed to start"; 1784 NOTIFY_ERROR(PLATFORM_FAILURE); 1785 return false; 1786 } 1787 device_poll_thread_.message_loop()->PostTask(FROM_HERE, base::Bind( 1788 &ExynosVideoDecodeAccelerator::DevicePollTask, 1789 base::Unretained(this), 1790 0)); 1791 1792 return true; 1793 } 1794 1795 bool ExynosVideoDecodeAccelerator::StopDevicePoll(bool keep_mfc_input_state) { 1796 DVLOG(3) << "StopDevicePoll()"; 1797 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current()); 1798 1799 // Signal the DevicePollTask() to stop, and stop the device poll thread. 1800 if (!SetDevicePollInterrupt()) 1801 return false; 1802 device_poll_thread_.Stop(); 1803 // Clear the interrupt now, to be sure. 
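// device_poll_interrupt_fd_ follows the usual eventfd pattern (a sketch,
// assuming the fd was created with eventfd(0, EFD_NONBLOCK) at init time):
//   uint64 buf = 1;
//   write(device_poll_interrupt_fd_, &buf, sizeof(buf));  // wake up poll()
//   ...
//   read(device_poll_interrupt_fd_, &buf, sizeof(buf));   // reset the counter
// Reading it back here re-arms the fd so the next DevicePollTask() doesn't
// return immediately on a stale interrupt.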
1804 if (!ClearDevicePollInterrupt())
1805 return false;
1806
1807 // Stop streaming.
1808 if (!keep_mfc_input_state) {
1809 if (mfc_input_streamon_) {
1810 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1811 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_STREAMOFF, &type);
1812 }
1813 mfc_input_streamon_ = false;
1814 }
1815 if (mfc_output_streamon_) {
1816 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1817 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_STREAMOFF, &type);
1818 }
1819 mfc_output_streamon_ = false;
1820 if (gsc_input_streamon_) {
1821 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1822 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_STREAMOFF, &type);
1823 }
1824 gsc_input_streamon_ = false;
1825 if (gsc_output_streamon_) {
1826 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1827 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_STREAMOFF, &type);
1828 }
1829 gsc_output_streamon_ = false;
1830
1831 // Reset all our accounting info.
1832 if (!keep_mfc_input_state) {
1833 mfc_input_ready_queue_.clear();
1834 mfc_free_input_buffers_.clear();
1835 for (size_t i = 0; i < mfc_input_buffer_map_.size(); ++i) {
1836 mfc_free_input_buffers_.push_back(i);
1837 mfc_input_buffer_map_[i].at_device = false;
1838 mfc_input_buffer_map_[i].bytes_used = 0;
1839 mfc_input_buffer_map_[i].input_id = -1;
1840 }
1841 mfc_input_buffer_queued_count_ = 0;
1842 }
1843 mfc_free_output_buffers_.clear();
1844 for (size_t i = 0; i < mfc_output_buffer_map_.size(); ++i) {
1845 mfc_free_output_buffers_.push_back(i);
1846 mfc_output_buffer_map_[i].at_device = false;
1847 mfc_output_buffer_map_[i].input_id = -1;
1848 }
1849 mfc_output_buffer_queued_count_ = 0;
1850 mfc_output_gsc_input_queue_.clear();
1851 gsc_free_input_buffers_.clear();
1852 for (size_t i = 0; i < gsc_input_buffer_map_.size(); ++i) {
1853 gsc_free_input_buffers_.push_back(i);
1854 gsc_input_buffer_map_[i].at_device = false;
1855 gsc_input_buffer_map_[i].mfc_output = -1;
1856 }
1857 gsc_input_buffer_queued_count_ = 0;
1858 gsc_free_output_buffers_.clear();
1859 for (size_t i = 0; i < gsc_output_buffer_map_.size(); ++i) {
1860 // Only mark those buffers free that aren't still held by the client.
1861 if (!gsc_output_buffer_map_[i].at_client) {
1862 gsc_free_output_buffers_.push_back(i);
1863 gsc_output_buffer_map_[i].at_device = false;
1864 }
1865 }
1866 gsc_output_buffer_queued_count_ = 0;
1867
1868 DVLOG(3) << "StopDevicePoll(): device poll stopped";
1869 return true;
1870 }
1871
1872 bool ExynosVideoDecodeAccelerator::SetDevicePollInterrupt() {
1873 DVLOG(3) << "SetDevicePollInterrupt()";
1874 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1875
1876 const uint64 buf = 1;
1877 if (HANDLE_EINTR(write(device_poll_interrupt_fd_, &buf, sizeof(buf))) == -1) {
1878 DPLOG(ERROR) << "SetDevicePollInterrupt(): write() failed";
1879 NOTIFY_ERROR(PLATFORM_FAILURE);
1880 return false;
1881 }
1882 return true;
1883 }
1884
1885 bool ExynosVideoDecodeAccelerator::ClearDevicePollInterrupt() {
1886 DVLOG(3) << "ClearDevicePollInterrupt()";
1887 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1888
1889 uint64 buf;
1890 if (HANDLE_EINTR(read(device_poll_interrupt_fd_, &buf, sizeof(buf))) == -1) {
1891 if (errno == EAGAIN) {
1892 // No interrupt flag set, and we're reading nonblocking. Not an error.
1893 return true; 1894 } else { 1895 DPLOG(ERROR) << "ClearDevicePollInterrupt(): read() failed"; 1896 NOTIFY_ERROR(PLATFORM_FAILURE); 1897 return false; 1898 } 1899 } 1900 return true; 1901 } 1902 1903 void ExynosVideoDecodeAccelerator::StartResolutionChangeIfNeeded() { 1904 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current()); 1905 DCHECK_EQ(decoder_state_, kDecoding); 1906 1907 if (!resolution_change_pending_) 1908 return; 1909 1910 if (!mfc_output_gsc_input_queue_.empty() || 1911 gsc_input_buffer_queued_count_ + gsc_output_buffer_queued_count_ > 0) { 1912 DVLOG(3) << "StartResolutionChangeIfNeeded(): waiting for GSC to finish."; 1913 return; 1914 } 1915 1916 DVLOG(3) << "No more work for GSC, initiate resolution change"; 1917 1918 // Keep MFC input queue. 1919 if (!StopDevicePoll(true)) 1920 return; 1921 1922 decoder_state_ = kChangingResolution; 1923 DCHECK(resolution_change_pending_); 1924 resolution_change_pending_ = false; 1925 1926 // Post a task to clean up buffers on child thread. This will also ensure 1927 // that we won't accept ReusePictureBuffer() anymore after that. 1928 child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind( 1929 &ExynosVideoDecodeAccelerator::ResolutionChangeDestroyBuffers, 1930 weak_this_)); 1931 } 1932 1933 void ExynosVideoDecodeAccelerator::FinishResolutionChange() { 1934 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current()); 1935 DVLOG(3) << "FinishResolutionChange()"; 1936 1937 if (decoder_state_ == kError) { 1938 DVLOG(2) << "FinishResolutionChange(): early out: kError state"; 1939 return; 1940 } 1941 1942 struct v4l2_format format; 1943 bool again; 1944 bool ret = GetFormatInfo(&format, &again); 1945 if (!ret || again) { 1946 DVLOG(3) << "Couldn't get format information after resolution change"; 1947 NOTIFY_ERROR(PLATFORM_FAILURE); 1948 return; 1949 } 1950 1951 if (!CreateBuffersForFormat(format)) { 1952 DVLOG(3) << "Couldn't reallocate buffers after resolution change"; 1953 NOTIFY_ERROR(PLATFORM_FAILURE); 1954 return; 1955 } 1956 1957 // From here we stay in kChangingResolution and wait for 1958 // AssignPictureBuffers() before we can resume. 1959 } 1960 1961 void ExynosVideoDecodeAccelerator::ResumeAfterResolutionChange() { 1962 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current()); 1963 DVLOG(3) << "ResumeAfterResolutionChange()"; 1964 1965 decoder_state_ = kDecoding; 1966 1967 if (resolution_change_reset_pending_) { 1968 resolution_change_reset_pending_ = false; 1969 ResetTask(); 1970 return; 1971 } 1972 1973 if (!StartDevicePoll()) 1974 return; 1975 1976 EnqueueMfc(); 1977 // Gsc will get enqueued in AssignPictureBuffersTask(). 1978 ScheduleDecodeBufferTaskIfNeeded(); 1979 } 1980 1981 void ExynosVideoDecodeAccelerator::DevicePollTask(unsigned int poll_fds) { 1982 DVLOG(3) << "DevicePollTask()"; 1983 DCHECK_EQ(device_poll_thread_.message_loop(), base::MessageLoop::current()); 1984 TRACE_EVENT0("Video Decoder", "EVDA::DevicePollTask"); 1985 1986 // This routine just polls the set of device fds, and schedules a 1987 // ServiceDeviceTask() on decoder_thread_ when processing needs to occur. 1988 // Other threads may notify this task to return early by writing to 1989 // device_poll_interrupt_fd_. 
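// The poll() set holds at most three fds: the interrupt eventfd (always),
// the MFC fd, and the GSC fd. POLLPRI is requested on MFC so pending V4L2
// events (e.g. a resolution change) also wake us; whether such an event is
// pending is forwarded to ServiceDeviceTask() below.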
1990 struct pollfd pollfds[3]; 1991 nfds_t nfds; 1992 int mfc_pollfd = -1; 1993 1994 // Add device_poll_interrupt_fd_; 1995 pollfds[0].fd = device_poll_interrupt_fd_; 1996 pollfds[0].events = POLLIN | POLLERR; 1997 nfds = 1; 1998 1999 if (poll_fds & kPollMfc) { 2000 DVLOG(3) << "DevicePollTask(): adding MFC to poll() set"; 2001 pollfds[nfds].fd = mfc_fd_; 2002 pollfds[nfds].events = POLLIN | POLLOUT | POLLERR | POLLPRI; 2003 mfc_pollfd = nfds; 2004 nfds++; 2005 } 2006 // Add GSC fd, if we should poll on it. 2007 // GSC has to wait until both input and output buffers are queued. 2008 if (poll_fds & kPollGsc) { 2009 DVLOG(3) << "DevicePollTask(): adding GSC to poll() set"; 2010 pollfds[nfds].fd = gsc_fd_; 2011 pollfds[nfds].events = POLLIN | POLLOUT | POLLERR; 2012 nfds++; 2013 } 2014 2015 // Poll it! 2016 if (HANDLE_EINTR(poll(pollfds, nfds, -1)) == -1) { 2017 DPLOG(ERROR) << "DevicePollTask(): poll() failed"; 2018 NOTIFY_ERROR(PLATFORM_FAILURE); 2019 return; 2020 } 2021 2022 bool mfc_event_pending = (mfc_pollfd != -1 && 2023 pollfds[mfc_pollfd].revents & POLLPRI); 2024 2025 // All processing should happen on ServiceDeviceTask(), since we shouldn't 2026 // touch decoder state from this thread. 2027 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind( 2028 &ExynosVideoDecodeAccelerator::ServiceDeviceTask, 2029 base::Unretained(this), mfc_event_pending)); 2030 } 2031 2032 void ExynosVideoDecodeAccelerator::NotifyError(Error error) { 2033 DVLOG(2) << "NotifyError()"; 2034 2035 if (!child_message_loop_proxy_->BelongsToCurrentThread()) { 2036 child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind( 2037 &ExynosVideoDecodeAccelerator::NotifyError, weak_this_, error)); 2038 return; 2039 } 2040 2041 if (client_) { 2042 client_->NotifyError(error); 2043 client_ptr_factory_.InvalidateWeakPtrs(); 2044 } 2045 } 2046 2047 void ExynosVideoDecodeAccelerator::SetDecoderState(State state) { 2048 DVLOG(3) << "SetDecoderState(): state=" << state; 2049 2050 // We can touch decoder_state_ only if this is the decoder thread or the 2051 // decoder thread isn't running. 2052 if (decoder_thread_.message_loop() != NULL && 2053 decoder_thread_.message_loop() != base::MessageLoop::current()) { 2054 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind( 2055 &ExynosVideoDecodeAccelerator::SetDecoderState, 2056 base::Unretained(this), state)); 2057 } else { 2058 decoder_state_ = state; 2059 } 2060 } 2061 2062 bool ExynosVideoDecodeAccelerator::GetFormatInfo(struct v4l2_format* format, 2063 bool* again) { 2064 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current()); 2065 2066 *again = false; 2067 memset(format, 0, sizeof(*format)); 2068 format->type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; 2069 if (HANDLE_EINTR(ioctl(mfc_fd_, VIDIOC_G_FMT, format)) != 0) { 2070 if (errno == EINVAL) { 2071 // EINVAL means we haven't seen sufficient stream to decode the format. 
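// Returning true with *again set tells the caller to retry later, after more
// of the stream has been fed to MFC; note that FinishResolutionChange()
// treats this case as an error, since the format must already be known by
// that point.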
2072 *again = true;
2073 return true;
2074 } else {
2075 DPLOG(ERROR) << "GetFormatInfo(): ioctl() failed: VIDIOC_G_FMT";
2076 NOTIFY_ERROR(PLATFORM_FAILURE);
2077 return false;
2078 }
2079 }
2080
2081 return true;
2082 }
2083
2084 bool ExynosVideoDecodeAccelerator::CreateBuffersForFormat(
2085 const struct v4l2_format& format) {
2086 DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
2087 CHECK_EQ(format.fmt.pix_mp.num_planes, 2);
2088 frame_buffer_size_.SetSize(
2089 format.fmt.pix_mp.width, format.fmt.pix_mp.height);
2090 mfc_output_buffer_size_[0] = format.fmt.pix_mp.plane_fmt[0].sizeimage;
2091 mfc_output_buffer_size_[1] = format.fmt.pix_mp.plane_fmt[1].sizeimage;
2092 mfc_output_buffer_pixelformat_ = format.fmt.pix_mp.pixelformat;
2093 DCHECK_EQ(mfc_output_buffer_pixelformat_, V4L2_PIX_FMT_NV12MT_16X16);
2094 DVLOG(3) << "CreateBuffersForFormat(): new resolution: "
2095 << frame_buffer_size_.ToString();
2096
2097 if (!CreateMfcOutputBuffers() || !CreateGscInputBuffers() ||
2098 !CreateGscOutputBuffers())
2099 return false;
2100
2101 return true;
2102 }
2103
2104 bool ExynosVideoDecodeAccelerator::CreateMfcInputBuffers() {
2105 DVLOG(3) << "CreateMfcInputBuffers()";
2106 // We always run this as we prepare to initialize.
2107 DCHECK_EQ(decoder_state_, kUninitialized);
2108 DCHECK(!mfc_input_streamon_);
2109 DCHECK(mfc_input_buffer_map_.empty());
2110
2111 __u32 pixelformat = 0;
2112 if (video_profile_ >= media::H264PROFILE_MIN &&
2113 video_profile_ <= media::H264PROFILE_MAX) {
2114 pixelformat = V4L2_PIX_FMT_H264;
2115 } else if (video_profile_ >= media::VP8PROFILE_MIN &&
2116 video_profile_ <= media::VP8PROFILE_MAX) {
2117 pixelformat = V4L2_PIX_FMT_VP8;
2118 } else {
2119 NOTREACHED();
2120 }
2121
2122 struct v4l2_format format;
2123 memset(&format, 0, sizeof(format));
2124 format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
2125 format.fmt.pix_mp.pixelformat = pixelformat;
2126 format.fmt.pix_mp.plane_fmt[0].sizeimage = kMfcInputBufferMaxSize;
2127 format.fmt.pix_mp.num_planes = 1;
2128 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_S_FMT, &format);
2129
2130 struct v4l2_requestbuffers reqbufs;
2131 memset(&reqbufs, 0, sizeof(reqbufs));
2132 reqbufs.count = kMfcInputBufferCount;
2133 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
2134 reqbufs.memory = V4L2_MEMORY_MMAP;
2135 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_REQBUFS, &reqbufs);
2136 mfc_input_buffer_map_.resize(reqbufs.count);
2137 for (size_t i = 0; i < mfc_input_buffer_map_.size(); ++i) {
2138 mfc_free_input_buffers_.push_back(i);
2139
2140 // Query for the MEMORY_MMAP pointer.
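// This is the standard V4L2 MMAP setup: VIDIOC_QUERYBUF returns a mem_offset
// for the plane, which is then passed to mmap() on the MFC fd. The mapping is
// released again in DestroyMfcInputBuffers().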
2141 struct v4l2_plane planes[1];
2142 struct v4l2_buffer buffer;
2143 memset(&buffer, 0, sizeof(buffer));
2144 memset(planes, 0, sizeof(planes));
2145 buffer.index = i;
2146 buffer.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
2147 buffer.memory = V4L2_MEMORY_MMAP;
2148 buffer.m.planes = planes;
2149 buffer.length = 1;
2150 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_QUERYBUF, &buffer);
2151 void* address = mmap(NULL, buffer.m.planes[0].length,
2152 PROT_READ | PROT_WRITE, MAP_SHARED, mfc_fd_,
2153 buffer.m.planes[0].m.mem_offset);
2154 if (address == MAP_FAILED) {
2155 DPLOG(ERROR) << "CreateMfcInputBuffers(): mmap() failed";
2156 return false;
2157 }
2158 mfc_input_buffer_map_[i].address = address;
2159 mfc_input_buffer_map_[i].length = buffer.m.planes[0].length;
2160 }
2161
2162 return true;
2163 }
2164
2165 bool ExynosVideoDecodeAccelerator::CreateMfcOutputBuffers() {
2166 DVLOG(3) << "CreateMfcOutputBuffers()";
2167 DCHECK(decoder_state_ == kInitialized ||
2168 decoder_state_ == kChangingResolution);
2169 DCHECK(!mfc_output_streamon_);
2170 DCHECK(mfc_output_buffer_map_.empty());
2171
2172 // Number of MFC output buffers we need.
2173 struct v4l2_control ctrl;
2174 memset(&ctrl, 0, sizeof(ctrl));
2175 ctrl.id = V4L2_CID_MIN_BUFFERS_FOR_CAPTURE;
2176 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_G_CTRL, &ctrl);
2177 mfc_output_dpb_size_ = ctrl.value;
2178
2179 // Output format setup in Initialize().
2180
2181 // Allocate the output buffers.
2182 struct v4l2_requestbuffers reqbufs;
2183 memset(&reqbufs, 0, sizeof(reqbufs));
2184 reqbufs.count = mfc_output_dpb_size_ + kDpbOutputBufferExtraCount;
2185 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
2186 reqbufs.memory = V4L2_MEMORY_MMAP;
2187 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_REQBUFS, &reqbufs);
2188
2189 // Fill our free-buffers list, and mmap their planes for use as GSC input.
2190 mfc_output_buffer_map_.resize(reqbufs.count);
2191 for (size_t i = 0; i < mfc_output_buffer_map_.size(); ++i) {
2192 mfc_free_output_buffers_.push_back(i);
2193
2194 // Query for the MEMORY_MMAP pointer.
2195 struct v4l2_plane planes[2];
2196 struct v4l2_buffer buffer;
2197 memset(&buffer, 0, sizeof(buffer));
2198 memset(planes, 0, sizeof(planes));
2199 buffer.index = i;
2200 buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
2201 buffer.memory = V4L2_MEMORY_MMAP;
2202 buffer.m.planes = planes;
2203 buffer.length = 2;
2204 IOCTL_OR_ERROR_RETURN_FALSE(mfc_fd_, VIDIOC_QUERYBUF, &buffer);
2205
2206 // Get their user memory for GSC input.
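// Both planes of the NV12MT_16X16 buffer (luma, then interleaved chroma) are
// mapped; EnqueueGscInputRecord() later hands these addresses to GSC as
// V4L2_MEMORY_USERPTR planes.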
2207 for (int j = 0; j < 2; ++j) {
2208 void* address = mmap(NULL, buffer.m.planes[j].length,
2209 PROT_READ | PROT_WRITE, MAP_SHARED, mfc_fd_,
2210 buffer.m.planes[j].m.mem_offset);
2211 if (address == MAP_FAILED) {
2212 DPLOG(ERROR) << "CreateMfcOutputBuffers(): mmap() failed";
2213 return false;
2214 }
2215 mfc_output_buffer_map_[i].address[j] = address;
2216 mfc_output_buffer_map_[i].length[j] = buffer.m.planes[j].length;
2217 }
2218 }
2219
2220 return true;
2221 }
2222
2223 bool ExynosVideoDecodeAccelerator::CreateGscInputBuffers() {
2224 DVLOG(3) << "CreateGscInputBuffers()";
2225 DCHECK(decoder_state_ == kInitialized ||
2226 decoder_state_ == kChangingResolution);
2227 DCHECK(!gsc_input_streamon_);
2228 DCHECK(gsc_input_buffer_map_.empty());
2229
2230 struct v4l2_format format;
2231 memset(&format, 0, sizeof(format));
2232 format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
2233 format.fmt.pix_mp.width = frame_buffer_size_.width();
2234 format.fmt.pix_mp.height = frame_buffer_size_.height();
2235 format.fmt.pix_mp.pixelformat = mfc_output_buffer_pixelformat_;
2236 format.fmt.pix_mp.plane_fmt[0].sizeimage = mfc_output_buffer_size_[0];
2237 format.fmt.pix_mp.plane_fmt[1].sizeimage = mfc_output_buffer_size_[1];
2238 // NV12MT_16X16 is a tiled format for which bytesperline doesn't make much
2239 // sense. The convention seems to be to assume 8bpp for these tiled formats.
2240 format.fmt.pix_mp.plane_fmt[0].bytesperline = frame_buffer_size_.width();
2241 format.fmt.pix_mp.plane_fmt[1].bytesperline = frame_buffer_size_.width();
2242 format.fmt.pix_mp.num_planes = 2;
2243 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_FMT, &format);
2244
2245 struct v4l2_control control;
2246 memset(&control, 0, sizeof(control));
2247 control.id = V4L2_CID_ROTATE;
2248 control.value = 0;
2249 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_CTRL, &control);
2250
2251 memset(&control, 0, sizeof(control));
2252 control.id = V4L2_CID_HFLIP;
2253 control.value = 0;
2254 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_CTRL, &control);
2255
2256 memset(&control, 0, sizeof(control));
2257 control.id = V4L2_CID_VFLIP;
2258 control.value = 0;
2259 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_CTRL, &control);
2260
2261 memset(&control, 0, sizeof(control));
2262 control.id = V4L2_CID_GLOBAL_ALPHA;
2263 control.value = 255;
2264 if (HANDLE_EINTR(ioctl(gsc_fd_, VIDIOC_S_CTRL, &control)) != 0) {
2265 memset(&control, 0, sizeof(control));
2266 control.id = V4L2_CID_ALPHA_COMPONENT;
2267 control.value = 255;
2268 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_CTRL, &control);
2269 }
2270
2271 struct v4l2_requestbuffers reqbufs;
2272 memset(&reqbufs, 0, sizeof(reqbufs));
2273 reqbufs.count = kGscInputBufferCount;
2274 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
2275 reqbufs.memory = V4L2_MEMORY_USERPTR;
2276 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_REQBUFS, &reqbufs);
2277
2278 gsc_input_buffer_map_.resize(reqbufs.count);
2279 for (size_t i = 0; i < gsc_input_buffer_map_.size(); ++i) {
2280 gsc_free_input_buffers_.push_back(i);
2281 gsc_input_buffer_map_[i].mfc_output = -1;
2282 }
2283
2284 return true;
2285 }
2286
2287 bool ExynosVideoDecodeAccelerator::CreateGscOutputBuffers() {
2288 DVLOG(3) << "CreateGscOutputBuffers()";
2289 DCHECK(decoder_state_ == kInitialized ||
2290 decoder_state_ == kChangingResolution);
2291 DCHECK(!gsc_output_streamon_);
2292 DCHECK(gsc_output_buffer_map_.empty());
2293
2294 // GSC outputs into the EGLImages we create from the textures we are
2295 // assigned. Assume RGBA8888 format.
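// The CAPTURE queue is sized to match the MFC output pool
// (mfc_output_dpb_size_ + kDpbOutputBufferExtraCount) and uses DMABUF memory;
// the per-buffer fds are expected to be filled in only after the client
// answers the ProvidePictureBuffers() request (posted at the end of this
// function) with AssignPictureBuffers().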
2296 struct v4l2_format format; 2297 memset(&format, 0, sizeof(format)); 2298 format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; 2299 format.fmt.pix_mp.width = frame_buffer_size_.width(); 2300 format.fmt.pix_mp.height = frame_buffer_size_.height(); 2301 format.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_RGB32; 2302 format.fmt.pix_mp.plane_fmt[0].sizeimage = 2303 frame_buffer_size_.width() * frame_buffer_size_.height() * 4; 2304 format.fmt.pix_mp.plane_fmt[0].bytesperline = frame_buffer_size_.width() * 4; 2305 format.fmt.pix_mp.num_planes = 1; 2306 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_S_FMT, &format); 2307 2308 struct v4l2_requestbuffers reqbufs; 2309 memset(&reqbufs, 0, sizeof(reqbufs)); 2310 reqbufs.count = mfc_output_dpb_size_ + kDpbOutputBufferExtraCount; 2311 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; 2312 reqbufs.memory = V4L2_MEMORY_DMABUF; 2313 IOCTL_OR_ERROR_RETURN_FALSE(gsc_fd_, VIDIOC_REQBUFS, &reqbufs); 2314 2315 // We don't actually fill in the freelist or the map here. That happens once 2316 // we have actual usable buffers, after AssignPictureBuffers(); 2317 gsc_output_buffer_map_.resize(reqbufs.count); 2318 2319 DVLOG(3) << "CreateGscOutputBuffers(): ProvidePictureBuffers(): " 2320 << "buffer_count=" << gsc_output_buffer_map_.size() 2321 << ", width=" << frame_buffer_size_.width() 2322 << ", height=" << frame_buffer_size_.height(); 2323 child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind( 2324 &Client::ProvidePictureBuffers, client_, gsc_output_buffer_map_.size(), 2325 gfx::Size(frame_buffer_size_.width(), frame_buffer_size_.height()), 2326 GL_TEXTURE_2D)); 2327 2328 return true; 2329 } 2330 2331 void ExynosVideoDecodeAccelerator::DestroyMfcInputBuffers() { 2332 DVLOG(3) << "DestroyMfcInputBuffers()"; 2333 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); 2334 DCHECK(!mfc_input_streamon_); 2335 2336 for (size_t i = 0; i < mfc_input_buffer_map_.size(); ++i) { 2337 if (mfc_input_buffer_map_[i].address != NULL) { 2338 munmap(mfc_input_buffer_map_[i].address, 2339 mfc_input_buffer_map_[i].length); 2340 } 2341 } 2342 2343 struct v4l2_requestbuffers reqbufs; 2344 memset(&reqbufs, 0, sizeof(reqbufs)); 2345 reqbufs.count = 0; 2346 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; 2347 reqbufs.memory = V4L2_MEMORY_MMAP; 2348 if (ioctl(mfc_fd_, VIDIOC_REQBUFS, &reqbufs) != 0) 2349 DPLOG(ERROR) << "DestroyMfcInputBuffers(): ioctl() failed: VIDIOC_REQBUFS"; 2350 2351 mfc_input_buffer_map_.clear(); 2352 mfc_free_input_buffers_.clear(); 2353 } 2354 2355 void ExynosVideoDecodeAccelerator::DestroyMfcOutputBuffers() { 2356 DVLOG(3) << "DestroyMfcOutputBuffers()"; 2357 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); 2358 DCHECK(!mfc_output_streamon_); 2359 2360 for (size_t i = 0; i < mfc_output_buffer_map_.size(); ++i) { 2361 if (mfc_output_buffer_map_[i].address[0] != NULL) 2362 munmap(mfc_output_buffer_map_[i].address[0], 2363 mfc_output_buffer_map_[i].length[0]); 2364 if (mfc_output_buffer_map_[i].address[1] != NULL) 2365 munmap(mfc_output_buffer_map_[i].address[1], 2366 mfc_output_buffer_map_[i].length[1]); 2367 } 2368 2369 struct v4l2_requestbuffers reqbufs; 2370 memset(&reqbufs, 0, sizeof(reqbufs)); 2371 reqbufs.count = 0; 2372 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; 2373 reqbufs.memory = V4L2_MEMORY_MMAP; 2374 if (ioctl(mfc_fd_, VIDIOC_REQBUFS, &reqbufs) != 0) 2375 DPLOG(ERROR) << "DestroyMfcOutputBuffers() ioctl() failed: VIDIOC_REQBUFS"; 2376 2377 mfc_output_buffer_map_.clear(); 2378 mfc_free_output_buffers_.clear(); 2379 } 2380 2381 
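// Unlike the MFC buffers above, the GSC input queue does not own any memory
// of its own (it points into the MFC output mappings), so there is nothing to
// unmap here; freeing the V4L2 buffers and clearing our bookkeeping suffices.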
void ExynosVideoDecodeAccelerator::DestroyGscInputBuffers() { 2382 DVLOG(3) << "DestroyGscInputBuffers()"; 2383 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); 2384 DCHECK(!gsc_input_streamon_); 2385 2386 struct v4l2_requestbuffers reqbufs; 2387 memset(&reqbufs, 0, sizeof(reqbufs)); 2388 reqbufs.count = 0; 2389 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; 2390 reqbufs.memory = V4L2_MEMORY_DMABUF; 2391 if (ioctl(gsc_fd_, VIDIOC_REQBUFS, &reqbufs) != 0) 2392 DPLOG(ERROR) << "DestroyGscInputBuffers(): ioctl() failed: VIDIOC_REQBUFS"; 2393 2394 gsc_input_buffer_map_.clear(); 2395 gsc_free_input_buffers_.clear(); 2396 } 2397 2398 void ExynosVideoDecodeAccelerator::DestroyGscOutputBuffers() { 2399 DVLOG(3) << "DestroyGscOutputBuffers()"; 2400 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); 2401 DCHECK(!gsc_output_streamon_); 2402 2403 if (gsc_output_buffer_map_.size() != 0) { 2404 if (!make_context_current_.Run()) 2405 DLOG(ERROR) << "DestroyGscOutputBuffers(): " 2406 << "could not make context current"; 2407 2408 size_t i = 0; 2409 do { 2410 GscOutputRecord& output_record = gsc_output_buffer_map_[i]; 2411 if (output_record.fd != -1) 2412 HANDLE_EINTR(close(output_record.fd)); 2413 if (output_record.egl_image != EGL_NO_IMAGE_KHR) 2414 eglDestroyImageKHR(egl_display_, output_record.egl_image); 2415 if (output_record.egl_sync != EGL_NO_SYNC_KHR) 2416 eglDestroySyncKHR(egl_display_, output_record.egl_sync); 2417 if (client_) { 2418 DVLOG(1) << "DestroyGscOutputBuffers(): " 2419 << "dismissing PictureBuffer id=" << output_record.picture_id; 2420 client_->DismissPictureBuffer(output_record.picture_id); 2421 } 2422 ++i; 2423 } while (i < gsc_output_buffer_map_.size()); 2424 } 2425 2426 struct v4l2_requestbuffers reqbufs; 2427 memset(&reqbufs, 0, sizeof(reqbufs)); 2428 reqbufs.count = 0; 2429 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; 2430 reqbufs.memory = V4L2_MEMORY_DMABUF; 2431 if (ioctl(gsc_fd_, VIDIOC_REQBUFS, &reqbufs) != 0) 2432 DPLOG(ERROR) << "DestroyGscOutputBuffers(): ioctl() failed: VIDIOC_REQBUFS"; 2433 2434 gsc_output_buffer_map_.clear(); 2435 gsc_free_output_buffers_.clear(); 2436 } 2437 2438 void ExynosVideoDecodeAccelerator::ResolutionChangeDestroyBuffers() { 2439 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); 2440 DVLOG(3) << "ResolutionChangeDestroyBuffers()"; 2441 2442 DestroyGscInputBuffers(); 2443 DestroyGscOutputBuffers(); 2444 DestroyMfcOutputBuffers(); 2445 2446 // Finish resolution change on decoder thread. 2447 decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind( 2448 &ExynosVideoDecodeAccelerator::FinishResolutionChange, 2449 base::Unretained(this))); 2450 } 2451 2452 } // namespace content 2453