// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/renderer/media/rtc_video_decoder.h"

#include "base/bind.h"
#include "base/logging.h"
#include "base/memory/ref_counted.h"
#include "base/message_loop/message_loop_proxy.h"
#include "base/safe_numerics.h"
#include "base/stl_util.h"
#include "base/task_runner_util.h"
#include "content/child/child_thread.h"
#include "media/base/bind_to_loop.h"
#include "media/filters/gpu_video_decoder_factories.h"
#include "third_party/webrtc/system_wrappers/interface/ref_count.h"

namespace content {

// Bitstream buffer ids are kept in [0, ID_LAST] (30 bits) so that the
// wraparound arithmetic in IsBufferAfterReset() never overflows a signed
// int32. ID_HALF is the window used to decide ordering across wraparound;
// ID_INVALID marks "no reset has happened yet".
const int32 RTCVideoDecoder::ID_LAST = 0x3FFFFFFF;
const int32 RTCVideoDecoder::ID_HALF = 0x20000000;
const int32 RTCVideoDecoder::ID_INVALID = -1;

// Maximum number of concurrent VDA::Decode() operations RVD will maintain.
// Higher values allow better pipelining in the GPU, but also require more
// resources.
static const size_t kMaxInFlightDecodes = 8;

// Size of shared-memory segments we allocate. Since we reuse them we let them
// be on the beefy side.
static const size_t kSharedMemorySegmentBytes = 100 << 10;

// Maximum number of allocated shared-memory segments.
static const int kMaxNumSharedMemorySegments = 16;

// Maximum number of pending WebRTC buffers that are waiting for the shared
// memory. 10 seconds for 30 fps.
static const size_t kMaxNumOfPendingBuffers = 300;

// A shared memory segment and its allocated size. This class has the ownership
// of |shm|.
class RTCVideoDecoder::SHMBuffer {
 public:
  SHMBuffer(base::SharedMemory* shm, size_t size);
  ~SHMBuffer();
  base::SharedMemory* const shm;  // Owned; closed in the destructor.
  const size_t size;              // Allocated size of |shm| in bytes.
};

RTCVideoDecoder::SHMBuffer::SHMBuffer(base::SharedMemory* shm, size_t size)
    : shm(shm), size(size) {}

RTCVideoDecoder::SHMBuffer::~SHMBuffer() { shm->Close(); }

// Metadata recorded per bitstream buffer so that frame information (timestamp
// and dimensions) can be recovered when the decoded picture comes back from
// the VDA carrying only the bitstream buffer id.
RTCVideoDecoder::BufferData::BufferData(int32 bitstream_buffer_id,
                                        uint32_t timestamp,
                                        int width,
                                        int height,
                                        size_t size)
    : bitstream_buffer_id(bitstream_buffer_id),
      timestamp(timestamp),
      width(width),
      height(height),
      size(size) {}

RTCVideoDecoder::BufferData::BufferData() {}

RTCVideoDecoder::BufferData::~BufferData() {}

// Must be called off the VDA message loop; blocks until Initialize() has run
// on the VDA loop and registered |this| as a destruction observer there.
RTCVideoDecoder::RTCVideoDecoder(
    const scoped_refptr<media::GpuVideoDecoderFactories>& factories)
    : weak_factory_(this),
      weak_this_(weak_factory_.GetWeakPtr()),
      factories_(factories),
      vda_loop_proxy_(factories->GetMessageLoop()),
      decoder_texture_target_(0),
      next_picture_buffer_id_(0),
      state_(UNINITIALIZED),
      decode_complete_callback_(NULL),
      num_shm_buffers_(0),
      next_bitstream_buffer_id_(0),
      reset_bitstream_buffer_id_(ID_INVALID) {
  DCHECK(!vda_loop_proxy_->BelongsToCurrentThread());
  base::WaitableEvent message_loop_async_waiter(false, false);
  // Waiting here is safe. The media thread is stopped in the child thread and
  // the child thread is blocked when VideoDecoderFactory::CreateVideoDecoder
  // runs.
  vda_loop_proxy_->PostTask(FROM_HERE,
                            base::Bind(&RTCVideoDecoder::Initialize,
                                       base::Unretained(this),
                                       &message_loop_async_waiter));
  message_loop_async_waiter.Wait();
}

RTCVideoDecoder::~RTCVideoDecoder() {
  DVLOG(2) << "~RTCVideoDecoder";
  // Destroy VDA and remove |this| from the observer if this is vda thread.
  if (vda_loop_proxy_->BelongsToCurrentThread()) {
    base::MessageLoop::current()->RemoveDestructionObserver(this);
    DestroyVDA();
  } else {
    // VDA should have been destroyed in WillDestroyCurrentMessageLoop.
    DCHECK(!vda_);
  }

  // Delete all shared memories.
  STLDeleteElements(&available_shm_segments_);
  STLDeleteValues(&bitstream_buffers_in_decoder_);
  STLDeleteContainerPairFirstPointers(decode_buffers_.begin(),
                                      decode_buffers_.end());
  decode_buffers_.clear();

  // Delete WebRTC input buffers. These were cloned with new[] in
  // SaveToPendingBuffers_Locked().
  for (std::deque<std::pair<webrtc::EncodedImage, BufferData> >::iterator it =
           pending_buffers_.begin();
       it != pending_buffers_.end();
       ++it) {
    delete[] it->first._buffer;
  }
}

// Factory. Returns NULL (inside the scoped_ptr) if |type| is unsupported or
// the platform cannot create a hardware VideoDecodeAccelerator.
scoped_ptr<RTCVideoDecoder> RTCVideoDecoder::Create(
    webrtc::VideoCodecType type,
    const scoped_refptr<media::GpuVideoDecoderFactories>& factories) {
  scoped_ptr<RTCVideoDecoder> decoder;
  // Convert WebRTC codec type to media codec profile.
  media::VideoCodecProfile profile;
  switch (type) {
    case webrtc::kVideoCodecVP8:
      profile = media::VP8PROFILE_MAIN;
      break;
    default:
      DVLOG(2) << "Video codec not supported:" << type;
      return decoder.Pass();
  }

  decoder.reset(new RTCVideoDecoder(factories));
  decoder->vda_
      .reset(factories->CreateVideoDecodeAccelerator(profile, decoder.get()));
  // vda can be NULL if VP8 is not supported.
  if (decoder->vda_ != NULL) {
    decoder->state_ = INITIALIZED;
  } else {
    // The decoder must die on the VDA loop (it registered a destruction
    // observer there in its constructor), so it cannot be deleted inline.
    factories->GetMessageLoop()->DeleteSoon(FROM_HERE, decoder.release());
  }
  return decoder.Pass();
}

int32_t RTCVideoDecoder::InitDecode(const webrtc::VideoCodec* codecSettings,
                                    int32_t /*numberOfCores*/) {
  DVLOG(2) << "InitDecode";
  DCHECK_EQ(codecSettings->codecType, webrtc::kVideoCodecVP8);
  if (codecSettings->codecSpecific.VP8.feedbackModeOn) {
    LOG(ERROR) << "Feedback mode not supported";
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  base::AutoLock auto_lock(lock_);
  if (state_ == UNINITIALIZED || state_ == DECODE_ERROR) {
    LOG(ERROR) << "VDA is not initialized. state=" << state_;
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  // Create some shared memory if the queue is empty.
  if (available_shm_segments_.size() == 0) {
    vda_loop_proxy_->PostTask(FROM_HERE,
                              base::Bind(&RTCVideoDecoder::CreateSHM,
                                         weak_this_,
                                         kMaxInFlightDecodes,
                                         kSharedMemorySegmentBytes));
  }
  return WEBRTC_VIDEO_CODEC_OK;
}

// webrtc::VideoDecoder entry point; called on the decoding thread. Copies the
// input into shared memory (or queues it when no SHM is free) and kicks the
// VDA loop to start decoding.
int32_t RTCVideoDecoder::Decode(
    const webrtc::EncodedImage& inputImage,
    bool missingFrames,
    const webrtc::RTPFragmentationHeader* /*fragmentation*/,
    const webrtc::CodecSpecificInfo* /*codecSpecificInfo*/,
    int64_t /*renderTimeMs*/) {
  DVLOG(3) << "Decode";

  base::AutoLock auto_lock(lock_);
  if (state_ == UNINITIALIZED || decode_complete_callback_ == NULL) {
    LOG(ERROR) << "The decoder has not initialized.";
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  if (state_ == DECODE_ERROR) {
    LOG(ERROR) << "Decoding error occurred.";
    return WEBRTC_VIDEO_CODEC_ERROR;
  }
  if (missingFrames || !inputImage._completeFrame) {
    DLOG(ERROR) << "Missing or incomplete frames.";
    // Unlike the SW decoder in libvpx, hw decoder cannot handle broken frames.
    // Return an error to request a key frame.
    return WEBRTC_VIDEO_CODEC_ERROR;
  }
  // Latch the frame size from key frames; delta frames reuse the last size.
  if (inputImage._frameType == webrtc::kKeyFrame)
    frame_size_.SetSize(inputImage._encodedWidth, inputImage._encodedHeight);

  // Create buffer metadata.
  BufferData buffer_data(next_bitstream_buffer_id_,
                         inputImage._timeStamp,
                         frame_size_.width(),
                         frame_size_.height(),
                         inputImage._length);
  // Mask against 30 bits, to avoid (undefined) wraparound on signed integer.
  next_bitstream_buffer_id_ = (next_bitstream_buffer_id_ + 1) & ID_LAST;

  // If the shared memory is available and there are no pending buffers, send
  // the buffer for decode. If not, save the buffer in the queue for decode
  // later.
  scoped_ptr<SHMBuffer> shm_buffer;
  if (pending_buffers_.size() == 0)
    shm_buffer = GetSHM_Locked(inputImage._length);
  if (!shm_buffer) {
    int32_t result = SaveToPendingBuffers_Locked(inputImage, buffer_data);
    return result ? WEBRTC_VIDEO_CODEC_OK : WEBRTC_VIDEO_CODEC_ERROR;
  }

  SaveToDecodeBuffers_Locked(inputImage, shm_buffer.Pass(), buffer_data);
  vda_loop_proxy_->PostTask(
      FROM_HERE, base::Bind(&RTCVideoDecoder::RequestBufferDecode, weak_this_));
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t RTCVideoDecoder::RegisterDecodeCompleteCallback(
    webrtc::DecodedImageCallback* callback) {
  DVLOG(2) << "RegisterDecodeCompleteCallback";
  base::AutoLock auto_lock(lock_);
  decode_complete_callback_ = callback;
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t RTCVideoDecoder::Release() {
  DVLOG(2) << "Release";
  // Do not destroy VDA because WebRTC can call InitDecode and start decoding
  // again.
  return Reset();
}

// Records the id of the last buffer submitted before the reset so that
// decode callbacks and queued buffers from before the reset can be dropped,
// then asks the VDA loop to reset the accelerator.
int32_t RTCVideoDecoder::Reset() {
  DVLOG(2) << "Reset";
  base::AutoLock auto_lock(lock_);
  if (state_ == UNINITIALIZED) {
    LOG(ERROR) << "Decoder not initialized.";
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  if (next_bitstream_buffer_id_ != 0)
    reset_bitstream_buffer_id_ = next_bitstream_buffer_id_ - 1;
  else
    reset_bitstream_buffer_id_ = ID_LAST;
  // If VDA is already resetting, no need to request the reset again.
  if (state_ != RESETTING) {
    state_ = RESETTING;
    vda_loop_proxy_->PostTask(
        FROM_HERE, base::Bind(&RTCVideoDecoder::ResetInternal, weak_this_));
  }
  return WEBRTC_VIDEO_CODEC_OK;
}

void RTCVideoDecoder::NotifyInitializeDone() {
  DVLOG(2) << "NotifyInitializeDone";
  // This VDA::Client method is not expected for this client; the VDA is
  // created synchronously in Create().
  NOTREACHED();
}

// VDA::Client callback (VDA loop): allocate |count| textures of |size| and
// hand them to the VDA as picture buffers.
void RTCVideoDecoder::ProvidePictureBuffers(uint32 count,
                                            const gfx::Size& size,
                                            uint32 texture_target) {
  DCHECK(vda_loop_proxy_->BelongsToCurrentThread());
  DVLOG(3) << "ProvidePictureBuffers. texture_target=" << texture_target;

  if (!vda_)
    return;

  std::vector<uint32> texture_ids;
  std::vector<gpu::Mailbox> texture_mailboxes;
  decoder_texture_target_ = texture_target;
  // Discards the sync point returned here since PictureReady will imply that
  // the produce has already happened, and the texture is ready for use.
  if (!factories_->CreateTextures(count,
                                  size,
                                  &texture_ids,
                                  &texture_mailboxes,
                                  decoder_texture_target_)) {
    NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE);
    return;
  }
  DCHECK_EQ(count, texture_ids.size());
  DCHECK_EQ(count, texture_mailboxes.size());

  std::vector<media::PictureBuffer> picture_buffers;
  for (size_t i = 0; i < texture_ids.size(); ++i) {
    picture_buffers.push_back(media::PictureBuffer(
        next_picture_buffer_id_++, size, texture_ids[i], texture_mailboxes[i]));
    bool inserted = assigned_picture_buffers_.insert(std::make_pair(
        picture_buffers.back().id(), picture_buffers.back())).second;
    DCHECK(inserted);
  }
  vda_->AssignPictureBuffers(picture_buffers);
}

// VDA::Client callback (VDA loop): the VDA no longer needs this picture
// buffer. Its texture is deleted now, or deferred if it is still on display.
void RTCVideoDecoder::DismissPictureBuffer(int32 id) {
  DVLOG(3) << "DismissPictureBuffer. id=" << id;
  DCHECK(vda_loop_proxy_->BelongsToCurrentThread());

  std::map<int32, media::PictureBuffer>::iterator it =
      assigned_picture_buffers_.find(id);
  if (it == assigned_picture_buffers_.end()) {
    NOTREACHED() << "Missing picture buffer: " << id;
    return;
  }

  media::PictureBuffer buffer_to_dismiss = it->second;
  assigned_picture_buffers_.erase(it);

  std::set<int32>::iterator at_display_it =
      picture_buffers_at_display_.find(id);

  if (at_display_it == picture_buffers_at_display_.end()) {
    // We can delete the texture immediately as it's not being displayed.
    factories_->DeleteTexture(buffer_to_dismiss.texture_id());
  } else {
    // Texture in display. Postpone deletion until after it's returned to us.
    bool inserted = dismissed_picture_buffers_
        .insert(std::make_pair(id, buffer_to_dismiss)).second;
    DCHECK(inserted);
  }
}

// VDA::Client callback (VDA loop): a decoded picture is ready. Wraps it in a
// WebRTC frame and delivers it via |decode_complete_callback_|.
void RTCVideoDecoder::PictureReady(const media::Picture& picture) {
  DVLOG(3) << "PictureReady";
  DCHECK(vda_loop_proxy_->BelongsToCurrentThread());

  std::map<int32, media::PictureBuffer>::iterator it =
      assigned_picture_buffers_.find(picture.picture_buffer_id());
  if (it == assigned_picture_buffers_.end()) {
    NOTREACHED() << "Missing picture buffer: " << picture.picture_buffer_id();
    NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE);
    return;
  }
  const media::PictureBuffer& pb = it->second;

  // Create a media::VideoFrame.
  uint32_t timestamp = 0, width = 0, height = 0;
  size_t size = 0;
  GetBufferData(
      picture.bitstream_buffer_id(), &timestamp, &width, &height, &size);
  scoped_refptr<media::VideoFrame> frame =
      CreateVideoFrame(picture, pb, timestamp, width, height, size);
  bool inserted =
      picture_buffers_at_display_.insert(picture.picture_buffer_id()).second;
  DCHECK(inserted);

  // Create a WebRTC video frame.
  // TODO(wuchengli): make media::VideoFrame an opaque native handle and put it
  // into WebRTC frame.
  webrtc::I420VideoFrame decoded_image;
  decoded_image.CreateEmptyFrame(
      width, height, width, (width + 1) / 2, (width + 1) / 2);
  decoded_image.set_timestamp(timestamp);

  // Invoke decode callback. WebRTC expects no callback after Reset or Release.
  {
    base::AutoLock auto_lock(lock_);
    DCHECK(decode_complete_callback_ != NULL);
    if (IsBufferAfterReset(picture.bitstream_buffer_id(),
                           reset_bitstream_buffer_id_)) {
      decode_complete_callback_->Decoded(decoded_image);
    }
  }
}

// Wraps the decoded texture in a media::VideoFrame whose release callback
// returns the picture buffer to the VDA (ReusePictureBuffer on this loop).
scoped_refptr<media::VideoFrame> RTCVideoDecoder::CreateVideoFrame(
    const media::Picture& picture,
    const media::PictureBuffer& pb,
    uint32_t timestamp,
    uint32_t width,
    uint32_t height,
    size_t size) {
  gfx::Rect visible_rect(width, height);
  gfx::Size natural_size(width, height);
  DCHECK(decoder_texture_target_);
  // Convert timestamp from 90KHz to ms.
  base::TimeDelta timestamp_ms = base::TimeDelta::FromInternalValue(
      base::checked_numeric_cast<uint64_t>(timestamp) * 1000 / 90);
  return media::VideoFrame::WrapNativeTexture(
      new media::VideoFrame::MailboxHolder(
          pb.texture_mailbox(),
          0,  // sync_point
          media::BindToCurrentLoop(
              base::Bind(&RTCVideoDecoder::ReusePictureBuffer,
                         weak_this_,
                         picture.picture_buffer_id()))),
      decoder_texture_target_,
      pb.size(),
      visible_rect,
      natural_size,
      timestamp_ms,
      base::Bind(&media::GpuVideoDecoderFactories::ReadPixels,
                 factories_,
                 pb.texture_id(),
                 decoder_texture_target_,
                 natural_size),
      base::Closure());
}

// VDA::Client callback (VDA loop): the VDA is done with this bitstream
// buffer, so its shared memory can be recycled and more decode work issued.
void RTCVideoDecoder::NotifyEndOfBitstreamBuffer(int32 id) {
  DVLOG(3) << "NotifyEndOfBitstreamBuffer. id=" << id;
  DCHECK(vda_loop_proxy_->BelongsToCurrentThread());

  std::map<int32, SHMBuffer*>::iterator it =
      bitstream_buffers_in_decoder_.find(id);
  if (it == bitstream_buffers_in_decoder_.end()) {
    NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE);
    NOTREACHED() << "Missing bitstream buffer: " << id;
    return;
  }

  {
    base::AutoLock auto_lock(lock_);
    PutSHM_Locked(scoped_ptr<SHMBuffer>(it->second));
  }
  bitstream_buffers_in_decoder_.erase(it);

  RequestBufferDecode();
}

void RTCVideoDecoder::NotifyFlushDone() {
  DVLOG(3) << "NotifyFlushDone";
  // This client never calls VDA::Flush().
  NOTREACHED() << "Unexpected flush done notification.";
}

void RTCVideoDecoder::NotifyResetDone() {
  DCHECK(vda_loop_proxy_->BelongsToCurrentThread());
  DVLOG(3) << "NotifyResetDone";

  if (!vda_)
    return;

  input_buffer_data_.clear();
  {
    base::AutoLock auto_lock(lock_);
    state_ = INITIALIZED;
  }
  // Send the pending buffers for decoding.
  RequestBufferDecode();
}

void RTCVideoDecoder::NotifyError(media::VideoDecodeAccelerator::Error error) {
  DCHECK(vda_loop_proxy_->BelongsToCurrentThread());
  if (!vda_)
    return;

  LOG(ERROR) << "VDA Error:" << error;
  DestroyVDA();

  base::AutoLock auto_lock(lock_);
  state_ = DECODE_ERROR;
}

// base::MessageLoop::DestructionObserver: the VDA loop is going away, so the
// VDA (and any callbacks bound with |weak_this_|) must be torn down now.
void RTCVideoDecoder::WillDestroyCurrentMessageLoop() {
  DVLOG(2) << "WillDestroyCurrentMessageLoop";
  DCHECK(vda_loop_proxy_->BelongsToCurrentThread());
  factories_->Abort();
  weak_factory_.InvalidateWeakPtrs();
  DestroyVDA();
}

// Runs on the VDA loop; registers |this| as a destruction observer and
// unblocks the constructor waiting on |waiter|.
void RTCVideoDecoder::Initialize(base::WaitableEvent* waiter) {
  DVLOG(2) << "Initialize";
  DCHECK(vda_loop_proxy_->BelongsToCurrentThread());
  base::MessageLoop::current()->AddDestructionObserver(this);
  waiter->Signal();
}

// Runs on the VDA loop; drains |decode_buffers_| into the VDA, keeping at
// most kMaxInFlightDecodes outstanding and dropping pre-reset buffers.
void RTCVideoDecoder::RequestBufferDecode() {
  DCHECK(vda_loop_proxy_->BelongsToCurrentThread());
  if (!vda_)
    return;

  MovePendingBuffersToDecodeBuffers();

  while (CanMoreDecodeWorkBeDone()) {
    // Get a buffer and data from the queue.
    SHMBuffer* shm_buffer = NULL;
    BufferData buffer_data;
    {
      base::AutoLock auto_lock(lock_);
      // Do not request decode if VDA is resetting.
      if (decode_buffers_.size() == 0 || state_ == RESETTING)
        return;
      shm_buffer = decode_buffers_.front().first;
      buffer_data = decode_buffers_.front().second;
      decode_buffers_.pop_front();
      // Drop the buffers before Reset or Release is called.
      if (!IsBufferAfterReset(buffer_data.bitstream_buffer_id,
                              reset_bitstream_buffer_id_)) {
        PutSHM_Locked(scoped_ptr<SHMBuffer>(shm_buffer));
        continue;
      }
    }

    // Create a BitstreamBuffer and send to VDA to decode.
    media::BitstreamBuffer bitstream_buffer(buffer_data.bitstream_buffer_id,
                                            shm_buffer->shm->handle(),
                                            buffer_data.size);
    bool inserted = bitstream_buffers_in_decoder_
        .insert(std::make_pair(bitstream_buffer.id(), shm_buffer)).second;
    DCHECK(inserted);
    RecordBufferData(buffer_data);
    vda_->Decode(bitstream_buffer);
  }
}

bool RTCVideoDecoder::CanMoreDecodeWorkBeDone() {
  return bitstream_buffers_in_decoder_.size() < kMaxInFlightDecodes;
}

// Returns true if |id_buffer| was submitted after |id_reset|, accounting for
// the 30-bit wraparound of bitstream buffer ids. A buffer is considered
// "after" the reset if it is within ID_HALF ids ahead of |id_reset|.
bool RTCVideoDecoder::IsBufferAfterReset(int32 id_buffer, int32 id_reset) {
  if (id_reset == ID_INVALID)
    return true;
  int32 diff = id_buffer - id_reset;
  if (diff <= 0)
    diff += ID_LAST + 1;
  return diff < ID_HALF;
}

void RTCVideoDecoder::SaveToDecodeBuffers_Locked(
    const webrtc::EncodedImage& input_image,
    scoped_ptr<SHMBuffer> shm_buffer,
    const BufferData& buffer_data) {
  memcpy(shm_buffer->shm->memory(), input_image._buffer, input_image._length);
  std::pair<SHMBuffer*, BufferData> buffer_pair =
      std::make_pair(shm_buffer.release(), buffer_data);

  // Store the buffer and the metadata to the queue.
  decode_buffers_.push_back(buffer_pair);
}

// Clones |input_image| onto the heap and queues it until shared memory
// becomes available. Returns false when the pending queue is full.
bool RTCVideoDecoder::SaveToPendingBuffers_Locked(
    const webrtc::EncodedImage& input_image,
    const BufferData& buffer_data) {
  DVLOG(2) << "SaveToPendingBuffers_Locked"
           << ". pending_buffers size=" << pending_buffers_.size()
           << ". decode_buffers_ size=" << decode_buffers_.size()
           << ". available_shm size=" << available_shm_segments_.size();
  // Queued too many buffers. Something goes wrong.
  if (pending_buffers_.size() >= kMaxNumOfPendingBuffers) {
    LOG(WARNING) << "Too many pending buffers!";
    return false;
  }

  // Clone the input image and save it to the queue.
  uint8_t* buffer = new uint8_t[input_image._length];
  // TODO(wuchengli): avoid memcpy. Extend webrtc::VideoDecoder::Decode()
  // interface to take a non-const ptr to the frame and add a method to the
  // frame that will swap buffers with another.
  memcpy(buffer, input_image._buffer, input_image._length);
  webrtc::EncodedImage encoded_image(
      buffer, input_image._length, input_image._length);
  std::pair<webrtc::EncodedImage, BufferData> buffer_pair =
      std::make_pair(encoded_image, buffer_data);

  pending_buffers_.push_back(buffer_pair);
  return true;
}

// Moves as many pending buffers as possible into |decode_buffers_|, stopping
// when shared memory runs out. Frees the heap clones made when queueing.
void RTCVideoDecoder::MovePendingBuffersToDecodeBuffers() {
  base::AutoLock auto_lock(lock_);
  while (pending_buffers_.size() > 0) {
    // Get a pending buffer from the queue.
    const webrtc::EncodedImage& input_image = pending_buffers_.front().first;
    const BufferData& buffer_data = pending_buffers_.front().second;

    // Drop the frame if it comes before Reset or Release.
    if (!IsBufferAfterReset(buffer_data.bitstream_buffer_id,
                            reset_bitstream_buffer_id_)) {
      delete[] input_image._buffer;
      pending_buffers_.pop_front();
      continue;
    }
    // Get shared memory and save it to decode buffers.
    scoped_ptr<SHMBuffer> shm_buffer = GetSHM_Locked(input_image._length);
    if (!shm_buffer)
      return;
    SaveToDecodeBuffers_Locked(input_image, shm_buffer.Pass(), buffer_data);
    delete[] input_image._buffer;
    pending_buffers_.pop_front();
  }
}

void RTCVideoDecoder::ResetInternal() {
  DCHECK(vda_loop_proxy_->BelongsToCurrentThread());
  DVLOG(2) << "ResetInternal";
  if (vda_)
    vda_->Reset();
}

// Release callback for frames created in CreateVideoFrame (runs on the VDA
// loop via BindToCurrentLoop). Returns the picture buffer to the VDA, or
// finishes a deferred dismissal if the buffer was dismissed while displayed.
void RTCVideoDecoder::ReusePictureBuffer(int64 picture_buffer_id,
                                         uint32 sync_point) {
  DCHECK(vda_loop_proxy_->BelongsToCurrentThread());
  DVLOG(3) << "ReusePictureBuffer. id=" << picture_buffer_id;

  if (!vda_)
    return;

  CHECK(!picture_buffers_at_display_.empty());

  size_t num_erased = picture_buffers_at_display_.erase(picture_buffer_id);
  DCHECK(num_erased);

  std::map<int32, media::PictureBuffer>::iterator it =
      assigned_picture_buffers_.find(picture_buffer_id);

  if (it == assigned_picture_buffers_.end()) {
    // This picture was dismissed while in display, so we postponed deletion.
    it = dismissed_picture_buffers_.find(picture_buffer_id);
    DCHECK(it != dismissed_picture_buffers_.end());
    factories_->DeleteTexture(it->second.texture_id());
    dismissed_picture_buffers_.erase(it);
    return;
  }

  factories_->WaitSyncPoint(sync_point);

  vda_->ReusePictureBuffer(picture_buffer_id);
}

// Deletes every texture we still own, both assigned and dismissed-but-held.
void RTCVideoDecoder::DestroyTextures() {
  DCHECK(vda_loop_proxy_->BelongsToCurrentThread());
  std::map<int32, media::PictureBuffer>::iterator it;

  for (it = assigned_picture_buffers_.begin();
       it != assigned_picture_buffers_.end();
       ++it) {
    factories_->DeleteTexture(it->second.texture_id());
  }
  assigned_picture_buffers_.clear();

  for (it = dismissed_picture_buffers_.begin();
       it != dismissed_picture_buffers_.end();
       ++it) {
    factories_->DeleteTexture(it->second.texture_id());
  }
  dismissed_picture_buffers_.clear();
}

void RTCVideoDecoder::DestroyVDA() {
  DVLOG(2) << "DestroyVDA";
  DCHECK(vda_loop_proxy_->BelongsToCurrentThread());
  // VDA::Destroy() deletes the VDA itself, so ownership is released first.
  if (vda_)
    vda_.release()->Destroy();
  DestroyTextures();
  base::AutoLock auto_lock(lock_);
  state_ = UNINITIALIZED;
}

// Returns a reusable SHM segment of at least |min_size| bytes, or NULL if
// none is available; in the latter (or nearly-empty) case, asynchronously
// asks the VDA loop to allocate more.
scoped_ptr<RTCVideoDecoder::SHMBuffer> RTCVideoDecoder::GetSHM_Locked(
    size_t min_size) {
  // Reuse a SHM if possible.
  SHMBuffer* ret = NULL;
  if (!available_shm_segments_.empty() &&
      available_shm_segments_.back()->size >= min_size) {
    ret = available_shm_segments_.back();
    available_shm_segments_.pop_back();
  }
  // Post to vda thread to create shared memory if SHM cannot be reused or the
  // queue is almost empty.
  if (num_shm_buffers_ < kMaxNumSharedMemorySegments &&
      (ret == NULL || available_shm_segments_.size() <= 1)) {
    vda_loop_proxy_->PostTask(
        FROM_HERE,
        base::Bind(&RTCVideoDecoder::CreateSHM, weak_this_, 1, min_size));
  }
  return scoped_ptr<SHMBuffer>(ret);
}

void RTCVideoDecoder::PutSHM_Locked(scoped_ptr<SHMBuffer> shm_buffer) {
  available_shm_segments_.push_back(shm_buffer.release());
}

// Runs on the VDA loop; allocates up to |number| segments of at least
// |min_size| bytes each, respecting the kMaxNumSharedMemorySegments cap.
void RTCVideoDecoder::CreateSHM(int number, size_t min_size) {
  DCHECK(vda_loop_proxy_->BelongsToCurrentThread());
  DVLOG(2) << "CreateSHM. size=" << min_size;
  int number_to_allocate;
  {
    base::AutoLock auto_lock(lock_);
    number_to_allocate =
        std::min(kMaxNumSharedMemorySegments - num_shm_buffers_, number);
  }
  size_t size_to_allocate = std::max(min_size, kSharedMemorySegmentBytes);
  for (int i = 0; i < number_to_allocate; i++) {
    base::SharedMemory* shm = factories_->CreateSharedMemory(size_to_allocate);
    if (shm != NULL) {
      base::AutoLock auto_lock(lock_);
      num_shm_buffers_++;
      PutSHM_Locked(
          scoped_ptr<SHMBuffer>(new SHMBuffer(shm, size_to_allocate)));
    }
  }
  // Kick off the decoding.
  RequestBufferDecode();
}

void RTCVideoDecoder::RecordBufferData(const BufferData& buffer_data) {
  input_buffer_data_.push_front(buffer_data);
  // Why this value? Because why not. avformat.h:MAX_REORDER_DELAY is 16, but
  // that's too small for some pathological B-frame test videos. The cost of
  // using too-high a value is low (192 bits per extra slot).
  static const size_t kMaxInputBufferDataSize = 128;
  // Pop from the back of the list, because that's the oldest and least likely
  // to be useful in the future data.
  if (input_buffer_data_.size() > kMaxInputBufferDataSize)
    input_buffer_data_.pop_back();
}

// Looks up the metadata recorded for |bitstream_buffer_id| and copies the
// timestamp and dimensions into the out-params.
// NOTE(review): |size| is never written by this function even though callers
// pass it; callers must not rely on it — confirm whether it should be set
// from |it->size| or the parameter removed.
void RTCVideoDecoder::GetBufferData(int32 bitstream_buffer_id,
                                    uint32_t* timestamp,
                                    uint32_t* width,
                                    uint32_t* height,
                                    size_t* size) {
  for (std::list<BufferData>::iterator it = input_buffer_data_.begin();
       it != input_buffer_data_.end();
       ++it) {
    if (it->bitstream_buffer_id != bitstream_buffer_id)
      continue;
    *timestamp = it->timestamp;
    *width = it->width;
    *height = it->height;
    return;
  }
  NOTREACHED() << "Missing bitstream buffer id: " << bitstream_buffer_id;
}

}  // namespace content