// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/renderer/media/rtc_video_encoder.h"

#include "base/bind.h"
#include "base/location.h"
#include "base/logging.h"
#include "base/memory/scoped_vector.h"
#include "base/message_loop/message_loop_proxy.h"
#include "base/metrics/histogram.h"
#include "base/rand_util.h"
#include "base/synchronization/waitable_event.h"
#include "media/base/bitstream_buffer.h"
#include "media/base/video_frame.h"
#include "media/base/video_util.h"
#include "media/filters/gpu_video_accelerator_factories.h"
#include "media/video/video_encode_accelerator.h"
#include "third_party/webrtc/system_wrappers/interface/tick_util.h"

// Logs the error and forwards it to Impl::NotifyError().  Scoped to this
// translation unit (#undef'd after the Impl definitions below).
#define NOTIFY_ERROR(x)                              \
  do {                                               \
    DLOG(ERROR) << "calling NotifyError(): " << x;   \
    NotifyError(x);                                  \
  } while (0)

namespace content {

// This private class of RTCVideoEncoder does the actual work of communicating
// with a media::VideoEncodeAccelerator for handling video encoding.  It can
// be created on any thread, but should subsequently be posted to (and Destroy()
// called on) a single thread.  Callbacks to RTCVideoEncoder are posted to the
// thread on which the instance was constructed.
//
// This class separates state related to the thread that RTCVideoEncoder
// operates on (presently the libjingle worker thread) from the thread that
// |gpu_factories_| provides for accelerator operations (presently the media
// thread).  The RTCVideoEncoder class can be deleted directly by WebRTC, while
// RTCVideoEncoder::Impl stays around long enough to properly shut down the VEA.
class RTCVideoEncoder::Impl
    : public media::VideoEncodeAccelerator::Client,
      public base::RefCountedThreadSafe<RTCVideoEncoder::Impl> {
 public:
  Impl(const base::WeakPtr<RTCVideoEncoder>& weak_encoder,
       const scoped_refptr<media::GpuVideoAcceleratorFactories>& gpu_factories);

  // Create the VEA and call Initialize() on it.  Called once per
  // instantiation, and then the instance is bound forevermore to whichever
  // thread made the call.
  // RTCVideoEncoder expects to be able to call this function synchronously
  // from its own thread, hence the |async_waiter| and |async_retval|
  // arguments.
  void CreateAndInitializeVEA(const gfx::Size& input_visible_size,
                              uint32 bitrate,
                              media::VideoCodecProfile profile,
                              base::WaitableEvent* async_waiter,
                              int32_t* async_retval);
  // Enqueue a frame from WebRTC for encoding.
  // RTCVideoEncoder expects to be able to call this function synchronously
  // from its own thread, hence the |async_waiter| and |async_retval|
  // arguments.
  void Enqueue(const webrtc::I420VideoFrame* input_frame,
               bool force_keyframe,
               base::WaitableEvent* async_waiter,
               int32_t* async_retval);

  // RTCVideoEncoder is given a buffer to be passed to WebRTC through the
  // RTCVideoEncoder::ReturnEncodedImage() function.  When that is complete,
  // the buffer is returned to Impl by its index using this function.
  void UseOutputBitstreamBufferId(int32 bitstream_buffer_id);

  // Request encoding parameter change for the underlying encoder.
  void RequestEncodingParametersChange(uint32 bitrate, uint32 framerate);

  // Destroy this Impl's encoder.  The destructor is not explicitly called,
  // as Impl is a base::RefCountedThreadSafe.
  void Destroy();

  // media::VideoEncodeAccelerator::Client implementation.
  virtual void RequireBitstreamBuffers(unsigned int input_count,
                                       const gfx::Size& input_coded_size,
                                       size_t output_buffer_size) OVERRIDE;
  virtual void BitstreamBufferReady(int32 bitstream_buffer_id,
                                    size_t payload_size,
                                    bool key_frame) OVERRIDE;
  virtual void NotifyError(media::VideoEncodeAccelerator::Error error) OVERRIDE;

 private:
  friend class base::RefCountedThreadSafe<Impl>;

  enum {
    kInputBufferExtraCount = 1,  // The number of input buffers allocated, more
                                 // than what is requested by
                                 // VEA::RequireBitstreamBuffers().
    kOutputBufferCount = 3,
  };

  virtual ~Impl();

  // Perform encoding on an input frame from the input queue.
  void EncodeOneFrame();

  // Notify that an input frame is finished for encoding.  |index| is the
  // index of the completed frame in |input_buffers_|.
  void EncodeFrameFinished(int index);

  // Set up/signal |async_waiter_| and |async_retval_|; see declarations below.
  void RegisterAsyncWaiter(base::WaitableEvent* waiter, int32_t* retval);
  void SignalAsyncWaiter(int32_t retval);

  base::ThreadChecker thread_checker_;

  // Weak pointer to the parent RTCVideoEncoder, for posting back VEA::Client
  // notifications.
  const base::WeakPtr<RTCVideoEncoder> weak_encoder_;

  // The message loop on which to post callbacks to |weak_encoder_|.
  const scoped_refptr<base::MessageLoopProxy> encoder_message_loop_proxy_;

  // Factory for creating VEAs, shared memory buffers, etc.
  const scoped_refptr<media::GpuVideoAcceleratorFactories> gpu_factories_;

  // webrtc::VideoEncoder expects InitEncode() and Encode() to be synchronous.
  // Do this by waiting on the |async_waiter_| and returning the return value
  // in |async_retval_| when initialization completes, encoding completes, or
  // an error occurs.
  base::WaitableEvent* async_waiter_;
  int32_t* async_retval_;

  // The underlying VEA to perform encoding on.
  scoped_ptr<media::VideoEncodeAccelerator> video_encoder_;

  // Next input frame.  Since there is at most one next frame, a
  // single-element queue is sufficient.
  const webrtc::I420VideoFrame* input_next_frame_;

  // Whether to encode a keyframe next.
  bool input_next_frame_keyframe_;

  // Frame sizes.
  gfx::Size input_frame_coded_size_;
  gfx::Size input_visible_size_;

  // Shared memory buffers for input/output with the VEA.
  ScopedVector<base::SharedMemory> input_buffers_;
  ScopedVector<base::SharedMemory> output_buffers_;

  // Input buffers ready to be filled with input from Encode().  As a LIFO
  // since we don't care about ordering.
  std::vector<int> input_buffers_free_;

  // The number of output buffers ready to be filled with output from the
  // encoder.
  int output_buffers_free_count_;

  // 15 bits running index of the VP8 frames.  See VP8 RTP spec for details.
  uint16 picture_id_;

  DISALLOW_COPY_AND_ASSIGN(Impl);
};

RTCVideoEncoder::Impl::Impl(
    const base::WeakPtr<RTCVideoEncoder>& weak_encoder,
    const scoped_refptr<media::GpuVideoAcceleratorFactories>& gpu_factories)
    : weak_encoder_(weak_encoder),
      encoder_message_loop_proxy_(base::MessageLoopProxy::current()),
      gpu_factories_(gpu_factories),
      async_waiter_(NULL),
      async_retval_(NULL),
      input_next_frame_(NULL),
      input_next_frame_keyframe_(false),
      output_buffers_free_count_(0) {
  // Detach, as the thread on which Impl is constructed (the libjingle worker
  // thread) is not the thread it subsequently runs on (the media thread).
  thread_checker_.DetachFromThread();
  // Picture ID should start on a random number.
  picture_id_ = static_cast<uint16_t>(base::RandInt(0, 0x7FFF));
}

void RTCVideoEncoder::Impl::CreateAndInitializeVEA(
    const gfx::Size& input_visible_size,
    uint32 bitrate,
    media::VideoCodecProfile profile,
    base::WaitableEvent* async_waiter,
    int32_t* async_retval) {
  DVLOG(3) << "Impl::CreateAndInitializeVEA()";
  DCHECK(thread_checker_.CalledOnValidThread());

  RegisterAsyncWaiter(async_waiter, async_retval);

  // Check for overflow converting bitrate (kilobits/sec) to bits/sec.
  if (bitrate > kuint32max / 1000) {
    NOTIFY_ERROR(media::VideoEncodeAccelerator::kInvalidArgumentError);
    return;
  }

  video_encoder_ = gpu_factories_->CreateVideoEncodeAccelerator().Pass();
  if (!video_encoder_) {
    NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
    return;
  }
  input_visible_size_ = input_visible_size;
  // The waiter is signaled later, from RequireBitstreamBuffers() (success) or
  // NotifyError() (failure), once the VEA has responded.
  if (!video_encoder_->Initialize(media::VideoFrame::I420,
                                  input_visible_size_,
                                  profile,
                                  bitrate * 1000,
                                  this)) {
    NOTIFY_ERROR(media::VideoEncodeAccelerator::kInvalidArgumentError);
    return;
  }
}

void RTCVideoEncoder::Impl::Enqueue(const webrtc::I420VideoFrame* input_frame,
                                    bool force_keyframe,
                                    base::WaitableEvent* async_waiter,
                                    int32_t* async_retval) {
  DVLOG(3) << "Impl::Enqueue()";
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(!input_next_frame_);

  RegisterAsyncWaiter(async_waiter, async_retval);
  // If there are no free input and output buffers, drop the frame to avoid a
  // deadlock.  If there is a free input buffer, EncodeOneFrame will run and
  // unblock Encode().  If there are no free input buffers but there is a free
  // output buffer, EncodeFrameFinished will be called later to unblock
  // Encode().
  //
  // The caller of Encode() holds a webrtc lock.  The deadlock happens when:
  // (1) Encode() is waiting for the frame to be encoded in EncodeOneFrame().
  // (2) There are no free input buffers and they cannot be freed because
  //     the encoder has no output buffers.
  // (3) Output buffers cannot be freed because ReturnEncodedImage is queued
  //     on libjingle worker thread to be run.  But the worker thread is
  //     waiting for the same webrtc lock held by the caller of Encode().
  //
  // Dropping a frame is fine.  The encoder has been filled with all input
  // buffers.  Returning an error in Encode() is not fatal and WebRTC will
  // just continue.  If this is a key frame, WebRTC will request a key frame
  // again.  Besides, webrtc will drop a frame if Encode() blocks too long.
  if (input_buffers_free_.empty() && output_buffers_free_count_ == 0) {
    DVLOG(2) << "Run out of input and output buffers. Drop the frame.";
    SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_ERROR);
    return;
  }
  input_next_frame_ = input_frame;
  input_next_frame_keyframe_ = force_keyframe;

  if (!input_buffers_free_.empty())
    EncodeOneFrame();
}

void RTCVideoEncoder::Impl::UseOutputBitstreamBufferId(
    int32 bitstream_buffer_id) {
  DVLOG(3) << "Impl::UseOutputBitstreamBufferIndex(): "
              "bitstream_buffer_id=" << bitstream_buffer_id;
  DCHECK(thread_checker_.CalledOnValidThread());
  if (video_encoder_) {
    video_encoder_->UseOutputBitstreamBuffer(media::BitstreamBuffer(
        bitstream_buffer_id,
        output_buffers_[bitstream_buffer_id]->handle(),
        output_buffers_[bitstream_buffer_id]->mapped_size()));
    output_buffers_free_count_++;
  }
}

void RTCVideoEncoder::Impl::RequestEncodingParametersChange(uint32 bitrate,
                                                            uint32 framerate) {
  DVLOG(3) << "Impl::RequestEncodingParametersChange(): bitrate=" << bitrate
           << ", framerate=" << framerate;
  DCHECK(thread_checker_.CalledOnValidThread());

  // Check for overflow converting bitrate (kilobits/sec) to bits/sec.
  if (bitrate > kuint32max / 1000) {
    NOTIFY_ERROR(media::VideoEncodeAccelerator::kInvalidArgumentError);
    return;
  }

  if (video_encoder_)
    video_encoder_->RequestEncodingParametersChange(bitrate * 1000, framerate);
}

void RTCVideoEncoder::Impl::Destroy() {
  DVLOG(3) << "Impl::Destroy()";
  DCHECK(thread_checker_.CalledOnValidThread());
  video_encoder_.reset();
}

void RTCVideoEncoder::Impl::RequireBitstreamBuffers(
    unsigned int input_count,
    const gfx::Size& input_coded_size,
    size_t output_buffer_size) {
  DVLOG(3) << "Impl::RequireBitstreamBuffers(): input_count=" << input_count
           << ", input_coded_size=" << input_coded_size.ToString()
           << ", output_buffer_size=" << output_buffer_size;
  DCHECK(thread_checker_.CalledOnValidThread());

  if (!video_encoder_)
    return;

  input_frame_coded_size_ = input_coded_size;

  // Allocate one extra input buffer beyond what the VEA requested; see
  // kInputBufferExtraCount.
  for (unsigned int i = 0; i < input_count + kInputBufferExtraCount; ++i) {
    base::SharedMemory* shm =
        gpu_factories_->CreateSharedMemory(media::VideoFrame::AllocationSize(
            media::VideoFrame::I420, input_coded_size));
    if (!shm) {
      DLOG(ERROR) << "Impl::RequireBitstreamBuffers(): "
                     "failed to create input buffer " << i;
      NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
      return;
    }
    input_buffers_.push_back(shm);
    input_buffers_free_.push_back(i);
  }

  for (int i = 0; i < kOutputBufferCount; ++i) {
    base::SharedMemory* shm =
        gpu_factories_->CreateSharedMemory(output_buffer_size);
    if (!shm) {
      DLOG(ERROR) << "Impl::RequireBitstreamBuffers(): "
                     "failed to create output buffer " << i;
      NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
      return;
    }
    output_buffers_.push_back(shm);
  }

  // Immediately provide all output buffers to the VEA.
  for (size_t i = 0; i < output_buffers_.size(); ++i) {
    video_encoder_->UseOutputBitstreamBuffer(media::BitstreamBuffer(
        i, output_buffers_[i]->handle(), output_buffers_[i]->mapped_size()));
    output_buffers_free_count_++;
  }
  // Unblocks the InitEncode() call waiting in CreateAndInitializeVEA().
  SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_OK);
}

void RTCVideoEncoder::Impl::BitstreamBufferReady(int32 bitstream_buffer_id,
                                                 size_t payload_size,
                                                 bool key_frame) {
  DVLOG(3) << "Impl::BitstreamBufferReady(): "
              "bitstream_buffer_id=" << bitstream_buffer_id
           << ", payload_size=" << payload_size
           << ", key_frame=" << key_frame;
  DCHECK(thread_checker_.CalledOnValidThread());

  if (bitstream_buffer_id < 0 ||
      bitstream_buffer_id >= static_cast<int>(output_buffers_.size())) {
    DLOG(ERROR) << "Impl::BitstreamBufferReady(): invalid bitstream_buffer_id="
                << bitstream_buffer_id;
    NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
    return;
  }
  base::SharedMemory* output_buffer = output_buffers_[bitstream_buffer_id];
  if (payload_size > output_buffer->mapped_size()) {
    DLOG(ERROR) << "Impl::BitstreamBufferReady(): invalid payload_size="
                << payload_size;
    NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
    return;
  }
  output_buffers_free_count_--;

  // Use webrtc timestamps to ensure correct RTP sender behavior.
  // TODO(hshi): obtain timestamp from the capturer, see crbug.com/350106.
  const int64 capture_time_us = webrtc::TickTime::MicrosecondTimestamp();

  // Derive the capture time (in ms) and RTP timestamp (in 90KHz ticks).
  int64 capture_time_ms = capture_time_us / 1000;
  uint32_t rtp_timestamp = static_cast<uint32_t>(capture_time_us * 90 / 1000);

  scoped_ptr<webrtc::EncodedImage> image(new webrtc::EncodedImage(
      reinterpret_cast<uint8_t*>(output_buffer->memory()),
      payload_size,
      output_buffer->mapped_size()));
  image->_encodedWidth = input_visible_size_.width();
  image->_encodedHeight = input_visible_size_.height();
  image->_timeStamp = rtp_timestamp;
  image->capture_time_ms_ = capture_time_ms;
  image->_frameType = (key_frame ? webrtc::kKeyFrame : webrtc::kDeltaFrame);
  image->_completeFrame = true;

  encoder_message_loop_proxy_->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoEncoder::ReturnEncodedImage,
                 weak_encoder_,
                 base::Passed(&image),
                 bitstream_buffer_id,
                 picture_id_));
  // Picture ID must wrap after reaching the maximum.
  picture_id_ = (picture_id_ + 1) & 0x7FFF;
}

void RTCVideoEncoder::Impl::NotifyError(
    media::VideoEncodeAccelerator::Error error) {
  DVLOG(3) << "Impl::NotifyError(): error=" << error;
  DCHECK(thread_checker_.CalledOnValidThread());
  int32_t retval;
  switch (error) {
    case media::VideoEncodeAccelerator::kInvalidArgumentError:
      retval = WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
      break;
    default:
      retval = WEBRTC_VIDEO_CODEC_ERROR;
  }

  video_encoder_.reset();

  // If a synchronous caller is blocked, unblock it with the error; otherwise
  // report the error back to the parent RTCVideoEncoder on its thread.
  if (async_waiter_) {
    SignalAsyncWaiter(retval);
  } else {
    encoder_message_loop_proxy_->PostTask(
        FROM_HERE,
        base::Bind(&RTCVideoEncoder::NotifyError, weak_encoder_, retval));
  }
}

RTCVideoEncoder::Impl::~Impl() { DCHECK(!video_encoder_); }

void RTCVideoEncoder::Impl::EncodeOneFrame() {
  DVLOG(3) << "Impl::EncodeOneFrame()";
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(input_next_frame_);
  DCHECK(!input_buffers_free_.empty());

  // EncodeOneFrame() may re-enter EncodeFrameFinished() if VEA::Encode()
  // fails, we receive a VEA::NotifyError(), and the media::VideoFrame we pass
  // to Encode() gets destroyed early.  Handle this by resetting our
  // input_next_frame_* state before we hand off the VideoFrame to the VEA.
  const webrtc::I420VideoFrame* next_frame = input_next_frame_;
  bool next_frame_keyframe = input_next_frame_keyframe_;
  input_next_frame_ = NULL;
  input_next_frame_keyframe_ = false;

  if (!video_encoder_) {
    SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_ERROR);
    return;
  }

  const int index = input_buffers_free_.back();
  base::SharedMemory* input_buffer = input_buffers_[index];
  scoped_refptr<media::VideoFrame> frame =
      media::VideoFrame::WrapExternalPackedMemory(
          media::VideoFrame::I420,
          input_frame_coded_size_,
          gfx::Rect(input_visible_size_),
          input_visible_size_,
          reinterpret_cast<uint8*>(input_buffer->memory()),
          input_buffer->mapped_size(),
          input_buffer->handle(),
          base::TimeDelta(),
          base::Bind(&RTCVideoEncoder::Impl::EncodeFrameFinished, this, index));
  if (!frame) {
    DLOG(ERROR) << "Impl::EncodeOneFrame(): failed to create frame";
    NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
    return;
  }

  // Do a strided copy of the input frame to match the input requirements for
  // the encoder.
  // TODO(sheu): support zero-copy from WebRTC.  http://crbug.com/269312
  media::CopyYPlane(next_frame->buffer(webrtc::kYPlane),
                    next_frame->stride(webrtc::kYPlane),
                    next_frame->height(),
                    frame.get());
  media::CopyUPlane(next_frame->buffer(webrtc::kUPlane),
                    next_frame->stride(webrtc::kUPlane),
                    next_frame->height(),
                    frame.get());
  media::CopyVPlane(next_frame->buffer(webrtc::kVPlane),
                    next_frame->stride(webrtc::kVPlane),
                    next_frame->height(),
                    frame.get());

  video_encoder_->Encode(frame, next_frame_keyframe);
  input_buffers_free_.pop_back();
  SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_OK);
}

void RTCVideoEncoder::Impl::EncodeFrameFinished(int index) {
  DVLOG(3) << "Impl::EncodeFrameFinished(): index=" << index;
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK_GE(index, 0);
  DCHECK_LT(index, static_cast<int>(input_buffers_.size()));
  input_buffers_free_.push_back(index);
  // A frame may have been queued while no input buffer was free; encode it
  // now that one is available.
  if (input_next_frame_)
    EncodeOneFrame();
}

void RTCVideoEncoder::Impl::RegisterAsyncWaiter(base::WaitableEvent* waiter,
                                                int32_t* retval) {
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(!async_waiter_);
  DCHECK(!async_retval_);
  async_waiter_ = waiter;
  async_retval_ = retval;
}

void RTCVideoEncoder::Impl::SignalAsyncWaiter(int32_t retval) {
  DCHECK(thread_checker_.CalledOnValidThread());
  *async_retval_ = retval;
  async_waiter_->Signal();
  // Clear the registration; the waiter/retval pointers are only valid for the
  // duration of the synchronous call being unblocked.
  async_retval_ = NULL;
  async_waiter_ = NULL;
}

#undef NOTIFY_ERROR

////////////////////////////////////////////////////////////////////////////////
//
// RTCVideoEncoder
//
////////////////////////////////////////////////////////////////////////////////

RTCVideoEncoder::RTCVideoEncoder(
    webrtc::VideoCodecType type,
    media::VideoCodecProfile profile,
    const scoped_refptr<media::GpuVideoAcceleratorFactories>& gpu_factories)
    : video_codec_type_(type),
      video_codec_profile_(profile),
      gpu_factories_(gpu_factories),
      encoded_image_callback_(NULL),
      impl_status_(WEBRTC_VIDEO_CODEC_UNINITIALIZED),
      weak_factory_(this) {
  DVLOG(1) << "RTCVideoEncoder(): profile=" << profile;
}

RTCVideoEncoder::~RTCVideoEncoder() {
  DCHECK(thread_checker_.CalledOnValidThread());
  Release();
  DCHECK(!impl_);
}

int32_t RTCVideoEncoder::InitEncode(const webrtc::VideoCodec* codec_settings,
                                    int32_t number_of_cores,
                                    uint32_t max_payload_size) {
  DVLOG(1) << "InitEncode(): codecType=" << codec_settings->codecType
           << ", width=" << codec_settings->width
           << ", height=" << codec_settings->height
           << ", startBitrate=" << codec_settings->startBitrate;
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(!impl_);

  weak_factory_.InvalidateWeakPtrs();
  impl_ = new Impl(weak_factory_.GetWeakPtr(), gpu_factories_);
  base::WaitableEvent initialization_waiter(true, false);
  int32_t initialization_retval = WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  gpu_factories_->GetTaskRunner()->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoEncoder::Impl::CreateAndInitializeVEA,
                 impl_,
                 gfx::Size(codec_settings->width, codec_settings->height),
                 codec_settings->startBitrate,
                 video_codec_profile_,
                 &initialization_waiter,
                 &initialization_retval));

  // webrtc::VideoEncoder expects this call to be synchronous.
  initialization_waiter.Wait();
  RecordInitEncodeUMA(initialization_retval);
  return initialization_retval;
}

int32_t RTCVideoEncoder::Encode(
    const webrtc::I420VideoFrame& input_image,
    const webrtc::CodecSpecificInfo* codec_specific_info,
    const std::vector<webrtc::VideoFrameType>* frame_types) {
  DVLOG(3) << "Encode()";
  // TODO(sheu): figure out why this check fails.
  // DCHECK(thread_checker_.CalledOnValidThread());
  if (!impl_) {
    DVLOG(3) << "Encode(): returning impl_status_=" << impl_status_;
    return impl_status_;
  }

  bool want_key_frame = frame_types && frame_types->size() &&
                        frame_types->front() == webrtc::kKeyFrame;
  base::WaitableEvent encode_waiter(true, false);
  int32_t encode_retval = WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  gpu_factories_->GetTaskRunner()->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoEncoder::Impl::Enqueue,
                 impl_,
                 &input_image,
                 want_key_frame,
                 &encode_waiter,
                 &encode_retval));

  // webrtc::VideoEncoder expects this call to be synchronous.
  encode_waiter.Wait();
  DVLOG(3) << "Encode(): returning encode_retval=" << encode_retval;
  return encode_retval;
}

int32_t RTCVideoEncoder::RegisterEncodeCompleteCallback(
    webrtc::EncodedImageCallback* callback) {
  DVLOG(3) << "RegisterEncodeCompleteCallback()";
  DCHECK(thread_checker_.CalledOnValidThread());
  if (!impl_) {
    DVLOG(3) << "RegisterEncodeCompleteCallback(): returning " << impl_status_;
    return impl_status_;
  }

  encoded_image_callback_ = callback;
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t RTCVideoEncoder::Release() {
  DVLOG(3) << "Release()";
  DCHECK(thread_checker_.CalledOnValidThread());

  if (impl_) {
    // Impl is ref-counted; Destroy() shuts down its VEA on the media thread
    // and the last reference (held by the posted task) releases it.
    gpu_factories_->GetTaskRunner()->PostTask(
        FROM_HERE, base::Bind(&RTCVideoEncoder::Impl::Destroy, impl_));
    impl_ = NULL;
    weak_factory_.InvalidateWeakPtrs();
    impl_status_ = WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t RTCVideoEncoder::SetChannelParameters(uint32_t packet_loss, int rtt) {
  DVLOG(3) << "SetChannelParameters(): packet_loss=" << packet_loss
           << ", rtt=" << rtt;
  DCHECK(thread_checker_.CalledOnValidThread());
  // Ignored.
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t RTCVideoEncoder::SetRates(uint32_t new_bit_rate, uint32_t frame_rate) {
  DVLOG(3) << "SetRates(): new_bit_rate=" << new_bit_rate
           << ", frame_rate=" << frame_rate;
  DCHECK(thread_checker_.CalledOnValidThread());
  if (!impl_) {
    DVLOG(3) << "SetRates(): returning " << impl_status_;
    return impl_status_;
  }

  gpu_factories_->GetTaskRunner()->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoEncoder::Impl::RequestEncodingParametersChange,
                 impl_,
                 new_bit_rate,
                 frame_rate));
  return WEBRTC_VIDEO_CODEC_OK;
}

void RTCVideoEncoder::ReturnEncodedImage(scoped_ptr<webrtc::EncodedImage> image,
                                         int32 bitstream_buffer_id,
                                         uint16 picture_id) {
  DCHECK(thread_checker_.CalledOnValidThread());
  DVLOG(3) << "ReturnEncodedImage(): "
           << "bitstream_buffer_id=" << bitstream_buffer_id
           << ", picture_id=" << picture_id;

  if (!encoded_image_callback_)
    return;

  webrtc::CodecSpecificInfo info;
  memset(&info, 0, sizeof(info));
  info.codecType = video_codec_type_;
  if (video_codec_type_ == webrtc::kVideoCodecVP8) {
    info.codecSpecific.VP8.pictureId = picture_id;
    info.codecSpecific.VP8.tl0PicIdx = -1;
    info.codecSpecific.VP8.keyIdx = -1;
  }

  // Generate a header describing a single fragment.
  webrtc::RTPFragmentationHeader header;
  memset(&header, 0, sizeof(header));
  header.VerifyAndAllocateFragmentationHeader(1);
  header.fragmentationOffset[0] = 0;
  header.fragmentationLength[0] = image->_length;
  header.fragmentationPlType[0] = 0;
  header.fragmentationTimeDiff[0] = 0;

  int32_t retval = encoded_image_callback_->Encoded(*image, &info, &header);
  if (retval < 0) {
    DVLOG(2) << "ReturnEncodedImage(): encoded_image_callback_ returned "
             << retval;
  }

  // The call through webrtc::EncodedImageCallback is synchronous, so we can
  // immediately recycle the output buffer back to the Impl.
  gpu_factories_->GetTaskRunner()->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoEncoder::Impl::UseOutputBitstreamBufferId,
                 impl_,
                 bitstream_buffer_id));
}

void RTCVideoEncoder::NotifyError(int32_t error) {
  DCHECK(thread_checker_.CalledOnValidThread());
  DVLOG(1) << "NotifyError(): error=" << error;

  impl_status_ = error;
  gpu_factories_->GetTaskRunner()->PostTask(
      FROM_HERE, base::Bind(&RTCVideoEncoder::Impl::Destroy, impl_));
  impl_ = NULL;
}

void RTCVideoEncoder::RecordInitEncodeUMA(int32_t init_retval) {
  UMA_HISTOGRAM_BOOLEAN("Media.RTCVideoEncoderInitEncodeSuccess",
                        init_retval == WEBRTC_VIDEO_CODEC_OK);
  if (init_retval == WEBRTC_VIDEO_CODEC_OK) {
    UMA_HISTOGRAM_ENUMERATION("Media.RTCVideoEncoderProfile",
                              video_codec_profile_,
                              media::VIDEO_CODEC_PROFILE_MAX + 1);
  }
}

}  // namespace content