/*
 * libjingle
 * Copyright 2011 Google Inc.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 *  1. Redistributions of source code must retain the above copyright notice,
 *     this list of conditions and the following disclaimer.
 *  2. Redistributions in binary form must reproduce the above copyright notice,
 *     this list of conditions and the following disclaimer in the documentation
 *     and/or other materials provided with the distribution.
 *  3. The name of the author may not be used to endorse or promote products
 *     derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "talk/media/webrtc/webrtcvideoframe.h"

#include "libyuv/convert.h"
#include "libyuv/convert_from.h"
#include "libyuv/planar_functions.h"
#include "talk/base/logging.h"
#include "talk/media/base/videocapturer.h"
#include "talk/media/base/videocommon.h"

namespace cricket {

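// Typical use, as a sketch (|captured| stands for a hypothetical
// cricket::CapturedFrame delivered by a VideoCapturer callback):
//
//   WebRtcVideoFrame frame;
//   if (frame.Init(captured, captured->width, captured->height)) {
//     // The frame now holds I420 data; use GetYPlane(), CopyToBuffer() or
//     // ConvertToRgbBuffer() to read it back out.
//   }
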
// Class that wraps ownership semantics of a buffer passed to it.
// * Buffers passed using Attach() become owned by this FrameBuffer and will be
//   destroyed on FrameBuffer destruction.
// * Buffers passed using Alias() are not owned and will not be destroyed on
//   FrameBuffer destruction. The buffer must then outlive the FrameBuffer.
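// For example, Attach(new uint8[size], size) hands the allocation to this
// FrameBuffer, while Alias(capture_buffer, size) only points at memory the
// caller keeps owning (a sketch; |capture_buffer| and |size| are hypothetical).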
class WebRtcVideoFrame::FrameBuffer {
 public:
  FrameBuffer();
  explicit FrameBuffer(size_t length);
  ~FrameBuffer();

  void Attach(uint8* data, size_t length);
  void Alias(uint8* data, size_t length);
  uint8* data();
  size_t length() const;

  webrtc::VideoFrame* frame();
  const webrtc::VideoFrame* frame() const;

 private:
  talk_base::scoped_ptr<uint8[]> owned_data_;
  webrtc::VideoFrame video_frame_;
};

WebRtcVideoFrame::FrameBuffer::FrameBuffer() {}

WebRtcVideoFrame::FrameBuffer::FrameBuffer(size_t length) {
  uint8* buffer = new uint8[length];
  Attach(buffer, length);
}

WebRtcVideoFrame::FrameBuffer::~FrameBuffer() {
  // Make sure that |video_frame_| doesn't delete the buffer, as |owned_data_|
  // will release the buffer if this FrameBuffer owns it.
  uint8_t* new_memory = NULL;
  uint32_t new_length = 0;
  uint32_t new_size = 0;
  video_frame_.Swap(new_memory, new_length, new_size);
}

void WebRtcVideoFrame::FrameBuffer::Attach(uint8* data, size_t length) {
  Alias(data, length);
  owned_data_.reset(data);
}

void WebRtcVideoFrame::FrameBuffer::Alias(uint8* data, size_t length) {
  owned_data_.reset();
  uint8_t* new_memory = reinterpret_cast<uint8_t*>(data);
  uint32_t new_length = static_cast<uint32_t>(length);
  uint32_t new_size = static_cast<uint32_t>(length);
  video_frame_.Swap(new_memory, new_length, new_size);
}

uint8* WebRtcVideoFrame::FrameBuffer::data() {
  return video_frame_.Buffer();
}

size_t WebRtcVideoFrame::FrameBuffer::length() const {
  return video_frame_.Length();
}

webrtc::VideoFrame* WebRtcVideoFrame::FrameBuffer::frame() {
  return &video_frame_;
}

const webrtc::VideoFrame* WebRtcVideoFrame::FrameBuffer::frame() const {
  return &video_frame_;
}

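// RefCountedBuffer (declared in webrtcvideoframe.h) is assumed here to be a
// reference-counted wrapper around FrameBuffer, which lets several
// WebRtcVideoFrame instances share one pixel buffer via scoped_refptr.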
WebRtcVideoFrame::WebRtcVideoFrame()
    : video_buffer_(new RefCountedBuffer()), is_black_(false) {}

WebRtcVideoFrame::~WebRtcVideoFrame() {}

bool WebRtcVideoFrame::Init(
    uint32 format, int w, int h, int dw, int dh, uint8* sample,
    size_t sample_size, size_t pixel_width, size_t pixel_height,
    int64 elapsed_time, int64 time_stamp, int rotation) {
  return Reset(format, w, h, dw, dh, sample, sample_size, pixel_width,
               pixel_height, elapsed_time, time_stamp, rotation);
}

bool WebRtcVideoFrame::Init(const CapturedFrame* frame, int dw, int dh) {
  return Reset(frame->fourcc, frame->width, frame->height, dw, dh,
               static_cast<uint8*>(frame->data), frame->data_size,
               frame->pixel_width, frame->pixel_height, frame->elapsed_time,
               frame->time_stamp, frame->rotation);
}

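// Aliasing avoids copying the capturer's buffer; it is only done when the
// captured data is already unrotated I420 at the requested size, since
// anything else must be converted into a freshly allocated buffer (the Init()
// path). The aliased buffer must outlive this frame.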
bool WebRtcVideoFrame::Alias(const CapturedFrame* frame, int dw, int dh) {
  if (CanonicalFourCC(frame->fourcc) != FOURCC_I420 || frame->rotation != 0 ||
      frame->width != dw || frame->height != dh) {
    // TODO(fbarchard): Enable aliasing of more formats.
    return Init(frame, dw, dh);
  } else {
    Alias(static_cast<uint8*>(frame->data),
          frame->data_size,
          frame->width,
          frame->height,
          frame->pixel_width,
          frame->pixel_height,
          frame->elapsed_time,
          frame->time_stamp,
          frame->rotation);
    return true;
  }
}

bool WebRtcVideoFrame::InitToBlack(int w, int h, size_t pixel_width,
                                   size_t pixel_height, int64 elapsed_time,
                                   int64 time_stamp) {
  InitToEmptyBuffer(w, h, pixel_width, pixel_height, elapsed_time, time_stamp);
  if (!is_black_) {
    return SetToBlack();
  }
  return true;
}

void WebRtcVideoFrame::Alias(
    uint8* buffer, size_t buffer_size, int w, int h, size_t pixel_width,
    size_t pixel_height, int64 elapsed_time, int64 time_stamp, int rotation) {
  talk_base::scoped_refptr<RefCountedBuffer> video_buffer(
      new RefCountedBuffer());
  video_buffer->Alias(buffer, buffer_size);
  Attach(video_buffer.get(), buffer_size, w, h, pixel_width, pixel_height,
         elapsed_time, time_stamp, rotation);
}

size_t WebRtcVideoFrame::GetWidth() const { return frame()->Width(); }

size_t WebRtcVideoFrame::GetHeight() const { return frame()->Height(); }

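// The pixel data is one contiguous I420 buffer: a full-resolution Y plane
// followed by quarter-resolution U and V planes, so the accessors below are
// simply offsets into the same buffer.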
const uint8* WebRtcVideoFrame::GetYPlane() const {
  uint8_t* buffer = frame()->Buffer();
  return buffer;
}

const uint8* WebRtcVideoFrame::GetUPlane() const {
  uint8_t* buffer = frame()->Buffer();
  if (buffer) {
    buffer += (frame()->Width() * frame()->Height());
  }
  return buffer;
}

const uint8* WebRtcVideoFrame::GetVPlane() const {
  uint8_t* buffer = frame()->Buffer();
  if (buffer) {
    int uv_size = static_cast<int>(GetChromaSize());
    buffer += frame()->Width() * frame()->Height() + uv_size;
  }
  return buffer;
}

uint8* WebRtcVideoFrame::GetYPlane() {
  uint8_t* buffer = frame()->Buffer();
  return buffer;
}

uint8* WebRtcVideoFrame::GetUPlane() {
  uint8_t* buffer = frame()->Buffer();
  if (buffer) {
    buffer += (frame()->Width() * frame()->Height());
  }
  return buffer;
}

uint8* WebRtcVideoFrame::GetVPlane() {
  uint8_t* buffer = frame()->Buffer();
  if (buffer) {
    int uv_size = static_cast<int>(GetChromaSize());
    buffer += frame()->Width() * frame()->Height() + uv_size;
  }
  return buffer;
}

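// Copy() is shallow: the returned frame shares this frame's ref-counted pixel
// buffer. MakeExclusive() below swaps in a private copy of the pixel data.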
VideoFrame* WebRtcVideoFrame::Copy() const {
  uint8* old_buffer = video_buffer_->data();
  if (!old_buffer)
    return NULL;
  size_t new_buffer_size = video_buffer_->length();

  WebRtcVideoFrame* ret_val = new WebRtcVideoFrame();
  ret_val->Attach(video_buffer_.get(), new_buffer_size, frame()->Width(),
                  frame()->Height(), pixel_width_, pixel_height_, elapsed_time_,
                  time_stamp_, rotation_);
  return ret_val;
}

bool WebRtcVideoFrame::MakeExclusive() {
  const size_t length = video_buffer_->length();
  RefCountedBuffer* exclusive_buffer = new RefCountedBuffer(length);
  memcpy(exclusive_buffer->data(), video_buffer_->data(), length);
  Attach(exclusive_buffer, length, frame()->Width(), frame()->Height(),
         pixel_width_, pixel_height_, elapsed_time_, time_stamp_, rotation_);
  return true;
}

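// Returns the number of bytes the frame needs; the copy only happens when
// |size| is large enough, so a caller can first call this with a too-small
// buffer to learn the required size.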
size_t WebRtcVideoFrame::CopyToBuffer(uint8* buffer, size_t size) const {
  if (!frame()->Buffer()) {
    return 0;
  }

  size_t needed = frame()->Length();
  if (needed <= size) {
    memcpy(buffer, frame()->Buffer(), needed);
  }
  return needed;
}

// TODO(fbarchard): Refactor into base class and share with lmi
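// |stride_rgb| may be negative (presumably for bottom-up destinations such as
// Windows bitmaps); the required buffer size is computed from its absolute
// value.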
size_t WebRtcVideoFrame::ConvertToRgbBuffer(uint32 to_fourcc, uint8* buffer,
                                            size_t size, int stride_rgb) const {
  if (!frame()->Buffer()) {
    return 0;
  }
  size_t width = frame()->Width();
  size_t height = frame()->Height();
  size_t needed = (stride_rgb >= 0 ? stride_rgb : -stride_rgb) * height;
  if (size < needed) {
    LOG(LS_WARNING) << "RGB buffer is not large enough";
    return needed;
  }

  if (libyuv::ConvertFromI420(GetYPlane(), GetYPitch(), GetUPlane(),
                              GetUPitch(), GetVPlane(), GetVPitch(), buffer,
                              stride_rgb,
                              static_cast<int>(width),
                              static_cast<int>(height),
                              to_fourcc)) {
    LOG(LS_WARNING) << "RGB type not supported: " << to_fourcc;
    return 0;  // 0 indicates error
  }
  return needed;
}

void WebRtcVideoFrame::Attach(
    RefCountedBuffer* video_buffer, size_t buffer_size, int w, int h,
    size_t pixel_width, size_t pixel_height, int64 elapsed_time,
    int64 time_stamp, int rotation) {
  if (video_buffer_.get() == video_buffer) {
    return;
  }
  is_black_ = false;
  video_buffer_ = video_buffer;
  frame()->SetWidth(w);
  frame()->SetHeight(h);
  pixel_width_ = pixel_width;
  pixel_height_ = pixel_height;
  elapsed_time_ = elapsed_time;
  time_stamp_ = time_stamp;
  rotation_ = rotation;
}

webrtc::VideoFrame* WebRtcVideoFrame::frame() {
  return video_buffer_->frame();
}

const webrtc::VideoFrame* WebRtcVideoFrame::frame() const {
  return video_buffer_->frame();
}

bool WebRtcVideoFrame::Reset(
    uint32 format, int w, int h, int dw, int dh, uint8* sample,
    size_t sample_size, size_t pixel_width, size_t pixel_height,
    int64 elapsed_time, int64 time_stamp, int rotation) {
  if (!Validate(format, w, h, sample, sample_size)) {
    return false;
  }
  // Translate aliases to standard enums (e.g., IYUV -> I420).
  format = CanonicalFourCC(format);

  // Round display width and height down to a multiple of 4, to avoid webrtc
  // size calculation errors on odd sizes.
  // TODO(Ronghua): Remove this once the webrtc allocator is fixed.
  dw = (dw > 4) ? (dw & ~3) : dw;
  dh = (dh > 4) ? (dh & ~3) : dh;

  // Set up a new buffer.
  // TODO(fbarchard): Support lazy allocation.
  int new_width = dw;
  int new_height = dh;
  if (rotation == 90 || rotation == 270) {  // If rotated, swap width and height.
    new_width = dh;
    new_height = dw;
  }

  size_t desired_size = SizeOf(new_width, new_height);
  talk_base::scoped_refptr<RefCountedBuffer> video_buffer(
      new RefCountedBuffer(desired_size));
  // libyuv::ConvertToI420 handles the rotation, so the new frame's rotation
  // should always be 0.
  Attach(video_buffer.get(), desired_size, new_width, new_height, pixel_width,
         pixel_height, elapsed_time, time_stamp, 0);

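  // Crop offsets are centered and rounded down to even values so they stay
  // aligned with I420's 2x2 chroma subsampling.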
  int horiz_crop = ((w - dw) / 2) & ~1;
  // ARGB on Windows has negative height.
  // The sample's layout in memory is normal, so just correct the crop.
  int vert_crop = ((abs(h) - dh) / 2) & ~1;
  // Conversion functions expect negative height to flip the image.
  int idh = (h < 0) ? -dh : dh;
  uint8* y = GetYPlane();
  int y_stride = GetYPitch();
  uint8* u = GetUPlane();
  int u_stride = GetUPitch();
  uint8* v = GetVPlane();
  int v_stride = GetVPitch();
  int r = libyuv::ConvertToI420(
      sample, sample_size, y, y_stride, u, u_stride, v, v_stride, horiz_crop,
      vert_crop, w, h, dw, idh, static_cast<libyuv::RotationMode>(rotation),
      format);
  if (r) {
    LOG(LS_ERROR) << "Error parsing format: " << GetFourccName(format)
                  << " return code : " << r;
    return false;
  }
  return true;
}

VideoFrame* WebRtcVideoFrame::CreateEmptyFrame(
    int w, int h, size_t pixel_width, size_t pixel_height, int64 elapsed_time,
    int64 time_stamp) const {
  WebRtcVideoFrame* frame = new WebRtcVideoFrame();
  frame->InitToEmptyBuffer(w, h, pixel_width, pixel_height, elapsed_time,
                           time_stamp);
  return frame;
}

void WebRtcVideoFrame::InitToEmptyBuffer(int w, int h, size_t pixel_width,
                                         size_t pixel_height,
                                         int64 elapsed_time, int64 time_stamp) {
  size_t buffer_size = VideoFrame::SizeOf(w, h);
  talk_base::scoped_refptr<RefCountedBuffer> video_buffer(
      new RefCountedBuffer(buffer_size));
  Attach(video_buffer.get(), buffer_size, w, h, pixel_width, pixel_height,
         elapsed_time, time_stamp, 0);
}

}  // namespace cricket