// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "media/base/video_frame.h"

#include <algorithm>

#include "base/bind.h"
#include "base/callback_helpers.h"
#include "base/logging.h"
#include "base/memory/aligned_memory.h"
#include "base/strings/string_piece.h"
#include "media/base/limits.h"
#include "media/base/video_util.h"
#include "third_party/skia/include/core/SkBitmap.h"

namespace media {

// Creates a frame whose pixel memory is owned by the frame itself.  The
// appropriate Allocate*() helper is chosen from |format|; the allocated
// buffer is released via |no_longer_needed_cb_| when the frame dies (see
// ReleaseData() below).  Crashes (LOG(FATAL)) on formats that cannot be
// heap-allocated here (e.g. NATIVE_TEXTURE, EMPTY).
//
// static
scoped_refptr<VideoFrame> VideoFrame::CreateFrame(
    VideoFrame::Format format,
    const gfx::Size& coded_size,
    const gfx::Rect& visible_rect,
    const gfx::Size& natural_size,
    base::TimeDelta timestamp) {
  // Callers are expected to pass a validated geometry; this only DCHECKs.
  DCHECK(IsValidConfig(format, coded_size, visible_rect, natural_size));
  scoped_refptr<VideoFrame> frame(new VideoFrame(
      format, coded_size, visible_rect, natural_size, timestamp));
  switch (format) {
    case VideoFrame::RGB32:
      frame->AllocateRGB(4u);  // 4 bytes per pixel.
      break;
    case VideoFrame::YV12:
    case VideoFrame::YV12A:
    case VideoFrame::YV16:
    case VideoFrame::I420:
      frame->AllocateYUV();
      break;
    default:
      LOG(FATAL) << "Unsupported frame format: " << format;
  }
  return frame;
}

// Returns a human-readable name for |format|, for logging/debugging.
// Falls through to NOTREACHED() (and returns "") for out-of-range values.
//
// static
std::string VideoFrame::FormatToString(VideoFrame::Format format) {
  switch (format) {
    case VideoFrame::INVALID:
      return "INVALID";
    case VideoFrame::RGB32:
      return "RGB32";
    case VideoFrame::YV12:
      return "YV12";
    case VideoFrame::YV16:
      return "YV16";
    case VideoFrame::EMPTY:
      return "EMPTY";
    case VideoFrame::I420:
      return "I420";
    case VideoFrame::NATIVE_TEXTURE:
      return "NATIVE_TEXTURE";
#if defined(GOOGLE_TV)
    case VideoFrame::HOLE:
      return "HOLE";
#endif
    case VideoFrame::YV12A:
      return "YV12A";
  }
  NOTREACHED() << "Invalid videoframe format provided: " << format;
  return "";
}

// static
bool VideoFrame::IsValidConfig(VideoFrame::Format format,
                               const gfx::Size& coded_size,
                               const gfx::Rect& visible_rect,
                               const gfx::Size& natural_size) {
  // A config is valid when the format is set, all three geometry values are
  // non-empty and within the limits:: bounds, and the visible rect lies
  // entirely inside the coded size.
  return (format != VideoFrame::INVALID &&
          !coded_size.IsEmpty() &&
          coded_size.GetArea() <= limits::kMaxCanvas &&
          coded_size.width() <= limits::kMaxDimension &&
          coded_size.height() <= limits::kMaxDimension &&
          !visible_rect.IsEmpty() &&
          visible_rect.x() >= 0 && visible_rect.y() >= 0 &&
          visible_rect.right() <= coded_size.width() &&
          visible_rect.bottom() <= coded_size.height() &&
          !natural_size.IsEmpty() &&
          natural_size.GetArea() <= limits::kMaxCanvas &&
          natural_size.width() <= limits::kMaxDimension &&
          natural_size.height() <= limits::kMaxDimension);
}

// Wraps a GPU texture (identified by |mailbox_holder| + |texture_target|)
// in a NATIVE_TEXTURE frame.  The frame does not own pixel memory;
// |read_pixels_cb| is the only way to get at the pixels (see
// ReadPixelsFromNativeTexture()), and |no_longer_needed_cb| fires on
// destruction.  Note: geometry is NOT validated here, unlike CreateFrame().
//
// static
scoped_refptr<VideoFrame> VideoFrame::WrapNativeTexture(
    const scoped_refptr<MailboxHolder>& mailbox_holder,
    uint32 texture_target,
    const gfx::Size& coded_size,
    const gfx::Rect& visible_rect,
    const gfx::Size& natural_size,
    base::TimeDelta timestamp,
    const ReadPixelsCB& read_pixels_cb,
    const base::Closure& no_longer_needed_cb) {
  scoped_refptr<VideoFrame> frame(new VideoFrame(
      NATIVE_TEXTURE, coded_size, visible_rect, natural_size, timestamp));
  frame->texture_mailbox_holder_ = mailbox_holder;
  frame->texture_target_ = texture_target;
  frame->read_pixels_cb_ = read_pixels_cb;
  frame->no_longer_needed_cb_ = no_longer_needed_cb;

  return frame;
}

// Synchronously copies the texture contents into |pixels| by delegating to
// the callback supplied at WrapNativeTexture() time.  Silently does nothing
// if no callback was provided.
void VideoFrame::ReadPixelsFromNativeTexture(const SkBitmap& pixels) {
  DCHECK_EQ(format_, NATIVE_TEXTURE);
  if (!read_pixels_cb_.is_null())
    read_pixels_cb_.Run(pixels);
}

// Wraps caller-owned shared memory as an I420 frame (the only format
// implemented; others hit NOTIMPLEMENTED() and return NULL).  |data| must
// point at a packed I420 layout: Y plane (width*height bytes) followed by
// U then V planes (width*height/4 bytes each) — hence the GetArea() and
// GetArea()*5/4 offsets below.  The frame does not take ownership of the
// memory; |no_longer_needed_cb| signals when it is safe to reclaim.
//
// static
scoped_refptr<VideoFrame> VideoFrame::WrapExternalSharedMemory(
    Format format,
    const gfx::Size& coded_size,
    const gfx::Rect& visible_rect,
    const gfx::Size& natural_size,
    uint8* data,
    base::SharedMemoryHandle handle,
    base::TimeDelta timestamp,
    const base::Closure& no_longer_needed_cb) {
  switch (format) {
    case I420: {
      scoped_refptr<VideoFrame> frame(new VideoFrame(
          format, coded_size, visible_rect, natural_size, timestamp));
      frame->shared_memory_handle_ = handle;
      // Packed layout: strides equal the coded width (no row padding).
      frame->strides_[kYPlane] = coded_size.width();
      frame->strides_[kUPlane] = coded_size.width() / 2;
      frame->strides_[kVPlane] = coded_size.width() / 2;
      frame->data_[kYPlane] = data;
      frame->data_[kUPlane] = data + coded_size.GetArea();
      frame->data_[kVPlane] = data + (coded_size.GetArea() * 5 / 4);
      frame->no_longer_needed_cb_ = no_longer_needed_cb;
      return frame;
    }
    default:
      NOTIMPLEMENTED();
      return NULL;
  }
}

// Wraps caller-owned YUV planes (arbitrary strides) without copying.
// Only tri-planar formats are accepted (YV12/YV16/I420 — no alpha plane).
// The caller keeps ownership; |no_longer_needed_cb| fires at destruction.
//
// static
scoped_refptr<VideoFrame> VideoFrame::WrapExternalYuvData(
    Format format,
    const gfx::Size& coded_size,
    const gfx::Rect& visible_rect,
    const gfx::Size& natural_size,
    int32 y_stride,
    int32 u_stride,
    int32 v_stride,
    uint8* y_data,
    uint8* u_data,
    uint8* v_data,
    base::TimeDelta timestamp,
    const base::Closure& no_longer_needed_cb) {
  DCHECK(format == YV12 || format == YV16 || format == I420) << format;
  scoped_refptr<VideoFrame> frame(new VideoFrame(
      format, coded_size, visible_rect, natural_size, timestamp));
  frame->strides_[kYPlane] = y_stride;
  frame->strides_[kUPlane] = u_stride;
  frame->strides_[kVPlane] = v_stride;
  frame->data_[kYPlane] = y_data;
  frame->data_[kUPlane] = u_data;
  frame->data_[kVPlane] = v_data;
  frame->no_longer_needed_cb_ = no_longer_needed_cb;
  return frame;
}

// Creates the zero-sized EMPTY frame used as the end-of-stream marker
// (see IsEndOfStream()).
//
// static
scoped_refptr<VideoFrame> VideoFrame::CreateEmptyFrame() {
  return new VideoFrame(
      VideoFrame::EMPTY, gfx::Size(), gfx::Rect(), gfx::Size(),
      base::TimeDelta());
}

// Allocates a YV12 frame of |size| and fills all pixels with the single
// color (y, u, v).  FillYUV() comes from media/base/video_util.h.
//
// static
scoped_refptr<VideoFrame> VideoFrame::CreateColorFrame(
    const gfx::Size& size,
    uint8 y, uint8 u, uint8 v,
    base::TimeDelta timestamp) {
  DCHECK(IsValidConfig(VideoFrame::YV12, size, gfx::Rect(size), size));
  scoped_refptr<VideoFrame> frame = VideoFrame::CreateFrame(
      VideoFrame::YV12, size, gfx::Rect(size), size, timestamp);
  FillYUV(frame.get(), y, u, v);
  return frame;
}

// Convenience wrapper: an all-black YV12 frame with a zero timestamp.
//
// static
scoped_refptr<VideoFrame> VideoFrame::CreateBlackFrame(const gfx::Size& size) {
  const uint8 kBlackY = 0x00;   // Minimum luma.
  const uint8 kBlackUV = 0x80;  // Neutral chroma (mid-range).
  const base::TimeDelta kZero;
  return CreateColorFrame(size, kBlackY, kBlackUV, kBlackUV, kZero);
}

#if defined(GOOGLE_TV)
// This block and other blocks wrapped around #if defined(GOOGLE_TV) is not
// maintained by the general compositor team. Please contact the following
// people instead:
//
// wonsik (at) chromium.org
// ycheo (at) chromium.org

// Creates a HOLE frame: a placeholder with no pixel data.
//
// static
scoped_refptr<VideoFrame> VideoFrame::CreateHoleFrame(
    const gfx::Size& size) {
  DCHECK(IsValidConfig(VideoFrame::HOLE, size, gfx::Rect(size), size));
  scoped_refptr<VideoFrame> frame(new VideoFrame(
      VideoFrame::HOLE, size, gfx::Rect(size), size, base::TimeDelta()));
  return frame;
}
#endif

// Number of addressable planes for |format|: 0 for textures/holes,
// 1 for packed RGB, 3 for tri-planar YUV, 4 for YUV + alpha.
//
// static
size_t VideoFrame::NumPlanes(Format format) {
  switch (format) {
    case VideoFrame::NATIVE_TEXTURE:
#if defined(GOOGLE_TV)
    case VideoFrame::HOLE:
#endif
      return 0;
    case VideoFrame::RGB32:
      return 1;
    case VideoFrame::YV12:
    case VideoFrame::YV16:
    case VideoFrame::I420:
      return 3;
    case VideoFrame::YV12A:
      return 4;
    case VideoFrame::EMPTY:
    case VideoFrame::INVALID:
      break;
  }
  NOTREACHED() << "Unsupported video frame format: " << format;
  return 0;
}

// Rounds |value| up to the nearest multiple of |alignment|.
static inline size_t RoundUp(size_t value, size_t alignment) {
  // Check that |alignment| is a power of 2.
  // (a + (a-1)) == (a | (a-1)) holds exactly when a & (a-1) == 0.
  DCHECK((alignment + (alignment - 1)) == (alignment | (alignment - 1)));
  return ((value + (alignment - 1)) & ~(alignment-1));
}

// Release data allocated by AllocateRGB() or AllocateYUV().
// Bound into |no_longer_needed_cb_| by the Allocate*() helpers so the
// aligned buffer is freed exactly once, when the frame is destroyed.
static void ReleaseData(uint8* data) {
  DCHECK(data);
  base::AlignedFree(data);
}

// Allocates a single packed RGB plane sized for |coded_size_|, with rows
// and total height rounded up to kFrameSizeAlignment and the base address
// aligned to kFrameAddressAlignment.  Ownership of the buffer is tied to
// this frame via |no_longer_needed_cb_|.
void VideoFrame::AllocateRGB(size_t bytes_per_pixel) {
  // Round up to align at least at a 16-byte boundary for each row.
  // This is sufficient for MMX and SSE2 reads (movq/movdqa).
  size_t bytes_per_row = RoundUp(coded_size_.width(),
                                 kFrameSizeAlignment) * bytes_per_pixel;
  size_t aligned_height = RoundUp(coded_size_.height(), kFrameSizeAlignment);
  strides_[VideoFrame::kRGBPlane] = bytes_per_row;
  // kFrameSizePadding gives SIMD readers slack past the last row.
  data_[VideoFrame::kRGBPlane] = reinterpret_cast<uint8*>(
      base::AlignedAlloc(bytes_per_row * aligned_height + kFrameSizePadding,
                         kFrameAddressAlignment));
  no_longer_needed_cb_ = base::Bind(&ReleaseData, data_[VideoFrame::kRGBPlane]);
  // 8-byte alignment is the minimum required for the SIMD reads above.
  DCHECK(!(reinterpret_cast<intptr_t>(data_[VideoFrame::kRGBPlane]) & 7));
  COMPILE_ASSERT(0 == VideoFrame::kRGBPlane, RGB_data_must_be_index_0);
}

// Allocates Y/U/V (and, for YV12A, A) planes in one contiguous aligned
// buffer, laid out Y | U | V | [A], and points data_/strides_ into it.
void VideoFrame::AllocateYUV() {
  DCHECK(format_ == VideoFrame::YV12 || format_ == VideoFrame::YV16 ||
         format_ == VideoFrame::YV12A || format_ == VideoFrame::I420);
  // Align Y rows at least at 16 byte boundaries. The stride for both
  // YV12 and YV16 is 1/2 of the stride of Y. For YV12, every row of bytes for
  // U and V applies to two rows of Y (one byte of UV for 4 bytes of Y), so in
  // the case of YV12 the strides are identical for the same width surface, but
  // the number of bytes allocated for YV12 is 1/2 the amount for U & V as
  // YV16. We also round the height of the surface allocated to be an even
  // number to avoid any potential of faulting by code that attempts to access
  // the Y values of the final row, but assumes that the last row of U & V
  // applies to a full two rows of Y. YV12A is the same as YV12, but with an
  // additional alpha plane that has the same size and alignment as the Y plane.

  size_t y_stride = RoundUp(row_bytes(VideoFrame::kYPlane),
                            kFrameSizeAlignment);
  size_t uv_stride = RoundUp(row_bytes(VideoFrame::kUPlane),
                             kFrameSizeAlignment);
  // The *2 here is because some formats (e.g. h264) allow interlaced coding,
  // and then the size needs to be a multiple of two macroblocks (vertically).
  // See libavcodec/utils.c:avcodec_align_dimensions2().
  size_t y_height = RoundUp(coded_size_.height(), kFrameSizeAlignment * 2);
  // Chroma is vertically subsampled for the 4:2:0 formats only; YV16 keeps
  // full chroma height.
  size_t uv_height =
      (format_ == VideoFrame::YV12 || format_ == VideoFrame::YV12A ||
       format_ == VideoFrame::I420)
          ? y_height / 2
          : y_height;
  size_t y_bytes = y_height * y_stride;
  size_t uv_bytes = uv_height * uv_stride;
  size_t a_bytes = format_ == VideoFrame::YV12A ? y_bytes : 0;

  // The extra line of UV being allocated is because h264 chroma MC
  // overreads by one line in some cases, see libavcodec/utils.c:
  // avcodec_align_dimensions2() and libavcodec/x86/h264_chromamc.asm:
  // put_h264_chroma_mc4_ssse3().
  uint8* data = reinterpret_cast<uint8*>(
      base::AlignedAlloc(
          y_bytes + (uv_bytes * 2 + uv_stride) + a_bytes + kFrameSizePadding,
          kFrameAddressAlignment));
  no_longer_needed_cb_ = base::Bind(&ReleaseData, data);
  COMPILE_ASSERT(0 == VideoFrame::kYPlane, y_plane_data_must_be_index_0);
  data_[VideoFrame::kYPlane] = data;
  data_[VideoFrame::kUPlane] = data + y_bytes;
  data_[VideoFrame::kVPlane] = data + y_bytes + uv_bytes;
  strides_[VideoFrame::kYPlane] = y_stride;
  strides_[VideoFrame::kUPlane] = uv_stride;
  strides_[VideoFrame::kVPlane] = uv_stride;
  if (format_ == YV12A) {
    // Alpha plane lives after V (including V's over-read slack accounted
    // for in the allocation above) and matches the Y plane's stride.
    data_[VideoFrame::kAPlane] = data + y_bytes + (2 * uv_bytes);
    strides_[VideoFrame::kAPlane] = y_stride;
  }
}

// Private; use one of the static factories above.  Plane pointers and
// strides start zeroed and are filled in by the factory/allocator.
VideoFrame::VideoFrame(VideoFrame::Format format,
                       const gfx::Size& coded_size,
                       const gfx::Rect& visible_rect,
                       const gfx::Size& natural_size,
                       base::TimeDelta timestamp)
    : format_(format),
      coded_size_(coded_size),
      visible_rect_(visible_rect),
      natural_size_(natural_size),
      texture_target_(0),
      shared_memory_handle_(base::SharedMemory::NULLHandle()),
      timestamp_(timestamp) {
  memset(&strides_, 0, sizeof(strides_));
  memset(&data_, 0, sizeof(data_));
}

VideoFrame::~VideoFrame() {
  // Fires exactly once: frees owned memory (ReleaseData) or notifies the
  // external owner, depending on how the frame was created.
  if (!no_longer_needed_cb_.is_null())
    base::ResetAndReturn(&no_longer_needed_cb_).Run();
}

bool VideoFrame::IsValidPlane(size_t plane) const {
  return (plane < NumPlanes(format_));
}

int VideoFrame::stride(size_t plane) const {
  DCHECK(IsValidPlane(plane));
  return strides_[plane];
}

// Bytes of actual pixel data per row in |plane| (may be less than the
// stride, which includes alignment padding).
int VideoFrame::row_bytes(size_t plane) const {
  DCHECK(IsValidPlane(plane));
  int width = coded_size_.width();
  switch (format_) {
    // 32bpp.
    case RGB32:
      return width * 4;

    // Planar, 8bpp.
    case YV12A:
      if (plane == kAPlane)
        return width;
    // Fallthrough.
    case YV12:
    case YV16:
    case I420:
      if (plane == kYPlane)
        return width;
      // Chroma planes are horizontally subsampled by 2 (rounded up).
      return RoundUp(width, 2) / 2;

    default:
      break;
  }

  // Intentionally leave out non-production formats.
  NOTREACHED() << "Unsupported video frame format: " << format_;
  return 0;
}

// Number of rows of actual pixel data in |plane|.
int VideoFrame::rows(size_t plane) const {
  DCHECK(IsValidPlane(plane));
  int height = coded_size_.height();
  switch (format_) {
    case RGB32:
    case YV16:  // 4:2:2 — chroma keeps full height.
      return height;

    case YV12A:
      if (plane == kAPlane)
        return height;
    // Fallthrough.
    case YV12:
    case I420:
      if (plane == kYPlane)
        return height;
      // 4:2:0 chroma planes are vertically subsampled by 2 (rounded up).
      return RoundUp(height, 2) / 2;

    default:
      break;
  }

  // Intentionally leave out non-production formats.
  NOTREACHED() << "Unsupported video frame format: " << format_;
  return 0;
}

uint8* VideoFrame::data(size_t plane) const {
  DCHECK(IsValidPlane(plane));
  return data_[plane];
}

const scoped_refptr<VideoFrame::MailboxHolder>& VideoFrame::texture_mailbox()
    const {
  DCHECK_EQ(format_, NATIVE_TEXTURE);
  return texture_mailbox_holder_;
}

uint32 VideoFrame::texture_target() const {
  DCHECK_EQ(format_, NATIVE_TEXTURE);
  return texture_target_;
}

base::SharedMemoryHandle VideoFrame::shared_memory_handle() const {
  return shared_memory_handle_;
}

// EMPTY frames double as the end-of-stream marker (see CreateEmptyFrame()).
bool VideoFrame::IsEndOfStream() const {
  return format_ == VideoFrame::EMPTY;
}

// Folds every valid plane's pixel data (row by row, excluding stride
// padding) into |context| so tests can fingerprint frame contents.
void VideoFrame::HashFrameForTesting(base::MD5Context* context) {
  for (int plane = 0; plane < kMaxPlanes; ++plane) {
    if (!IsValidPlane(plane))
      break;
    for (int row = 0; row < rows(plane); ++row) {
      base::MD5Update(context, base::StringPiece(
          reinterpret_cast<char*>(data(plane) + stride(plane) * row),
          row_bytes(plane)));
    }
  }
}

VideoFrame::MailboxHolder::MailboxHolder(
    const gpu::Mailbox& mailbox,
    unsigned sync_point,
    const TextureNoLongerNeededCallback& release_callback)
    : mailbox_(mailbox),
      sync_point_(sync_point),
      release_callback_(release_callback) {}

VideoFrame::MailboxHolder::~MailboxHolder() {
  // Returns the texture to its producer, passing the sync point the
  // consumer must wait on before reusing the texture.
  if (!release_callback_.is_null())
    release_callback_.Run(sync_point_);
}

}  // namespace media