/external/webrtc/webrtc/common_video/libyuv/

scaler.cc
   75  const uint8_t* u_ptr = src_frame.buffer(kUPlane) +
   76      src_offset_y / 2 * src_frame.stride(kUPlane) +
   85  src_frame.stride(kUPlane),
   91  dst_frame->buffer(kUPlane),
   92  dst_frame->stride(kUPlane),

webrtc_libyuv.cc
  251  dst_frame->buffer(kUPlane),
  252  dst_frame->stride(kUPlane),
  268  src_frame.buffer(kUPlane),
  269  src_frame.stride(kUPlane),
  287  src_frame.buffer(kUPlane),
  288  src_frame.stride(kUPlane),
  306  ref_frame->buffer(kUPlane),
  307  ref_frame->stride(kUPlane),
  312  test_frame->buffer(kUPlane),
  313  test_frame->stride(kUPlane),
  [all...]
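Note: the halved offset arithmetic in scaler.cc lines 75-76 follows from I420's 4:2:0 layout. The U and V planes are subsampled by two both horizontally and vertically, so a crop origin of (x, y) in luma coordinates lands at (x/2, y/2) in each chroma plane. A minimal sketch of that mapping (function and parameter names are illustrative, not taken from scaler.cc):

    #include <cstdint>

    // Map a crop origin given in luma (Y-plane) coordinates to the matching
    // byte in a chroma (U or V) plane of an I420 frame. Chroma is subsampled
    // 2x in each dimension, so both offsets are halved.
    const uint8_t* CroppedChromaPtr(const uint8_t* chroma_plane,
                                    int chroma_stride,
                                    int offset_x,    // luma coordinates
                                    int offset_y) {  // luma coordinates
      return chroma_plane + (offset_y / 2) * chroma_stride + (offset_x / 2);
    }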
/external/webrtc/webrtc/common_video/

i420_buffer_pool_unittest.cc
   25  const uint8_t* u_ptr = buffer->data(kUPlane);
   32  EXPECT_EQ(u_ptr, buffer->data(kUPlane));
   42  const uint8_t* u_ptr = buffer->data(kUPlane);
   50  EXPECT_NE(u_ptr, buffer->data(kUPlane));

i420_video_frame_unittest.cc
   60  int stride_u = frame.stride(kUPlane);
   65  EXPECT_EQ(ExpectedSize(stride_u, height, kUPlane),
   66      frame.allocated_size(kUPlane));
  111  memset(small_frame.buffer(kUPlane), 2, small_frame.allocated_size(kUPlane));
  152  EXPECT_TRUE(const_frame1_ptr->buffer(kUPlane) ==
  153      const_frame2_ptr->buffer(kUPlane));
  208  EXPECT_TRUE(EqualPlane(buffer_u, frame2.buffer(kUPlane), stride_uv, 8, 8));
  213  EXPECT_LE(kSizeUv, frame2.allocated_size(kUPlane));
  221  const uint8_t* u = frame.buffer(kUPlane);
  [all...]

video_frame.cc
   82  stride_u == stride(kUPlane) && stride_v == stride(kVPlane)) {
  119  memcpy(buffer(kUPlane), buffer_u, expected_size_u);
  145  CreateFrame(videoFrame.buffer(kYPlane), videoFrame.buffer(kUPlane),
  148      videoFrame.stride(kUPlane), videoFrame.stride(kVPlane));
  228  stride(kUPlane) != frame.stride(kUPlane) ||
  239  EqualPlane(buffer(kUPlane), frame.buffer(kUPlane),
  240      stride(kUPlane), half_width, half_height) &&

video_frame_buffer.cc
   67  case kUPlane:
   88  case kUPlane:
  174  case kUPlane:
  188  case kUPlane:
  226  const uint8_t* u_plane = buffer->data(kUPlane) +
  227      buffer->stride(kUPlane) * uv_offset_y + uv_offset_x;
  233  u_plane, buffer->stride(kUPlane),
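Note: the EqualPlane calls in i420_video_frame_unittest.cc and video_frame.cc compare one plane at a time, which is why video_frame.cc line 240 passes half_width and half_height for the chroma planes. A sketch of what such a comparison has to do, assuming both planes share one stride as at those call sites (reconstructed for illustration, not copied from the WebRTC sources):

    #include <cstdint>
    #include <cstring>

    // Compare two planes row by row; only the first `width` bytes of each
    // row are pixel data, so stride padding is skipped rather than compared.
    bool EqualPlane(const uint8_t* data1, const uint8_t* data2,
                    int stride, int width, int height) {
      for (int y = 0; y < height; ++y) {
        if (memcmp(data1, data2, width) != 0)
          return false;
        data1 += stride;
        data2 += stride;
      }
      return true;
    }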
/external/webrtc/webrtc/test/

fake_texture_frame.h
   46  memset(buffer->MutableData(kUPlane), 0, half_height * half_width);

frame_generator.cc
   43  memset(frame_.buffer(kUPlane), u, frame_.allocated_size(kUPlane));
  206  int offset_u = (current_source_frame_->stride(PlaneType::kUPlane) *
  215  &current_source_frame_->buffer(PlaneType::kUPlane)[offset_u],
  219  current_source_frame_->stride(PlaneType::kUPlane),

frame_generator_unittest.cc
   66  ASSERT_EQ(uv_size, frame->allocated_size(PlaneType::kUPlane));
   67  buffer = frame->buffer(PlaneType::kUPlane);
/external/webrtc/talk/media/webrtc/

webrtcvideoframe.cc
   37  using webrtc::kUPlane;
  104  return video_frame_buffer_ ? video_frame_buffer_->data(kUPlane) : nullptr;
  117  return video_frame_buffer_ ? video_frame_buffer_->MutableData(kUPlane)
  131  return video_frame_buffer_ ? video_frame_buffer_->stride(kUPlane) : 0;
  169  video_frame_buffer_->stride(kUPlane),
  173  new_buffer->MutableData(kYPlane), new_buffer->MutableData(kUPlane),
  175  new_buffer->stride(kUPlane), new_buffer->stride(kVPlane))) {
/external/webrtc/webrtc/video/

video_capture_input_unittest.cc
  267  (frame1.stride(kUPlane) == frame2.stride(kUPlane)) &&
  271  (frame1.allocated_size(kUPlane) == frame2.allocated_size(kUPlane)) &&
  275  (memcmp(frame1.buffer(kUPlane), frame2.buffer(kUPlane),
  276      frame1.allocated_size(kUPlane)) == 0) &&

video_encoder_unittest.cc
  112  memset(frame_.buffer(webrtc::kUPlane), 128,
  113      frame_.allocated_size(webrtc::kUPlane));
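Note: video_capture_input_unittest.cc takes the opposite approach to the EqualPlane sketch above: it first checks that strides and allocated sizes match, then memcmps entire buffers, padding included, which works there because both frames come from the same allocation path. A condensed single-plane sketch (FramesEqualOnPlane is a hypothetical helper, not a WebRTC API):

    #include <cstring>
    #include "webrtc/video_frame.h"

    // Hypothetical helper mirroring the unittest pattern: equal strides and
    // allocation sizes, then a byte-for-byte compare of the whole buffer,
    // stride padding included.
    bool FramesEqualOnPlane(const webrtc::VideoFrame& frame1,
                            const webrtc::VideoFrame& frame2,
                            webrtc::PlaneType plane) {
      return frame1.stride(plane) == frame2.stride(plane) &&
             frame1.allocated_size(plane) == frame2.allocated_size(plane) &&
             memcmp(frame1.buffer(plane), frame2.buffer(plane),
                    frame1.allocated_size(plane)) == 0;
    }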
/external/webrtc/webrtc/modules/video_processing/

video_denoiser.cc
   64  int stride_u = frame.stride(kUPlane);
   71  denoised_frame->CreateFrame(frame.buffer(kYPlane), frame.buffer(kUPlane),
   86  uint8_t* u_dst = denoised_frame->buffer(kUPlane);
   89  const uint8_t* u_src = frame.buffer(kUPlane);
/external/webrtc/webrtc/modules/video_coding/codecs/vp8/

simulcast_unittest.h
  129  EXPECT_NEAR(kColorU, decoded_image.buffer(kUPlane)[i], 4);
  315  memset(input_frame_.buffer(kUPlane), 0,
  316      input_frame_.allocated_size(kUPlane));
  565  memset(input_frame_.buffer(kUPlane), 0,
  566      input_frame_.allocated_size(kUPlane));
  606  memset(input_frame_.buffer(kUPlane), 0,
  607      input_frame_.allocated_size(kUPlane));
  695  plane_offset[kUPlane] = kColorU;
  711  plane_offset[kUPlane] += 1;
  719  plane_offset[kUPlane] += 1
  [all...]

simulcast_encoder_adapter.cc
  295  input_image.buffer(kUPlane), input_image.stride(kUPlane),
  298  dst_frame.buffer(kUPlane), dst_frame.stride(kUPlane),
/external/webrtc/webrtc/common_video/include/

video_frame_buffer.h
   24  kUPlane = 1,
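Note: line 24 above is the definition every call site in this listing indexes with. For reference, a sketch of the surrounding PlaneType enum; kYPlane and kVPlane and their order are confirmed by the snippets throughout this listing, while kNumOfPlanes is recalled from that header rather than shown in any match above:

    enum PlaneType {
      kYPlane = 0,
      kUPlane = 1,
      kVPlane = 2,
      kNumOfPlanes = 3,
    };

Because the values are dense and zero-based, code such as simulcast_unittest.h can use them directly as array indices, as in plane_offset[kUPlane].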
/external/webrtc/webrtc/modules/video_render/ios/

open_gles20.mm
  324  width / 2, height / 2, frame.stride(kUPlane), frame.buffer(kUPlane));
/external/webrtc/talk/app/webrtc/java/jni/

androidvideocapturer_jni.cc
  167  buffer->MutableData(webrtc::kUPlane), buffer->stride(webrtc::kUPlane),

androidmediadecoder_jni.cc
  703  frame_buffer->MutableData(webrtc::kUPlane),
  704  frame_buffer->stride(webrtc::kUPlane),
  717  frame_buffer->MutableData(webrtc::kUPlane),
  718  frame_buffer->stride(webrtc::kUPlane),
  [all...]
/external/webrtc/webrtc/modules/video_coding/codecs/h264/

h264_video_toolbox_decoder.cc
   68  buffer->MutableData(webrtc::kUPlane), buffer->stride(webrtc::kUPlane),

h264_video_toolbox_encoder.cc
  144  frame.buffer(webrtc::kUPlane), frame.stride(webrtc::kUPlane),
/external/webrtc/webrtc/modules/video_render/android/

video_render_opengles20.cc
  386  GlTexSubImage2D(width / 2, height / 2, frameToRender.stride(kUPlane),
  387      frameToRender.buffer(kUPlane));
/external/webrtc/webrtc/modules/video_capture/test/

video_capture_unittest.cc
   69  (frame1.stride(webrtc::kUPlane) == frame2.stride(webrtc::kUPlane)) &&
  458  memset(test_frame_.buffer(webrtc::kUPlane), 127,
/external/webrtc/webrtc/modules/video_processing/test/

video_processing_unittest.cc
   76  memset(video_frame_.buffer(kUPlane), 0, video_frame_.allocated_size(kUPlane));
/external/webrtc/webrtc/modules/video_render/test/testAPI/

testAPI.cc
  280  memset(frame->buffer(kUPlane), color, frame->allocated_size(kUPlane));
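Note: the memset pattern in testAPI.cc line 280, and likewise in video_processing_unittest.cc, video_encoder_unittest.cc, and simulcast_unittest.h above, paints a plane with a constant, using allocated_size() so the stride padding is filled as well. A sketch applying it to all three planes of an already-allocated I420 frame (FillSolidColor is a hypothetical helper; 128 is the neutral chroma value, which is presumably why video_encoder_unittest.cc fills the U plane with it):

    #include <cstdint>
    #include <cstring>
    #include "webrtc/video_frame.h"

    // Hypothetical helper: fill each plane of an I420 frame with a constant.
    // U = V = 128 gives neutral (gray) chroma.
    void FillSolidColor(webrtc::VideoFrame* frame,
                        uint8_t y, uint8_t u, uint8_t v) {
      memset(frame->buffer(webrtc::kYPlane), y,
             frame->allocated_size(webrtc::kYPlane));
      memset(frame->buffer(webrtc::kUPlane), u,
             frame->allocated_size(webrtc::kUPlane));
      memset(frame->buffer(webrtc::kVPlane), v,
             frame->allocated_size(webrtc::kVPlane));
    }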