/external/webrtc/talk/media/base/ |
testutils.cc |
  325 bool VideoFrameEqual(const VideoFrame* frame0, const VideoFrame* frame1) {
  329 const uint8_t* y1 = frame1->GetYPlane();
  330 const uint8_t* u1 = frame1->GetUPlane();
  331 const uint8_t* v1 = frame1->GetVPlane();
  338 y1 += frame1->GetYPitch();
  350 u1 += frame1->GetUPitch();
  351 v1 += frame1->GetVPitch();
|
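Note: the hit above is the plane-by-plane frame comparison in the old talk/media layer. For reference, a minimal sketch of that comparison pattern follows; it is not the file's own code. The width/height parameters are assumed for illustration (only the plane and pitch getters actually appear in the hit), and <cstring> is needed for memcmp.

  static bool PlanesEqual(const uint8_t* a, int pitch_a,
                          const uint8_t* b, int pitch_b,
                          int width, int height) {
    for (int row = 0; row < height; ++row) {
      // Compare only the visible bytes of each row; the pitch (bytes per row)
      // may include padding that is allowed to differ between frames.
      if (memcmp(a, b, width) != 0)
        return false;
      a += pitch_a;  // Advance each plane by its own pitch, as the
      b += pitch_b;  // GetYPitch()-style accessors above suggest.
    }
    return true;
  }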
/external/webrtc/webrtc/video/ |
video_send_stream_tests.cc |
  47 void ExpectEqualFrames(const VideoFrame& frame1, const VideoFrame& frame2);
  48 void ExpectEqualTextureFrames(const VideoFrame& frame1,
  50 void ExpectEqualBufferFrames(const VideoFrame& frame1, [all...]
|
/toolchain/binutils/binutils-2.25/ld/testsuite/ld-mips-elf/ |
eh-frame1-n32.d |
  2 #source: eh-frame1.s
  3 #source: eh-frame1.s
  6 #ld: -shared -melf32btsmipn32 -Teh-frame1.ld
|
eh-frame3.d |
  2 #source: eh-frame1.s
  3 #source: eh-frame1.s
  6 #ld: -EB -Teh-frame1.ld --defsym foo=0x50607080
|
eh-frame2-n64.d |
  2 #source: eh-frame1.s
  3 #source: eh-frame1.s
  6 #ld: -shared -melf64btsmip -Teh-frame1.ld
|
eh-frame1-n64.d |
  2 #source: eh-frame1.s
  3 #source: eh-frame1.s
  6 #ld: -shared -melf64btsmip -Teh-frame1.ld
|
/external/libvpx/config/x86/ |
vp9_rtcd.h |
  122 void vp9_temporal_filter_apply_c(uint8_t *frame1, unsigned int stride, uint8_t *frame2, unsigned int block_width, unsigned int block_height, int strength, int filter_weight, unsigned int *accumulator, uint16_t *count);
  123 void vp9_temporal_filter_apply_sse2(uint8_t *frame1, unsigned int stride, uint8_t *frame2, unsigned int block_width, unsigned int block_height, int strength, int filter_weight, unsigned int *accumulator, uint16_t *count);
|
/external/libvpx/config/x86_64/ |
vp9_rtcd.h |
  125 void vp9_temporal_filter_apply_c(uint8_t *frame1, unsigned int stride, uint8_t *frame2, unsigned int block_width, unsigned int block_height, int strength, int filter_weight, unsigned int *accumulator, uint16_t *count);
  126 void vp9_temporal_filter_apply_sse2(uint8_t *frame1, unsigned int stride, uint8_t *frame2, unsigned int block_width, unsigned int block_height, int strength, int filter_weight, unsigned int *accumulator, uint16_t *count);
|
/external/opencv3/modules/videostab/src/ |
global_motion.cpp |
  667 Mat FromFileMotionReader::estimate(const Mat &/*frame0*/, const Mat &/*frame1*/, bool *ok)
  687 Mat ToFileMotionWriter::estimate(const Mat &frame0, const Mat &frame1, bool *ok)
  690 Mat_<float> M = motionEstimator_->estimate(frame0, frame1, &ok_);
  708 Mat KeypointBasedMotionEstimator::estimate(const Mat &frame0, const Mat &frame1, bool *ok)
  721 optFlowEstimator_->run(frame0, frame1, pointsPrev_, points_, status_, noArray());
  779 Mat KeypointBasedMotionEstimatorGpu::estimate(const Mat &frame0, const Mat &frame1, bool *ok)
  782 frame1_.upload(frame1);
  787 Mat KeypointBasedMotionEstimatorGpu::estimate(const cuda::GpuMat &frame0, const cuda::GpuMat &frame1, bool *ok)
  804 optFlowEstimator_.run(frame0, frame1, pointsPrev_, points_, status_);
|
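The global_motion.cpp hits above are the videostab motion estimators. A minimal usage sketch, assuming the public OpenCV 3.x videostab API (a KeypointBasedMotionEstimator wrapping a RANSAC model fitter); the returned model is a 3x3 float matrix, consistent with the Mat_<float> at line 690 above.

  #include <opencv2/videostab/global_motion.hpp>

  cv::Mat estimateGlobalMotion(const cv::Mat& frame0, const cv::Mat& frame1) {
    using namespace cv::videostab;
    // Robust affine fit over keypoints matched between the two frames.
    cv::Ptr<MotionEstimatorRansacL2> ransac =
        cv::makePtr<MotionEstimatorRansacL2>(MM_AFFINE);
    KeypointBasedMotionEstimator estimator(ransac);
    bool ok = false;
    cv::Mat M = estimator.estimate(frame0, frame1, &ok);
    return ok ? M : cv::Mat::eye(3, 3, CV_32F);  // Fall back to identity on failure.
  }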
/external/opencv3/doc/py_tutorials/py_video/py_lucas_kanade/ |
py_lucas_kanade.markdown |
  183 ret, frame1 = cap.read()
  184 prvs = cv2.cvtColor(frame1,cv2.COLOR_BGR2GRAY)
  185 hsv = np.zeros_like(frame1)
|
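The py_lucas_kanade.markdown lines above come from the tutorial's dense optical flow demo (frame1 is the first captured frame, prvs its grayscale version, and hsv a buffer later used to visualize the flow). A rough C++ counterpart, kept in C++ to match the other sketches here and intended as an illustration rather than the tutorial's own code:

  #include <opencv2/imgproc.hpp>
  #include <opencv2/video/tracking.hpp>
  #include <opencv2/videoio.hpp>

  int main() {
    cv::VideoCapture cap(0);
    cv::Mat frame1, prvs;
    cap >> frame1;                                   // first frame, like line 183
    cv::cvtColor(frame1, prvs, cv::COLOR_BGR2GRAY);  // grayscale, like line 184
    for (;;) {
      cv::Mat frame2, next, flow;
      cap >> frame2;
      if (frame2.empty()) break;
      cv::cvtColor(frame2, next, cv::COLOR_BGR2GRAY);
      // Dense Farneback flow: one 2-channel float (dx, dy) vector per pixel.
      cv::calcOpticalFlowFarneback(prvs, next, flow, 0.5, 3, 15, 3, 5, 1.2, 0);
      prvs = next;                                   // slide the window forward
    }
    return 0;
  }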
/external/libvpx/config/arm/ |
vp9_rtcd.h | 101 void vp9_temporal_filter_apply_c(uint8_t *frame1, unsigned int stride, uint8_t *frame2, unsigned int block_width, unsigned int block_height, int strength, int filter_weight, unsigned int *accumulator, uint16_t *count);
|
/external/libvpx/config/generic/ |
vp9_rtcd.h | 101 void vp9_temporal_filter_apply_c(uint8_t *frame1, unsigned int stride, uint8_t *frame2, unsigned int block_width, unsigned int block_height, int strength, int filter_weight, unsigned int *accumulator, uint16_t *count);
|
/external/libvpx/config/mips32/ |
vp9_rtcd.h | 101 void vp9_temporal_filter_apply_c(uint8_t *frame1, unsigned int stride, uint8_t *frame2, unsigned int block_width, unsigned int block_height, int strength, int filter_weight, unsigned int *accumulator, uint16_t *count);
|
/external/libvpx/config/mips32-dspr2/ |
vp9_rtcd.h | 104 void vp9_temporal_filter_apply_c(uint8_t *frame1, unsigned int stride, uint8_t *frame2, unsigned int block_width, unsigned int block_height, int strength, int filter_weight, unsigned int *accumulator, uint16_t *count);
|
/external/libvpx/config/mips64/ |
vp9_rtcd.h | 101 void vp9_temporal_filter_apply_c(uint8_t *frame1, unsigned int stride, uint8_t *frame2, unsigned int block_width, unsigned int block_height, int strength, int filter_weight, unsigned int *accumulator, uint16_t *count);
|
/external/webrtc/webrtc/modules/video_processing/test/ |
video_processing_unittest.cc |
  53 static bool CompareFrames(const webrtc::VideoFrame& frame1,
  380 bool CompareFrames(const webrtc::VideoFrame& frame1,
  384 int allocated_size1 = frame1.allocated_size(plane_type);
  388 const uint8_t* plane_buffer1 = frame1.buffer(plane_type);
|
/external/skia/tests/ |
PathOpsConicIntersectionTest.cpp |
  186 const SkDConic frame1[] = {
  229 frame0, frame1, frame2, frame3, frame4, frame5, frame6
  232 const int frameSizes[] = { (int) SK_ARRAY_COUNT(frame0), (int) SK_ARRAY_COUNT(frame1),
|
/external/autotest/server/cros/ap_configurators/ |
buffalo_wzr_d1800h_ap_configurator.py |
  133 frame1 = self.driver.find_element_by_xpath('//frame[@name="lower"]')
  134 self.driver.switch_to_frame(frame1)
|
/external/chromium-trace/catapult/tracing/tracing/model/ |
model_test.html |
  66 var frame1 = new Frame([slice], [{thread: t, start: 1, end: 5}]);
  67 p.frames.push.apply(p.frames, frame1);
|
/external/libvpx/config/arm-neon/ |
vp9_rtcd.h | 109 void vp9_temporal_filter_apply_c(uint8_t *frame1, unsigned int stride, uint8_t *frame2, unsigned int block_width, unsigned int block_height, int strength, int filter_weight, unsigned int *accumulator, uint16_t *count);
|
/external/libvpx/config/arm64/ |
vp9_rtcd.h | 109 void vp9_temporal_filter_apply_c(uint8_t *frame1, unsigned int stride, uint8_t *frame2, unsigned int block_width, unsigned int block_height, int strength, int filter_weight, unsigned int *accumulator, uint16_t *count);
|
/external/libvpx/libvpx/vp9/encoder/ |
vp9_temporal_filter.c |
  122 void vp9_temporal_filter_apply_c(uint8_t *frame1,
  138 int src_byte = frame1[byte];
  176 uint16_t *frame1 = CONVERT_TO_SHORTPTR(frame1_8);
  185 int src_byte = frame1[byte];
|
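The vp9_temporal_filter.c hits, together with the vp9_rtcd.h declarations above and the vp8 counterpart listed further down, belong to the encoder's temporal filter, which denoises a source block by blending in a motion-compensated predictor with per-pixel weights. A simplified sketch of that weighting scheme, operating on a flat array instead of a strided block; the constants and rounding are illustrative, not copied from libvpx:

  #include <stdint.h>

  static void temporal_filter_apply_sketch(const uint8_t *src, const uint8_t *pred,
                                           unsigned int n, int strength,
                                           int filter_weight,
                                           unsigned int *accumulator, uint16_t *count) {
    const int rounding = strength > 0 ? 1 << (strength - 1) : 0;
    for (unsigned int i = 0; i < n; ++i) {
      int diff = src[i] - pred[i];                    // cf. src_byte at lines 138/185
      int modifier = diff * diff;                     // squared error...
      modifier = (modifier + rounding) >> strength;   // ...scaled down by strength
      if (modifier > 16) modifier = 16;
      modifier = (16 - modifier) * filter_weight;     // larger error -> smaller weight
      count[i] += (uint16_t)modifier;
      accumulator[i] += (unsigned int)(modifier * pred[i]);  // weighted predictor pixel
    }
  }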
/external/opencv3/modules/superres/include/opencv2/superres/ |
optical_flow.hpp | 59 virtual void calc(InputArray frame0, InputArray frame1, OutputArray flow1, OutputArray flow2 = noArray()) = 0;
|
/external/opencv3/modules/videostab/include/opencv2/videostab/ |
inpainting.hpp | 204 const Mat &flowMask, const Mat &flowX, const Mat &flowY, const Mat &frame1, const Mat &mask1,
|
/external/libvpx/libvpx/vp8/encoder/ |
temporal_filter.c |
  88 unsigned char *frame1,
  108 int src_byte = frame1[byte];
|