/frameworks/rs/api/ |
rs_allocation_create.spec | 84 arg: rs_yuv_format yuv_format, "YUV layout for the Type" 101 yuv_format indicates the associated YUV format (or RS_YUV_NONE).
|
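For context, the association the spec describes (attaching a YUV layout such as RS_YUV_NONE to a Type) has a Java-framework counterpart in android.renderscript; the sketch below uses that API rather than the script-side functions this .spec file generates, and the helper class and method names are illustrative only.

    import android.content.Context;
    import android.graphics.ImageFormat;
    import android.renderscript.Allocation;
    import android.renderscript.Element;
    import android.renderscript.RenderScript;
    import android.renderscript.Type;

    // Minimal sketch: associate a YUV layout with a Type, then back an
    // Allocation with it. The Java side uses ImageFormat constants where the
    // script API uses rs_yuv_format values such as RS_YUV_NONE.
    final class YuvAllocationSketch {
        static Allocation createNv21Allocation(Context ctx, int width, int height) {
            RenderScript rs = RenderScript.create(ctx);
            Type yuvType = new Type.Builder(rs, Element.YUV(rs))
                    .setX(width)
                    .setY(height)
                    .setYuvFormat(ImageFormat.NV21)   // or YV12 / YUV_420_888
                    .create();
            return Allocation.createTyped(rs, yuvType,
                    Allocation.USAGE_SCRIPT | Allocation.USAGE_IO_INPUT);
        }
    }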
/frameworks/rs/cpu_ref/ |
rsCpuIntrinsicYuvToRGB.cpp | 130 size_t cstep = cp->alloc->mHal.drvState.yuv.step; 147 // Legacy yuv support didn't fill in uv
|
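The intrinsic above is the CPU path for YUV-to-RGB conversion; the per-pixel arithmetic is the standard BT.601 video-range formula, restated below in plain Java for reference. This is only a sketch of the math, not the intrinsic's fixed-point implementation, and it ignores the chroma-step (cstep) handling visible in the snippet.

    // Standard BT.601 video-range YUV -> RGB arithmetic as a plain-Java sketch.
    // The intrinsic's exact coefficients and rounding may differ.
    final class YuvToRgbSketch {
        /** Packs one pixel as 0xAARRGGBB from 8-bit Y, U, V samples. */
        static int yuvToArgb(int y, int u, int v) {
            float yf = 1.164f * (y - 16);
            float uf = u - 128;
            float vf = v - 128;
            int r = clamp(Math.round(yf + 1.596f * vf));
            int g = clamp(Math.round(yf - 0.813f * vf - 0.391f * uf));
            int b = clamp(Math.round(yf + 2.018f * uf));
            return 0xFF000000 | (r << 16) | (g << 8) | b;
        }

        private static int clamp(int c) {
            return Math.max(0, Math.min(255, c));
        }
    }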
/frameworks/rs/driver/runtime/ |
rs_structs.h | 42 uint32_t yuv; member in struct:Allocation::__anon30205::__anon30206 77 } yuv; member in struct:Allocation::__anon30205::DrvState
|
/frameworks/rs/ |
rsAllocation.h | 67 uint32_t yuv; member in struct:android::renderscript::Allocation::Hal::State 103 } yuv; member in struct:android::renderscript::Allocation::Hal::DrvState
|
rsScriptC_Lib.cpp | 286 uint32_t yuv) { 287 return rsi_TypeCreate(rsc, element, dimX, dimY, dimZ, mipmaps, faces, yuv);
|
/frameworks/rs/scriptc/ |
rs_object_types.rsh | 200 * rs_yuv_format: YUV format 202 * Android YUV formats that can be associated with a RenderScript Type.
|
/frameworks/support/v8/renderscript/java/src/android/support/v8/renderscript/ |
ScriptIntrinsicColorMatrix.java | 159 * Set the matrix to convert from YUV to RGB with a direct copy of the 4th 178 * Set the matrix to convert from RGB to YUV with a direct copy of the 4th
|
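setYUVtoRGB() and setRGBtoYUV() load fixed conversion matrices into the intrinsic; a minimal usage sketch with the support-library API follows. The helper name is illustrative and the in/out Allocations are assumed to be compatible U8_4 allocations.

    import android.support.v8.renderscript.Allocation;
    import android.support.v8.renderscript.Element;
    import android.support.v8.renderscript.RenderScript;
    import android.support.v8.renderscript.ScriptIntrinsicColorMatrix;

    // Minimal sketch: apply the built-in YUV -> RGB matrix of the intrinsic.
    final class ColorMatrixSketch {
        static void yuvToRgb(RenderScript rs, Allocation in, Allocation out) {
            ScriptIntrinsicColorMatrix cm =
                    ScriptIntrinsicColorMatrix.create(rs, Element.U8_4(rs));
            cm.setYUVtoRGB();      // loads the YUV -> RGB coefficients
            cm.forEach(in, out);   // the 4th channel is copied through
        }
    }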
/hardware/intel/img/hwcomposer/merrifield/ips/anniedale/ |
PlaneCapabilities.cpp | 130 if (stride.yuv.yStride > maxStride) { 131 VTRACE("stride %d is too large", stride.yuv.yStride);
|
/hardware/qcom/media/msm8996/mm-video-v4l2/vidc/vdec/src/ |
omx_swvdec_utils.cpp | 423 * @brief Dump output YUV to file. 425 * @param[in] p_buffer: Pointer to output YUV buffer.
|
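As a generic illustration only, not the vendor's omx_swvdec routine: dumping a contiguous 4:2:0 output buffer to a raw .yuv file amounts to appending width*height luma bytes plus half that amount of chroma bytes. All names below are hypothetical.

    import java.io.FileOutputStream;
    import java.io.IOException;

    // Generic sketch: append one contiguous 4:2:0 frame (luma plane followed
    // by chroma data) to a raw .yuv dump file.
    final class YuvDumpSketch {
        static void dumpYuv420(String path, byte[] frame, int width, int height)
                throws IOException {
            int lumaSize = width * height;
            int chromaSize = lumaSize / 2;
            try (FileOutputStream out = new FileOutputStream(path, /*append=*/ true)) {
                out.write(frame, 0, lumaSize + chromaSize);
            }
        }
    }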
/packages/apps/DevCamera/res/layout/ |
activity_main.xml | 132 android:textOff="YUV Full" 133 android:textOn="YUV Full"
|
/prebuilts/go/darwin-x86/src/image/color/ |
ycbcr.go | 75 // codecs often use the terms YUV and Y'CbCr interchangeably, but strictly 76 // speaking, the term YUV applies only to analog video signals, and Y' (luma)
|
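The comment distinguishes analog YUV from the Y'CbCr encoding the package actually implements. The JFIF full-range formulas that ycbcr.go is built around are restated below in Java for reference; rounding differs from the package's fixed-point code.

    // JFIF full-range RGB -> Y'CbCr, written out in plain Java.
    final class RgbToYCbCrSketch {
        /** Returns {Y', Cb, Cr} for 8-bit R, G, B inputs. */
        static int[] rgbToYCbCr(int r, int g, int b) {
            int y  = clamp( 0.2990 * r + 0.5870 * g + 0.1140 * b);
            int cb = clamp(-0.1687 * r - 0.3313 * g + 0.5000 * b + 128);
            int cr = clamp( 0.5000 * r - 0.4187 * g - 0.0813 * b + 128);
            return new int[] {y, cb, cr};
        }

        private static int clamp(double v) {
            return (int) Math.max(0, Math.min(255, Math.round(v)));
        }
    }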
/prebuilts/go/linux-x86/src/image/color/ |
ycbcr.go | 75 // codecs often use the terms YUV and Y'CbCr interchangeably, but strictly 76 // speaking, the term YUV applies only to analog video signals, and Y' (luma)
|
/prebuilts/sdk/renderscript/include/ |
rs_object_types.rsh | 200 * rs_yuv_format: YUV format 202 * Android YUV formats that can be associated with a RenderScript Type.
|
/device/moto/shamu/camera/QCamera/HAL/usbcamcore/src/ |
QCameraMjpegDecode.cpp | 382 jpeg_buffer_init(&p_output_buffers.data.yuv.luma_buf); 383 jpeg_buffer_init(&p_output_buffers.data.yuv.chroma_buf); 408 p_output_buffers.data.yuv.luma_buf, 413 p_output_buffers.data.yuv.chroma_buf, 527 jpeg_buffer_destroy(&p_output_buffers.data.yuv.luma_buf); 528 jpeg_buffer_destroy(&p_output_buffers.data.yuv.chroma_buf);
|
/external/libyuv/files/unit_test/ |
color_test.cc | 23 // TODO(fbarchard): Port high accuracy YUV to RGB to Neon. 97 /* Start with YUV converted to ARGB. */ \ 165 /* YUV converted to ARGB. */ 191 /* YUV converted to ARGB. */ 212 /* YUV converted to ARGB. */ 229 /* YUV converted to ARGB. */
|
/external/opencv3/modules/videoio/src/ |
cap_xine.cpp | 131 IplImage * yuv = capture->yuv_frame; local 140 int line = yuv->widthStep; 162 yuv->imageData[ offset ] = *( addr_Y++ ); 163 yuv->imageData[ offset + 1 ] = *addr_U; 164 yuv->imageData[ offset + 2 ] = *addr_V; 294 "icvOpenAVI_XINE( CvCaptureAVI_XINE *, const char *)", "couldn't create yuv frame");
|
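The loop above expands a planar 4:2:0 frame into an interleaved image, with one U/V pair shared by each 2x2 block of luma samples. The standalone sketch below shows the same indexing, assuming I420 plane order and even dimensions; the helper name is illustrative.

    // Sketch: expand planar I420 (full-size Y plane, then quarter-size U and V)
    // into one interleaved Y/U/V triplet per pixel.
    final class I420ToInterleavedSketch {
        static byte[] toInterleaved(byte[] i420, int width, int height) {
            byte[] out = new byte[width * height * 3];
            int uBase = width * height;
            int vBase = uBase + (width / 2) * (height / 2);
            for (int row = 0; row < height; row++) {
                for (int col = 0; col < width; col++) {
                    int chroma = (row / 2) * (width / 2) + (col / 2); // shared by a 2x2 block
                    int offset = (row * width + col) * 3;
                    out[offset]     = i420[row * width + col];   // Y
                    out[offset + 1] = i420[uBase + chroma];      // U
                    out[offset + 2] = i420[vBase + chroma];      // V
                }
            }
            return out;
        }
    }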
/external/webrtc/talk/app/webrtc/java/android/org/webrtc/ |
VideoRendererGui.java | 49 * Efficiently renders YUV frames using the GPU for CSC. 64 // If true then for every newly created yuv image renderer createTexture() 70 // List of yuv renderers. 488 "Attempt to create yuv renderer before setting GLSurfaceView"); 515 // Add yuv renderer to rendering list. 527 "Attempt to update yuv renderer before setting GLSurfaceView");
|
/hardware/qcom/camera/usbcamcore/src/ |
QCameraMjpegDecode.cpp | 382 jpeg_buffer_init(&p_output_buffers.data.yuv.luma_buf); 383 jpeg_buffer_init(&p_output_buffers.data.yuv.chroma_buf); 408 p_output_buffers.data.yuv.luma_buf, 413 p_output_buffers.data.yuv.chroma_buf, 527 jpeg_buffer_destroy(&p_output_buffers.data.yuv.luma_buf); 528 jpeg_buffer_destroy(&p_output_buffers.data.yuv.chroma_buf);
|
/ndk/docs/Additional_library_docs/renderscript/ |
classandroid_1_1RSC_1_1Type.html | 75 <p>A <a class="el" href="classandroid_1_1RSC_1_1Type.html">Type</a> also supports YUV format information to support an <a class="el" href="classandroid_1_1RSC_1_1Allocation.html">Allocation</a> in a YUV format. The YUV formats supported are YV12 and NV21. </p> 170 <dl class="return"><dt><b>Returns:</b></dt><dd>YUV format of the <a class="el" href="classandroid_1_1RSC_1_1Allocation.html">Allocation</a> </dd></dl> 249 <p>Returns the YUV format. </p> 250 <dl class="return"><dt><b>Returns:</b></dt><dd>YUV format of the <a class="el" href="classandroid_1_1RSC_1_1Allocation.html">Allocation</a> </dd></dl>
|
/external/libjpeg-turbo/java/ |
TJBench.java | 214 (doYUV ? "Decomp to YUV":"Decompress "), 219 System.out.format("YUV Decode --> Frame rate: %f fps\n", 374 System.out.format("Encode YUV --> Frame rate: %f fps\n", 386 doYUV ? "Comp from YUV" : "Compress ", 633 System.out.println("-yuv = Test YUV encoding/decoding functions"); 634 System.out.println("-yuvpad <p> = If testing YUV encoding/decoding, this specifies the number of"); 635 System.out.println(" bytes to which each row of each plane in the intermediate YUV image is"); 791 if (argv[i].equalsIgnoreCase("-yuv")) { 792 System.out.println("Testing YUV planar encoding/decoding\n") [all...] |
/cts/tests/camera/src/android/hardware/camera2/cts/ |
ExtendedCameraCharacteristicsTest.java | 229 assertTrue("Full device FullHD YUV size not found", yuvSupportFullHD); 254 fail("Size " + s + " not found in YUV format"); 524 // Ensure that max YUV size matches max JPEG size [all...] |
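The assertions above verify the YUV output sizes a device advertises. Below is a sketch of that kind of check with the public camera2 API, not the test's own code; the helper name is illustrative.

    import android.graphics.ImageFormat;
    import android.hardware.camera2.CameraCharacteristics;
    import android.hardware.camera2.params.StreamConfigurationMap;
    import android.util.Size;

    // Sketch: does this camera advertise a 1920x1080 YUV_420_888 output size?
    final class YuvSizeCheckSketch {
        static boolean supportsFullHdYuv(CameraCharacteristics chars) {
            StreamConfigurationMap map =
                    chars.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            if (map == null) return false;
            Size[] sizes = map.getOutputSizes(ImageFormat.YUV_420_888);
            if (sizes == null) return false;
            for (Size s : sizes) {
                if (s.getWidth() == 1920 && s.getHeight() == 1080) return true;
            }
            return false;
        }
    }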
/external/kernel-headers/original/uapi/linux/ |
videodev2.h | 512 #define V4L2_PIX_FMT_YUYV v4l2_fourcc('Y', 'U', 'Y', 'V') /* 16 YUV 4:2:2 */ 513 #define V4L2_PIX_FMT_YYUV v4l2_fourcc('Y', 'Y', 'U', 'V') /* 16 YUV 4:2:2 */ 515 #define V4L2_PIX_FMT_UYVY v4l2_fourcc('U', 'Y', 'V', 'Y') /* 16 YUV 4:2:2 */ 516 #define V4L2_PIX_FMT_VYUY v4l2_fourcc('V', 'Y', 'U', 'Y') /* 16 YUV 4:2:2 */ 519 #define V4L2_PIX_FMT_Y41P v4l2_fourcc('Y', '4', '1', 'P') /* 12 YUV 4:1:1 */ 521 #define V4L2_PIX_FMT_YUV555 v4l2_fourcc('Y', 'U', 'V', 'O') /* 16 YUV-5-5-5 */ 522 #define V4L2_PIX_FMT_YUV565 v4l2_fourcc('Y', 'U', 'V', 'P') /* 16 YUV-5-6-5 */ 523 #define V4L2_PIX_FMT_YUV32 v4l2_fourcc('Y', 'U', 'V', '4') /* 32 YUV-8-8-8-8 */ 524 #define V4L2_PIX_FMT_YUV410 v4l2_fourcc('Y', 'U', 'V', '9') /* 9 YUV 4:1:0 */ 525 #define V4L2_PIX_FMT_YUV420 v4l2_fourcc('Y', 'U', '1', '2') /* 12 YUV 4:2:0 * [all...] |
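v4l2_fourcc() packs four ASCII characters little-endian into a 32-bit pixel-format code, which is all these defines do. Below is a plain-Java restatement of the packing for reference; the class name is illustrative.

    // Same bit layout as the v4l2_fourcc() macro: byte 0 is the first character.
    final class FourccSketch {
        static int fourcc(char a, char b, char c, char d) {
            return (a & 0xFF) | ((b & 0xFF) << 8) | ((c & 0xFF) << 16) | ((d & 0xFF) << 24);
        }

        public static void main(String[] args) {
            // 'Y','U','Y','V' -> 0x56595559, matching V4L2_PIX_FMT_YUYV.
            System.out.printf("YUYV = 0x%08X%n", fourcc('Y', 'U', 'Y', 'V'));
        }
    }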
/external/libvpx/libvpx/test/ |
test-data.sha1 | 1 d5dfb0151c9051f8c85999255645d7a23916d3c0 *hantro_collage_w352h288.yuv 2 b87815bf86020c592ccc7a846ba2e28ec8043902 *hantro_odd.yuv 20 c934da6fb8cc54ee2a8c17c54cf6076dac37ead0 *park_joy_90p_10_440.yuv 24 82c1bfcca368c2f22bad7d693d690d5499ecdd11 *park_joy_90p_12_440.yuv 29 81e1f3843748438b8f2e71db484eb22daf72e939 *park_joy_90p_8_440.yuv [all...] |
/hardware/qcom/msm8994/original-kernel-headers/linux/ |
videodev2.h | 325 #define V4L2_PIX_FMT_YUYV v4l2_fourcc('Y', 'U', 'Y', 'V') /* 16 YUV 4:2:2 */ 326 #define V4L2_PIX_FMT_YYUV v4l2_fourcc('Y', 'Y', 'U', 'V') /* 16 YUV 4:2:2 */ 328 #define V4L2_PIX_FMT_UYVY v4l2_fourcc('U', 'Y', 'V', 'Y') /* 16 YUV 4:2:2 */ 329 #define V4L2_PIX_FMT_VYUY v4l2_fourcc('V', 'Y', 'U', 'Y') /* 16 YUV 4:2:2 */ 332 #define V4L2_PIX_FMT_Y41P v4l2_fourcc('Y', '4', '1', 'P') /* 12 YUV 4:1:1 */ 334 #define V4L2_PIX_FMT_YUV555 v4l2_fourcc('Y', 'U', 'V', 'O') /* 16 YUV-5-5-5 */ 335 #define V4L2_PIX_FMT_YUV565 v4l2_fourcc('Y', 'U', 'V', 'P') /* 16 YUV-5-6-5 */ 336 #define V4L2_PIX_FMT_YUV32 v4l2_fourcc('Y', 'U', 'V', '4') /* 32 YUV-8-8-8-8 */ 337 #define V4L2_PIX_FMT_YUV410 v4l2_fourcc('Y', 'U', 'V', '9') /* 9 YUV 4:1:0 */ 338 #define V4L2_PIX_FMT_YUV420 v4l2_fourcc('Y', 'U', '1', '2') /* 12 YUV 4:2:0 * [all...] |
/hardware/qcom/msm8996/original-kernel-headers/linux/ |
videodev2.h | 348 #define V4L2_PIX_FMT_YUYV v4l2_fourcc('Y', 'U', 'Y', 'V') /* 16 YUV 4:2:2 */ 349 #define V4L2_PIX_FMT_YYUV v4l2_fourcc('Y', 'Y', 'U', 'V') /* 16 YUV 4:2:2 */ 351 #define V4L2_PIX_FMT_UYVY v4l2_fourcc('U', 'Y', 'V', 'Y') /* 16 YUV 4:2:2 */ 352 #define V4L2_PIX_FMT_VYUY v4l2_fourcc('V', 'Y', 'U', 'Y') /* 16 YUV 4:2:2 */ 355 #define V4L2_PIX_FMT_Y41P v4l2_fourcc('Y', '4', '1', 'P') /* 12 YUV 4:1:1 */ 357 #define V4L2_PIX_FMT_YUV555 v4l2_fourcc('Y', 'U', 'V', 'O') /* 16 YUV-5-5-5 */ 358 #define V4L2_PIX_FMT_YUV565 v4l2_fourcc('Y', 'U', 'V', 'P') /* 16 YUV-5-6-5 */ 359 #define V4L2_PIX_FMT_YUV32 v4l2_fourcc('Y', 'U', 'V', '4') /* 32 YUV-8-8-8-8 */ 360 #define V4L2_PIX_FMT_YUV410 v4l2_fourcc('Y', 'U', 'V', '9') /* 9 YUV 4:1:0 */ 361 #define V4L2_PIX_FMT_YUV420 v4l2_fourcc('Y', 'U', '1', '2') /* 12 YUV 4:2:0 * [all...] |