/hardware/ti/omap3/omx/video/src/openmax_il/prepost_processor/src/ |
OMX_VPP_ImgConv.c | 177 to allocate a second frame buffer in 4:4:4 YUV space */
183 OMX_U8 *puu,*pvv,*pyy; /* pointers to U, V, and Y in the 4:4:4 YUV buffers */
185 OMX_U8 *puOut,*pvOut; /* pointers to U and V in the 4:4:4 YUV buffers, shifted by 3 lines */
187 OMX_U8 yKey,uKey,vKey; /* color key in YUV color space */
188 OMX_U8 nKeyMax1,nKeyMax2,nKeyMax3; /* color key range, used in RGB to detect the color key and in YUV to detect near-color-key values */
261 /* compute 4:4:4 YUV buffers from the RGB input buffer, converting the RGB color key to a Y color key of 0 and a UV color key of (0,0) */
311 yKey = (OMX_U8)((77*(OMX_S32)(iRKey) + 150*(OMX_S32)(iGKey) + 29*(OMX_S32)(iBKey))>>8); /* convert the RGB color key to YUV space */
|
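The snippet above derives the Y component of the color key with an 8-bit integer approximation of the BT.601 luma weights (77, 150, 29 ≈ 0.299, 0.587, 0.114 scaled by 256). A minimal standalone sketch of that kind of conversion follows; the U and V coefficients are the common full-range BT.601 integer approximations and are an assumption here, since the hit only shows the Y line.

    #include <stdint.h>
    #include <stdio.h>

    /* Convert an 8-bit RGB triple to full-range YUV with integer arithmetic.
     * The Y weights (77, 150, 29) match the snippet above; the U and V weights
     * are the usual full-range BT.601 approximations, assumed for illustration.
     * The +32768 bias folds in the +128 chroma offset and keeps the shifted
     * values non-negative. */
    static void rgb_to_yuv(uint8_t r, uint8_t g, uint8_t b,
                           uint8_t *y, uint8_t *u, uint8_t *v)
    {
        int32_t R = r, G = g, B = b;
        *y = (uint8_t)((  77 * R + 150 * G +  29 * B) >> 8);
        *u = (uint8_t)(( -43 * R -  85 * G + 128 * B + 32768) >> 8);
        *v = (uint8_t)(( 128 * R - 107 * G -  21 * B + 32768) >> 8);
    }

    int main(void)
    {
        uint8_t y, u, v;
        rgb_to_yuv(0, 255, 0, &y, &u, &v);   /* e.g. a pure-green color key */
        printf("Y=%d U=%d V=%d\n", y, u, v); /* prints Y=149 U=43 V=21 */
        return 0;
    }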
/cts/apps/CameraITS/pymodules/its/ |
objects.py | 162 ["jpg", "yuv", "raw", "raw10", "raw12"].
174 fmt_codes = {"raw":0x20, "raw10":0x25, "raw12":0x26,"yuv":0x23,
255 fmt = "yuv"
|
/device/huawei/angler/camera/QCamera2/HAL3/ |
QCamera3Channel.h | 364 /* QCamera3YUVChannel is used to handle flexible YUV streams that are directly
366 * It is also used to handle input buffers that generate YUV outputs */
424 /* QCamera3PicChannel is for JPEG stream, which contains a YUV stream generated
|
QCamera3Mem.h | 99 // parameters, metadata, and internal YUV data for jpeg encoding.
|
/device/lge/bullhead/camera/QCamera2/HAL3/ |
QCamera3Channel.h | 364 /* QCamera3YUVChannel is used to handle flexible YUV streams that are directly
366 * It is also used to handle input buffers that generate YUV outputs */
424 /* QCamera3PicChannel is for JPEG stream, which contains a YUV stream generated
|
QCamera3Mem.h | 99 // parameters, metadata, and internal YUV data for jpeg encoding.
|
/external/libjpeg-turbo/ |
tjbench.c | 144 _throwunix("allocating YUV buffer");
205 doyuv? "Decomp to YUV":"Decompress ", (double)iter/elapsed);
210 printf("YUV Decode --> Frame rate: %f fps\n",
333 _throwunix("allocating YUV buffer");
404 printf("Encode YUV --> Frame rate: %f fps\n",
415 doyuv? "Comp from YUV":"Compress ", (double)iter/elapsed);
733 printf("-yuv = Test YUV encoding/decoding functions\n");
734 printf("-yuvpad <p> = If testing YUV encoding/decoding, this specifies the number of\n");
735 printf(" bytes to which each row of each plane in the intermediate YUV image is\n") [all...] |
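tjbench times libjpeg-turbo's YUV code paths (compress from / decompress to a planar YUV intermediate, with per-row padding set by -yuvpad). Below is a minimal sketch of the encode-to-YUV step; it assumes the TurboJPEG 1.4+ API (tjEncodeYUV3, tjBufSizeYUV2) and a synthetic RGB frame, and is not tjbench's own benchmarking loop.

    #include <stdio.h>
    #include <stdlib.h>
    #include <turbojpeg.h>

    /* Encode an RGB buffer into a padded planar YUV 4:2:0 image,
     * roughly the operation tjbench exercises with -yuv / -yuvpad. */
    int main(void)
    {
        const int w = 640, h = 480, pad = 4;           /* -yuvpad style row padding */
        unsigned char *rgb = calloc((size_t)w * h, 3); /* placeholder RGB frame */
        tjhandle tj = tjInitCompress();
        if (!tj || !rgb) { fprintf(stderr, "init failed\n"); return 1; }

        unsigned long yuvSize = tjBufSizeYUV2(w, pad, h, TJSAMP_420);
        unsigned char *yuv = malloc(yuvSize);
        if (!yuv) { fprintf(stderr, "allocating YUV buffer\n"); return 1; }

        if (tjEncodeYUV3(tj, rgb, w, 0 /* pitch = w * 3 */, h, TJPF_RGB,
                         yuv, pad, TJSAMP_420, 0) != 0) {
            fprintf(stderr, "tjEncodeYUV3: %s\n", tjGetErrorStr());
            return 1;
        }
        printf("encoded %dx%d RGB into %lu bytes of YUV 4:2:0\n", w, h, yuvSize);

        free(yuv); free(rgb); tjDestroy(tj);
        return 0;
    }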
/external/opencv3/3rdparty/include/ffmpeg_/libavutil/ |
pixdesc.h | 51 * @note This is separate of the colorspace (RGB, YCbCr, YPbPr, JPEG-style YUV
52 * and all the YUV variants) AVPixFmtDescriptor just stores how values
110 * The pixel format contains RGB-like data (as opposed to YUV/grayscale).
|
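AVPixFmtDescriptor records how a pixel format lays out its samples (component count, chroma subsampling shifts, RGB-vs-YUV flag), independently of the colorspace used to interpret them. A hedged sketch of querying it follows; it assumes the modern libavutil naming (av_pix_fmt_desc_get, AV_PIX_FMT_FLAG_RGB), which may differ slightly in the older header bundled here.

    #include <stdio.h>
    #include <libavutil/pixdesc.h>
    #include <libavutil/pixfmt.h>

    /* Print whether a pixel format carries RGB-like or YUV/grayscale data,
     * plus its chroma subsampling, via the descriptor table in pixdesc.h. */
    static void describe(enum AVPixelFormat fmt)
    {
        const AVPixFmtDescriptor *d = av_pix_fmt_desc_get(fmt);
        if (!d) return;
        printf("%-12s components=%d chroma_shift=%dx%d %s\n",
               d->name, d->nb_components, d->log2_chroma_w, d->log2_chroma_h,
               (d->flags & AV_PIX_FMT_FLAG_RGB) ? "RGB-like" : "YUV/grayscale");
    }

    int main(void)
    {
        describe(AV_PIX_FMT_YUV420P);  /* 3 components, 1x1 chroma shift, YUV */
        describe(AV_PIX_FMT_RGB24);    /* 3 components, no subsampling, RGB   */
        return 0;
    }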
/external/opencv3/3rdparty/include/ffmpeg_/libswscale/ |
swscale.h | 112 * Return a pointer to yuv<->rgb coefficients for the given colorspace
233 * @param table the yuv2rgb coefficients describing the output yuv space, normally ff_yuv2rgb_coeffs[x]
234 * @param inv_table the yuv2rgb coefficients describing the input yuv space, normally ff_yuv2rgb_coeffs[x]
|
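sws_getCoefficients() and sws_setColorspaceDetails() let the caller choose which YUV<->RGB matrix and range swscale applies. A hedged sketch follows, assuming the public libswscale API with modern FFmpeg naming; the SWS_CS_* constants and the 16.16 fixed-point brightness/contrast/saturation convention are as I recall them from the header, not quoted from it.

    #include <stdio.h>
    #include <libswscale/swscale.h>
    #include <libavutil/pixfmt.h>

    /* Build a YUV420P -> RGB24 scaler and declare that the input uses BT.709
     * limited-range coefficients while the RGB output is full range. */
    int main(void)
    {
        const int w = 1920, h = 1080;
        struct SwsContext *sws = sws_getContext(w, h, AV_PIX_FMT_YUV420P,
                                                w, h, AV_PIX_FMT_RGB24,
                                                SWS_BILINEAR, NULL, NULL, NULL);
        if (!sws) return 1;

        const int *bt709 = sws_getCoefficients(SWS_CS_ITU709);  /* input yuv space  */
        const int *dflt  = sws_getCoefficients(SWS_CS_DEFAULT); /* output yuv space */

        /* brightness = 0, contrast = saturation = 1.0 in 16.16 fixed point */
        if (sws_setColorspaceDetails(sws, bt709, 0 /* limited-range in */,
                                     dflt, 1 /* full-range out */,
                                     0, 1 << 16, 1 << 16) < 0)
            fprintf(stderr, "colorspace details not supported for this format\n");

        /* ... sws_scale(sws, srcSlices, srcStrides, 0, h, dstSlices, dstStrides); ... */
        sws_freeContext(sws);
        return 0;
    }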
/external/webp/src/dsp/ |
upsampling.c | 10 // YUV to RGB upsampling functions.
15 #include "./yuv.h"
24 // Fancy upsampling functions to convert YUV to RGB
|
upsampling_neon.c | 10 // NEON version of YUV to RGB upsampling functions.
23 #include "./yuv.h"
90 // YUV->RGB conversion
|
yuv.c | 10 // YUV->RGB conversion functions
14 #include "./yuv.h"
167 // ARGB -> YUV converters
|
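yuv.c and the upsampling files implement the per-pixel YUV->RGB step (plus fancy chroma upsampling) in fixed point. The sketch below shows a generic full-range BT.601 fixed-point conversion as an illustration only; libwebp's own constants, rounding, and chroma interpolation differ in detail.

    #include <stdint.h>
    #include <stdio.h>

    static uint8_t clamp8(int v) { return (uint8_t)(v < 0 ? 0 : v > 255 ? 255 : v); }

    /* Full-range BT.601 YUV -> RGB in 16.16 fixed point (generic illustration).
     * Assumes arithmetic right shift of negative values, as on gcc/clang. */
    static void yuv_to_rgb(uint8_t y, uint8_t u, uint8_t v,
                           uint8_t *r, uint8_t *g, uint8_t *b)
    {
        const int c = y, d = u - 128, e = v - 128;
        *r = clamp8(c + (( 91881 * e) >> 16));              /* 1.402 * V'            */
        *g = clamp8(c - (( 22554 * d + 46802 * e) >> 16));  /* 0.344 * U' + 0.714 * V' */
        *b = clamp8(c + ((116130 * d) >> 16));              /* 1.772 * U'            */
    }

    int main(void)
    {
        uint8_t r, g, b;
        yuv_to_rgb(149, 43, 21, &r, &g, &b);  /* roughly the YUV of pure green */
        printf("R=%d G=%d B=%d\n", r, g, b);
        return 0;
    }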
/frameworks/av/media/libstagefright/codecs/avc/common/include/ |
avcapi_common.h | 199 @param yuv The address of the yuv pointer returned to the AVC lib.
202 typedef int (*FunctionType_FrameBind)(void *userData, int indx, uint8 **yuv);
|
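FunctionType_FrameBind is the callback through which the AVC library asks the integrating application for the YUV buffer bound to a given frame index. A hedged sketch of a trivial implementation over a fixed frame pool is below; the pool, its sizing, and the 1/0 return convention are assumptions for illustration, since only the typedef is quoted above and the actual status codes live in the AVC headers.

    #include <stdlib.h>

    typedef unsigned char uint8;

    /* Matches the typedef quoted above:
     * typedef int (*FunctionType_FrameBind)(void *userData, int indx, uint8 **yuv); */
    #define NUM_FRAMES 4

    typedef struct {
        uint8 *frames[NUM_FRAMES];   /* preallocated YUV 4:2:0 frame buffers */
        int    frame_size;
    } FramePool;

    /* Hand back the YUV buffer for frame `indx`; returns 1 on success, 0 on
     * failure (assumed convention, not taken from the AVC headers). */
    static int my_frame_bind(void *userData, int indx, uint8 **yuv)
    {
        FramePool *pool = (FramePool *)userData;
        if (!pool || indx < 0 || indx >= NUM_FRAMES || !pool->frames[indx])
            return 0;
        *yuv = pool->frames[indx];
        return 1;
    }

    int main(void)
    {
        FramePool pool = { {0}, 320 * 240 * 3 / 2 };   /* QVGA YUV 4:2:0 */
        for (int i = 0; i < NUM_FRAMES; ++i)
            pool.frames[i] = malloc(pool.frame_size);

        uint8 *yuv = NULL;
        int ok = my_frame_bind(&pool, 0, &yuv);        /* what the AVC lib would call */
        (void)ok;

        for (int i = 0; i < NUM_FRAMES; ++i) free(pool.frames[i]);
        return 0;
    }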
/frameworks/native/include/media/hardware/ |
VideoAPI.h | 25 * Currently only supporting YUV
230 case MediaImage::MEDIA_IMAGE_TYPE_YUV: return "YUV";
247 case MediaImage2::MEDIA_IMAGE_TYPE_YUV: return "YUV";
|
/prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.11-4.8/sysroot/usr/include/linux/ |
videodev2.h | 299 #define V4L2_PIX_FMT_YUYV v4l2_fourcc('Y', 'U', 'Y', 'V') /* 16 YUV 4:2:2 */
300 #define V4L2_PIX_FMT_YYUV v4l2_fourcc('Y', 'Y', 'U', 'V') /* 16 YUV 4:2:2 */
302 #define V4L2_PIX_FMT_UYVY v4l2_fourcc('U', 'Y', 'V', 'Y') /* 16 YUV 4:2:2 */
303 #define V4L2_PIX_FMT_VYUY v4l2_fourcc('V', 'Y', 'U', 'Y') /* 16 YUV 4:2:2 */
306 #define V4L2_PIX_FMT_Y41P v4l2_fourcc('Y', '4', '1', 'P') /* 12 YUV 4:1:1 */
308 #define V4L2_PIX_FMT_YUV555 v4l2_fourcc('Y', 'U', 'V', 'O') /* 16 YUV-5-5-5 */
309 #define V4L2_PIX_FMT_YUV565 v4l2_fourcc('Y', 'U', 'V', 'P') /* 16 YUV-5-6-5 */
310 #define V4L2_PIX_FMT_YUV32 v4l2_fourcc('Y', 'U', 'V', '4') /* 32 YUV-8-8-8-8 */
311 #define V4L2_PIX_FMT_YUV410 v4l2_fourcc('Y', 'U', 'V', '9') /* 9 YUV 4:1:0 */
312 #define V4L2_PIX_FMT_YUV420 v4l2_fourcc('Y', 'U', '1', '2') /* 12 YUV 4:2:0 * [all...] |
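Each V4L2 pixel format is a fourcc built by packing four ASCII characters into a 32-bit code via v4l2_fourcc, and the trailing comments give bits per pixel and subsampling. The sketch below requests packed YUYV 4:2:2 from a capture device; /dev/video0 and the 640x480 size are illustrative placeholders.

    #include <fcntl.h>
    #include <stdio.h>
    #include <string.h>
    #include <sys/ioctl.h>
    #include <unistd.h>
    #include <linux/videodev2.h>

    /* Ask a capture device for 640x480 packed YUYV (YUV 4:2:2, 16 bpp). */
    int main(void)
    {
        int fd = open("/dev/video0", O_RDWR);          /* placeholder device node */
        if (fd < 0) { perror("open"); return 1; }

        struct v4l2_format fmt;
        memset(&fmt, 0, sizeof(fmt));
        fmt.type                = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        fmt.fmt.pix.width       = 640;
        fmt.fmt.pix.height      = 480;
        fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;   /* v4l2_fourcc('Y','U','Y','V') */
        fmt.fmt.pix.field       = V4L2_FIELD_NONE;

        if (ioctl(fd, VIDIOC_S_FMT, &fmt) < 0) { perror("VIDIOC_S_FMT"); close(fd); return 1; }

        /* The driver may adjust the request; print what it actually granted. */
        printf("got %ux%u, fourcc %.4s, %u bytes/line\n",
               fmt.fmt.pix.width, fmt.fmt.pix.height,
               (char *)&fmt.fmt.pix.pixelformat, fmt.fmt.pix.bytesperline);

        close(fd);
        return 0;
    }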
/cts/apps/CameraITS/tests/inprog/ |
test_burst_sameness_auto.py | 57 # Capture bursts of YUV shots.
|
/cts/apps/CameraITS/tests/scene1/ |
test_burst_sameness_manual.py | 50 # Capture bursts of YUV shots.
|
test_raw_sensitivity.py | 57 # Capture raw+yuv, but only look at the raw.
|
/developers/build/prebuilts/gradle/HdrViewfinder/Application/src/main/java/com/example/android/hdrviewfinder/ |
ViewfinderProcessor.java | 58 Type.Builder yuvTypeBuilder = new Type.Builder(rs, Element.YUV(rs));
|
/developers/samples/android/media/HdrViewfinder/Application/src/main/java/com/example/android/hdrviewfinder/ |
ViewfinderProcessor.java | 58 Type.Builder yuvTypeBuilder = new Type.Builder(rs, Element.YUV(rs));
|
/development/perftests/panorama/feature_mos/src/mosaic/ |
ImageUtils.h | 150 * A YUV image container,
|
/development/samples/browseable/HdrViewfinder/src/com.example.android.hdrviewfinder/ |
ViewfinderProcessor.java | 58 Type.Builder yuvTypeBuilder = new Type.Builder(rs, Element.YUV(rs));
|
/development/tools/yuv420sp2rgb/ |
cmdline.c | 38 "Converts yuv 4:2:0 to rgb24 and generates a PPM file.\n"
|
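yuv420sp2rgb converts a semi-planar 4:2:0 frame (a full-resolution Y plane followed by an interleaved chroma plane) into RGB24 and wraps it in a PPM header. The sketch below illustrates the plane addressing and the P6 header; NV21 chroma ordering (V before U) and the simple float conversion are assumptions for illustration, not the tool's actual code.

    #include <stdio.h>
    #include <stdint.h>

    static uint8_t clamp8f(float v) { return (uint8_t)(v < 0 ? 0 : v > 255 ? 255 : v); }

    /* Convert a yuv420sp buffer (assumed NV21: Y plane, then interleaved V/U at
     * quarter resolution) into a binary PPM (P6) written to `out`. */
    static void yuv420sp_to_ppm(const uint8_t *buf, int w, int h, FILE *out)
    {
        const uint8_t *yplane  = buf;
        const uint8_t *vuplane = buf + (size_t)w * h;   /* chroma follows luma */

        fprintf(out, "P6\n%d %d\n255\n", w, h);
        for (int y = 0; y < h; ++y) {
            for (int x = 0; x < w; ++x) {
                int Y = yplane[(size_t)y * w + x];
                /* One V/U pair covers a 2x2 block of luma samples. */
                size_t uv = (size_t)(y / 2) * w + (x & ~1);
                int V = vuplane[uv] - 128, U = vuplane[uv + 1] - 128;
                uint8_t rgb[3] = {
                    clamp8f(Y + 1.402f * V),
                    clamp8f(Y - 0.344f * U - 0.714f * V),
                    clamp8f(Y + 1.772f * U),
                };
                fwrite(rgb, 1, 3, out);
            }
        }
    }

    int main(void)
    {
        const int w = 4, h = 4;
        uint8_t frame[4 * 4 * 3 / 2] = {0};   /* tiny test frame, all zero bytes */
        yuv420sp_to_ppm(frame, w, h, stdout);
        return 0;
    }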
/device/moto/shamu/camera/QCamera2/HAL3/ |
QCamera3Mem.h | 86 // parameters, metadata, and internal YUV data for jpeg encoding.
|
/external/ImageMagick/coders/ |
Android.mk | 147 yuv.c\
|