    Searched full:yuv (Results 26 - 50 of 1091)


  /prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.11-4.8/sysroot/usr/include/linux/
ivtv.h 29 YUV streaming, YUV updates through user DMA and the passthrough
33 YUV mode you need to call IVTV_IOC_DMA_FRAME with y_source == NULL
41 All this means that if you want to change the YUV interlacing
42 for the user DMA YUV mode you first need to do call IVTV_IOC_DMA_FRAME
53 then just switch to user DMA YUV output mode */
  /prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8/sysroot/usr/include/linux/
ivtv.h 29 YUV streaming, YUV updates through user DMA and the passthrough
33 YUV mode you need to call IVTV_IOC_DMA_FRAME with y_source == NULL
41 All this means that if you want to change the YUV interlacing
42 for the user DMA YUV mode you first need to do call IVTV_IOC_DMA_FRAME
53 then just switch to user DMA YUV output mode */
  /external/opencv3/3rdparty/include/ffmpeg_/libavutil/
pixfmt.h 68 AV_PIX_FMT_YUV420P, ///< planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
69 AV_PIX_FMT_YUYV422, ///< packed YUV 4:2:2, 16bpp, Y0 Cb Y1 Cr
72 AV_PIX_FMT_YUV422P, ///< planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
73 AV_PIX_FMT_YUV444P, ///< planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples)
74 AV_PIX_FMT_YUV410P, ///< planar YUV 4:1:0, 9bpp, (1 Cr & Cb sample per 4x4 Y samples)
75 AV_PIX_FMT_YUV411P, ///< planar YUV 4:1:1, 12bpp, (1 Cr & Cb sample per 4x1 Y samples)
80 AV_PIX_FMT_YUVJ420P, ///< planar YUV 4:2:0, 12bpp, full scale (JPEG), deprecated in favor of PIX_FMT_YUV420P and setting color_range
81 AV_PIX_FMT_YUVJ422P, ///< planar YUV 4:2:2, 16bpp, full scale (JPEG), deprecated in favor of PIX_FMT_YUV422P and setting color_range
82 AV_PIX_FMT_YUVJ444P, ///< planar YUV 4:4:4, 24bpp, full scale (JPEG), deprecated in favor of PIX_FMT_YUV444P and setting color_range
85 AV_PIX_FMT_UYVY422, ///< packed YUV 4:2:2, 16bpp, Cb Y0 Cr Y
    [all...]
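The pixfmt.h comments above encode each format's chroma subsampling (for example, one Cb and one Cr sample per 2x2 block of Y samples for 4:2:0). As a worked illustration of how those ratios turn into buffer sizes, here is a minimal sketch for planar YUV 4:2:0, the layout behind AV_PIX_FMT_YUV420P; the class name is illustrative and belongs to none of the projects listed here.

    // Sketch: plane sizes for planar YUV 4:2:0 (12 bpp), derived from the
    // subsampling described in the pixfmt.h comments above.
    final class Yuv420PlaneSizes {
        final int ySize, uSize, vSize;

        Yuv420PlaneSizes(int width, int height) {
            ySize = width * height;                         // one Y sample per pixel
            uSize = ((width + 1) / 2) * ((height + 1) / 2); // one Cb per 2x2 Y block
            vSize = uSize;                                  // one Cr per 2x2 Y block
        }

        int totalBytes() {
            return ySize + uSize + vSize;  // 1.5 bytes/pixel, i.e. 12 bpp
        }
    }

For a 320x240 frame this gives 76800 + 19200 + 19200 = 115200 bytes, matching the 12 bpp figure in the comments.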
old_pix_fmts.h 32 PIX_FMT_YUV420P, ///< planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
33 PIX_FMT_YUYV422, ///< packed YUV 4:2:2, 16bpp, Y0 Cb Y1 Cr
36 PIX_FMT_YUV422P, ///< planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
37 PIX_FMT_YUV444P, ///< planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples)
38 PIX_FMT_YUV410P, ///< planar YUV 4:1:0, 9bpp, (1 Cr & Cb sample per 4x4 Y samples)
39 PIX_FMT_YUV411P, ///< planar YUV 4:1:1, 12bpp, (1 Cr & Cb sample per 4x1 Y samples)
44 PIX_FMT_YUVJ420P, ///< planar YUV 4:2:0, 12bpp, full scale (JPEG), deprecated in favor of PIX_FMT_YUV420P and setting color_range
45 PIX_FMT_YUVJ422P, ///< planar YUV 4:2:2, 16bpp, full scale (JPEG), deprecated in favor of PIX_FMT_YUV422P and setting color_range
46 PIX_FMT_YUVJ444P, ///< planar YUV 4:4:4, 24bpp, full scale (JPEG), deprecated in favor of PIX_FMT_YUV444P and setting color_range
49 PIX_FMT_UYVY422, ///< packed YUV 4:2:2, 16bpp, Cb Y0 Cr Y
    [all...]
  /frameworks/base/graphics/java/android/graphics/
YuvImage.java 22 * YuvImage contains YUV data and provides a method that compresses a region of
23 * the YUV data to a Jpeg. The YUV data should be provided as a single byte
27 * To compress a rectangle region in the YUV data, users have to specify the
39 * The YUV format as defined in {@link ImageFormat}.
44 * The raw YUV data.
68 * @param yuv The YUV data. In the case of more than one image plane, all the planes must be
70 * @param format The YUV data format as defined in {@link ImageFormat}.
73 * @param strides (Optional) Row bytes of each image plane. If yuv contains padding, the strid
    [all...]
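The YuvImage.java documentation above describes compressing a rectangular region of YUV data (NV21 or YUY2) to JPEG. A minimal usage sketch, assuming the NV21 buffer and its dimensions come from a camera preview callback; the class and method names are only for illustration:

    import android.graphics.ImageFormat;
    import android.graphics.Rect;
    import android.graphics.YuvImage;
    import java.io.ByteArrayOutputStream;

    final class Nv21Jpeg {
        // Sketch: compress a whole NV21 frame to JPEG. Passing null strides
        // tells YuvImage to assume tightly packed rows.
        static byte[] toJpeg(byte[] nv21, int width, int height, int quality) {
            YuvImage image = new YuvImage(nv21, ImageFormat.NV21, width, height, null);
            ByteArrayOutputStream out = new ByteArrayOutputStream();
            // A smaller Rect would compress only that region of the frame.
            image.compressToJpeg(new Rect(0, 0, width, height), quality, out);
            return out.toByteArray();
        }
    }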
  /external/webrtc/webrtc/modules/
modules_tests.isolate 15 '<(DEPTH)/resources/foreman_cif.yuv',
16 '<(DEPTH)/resources/paris_qcif.yuv',
  /cts/tests/camera/src/android/hardware/camera2/cts/rs/
ScriptYuvToRgb.java 28 * Convert {@link ImageFormat#YUV_420_888 flexible-YUV} {@link Allocation allocations} into
43 ScriptIntrinsicYuvToRGB.create(getRS(), Element.YUV(getRS())));
46 // XX: Supports any YUV 4:2:0 such as NV21/YV12 or just YUV_420_888 ?
47 if (!inputInfo.isElementEqualTo(ElementInfo.YUV)) {
  /external/autotest/client/site_tests/video_VEAPerf/
control.h264 39 ('tulip2/tulip2-1280x720-1b95123232922fe0067869c74e19cd09.yuv', True, 1280, 720, 1200000, 1, 30),
40 ('tulip2/tulip2-640x360-094bd827de18ca196a83cc6442b7b02f.yuv', True, 640, 360, 500000, 1, 30),
41 ('tulip2/tulip2-320x180-55be7124b3aec1b72bfb57f433297193.yuv', True, 320, 180, 100000, 1, 30),
42 ('tulip2/tulip2-240x136-ed58afc99d08e5d21013c1a5056e43bf.yuv', True, 240, 136, 100000, 1, 30)
control.vp8 39 ('tulip2/tulip2-1280x720-1b95123232922fe0067869c74e19cd09.yuv', True, 1280, 720, 1200000, 11, 30),
40 ('tulip2/tulip2-640x360-094bd827de18ca196a83cc6442b7b02f.yuv', True, 640, 360, 500000, 11, 30),
41 ('tulip2/tulip2-320x180-55be7124b3aec1b72bfb57f433297193.yuv', True, 320, 180, 100000, 11, 30),
42 ('tulip2/tulip2-240x136-ed58afc99d08e5d21013c1a5056e43bf.yuv', True, 240, 136, 100000, 11, 30)
  /external/libhevc/test/decoder/
test.cfg 4 --output /data/local/tmp/hevcdec/out.yuv
  /external/libvpx/libvpx/third_party/libyuv/
README.libvpx 8 libyuv is an open source project that includes YUV conversion and scaling
  /external/libyuv/
README.google 7 libyuv is an open-source library for yuv scaling, conversion, comparison
  /cts/apps/CameraITS/tools/
convert_yuv_to_jpg.py 22 filename.yuv: The YUV420 file to open.
28 print "Usage: python %s <filename.yuv> <w> <h> <layout>"%(sys.argv[0])
33 its.image.write_image(img, fname.replace(".yuv",".jpg"), False)
  /external/webrtc/webrtc/common_video/libyuv/
scaler_unittest.cc 62 webrtc::test::ResourcePath("foreman_cif", "yuv");
127 "LibYuvTest_PointScale_176_144.yuv";
135 "upfrom_176_144.yuv";
147 out_name = webrtc::test::OutputPath() + "LibYuvTest_PointScale_320_240.yuv";
152 out_name = webrtc::test::OutputPath() + "LibYuvTest_PointScale_704_576.yuv";
157 out_name = webrtc::test::OutputPath() + "LibYuvTest_PointScale_300_200.yuv";
162 out_name = webrtc::test::OutputPath() + "LibYuvTest_PointScale_400_300.yuv";
168 out_name = webrtc::test::OutputPath() + "LibYuvTest_PointScale_282_231.yuv";
175 "upfrom_282_231.yuv";
199 "LibYuvTest_BilinearScale_176_144.yuv";
    [all...]
  /cts/apps/CameraITS/tests/scene1/
test_jpeg.py 24 """Test that converted YUV images and device JPEG images look the same.
38 # YUV
39 size = its.objects.get_available_output_sizes("yuv", props)[0]
40 out_surface = {"width":size[0], "height":size[1], "format":"yuv"}
43 its.image.write_image(img, "%s_fmt=yuv.jpg" % (NAME))
test_yuv_plus_dng.py 22 """Test capturing a single frame as both DNG and YUV outputs.
37 "yuv", props, (1920, 1080), max_dng_size)[0]
39 {"format":"yuv", "width":w, "height":h}]
  /cts/tools/cts-test-metrics/
CtsCameraTestCases.reportlog.json 1 {"test_reprocessing_throughput":[{"camera_id":"0","format":35,"reprocess_type":"YUV reprocessing","capture_message":"capture latency","latency":[237.0,102.0,99.0,105.0,124.0,92.0],"camera_reprocessing_average_latency":126.5},{"camera_id":"0","format":34,"reprocess_type":"opaque reprocessing","capture_message":"capture latency","latency":[206.0,91.0,92.0,89.0,119.0,84.0],"camera_reprocessing_average_latency":113.5},{"camera_id":"1","format":35,"reprocess_type":"YUV reprocessing","capture_message":"capture latency","latency":[216.0,84.0,80.0,83.0,93.0,76.0],"camera_reprocessing_average_latency":105.33333333333333},{"camera_id":"1","format":34,"reprocess_type":"opaque reprocessing","capture_message":"capture latency","latency":[212.0,83.0,71.0,80.0,93.0,74.0],"camera_reprocessing_average_latency":102.16666666666667},{"camera_id":"0","format":35,"reprocess_type":"YUV reprocessing","capture_message":"capture latency","latency":[228.0,105.0,85.0,86.0,116.0,83.0],"camera_reprocessing_average_latency":117.16666666666667},{"camera_id":"0","format":34,"reprocess_type":"opaque reprocessing","capture_message":"capture latency","latency":[195.0,89.0,94.0,94.0,116.0,86.0],"camera_reprocessing_average_latency":112.33333333333333},{"camera_id":"1","format":35,"reprocess_type":"YUV reprocessing","capture_message":"capture latency","latency":[150.0,83.0,75.0,75.0,102.0,76.0],"camera_reprocessing_average_latency":93.5},{"camera_id":"1","format":34,"reprocess_type":"opaque reprocessing","capture_message":"capture latency","latency":[198.0,85.0,78.0,71.0,95.0,77.0],"camera_reprocessing_average_latency":100.66666666666667}],"test_camera_launch_average":[{"camera_launch_average_time_for_all_cameras":326.1},{"camera_launch_average_time_for_all_cameras":321.8}],"test_reprocessing_latency":[{"camera_id":"0","format":35,"reprocess_type":"YUV reprocessing","capture_message":"shot to shot latency","latency":[303.0,254.0,259.0,196.0,201.0,195.0],"camera_reprocessing_shot_to_shot_average_latency":234.66666666666666},{"camera_id":"0","format":34,"reprocess_type":"opaque reprocessing","capture_message":"shot to shot latency","latency":[248.0,172.0,209.0,188.0,201.0,204.0],"camera_reprocessing_shot_to_shot_average_latency":203.66666666666666},{"camera_id":"1","format":35,"reprocess_type":"YUV reprocessing","capture_message":"shot to shot latency","latency":[190.0,238.0,220.0,213.0,144.0,154.0],"camera_reprocessing_shot_to_shot_average_latency":193.16666666666666},{"camera_id":"1","format":34,"reprocess_type":"opaque reprocessing","capture_message":"shot to shot latency","latency":[237.0,166.0,153.0,148.0,162.0,140.0],"camera_reprocessing_shot_to_shot_average_latency":167.66666666666666},{"camera_id":"0","format":35,"reprocess_type":"YUV reprocessing","capture_message":"shot to shot latency","latency":[302.0,262.0,256.0,197.0,200.0,201.0],"camera_reprocessing_shot_to_shot_average_latency":236.33333333333334},{"camera_id":"0","format":34,"reprocess_type":"opaque reprocessing","capture_message":"shot to shot latency","latency":[251.0,166.0,199.0,199.0,213.0,201.0],"camera_reprocessing_shot_to_shot_average_latency":204.83333333333334},{"camera_id":"1","format":35,"reprocess_type":"YUV reprocessing","capture_message":"shot to shot latency","latency":[199.0,153.0,159.0,164.0,152.0,166.0],"camera_reprocessing_shot_to_shot_average_latency":165.5},{"camera_id":"1","format":34,"reprocess_type":"opaque reprocessing","capture_message":"shot to shot 
latency","latency":[210.0,143.0,161.0,162.0,158.0,156.0],"camera_reprocessing_shot_to_shot_average_latency":165.0}],"test_high_quality_reprocessing_latency":[{"camera_id":"0","format":35,"reprocess_type":"YUV reprocessing","capture_message":"shot to shot latency for High Quality noise reduction and edge modes","latency":[479.0,398.0,351.0,487.0,461.0,395.0],"camera_reprocessing_shot_to_shot_average_latency":428.5},{"camera_id":"0","format":34,"reprocess_type":"opaque reprocessing","capture_message":"shot to shot latency for High Quality noise reduction and edge modes","latency":[355.0,324.0,335.0,334.0,336.0,347.0],"camera_reprocessing_shot_to_shot_average_latency":338.5},{"camera_id":"1","format":35,"reprocess_type":"YUV reprocessing","capture_message":"shot to shot latency for High Quality noise reduction and edge mo (…)
    [all...]
  /external/skia/src/utils/
SkRGBAToYUV.cpp 17 // Matrices that go from RGBA to YUV.
34 static_assert(kLastEnum_SkYUVColorSpace == 2, "yuv color matrix array problem");
35 static_assert(kJPEG_SkYUVColorSpace == 0, "yuv color matrix array problem");
36 static_assert(kRec601_SkYUVColorSpace == 1, "yuv color matrix array problem");
37 static_assert(kRec709_SkYUVColorSpace == 2, "yuv color matrix array problem");
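SkRGBAToYUV.cpp stores one RGBA-to-YUV matrix per SkYUVColorSpace (JPEG, Rec.601, Rec.709). For a sense of what such a matrix encodes, the sketch below applies the standard full-range BT.601 ("JPEG") coefficients to a single 8-bit pixel; it is the textbook formula, not Skia's implementation.

    // Sketch: full-range BT.601 ("JPEG") RGB -> YUV for one 8-bit pixel.
    final class RgbToYuv {
        static int[] rgbToYuvJpeg(int r, int g, int b) {
            int y = (int) Math.round( 0.299 * r + 0.587 * g + 0.114 * b);
            int u = (int) Math.round(-0.169 * r - 0.331 * g + 0.500 * b) + 128;
            int v = (int) Math.round( 0.500 * r - 0.419 * g - 0.081 * b) + 128;
            return new int[] { clamp(y), clamp(u), clamp(v) };
        }

        static int clamp(int c) {
            return Math.max(0, Math.min(255, c));
        }
    }

The Rec.601 and Rec.709 limited-range variants use different coefficients and map Y into 16..235 and chroma into 16..240, which is why a separate matrix is kept per color space.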
  /frameworks/base/rs/java/android/renderscript/
ScriptIntrinsicYuvToRGB.java 21 * Intrinsic for converting an Android YUV buffer to RGB.
23 * The input allocation should be supplied in a supported YUV format
24 * as a YUV element Allocation. The output is RGBA; the alpha channel
35 * Create an intrinsic for converting YUV to RGB.
53 * Set the input yuv allocation, must be {@link Element#U8}.
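ScriptIntrinsicYuvToRGB is the RenderScript intrinsic behind both this framework class and the CTS ScriptYuvToRgb helper listed earlier. A minimal usage sketch; the RenderScript context and the two allocations (a YUV input and an RGBA output of matching size) are assumed to be created elsewhere:

    import android.renderscript.Allocation;
    import android.renderscript.Element;
    import android.renderscript.RenderScript;
    import android.renderscript.ScriptIntrinsicYuvToRGB;

    final class YuvToRgb {
        // Sketch: run the YUV -> RGBA intrinsic once over a pair of allocations.
        static void convert(RenderScript rs, Allocation yuvIn, Allocation rgbaOut) {
            ScriptIntrinsicYuvToRGB script =
                    ScriptIntrinsicYuvToRGB.create(rs, Element.U8_4(rs));
            script.setInput(yuvIn);    // YUV source allocation
            script.forEach(rgbaOut);   // writes converted RGBA pixels
            script.destroy();
        }
    }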
  /cts/tests/camera/src/android/hardware/camera2/cts/
RobustnessTest.java 176 {YUV, MAXIMUM},
180 {YUV, PREVIEW, JPEG, MAXIMUM},
184 {PRIV, PREVIEW, YUV, PREVIEW},
186 {PRIV, PREVIEW, YUV, PREVIEW, JPEG, MAXIMUM}
193 {PRIV, PREVIEW, YUV , RECORD },
195 {YUV , PREVIEW, YUV , RECORD },
199 {PRIV, PREVIEW, YUV, RECORD, JPEG, RECORD },
201 {YUV , PREVIEW, YUV, PREVIEW, JPEG, MAXIMUM
    [all...]
  /external/webrtc/webrtc/tools/converter/
rgba_to_i420_converter.cc 22 * A command-line tool based on libyuv to convert a set of RGBA files to a YUV
31 std::string usage = "Converts RGBA raw image files to I420 frames for YUV.\n"
33 " --frames_dir=. --output_file=output.yuv --width=320 --height=240\n"
44 " Default: output.yuv\n"
57 parser.SetFlag("output_file", "output.yuv");
84 fprintf(stdout, "Successful conversion of RGBA frames to YUV video!\n");
87 fprintf(stdout, "Unsuccessful conversion of RGBA frames to YUV video!\n");
  /cts/tests/tests/graphics/src/android/graphics/cts/
YuvImageTest.java 95 byte[] yuv = new byte[width * height * 2];
101 image = new YuvImage(yuv, mFormats[i], width, height, null);
119 image = new YuvImage(yuv, mFormats[i], width, height, null);
133 image = new YuvImage(yuv, format, -1, height, null);
141 image = new YuvImage(yuv, format, width, -1, null);
147 // abnormal case: yuv array is null
150 fail("not catching null yuv data");
205 byte[] yuv = convertArgbsToYuvs(argb, stride, height, ImageFormat.NV21);
209 YuvImage image = new YuvImage(yuv, ImageFormat.NV21, width, height, strides);
210 assertEquals(yuv, image.getYuvData())
    [all...]
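YuvImageTest.java above builds its NV21 test input from ARGB pixels (convertArgbsToYuvs) before round-tripping it through YuvImage. The sketch below shows that kind of ARGB-to-NV21 packing for even frame dimensions; it uses the same full-range BT.601 coefficients as the Skia note earlier and is not the CTS helper itself.

    // Sketch: pack ARGB pixels into an NV21 buffer (full-resolution Y plane
    // followed by interleaved V/U at quarter resolution). Assumes even
    // width and height; illustrative only.
    final class ArgbToNv21 {
        static byte[] convert(int[] argb, int width, int height) {
            byte[] nv21 = new byte[width * height * 3 / 2];
            int vu = width * height;                 // chroma plane follows Y plane
            for (int row = 0; row < height; row++) {
                for (int col = 0; col < width; col++) {
                    int c = argb[row * width + col];
                    int r = (c >> 16) & 0xff, g = (c >> 8) & 0xff, b = c & 0xff;
                    int y = (int) ( 0.299 * r + 0.587 * g + 0.114 * b);
                    int u = (int) (-0.169 * r - 0.331 * g + 0.500 * b) + 128;
                    int v = (int) ( 0.500 * r - 0.419 * g - 0.081 * b) + 128;
                    nv21[row * width + col] = (byte) Math.max(0, Math.min(255, y));
                    if (row % 2 == 0 && col % 2 == 0) {  // one V,U pair per 2x2 block
                        nv21[vu++] = (byte) Math.max(0, Math.min(255, v));
                        nv21[vu++] = (byte) Math.max(0, Math.min(255, u));
                    }
                }
            }
            return nv21;
        }
    }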
  /external/webrtc/webrtc/tools/frame_analyzer/
frame_analyzer.cc 31 * The video files should be 1420 YUV videos.
46 "--reference_file=ref.yuv --test_file=test.yuv --width=320 --height=240\n"
54 " after decoding of the received YUV video. Default: stats.txt\n"
55 " - reference_file(string): The reference YUV file to compare against."
56 " Default: ref.yuv\n"
57 " - test_file(string): The test YUV file to run the analysis for."
58 " Default: test_file.yuv\n";
70 parser.SetFlag("reference_file", "ref.yuv");
71 parser.SetFlag("test_file", "test.yuv");
    [all...]
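frame_analyzer.cc compares a decoded test YUV video against the reference YUV file frame by frame. A common metric in such tools is per-frame PSNR over the raw samples; the sketch below shows that calculation for one pair of equally sized 8-bit frames and is not the WebRTC implementation.

    // Sketch: PSNR between two 8-bit frames (e.g. the Y planes of a reference
    // and a decoded frame of identical size).
    final class FramePsnr {
        static double psnr(byte[] reference, byte[] test) {
            double mse = 0;
            for (int i = 0; i < reference.length; i++) {
                int d = (reference[i] & 0xff) - (test[i] & 0xff);
                mse += (double) d * d;
            }
            mse /= reference.length;
            if (mse == 0) {
                return Double.POSITIVE_INFINITY;  // identical frames
            }
            return 10 * Math.log10(255.0 * 255.0 / mse);
        }
    }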
  /frameworks/av/include/media/stagefright/
YUVImage.h 17 // A container class to hold YUV data and provide various utilities,
26 // to YUV channels for different formats:
32 // Location of any pixel's YUV channels can then be easily computed using these.
48 // Supported YUV formats
65 // Returns the size of the buffer required to store the YUV data for the given
77 // Get the pixel YUV value at pixel (x,y).
83 // Set the pixel YUV value at pixel (x,y).
108 // Convert the given YUV value to RGB.
117 // YUV Format of the image.
153 // for the YUV channels. Note that this corresponds to data rows and not pixel rows
    [all...]
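YUVImage.h above notes that once the plane offsets and row strides are known, the location of any pixel's Y, U and V samples can be computed directly. A sketch of that index arithmetic for a fully planar 4:2:0 (I420-style) buffer; the class and method names are illustrative, not those of YUVImage.h.

    // Sketch: byte offsets of the Y, U and V samples of pixel (x, y) in an
    // I420-style planar 4:2:0 buffer with per-plane row strides.
    final class I420Index {
        final int height, yStride, uvStride;

        I420Index(int height, int yStride, int uvStride) {
            this.height = height;
            this.yStride = yStride;
            this.uvStride = uvStride;
        }

        int yOffset(int x, int y) {
            return y * yStride + x;
        }

        int uOffset(int x, int y) {
            int uPlane = yStride * height;                        // U plane follows Y
            return uPlane + (y / 2) * uvStride + (x / 2);
        }

        int vOffset(int x, int y) {
            int vPlane = yStride * height + uvStride * ((height + 1) / 2);
            return vPlane + (y / 2) * uvStride + (x / 2);         // V plane follows U
        }
    }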
  /pdk/apps/TestingCamera/src/com/android/testingcamera/
callback.rs 95 // Makes up a conversion for unknown YUV types to try to display something
96 // Asssumes that there's at least 1bpp in input YUV data
107 // Apply yuv->rgb color transform
111 // Converts semiplanar YVU to interleaved YUV, nearest neighbor
126 // Apply yuv->rgb color transform
130 // Converts planar YVU to interleaved YUV, nearest neighbor
144 // Apply yuv->rgb color transform
148 // Converts interleaved 4:2:2 YUV to interleaved YUV, nearest neighbor
162 // Apply yuv->rgb color transfor
    [all...]

