    Searched refs:yuv (Results 26 - 50 of 89)


  /hardware/intel/img/hwcomposer/moorefield_hdmi/include/
DataBuffer.h 40 } yuv; member in union:android::intel::stride::__anon30377
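For reference, a minimal sketch of the kind of stride union this DataBuffer.h member belongs to, as implied by the OverlayPlaneBase snippets below; the surrounding type and field names here are assumptions, not the actual Intel hwcomposer definition:

    /* Hypothetical stride descriptor: either one RGB stride or separate
       Y/UV strides for planar YUV buffers. */
    typedef union {
        struct {
            unsigned int stride;    /* bytes per row, RGB surfaces */
        } rgb;
        struct {
            unsigned int yStride;   /* bytes per row, luma plane */
            unsigned int uvStride;  /* bytes per row, chroma plane(s) */
        } yuv;
    } BufferStride;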
  /hardware/intel/img/hwcomposer/moorefield_hdmi/ips/common/
OverlayPlaneBase.cpp 394 stride.yuv.yStride = yStride;
395 stride.yuv.uvStride = uvStride;
400 stride.yuv.yStride = yStride;
401 stride.yuv.uvStride = uvStride;
407 stride.yuv.yStride = yStride;
408 stride.yuv.uvStride = uvStride;
414 stride.yuv.yStride = yStride;
415 stride.yuv.uvStride = uvStride;
667 uint32_t yStride = mapper.getStride().yuv.yStride;
668 uint32_t uvStride = mapper.getStride().yuv.uvStride
    [all...]
  /external/opencv3/modules/videoio/src/
cap_xine.cpp 131 IplImage * yuv = capture->yuv_frame; local
140 int line = yuv->widthStep;
162 yuv->imageData[ offset ] = *( addr_Y++ );
163 yuv->imageData[ offset + 1 ] = *addr_U;
164 yuv->imageData[ offset + 2 ] = *addr_V;
294 "icvOpenAVI_XINE( CvCaptureAVI_XINE *, const char *)", "couldn't create yuv frame");
  /hardware/intel/img/hwcomposer/merrifield/ips/common/
OverlayPlaneBase.cpp 409 stride.yuv.yStride = yStride;
410 stride.yuv.uvStride = uvStride;
415 stride.yuv.yStride = yStride;
416 stride.yuv.uvStride = uvStride;
421 stride.yuv.yStride = payload->scaling_luma_stride;
422 stride.yuv.uvStride = payload->scaling_chroma_u_stride;
426 stride.yuv.yStride = yStride;
427 stride.yuv.uvStride = uvStride;
434 stride.yuv.yStride = yStride;
435 stride.yuv.uvStride = uvStride
    [all...]
  /cts/apps/CameraITS/pymodules/its/
device.py 89 CAP_YUV = {"format":"yuv"}
91 CAP_RAW_YUV = [{"format":"raw"}, {"format":"yuv"}]
92 CAP_DNG_YUV = [{"format":"dng"}, {"format":"yuv"}]
95 CAP_YUV_JPEG = [{"format":"yuv"}, {"format":"jpeg"}]
96 CAP_RAW_YUV_JPEG = [{"format":"raw"}, {"format":"yuv"}, {"format":"jpeg"}]
97 CAP_DNG_YUV_JPEG = [{"format":"dng"}, {"format":"yuv"}, {"format":"jpeg"}]
478 640x480 YUV surface without sending any data back. The caller needs to
484 format(s) of the captured image. The formats may be "yuv", "jpeg",
486 frame ("yuv") corresponding to a full sensor frame.
489 request images back in multiple formats (e.g.) raw+yuv, raw+jpeg
    [all...]
objects.py 162 ["jpg", "yuv", "raw", "raw10", "raw12"].
174 fmt_codes = {"raw":0x20, "raw10":0x25, "raw12":0x26,"yuv":0x23,
256 fmt = "yuv"
  /device/moto/shamu/camera/QCamera/HAL/usbcamcore/src/
QCameraMjpegDecode.cpp 382 jpeg_buffer_init(&p_output_buffers.data.yuv.luma_buf);
383 jpeg_buffer_init(&p_output_buffers.data.yuv.chroma_buf);
408 p_output_buffers.data.yuv.luma_buf,
413 p_output_buffers.data.yuv.chroma_buf,
527 jpeg_buffer_destroy(&p_output_buffers.data.yuv.luma_buf);
528 jpeg_buffer_destroy(&p_output_buffers.data.yuv.chroma_buf);
  /external/skia/src/codec/
SkJpegCodec.cpp 557 // It is possible to perform a YUV decode for any combination of
662 JSAMPARRAY yuv[3]; local
666 yuv[0] = &rowptrs[0]; // Y rows (DCTSIZE or 2 * DCTSIZE)
667 yuv[1] = &rowptrs[2 * DCTSIZE]; // U rows (DCTSIZE)
668 yuv[2] = &rowptrs[3 * DCTSIZE]; // V rows (DCTSIZE)
692 JDIMENSION linesRead = jpeg_read_raw_data(dinfo, yuv, numRowsPerBlock);
694 // FIXME: Handle incomplete YUV decodes without signalling an error.
726 JDIMENSION linesRead = jpeg_read_raw_data(dinfo, yuv, numRowsPerBlock);
728 // FIXME: Handle incomplete YUV decodes without signalling an error.
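The SkJpegCodec snippet splits one array of row pointers into Y, U, and V windows before calling jpeg_read_raw_data. A standalone sketch of that arrangement for 4:2:0 data, with simplified types standing in for libjpeg's JSAMPROW/JSAMPARRAY (the helper name and plane arguments are illustrative):

    #include <stddef.h>
    #define DCTSIZE 8                  /* as in libjpeg */
    typedef unsigned char *RowPtr;

    /* For each 4:2:0 iMCU band, supply 2*DCTSIZE luma rows and DCTSIZE rows
       for each chroma plane, carved out of a single rowptrs[] array. */
    static void setup_yuv_rowptrs(RowPtr rowptrs[4 * DCTSIZE], RowPtr *yuv[3],
                                  unsigned char *yPlane, size_t yRowBytes,
                                  unsigned char *uPlane, unsigned char *vPlane,
                                  size_t uvRowBytes, size_t bandTopLumaRow) {
        yuv[0] = &rowptrs[0];              /* Y rows (2 * DCTSIZE) */
        yuv[1] = &rowptrs[2 * DCTSIZE];    /* U rows (DCTSIZE) */
        yuv[2] = &rowptrs[3 * DCTSIZE];    /* V rows (DCTSIZE) */
        for (int i = 0; i < 2 * DCTSIZE; i++)
            yuv[0][i] = yPlane + (bandTopLumaRow + i) * yRowBytes;
        for (int i = 0; i < DCTSIZE; i++) {
            yuv[1][i] = uPlane + (bandTopLumaRow / 2 + i) * uvRowBytes;
            yuv[2][i] = vPlane + (bandTopLumaRow / 2 + i) * uvRowBytes;
        }
    }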
  /frameworks/rs/
rsType.cpp 122 // YUV only supports basic 2d
240 if (t->getDimYuv() != params->yuv) continue;
272 nt->mHal.state.dimYuv = params->yuv;
316 p.yuv = getDimYuv();
365 uint32_t dimY, uint32_t dimZ, bool mipmaps, bool faces, uint32_t yuv) {
375 p.yuv = yuv;
rsInternalDefines.h 220 uint32_t yuv; member in struct:__anon28937
  /hardware/qcom/camera/msmcobalt/usbcamcore/src/
QCameraMjpegDecode.cpp 382 jpeg_buffer_init(&p_output_buffers.data.yuv.luma_buf);
383 jpeg_buffer_init(&p_output_buffers.data.yuv.chroma_buf);
408 p_output_buffers.data.yuv.luma_buf,
413 p_output_buffers.data.yuv.chroma_buf,
527 jpeg_buffer_destroy(&p_output_buffers.data.yuv.luma_buf);
528 jpeg_buffer_destroy(&p_output_buffers.data.yuv.chroma_buf);
  /hardware/qcom/camera/usbcamcore/src/
QCameraMjpegDecode.cpp 382 jpeg_buffer_init(&p_output_buffers.data.yuv.luma_buf);
383 jpeg_buffer_init(&p_output_buffers.data.yuv.chroma_buf);
408 p_output_buffers.data.yuv.luma_buf,
413 p_output_buffers.data.yuv.chroma_buf,
527 jpeg_buffer_destroy(&p_output_buffers.data.yuv.luma_buf);
528 jpeg_buffer_destroy(&p_output_buffers.data.yuv.chroma_buf);
  /cts/tests/tests/media/src/android/media/cts/
Vp8CodecTestBase.java 82 // were calculated and were written to yuv file.
259 outputIvfBaseName + resolutionScales[i]+ ".yuv";
316 private static byte[] YUV420ToNV(int width, int height, byte[] yuv) {
317 byte[] nv = new byte[yuv.length];
319 System.arraycopy(yuv, 0, nv, 0, width * height);
326 nv[nv_offset++] = yuv[u_offset++];
327 nv[nv_offset++] = yuv[v_offset++];
338 byte[] yuv = new byte[width * height * 3 / 2];
342 System.arraycopy(nv12, i * stride, yuv, i * width, width);
352 yuv[u_offset++] = nv12[nv_offset++]
541 FileOutputStream yuv = null; local
    [all...]
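YUV420ToNV in the snippet above converts a fully planar I420 buffer into a semi-planar layout by copying the luma plane and interleaving the chroma planes. A C sketch of the same repack (NV12-style U-then-V ordering assumed for illustration):

    #include <string.h>

    /* Planar I420 (Y, then U, then V) -> semi-planar (Y, then interleaved UV). */
    static void i420_to_nv(const unsigned char *yuv, unsigned char *nv,
                           int width, int height) {
        int ySize  = width * height;
        int uvSize = ySize / 4;
        memcpy(nv, yuv, (size_t)ySize);                  /* luma is unchanged */
        const unsigned char *u = yuv + ySize;
        const unsigned char *v = yuv + ySize + uvSize;
        unsigned char *dst = nv + ySize;
        for (int i = 0; i < uvSize; i++) {
            *dst++ = *u++;                               /* U, V, U, V, ... */
            *dst++ = *v++;
        }
    }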
  /external/libdrm/tests/util/
format.c 43 .yuv = { (order), (xsub), (ysub), (chroma_stride) }
46 /* YUV packed */
51 /* YUV semi-planar */
56 /* YUV planar */
  /external/mesa3d/src/gallium/auxiliary/vl/
vl_compositor.h 110 void *yuv; member in struct:vl_compositor::__anon19042
127 * set yuv -> rgba conversion matrix
  /external/ImageMagick/coders/
Android.mk 147 yuv.c\
  /external/libvpx/libvpx/vp9/encoder/
vp9_denoiser.c 31 static void make_grayscale(YV12_BUFFER_CONFIG *yuv);
665 static void make_grayscale(YV12_BUFFER_CONFIG *yuv) {
667 uint8_t *u = yuv->u_buffer;
668 uint8_t *v = yuv->v_buffer;
670 for (r = 0; r < yuv->uv_height; ++r) {
671 for (c = 0; c < yuv->uv_width; ++c) {
675 u += yuv->uv_stride;
676 v += yuv->uv_stride;
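make_grayscale above walks both chroma planes of a YV12 buffer; a common way to gray out a 4:2:0 frame is to overwrite them with the neutral chroma value 128. A standalone sketch of that pattern (the struct is a trimmed stand-in for libvpx's YV12_BUFFER_CONFIG, and writing 128 is an assumption about what the loop body does):

    #include <stdint.h>

    typedef struct {
        uint8_t *u_buffer, *v_buffer;
        int uv_width, uv_height, uv_stride;
    } YuvFrame;

    /* Neutralize both chroma planes so the frame renders as grayscale. */
    static void make_grayscale_sketch(YuvFrame *yuv) {
        uint8_t *u = yuv->u_buffer;
        uint8_t *v = yuv->v_buffer;
        for (int r = 0; r < yuv->uv_height; ++r) {
            for (int c = 0; c < yuv->uv_width; ++c) {
                u[c] = 128;
                v[c] = 128;
            }
            u += yuv->uv_stride;
            v += yuv->uv_stride;
        }
    }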
  /hardware/intel/img/hwcomposer/merrifield/ips/anniedale/
PlaneCapabilities.cpp 130 if (stride.yuv.yStride > maxStride) {
131 VTRACE("stride %d is too large", stride.yuv.yStride);
  /external/mesa3d/src/gallium/auxiliary/util/
u_format.csv 140 # YUV formats
141 # http://www.fourcc.org/yuv.php#UYVY
142 PIPE_FORMAT_UYVY , subsampled, 2, 1, x32 , , , , xyz1, yuv
143 # http://www.fourcc.org/yuv.php#YUYV (a.k.a http://www.fourcc.org/yuv.php#YUY2)
144 PIPE_FORMAT_YUYV , subsampled, 2, 1, x32 , , , , xyz1, yuv
268 PIPE_FORMAT_YV12 , other, 1, 1, x8 , x8 , x8 , x8 , xyzw, yuv
269 PIPE_FORMAT_YV16 , other, 1, 1, x8 , x8 , x8 , x8 , xyzw, yuv
270 PIPE_FORMAT_IYUV , other, 1, 1, x8 , x8 , x8 , x8 , xyzw, yuv
271 PIPE_FORMAT_NV12 , other, 1, 1, x8 , x8 , x8 , x8 , xyzw, yuv
    [all...]
  /frameworks/rs/driver/
rsdAllocation.cpp 254 static size_t DeriveYUVLayout(int yuv, Allocation::Hal::DrvState *state) {
259 if (yuv == HAL_PIXEL_FORMAT_YCbCr_420_888) {
264 // YUV only supports basic 2d
271 state->yuv.shift = 1;
272 state->yuv.step = 1;
276 switch(yuv) {
296 state->yuv.step = 2;
322 if (alloc->mHal.state.yuv) {
323 o += DeriveYUVLayout(alloc->mHal.state.yuv, &alloc->mHal.drvState);
864 if (alloc->mHal.state.yuv) {
    [all...]
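DeriveYUVLayout above computes per-plane strides and offsets for a YUV allocation; the shift/step fields in the snippet suggest chroma subsampled by 2 in each axis ("shift") and planar chroma (step 1) versus interleaved semi-planar chroma (step 2). A sketch of that kind of derivation for a W x H 4:2:0 buffer, with illustrative names rather than RenderScript's actual fields:

    #include <stddef.h>

    typedef struct {
        size_t yStride, uvStride;
        size_t uOffset, vOffset;
    } YuvLayout;

    /* Returns the total allocation size in bytes (12 bits per pixel). */
    static size_t derive_yuv420_layout(size_t w, size_t h, int semiPlanar,
                                       YuvLayout *out) {
        out->yStride  = w;
        out->uvStride = semiPlanar ? w : w / 2;        /* step 2 vs step 1 */
        out->uOffset  = w * h;                         /* chroma follows luma */
        out->vOffset  = semiPlanar ? out->uOffset + 1  /* interleaved with U */
                                   : out->uOffset + (w / 2) * (h / 2);
        return w * h + 2 * (w / 2) * (h / 2);
    }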
  /external/libjpeg-turbo/
CMakeLists.txt 427 add_test(TJUnitTest-yuv
430 TJUnitTest -yuv)
431 add_test(TJUnitTest-yuv-nopad
434 TJUnitTest -yuv -noyuvpad)
439 add_test(TJUnitTest-bi-yuv
442 TJUnitTest -bi -yuv)
443 add_test(TJUnitTest-bi-yuv-nopad
446 TJUnitTest -bi -yuv -noyuvpad)
467 add_test(tjunittest${suffix}-yuv tjunittest${suffix} -yuv)
    [all...]
  /external/skia/src/pdf/
SkPDFBitmap.cpp 430 * This PDFObject assumes that its constructor was handed YUV or
481 bool yuv = info.fType == SkJFIFInfo::kYCbCr; local
487 return new PDFJpegBitmap(info.fSize, data, yuv);
497 bool yuv = info.fType == SkJFIFInfo::kYCbCr; local
499 return new PDFJpegBitmap(info.fSize, data, yuv);
  /external/webrtc/talk/media/base/
videoframe_unittest.h 53 #define kImageFilename "faces.1280x720_P420.yuv"
187 rtc::sprintfn(filename, sizeof(filename), "%s.%dx%d_P420.yuv",
241 // Create a test image for YUV 420 formats with 12 bits per pixel.
349 uint8_t yuv[4][3]; local
356 &yuv[i][0], &yuv[i][1], &yuv[i][2]);
358 frame->GetYPlane()[stride_y * y + x] = yuv[0][0];
360 frame->GetYPlane()[stride_y * y + x + 1] = yuv[1][0];
363 frame->GetYPlane()[stride_y * (y + 1) + x] = yuv[2][0]
    [all...]
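The videoframe_unittest.h snippet fills Y/U/V planes from per-pixel yuv[][] triplets produced by a color-conversion helper. For reference, a sketch of the common full-range BT.601 RGB-to-YCbCr conversion such test images typically use (a standard formula, not necessarily the exact helper in that file):

    #include <stdint.h>

    static uint8_t clamp_u8(double x) {
        if (x < 0.0)   x = 0.0;
        if (x > 255.0) x = 255.0;
        return (uint8_t)(x + 0.5);   /* round to nearest */
    }

    /* Full-range BT.601 RGB -> YCbCr, one pixel. */
    static void rgb_to_ycbcr(uint8_t r, uint8_t g, uint8_t b,
                             uint8_t *y, uint8_t *cb, uint8_t *cr) {
        *y  = clamp_u8( 0.299    * r + 0.587    * g + 0.114    * b);
        *cb = clamp_u8(-0.168736 * r - 0.331264 * g + 0.5      * b + 128.0);
        *cr = clamp_u8( 0.5      * r - 0.418688 * g - 0.081312 * b + 128.0);
    }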
  /frameworks/av/media/libstagefright/codecs/avc/enc/
SoftAVCEncoder.cpp 141 void *userData, int32_t index, uint8_t **yuv) {
144 return encoder->bindOutputBuffer(index, yuv);
715 int32_t SoftAVCEncoder::bindOutputBuffer(int32_t index, uint8_t **yuv) {
718 *yuv = (uint8_t *) mOutputBuffers[index]->data();
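The SoftAVCEncoder snippet registers a C callback that receives an opaque userData pointer, casts it back to the encoder, and hands out a pointer to the requested output buffer. A minimal standalone sketch of that callback-with-user-data pattern (types and names here are illustrative, not the PV AVC encoder API):

    #include <stdint.h>
    #include <stddef.h>

    typedef struct {
        uint8_t **outputBuffers;   /* table of preallocated YUV buffers */
        size_t    numBuffers;
    } Encoder;

    /* Callback: the library passes back the userData it was registered with. */
    static int bind_output_buffer_cb(void *userData, int32_t index, uint8_t **yuv) {
        Encoder *enc = (Encoder *)userData;
        if (index < 0 || (size_t)index >= enc->numBuffers)
            return -1;                       /* error */
        *yuv = enc->outputBuffers[index];    /* hand the buffer to the codec */
        return 0;                            /* success */
    }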
  /hardware/intel/img/hwcomposer/merrifield/ips/tangier/
TngOverlayPlane.cpp 145 mapper.getStride().yuv.yStride,
