      1 /*
      2  * Copyright (C) 2010 The Android Open Source Project
      3  *
      4  * Licensed under the Apache License, Version 2.0 (the "License");
      5  * you may not use this file except in compliance with the License.
      6  * You may obtain a copy of the License at
      7  *
      8  *      http://www.apache.org/licenses/LICENSE-2.0
      9  *
     10  * Unless required by applicable law or agreed to in writing, software
     11  * distributed under the License is distributed on an "AS IS" BASIS,
     12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     13  * See the License for the specific language governing permissions and
     14  * limitations under the License.
     15  */
     16 
     17 package android.graphics;
     18 
     19 public class ImageFormat {
     20     /*
     21      * these constants are chosen to be binary compatible with their previous
     22      * location in PixelFormat.java
     23      */
     24 
     25     public static final int UNKNOWN = 0;
     26 
     27     /**
     28      * RGB format used for pictures encoded as RGB_565. See
     29      * {@link android.hardware.Camera.Parameters#setPictureFormat(int)}.
     30      */
     31     public static final int RGB_565 = 4;
     32 
     33     /**
     34      * <p>Android YUV format.</p>
     35      *
     36      * <p>This format is exposed to software decoders and applications.</p>
     37      *
     38      * <p>YV12 is a 4:2:0 YCrCb planar format comprised of a WxH Y plane followed
     39      * by (W/2) x (H/2) Cr and Cb planes.</p>
     40      *
     41      * <p>This format assumes
     42      * <ul>
     43      * <li>an even width</li>
     44      * <li>an even height</li>
     45      * <li>a horizontal stride multiple of 16 pixels</li>
     46      * <li>a vertical stride equal to the height</li>
     47      * </ul>
     48      * </p>
     49      *
     50      * <pre> y_size = stride * height
     51      * c_stride = ALIGN(stride/2, 16)
     52      * c_size = c_stride * height/2
     53      * size = y_size + c_size * 2
     54      * cr_offset = y_size
     55      * cb_offset = y_size + c_size</pre>
     56      *
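              * <p>For illustration only (not part of the API), a minimal sketch of computing
              * the plane sizes and offsets of a YV12 buffer from hypothetical even
              * {@code width} and {@code height} values, following the rules above:</p>
              *
              * <pre> int stride = ((width + 15) / 16) * 16;        // minimum stride: ALIGN(width, 16)
              * int ySize = stride * height;
              * int cStride = ((stride / 2 + 15) / 16) * 16;   // ALIGN(stride / 2, 16)
              * int cSize = cStride * height / 2;
              * int crOffset = ySize;                          // Cr plane follows the Y plane
              * int cbOffset = ySize + cSize;                  // Cb plane follows the Cr plane
              * int totalSize = ySize + 2 * cSize;</pre>
              *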
     57      * <p>For the {@link android.hardware.camera2} API, the {@link #YUV_420_888} format is
     58      * recommended for YUV output instead.</p>
     59      *
     60      * <p>For the older camera API, this format is guaranteed to be supported for
     61      * {@link android.hardware.Camera} preview images since API level 12; for earlier API versions,
      62      * check {@link android.hardware.Camera.Parameters#getSupportedPreviewFormats()}.</p>
     63      *
     64      * <p>Note that for camera preview callback use (see
     65      * {@link android.hardware.Camera#setPreviewCallback}), the
     66      * <var>stride</var> value is the smallest possible; that is, it is equal
     67      * to:
     68      *
     69      * <pre>stride = ALIGN(width, 16)</pre>
     70      *
     71      * @see android.hardware.Camera.Parameters#setPreviewCallback
     72      * @see android.hardware.Camera.Parameters#setPreviewFormat
     73      * @see android.hardware.Camera.Parameters#getSupportedPreviewFormats
     74      * </p>
     75      */
     76     public static final int YV12 = 0x32315659;
     77 
     78     /**
     79      * <p>Android Y8 format.</p>
     80      *
     81      * <p>Y8 is a YUV planar format comprised of a WxH Y plane only, with each pixel
     82      * being represented by 8 bits. It is equivalent to just the Y plane from {@link #YV12}
     83      * format.</p>
     84      *
     85      * <p>This format assumes
     86      * <ul>
     87      * <li>an even width</li>
     88      * <li>an even height</li>
     89      * <li>a horizontal stride multiple of 16 pixels</li>
     90      * </ul>
     91      * </p>
     92      *
     93      * <pre> y_size = stride * height </pre>
     94      *
     95      * <p>For example, the {@link android.media.Image} object can provide data
     96      * in this format from a {@link android.hardware.camera2.CameraDevice}
     97      * through a {@link android.media.ImageReader} object if this format is
     98      * supported by {@link android.hardware.camera2.CameraDevice}.</p>
     99      *
    100      * @see android.media.Image
    101      * @see android.media.ImageReader
    102      * @see android.hardware.camera2.CameraDevice
    103      *
    104      * @hide
    105      */
    106     public static final int Y8 = 0x20203859;
    107 
    108     /**
    109      * <p>Android Y16 format.</p>
    110      *
     111      * <p>Y16 is a YUV planar format comprised of a WxH Y plane, with each pixel
    112      * being represented by 16 bits. It is just like {@link #Y8}, but has 16
    113      * bits per pixel (little endian).</p>
    114      *
    115      * <p>This format assumes
    116      * <ul>
    117      * <li>an even width</li>
    118      * <li>an even height</li>
    119      * <li>a horizontal stride multiple of 16 pixels</li>
    120      * </ul>
    121      * </p>
    122      *
    123      * <pre> y_size = stride * height </pre>
    124      *
    125      * <p>For example, the {@link android.media.Image} object can provide data
    126      * in this format from a {@link android.hardware.camera2.CameraDevice}
    127      * through a {@link android.media.ImageReader} object if this format is
    128      * supported by {@link android.hardware.camera2.CameraDevice}.</p>
    129      *
    130      * @see android.media.Image
    131      * @see android.media.ImageReader
    132      * @see android.hardware.camera2.CameraDevice
    133      *
    134      * @hide
    135      */
    136     public static final int Y16 = 0x20363159;
    137 
    138     /**
    139      * YCbCr format, used for video.
    140      *
    141      * <p>For the {@link android.hardware.camera2} API, the {@link #YUV_420_888} format is
    142      * recommended for YUV output instead.</p>
    143      *
    144      * <p>Whether this format is supported by the old camera API can be determined by
    145      * {@link android.hardware.Camera.Parameters#getSupportedPreviewFormats()}.</p>
    146      *
    147      */
    148     public static final int NV16 = 0x10;
    149 
    150     /**
    151      * YCrCb format used for images, which uses the NV21 encoding format.
    152      *
    153      * <p>This is the default format
    154      * for {@link android.hardware.Camera} preview images, when not otherwise set with
    155      * {@link android.hardware.Camera.Parameters#setPreviewFormat(int)}.</p>
    156      *
    157      * <p>For the {@link android.hardware.camera2} API, the {@link #YUV_420_888} format is
    158      * recommended for YUV output instead.</p>
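              *
              * <p>For illustration only, a minimal sketch of requesting NV21 preview buffers
              * from the older {@link android.hardware.Camera} API; error handling and
              * preview-surface setup are omitted:</p>
              *
              * <pre> Camera camera = Camera.open();
              * Camera.Parameters params = camera.getParameters();
              * params.setPreviewFormat(ImageFormat.NV21);   // NV21 is already the default
              * camera.setParameters(params);
              * camera.setPreviewCallback(new Camera.PreviewCallback() {
              *     public void onPreviewFrame(byte[] data, Camera c) {
              *         // data holds one NV21 frame: a WxH Y plane followed by interleaved V/U samples
              *     }
              * });</pre>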
    159      */
    160     public static final int NV21 = 0x11;
    161 
    162     /**
    163      * YCbCr format used for images, which uses YUYV (YUY2) encoding format.
    164      *
    165      * <p>For the {@link android.hardware.camera2} API, the {@link #YUV_420_888} format is
    166      * recommended for YUV output instead.</p>
    167      *
    168      * <p>This is an alternative format for {@link android.hardware.Camera} preview images. Whether
    169      * this format is supported by the camera hardware can be determined by
    170      * {@link android.hardware.Camera.Parameters#getSupportedPreviewFormats()}.</p>
    171      */
    172     public static final int YUY2 = 0x14;
    173 
    174     /**
    175      * Compressed JPEG format.
    176      *
    177      * <p>This format is always supported as an output format for the
    178      * {@link android.hardware.camera2} API, and as a picture format for the older
     179      * {@link android.hardware.Camera} API.</p>
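              *
              * <p>For illustration only, a sketch of receiving JPEG output from the
              * {@link android.hardware.camera2} API through an {@link android.media.ImageReader};
              * the {@code width} and {@code height} values are placeholders and must match a
              * supported JPEG output size:</p>
              *
              * <pre> ImageReader reader = ImageReader.newInstance(width, height, ImageFormat.JPEG, 2);
              * // reader.getSurface() is added as an output target of the capture session.
              * Image image = reader.acquireLatestImage();
              * ByteBuffer jpegBytes = image.getPlanes()[0].getBuffer();   // the complete compressed JPEG
              * image.close();</pre>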
    180      */
    181     public static final int JPEG = 0x100;
    182 
    183     /**
    184      * <p>Multi-plane Android YUV 420 format</p>
    185      *
    186      * <p>This format is a generic YCbCr format, capable of describing any 4:2:0
    187      * chroma-subsampled planar or semiplanar buffer (but not fully interleaved),
    188      * with 8 bits per color sample.</p>
    189      *
    190      * <p>Images in this format are always represented by three separate buffers
    191      * of data, one for each color plane. Additional information always
    192      * accompanies the buffers, describing the row stride and the pixel stride
    193      * for each plane.</p>
    194      *
    195      * <p>The order of planes in the array returned by
    196      * {@link android.media.Image#getPlanes() Image#getPlanes()} is guaranteed such that
    197      * plane #0 is always Y, plane #1 is always U (Cb), and plane #2 is always V (Cr).</p>
    198      *
    199      * <p>The Y-plane is guaranteed not to be interleaved with the U/V planes
    200      * (in particular, pixel stride is always 1 in
    201      * {@link android.media.Image.Plane#getPixelStride() yPlane.getPixelStride()}).</p>
    202      *
    203      * <p>The U/V planes are guaranteed to have the same row stride and pixel stride
    204      * (in particular,
    205      * {@link android.media.Image.Plane#getRowStride() uPlane.getRowStride()}
    206      * == {@link android.media.Image.Plane#getRowStride() vPlane.getRowStride()} and
    207      * {@link android.media.Image.Plane#getPixelStride() uPlane.getPixelStride()}
     208      * == {@link android.media.Image.Plane#getPixelStride() vPlane.getPixelStride()}
    209      * ).</p>
    210      *
    211      * <p>For example, the {@link android.media.Image} object can provide data
    212      * in this format from a {@link android.hardware.camera2.CameraDevice}
    213      * through a {@link android.media.ImageReader} object.</p>
    214      *
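              * <p>For illustration only, a sketch of addressing individual samples in a
              * YUV_420_888 {@link android.media.Image} using the reported strides; {@code x}
              * and {@code y} are hypothetical pixel coordinates within the image bounds:</p>
              *
              * <pre> Image.Plane[] planes = image.getPlanes();
              * ByteBuffer yBuf = planes[0].getBuffer();
              * int luma = yBuf.get(y * planes[0].getRowStride() + x) & 0xFF;   // Y pixel stride is 1
              *
              * Image.Plane uPlane = planes[1];                                 // Cb plane; planes[2] is Cr
              * ByteBuffer uBuf = uPlane.getBuffer();
              * int chromaIndex = (y / 2) * uPlane.getRowStride() + (x / 2) * uPlane.getPixelStride();
              * int cb = uBuf.get(chromaIndex) & 0xFF;</pre>
              *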
    215      * @see android.media.Image
    216      * @see android.media.ImageReader
    217      * @see android.hardware.camera2.CameraDevice
    218      */
    219     public static final int YUV_420_888 = 0x23;
    220 
    221     /**
    222      * <p>Multi-plane Android YUV 422 format</p>
    223      *
    224      * <p>This format is a generic YCbCr format, capable of describing any 4:2:2
    225      * chroma-subsampled (planar, semiplanar or interleaved) format,
    226      * with 8 bits per color sample.</p>
    227      *
    228      * <p>Images in this format are always represented by three separate buffers
    229      * of data, one for each color plane. Additional information always
    230      * accompanies the buffers, describing the row stride and the pixel stride
    231      * for each plane.</p>
    232      *
    233      * <p>The order of planes in the array returned by
    234      * {@link android.media.Image#getPlanes() Image#getPlanes()} is guaranteed such that
    235      * plane #0 is always Y, plane #1 is always U (Cb), and plane #2 is always V (Cr).</p>
    236      *
    237      * <p>In contrast to the {@link #YUV_420_888} format, the Y-plane may have a pixel
    238      * stride greater than 1 in
    239      * {@link android.media.Image.Plane#getPixelStride() yPlane.getPixelStride()}.</p>
    240      *
    241      * <p>The U/V planes are guaranteed to have the same row stride and pixel stride
    242      * (in particular,
    243      * {@link android.media.Image.Plane#getRowStride() uPlane.getRowStride()}
    244      * == {@link android.media.Image.Plane#getRowStride() vPlane.getRowStride()} and
    245      * {@link android.media.Image.Plane#getPixelStride() uPlane.getPixelStride()}
     246      * == {@link android.media.Image.Plane#getPixelStride() vPlane.getPixelStride()}
    247      * ).</p>
    248      *
    249      * <p>For example, the {@link android.media.Image} object can provide data
    250      * in this format from a {@link android.media.MediaCodec}
    251      * through {@link android.media.MediaCodec#getOutputImage} object.</p>
    252      *
    253      * @see android.media.Image
    254      * @see android.media.MediaCodec
    255      */
    256     public static final int YUV_422_888 = 0x27;
    257 
    258     /**
    259      * <p>Multi-plane Android YUV 444 format</p>
    260      *
    261      * <p>This format is a generic YCbCr format, capable of describing any 4:4:4
    262      * (planar, semiplanar or interleaved) format,
    263      * with 8 bits per color sample.</p>
    264      *
    265      * <p>Images in this format are always represented by three separate buffers
    266      * of data, one for each color plane. Additional information always
    267      * accompanies the buffers, describing the row stride and the pixel stride
    268      * for each plane.</p>
    269      *
    270      * <p>The order of planes in the array returned by
    271      * {@link android.media.Image#getPlanes() Image#getPlanes()} is guaranteed such that
    272      * plane #0 is always Y, plane #1 is always U (Cb), and plane #2 is always V (Cr).</p>
    273      *
    274      * <p>In contrast to the {@link #YUV_420_888} format, the Y-plane may have a pixel
    275      * stride greater than 1 in
    276      * {@link android.media.Image.Plane#getPixelStride() yPlane.getPixelStride()}.</p>
    277      *
    278      * <p>The U/V planes are guaranteed to have the same row stride and pixel stride
    279      * (in particular,
    280      * {@link android.media.Image.Plane#getRowStride() uPlane.getRowStride()}
    281      * == {@link android.media.Image.Plane#getRowStride() vPlane.getRowStride()} and
    282      * {@link android.media.Image.Plane#getPixelStride() uPlane.getPixelStride()}
     283      * == {@link android.media.Image.Plane#getPixelStride() vPlane.getPixelStride()}
    284      * ).</p>
    285      *
    286      * <p>For example, the {@link android.media.Image} object can provide data
    287      * in this format from a {@link android.media.MediaCodec}
    288      * through {@link android.media.MediaCodec#getOutputImage} object.</p>
    289      *
    290      * @see android.media.Image
    291      * @see android.media.MediaCodec
    292      */
    293     public static final int YUV_444_888 = 0x28;
    294 
    295     /**
    296      * <p>Multi-plane Android RGB format</p>
    297      *
    298      * <p>This format is a generic RGB format, capable of describing most RGB formats,
    299      * with 8 bits per color sample.</p>
    300      *
    301      * <p>Images in this format are always represented by three separate buffers
    302      * of data, one for each color plane. Additional information always
    303      * accompanies the buffers, describing the row stride and the pixel stride
    304      * for each plane.</p>
    305      *
    306      * <p>The order of planes in the array returned by
    307      * {@link android.media.Image#getPlanes() Image#getPlanes()} is guaranteed such that
    308      * plane #0 is always R (red), plane #1 is always G (green), and plane #2 is always B
    309      * (blue).</p>
    310      *
    311      * <p>All three planes are guaranteed to have the same row strides and pixel strides.</p>
    312      *
    313      * <p>For example, the {@link android.media.Image} object can provide data
    314      * in this format from a {@link android.media.MediaCodec}
    315      * through {@link android.media.MediaCodec#getOutputImage} object.</p>
    316      *
    317      * @see android.media.Image
    318      * @see android.media.MediaCodec
    319      */
    320     public static final int FLEX_RGB_888 = 0x29;
    321 
    322     /**
    323      * <p>Multi-plane Android RGBA format</p>
    324      *
    325      * <p>This format is a generic RGBA format, capable of describing most RGBA formats,
    326      * with 8 bits per color sample.</p>
    327      *
    328      * <p>Images in this format are always represented by four separate buffers
    329      * of data, one for each color plane. Additional information always
    330      * accompanies the buffers, describing the row stride and the pixel stride
    331      * for each plane.</p>
    332      *
    333      * <p>The order of planes in the array returned by
    334      * {@link android.media.Image#getPlanes() Image#getPlanes()} is guaranteed such that
    335      * plane #0 is always R (red), plane #1 is always G (green), plane #2 is always B (blue),
    336      * and plane #3 is always A (alpha). This format may represent pre-multiplied or
    337      * non-premultiplied alpha.</p>
    338      *
    339      * <p>All four planes are guaranteed to have the same row strides and pixel strides.</p>
    340      *
    341      * <p>For example, the {@link android.media.Image} object can provide data
    342      * in this format from a {@link android.media.MediaCodec}
    343      * through {@link android.media.MediaCodec#getOutputImage} object.</p>
    344      *
    345      * @see android.media.Image
    346      * @see android.media.MediaCodec
    347      */
    348     public static final int FLEX_RGBA_8888 = 0x2A;
    349 
    350     /**
    351      * <p>General raw camera sensor image format, usually representing a
    352      * single-channel Bayer-mosaic image. Each pixel color sample is stored with
    353      * 16 bits of precision.</p>
    354      *
    355      * <p>The layout of the color mosaic, the maximum and minimum encoding
    356      * values of the raw pixel data, the color space of the image, and all other
    357      * needed information to interpret a raw sensor image must be queried from
    358      * the {@link android.hardware.camera2.CameraDevice} which produced the
    359      * image.</p>
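              *
              * <p>For illustration only, a sketch of reading one 16-bit raw sample at
              * hypothetical coordinates {@code (x, y)}, assuming the buffer's byte order
              * matches the device's sample layout:</p>
              *
              * <pre> Image.Plane rawPlane = image.getPlanes()[0];
              * ShortBuffer rawBuffer = rawPlane.getBuffer().asShortBuffer();
              * int rowStrideInShorts = rawPlane.getRowStride() / 2;   // getRowStride() is in bytes
              * int rawSample = rawBuffer.get(y * rowStrideInShorts + x) & 0xFFFF;
              * // Interpreting the sample (color channel, black level, white level) requires the
              * // CameraCharacteristics of the camera device that produced the image.</pre>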
    360      */
    361     public static final int RAW_SENSOR = 0x20;
    362 
    363     /**
    364      * <p>Private raw camera sensor image format, a single channel image with
     365      * implementation-dependent pixel layout.</p>
    366      *
    367      * <p>RAW_PRIVATE is a format for unprocessed raw image buffers coming from an
    368      * image sensor. The actual structure of buffers of this format is
    369      * implementation-dependent.</p>
    370      *
    371      */
    372     public static final int RAW_PRIVATE = 0x24;
    373 
    374     /**
    375      * <p>
    376      * Android 10-bit raw format
    377      * </p>
    378      * <p>
    379      * This is a single-plane, 10-bit per pixel, densely packed (in each row),
    380      * unprocessed format, usually representing raw Bayer-pattern images coming
    381      * from an image sensor.
    382      * </p>
    383      * <p>
    384      * In an image buffer with this format, starting from the first pixel of
    385      * each row, each 4 consecutive pixels are packed into 5 bytes (40 bits).
     386      * Each of the first 4 bytes contains the top 8 bits of one pixel. The
     387      * fifth byte contains the 2 least significant bits of all 4 pixels. The
     388      * exact layout for each 4 consecutive pixels is illustrated below
    389      * ({@code Pi[j]} stands for the jth bit of the ith pixel):
    390      * </p>
    391      * <table>
    392      * <thead>
    393      * <tr>
    394      * <th align="center"></th>
    395      * <th align="center">bit 7</th>
    396      * <th align="center">bit 6</th>
    397      * <th align="center">bit 5</th>
    398      * <th align="center">bit 4</th>
    399      * <th align="center">bit 3</th>
    400      * <th align="center">bit 2</th>
    401      * <th align="center">bit 1</th>
    402      * <th align="center">bit 0</th>
    403      * </tr>
    404      * </thead> <tbody>
    405      * <tr>
    406      * <td align="center">Byte 0:</td>
    407      * <td align="center">P0[9]</td>
    408      * <td align="center">P0[8]</td>
    409      * <td align="center">P0[7]</td>
    410      * <td align="center">P0[6]</td>
    411      * <td align="center">P0[5]</td>
    412      * <td align="center">P0[4]</td>
    413      * <td align="center">P0[3]</td>
    414      * <td align="center">P0[2]</td>
    415      * </tr>
    416      * <tr>
    417      * <td align="center">Byte 1:</td>
    418      * <td align="center">P1[9]</td>
    419      * <td align="center">P1[8]</td>
    420      * <td align="center">P1[7]</td>
    421      * <td align="center">P1[6]</td>
    422      * <td align="center">P1[5]</td>
    423      * <td align="center">P1[4]</td>
    424      * <td align="center">P1[3]</td>
    425      * <td align="center">P1[2]</td>
    426      * </tr>
    427      * <tr>
    428      * <td align="center">Byte 2:</td>
    429      * <td align="center">P2[9]</td>
    430      * <td align="center">P2[8]</td>
    431      * <td align="center">P2[7]</td>
    432      * <td align="center">P2[6]</td>
    433      * <td align="center">P2[5]</td>
    434      * <td align="center">P2[4]</td>
    435      * <td align="center">P2[3]</td>
    436      * <td align="center">P2[2]</td>
    437      * </tr>
    438      * <tr>
    439      * <td align="center">Byte 3:</td>
    440      * <td align="center">P3[9]</td>
    441      * <td align="center">P3[8]</td>
    442      * <td align="center">P3[7]</td>
    443      * <td align="center">P3[6]</td>
    444      * <td align="center">P3[5]</td>
    445      * <td align="center">P3[4]</td>
    446      * <td align="center">P3[3]</td>
    447      * <td align="center">P3[2]</td>
    448      * </tr>
    449      * <tr>
    450      * <td align="center">Byte 4:</td>
    451      * <td align="center">P3[1]</td>
    452      * <td align="center">P3[0]</td>
    453      * <td align="center">P2[1]</td>
    454      * <td align="center">P2[0]</td>
    455      * <td align="center">P1[1]</td>
    456      * <td align="center">P1[0]</td>
    457      * <td align="center">P0[1]</td>
    458      * <td align="center">P0[0]</td>
    459      * </tr>
    460      * </tbody>
    461      * </table>
    462      * <p>
    463      * This format assumes
    464      * <ul>
    465      * <li>a width multiple of 4 pixels</li>
    466      * <li>an even height</li>
    467      * </ul>
    468      * </p>
    469      *
    470      * <pre>size = row stride * height</pre> where the row stride is in <em>bytes</em>,
    471      * not pixels.
    472      *
    473      * <p>
    474      * Since this is a densely packed format, the pixel stride is always 0. The
     475      * application must use the pixel data layout defined in the table above to
     476      * access each row's data. When the row stride equals {@code width * 10 / 8} bytes, there
     477      * are no padding bytes at the end of each row and the entire image data is
     478      * densely packed. When the stride is larger than {@code width * 10 / 8}, padding
     479      * bytes are present at the end of each row.
    480      * </p>
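              * <p>For illustration only, a sketch of unpacking the first four pixels of row
              * {@code y} from a RAW10 {@link android.media.Image}, following the byte layout
              * in the table above:</p>
              *
              * <pre> ByteBuffer buf = image.getPlanes()[0].getBuffer();
              * int rowStart = y * image.getPlanes()[0].getRowStride();   // row stride is in bytes
              * int b0 = buf.get(rowStart)     & 0xFF;
              * int b1 = buf.get(rowStart + 1) & 0xFF;
              * int b2 = buf.get(rowStart + 2) & 0xFF;
              * int b3 = buf.get(rowStart + 3) & 0xFF;
              * int lsb = buf.get(rowStart + 4) & 0xFF;                   // 2 low bits of each pixel
              * int p0 = (b0 << 2) | ( lsb       & 0x3);
              * int p1 = (b1 << 2) | ((lsb >> 2) & 0x3);
              * int p2 = (b2 << 2) | ((lsb >> 4) & 0x3);
              * int p3 = (b3 << 2) | ((lsb >> 6) & 0x3);</pre>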
    481      * <p>
    482      * For example, the {@link android.media.Image} object can provide data in
    483      * this format from a {@link android.hardware.camera2.CameraDevice} (if
    484      * supported) through a {@link android.media.ImageReader} object. The
    485      * {@link android.media.Image#getPlanes() Image#getPlanes()} will return a
    486      * single plane containing the pixel data. The pixel stride is always 0 in
    487      * {@link android.media.Image.Plane#getPixelStride()}, and the
     488      * {@link android.media.Image.Plane#getRowStride()} describes the distance
     489      * (in bytes) between the starts of adjacent rows.
    490      * </p>
    491      *
    492      * @see android.media.Image
    493      * @see android.media.ImageReader
    494      * @see android.hardware.camera2.CameraDevice
    495      */
    496     public static final int RAW10 = 0x25;
    497 
    498     /**
    499      * <p>
    500      * Android 12-bit raw format
    501      * </p>
    502      * <p>
    503      * This is a single-plane, 12-bit per pixel, densely packed (in each row),
    504      * unprocessed format, usually representing raw Bayer-pattern images coming
    505      * from an image sensor.
    506      * </p>
    507      * <p>
    508      * In an image buffer with this format, starting from the first pixel of each
    509      * row, each two consecutive pixels are packed into 3 bytes (24 bits). The first
     510      * and second bytes contain the top 8 bits of the first and second pixels. The third
     511      * byte contains the 4 least significant bits of both pixels. The exact layout
     512      * for each two consecutive pixels is illustrated below ({@code Pi[j]} stands for
    513      * the jth bit of the ith pixel):
    514      * </p>
    515      * <table>
    516      * <thead>
    517      * <tr>
    518      * <th align="center"></th>
    519      * <th align="center">bit 7</th>
    520      * <th align="center">bit 6</th>
    521      * <th align="center">bit 5</th>
    522      * <th align="center">bit 4</th>
    523      * <th align="center">bit 3</th>
    524      * <th align="center">bit 2</th>
    525      * <th align="center">bit 1</th>
    526      * <th align="center">bit 0</th>
    527      * </tr>
    528      * </thead> <tbody>
    529      * <tr>
    530      * <td align="center">Byte 0:</td>
    531      * <td align="center">P0[11]</td>
    532      * <td align="center">P0[10]</td>
    533      * <td align="center">P0[ 9]</td>
    534      * <td align="center">P0[ 8]</td>
    535      * <td align="center">P0[ 7]</td>
    536      * <td align="center">P0[ 6]</td>
    537      * <td align="center">P0[ 5]</td>
    538      * <td align="center">P0[ 4]</td>
    539      * </tr>
    540      * <tr>
    541      * <td align="center">Byte 1:</td>
    542      * <td align="center">P1[11]</td>
    543      * <td align="center">P1[10]</td>
    544      * <td align="center">P1[ 9]</td>
    545      * <td align="center">P1[ 8]</td>
    546      * <td align="center">P1[ 7]</td>
    547      * <td align="center">P1[ 6]</td>
    548      * <td align="center">P1[ 5]</td>
    549      * <td align="center">P1[ 4]</td>
    550      * </tr>
    551      * <tr>
    552      * <td align="center">Byte 2:</td>
    553      * <td align="center">P1[ 3]</td>
    554      * <td align="center">P1[ 2]</td>
    555      * <td align="center">P1[ 1]</td>
    556      * <td align="center">P1[ 0]</td>
    557      * <td align="center">P0[ 3]</td>
    558      * <td align="center">P0[ 2]</td>
    559      * <td align="center">P0[ 1]</td>
    560      * <td align="center">P0[ 0]</td>
    561      * </tr>
    562      * </tbody>
    563      * </table>
    564      * <p>
    565      * This format assumes
    566      * <ul>
    567      * <li>a width multiple of 4 pixels</li>
    568      * <li>an even height</li>
    569      * </ul>
    570      * </p>
    571      *
    572      * <pre>size = row stride * height</pre> where the row stride is in <em>bytes</em>,
    573      * not pixels.
    574      *
    575      * <p>
    576      * Since this is a densely packed format, the pixel stride is always 0. The
     577      * application must use the pixel data layout defined in the table above to
     578      * access each row's data. When the row stride equals {@code width * 12 / 8} bytes, there
     579      * are no padding bytes at the end of each row and the entire image data is
     580      * densely packed. When the stride is larger than {@code width * 12 / 8}, padding
     581      * bytes are present at the end of each row.
    582      * </p>
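              * <p>For illustration only, a sketch of unpacking the first two pixels of row
              * {@code y} from a RAW12 {@link android.media.Image}, following the byte layout
              * in the table above:</p>
              *
              * <pre> ByteBuffer buf = image.getPlanes()[0].getBuffer();
              * int rowStart = y * image.getPlanes()[0].getRowStride();   // row stride is in bytes
              * int b0 = buf.get(rowStart)     & 0xFF;
              * int b1 = buf.get(rowStart + 1) & 0xFF;
              * int b2 = buf.get(rowStart + 2) & 0xFF;                    // 4 low bits of each pixel
              * int p0 = (b0 << 4) | ( b2       & 0xF);
              * int p1 = (b1 << 4) | ((b2 >> 4) & 0xF);</pre>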
    583      * <p>
    584      * For example, the {@link android.media.Image} object can provide data in
    585      * this format from a {@link android.hardware.camera2.CameraDevice} (if
    586      * supported) through a {@link android.media.ImageReader} object. The
    587      * {@link android.media.Image#getPlanes() Image#getPlanes()} will return a
    588      * single plane containing the pixel data. The pixel stride is always 0 in
    589      * {@link android.media.Image.Plane#getPixelStride()}, and the
     590      * {@link android.media.Image.Plane#getRowStride()} describes the distance
     591      * (in bytes) between the starts of adjacent rows.
    592      * </p>
    593      *
    594      * @see android.media.Image
    595      * @see android.media.ImageReader
    596      * @see android.hardware.camera2.CameraDevice
    597      */
    598     public static final int RAW12 = 0x26;
    599 
    600     /**
    601      * <p>Android dense depth image format.</p>
    602      *
    603      * <p>Each pixel is 16 bits, representing a depth ranging measurement from a depth camera or
    604      * similar sensor. The 16-bit sample consists of a confidence value and the actual ranging
    605      * measurement.</p>
    606      *
    607      * <p>The confidence value is an estimate of correctness for this sample.  It is encoded in the
    608      * 3 most significant bits of the sample, with a value of 0 representing 100% confidence, a
    609      * value of 1 representing 0% confidence, a value of 2 representing 1/7, a value of 3
    610      * representing 2/7, and so on.</p>
    611      *
    612      * <p>As an example, the following sample extracts the range and confidence from the first pixel
    613      * of a DEPTH16-format {@link android.media.Image}, and converts the confidence to a
    614      * floating-point value between 0 and 1.f inclusive, with 1.f representing maximum confidence:
    615      *
    616      * <pre>
    617      *    ShortBuffer shortDepthBuffer = img.getPlanes()[0].getBuffer().asShortBuffer();
     618      *    short depthSample = shortDepthBuffer.get();
    619      *    short depthRange = (short) (depthSample & 0x1FFF);
    620      *    short depthConfidence = (short) ((depthSample >> 13) & 0x7);
    621      *    float depthPercentage = depthConfidence == 0 ? 1.f : (depthConfidence - 1) / 7.f;
    622      * </pre>
    623      * </p>
    624      *
    625      * <p>This format assumes
    626      * <ul>
    627      * <li>an even width</li>
    628      * <li>an even height</li>
    629      * <li>a horizontal stride multiple of 16 pixels</li>
    630      * </ul>
    631      * </p>
    632      *
    633      * <pre> y_size = stride * height </pre>
    634      *
    635      * When produced by a camera, the units for the range are millimeters.
    636      */
    637     public static final int DEPTH16 = 0x44363159;
    638 
    639     /**
    640      * Android sparse depth point cloud format.
    641      *
    642      * <p>A variable-length list of 3D points plus a confidence value, with each point represented
    643      * by four floats; first the X, Y, Z position coordinates, and then the confidence value.</p>
    644      *
     645      * <p>The number of points is {@code (size of the buffer in bytes) / 16}.</p>
    646      *
    647      * <p>The coordinate system and units of the position values depend on the source of the point
    648      * cloud data. The confidence value is between 0.f and 1.f, inclusive, with 0 representing 0%
    649      * confidence and 1.f representing 100% confidence in the measured position values.</p>
    650      *
    651      * <p>As an example, the following code extracts the first depth point in a DEPTH_POINT_CLOUD
    652      * format {@link android.media.Image}:
    653      * <pre>
    654      *    FloatBuffer floatDepthBuffer = img.getPlanes()[0].getBuffer().asFloatBuffer();
    655      *    float x = floatDepthBuffer.get();
    656      *    float y = floatDepthBuffer.get();
    657      *    float z = floatDepthBuffer.get();
    658      *    float confidence = floatDepthBuffer.get();
    659      * </pre>
    660      *
    661      * For camera devices that support the
    662      * {@link android.hardware.camera2.CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT DEPTH_OUTPUT}
    663      * capability, DEPTH_POINT_CLOUD coordinates have units of meters, and the coordinate system is
    664      * defined by the camera's pose transforms:
    665      * {@link android.hardware.camera2.CameraCharacteristics#LENS_POSE_TRANSLATION} and
    666      * {@link android.hardware.camera2.CameraCharacteristics#LENS_POSE_ROTATION}. That means the origin is
    667      * the optical center of the camera device, and the positive Z axis points along the camera's optical axis,
    668      * toward the scene.
    669      */
    670     public static final int DEPTH_POINT_CLOUD = 0x101;
    671 
    672     /**
    673      * Unprocessed implementation-dependent raw
    674      * depth measurements, opaque with 16 bit
    675      * samples.
    676      *
    677      * @hide
    678      */
    679     public static final int RAW_DEPTH = 0x1002;
    680 
    681     /**
    682      * Android private opaque image format.
    683      * <p>
    684      * The choices of the actual format and pixel data layout are entirely up to
    685      * the device-specific and framework internal implementations, and may vary
    686      * depending on use cases even for the same device. The buffers of this
    687      * format can be produced by components like
    688      * {@link android.media.ImageWriter ImageWriter} , and interpreted correctly
    689      * by consumers like {@link android.hardware.camera2.CameraDevice
    690      * CameraDevice} based on the device/framework private information. However,
    691      * these buffers are not directly accessible to the application.
    692      * </p>
    693      * <p>
    694      * When an {@link android.media.Image Image} of this format is obtained from
    695      * an {@link android.media.ImageReader ImageReader} or
    696      * {@link android.media.ImageWriter ImageWriter}, the
    697      * {@link android.media.Image#getPlanes() getPlanes()} method will return an
    698      * empty {@link android.media.Image.Plane Plane} array.
    699      * </p>
    700      * <p>
    701      * If a buffer of this format is to be used as an OpenGL ES texture, the
    702      * framework will assume that sampling the texture will always return an
    703      * alpha value of 1.0 (i.e. the buffer contains only opaque pixel values).
    704      * </p>
    705      */
    706     public static final int PRIVATE = 0x22;
    707 
    708     /**
    709      * Use this function to retrieve the number of bits per pixel of an
    710      * ImageFormat.
    711      *
     712      * @param format the image format constant to query, such as {@link #NV21}
    713      * @return the number of bits per pixel of the given format or -1 if the
    714      *         format doesn't exist or is not supported.
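              *
              * <p>For illustration only, a sketch of estimating the minimum (unpadded) size in
              * bytes of one frame, given hypothetical {@code width} and {@code height} values;
              * real buffers may add per-row padding:</p>
              *
              * <pre> int bpp = ImageFormat.getBitsPerPixel(ImageFormat.YUV_420_888);   // 12
              * long minFrameBytes = (bpp > 0) ? ((long) width * height * bpp) / 8 : -1;</pre>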
    715      */
    716     public static int getBitsPerPixel(int format) {
    717         switch (format) {
    718             case RGB_565:
    719                 return 16;
    720             case NV16:
    721                 return 16;
    722             case YUY2:
    723                 return 16;
    724             case YV12:
    725                 return 12;
    726             case Y8:
    727                 return 8;
    728             case Y16:
    729             case DEPTH16:
    730                 return 16;
    731             case NV21:
    732                 return 12;
    733             case YUV_420_888:
    734                 return 12;
    735             case YUV_422_888:
    736                 return 16;
    737             case YUV_444_888:
    738                 return 24;
    739             case FLEX_RGB_888:
    740                 return 24;
    741             case FLEX_RGBA_8888:
    742                 return 32;
    743             case RAW_DEPTH:
    744             case RAW_SENSOR:
    745                 return 16;
    746             case RAW10:
    747                 return 10;
    748             case RAW12:
    749                 return 12;
    750         }
    751         return -1;
    752     }
    753 
    754     /**
     755      * Determine whether or not this is a publicly visible {@code format}.
    756      *
    757      * <p>In particular, {@code @hide} formats will return {@code false}.</p>
    758      *
     759      * <p>Unrecognized formats (including {@link #UNKNOWN}) will also return {@code false}.</p>
    760      *
    761      * @param format an integer format
    762      * @return a boolean
    763      *
    764      * @hide
    765      */
    766     public static boolean isPublicFormat(int format) {
    767         switch (format) {
    768             case RGB_565:
    769             case NV16:
    770             case YUY2:
    771             case YV12:
    772             case JPEG:
    773             case NV21:
    774             case YUV_420_888:
    775             case YUV_422_888:
    776             case YUV_444_888:
    777             case FLEX_RGB_888:
    778             case FLEX_RGBA_8888:
    779             case RAW_SENSOR:
    780             case RAW_PRIVATE:
    781             case RAW10:
    782             case RAW12:
    783             case DEPTH16:
    784             case DEPTH_POINT_CLOUD:
    785             case PRIVATE:
    786             case RAW_DEPTH:
    787                 return true;
    788         }
    789 
    790         return false;
    791     }
    792 }
    793