/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.media;

import java.nio.ByteBuffer;
import java.lang.AutoCloseable;

import android.annotation.Nullable;
import android.graphics.Rect;
import android.hardware.HardwareBuffer;

/**
 * <p>A single complete image buffer to use with a media source such as a
 * {@link MediaCodec} or a
 * {@link android.hardware.camera2.CameraDevice CameraDevice}.</p>
 *
 * <p>This class allows for efficient direct application access to the pixel
 * data of the Image through one or more
 * {@link java.nio.ByteBuffer ByteBuffers}. Each buffer is encapsulated in a
 * {@link Plane} that describes the layout of the pixel data in that plane. Due
 * to this direct access, and unlike the {@link android.graphics.Bitmap Bitmap} class,
 * Images are not directly usable as UI resources.</p>
 *
 * <p>Since Images are often directly produced or consumed by hardware
 * components, they are a limited resource shared across the system, and should
 * be closed as soon as they are no longer needed.</p>
 *
 * <p>For example, when using the {@link ImageReader} class to read out Images
 * from various media sources, not closing old Image objects will prevent the
 * availability of new Images once
 * {@link ImageReader#getMaxImages the maximum outstanding image count} is
 * reached. When this happens, the function acquiring new Images will typically
 * throw an {@link IllegalStateException}.</p>
 *
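 * <p>A minimal usage sketch (assuming an {@link ImageReader} named {@code reader} that has
 * already been configured and has a frame available; the variable names are illustrative
 * only): since {@code Image} implements {@link AutoCloseable}, try-with-resources releases
 * the buffer promptly.</p>
 *
 * <pre>{@code
 * // Acquire the newest available frame and release it as soon as processing is done.
 * try (Image image = reader.acquireLatestImage()) {
 *     if (image != null) {
 *         Image.Plane[] planes = image.getPlanes();
 *         // ... read pixel data from the planes here ...
 *     }
 * } // image.close() is called automatically here
 * }</pre>
 *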
 * @see ImageReader
 */
public abstract class Image implements AutoCloseable {
    /**
     * @hide
     */
    protected boolean mIsImageValid = false;

    /**
     * @hide
     */
    protected Image() {
    }

    /**
     * Throw IllegalStateException if the image is invalid (already closed).
     *
     * @hide
     */
    protected void throwISEIfImageIsInvalid() {
        if (!mIsImageValid) {
            throw new IllegalStateException("Image is already closed");
        }
    }

    /**
     * Get the format for this image. This format determines the number of
     * ByteBuffers needed to represent the image, and the general layout of the
     * pixel data in each ByteBuffer.
     *
     * <p>
     * The format is one of the values from
     * {@link android.graphics.ImageFormat ImageFormat}. The mapping between the
     * formats and the planes is as follows:
     * </p>
     *
     * <table>
     * <tr>
     *   <th>Format</th>
     *   <th>Plane count</th>
     *   <th>Layout details</th>
     * </tr>
     * <tr>
     *   <td>{@link android.graphics.ImageFormat#JPEG JPEG}</td>
     *   <td>1</td>
     *   <td>Compressed data, so row and pixel strides are 0. To uncompress, use
     *      {@link android.graphics.BitmapFactory#decodeByteArray BitmapFactory#decodeByteArray}.
     *   </td>
     * </tr>
     * <tr>
     *   <td>{@link android.graphics.ImageFormat#YUV_420_888 YUV_420_888}</td>
     *   <td>3</td>
     *   <td>A luminance plane followed by the Cb and Cr chroma planes.
     *     The chroma planes have half the width and height of the luminance
     *     plane (4:2:0 subsampling). Each pixel sample in each plane has 8 bits.
     *     Each plane has its own row stride and pixel stride.</td>
     * </tr>
     * <tr>
     *   <td>{@link android.graphics.ImageFormat#YUV_422_888 YUV_422_888}</td>
     *   <td>3</td>
     *   <td>A luminance plane followed by the Cb and Cr chroma planes.
     *     The chroma planes have half the width and the full height of the luminance
     *     plane (4:2:2 subsampling). Each pixel sample in each plane has 8 bits.
     *     Each plane has its own row stride and pixel stride.</td>
     * </tr>
     * <tr>
     *   <td>{@link android.graphics.ImageFormat#YUV_444_888 YUV_444_888}</td>
     *   <td>3</td>
     *   <td>A luminance plane followed by the Cb and Cr chroma planes.
     *     The chroma planes have the same width and height as that of the luminance
     *     plane (4:4:4 subsampling). Each pixel sample in each plane has 8 bits.
     *     Each plane has its own row stride and pixel stride.</td>
     * </tr>
     * <tr>
     *   <td>{@link android.graphics.ImageFormat#FLEX_RGB_888 FLEX_RGB_888}</td>
     *   <td>3</td>
     *   <td>An R (red) plane followed by the G (green) and B (blue) planes.
     *     All planes have the same widths and heights.
     *     Each pixel sample in each plane has 8 bits.
     *     Each plane has its own row stride and pixel stride.</td>
     * </tr>
     * <tr>
     *   <td>{@link android.graphics.ImageFormat#FLEX_RGBA_8888 FLEX_RGBA_8888}</td>
     *   <td>4</td>
     *   <td>An R (red) plane followed by the G (green), B (blue), and
     *     A (alpha) planes. All planes have the same widths and heights.
     *     Each pixel sample in each plane has 8 bits.
     *     Each plane has its own row stride and pixel stride.</td>
     * </tr>
     * <tr>
     *   <td>{@link android.graphics.ImageFormat#RAW_SENSOR RAW_SENSOR}</td>
     *   <td>1</td>
     *   <td>A single plane of raw sensor image data, with 16 bits per color
     *     sample. The details of the layout need to be queried from the source of
     *     the raw sensor data, such as
     *     {@link android.hardware.camera2.CameraDevice CameraDevice}.
     *   </td>
     * </tr>
     * <tr>
     *   <td>{@link android.graphics.ImageFormat#RAW_PRIVATE RAW_PRIVATE}</td>
     *   <td>1</td>
     *   <td>A single plane of raw sensor image data of private layout.
     *   The details of the layout are implementation specific. Row stride and
     *   pixel stride are undefined for this format. Calling {@link Plane#getRowStride()}
     *   or {@link Plane#getPixelStride()} on a RAW_PRIVATE image will cause an
     *   UnsupportedOperationException to be thrown.
     *   </td>
     * </tr>
     * </table>
     *
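     * <p>For example (an illustrative sketch only, assuming an {@code Image} named
     * {@code image}), a YUV_420_888 image exposes its planes in the order given in the
     * table above:</p>
     *
     * <pre>{@code
     * if (image.getFormat() == ImageFormat.YUV_420_888) {
     *     Image.Plane[] planes = image.getPlanes(); // 3 planes: Y, U (Cb), V (Cr)
     *     Image.Plane yPlane = planes[0];
     *     Image.Plane uPlane = planes[1];
     *     Image.Plane vPlane = planes[2];
     * }
     * }</pre>
     *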
     * @see android.graphics.ImageFormat
     */
    public abstract int getFormat();

    /**
     * The width of the image in pixels. For formats where some color channels
     * are subsampled, this is the width of the largest-resolution plane.
     */
    public abstract int getWidth();

    /**
     * The height of the image in pixels. For formats where some color channels
     * are subsampled, this is the height of the largest-resolution plane.
     */
    public abstract int getHeight();

    /**
     * Get the timestamp associated with this frame.
     * <p>
     * The timestamp is measured in nanoseconds, and is normally monotonically
     * increasing. The timestamps for the images from different sources may have
     * different timebases and therefore may not be comparable. The specific meaning and
     * timebase of the timestamp depend on the source providing images. See
     * {@link android.hardware.Camera Camera},
     * {@link android.hardware.camera2.CameraDevice CameraDevice},
     * {@link MediaPlayer} and {@link MediaCodec} for more details.
     * </p>
     */
    public abstract long getTimestamp();

    /**
     * Get the transformation associated with this frame.
     * @return The window transformation that needs to be applied for this frame.
     * @hide
     */
    public abstract int getTransform();

    /**
     * Get the scaling mode associated with this frame.
     * @return The scaling mode that needs to be applied for this frame.
     * @hide
     */
    public abstract int getScalingMode();

    /**
     * Get the {@link android.hardware.HardwareBuffer HardwareBuffer} handle of the input image
     * intended for GPU and/or hardware access.
     * <p>
     * The returned {@link android.hardware.HardwareBuffer HardwareBuffer} shall not be used
     * after {@link Image#close Image.close()} has been called.
     * </p>
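     * <p>A brief sketch of the expected usage (assuming an {@code Image} named {@code image};
     * the null check is required because not every producer supports this feature):</p>
     * <pre>{@code
     * HardwareBuffer buffer = image.getHardwareBuffer();
     * if (buffer != null) {
     *     // Hand the buffer to GPU / hardware consumers here, but stop using it
     *     // once image.close() has been called.
     * }
     * }</pre>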
     * @return the HardwareBuffer associated with this Image or null if this Image doesn't support
     * this feature. (Unsupported use cases include Image instances obtained through
     * {@link android.media.MediaCodec MediaCodec}, and on versions prior to Android P,
     * {@link android.media.ImageWriter ImageWriter}).
     */
    @Nullable
    public HardwareBuffer getHardwareBuffer() {
        throwISEIfImageIsInvalid();
        return null;
    }

    /**
     * Set the timestamp associated with this frame.
     * <p>
     * The timestamp is measured in nanoseconds, and is normally monotonically
     * increasing. The timestamps for the images from different sources may have
     * different timebases and therefore may not be comparable. The specific meaning and
     * timebase of the timestamp depend on the source providing images. See
     * {@link android.hardware.Camera Camera},
     * {@link android.hardware.camera2.CameraDevice CameraDevice},
     * {@link MediaPlayer} and {@link MediaCodec} for more details.
     * </p>
     * <p>
     * For images dequeued from {@link ImageWriter} via
     * {@link ImageWriter#dequeueInputImage()}, it's up to the application to
     * set the timestamps correctly before sending them back to the
     * {@link ImageWriter}, or the timestamp will be generated automatically when
     * {@link ImageWriter#queueInputImage queueInputImage()} is called.
     * </p>
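     * <p>A minimal sketch of this flow (assuming an {@link ImageWriter} named {@code writer};
     * the timestamp variable is illustrative only):</p>
     * <pre>{@code
     * Image input = writer.dequeueInputImage();
     * // ... fill in the pixel data for this frame ...
     * input.setTimestamp(frameTimestampNs); // otherwise a timestamp is generated on queueing
     * writer.queueInputImage(input);        // also closes the image
     * }</pre>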
     *
     * @param timestamp The timestamp to be set for this image.
     */
    public void setTimestamp(long timestamp) {
        throwISEIfImageIsInvalid();
        return;
    }

    private Rect mCropRect;

    /**
     * Get the crop rectangle associated with this frame.
     * <p>
     * The crop rectangle specifies the region of valid pixels in the image,
     * using coordinates in the largest-resolution plane.
     */
    public Rect getCropRect() {
        throwISEIfImageIsInvalid();

        if (mCropRect == null) {
            return new Rect(0, 0, getWidth(), getHeight());
        } else {
            return new Rect(mCropRect); // return a copy
        }
    }

    /**
     * Set the crop rectangle associated with this frame.
     * <p>
     * The crop rectangle specifies the region of valid pixels in the image,
     * using coordinates in the largest-resolution plane.
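     * <p>For example (a sketch only; the rectangle values are illustrative), an application
     * can mark the top-left quarter of the image as the valid region:</p>
     * <pre>{@code
     * image.setCropRect(new Rect(0, 0, image.getWidth() / 2, image.getHeight() / 2));
     * Rect valid = image.getCropRect(); // returns a copy of the effective crop rectangle
     * }</pre>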
     */
    public void setCropRect(Rect cropRect) {
        throwISEIfImageIsInvalid();

        if (cropRect != null) {
            cropRect = new Rect(cropRect);  // make a copy
            if (!cropRect.intersect(0, 0, getWidth(), getHeight())) {
                cropRect.setEmpty();
            }
        }
        mCropRect = cropRect;
    }

    /**
     * Get the array of pixel planes for this Image. The number of planes is
     * determined by the format of the Image. The application will get an empty
     * array if the image format is {@link android.graphics.ImageFormat#PRIVATE
     * PRIVATE}, because the image pixel data is not directly accessible. The
     * application can check the image format by calling
     * {@link Image#getFormat()}.
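     * <p>A short sketch of iterating over the planes (assuming an {@code Image} named
     * {@code image} whose format is one of the accessible formats listed under
     * {@link #getFormat()}):</p>
     * <pre>{@code
     * for (Image.Plane plane : image.getPlanes()) {
     *     ByteBuffer data = plane.getBuffer();
     *     int rowStride = plane.getRowStride();
     *     int pixelStride = plane.getPixelStride();
     *     // ... read the plane's pixel data using the strides ...
     * }
     * }</pre>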
     */
    public abstract Plane[] getPlanes();

    /**
     * Free up this frame for reuse.
     * <p>
     * After calling this method, calling any methods on this {@code Image} will
     * result in an {@link IllegalStateException}, and attempting to read from
     * or write to {@link ByteBuffer ByteBuffers} returned by an earlier
     * {@link Plane#getBuffer} call will have undefined behavior. If the image
     * was obtained from {@link ImageWriter} via
     * {@link ImageWriter#dequeueInputImage()}, after calling this method, any
     * image data filled by the application will be lost and the image will be
     * returned to {@link ImageWriter} for reuse. Images given to
     * {@link ImageWriter#queueInputImage queueInputImage()} are automatically
     * closed.
     * </p>
     */
    @Override
    public abstract void close();

    /**
     * <p>
     * Check if the image can be attached to a new owner (e.g. {@link ImageWriter}).
     * </p>
     * <p>
     * This is a package private method that is only used internally.
     * </p>
     *
     * @return true if the image is attachable to a new owner, false if the image is still attached
     *         to its current owner, or the image is a stand-alone image and is not attachable to
     *         a new owner.
     */
    boolean isAttachable() {
        throwISEIfImageIsInvalid();

        return false;
    }

    /**
     * <p>
     * Get the owner of the {@link Image}.
     * </p>
     * <p>
     * The owner of an {@link Image} could be {@link ImageReader}, {@link ImageWriter},
     * {@link MediaCodec} etc. This method returns the owner that produces this image, or null
     * if the image is a stand-alone image or the owner is unknown.
     * </p>
     * <p>
     * This is a package private method that is only used internally.
     * </p>
     *
     * @return The owner of the Image.
     */
    Object getOwner() {
        throwISEIfImageIsInvalid();

        return null;
    }

    /**
     * Get native context (buffer pointer) associated with this image.
     * <p>
     * This is a package private method that is only used internally. It can be
     * used to get the native buffer pointer and passed to native, which may be
     * passed to {@link ImageWriter#attachAndQueueInputImage} to avoid a reverse
     * JNI call.
     * </p>
     *
     * @return native context associated with this Image.
     */
    long getNativeContext() {
        throwISEIfImageIsInvalid();

        return 0;
    }

    /**
     * <p>A single color plane of image data.</p>
     *
     * <p>The number and meaning of the planes in an Image are determined by the
     * format of the Image.</p>
     *
     * <p>Once the Image has been closed, any access to the plane's
     * ByteBuffer will fail.</p>
     *
     * @see #getFormat
     */
    public static abstract class Plane {
        /**
         * @hide
         */
        protected Plane() {
        }

        /**
         * <p>The row stride for this color plane, in bytes.</p>
         *
         * <p>This is the distance between the start of two consecutive rows of
         * pixels in the image. Note that row stride is undefined for some formats
         * such as
         * {@link android.graphics.ImageFormat#RAW_PRIVATE RAW_PRIVATE},
         * and calling getRowStride on images of these formats will
         * cause an UnsupportedOperationException to be thrown.
         * For formats where row stride is well defined, the row stride
         * is always greater than 0.</p>
         */
        public abstract int getRowStride();

        /**
         * <p>The distance between adjacent pixel samples, in bytes.</p>
         *
         * <p>This is the distance between two consecutive pixel values in a row
         * of pixels. It may be larger than the size of a single pixel to
         * account for interleaved image data or padded formats.
         * Note that pixel stride is undefined for some formats such as
         * {@link android.graphics.ImageFormat#RAW_PRIVATE RAW_PRIVATE},
         * and calling getPixelStride on images of these formats will
         * cause an UnsupportedOperationException to be thrown.
         * For formats where pixel stride is well defined, the pixel stride
         * is always greater than 0.</p>
         */
        public abstract int getPixelStride();

        /**
         * <p>Get a direct {@link java.nio.ByteBuffer ByteBuffer}
         * containing the frame data.</p>
         *
         * <p>In particular, the buffer returned will always have
         * {@link java.nio.ByteBuffer#isDirect isDirect} return {@code true}, so
         * the underlying data could be mapped as a pointer in JNI without doing
         * any copies with {@code GetDirectBufferAddress}.</p>
         *
         * <p>For raw formats, each plane is only guaranteed to contain data
         * up to the last pixel in the last row. In other words, the stride
         * after the last row may not be mapped into the buffer. This is a
         * necessary requirement for any interleaved format.</p>
         *
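         * <p>A sketch of addressing an individual sample within this buffer (assuming an
         * 8-bit-per-sample format; {@code x} and {@code y} are plane-relative coordinates
         * chosen for illustration):</p>
         * <pre>{@code
         * ByteBuffer data = plane.getBuffer();
         * int offset = y * plane.getRowStride() + x * plane.getPixelStride();
         * byte sample = data.get(offset);
         * }</pre>
         *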
         * @return the byte buffer containing the image data for this plane.
         */
        public abstract ByteBuffer getBuffer();
    }

}