Home | History | Annotate | Download | only in camera2
      1 /*
      2  * Copyright (C) 2012 The Android Open Source Project
      3  *
      4  * Licensed under the Apache License, Version 2.0 (the "License");
      5  * you may not use this file except in compliance with the License.
      6  * You may obtain a copy of the License at
      7  *
      8  *      http://www.apache.org/licenses/LICENSE-2.0
      9  *
     10  * Unless required by applicable law or agreed to in writing, software
     11  * distributed under the License is distributed on an "AS IS" BASIS,
     12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     13  * See the License for the specific language governing permissions and
     14  * limitations under the License.
     15  */
     16 
     17 package android.hardware.camera2;
     18 
     19 import android.graphics.Point;
     20 import android.graphics.Rect;
     21 import android.hardware.camera2.impl.CameraMetadataNative;
     22 
     23 /**
     24  * <p>The results of a single image capture from the image sensor.</p>
     25  *
     26  * <p>Contains the final configuration for the capture hardware (sensor, lens,
     27  * flash), the processing pipeline, the control algorithms, and the output
     28  * buffers.</p>
     29  *
     30  * <p>CaptureResults are produced by a {@link CameraDevice} after processing a
     31  * {@link CaptureRequest}. All properties listed for capture requests can also
     32  * be queried on the capture result, to determine the final values used for
     33  * capture. The result also includes additional metadata about the state of the
     34  * camera device during the capture.</p>
     35  *
     36  */
     37 public final class CaptureResult extends CameraMetadata {
     38 
    private final CameraMetadataNative mResults;  // backing store for all result metadata fields; owned by this object
    private final CaptureRequest mRequest;        // the request that produced this capture result
    private final int mSequenceId;                // ID of the request sequence this capture belongs to
     42 
     43     /**
     44      * Takes ownership of the passed-in properties object
     45      * @hide
     46      */
     47     public CaptureResult(CameraMetadataNative results, CaptureRequest parent, int sequenceId) {
     48         if (results == null) {
     49             throw new IllegalArgumentException("results was null");
     50         }
     51 
     52         if (parent == null) {
     53             throw new IllegalArgumentException("parent was null");
     54         }
     55 
     56         mResults = results;
     57         mRequest = parent;
     58         mSequenceId = sequenceId;
     59     }
     60 
    /**
     * Get a capture result field value.
     *
     * <p>Delegates directly to the underlying {@link CameraMetadataNative}
     * store that this result wraps.</p>
     *
     * @param key the metadata field to read
     * @return the value stored for {@code key} in the native metadata store
     */
    @Override
    public <T> T get(Key<T> key) {
        return mResults.get(key);
    }
     65 
    /**
     * Get the request associated with this result.
     *
     * <p>Whenever a request is successfully captured, with
     * {@link CameraDevice.CaptureListener#onCaptureCompleted},
     * the {@code result}'s {@code getRequest()} will return that {@code request}.
     * </p>
     *
     * <p>In particular,
     * <pre><code>cameraDevice.capture(someRequest, new CaptureListener() {
     *     {@literal @}Override
     *     void onCaptureCompleted(CaptureRequest myRequest, CaptureResult myResult) {
     *         assert(myResult.getRequest().equals(myRequest) == true);
     *     }
     * });
     * </code></pre>
     * </p>
     *
     * @return The request associated with this result. Never {@code null}.
     */
    public CaptureRequest getRequest() {
        return mRequest;
    }
     89 
    /**
     * Get the frame number associated with this result.
     *
     * <p>Whenever a request has been processed, regardless of failure or success,
     * it gets a unique frame number assigned to its future result/failure.</p>
     *
     * <p>This value monotonically increments, starting with 0,
     * for every new result or failure; and the scope is the lifetime of the
     * {@link CameraDevice}.</p>
     *
     * @return int frame number
     */
    public int getFrameNumber() {
        // NOTE(review): assumes REQUEST_FRAME_COUNT is always populated in the
        // native results. If it were ever absent, unboxing the Integer returned
        // by get() would throw a NullPointerException here — confirm against
        // the HAL result-metadata contract.
        return get(REQUEST_FRAME_COUNT);
    }
    105 
    /**
     * The sequence ID for this capture result that was returned by the
     * {@link CameraDevice#capture} family of functions.
     *
     * <p>The sequence ID is a unique monotonically increasing value starting from 0,
     * incremented every time a new group of requests is submitted to the CameraDevice.</p>
     *
     * @return int The ID for the sequence of requests that this capture result is a part of
     *
     * @see CameraDevice.CaptureListener#onCaptureSequenceCompleted
     */
    public int getSequenceId() {
        return mSequenceId;
    }
    120 
    121     /*@O~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~
    122      * The key entries below this point are generated from metadata
    123      * definitions in /system/media/camera/docs. Do not modify by hand or
    124      * modify the comment blocks at the start or end.
    125      *~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~*/
    126 
    127     /**
    128      * <p>
    129      * A color transform matrix to use to transform
    130      * from sensor RGB color space to output linear sRGB color space
    131      * </p>
    132      * <p>
    133      * This matrix is either set by HAL when the request
    134      * android.colorCorrection.mode is not TRANSFORM_MATRIX, or
    135      * directly by the application in the request when the
    136      * android.colorCorrection.mode is TRANSFORM_MATRIX.
    137      * </p><p>
    138      * In the latter case, the HAL may round the matrix to account
    139      * for precision issues; the final rounded matrix should be
    140      * reported back in this matrix result metadata.
    141      * </p>
    142      */
    143     public static final Key<Rational[]> COLOR_CORRECTION_TRANSFORM =
    144             new Key<Rational[]>("android.colorCorrection.transform", Rational[].class);
    145 
    146     /**
    147      * <p>
    148      * Gains applying to Bayer color channels for
    149      * white-balance
    150      * </p>
    151      * <p>
    152      * The 4-channel white-balance gains are defined in
    153      * the order of [R G_even G_odd B], where G_even is the gain
    154      * for green pixels on even rows of the output, and G_odd
     155      * is the gain for green pixels on the odd rows. If a HAL
     156      * does not support a separate gain for even/odd green channels,
     157      * it should use the G_even value, and write G_odd equal to
    158      * G_even in the output result metadata.
    159      * </p><p>
    160      * This array is either set by HAL when the request
    161      * android.colorCorrection.mode is not TRANSFORM_MATRIX, or
    162      * directly by the application in the request when the
    163      * android.colorCorrection.mode is TRANSFORM_MATRIX.
    164      * </p><p>
     165      * The output should be the gains actually applied by the HAL to
    166      * the current frame.
    167      * </p>
    168      */
    169     public static final Key<float[]> COLOR_CORRECTION_GAINS =
    170             new Key<float[]>("android.colorCorrection.gains", float[].class);
    171 
    172     /**
    173      * <p>
    174      * The ID sent with the latest
    175      * CAMERA2_TRIGGER_PRECAPTURE_METERING call
    176      * </p>
    177      * <p>
    178      * Must be 0 if no
    179      * CAMERA2_TRIGGER_PRECAPTURE_METERING trigger received yet
    180      * by HAL. Always updated even if AE algorithm ignores the
    181      * trigger
    182      * </p>
    183      *
    184      * @hide
    185      */
    186     public static final Key<Integer> CONTROL_AE_PRECAPTURE_ID =
    187             new Key<Integer>("android.control.aePrecaptureId", int.class);
    188 
    189     /**
    190      * <p>
    191      * List of areas to use for
    192      * metering
    193      * </p>
    194      * <p>
    195      * Each area is a rectangle plus weight: xmin, ymin,
    196      * xmax, ymax, weight. The rectangle is defined inclusive of the
    197      * specified coordinates.
    198      * </p><p>
    199      * The coordinate system is based on the active pixel array,
    200      * with (0,0) being the top-left pixel in the active pixel array, and
    201      * (android.sensor.info.activeArraySize.width - 1,
    202      * android.sensor.info.activeArraySize.height - 1) being the
    203      * bottom-right pixel in the active pixel array. The weight
    204      * should be nonnegative.
    205      * </p><p>
    206      * If all regions have 0 weight, then no specific metering area
    207      * needs to be used by the HAL. If the metering region is
    208      * outside the current android.scaler.cropRegion, the HAL
    209      * should ignore the sections outside the region and output the
    210      * used sections in the frame metadata
    211      * </p>
    212      */
    213     public static final Key<int[]> CONTROL_AE_REGIONS =
    214             new Key<int[]>("android.control.aeRegions", int[].class);
    215 
    216     /**
    217      * <p>
    218      * Current state of AE algorithm
    219      * </p>
    220      * <p>
    221      * Whenever the AE algorithm state changes, a
     222      * MSG_AUTOEXPOSURE notification must be sent if a
    223      * notification callback is registered.
    224      * </p>
    225      * @see #CONTROL_AE_STATE_INACTIVE
    226      * @see #CONTROL_AE_STATE_SEARCHING
    227      * @see #CONTROL_AE_STATE_CONVERGED
    228      * @see #CONTROL_AE_STATE_LOCKED
    229      * @see #CONTROL_AE_STATE_FLASH_REQUIRED
    230      * @see #CONTROL_AE_STATE_PRECAPTURE
    231      */
    232     public static final Key<Integer> CONTROL_AE_STATE =
    233             new Key<Integer>("android.control.aeState", int.class);
    234 
    235     /**
    236      * <p>
    237      * Whether AF is currently enabled, and what
    238      * mode it is set to
    239      * </p>
    240      * @see #CONTROL_AF_MODE_OFF
    241      * @see #CONTROL_AF_MODE_AUTO
    242      * @see #CONTROL_AF_MODE_MACRO
    243      * @see #CONTROL_AF_MODE_CONTINUOUS_VIDEO
    244      * @see #CONTROL_AF_MODE_CONTINUOUS_PICTURE
    245      * @see #CONTROL_AF_MODE_EDOF
    246      */
    247     public static final Key<Integer> CONTROL_AF_MODE =
    248             new Key<Integer>("android.control.afMode", int.class);
    249 
    250     /**
    251      * <p>
    252      * List of areas to use for focus
    253      * estimation
    254      * </p>
    255      * <p>
    256      * Each area is a rectangle plus weight: xmin, ymin,
    257      * xmax, ymax, weight. The rectangle is defined inclusive of the
    258      * specified coordinates.
    259      * </p><p>
    260      * The coordinate system is based on the active pixel array,
    261      * with (0,0) being the top-left pixel in the active pixel array, and
    262      * (android.sensor.info.activeArraySize.width - 1,
    263      * android.sensor.info.activeArraySize.height - 1) being the
    264      * bottom-right pixel in the active pixel array. The weight
    265      * should be nonnegative.
    266      * </p><p>
    267      * If all regions have 0 weight, then no specific focus area
    268      * needs to be used by the HAL. If the focusing region is
    269      * outside the current android.scaler.cropRegion, the HAL
    270      * should ignore the sections outside the region and output the
    271      * used sections in the frame metadata
    272      * </p>
    273      */
    274     public static final Key<int[]> CONTROL_AF_REGIONS =
    275             new Key<int[]>("android.control.afRegions", int[].class);
    276 
    277     /**
    278      * <p>
    279      * Current state of AF algorithm
    280      * </p>
    281      * <p>
    282      * Whenever the AF algorithm state changes, a
     283      * MSG_AUTOFOCUS notification must be sent if a notification
    284      * callback is registered.
    285      * </p>
    286      * @see #CONTROL_AF_STATE_INACTIVE
    287      * @see #CONTROL_AF_STATE_PASSIVE_SCAN
    288      * @see #CONTROL_AF_STATE_PASSIVE_FOCUSED
    289      * @see #CONTROL_AF_STATE_ACTIVE_SCAN
    290      * @see #CONTROL_AF_STATE_FOCUSED_LOCKED
    291      * @see #CONTROL_AF_STATE_NOT_FOCUSED_LOCKED
    292      * @see #CONTROL_AF_STATE_PASSIVE_UNFOCUSED
    293      */
    294     public static final Key<Integer> CONTROL_AF_STATE =
    295             new Key<Integer>("android.control.afState", int.class);
    296 
    297     /**
    298      * <p>
    299      * The ID sent with the latest
    300      * CAMERA2_TRIGGER_AUTOFOCUS call
    301      * </p>
    302      * <p>
    303      * Must be 0 if no CAMERA2_TRIGGER_AUTOFOCUS trigger
    304      * received yet by HAL. Always updated even if AF algorithm
    305      * ignores the trigger
    306      * </p>
    307      *
    308      * @hide
    309      */
    310     public static final Key<Integer> CONTROL_AF_TRIGGER_ID =
    311             new Key<Integer>("android.control.afTriggerId", int.class);
    312 
    313     /**
    314      * <p>
    315      * Whether AWB is currently setting the color
    316      * transform fields, and what its illumination target
    317      * is
    318      * </p>
    319      * <p>
    320      * [BC - AWB lock,AWB modes]
    321      * </p>
    322      * @see #CONTROL_AWB_MODE_OFF
    323      * @see #CONTROL_AWB_MODE_AUTO
    324      * @see #CONTROL_AWB_MODE_INCANDESCENT
    325      * @see #CONTROL_AWB_MODE_FLUORESCENT
    326      * @see #CONTROL_AWB_MODE_WARM_FLUORESCENT
    327      * @see #CONTROL_AWB_MODE_DAYLIGHT
    328      * @see #CONTROL_AWB_MODE_CLOUDY_DAYLIGHT
    329      * @see #CONTROL_AWB_MODE_TWILIGHT
    330      * @see #CONTROL_AWB_MODE_SHADE
    331      */
    332     public static final Key<Integer> CONTROL_AWB_MODE =
    333             new Key<Integer>("android.control.awbMode", int.class);
    334 
    335     /**
    336      * <p>
    337      * List of areas to use for illuminant
    338      * estimation
    339      * </p>
    340      * <p>
    341      * Only used in AUTO mode.
    342      * </p><p>
    343      * Each area is a rectangle plus weight: xmin, ymin,
    344      * xmax, ymax, weight. The rectangle is defined inclusive of the
    345      * specified coordinates.
    346      * </p><p>
    347      * The coordinate system is based on the active pixel array,
    348      * with (0,0) being the top-left pixel in the active pixel array, and
    349      * (android.sensor.info.activeArraySize.width - 1,
    350      * android.sensor.info.activeArraySize.height - 1) being the
    351      * bottom-right pixel in the active pixel array. The weight
    352      * should be nonnegative.
    353      * </p><p>
    354      * If all regions have 0 weight, then no specific metering area
    355      * needs to be used by the HAL. If the metering region is
    356      * outside the current android.scaler.cropRegion, the HAL
    357      * should ignore the sections outside the region and output the
    358      * used sections in the frame metadata
    359      * </p>
    360      */
    361     public static final Key<int[]> CONTROL_AWB_REGIONS =
    362             new Key<int[]>("android.control.awbRegions", int[].class);
    363 
    364     /**
    365      * <p>
    366      * Current state of AWB algorithm
    367      * </p>
    368      * <p>
    369      * Whenever the AWB algorithm state changes, a
     370      * MSG_AUTOWHITEBALANCE notification must be sent if a
    371      * notification callback is registered.
    372      * </p>
    373      * @see #CONTROL_AWB_STATE_INACTIVE
    374      * @see #CONTROL_AWB_STATE_SEARCHING
    375      * @see #CONTROL_AWB_STATE_CONVERGED
    376      * @see #CONTROL_AWB_STATE_LOCKED
    377      */
    378     public static final Key<Integer> CONTROL_AWB_STATE =
    379             new Key<Integer>("android.control.awbState", int.class);
    380 
    381     /**
    382      * <p>
    383      * Overall mode of 3A control
    384      * routines
    385      * </p>
    386      * @see #CONTROL_MODE_OFF
    387      * @see #CONTROL_MODE_AUTO
    388      * @see #CONTROL_MODE_USE_SCENE_MODE
    389      */
    390     public static final Key<Integer> CONTROL_MODE =
    391             new Key<Integer>("android.control.mode", int.class);
    392 
    393     /**
    394      * <p>
    395      * Operation mode for edge
    396      * enhancement
    397      * </p>
    398      * @see #EDGE_MODE_OFF
    399      * @see #EDGE_MODE_FAST
    400      * @see #EDGE_MODE_HIGH_QUALITY
    401      */
    402     public static final Key<Integer> EDGE_MODE =
    403             new Key<Integer>("android.edge.mode", int.class);
    404 
    405     /**
    406      * <p>
    407      * Select flash operation mode
    408      * </p>
    409      * @see #FLASH_MODE_OFF
    410      * @see #FLASH_MODE_SINGLE
    411      * @see #FLASH_MODE_TORCH
    412      */
    413     public static final Key<Integer> FLASH_MODE =
    414             new Key<Integer>("android.flash.mode", int.class);
    415 
    416     /**
    417      * <p>
    418      * Current state of the flash
    419      * unit
    420      * </p>
    421      * @see #FLASH_STATE_UNAVAILABLE
    422      * @see #FLASH_STATE_CHARGING
    423      * @see #FLASH_STATE_READY
    424      * @see #FLASH_STATE_FIRED
    425      */
    426     public static final Key<Integer> FLASH_STATE =
    427             new Key<Integer>("android.flash.state", int.class);
    428 
    429     /**
    430      * <p>
    431      * GPS coordinates to include in output JPEG
    432      * EXIF
    433      * </p>
    434      */
    435     public static final Key<double[]> JPEG_GPS_COORDINATES =
    436             new Key<double[]>("android.jpeg.gpsCoordinates", double[].class);
    437 
    438     /**
    439      * <p>
    440      * 32 characters describing GPS algorithm to
    441      * include in EXIF
    442      * </p>
    443      */
    444     public static final Key<String> JPEG_GPS_PROCESSING_METHOD =
    445             new Key<String>("android.jpeg.gpsProcessingMethod", String.class);
    446 
    447     /**
    448      * <p>
    449      * Time GPS fix was made to include in
    450      * EXIF
    451      * </p>
    452      */
    453     public static final Key<Long> JPEG_GPS_TIMESTAMP =
    454             new Key<Long>("android.jpeg.gpsTimestamp", long.class);
    455 
    456     /**
    457      * <p>
    458      * Orientation of JPEG image to
    459      * write
    460      * </p>
    461      */
    462     public static final Key<Integer> JPEG_ORIENTATION =
    463             new Key<Integer>("android.jpeg.orientation", int.class);
    464 
    465     /**
    466      * <p>
    467      * Compression quality of the final JPEG
    468      * image
    469      * </p>
    470      * <p>
    471      * 85-95 is typical usage range
    472      * </p>
    473      */
    474     public static final Key<Byte> JPEG_QUALITY =
    475             new Key<Byte>("android.jpeg.quality", byte.class);
    476 
    477     /**
    478      * <p>
    479      * Compression quality of JPEG
    480      * thumbnail
    481      * </p>
    482      */
    483     public static final Key<Byte> JPEG_THUMBNAIL_QUALITY =
    484             new Key<Byte>("android.jpeg.thumbnailQuality", byte.class);
    485 
    486     /**
    487      * <p>
    488      * Resolution of embedded JPEG
    489      * thumbnail
    490      * </p>
    491      */
    492     public static final Key<android.hardware.camera2.Size> JPEG_THUMBNAIL_SIZE =
    493             new Key<android.hardware.camera2.Size>("android.jpeg.thumbnailSize", android.hardware.camera2.Size.class);
    494 
    495     /**
    496      * <p>
    497      * Size of the lens aperture
    498      * </p>
    499      * <p>
    500      * Will not be supported on most devices. Can only
    501      * pick from supported list
    502      * </p>
    503      */
    504     public static final Key<Float> LENS_APERTURE =
    505             new Key<Float>("android.lens.aperture", float.class);
    506 
    507     /**
    508      * <p>
    509      * State of lens neutral density
    510      * filter(s)
    511      * </p>
    512      * <p>
    513      * Will not be supported on most devices. Can only
    514      * pick from supported list
    515      * </p>
    516      */
    517     public static final Key<Float> LENS_FILTER_DENSITY =
    518             new Key<Float>("android.lens.filterDensity", float.class);
    519 
    520     /**
    521      * <p>
    522      * Lens optical zoom setting
    523      * </p>
    524      * <p>
    525      * Will not be supported on most devices.
    526      * </p>
    527      */
    528     public static final Key<Float> LENS_FOCAL_LENGTH =
    529             new Key<Float>("android.lens.focalLength", float.class);
    530 
    531     /**
    532      * <p>
    533      * Distance to plane of sharpest focus,
    534      * measured from frontmost surface of the lens
    535      * </p>
    536      * <p>
    537      * Should be zero for fixed-focus cameras
    538      * </p>
    539      */
    540     public static final Key<Float> LENS_FOCUS_DISTANCE =
    541             new Key<Float>("android.lens.focusDistance", float.class);
    542 
    543     /**
    544      * <p>
    545      * The range of scene distances that are in
    546      * sharp focus (depth of field)
    547      * </p>
    548      * <p>
    549      * If variable focus not supported, can still report
    550      * fixed depth of field range
    551      * </p>
    552      */
    553     public static final Key<float[]> LENS_FOCUS_RANGE =
    554             new Key<float[]>("android.lens.focusRange", float[].class);
    555 
    556     /**
    557      * <p>
    558      * Whether optical image stabilization is
    559      * enabled.
    560      * </p>
    561      * <p>
    562      * Will not be supported on most devices.
    563      * </p>
    564      * @see #LENS_OPTICAL_STABILIZATION_MODE_OFF
    565      * @see #LENS_OPTICAL_STABILIZATION_MODE_ON
    566      */
    567     public static final Key<Integer> LENS_OPTICAL_STABILIZATION_MODE =
    568             new Key<Integer>("android.lens.opticalStabilizationMode", int.class);
    569 
    570     /**
    571      * <p>
    572      * Current lens status
    573      * </p>
    574      * @see #LENS_STATE_STATIONARY
    575      * @see #LENS_STATE_MOVING
    576      */
    577     public static final Key<Integer> LENS_STATE =
    578             new Key<Integer>("android.lens.state", int.class);
    579 
    580     /**
    581      * <p>
    582      * Mode of operation for the noise reduction
    583      * algorithm
    584      * </p>
    585      * @see #NOISE_REDUCTION_MODE_OFF
    586      * @see #NOISE_REDUCTION_MODE_FAST
    587      * @see #NOISE_REDUCTION_MODE_HIGH_QUALITY
    588      */
    589     public static final Key<Integer> NOISE_REDUCTION_MODE =
    590             new Key<Integer>("android.noiseReduction.mode", int.class);
    591 
    592     /**
    593      * <p>
    594      * A frame counter set by the framework. This value monotonically
    595      * increases with every new result (that is, each new result has a unique
    596      * frameCount value).
    597      * </p>
    598      * <p>
    599      * Reset on release()
    600      * </p>
    601      */
    602     public static final Key<Integer> REQUEST_FRAME_COUNT =
    603             new Key<Integer>("android.request.frameCount", int.class);
    604 
    605     /**
    606      * <p>
    607      * An application-specified ID for the current
    608      * request. Must be maintained unchanged in output
    609      * frame
    610      * </p>
    611      *
    612      * @hide
    613      */
    614     public static final Key<Integer> REQUEST_ID =
    615             new Key<Integer>("android.request.id", int.class);
    616 
    617     /**
    618      * <p>
    619      * (x, y, width, height).
    620      * </p><p>
     621      * A rectangle with the top-left corner of (x,y) and size
    622      * (width, height). The region of the sensor that is used for
    623      * output. Each stream must use this rectangle to produce its
    624      * output, cropping to a smaller region if necessary to
    625      * maintain the stream's aspect ratio.
    626      * </p><p>
    627      * HAL2.x uses only (x, y, width)
    628      * </p>
    629      * <p>
    630      * Any additional per-stream cropping must be done to
    631      * maximize the final pixel area of the stream.
    632      * </p><p>
    633      * For example, if the crop region is set to a 4:3 aspect
    634      * ratio, then 4:3 streams should use the exact crop
    635      * region. 16:9 streams should further crop vertically
    636      * (letterbox).
    637      * </p><p>
    638      * Conversely, if the crop region is set to a 16:9, then 4:3
    639      * outputs should crop horizontally (pillarbox), and 16:9
    640      * streams should match exactly. These additional crops must
    641      * be centered within the crop region.
    642      * </p><p>
    643      * The output streams must maintain square pixels at all
    644      * times, no matter what the relative aspect ratios of the
    645      * crop region and the stream are.  Negative values for
    646      * corner are allowed for raw output if full pixel array is
    647      * larger than active pixel array. Width and height may be
    648      * rounded to nearest larger supportable width, especially
    649      * for raw output, where only a few fixed scales may be
    650      * possible. The width and height of the crop region cannot
    651      * be set to be smaller than floor( activeArraySize.width /
    652      * android.scaler.maxDigitalZoom ) and floor(
    653      * activeArraySize.height / android.scaler.maxDigitalZoom),
    654      * respectively.
    655      * </p>
    656      */
    657     public static final Key<android.graphics.Rect> SCALER_CROP_REGION =
    658             new Key<android.graphics.Rect>("android.scaler.cropRegion", android.graphics.Rect.class);
    659 
    660     /**
    661      * <p>
    662      * Duration each pixel is exposed to
    663      * light.
    664      * </p><p>
    665      * If the sensor can't expose this exact duration, it should shorten the
    666      * duration exposed to the nearest possible value (rather than expose longer).
    667      * </p>
    668      * <p>
    669      * 1/10000 - 30 sec range. No bulb mode
    670      * </p>
    671      */
    672     public static final Key<Long> SENSOR_EXPOSURE_TIME =
    673             new Key<Long>("android.sensor.exposureTime", long.class);
    674 
    675     /**
    676      * <p>
    677      * Duration from start of frame exposure to
    678      * start of next frame exposure
    679      * </p>
    680      * <p>
    681      * Exposure time has priority, so duration is set to
    682      * max(duration, exposure time + overhead)
    683      * </p>
    684      */
    685     public static final Key<Long> SENSOR_FRAME_DURATION =
    686             new Key<Long>("android.sensor.frameDuration", long.class);
    687 
    688     /**
    689      * <p>
    690      * Gain applied to image data. Must be
    691      * implemented through analog gain only if set to values
    692      * below 'maximum analog sensitivity'.
    693      * </p><p>
    694      * If the sensor can't apply this exact gain, it should lessen the
    695      * gain to the nearest possible value (rather than gain more).
    696      * </p>
    697      * <p>
    698      * ISO 12232:2006 REI method
    699      * </p>
    700      */
    701     public static final Key<Integer> SENSOR_SENSITIVITY =
    702             new Key<Integer>("android.sensor.sensitivity", int.class);
    703 
    704     /**
    705      * <p>
    706      * Time at start of exposure of first
    707      * row
    708      * </p>
    709      * <p>
    710      * Monotonic, should be synced to other timestamps in
    711      * system
    712      * </p>
    713      */
    714     public static final Key<Long> SENSOR_TIMESTAMP =
    715             new Key<Long>("android.sensor.timestamp", long.class);
    716 
    717     /**
    718      * <p>
    719      * The temperature of the sensor, sampled at the time
    720      * exposure began for this frame.
    721      * </p><p>
    722      * The thermal diode being queried should be inside the sensor PCB, or
    723      * somewhere close to it.
    724      * </p>
    725      *
    726      * <b>Optional</b> - This value may be null on some devices.
    727      *
    728      * <b>{@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL}</b> -
    729      * Present on all devices that report being FULL level hardware devices in the
    730      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL HARDWARE_LEVEL} key.
    731      */
    732     public static final Key<Float> SENSOR_TEMPERATURE =
    733             new Key<Float>("android.sensor.temperature", float.class);
    734 
    735     /**
    736      * <p>
    737      * State of the face detector
    738      * unit
    739      * </p>
    740      * <p>
    741      * Whether face detection is enabled, and whether it
    742      * should output just the basic fields or the full set of
    743      * fields. Value must be one of the
    744      * android.statistics.info.availableFaceDetectModes.
    745      * </p>
    746      * @see #STATISTICS_FACE_DETECT_MODE_OFF
    747      * @see #STATISTICS_FACE_DETECT_MODE_SIMPLE
    748      * @see #STATISTICS_FACE_DETECT_MODE_FULL
    749      */
    750     public static final Key<Integer> STATISTICS_FACE_DETECT_MODE =
    751             new Key<Integer>("android.statistics.faceDetectMode", int.class);
    752 
    753     /**
    754      * <p>
    755      * List of unique IDs for detected
    756      * faces
    757      * </p>
    758      * <p>
    759      * Only available if faceDetectMode == FULL
    760      * </p>
    761      */
    762     public static final Key<int[]> STATISTICS_FACE_IDS =
    763             new Key<int[]>("android.statistics.faceIds", int[].class);
    764 
    765     /**
    766      * <p>
    767      * List of landmarks for detected
    768      * faces
    769      * </p>
    770      * <p>
    771      * Only available if faceDetectMode == FULL
    772      * </p>
    773      */
    774     public static final Key<int[]> STATISTICS_FACE_LANDMARKS =
    775             new Key<int[]>("android.statistics.faceLandmarks", int[].class);
    776 
    777     /**
    778      * <p>
    779      * List of the bounding rectangles for detected
    780      * faces
    781      * </p>
    782      * <p>
    783      * Only available if faceDetectMode != OFF
    784      * </p>
    785      */
    786     public static final Key<android.graphics.Rect[]> STATISTICS_FACE_RECTANGLES =
    787             new Key<android.graphics.Rect[]>("android.statistics.faceRectangles", android.graphics.Rect[].class);
    788 
    789     /**
    790      * <p>
    791      * List of the face confidence scores for
    792      * detected faces
    793      * </p>
    794      * <p>
    795      * Only available if faceDetectMode != OFF. The value should be
    796      * meaningful (for example, setting 100 at all times is illegal).
    797      * </p>
    798      */
    799     public static final Key<byte[]> STATISTICS_FACE_SCORES =
    800             new Key<byte[]>("android.statistics.faceScores", byte[].class);
    801 
    802     /**
    803      * <p>
    804      * A low-resolution map of lens shading, per
    805      * color channel
    806      * </p>
    807      * <p>
    808      * Assume bilinear interpolation of map. The least
    809      * shaded section of the image should have a gain factor
    810      * of 1; all other sections should have gains above 1.
    811      * the map should be on the order of 30-40 rows, and
    812      * must be smaller than 64x64.
    813      * </p><p>
    814      * When android.colorCorrection.mode = TRANSFORM_MATRIX, the map
    815      * must take into account the colorCorrection settings.
    816      * </p>
    817      */
    818     public static final Key<float[]> STATISTICS_LENS_SHADING_MAP =
    819             new Key<float[]>("android.statistics.lensShadingMap", float[].class);
    820 
    821     /**
    822      * <p>
    823      * The best-fit color channel gains calculated
    824      * by the HAL's statistics units for the current output frame
    825      * </p>
    826      * <p>
    827      * This may be different than the gains used for this frame,
    828      * since statistics processing on data from a new frame
    829      * typically completes after the transform has already been
    830      * applied to that frame.
    831      * </p><p>
    832      * The 4 channel gains are defined in Bayer domain,
    833      * see android.colorCorrection.gains for details.
    834      * </p><p>
    835      * This value should always be calculated by the AWB block,
    836      * regardless of the android.control.* current values.
    837      * </p>
    838      */
    839     public static final Key<float[]> STATISTICS_PREDICTED_COLOR_GAINS =
    840             new Key<float[]>("android.statistics.predictedColorGains", float[].class);
    841 
    842     /**
    843      * <p>
    844      * The best-fit color transform matrix estimate
    845      * calculated by the HAL's statistics units for the current
    846      * output frame
    847      * </p>
    848      * <p>
    849      * The HAL must provide the estimate from its
    850      * statistics unit on the white balance transforms to use
    851      * for the next frame. These are the values the HAL believes
    852      * are the best fit for the current output frame. This may
    853      * be different than the transform used for this frame, since
    854      * statistics processing on data from a new frame typically
    855      * completes after the transform has already been applied to
    856      * that frame.
    857      * </p><p>
    858      * These estimates must be provided for all frames, even if
    859      * capture settings and color transforms are set by the application.
    860      * </p><p>
    861      * This value should always be calculated by the AWB block,
    862      * regardless of the android.control.* current values.
    863      * </p>
    864      */
    865     public static final Key<Rational[]> STATISTICS_PREDICTED_COLOR_TRANSFORM =
    866             new Key<Rational[]>("android.statistics.predictedColorTransform", Rational[].class);
    867 
    868     /**
    869      * <p>
    870      * The HAL estimated scene illumination lighting
    871      * frequency
    872      * </p>
    873      * <p>
    874      * Report NONE if there doesn't appear to be flickering
    875      * illumination
    876      * </p>
    877      * @see #STATISTICS_SCENE_FLICKER_NONE
    878      * @see #STATISTICS_SCENE_FLICKER_50HZ
    879      * @see #STATISTICS_SCENE_FLICKER_60HZ
    880      */
    881     public static final Key<Integer> STATISTICS_SCENE_FLICKER =
    882             new Key<Integer>("android.statistics.sceneFlicker", int.class);
    883 
    884     /**
    885      * <p>
    886      * Table mapping blue input values to output
    887      * values
    888      * </p>
    889      * <p>
    890      * Tonemapping / contrast / gamma curve for the blue
    891      * channel, to use when android.tonemap.mode is CONTRAST_CURVE.
    892      * </p><p>
    893      * See android.tonemap.curveRed for more details.
    894      * </p>
    895      */
    896     public static final Key<float[]> TONEMAP_CURVE_BLUE =
    897             new Key<float[]>("android.tonemap.curveBlue", float[].class);
    898 
    899     /**
    900      * <p>
    901      * Table mapping green input values to output
    902      * values
    903      * </p>
    904      * <p>
    905      * Tonemapping / contrast / gamma curve for the green
    906      * channel, to use when android.tonemap.mode is CONTRAST_CURVE.
    907      * </p><p>
    908      * See android.tonemap.curveRed for more details.
    909      * </p>
    910      */
    911     public static final Key<float[]> TONEMAP_CURVE_GREEN =
    912             new Key<float[]>("android.tonemap.curveGreen", float[].class);
    913 
    914     /**
    915      * <p>
    916      * Table mapping red input values to output
    917      * values
    918      * </p>
    919      * <p>
    920      * Tonemapping / contrast / gamma curve for the red
    921      * channel, to use when android.tonemap.mode is CONTRAST_CURVE.
    922      * </p><p>
    923      * Since the input and output ranges may vary depending on
    924      * the camera pipeline, the input and output pixel values
    925      * are represented by normalized floating-point values
    926      * between 0 and 1, with 0 == black and 1 == white.
    927      * </p><p>
    928      * The curve should be linearly interpolated between the
    929      * defined points. The points will be listed in increasing
    930      * order of P_IN. For example, if the array is: [0.0, 0.0,
    931      * 0.3, 0.5, 1.0, 1.0], then the input->output mapping
    932      * for a few sample points would be: 0 -> 0, 0.15 ->
    933      * 0.25, 0.3 -> 0.5, 0.5 -> 0.64
    934      * </p>
    935      */
    936     public static final Key<float[]> TONEMAP_CURVE_RED =
    937             new Key<float[]>("android.tonemap.curveRed", float[].class);
    938 
    939     /**
    940      * @see #TONEMAP_MODE_CONTRAST_CURVE
    941      * @see #TONEMAP_MODE_FAST
    942      * @see #TONEMAP_MODE_HIGH_QUALITY
    943      */
    944     public static final Key<Integer> TONEMAP_MODE =
    945             new Key<Integer>("android.tonemap.mode", int.class);
    946 
    947     /**
    948      * <p>
    949      * This LED is nominally used to indicate to the user
    950      * that the camera is powered on and may be streaming images back to the
    951      * Application Processor. In certain rare circumstances, the OS may
    952      * disable this when video is processed locally and not transmitted to
    953      * any untrusted applications.
    954      * </p><p>
    955      * In particular, the LED *must* always be on when the data could be
    956      * transmitted off the device. The LED *should* always be on whenever
    957      * data is stored locally on the device.
    958      * </p><p>
    959      * The LED *may* be off if a trusted application is using the data that
    960      * doesn't violate the above rules.
    961      * </p>
    962      *
    963      * @hide
    964      */
    965     public static final Key<Boolean> LED_TRANSMIT =
    966             new Key<Boolean>("android.led.transmit", boolean.class);
    967 
    968     /**
    969      * <p>
    970      * Whether black-level compensation is locked
    971      * to its current values, or is free to vary
    972      * </p>
    973      * <p>
    974      * When set to ON, the values used for black-level
    975      * compensation must not change until the lock is set to
    976      * OFF
    977      * </p><p>
    978      * Since changes to certain capture parameters (such as
    979      * exposure time) may require resetting of black level
    980      * compensation, the HAL must report whether setting the
    981      * black level lock was successful in the output result
    982      * metadata.
    983      * </p><p>
    984      * The black level locking must happen at the sensor, and not at the ISP.
    985      * If for some reason black level locking is no longer legal (for example,
    986      * the analog gain has changed, which forces black levels to be
    987      * recalculated), then the HAL is free to override this request (and it
    988      * must report 'OFF' when this does happen) until the next time locking
    989      * is legal again.
    990      * </p>
    991      */
    992     public static final Key<Boolean> BLACK_LEVEL_LOCK =
    993             new Key<Boolean>("android.blackLevel.lock", boolean.class);
    994 
    995     /*~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~
    996      * End generated code
    997      *~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~O@*/
    998 
    999     /**
   1000      * <p>
   1001      * List of the {@link Face Faces} detected through camera face detection
   1002      * in this result.
   1003      * </p>
   1004      * <p>
   1005      * Only available if {@link #STATISTICS_FACE_DETECT_MODE} {@code !=}
   1006      * {@link CameraMetadata#STATISTICS_FACE_DETECT_MODE_OFF OFF}.
   1007      * </p>
   1008      *
   1009      * @see Face
   1010      */
   1011     public static final Key<Face[]> STATISTICS_FACES =
   1012             new Key<Face[]>("android.statistics.faces", Face[].class);
   1013 }
   1014