// Home | History | Annotate | Download | only in camera2
      1 /*
      2  * Copyright (C) 2013 The Android Open Source Project
      3  *
      4  * Licensed under the Apache License, Version 2.0 (the "License");
      5  * you may not use this file except in compliance with the License.
      6  * You may obtain a copy of the License at
      7  *
      8  *      http://www.apache.org/licenses/LICENSE-2.0
      9  *
     10  * Unless required by applicable law or agreed to in writing, software
     11  * distributed under the License is distributed on an "AS IS" BASIS,
     12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     13  * See the License for the specific language governing permissions and
     14  * limitations under the License.
     15  */
     16 
     17 package android.hardware.camera2;
     18 
     19 import android.hardware.camera2.impl.CameraMetadataNative;
     20 import android.hardware.camera2.CameraDevice.CaptureListener;
     21 import android.os.Parcel;
     22 import android.os.Parcelable;
     23 import android.view.Surface;
     24 
     25 import java.util.HashSet;
     26 import java.util.Objects;
     27 
     28 
     29 /**
     30  * <p>An immutable package of settings and outputs needed to capture a single
     31  * image from the camera device.</p>
     32  *
     33  * <p>Contains the configuration for the capture hardware (sensor, lens, flash),
     34  * the processing pipeline, the control algorithms, and the output buffers. Also
     35  * contains the list of target Surfaces to send image data to for this
     36  * capture.</p>
     37  *
     38  * <p>CaptureRequests can be created by using a {@link Builder} instance,
     39  * obtained by calling {@link CameraDevice#createCaptureRequest}</p>
     40  *
     41  * <p>CaptureRequests are given to {@link CameraDevice#capture} or
     42  * {@link CameraDevice#setRepeatingRequest} to capture images from a camera.</p>
     43  *
     44  * <p>Each request can specify a different subset of target Surfaces for the
     45  * camera to send the captured data to. All the surfaces used in a request must
     46  * be part of the surface list given to the last call to
     47  * {@link CameraDevice#configureOutputs}, when the request is submitted to the
     48  * camera device.</p>
     49  *
     50  * <p>For example, a request meant for repeating preview might only include the
     51  * Surface for the preview SurfaceView or SurfaceTexture, while a
     52  * high-resolution still capture would also include a Surface from a ImageReader
     53  * configured for high-resolution JPEG images.</p>
     54  *
     55  * @see CameraDevice#capture
     56  * @see CameraDevice#setRepeatingRequest
     57  * @see CameraDevice#createCaptureRequest
     58  */
     59 public final class CaptureRequest extends CameraMetadata implements Parcelable {
     60 
     61     private final HashSet<Surface> mSurfaceSet;
     62     private final CameraMetadataNative mSettings;
     63 
     64     private Object mUserTag;
     65 
     66     /**
     67      * Construct empty request.
     68      *
     69      * Used by Binder to unparcel this object only.
     70      */
     71     private CaptureRequest() {
     72         mSettings = new CameraMetadataNative();
     73         mSurfaceSet = new HashSet<Surface>();
     74     }
     75 
     76     /**
     77      * Clone from source capture request.
     78      *
     79      * Used by the Builder to create an immutable copy.
     80      */
     81     @SuppressWarnings("unchecked")
     82     private CaptureRequest(CaptureRequest source) {
     83         mSettings = new CameraMetadataNative(source.mSettings);
     84         mSurfaceSet = (HashSet<Surface>) source.mSurfaceSet.clone();
     85         mUserTag = source.mUserTag;
     86     }
     87 
     88     /**
     89      * Take ownership of passed-in settings.
     90      *
     91      * Used by the Builder to create a mutable CaptureRequest.
     92      */
     93     private CaptureRequest(CameraMetadataNative settings) {
     94         mSettings = settings;
     95         mSurfaceSet = new HashSet<Surface>();
     96     }
     97 
    /**
     * Get a capture request field value.
     *
     * @param key The metadata field to read.
     * @return The value of that key, or {@code null} if the field is not set.
     */
    @SuppressWarnings("unchecked")
    @Override
    public <T> T get(Key<T> key) {
        return mSettings.get(key);
    }
    103 
    /**
     * Retrieve the tag for this request, if any.
     *
     * <p>This tag is not used for anything by the camera device, but can be
     * used by an application to easily identify a CaptureRequest when it is
     * returned by
     * {@link CameraDevice.CaptureListener#onCaptureCompleted CaptureListener.onCaptureCompleted}
     * </p>
     *
     * @return the last tag Object set on this request with
     *     {@link Builder#setTag}, or {@code null} if no tag has been set.
     * @see Builder#setTag
     */
    public Object getTag() {
        return mUserTag;
    }
    120 
    121     /**
    122      * Determine whether this CaptureRequest is equal to another CaptureRequest.
    123      *
    124      * <p>A request is considered equal to another is if it's set of key/values is equal, it's
    125      * list of output surfaces is equal, and the user tag is equal.</p>
    126      *
    127      * @param other Another instance of CaptureRequest.
    128      *
    129      * @return True if the requests are the same, false otherwise.
    130      */
    131     @Override
    132     public boolean equals(Object other) {
    133         return other instanceof CaptureRequest
    134                 && equals((CaptureRequest)other);
    135     }
    136 
    137     private boolean equals(CaptureRequest other) {
    138         return other != null
    139                 && Objects.equals(mUserTag, other.mUserTag)
    140                 && mSurfaceSet.equals(other.mSurfaceSet)
    141                 && mSettings.equals(other.mSettings);
    142     }
    143 
    144     @Override
    145     public int hashCode() {
    146         return mSettings.hashCode();
    147     }
    148 
    /**
     * Parcelable factory: inflates a CaptureRequest from a Parcel written by
     * {@link #writeToParcel}.
     */
    public static final Parcelable.Creator<CaptureRequest> CREATOR =
            new Parcelable.Creator<CaptureRequest>() {
        @Override
        public CaptureRequest createFromParcel(Parcel in) {
            // Start from an empty request, then populate settings and surfaces.
            CaptureRequest request = new CaptureRequest();
            request.readFromParcel(in);

            return request;
        }

        @Override
        public CaptureRequest[] newArray(int size) {
            return new CaptureRequest[size];
        }
    };
    164 
    165     /**
    166      * Expand this object from a Parcel.
    167      * Hidden since this breaks the immutability of CaptureRequest, but is
    168      * needed to receive CaptureRequests with aidl.
    169      *
    170      * @param in The parcel from which the object should be read
    171      * @hide
    172      */
    173     public void readFromParcel(Parcel in) {
    174         mSettings.readFromParcel(in);
    175 
    176         mSurfaceSet.clear();
    177 
    178         Parcelable[] parcelableArray = in.readParcelableArray(Surface.class.getClassLoader());
    179 
    180         if (parcelableArray == null) {
    181             return;
    182         }
    183 
    184         for (Parcelable p : parcelableArray) {
    185             Surface s = (Surface) p;
    186             mSurfaceSet.add(s);
    187         }
    188     }
    189 
    /**
     * {@inheritDoc}
     */
    @Override
    public int describeContents() {
        // Returning 0 reports no special content flags for this Parcelable.
        return 0;
    }
    194 
    195     @Override
    196     public void writeToParcel(Parcel dest, int flags) {
    197         mSettings.writeToParcel(dest, flags);
    198         dest.writeParcelableArray(mSurfaceSet.toArray(new Surface[mSurfaceSet.size()]), flags);
    199     }
    200 
    /**
     * A builder for capture requests.
     *
     * <p>To obtain a builder instance, use the
     * {@link CameraDevice#createCaptureRequest} method, which initializes the
     * request fields to one of the templates defined in {@link CameraDevice}.</p>
     *
     * @see CameraDevice#createCaptureRequest
     * @see #TEMPLATE_PREVIEW
     * @see #TEMPLATE_RECORD
     * @see #TEMPLATE_STILL_CAPTURE
     * @see #TEMPLATE_VIDEO_SNAPSHOT
     * @see #TEMPLATE_MANUAL
     */
    public final static class Builder {

        // The request under construction; mutated in place until build()
        // takes an immutable copy of it.
        private final CaptureRequest mRequest;

        /**
         * Initialize the builder using the template; the request takes
         * ownership of the template.
         *
         * @param template initial settings; not copied, so the caller must
         *     not use it afterwards
         * @hide
         */
        public Builder(CameraMetadataNative template) {
            mRequest = new CaptureRequest(template);
        }

        /**
         * <p>Add a surface to the list of targets for this request.</p>
         *
         * <p>The Surface added must be one of the surfaces included in the most
         * recent call to {@link CameraDevice#configureOutputs}, when the
         * request is given to the camera device.</p>
         *
         * <p>Adding a target more than once has no effect.</p>
         *
         * @param outputTarget Surface to use as an output target for this request
         */
        public void addTarget(Surface outputTarget) {
            mRequest.mSurfaceSet.add(outputTarget);
        }

        /**
         * <p>Remove a surface from the list of targets for this request.</p>
         *
         * <p>Removing a target that is not currently added has no effect.</p>
         *
         * @param outputTarget Surface to no longer use as an output target for this request
         */
        public void removeTarget(Surface outputTarget) {
            mRequest.mSurfaceSet.remove(outputTarget);
        }

        /**
         * Set a capture request field to a value. The field definitions can be
         * found in {@link CaptureRequest}.
         *
         * @param key The metadata field to write.
         * @param value The value to set the field to, which must be of a matching
         * type to the key.
         */
        public <T> void set(Key<T> key, T value) {
            mRequest.mSettings.set(key, value);
        }

        /**
         * Get a capture request field value. The field definitions can be
         * found in {@link CaptureRequest}.
         *
         * @throws IllegalArgumentException if the key was not valid
         *
         * @param key The metadata field to read.
         * @return The value of that key, or {@code null} if the field is not set.
         */
        public <T> T get(Key<T> key) {
            return mRequest.mSettings.get(key);
        }

        /**
         * Set a tag for this request.
         *
         * <p>This tag is not used for anything by the camera device, but can be
         * used by an application to easily identify a CaptureRequest when it is
         * returned by
         * {@link CameraDevice.CaptureListener#onCaptureCompleted CaptureListener.onCaptureCompleted}
         * </p>
         *
         * @param tag an arbitrary Object to store with this request
         * @see CaptureRequest#getTag
         */
        public void setTag(Object tag) {
            mRequest.mUserTag = tag;
        }

        /**
         * Build a request using the current target Surfaces and settings.
         *
         * @return A new capture request instance, ready for submission to the
         * camera device.
         */
        public CaptureRequest build() {
            return new CaptureRequest(mRequest);
        }


        /**
         * Whether the underlying settings contain no fields.
         *
         * @hide
         */
        public boolean isEmpty() {
            return mRequest.mSettings.isEmpty();
        }

    }
    314 
    315     /*@O~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~
    316      * The key entries below this point are generated from metadata
    317      * definitions in /system/media/camera/docs. Do not modify by hand or
    318      * modify the comment blocks at the start or end.
    319      *~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~*/
    320 
    321     /**
    322      * <p>
    323      * When android.control.awbMode is not OFF, TRANSFORM_MATRIX
    324      * should be ignored.
    325      * </p>
    326      * @see #COLOR_CORRECTION_MODE_TRANSFORM_MATRIX
    327      * @see #COLOR_CORRECTION_MODE_FAST
    328      * @see #COLOR_CORRECTION_MODE_HIGH_QUALITY
    329      */
    330     public static final Key<Integer> COLOR_CORRECTION_MODE =
    331             new Key<Integer>("android.colorCorrection.mode", int.class);
    332 
    333     /**
    334      * <p>
    335      * A color transform matrix to use to transform
    336      * from sensor RGB color space to output linear sRGB color space
    337      * </p>
    338      * <p>
    339      * This matrix is either set by HAL when the request
    340      * android.colorCorrection.mode is not TRANSFORM_MATRIX, or
    341      * directly by the application in the request when the
    342      * android.colorCorrection.mode is TRANSFORM_MATRIX.
    343      * </p><p>
    344      * In the latter case, the HAL may round the matrix to account
    345      * for precision issues; the final rounded matrix should be
    346      * reported back in this matrix result metadata.
    347      * </p>
    348      */
    349     public static final Key<Rational[]> COLOR_CORRECTION_TRANSFORM =
    350             new Key<Rational[]>("android.colorCorrection.transform", Rational[].class);
    351 
    352     /**
    353      * <p>
    354      * Gains applying to Bayer color channels for
    355      * white-balance
    356      * </p>
    357      * <p>
    358      * The 4-channel white-balance gains are defined in
    359      * the order of [R G_even G_odd B], where G_even is the gain
    360      * for green pixels on even rows of the output, and G_odd
     * is the gain for green pixels on the odd rows. If a HAL
     * does not support a separate gain for even/odd green channels,
     * it should use the G_even value, and write G_odd equal to
    364      * G_even in the output result metadata.
    365      * </p><p>
    366      * This array is either set by HAL when the request
    367      * android.colorCorrection.mode is not TRANSFORM_MATRIX, or
    368      * directly by the application in the request when the
    369      * android.colorCorrection.mode is TRANSFORM_MATRIX.
    370      * </p><p>
     * The output should be the gains actually applied by the HAL to
    372      * the current frame.
    373      * </p>
    374      */
    375     public static final Key<float[]> COLOR_CORRECTION_GAINS =
    376             new Key<float[]>("android.colorCorrection.gains", float[].class);
    377 
    378     /**
    379      * <p>
    380      * Enum for controlling
    381      * antibanding
    382      * </p>
    383      * @see #CONTROL_AE_ANTIBANDING_MODE_OFF
    384      * @see #CONTROL_AE_ANTIBANDING_MODE_50HZ
    385      * @see #CONTROL_AE_ANTIBANDING_MODE_60HZ
    386      * @see #CONTROL_AE_ANTIBANDING_MODE_AUTO
    387      */
    388     public static final Key<Integer> CONTROL_AE_ANTIBANDING_MODE =
    389             new Key<Integer>("android.control.aeAntibandingMode", int.class);
    390 
    391     /**
    392      * <p>
    393      * Adjustment to AE target image
    394      * brightness
    395      * </p>
    396      * <p>
    397      * For example, if EV step is 0.333, '6' will mean an
    398      * exposure compensation of +2 EV; -3 will mean an exposure
    399      * compensation of -1
    400      * </p>
    401      */
    402     public static final Key<Integer> CONTROL_AE_EXPOSURE_COMPENSATION =
    403             new Key<Integer>("android.control.aeExposureCompensation", int.class);
    404 
    405     /**
    406      * <p>
    407      * Whether AE is currently locked to its latest
    408      * calculated values
    409      * </p>
    410      * <p>
    411      * Note that even when AE is locked, the flash may be
    412      * fired if the AE mode is ON_AUTO_FLASH / ON_ALWAYS_FLASH /
    413      * ON_AUTO_FLASH_REDEYE.
    414      * </p>
    415      */
    416     public static final Key<Boolean> CONTROL_AE_LOCK =
    417             new Key<Boolean>("android.control.aeLock", boolean.class);
    418 
    419     /**
    420      * <p>
    421      * Whether AE is currently updating the sensor
    422      * exposure and sensitivity fields
    423      * </p>
    424      * <p>
    425      * Only effective if android.control.mode =
    426      * AUTO
    427      * </p>
    428      * @see #CONTROL_AE_MODE_OFF
    429      * @see #CONTROL_AE_MODE_ON
    430      * @see #CONTROL_AE_MODE_ON_AUTO_FLASH
    431      * @see #CONTROL_AE_MODE_ON_ALWAYS_FLASH
    432      * @see #CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE
    433      */
    434     public static final Key<Integer> CONTROL_AE_MODE =
    435             new Key<Integer>("android.control.aeMode", int.class);
    436 
    437     /**
    438      * <p>
    439      * List of areas to use for
    440      * metering
    441      * </p>
    442      * <p>
    443      * Each area is a rectangle plus weight: xmin, ymin,
    444      * xmax, ymax, weight. The rectangle is defined inclusive of the
    445      * specified coordinates.
    446      * </p><p>
    447      * The coordinate system is based on the active pixel array,
    448      * with (0,0) being the top-left pixel in the active pixel array, and
    449      * (android.sensor.info.activeArraySize.width - 1,
    450      * android.sensor.info.activeArraySize.height - 1) being the
    451      * bottom-right pixel in the active pixel array. The weight
    452      * should be nonnegative.
    453      * </p><p>
    454      * If all regions have 0 weight, then no specific metering area
    455      * needs to be used by the HAL. If the metering region is
    456      * outside the current android.scaler.cropRegion, the HAL
    457      * should ignore the sections outside the region and output the
    458      * used sections in the frame metadata
    459      * </p>
    460      */
    461     public static final Key<int[]> CONTROL_AE_REGIONS =
    462             new Key<int[]>("android.control.aeRegions", int[].class);
    463 
    464     /**
    465      * <p>
    466      * Range over which fps can be adjusted to
    467      * maintain exposure
    468      * </p>
    469      * <p>
    470      * Only constrains AE algorithm, not manual control
    471      * of android.sensor.exposureTime
    472      * </p>
    473      */
    474     public static final Key<int[]> CONTROL_AE_TARGET_FPS_RANGE =
    475             new Key<int[]>("android.control.aeTargetFpsRange", int[].class);
    476 
    477     /**
    478      * <p>
    479      * Whether the HAL must trigger precapture
    480      * metering.
    481      * </p>
    482      * <p>
    483      * This entry is normally set to IDLE, or is not
    484      * included at all in the request settings. When included and
    485      * set to START, the HAL must trigger the autoexposure
    486      * precapture metering sequence.
    487      * </p><p>
    488      * The effect of AE precapture trigger depends on the current
    489      * AE mode and state; see the camera HAL device v3 header for
    490      * details.
    491      * </p>
    492      * @see #CONTROL_AE_PRECAPTURE_TRIGGER_IDLE
    493      * @see #CONTROL_AE_PRECAPTURE_TRIGGER_START
    494      */
    495     public static final Key<Integer> CONTROL_AE_PRECAPTURE_TRIGGER =
    496             new Key<Integer>("android.control.aePrecaptureTrigger", int.class);
    497 
    498     /**
    499      * <p>
    500      * Whether AF is currently enabled, and what
    501      * mode it is set to
    502      * </p>
    503      * @see #CONTROL_AF_MODE_OFF
    504      * @see #CONTROL_AF_MODE_AUTO
    505      * @see #CONTROL_AF_MODE_MACRO
    506      * @see #CONTROL_AF_MODE_CONTINUOUS_VIDEO
    507      * @see #CONTROL_AF_MODE_CONTINUOUS_PICTURE
    508      * @see #CONTROL_AF_MODE_EDOF
    509      */
    510     public static final Key<Integer> CONTROL_AF_MODE =
    511             new Key<Integer>("android.control.afMode", int.class);
    512 
    513     /**
    514      * <p>
    515      * List of areas to use for focus
    516      * estimation
    517      * </p>
    518      * <p>
    519      * Each area is a rectangle plus weight: xmin, ymin,
    520      * xmax, ymax, weight. The rectangle is defined inclusive of the
    521      * specified coordinates.
    522      * </p><p>
    523      * The coordinate system is based on the active pixel array,
    524      * with (0,0) being the top-left pixel in the active pixel array, and
    525      * (android.sensor.info.activeArraySize.width - 1,
    526      * android.sensor.info.activeArraySize.height - 1) being the
    527      * bottom-right pixel in the active pixel array. The weight
    528      * should be nonnegative.
    529      * </p><p>
    530      * If all regions have 0 weight, then no specific focus area
    531      * needs to be used by the HAL. If the focusing region is
    532      * outside the current android.scaler.cropRegion, the HAL
    533      * should ignore the sections outside the region and output the
    534      * used sections in the frame metadata
    535      * </p>
    536      */
    537     public static final Key<int[]> CONTROL_AF_REGIONS =
    538             new Key<int[]>("android.control.afRegions", int[].class);
    539 
    540     /**
    541      * <p>
    542      * Whether the HAL must trigger autofocus.
    543      * </p>
    544      * <p>
    545      * This entry is normally set to IDLE, or is not
    546      * included at all in the request settings.
    547      * </p><p>
    548      * When included and set to START, the HAL must trigger the
    549      * autofocus algorithm. The effect of AF trigger depends on the
    550      * current AF mode and state; see the camera HAL device v3
    551      * header for details. When set to CANCEL, the HAL must cancel
    552      * any active trigger, and return to initial AF state.
    553      * </p>
    554      * @see #CONTROL_AF_TRIGGER_IDLE
    555      * @see #CONTROL_AF_TRIGGER_START
    556      * @see #CONTROL_AF_TRIGGER_CANCEL
    557      */
    558     public static final Key<Integer> CONTROL_AF_TRIGGER =
    559             new Key<Integer>("android.control.afTrigger", int.class);
    560 
    561     /**
    562      * <p>
    563      * Whether AWB is currently locked to its
    564      * latest calculated values
    565      * </p>
    566      * <p>
    567      * Note that AWB lock is only meaningful for AUTO
    568      * mode; in other modes, AWB is already fixed to a specific
    569      * setting
    570      * </p>
    571      */
    572     public static final Key<Boolean> CONTROL_AWB_LOCK =
    573             new Key<Boolean>("android.control.awbLock", boolean.class);
    574 
    575     /**
    576      * <p>
    577      * Whether AWB is currently setting the color
    578      * transform fields, and what its illumination target
    579      * is
    580      * </p>
    581      * <p>
    582      * [BC - AWB lock,AWB modes]
    583      * </p>
    584      * @see #CONTROL_AWB_MODE_OFF
    585      * @see #CONTROL_AWB_MODE_AUTO
    586      * @see #CONTROL_AWB_MODE_INCANDESCENT
    587      * @see #CONTROL_AWB_MODE_FLUORESCENT
    588      * @see #CONTROL_AWB_MODE_WARM_FLUORESCENT
    589      * @see #CONTROL_AWB_MODE_DAYLIGHT
    590      * @see #CONTROL_AWB_MODE_CLOUDY_DAYLIGHT
    591      * @see #CONTROL_AWB_MODE_TWILIGHT
    592      * @see #CONTROL_AWB_MODE_SHADE
    593      */
    594     public static final Key<Integer> CONTROL_AWB_MODE =
    595             new Key<Integer>("android.control.awbMode", int.class);
    596 
    597     /**
    598      * <p>
    599      * List of areas to use for illuminant
    600      * estimation
    601      * </p>
    602      * <p>
    603      * Only used in AUTO mode.
    604      * </p><p>
    605      * Each area is a rectangle plus weight: xmin, ymin,
    606      * xmax, ymax, weight. The rectangle is defined inclusive of the
    607      * specified coordinates.
    608      * </p><p>
    609      * The coordinate system is based on the active pixel array,
    610      * with (0,0) being the top-left pixel in the active pixel array, and
    611      * (android.sensor.info.activeArraySize.width - 1,
    612      * android.sensor.info.activeArraySize.height - 1) being the
    613      * bottom-right pixel in the active pixel array. The weight
    614      * should be nonnegative.
    615      * </p><p>
    616      * If all regions have 0 weight, then no specific metering area
    617      * needs to be used by the HAL. If the metering region is
    618      * outside the current android.scaler.cropRegion, the HAL
    619      * should ignore the sections outside the region and output the
    620      * used sections in the frame metadata
    621      * </p>
    622      */
    623     public static final Key<int[]> CONTROL_AWB_REGIONS =
    624             new Key<int[]>("android.control.awbRegions", int[].class);
    625 
    626     /**
    627      * <p>
    628      * Information to 3A routines about the purpose
    629      * of this capture, to help decide optimal 3A
    630      * strategy
    631      * </p>
    632      * <p>
    633      * Only used if android.control.mode != OFF.
    634      * </p>
    635      * @see #CONTROL_CAPTURE_INTENT_CUSTOM
    636      * @see #CONTROL_CAPTURE_INTENT_PREVIEW
    637      * @see #CONTROL_CAPTURE_INTENT_STILL_CAPTURE
    638      * @see #CONTROL_CAPTURE_INTENT_VIDEO_RECORD
    639      * @see #CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT
    640      * @see #CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG
    641      */
    642     public static final Key<Integer> CONTROL_CAPTURE_INTENT =
    643             new Key<Integer>("android.control.captureIntent", int.class);
    644 
    645     /**
    646      * <p>
    647      * Whether any special color effect is in use.
    648      * Only used if android.control.mode != OFF
    649      * </p>
    650      * @see #CONTROL_EFFECT_MODE_OFF
    651      * @see #CONTROL_EFFECT_MODE_MONO
    652      * @see #CONTROL_EFFECT_MODE_NEGATIVE
    653      * @see #CONTROL_EFFECT_MODE_SOLARIZE
    654      * @see #CONTROL_EFFECT_MODE_SEPIA
    655      * @see #CONTROL_EFFECT_MODE_POSTERIZE
    656      * @see #CONTROL_EFFECT_MODE_WHITEBOARD
    657      * @see #CONTROL_EFFECT_MODE_BLACKBOARD
    658      * @see #CONTROL_EFFECT_MODE_AQUA
    659      */
    660     public static final Key<Integer> CONTROL_EFFECT_MODE =
    661             new Key<Integer>("android.control.effectMode", int.class);
    662 
    663     /**
    664      * <p>
    665      * Overall mode of 3A control
    666      * routines
    667      * </p>
    668      * @see #CONTROL_MODE_OFF
    669      * @see #CONTROL_MODE_AUTO
    670      * @see #CONTROL_MODE_USE_SCENE_MODE
    671      */
    672     public static final Key<Integer> CONTROL_MODE =
    673             new Key<Integer>("android.control.mode", int.class);
    674 
    675     /**
    676      * <p>
    677      * Which scene mode is active when
    678      * android.control.mode = SCENE_MODE
    679      * </p>
    680      * @see #CONTROL_SCENE_MODE_UNSUPPORTED
    681      * @see #CONTROL_SCENE_MODE_FACE_PRIORITY
    682      * @see #CONTROL_SCENE_MODE_ACTION
    683      * @see #CONTROL_SCENE_MODE_PORTRAIT
    684      * @see #CONTROL_SCENE_MODE_LANDSCAPE
    685      * @see #CONTROL_SCENE_MODE_NIGHT
    686      * @see #CONTROL_SCENE_MODE_NIGHT_PORTRAIT
    687      * @see #CONTROL_SCENE_MODE_THEATRE
    688      * @see #CONTROL_SCENE_MODE_BEACH
    689      * @see #CONTROL_SCENE_MODE_SNOW
    690      * @see #CONTROL_SCENE_MODE_SUNSET
    691      * @see #CONTROL_SCENE_MODE_STEADYPHOTO
    692      * @see #CONTROL_SCENE_MODE_FIREWORKS
    693      * @see #CONTROL_SCENE_MODE_SPORTS
    694      * @see #CONTROL_SCENE_MODE_PARTY
    695      * @see #CONTROL_SCENE_MODE_CANDLELIGHT
    696      * @see #CONTROL_SCENE_MODE_BARCODE
    697      */
    698     public static final Key<Integer> CONTROL_SCENE_MODE =
    699             new Key<Integer>("android.control.sceneMode", int.class);
    700 
    701     /**
    702      * <p>
    703      * Whether video stabilization is
    704      * active
    705      * </p>
    706      * <p>
    707      * If enabled, video stabilization can modify the
    708      * android.scaler.cropRegion to keep the video stream
    709      * stabilized
    710      * </p>
    711      */
    712     public static final Key<Boolean> CONTROL_VIDEO_STABILIZATION_MODE =
    713             new Key<Boolean>("android.control.videoStabilizationMode", boolean.class);
    714 
    715     /**
    716      * <p>
    717      * Operation mode for edge
    718      * enhancement
    719      * </p>
    720      * @see #EDGE_MODE_OFF
    721      * @see #EDGE_MODE_FAST
    722      * @see #EDGE_MODE_HIGH_QUALITY
    723      */
    724     public static final Key<Integer> EDGE_MODE =
    725             new Key<Integer>("android.edge.mode", int.class);
    726 
    727     /**
    728      * <p>
    729      * Select flash operation mode
    730      * </p>
    731      * @see #FLASH_MODE_OFF
    732      * @see #FLASH_MODE_SINGLE
    733      * @see #FLASH_MODE_TORCH
    734      */
    735     public static final Key<Integer> FLASH_MODE =
    736             new Key<Integer>("android.flash.mode", int.class);
    737 
    738     /**
    739      * <p>
    740      * GPS coordinates to include in output JPEG
    741      * EXIF
    742      * </p>
    743      */
    744     public static final Key<double[]> JPEG_GPS_COORDINATES =
    745             new Key<double[]>("android.jpeg.gpsCoordinates", double[].class);
    746 
    747     /**
    748      * <p>
    749      * 32 characters describing GPS algorithm to
    750      * include in EXIF
    751      * </p>
    752      */
    753     public static final Key<String> JPEG_GPS_PROCESSING_METHOD =
    754             new Key<String>("android.jpeg.gpsProcessingMethod", String.class);
    755 
    756     /**
    757      * <p>
    758      * Time GPS fix was made to include in
    759      * EXIF
    760      * </p>
    761      */
    762     public static final Key<Long> JPEG_GPS_TIMESTAMP =
    763             new Key<Long>("android.jpeg.gpsTimestamp", long.class);
    764 
    765     /**
    766      * <p>
    767      * Orientation of JPEG image to
    768      * write
    769      * </p>
    770      */
    771     public static final Key<Integer> JPEG_ORIENTATION =
    772             new Key<Integer>("android.jpeg.orientation", int.class);
    773 
    774     /**
    775      * <p>
    776      * Compression quality of the final JPEG
    777      * image
    778      * </p>
    779      * <p>
    780      * 85-95 is typical usage range
    781      * </p>
    782      */
    783     public static final Key<Byte> JPEG_QUALITY =
    784             new Key<Byte>("android.jpeg.quality", byte.class);
    785 
    786     /**
    787      * <p>
    788      * Compression quality of JPEG
    789      * thumbnail
    790      * </p>
    791      */
    792     public static final Key<Byte> JPEG_THUMBNAIL_QUALITY =
    793             new Key<Byte>("android.jpeg.thumbnailQuality", byte.class);
    794 
    795     /**
    796      * <p>
    797      * Resolution of embedded JPEG
    798      * thumbnail
    799      * </p>
    800      */
    801     public static final Key<android.hardware.camera2.Size> JPEG_THUMBNAIL_SIZE =
    802             new Key<android.hardware.camera2.Size>("android.jpeg.thumbnailSize", android.hardware.camera2.Size.class);
    803 
    804     /**
    805      * <p>
    806      * Size of the lens aperture
    807      * </p>
    808      * <p>
    809      * Will not be supported on most devices. Can only
    810      * pick from supported list
    811      * </p>
    812      */
    813     public static final Key<Float> LENS_APERTURE =
    814             new Key<Float>("android.lens.aperture", float.class);
    815 
    816     /**
    817      * <p>
    818      * State of lens neutral density
    819      * filter(s)
    820      * </p>
    821      * <p>
    822      * Will not be supported on most devices. Can only
    823      * pick from supported list
    824      * </p>
    825      */
    826     public static final Key<Float> LENS_FILTER_DENSITY =
    827             new Key<Float>("android.lens.filterDensity", float.class);
    828 
    829     /**
    830      * <p>
    831      * Lens optical zoom setting
    832      * </p>
    833      * <p>
    834      * Will not be supported on most devices.
    835      * </p>
    836      */
    837     public static final Key<Float> LENS_FOCAL_LENGTH =
    838             new Key<Float>("android.lens.focalLength", float.class);
    839 
    840     /**
    841      * <p>
    842      * Distance to plane of sharpest focus,
    843      * measured from frontmost surface of the lens
    844      * </p>
    845      * <p>
    846      * 0 = infinity focus. Used value should be clamped
    847      * to (0,minimum focus distance)
    848      * </p>
    849      */
    850     public static final Key<Float> LENS_FOCUS_DISTANCE =
    851             new Key<Float>("android.lens.focusDistance", float.class);
    852 
    853     /**
    854      * <p>
    855      * Whether optical image stabilization is
    856      * enabled.
    857      * </p>
    858      * <p>
    859      * Will not be supported on most devices.
    860      * </p>
    861      * @see #LENS_OPTICAL_STABILIZATION_MODE_OFF
    862      * @see #LENS_OPTICAL_STABILIZATION_MODE_ON
    863      */
    864     public static final Key<Integer> LENS_OPTICAL_STABILIZATION_MODE =
    865             new Key<Integer>("android.lens.opticalStabilizationMode", int.class);
    866 
    867     /**
    868      * <p>
    869      * Mode of operation for the noise reduction
    870      * algorithm
    871      * </p>
    872      * @see #NOISE_REDUCTION_MODE_OFF
    873      * @see #NOISE_REDUCTION_MODE_FAST
    874      * @see #NOISE_REDUCTION_MODE_HIGH_QUALITY
    875      */
    876     public static final Key<Integer> NOISE_REDUCTION_MODE =
    877             new Key<Integer>("android.noiseReduction.mode", int.class);
    878 
    879     /**
    880      * <p>
    881      * An application-specified ID for the current
    882      * request. Must be maintained unchanged in output
    883      * frame
    884      * </p>
    885      *
    886      * @hide
    887      */
    888     public static final Key<Integer> REQUEST_ID =
    889             new Key<Integer>("android.request.id", int.class);
    890 
    891     /**
    892      * <p>
    893      * (x, y, width, height).
    894      * </p><p>
    895      * A rectangle with the top-level corner of (x,y) and size
    896      * (width, height). The region of the sensor that is used for
    897      * output. Each stream must use this rectangle to produce its
    898      * output, cropping to a smaller region if necessary to
    899      * maintain the stream's aspect ratio.
    900      * </p><p>
    901      * HAL2.x uses only (x, y, width)
    902      * </p>
    903      * <p>
    904      * Any additional per-stream cropping must be done to
    905      * maximize the final pixel area of the stream.
    906      * </p><p>
    907      * For example, if the crop region is set to a 4:3 aspect
    908      * ratio, then 4:3 streams should use the exact crop
    909      * region. 16:9 streams should further crop vertically
    910      * (letterbox).
    911      * </p><p>
    912      * Conversely, if the crop region is set to a 16:9, then 4:3
    913      * outputs should crop horizontally (pillarbox), and 16:9
    914      * streams should match exactly. These additional crops must
    915      * be centered within the crop region.
    916      * </p><p>
    917      * The output streams must maintain square pixels at all
    918      * times, no matter what the relative aspect ratios of the
    919      * crop region and the stream are.  Negative values for
    920      * corner are allowed for raw output if full pixel array is
    921      * larger than active pixel array. Width and height may be
    922      * rounded to nearest larger supportable width, especially
    923      * for raw output, where only a few fixed scales may be
    924      * possible. The width and height of the crop region cannot
    925      * be set to be smaller than floor( activeArraySize.width /
    926      * android.scaler.maxDigitalZoom ) and floor(
    927      * activeArraySize.height / android.scaler.maxDigitalZoom),
    928      * respectively.
    929      * </p>
    930      */
    931     public static final Key<android.graphics.Rect> SCALER_CROP_REGION =
    932             new Key<android.graphics.Rect>("android.scaler.cropRegion", android.graphics.Rect.class);
    933 
    934     /**
    935      * <p>
    936      * Duration each pixel is exposed to
    937      * light.
    938      * </p><p>
    939      * If the sensor can't expose this exact duration, it should shorten the
    940      * duration exposed to the nearest possible value (rather than expose longer).
    941      * </p>
    942      * <p>
    943      * 1/10000 - 30 sec range. No bulb mode
    944      * </p>
    945      */
    946     public static final Key<Long> SENSOR_EXPOSURE_TIME =
    947             new Key<Long>("android.sensor.exposureTime", long.class);
    948 
    949     /**
    950      * <p>
    951      * Duration from start of frame exposure to
    952      * start of next frame exposure
    953      * </p>
    954      * <p>
    955      * Exposure time has priority, so duration is set to
    956      * max(duration, exposure time + overhead)
    957      * </p>
    958      */
    959     public static final Key<Long> SENSOR_FRAME_DURATION =
    960             new Key<Long>("android.sensor.frameDuration", long.class);
    961 
    962     /**
    963      * <p>
    964      * Gain applied to image data. Must be
    965      * implemented through analog gain only if set to values
    966      * below 'maximum analog sensitivity'.
    967      * </p><p>
    968      * If the sensor can't apply this exact gain, it should lessen the
    969      * gain to the nearest possible value (rather than gain more).
    970      * </p>
    971      * <p>
    972      * ISO 12232:2006 REI method
    973      * </p>
    974      */
    975     public static final Key<Integer> SENSOR_SENSITIVITY =
    976             new Key<Integer>("android.sensor.sensitivity", int.class);
    977 
    978     /**
    979      * <p>
    980      * State of the face detector
    981      * unit
    982      * </p>
    983      * <p>
    984      * Whether face detection is enabled, and whether it
    985      * should output just the basic fields or the full set of
    986      * fields. Value must be one of the
    987      * android.statistics.info.availableFaceDetectModes.
    988      * </p>
    989      * @see #STATISTICS_FACE_DETECT_MODE_OFF
    990      * @see #STATISTICS_FACE_DETECT_MODE_SIMPLE
    991      * @see #STATISTICS_FACE_DETECT_MODE_FULL
    992      */
    993     public static final Key<Integer> STATISTICS_FACE_DETECT_MODE =
    994             new Key<Integer>("android.statistics.faceDetectMode", int.class);
    995 
    996     /**
    997      * <p>
    998      * Whether the HAL needs to output the lens
    999      * shading map in output result metadata
   1000      * </p>
   1001      * <p>
   1002      * When set to ON,
   1003      * android.statistics.lensShadingMap must be provided in
   1004      * the output result metdata.
   1005      * </p>
   1006      * @see #STATISTICS_LENS_SHADING_MAP_MODE_OFF
   1007      * @see #STATISTICS_LENS_SHADING_MAP_MODE_ON
   1008      */
   1009     public static final Key<Integer> STATISTICS_LENS_SHADING_MAP_MODE =
   1010             new Key<Integer>("android.statistics.lensShadingMapMode", int.class);
   1011 
   1012     /**
   1013      * <p>
   1014      * Table mapping blue input values to output
   1015      * values
   1016      * </p>
   1017      * <p>
   1018      * Tonemapping / contrast / gamma curve for the blue
   1019      * channel, to use when android.tonemap.mode is CONTRAST_CURVE.
   1020      * </p><p>
   1021      * See android.tonemap.curveRed for more details.
   1022      * </p>
   1023      */
   1024     public static final Key<float[]> TONEMAP_CURVE_BLUE =
   1025             new Key<float[]>("android.tonemap.curveBlue", float[].class);
   1026 
   1027     /**
   1028      * <p>
   1029      * Table mapping green input values to output
   1030      * values
   1031      * </p>
   1032      * <p>
   1033      * Tonemapping / contrast / gamma curve for the green
   1034      * channel, to use when android.tonemap.mode is CONTRAST_CURVE.
   1035      * </p><p>
   1036      * See android.tonemap.curveRed for more details.
   1037      * </p>
   1038      */
   1039     public static final Key<float[]> TONEMAP_CURVE_GREEN =
   1040             new Key<float[]>("android.tonemap.curveGreen", float[].class);
   1041 
   1042     /**
   1043      * <p>
   1044      * Table mapping red input values to output
   1045      * values
   1046      * </p>
   1047      * <p>
   1048      * Tonemapping / contrast / gamma curve for the red
   1049      * channel, to use when android.tonemap.mode is CONTRAST_CURVE.
   1050      * </p><p>
   1051      * Since the input and output ranges may vary depending on
   1052      * the camera pipeline, the input and output pixel values
   1053      * are represented by normalized floating-point values
   1054      * between 0 and 1, with 0 == black and 1 == white.
   1055      * </p><p>
   1056      * The curve should be linearly interpolated between the
   1057      * defined points. The points will be listed in increasing
   1058      * order of P_IN. For example, if the array is: [0.0, 0.0,
   1059      * 0.3, 0.5, 1.0, 1.0], then the input->output mapping
   1060      * for a few sample points would be: 0 -> 0, 0.15 ->
   1061      * 0.25, 0.3 -> 0.5, 0.5 -> 0.64
   1062      * </p>
   1063      */
   1064     public static final Key<float[]> TONEMAP_CURVE_RED =
   1065             new Key<float[]>("android.tonemap.curveRed", float[].class);
   1066 
   1067     /**
   1068      * @see #TONEMAP_MODE_CONTRAST_CURVE
   1069      * @see #TONEMAP_MODE_FAST
   1070      * @see #TONEMAP_MODE_HIGH_QUALITY
   1071      */
   1072     public static final Key<Integer> TONEMAP_MODE =
   1073             new Key<Integer>("android.tonemap.mode", int.class);
   1074 
   1075     /**
   1076      * <p>
   1077      * This LED is nominally used to indicate to the user
   1078      * that the camera is powered on and may be streaming images back to the
   1079      * Application Processor. In certain rare circumstances, the OS may
   1080      * disable this when video is processed locally and not transmitted to
   1081      * any untrusted applications.
   1082      * </p><p>
   1083      * In particular, the LED *must* always be on when the data could be
   1084      * transmitted off the device. The LED *should* always be on whenever
   1085      * data is stored locally on the device.
   1086      * </p><p>
   1087      * The LED *may* be off if a trusted application is using the data that
   1088      * doesn't violate the above rules.
   1089      * </p>
   1090      *
   1091      * @hide
   1092      */
   1093     public static final Key<Boolean> LED_TRANSMIT =
   1094             new Key<Boolean>("android.led.transmit", boolean.class);
   1095 
   1096     /**
   1097      * <p>
   1098      * Whether black-level compensation is locked
   1099      * to its current values, or is free to vary
   1100      * </p>
   1101      * <p>
   1102      * When set to ON, the values used for black-level
   1103      * compensation must not change until the lock is set to
   1104      * OFF
   1105      * </p><p>
   1106      * Since changes to certain capture parameters (such as
   1107      * exposure time) may require resetting of black level
   1108      * compensation, the HAL must report whether setting the
   1109      * black level lock was successful in the output result
   1110      * metadata.
   1111      * </p><p>
   1112      * The black level locking must happen at the sensor, and not at the ISP.
   1113      * If for some reason black level locking is no longer legal (for example,
   1114      * the analog gain has changed, which forces black levels to be
   1115      * recalculated), then the HAL is free to override this request (and it
   1116      * must report 'OFF' when this does happen) until the next time locking
   1117      * is legal again.
   1118      * </p>
   1119      */
   1120     public static final Key<Boolean> BLACK_LEVEL_LOCK =
   1121             new Key<Boolean>("android.blackLevel.lock", boolean.class);
   1122 
   1123     /*~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~
   1124      * End generated code
   1125      *~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~O@*/
   1126 }
   1127