      1 /*
      2  * Copyright (C) 2012 The Android Open Source Project
      3  *
      4  * Licensed under the Apache License, Version 2.0 (the "License");
      5  * you may not use this file except in compliance with the License.
      6  * You may obtain a copy of the License at
      7  *
      8  *      http://www.apache.org/licenses/LICENSE-2.0
      9  *
     10  * Unless required by applicable law or agreed to in writing, software
     11  * distributed under the License is distributed on an "AS IS" BASIS,
     12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     13  * See the License for the specific language governing permissions and
     14  * limitations under the License.
     15  */
     16 
     17 package android.hardware.camera2;
     18 
     19 import android.annotation.NonNull;
     20 import android.annotation.Nullable;
     21 import android.hardware.camera2.impl.CameraMetadataNative;
     22 import android.hardware.camera2.impl.CaptureResultExtras;
     23 import android.hardware.camera2.impl.PublicKey;
     24 import android.hardware.camera2.impl.SyntheticKey;
     25 import android.hardware.camera2.utils.TypeReference;
     26 import android.util.Log;
     27 import android.util.Rational;
     28 
     29 import java.util.List;
     30 
     31 /**
     32  * <p>The subset of the results of a single image capture from the image sensor.</p>
     33  *
     34  * <p>Contains a subset of the final configuration for the capture hardware (sensor, lens,
     35  * flash), the processing pipeline, the control algorithms, and the output
     36  * buffers.</p>
     37  *
     38  * <p>CaptureResults are produced by a {@link CameraDevice} after processing a
     39  * {@link CaptureRequest}. All properties listed for capture requests can also
     40  * be queried on the capture result, to determine the final values used for
     41  * capture. The result also includes additional metadata about the state of the
     42  * camera device during the capture.</p>
     43  *
     44  * <p>Not all properties returned by {@link CameraCharacteristics#getAvailableCaptureResultKeys()}
     45  * are necessarily available. Some results are {@link CaptureResult partial} and will
     46  * not have every key set. Only {@link TotalCaptureResult total} results are guaranteed to have
     47  * every key available that was enabled by the request.</p>
     48  *
     49  * <p>{@link CaptureResult} objects are immutable.</p>
     50  *
     51  */
     52 public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> {
     53 
     54     private static final String TAG = "CaptureResult";
     55     private static final boolean VERBOSE = false;
     56 
     57     /**
     58      * A {@code Key} is used to do capture result field lookups with
     59      * {@link CaptureResult#get}.
     60      *
     61      * <p>For example, to get the timestamp corresponding to the exposure of the first row:
     62      * <code><pre>
     63      * long timestamp = captureResult.get(CaptureResult.SENSOR_TIMESTAMP);
     64      * </pre></code>
     65      * </p>
     66      *
     67      * <p>To enumerate over all possible keys for {@link CaptureResult}, see
     68      * {@link CameraCharacteristics#getAvailableCaptureResultKeys}.</p>
     69      *
     70      * @see CaptureResult#get
     71      * @see CameraCharacteristics#getAvailableCaptureResultKeys
     72      */
     73     public final static class Key<T> {
     74         private final CameraMetadataNative.Key<T> mKey;
     75 
     76         /**
     77          * Visible for testing and vendor extensions only.
     78          *
     79          * @hide
     80          */
     81         public Key(String name, Class<T> type, long vendorId) {
     82             mKey = new CameraMetadataNative.Key<T>(name, type, vendorId);
     83         }
     84 
     85         /**
     86          * Visible for testing and vendor extensions only.
     87          *
     88          * @hide
     89          */
     90         public Key(String name, String fallbackName, Class<T> type) {
     91             mKey = new CameraMetadataNative.Key<T>(name, fallbackName, type);
     92         }
     93 
     94        /**
     95          * Visible for testing and vendor extensions only.
     96          *
     97          * @hide
     98          */
     99         public Key(String name, Class<T> type) {
    100             mKey = new CameraMetadataNative.Key<T>(name, type);
    101         }
    102 
    103         /**
    104          * Visible for testing and vendor extensions only.
    105          *
    106          * @hide
    107          */
    108         public Key(String name, TypeReference<T> typeReference) {
    109             mKey = new CameraMetadataNative.Key<T>(name, typeReference);
    110         }
    111 
    112         /**
     113          * Return a camelCase, period-separated name formatted like:
    114          * {@code "root.section[.subsections].name"}.
    115          *
    116          * <p>Built-in keys exposed by the Android SDK are always prefixed with {@code "android."};
    117          * keys that are device/platform-specific are prefixed with {@code "com."}.</p>
    118          *
    119          * <p>For example, {@code CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP} would
    120          * have a name of {@code "android.scaler.streamConfigurationMap"}; whereas a device
    121          * specific key might look like {@code "com.google.nexus.data.private"}.</p>
    122          *
    123          * @return String representation of the key name
    124          */
    125         @NonNull
    126         public String getName() {
    127             return mKey.getName();
    128         }
    129 
    130         /**
    131          * Return vendor tag id.
    132          *
    133          * @hide
    134          */
    135         public long getVendorId() {
    136             return mKey.getVendorId();
    137         }
    138 
    139         /**
    140          * {@inheritDoc}
    141          */
    142         @Override
    143         public final int hashCode() {
    144             return mKey.hashCode();
    145         }
    146 
    147         /**
    148          * {@inheritDoc}
    149          */
    150         @SuppressWarnings("unchecked")
    151         @Override
    152         public final boolean equals(Object o) {
    153             return o instanceof Key && ((Key<T>)o).mKey.equals(mKey);
    154         }
    155 
    156         /**
    157          * Return this {@link Key} as a string representation.
    158          *
    159          * <p>{@code "CaptureResult.Key(%s)"}, where {@code %s} represents
    160          * the name of this key as returned by {@link #getName}.</p>
    161          *
    162          * @return string representation of {@link Key}
    163          */
    164         @NonNull
    165         @Override
    166         public String toString() {
    167             return String.format("CaptureResult.Key(%s)", mKey.getName());
    168         }
    169 
    170         /**
    171          * Visible for CameraMetadataNative implementation only; do not use.
    172          *
    173          * TODO: Make this private or remove it altogether.
    174          *
    175          * @hide
    176          */
    177         public CameraMetadataNative.Key<T> getNativeKey() {
    178             return mKey;
    179         }
    180 
    181         @SuppressWarnings({ "unchecked" })
    182         /*package*/ Key(CameraMetadataNative.Key<?> nativeKey) {
    183             mKey = (CameraMetadataNative.Key<T>) nativeKey;
    184         }
    185     }
    186 
    187     private final CameraMetadataNative mResults;
    188     private final CaptureRequest mRequest;
    189     private final int mSequenceId;
    190     private final long mFrameNumber;
    191 
    192     /**
    193      * Takes ownership of the passed-in properties object
    194      *
    195      * <p>For internal use only</p>
    196      * @hide
    197      */
    198     public CaptureResult(CameraMetadataNative results, CaptureRequest parent,
    199             CaptureResultExtras extras) {
    200         if (results == null) {
    201             throw new IllegalArgumentException("results was null");
    202         }
    203 
    204         if (parent == null) {
    205             throw new IllegalArgumentException("parent was null");
    206         }
    207 
    208         if (extras == null) {
    209             throw new IllegalArgumentException("extras was null");
    210         }
    211 
    212         mResults = CameraMetadataNative.move(results);
    213         if (mResults.isEmpty()) {
    214             throw new AssertionError("Results must not be empty");
    215         }
    216         setNativeInstance(mResults);
    217         mRequest = parent;
    218         mSequenceId = extras.getRequestId();
    219         mFrameNumber = extras.getFrameNumber();
    220     }
    221 
    222     /**
    223      * Returns a copy of the underlying {@link CameraMetadataNative}.
    224      * @hide
    225      */
    226     public CameraMetadataNative getNativeCopy() {
    227         return new CameraMetadataNative(mResults);
    228     }
    229 
    230     /**
    231      * Creates a request-less result.
    232      *
    233      * <p><strong>For testing only.</strong></p>
    234      * @hide
    235      */
    236     public CaptureResult(CameraMetadataNative results, int sequenceId) {
    237         if (results == null) {
    238             throw new IllegalArgumentException("results was null");
    239         }
    240 
    241         mResults = CameraMetadataNative.move(results);
    242         if (mResults.isEmpty()) {
    243             throw new AssertionError("Results must not be empty");
    244         }
    245 
    246         setNativeInstance(mResults);
    247         mRequest = null;
    248         mSequenceId = sequenceId;
    249         mFrameNumber = -1;
    250     }
    251 
    252     /**
    253      * Get a capture result field value.
    254      *
    255      * <p>The field definitions can be found in {@link CaptureResult}.</p>
    256      *
    257      * <p>Querying the value for the same key more than once will return a value
    258      * which is equal to the previous queried value.</p>
    259      *
    260      * @throws IllegalArgumentException if the key was not valid
    261      *
    262      * @param key The result field to read.
    263      * @return The value of that key, or {@code null} if the field is not set.
    264      */
    265     @Nullable
    266     public <T> T get(Key<T> key) {
    267         T value = mResults.get(key);
    268         if (VERBOSE) Log.v(TAG, "#get for Key = " + key.getName() + ", returned value = " + value);
    269         return value;
    270     }
    271 
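             // Illustrative sketch (not part of the original source): typical use of get() from a
             // capture callback. Field values are nullable, so the result is checked before use.
             //
             //     CameraCaptureSession.CaptureCallback callback =
             //             new CameraCaptureSession.CaptureCallback() {
             //         @Override
             //         public void onCaptureCompleted(CameraCaptureSession session,
             //                 CaptureRequest request, TotalCaptureResult result) {
             //             Long timestamp = result.get(CaptureResult.SENSOR_TIMESTAMP);
             //             if (timestamp != null) {
             //                 Log.d(TAG, "Start of exposure: " + timestamp + " ns");
             //             }
             //         }
             //     };
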
    272     /**
    273      * {@inheritDoc}
    274      * @hide
    275      */
    276     @SuppressWarnings("unchecked")
    277     @Override
    278     protected <T> T getProtected(Key<?> key) {
    279         return (T) mResults.get(key);
    280     }
    281 
    282     /**
    283      * {@inheritDoc}
    284      * @hide
    285      */
    286     @SuppressWarnings("unchecked")
    287     @Override
    288     protected Class<Key<?>> getKeyClass() {
    289         Object thisClass = Key.class;
    290         return (Class<Key<?>>)thisClass;
    291     }
    292 
    293     /**
    294      * Dumps the native metadata contents to logcat.
    295      *
    296      * <p>Visibility for testing/debugging only. The results will not
    297      * include any synthesized keys, as they are invisible to the native layer.</p>
    298      *
    299      * @hide
    300      */
    301     public void dumpToLog() {
    302         mResults.dumpToLog();
    303     }
    304 
    305     /**
    306      * {@inheritDoc}
    307      */
    308     @Override
    309     @NonNull
    310     public List<Key<?>> getKeys() {
    311         // Force the javadoc for this function to show up on the CaptureResult page
    312         return super.getKeys();
    313     }
    314 
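             // Illustrative sketch (not part of the original source): listing every key the
             // camera device filled in for this result. Assumes `result` is a CaptureResult.
             //
             //     for (CaptureResult.Key<?> key : result.getKeys()) {
             //         Log.d(TAG, key.getName() + " = " + result.get(key));
             //     }
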
    315     /**
    316      * Get the request associated with this result.
    317      *
    318      * <p>Whenever a request has been fully or partially captured, with
    319      * {@link CameraCaptureSession.CaptureCallback#onCaptureCompleted} or
    320      * {@link CameraCaptureSession.CaptureCallback#onCaptureProgressed}, the {@code result}'s
    321      * {@code getRequest()} will return that {@code request}.
    322      * </p>
    323      *
    324      * <p>For example,
    325      * <code><pre>cameraDevice.capture(someRequest, new CaptureCallback() {
    326      *     {@literal @}Override
    327      *     void onCaptureCompleted(CaptureRequest myRequest, CaptureResult myResult) {
     328      *         assert(myResult.getRequest().equals(myRequest));
    329      *     }
    330      * }, null);
     331      * </pre></code>
    332      * </p>
    333      *
    334      * @return The request associated with this result. Never {@code null}.
    335      */
    336     @NonNull
    337     public CaptureRequest getRequest() {
    338         return mRequest;
    339     }
    340 
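             // Illustrative sketch (not part of the original source): correlating a result with
             // the request that produced it, for example to recover a tag attached with
             // CaptureRequest.Builder#setTag.
             //
             //     @Override
             //     public void onCaptureCompleted(CameraCaptureSession session,
             //             CaptureRequest request, TotalCaptureResult result) {
             //         Object tag = result.getRequest().getTag();
             //         // getRequest() returns the same request instance passed to capture().
             //     }
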
    341     /**
    342      * Get the frame number associated with this result.
    343      *
    344      * <p>Whenever a request has been processed, regardless of failure or success,
    345      * it gets a unique frame number assigned to its future result/failure.</p>
    346      *
    347      * <p>For the same type of request (capturing from the camera device or reprocessing), this
    348      * value monotonically increments, starting with 0, for every new result or failure and the
    349      * scope is the lifetime of the {@link CameraDevice}. Between different types of requests,
    350      * the frame number may not monotonically increment. For example, the frame number of a newer
    351      * reprocess result may be smaller than the frame number of an older result of capturing new
    352      * images from the camera device, but the frame number of a newer reprocess result will never be
    353      * smaller than the frame number of an older reprocess result.</p>
    354      *
    355      * @return The frame number
    356      *
    357      * @see CameraDevice#createCaptureRequest
    358      * @see CameraDevice#createReprocessCaptureRequest
    359      */
    360     public long getFrameNumber() {
    361         return mFrameNumber;
    362     }
    363 
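             // Illustrative sketch (not part of the original source): frame numbers are shared
             // between results and failures, so a failure can be tied back to the frame it
             // replaced.
             //
             //     @Override
             //     public void onCaptureFailed(CameraCaptureSession session,
             //             CaptureRequest request, CaptureFailure failure) {
             //         Log.w(TAG, "Capture of frame " + failure.getFrameNumber() + " failed");
             //     }
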
    364     /**
     365      * The sequence ID for this capture result that was returned by the
    366      * {@link CameraCaptureSession#capture} family of functions.
    367      *
    368      * <p>The sequence ID is a unique monotonically increasing value starting from 0,
    369      * incremented every time a new group of requests is submitted to the CameraDevice.</p>
    370      *
    371      * @return int The ID for the sequence of requests that this capture result is a part of
    372      *
    373      * @see CameraDevice.CaptureCallback#onCaptureSequenceCompleted
    374      * @see CameraDevice.CaptureCallback#onCaptureSequenceAborted
    375      */
    376     public int getSequenceId() {
    377         return mSequenceId;
    378     }
    379 
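             // Illustrative sketch (not part of the original source): matching results against
             // the sequence ID returned when a burst was submitted. Assumes `session`,
             // `requests`, `callback` and `handler` already exist.
             //
             //     int burstId = session.captureBurst(requests, callback, handler);
             //     // Later, inside the callback:
             //     if (result.getSequenceId() == burstId) {
             //         // This result belongs to the burst submitted above.
             //     }
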
    380     /*@O~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~
    381      * The key entries below this point are generated from metadata
    382      * definitions in /system/media/camera/docs. Do not modify by hand or
    383      * modify the comment blocks at the start or end.
    384      *~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~*/
    385 
    386     /**
    387      * <p>The mode control selects how the image data is converted from the
    388      * sensor's native color into linear sRGB color.</p>
    389      * <p>When auto-white balance (AWB) is enabled with {@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode}, this
    390      * control is overridden by the AWB routine. When AWB is disabled, the
    391      * application controls how the color mapping is performed.</p>
     392      * <p>We define the expected processing pipeline below. For consistency
     393      * across devices, this full pipeline always applies when TRANSFORM_MATRIX is used.</p>
    394      * <p>When either FULL or HIGH_QUALITY is used, the camera device may
    395      * do additional processing but {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} and
    396      * {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform} will still be provided by the
    397      * camera device (in the results) and be roughly correct.</p>
    398      * <p>Switching to TRANSFORM_MATRIX and using the data provided from
    399      * FAST or HIGH_QUALITY will yield a picture with the same white point
    400      * as what was produced by the camera device in the earlier frame.</p>
    401      * <p>The expected processing pipeline is as follows:</p>
    402      * <p><img alt="White balance processing pipeline" src="/reference/images/camera2/metadata/android.colorCorrection.mode/processing_pipeline.png" /></p>
    403      * <p>The white balance is encoded by two values, a 4-channel white-balance
    404      * gain vector (applied in the Bayer domain), and a 3x3 color transform
    405      * matrix (applied after demosaic).</p>
    406      * <p>The 4-channel white-balance gains are defined as:</p>
    407      * <pre><code>{@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} = [ R G_even G_odd B ]
    408      * </code></pre>
    409      * <p>where <code>G_even</code> is the gain for green pixels on even rows of the
    410      * output, and <code>G_odd</code> is the gain for green pixels on the odd rows.
    411      * These may be identical for a given camera device implementation; if
    412      * the camera device does not support a separate gain for even/odd green
    413      * channels, it will use the <code>G_even</code> value, and write <code>G_odd</code> equal to
    414      * <code>G_even</code> in the output result metadata.</p>
    415      * <p>The matrices for color transforms are defined as a 9-entry vector:</p>
    416      * <pre><code>{@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform} = [ I0 I1 I2 I3 I4 I5 I6 I7 I8 ]
    417      * </code></pre>
    418      * <p>which define a transform from input sensor colors, <code>P_in = [ r g b ]</code>,
    419      * to output linear sRGB, <code>P_out = [ r' g' b' ]</code>,</p>
    420      * <p>with colors as follows:</p>
    421      * <pre><code>r' = I0r + I1g + I2b
    422      * g' = I3r + I4g + I5b
    423      * b' = I6r + I7g + I8b
    424      * </code></pre>
    425      * <p>Both the input and output value ranges must match. Overflow/underflow
    426      * values are clipped to fit within the range.</p>
    427      * <p><b>Possible values:</b>
    428      * <ul>
    429      *   <li>{@link #COLOR_CORRECTION_MODE_TRANSFORM_MATRIX TRANSFORM_MATRIX}</li>
    430      *   <li>{@link #COLOR_CORRECTION_MODE_FAST FAST}</li>
    431      *   <li>{@link #COLOR_CORRECTION_MODE_HIGH_QUALITY HIGH_QUALITY}</li>
    432      * </ul></p>
    433      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
    434      * <p><b>Full capability</b> -
    435      * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
    436      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
    437      *
    438      * @see CaptureRequest#COLOR_CORRECTION_GAINS
    439      * @see CaptureRequest#COLOR_CORRECTION_TRANSFORM
    440      * @see CaptureRequest#CONTROL_AWB_MODE
    441      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
    442      * @see #COLOR_CORRECTION_MODE_TRANSFORM_MATRIX
    443      * @see #COLOR_CORRECTION_MODE_FAST
    444      * @see #COLOR_CORRECTION_MODE_HIGH_QUALITY
    445      */
    446     @PublicKey
    447     public static final Key<Integer> COLOR_CORRECTION_MODE =
    448             new Key<Integer>("android.colorCorrection.mode", int.class);
    449 
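             // Illustrative sketch (not part of the original source): freezing the white balance
             // by replaying the gains/transform reported while AWB ran, as described above.
             // Assumes `previousResult` is a recent CaptureResult and `builder` is a
             // CaptureRequest.Builder.
             //
             //     builder.set(CaptureRequest.CONTROL_AWB_MODE,
             //             CaptureRequest.CONTROL_AWB_MODE_OFF);
             //     builder.set(CaptureRequest.COLOR_CORRECTION_MODE,
             //             CaptureRequest.COLOR_CORRECTION_MODE_TRANSFORM_MATRIX);
             //     builder.set(CaptureRequest.COLOR_CORRECTION_GAINS,
             //             previousResult.get(CaptureResult.COLOR_CORRECTION_GAINS));
             //     builder.set(CaptureRequest.COLOR_CORRECTION_TRANSFORM,
             //             previousResult.get(CaptureResult.COLOR_CORRECTION_TRANSFORM));
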
    450     /**
    451      * <p>A color transform matrix to use to transform
    452      * from sensor RGB color space to output linear sRGB color space.</p>
    453      * <p>This matrix is either set by the camera device when the request
    454      * {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} is not TRANSFORM_MATRIX, or
    455      * directly by the application in the request when the
    456      * {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} is TRANSFORM_MATRIX.</p>
    457      * <p>In the latter case, the camera device may round the matrix to account
    458      * for precision issues; the final rounded matrix should be reported back
    459      * in this matrix result metadata. The transform should keep the magnitude
    460      * of the output color values within <code>[0, 1.0]</code> (assuming input color
     461      * values are within the normalized range <code>[0, 1.0]</code>), or clipping may occur.</p>
    462      * <p>The valid range of each matrix element varies on different devices, but
    463      * values within [-1.5, 3.0] are guaranteed not to be clipped.</p>
    464      * <p><b>Units</b>: Unitless scale factors</p>
    465      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
    466      * <p><b>Full capability</b> -
    467      * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
    468      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
    469      *
    470      * @see CaptureRequest#COLOR_CORRECTION_MODE
    471      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
    472      */
    473     @PublicKey
    474     public static final Key<android.hardware.camera2.params.ColorSpaceTransform> COLOR_CORRECTION_TRANSFORM =
    475             new Key<android.hardware.camera2.params.ColorSpaceTransform>("android.colorCorrection.transform", android.hardware.camera2.params.ColorSpaceTransform.class);
    476 
    477     /**
    478      * <p>Gains applying to Bayer raw color channels for
    479      * white-balance.</p>
    480      * <p>These per-channel gains are either set by the camera device
    481      * when the request {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} is not
    482      * TRANSFORM_MATRIX, or directly by the application in the
    483      * request when the {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} is
    484      * TRANSFORM_MATRIX.</p>
    485      * <p>The gains in the result metadata are the gains actually
    486      * applied by the camera device to the current frame.</p>
    487      * <p>The valid range of gains varies on different devices, but gains
    488      * between [1.0, 3.0] are guaranteed not to be clipped. Even if a given
     489      * device allows gains below 1.0, using them is usually not recommended because
     490      * they can create color artifacts.</p>
    491      * <p><b>Units</b>: Unitless gain factors</p>
    492      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
    493      * <p><b>Full capability</b> -
    494      * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
    495      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
    496      *
    497      * @see CaptureRequest#COLOR_CORRECTION_MODE
    498      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
    499      */
    500     @PublicKey
    501     public static final Key<android.hardware.camera2.params.RggbChannelVector> COLOR_CORRECTION_GAINS =
    502             new Key<android.hardware.camera2.params.RggbChannelVector>("android.colorCorrection.gains", android.hardware.camera2.params.RggbChannelVector.class);
    503 
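             // Illustrative sketch (not part of the original source): inspecting the white-balance
             // gains actually applied to this frame. Assumes `result` is a CaptureResult.
             //
             //     android.hardware.camera2.params.RggbChannelVector gains =
             //             result.get(CaptureResult.COLOR_CORRECTION_GAINS);
             //     if (gains != null) {
             //         Log.d(TAG, String.format("R=%.3f Geven=%.3f Godd=%.3f B=%.3f",
             //                 gains.getRed(), gains.getGreenEven(),
             //                 gains.getGreenOdd(), gains.getBlue()));
             //     }
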
    504     /**
    505      * <p>Mode of operation for the chromatic aberration correction algorithm.</p>
    506      * <p>Chromatic (color) aberration is caused by the fact that different wavelengths of light
     507      * cannot focus on the same point after exiting from the lens. This metadata defines
    508      * the high level control of chromatic aberration correction algorithm, which aims to
    509      * minimize the chromatic artifacts that may occur along the object boundaries in an
    510      * image.</p>
     511      * <p>FAST/HIGH_QUALITY both mean that camera device-determined aberration
    512      * correction will be applied. HIGH_QUALITY mode indicates that the camera device will
    513      * use the highest-quality aberration correction algorithms, even if it slows down
    514      * capture rate. FAST means the camera device will not slow down capture rate when
    515      * applying aberration correction.</p>
    516      * <p>LEGACY devices will always be in FAST mode.</p>
    517      * <p><b>Possible values:</b>
    518      * <ul>
    519      *   <li>{@link #COLOR_CORRECTION_ABERRATION_MODE_OFF OFF}</li>
    520      *   <li>{@link #COLOR_CORRECTION_ABERRATION_MODE_FAST FAST}</li>
    521      *   <li>{@link #COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY HIGH_QUALITY}</li>
    522      * </ul></p>
    523      * <p><b>Available values for this device:</b><br>
    524      * {@link CameraCharacteristics#COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES android.colorCorrection.availableAberrationModes}</p>
    525      * <p>This key is available on all devices.</p>
    526      *
    527      * @see CameraCharacteristics#COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES
    528      * @see #COLOR_CORRECTION_ABERRATION_MODE_OFF
    529      * @see #COLOR_CORRECTION_ABERRATION_MODE_FAST
    530      * @see #COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY
    531      */
    532     @PublicKey
    533     public static final Key<Integer> COLOR_CORRECTION_ABERRATION_MODE =
    534             new Key<Integer>("android.colorCorrection.aberrationMode", int.class);
    535 
    536     /**
    537      * <p>The desired setting for the camera device's auto-exposure
    538      * algorithm's antibanding compensation.</p>
    539      * <p>Some kinds of lighting fixtures, such as some fluorescent
    540      * lights, flicker at the rate of the power supply frequency
    541      * (60Hz or 50Hz, depending on country). While this is
    542      * typically not noticeable to a person, it can be visible to
    543      * a camera device. If a camera sets its exposure time to the
    544      * wrong value, the flicker may become visible in the
     545      * viewfinder, or appear in a final captured image as a
    546      * set of variable-brightness bands across the image.</p>
    547      * <p>Therefore, the auto-exposure routines of camera devices
    548      * include antibanding routines that ensure that the chosen
    549      * exposure value will not cause such banding. The choice of
    550      * exposure time depends on the rate of flicker, which the
    551      * camera device can detect automatically, or the expected
    552      * rate can be selected by the application using this
    553      * control.</p>
    554      * <p>A given camera device may not support all of the possible
    555      * options for the antibanding mode. The
    556      * {@link CameraCharacteristics#CONTROL_AE_AVAILABLE_ANTIBANDING_MODES android.control.aeAvailableAntibandingModes} key contains
    557      * the available modes for a given camera device.</p>
     558      * <p>AUTO mode is the default if it is available on a given
    559      * camera device. When AUTO mode is not available, the
    560      * default will be either 50HZ or 60HZ, and both 50HZ
    561      * and 60HZ will be available.</p>
    562      * <p>If manual exposure control is enabled (by setting
    563      * {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} or {@link CaptureRequest#CONTROL_MODE android.control.mode} to OFF),
    564      * then this setting has no effect, and the application must
    565      * ensure it selects exposure times that do not cause banding
    566      * issues. The {@link CaptureResult#STATISTICS_SCENE_FLICKER android.statistics.sceneFlicker} key can assist
    567      * the application in this.</p>
    568      * <p><b>Possible values:</b>
    569      * <ul>
    570      *   <li>{@link #CONTROL_AE_ANTIBANDING_MODE_OFF OFF}</li>
    571      *   <li>{@link #CONTROL_AE_ANTIBANDING_MODE_50HZ 50HZ}</li>
    572      *   <li>{@link #CONTROL_AE_ANTIBANDING_MODE_60HZ 60HZ}</li>
    573      *   <li>{@link #CONTROL_AE_ANTIBANDING_MODE_AUTO AUTO}</li>
    574      * </ul></p>
    575      * <p><b>Available values for this device:</b><br></p>
    576      * <p>{@link CameraCharacteristics#CONTROL_AE_AVAILABLE_ANTIBANDING_MODES android.control.aeAvailableAntibandingModes}</p>
    577      * <p>This key is available on all devices.</p>
    578      *
    579      * @see CameraCharacteristics#CONTROL_AE_AVAILABLE_ANTIBANDING_MODES
    580      * @see CaptureRequest#CONTROL_AE_MODE
    581      * @see CaptureRequest#CONTROL_MODE
    582      * @see CaptureResult#STATISTICS_SCENE_FLICKER
    583      * @see #CONTROL_AE_ANTIBANDING_MODE_OFF
    584      * @see #CONTROL_AE_ANTIBANDING_MODE_50HZ
    585      * @see #CONTROL_AE_ANTIBANDING_MODE_60HZ
    586      * @see #CONTROL_AE_ANTIBANDING_MODE_AUTO
    587      */
    588     @PublicKey
    589     public static final Key<Integer> CONTROL_AE_ANTIBANDING_MODE =
    590             new Key<Integer>("android.control.aeAntibandingMode", int.class);
    591 
    592     /**
    593      * <p>Adjustment to auto-exposure (AE) target image
    594      * brightness.</p>
    595      * <p>The adjustment is measured as a count of steps, with the
    596      * step size defined by {@link CameraCharacteristics#CONTROL_AE_COMPENSATION_STEP android.control.aeCompensationStep} and the
    597      * allowed range by {@link CameraCharacteristics#CONTROL_AE_COMPENSATION_RANGE android.control.aeCompensationRange}.</p>
    598      * <p>For example, if the exposure value (EV) step is 0.333, '6'
    599      * will mean an exposure compensation of +2 EV; -3 will mean an
    600      * exposure compensation of -1 EV. One EV represents a doubling
    601      * of image brightness. Note that this control will only be
    602      * effective if {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} <code>!=</code> OFF. This control
    603      * will take effect even when {@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock} <code>== true</code>.</p>
     604      * <p>When the exposure compensation value is changed, the camera device
     605      * may take several frames to reach the newly requested exposure target.
     606      * During that time, the {@link CaptureResult#CONTROL_AE_STATE android.control.aeState} field will be in the SEARCHING
    607      * state. Once the new exposure target is reached, {@link CaptureResult#CONTROL_AE_STATE android.control.aeState} will
    608      * change from SEARCHING to either CONVERGED, LOCKED (if AE lock is enabled), or
    609      * FLASH_REQUIRED (if the scene is too dark for still capture).</p>
    610      * <p><b>Units</b>: Compensation steps</p>
    611      * <p><b>Range of valid values:</b><br>
    612      * {@link CameraCharacteristics#CONTROL_AE_COMPENSATION_RANGE android.control.aeCompensationRange}</p>
    613      * <p>This key is available on all devices.</p>
    614      *
    615      * @see CameraCharacteristics#CONTROL_AE_COMPENSATION_RANGE
    616      * @see CameraCharacteristics#CONTROL_AE_COMPENSATION_STEP
    617      * @see CaptureRequest#CONTROL_AE_LOCK
    618      * @see CaptureRequest#CONTROL_AE_MODE
    619      * @see CaptureResult#CONTROL_AE_STATE
    620      */
    621     @PublicKey
    622     public static final Key<Integer> CONTROL_AE_EXPOSURE_COMPENSATION =
    623             new Key<Integer>("android.control.aeExposureCompensation", int.class);
    624 
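             // Illustrative sketch (not part of the original source): requesting roughly +1 EV of
             // exposure compensation, clamped to the supported range. Assumes `characteristics`
             // is the device's CameraCharacteristics and `builder` is a CaptureRequest.Builder.
             //
             //     Rational step =
             //             characteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_STEP);
             //     android.util.Range<Integer> range =
             //             characteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE);
             //     int steps = Math.round(1.0f / step.floatValue());   // step count for +1 EV
             //     builder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, range.clamp(steps));
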
    625     /**
    626      * <p>Whether auto-exposure (AE) is currently locked to its latest
    627      * calculated values.</p>
    628      * <p>When set to <code>true</code> (ON), the AE algorithm is locked to its latest parameters,
    629      * and will not change exposure settings until the lock is set to <code>false</code> (OFF).</p>
    630      * <p>Note that even when AE is locked, the flash may be fired if
    631      * the {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} is ON_AUTO_FLASH /
    632      * ON_ALWAYS_FLASH / ON_AUTO_FLASH_REDEYE.</p>
    633      * <p>When {@link CaptureRequest#CONTROL_AE_EXPOSURE_COMPENSATION android.control.aeExposureCompensation} is changed, even if the AE lock
    634      * is ON, the camera device will still adjust its exposure value.</p>
    635      * <p>If AE precapture is triggered (see {@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger})
    636      * when AE is already locked, the camera device will not change the exposure time
    637      * ({@link CaptureRequest#SENSOR_EXPOSURE_TIME android.sensor.exposureTime}) and sensitivity ({@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity})
    638      * parameters. The flash may be fired if the {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode}
    639      * is ON_AUTO_FLASH/ON_AUTO_FLASH_REDEYE and the scene is too dark. If the
    640      * {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} is ON_ALWAYS_FLASH, the scene may become overexposed.
    641      * Similarly, AE precapture trigger CANCEL has no effect when AE is already locked.</p>
    642      * <p>When an AE precapture sequence is triggered, AE unlock will not be able to unlock
     643      * the AE if AE is locked by the camera device internally during the precapture metering
     644      * sequence. In other words, submitting requests with AE unlock has no effect for an
    645      * ongoing precapture metering sequence. Otherwise, the precapture metering sequence
    646      * will never succeed in a sequence of preview requests where AE lock is always set
    647      * to <code>false</code>.</p>
    648      * <p>Since the camera device has a pipeline of in-flight requests, the settings that
    649      * get locked do not necessarily correspond to the settings that were present in the
    650      * latest capture result received from the camera device, since additional captures
    651      * and AE updates may have occurred even before the result was sent out. If an
    652      * application is switching between automatic and manual control and wishes to eliminate
    653      * any flicker during the switch, the following procedure is recommended:</p>
    654      * <ol>
    655      * <li>Starting in auto-AE mode:</li>
    656      * <li>Lock AE</li>
    657      * <li>Wait for the first result to be output that has the AE locked</li>
    658      * <li>Copy exposure settings from that result into a request, set the request to manual AE</li>
    659      * <li>Submit the capture request, proceed to run manual AE as desired.</li>
    660      * </ol>
    661      * <p>See {@link CaptureResult#CONTROL_AE_STATE android.control.aeState} for AE lock related state transition details.</p>
    662      * <p>This key is available on all devices.</p>
    663      *
    664      * @see CaptureRequest#CONTROL_AE_EXPOSURE_COMPENSATION
    665      * @see CaptureRequest#CONTROL_AE_MODE
    666      * @see CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER
    667      * @see CaptureResult#CONTROL_AE_STATE
    668      * @see CaptureRequest#SENSOR_EXPOSURE_TIME
    669      * @see CaptureRequest#SENSOR_SENSITIVITY
    670      */
    671     @PublicKey
    672     public static final Key<Boolean> CONTROL_AE_LOCK =
    673             new Key<Boolean>("android.control.aeLock", boolean.class);
    674 
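             // Illustrative sketch (not part of the original source): the flicker-free handoff
             // from auto to manual exposure recommended above. Assumes `previewBuilder` drives a
             // repeating preview request on `session`.
             //
             //     // 1. Lock AE while still in an auto mode.
             //     previewBuilder.set(CaptureRequest.CONTROL_AE_LOCK, true);
             //     session.setRepeatingRequest(previewBuilder.build(), callback, handler);
             //     // 2. In the callback, wait for a result whose CONTROL_AE_STATE is LOCKED, then
             //     //    copy SENSOR_EXPOSURE_TIME / SENSOR_SENSITIVITY / SENSOR_FRAME_DURATION
             //     //    from that result into a new request with CONTROL_AE_MODE set to OFF.
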
    675     /**
    676      * <p>The desired mode for the camera device's
    677      * auto-exposure routine.</p>
    678      * <p>This control is only effective if {@link CaptureRequest#CONTROL_MODE android.control.mode} is
    679      * AUTO.</p>
    680      * <p>When set to any of the ON modes, the camera device's
    681      * auto-exposure routine is enabled, overriding the
    682      * application's selected exposure time, sensor sensitivity,
    683      * and frame duration ({@link CaptureRequest#SENSOR_EXPOSURE_TIME android.sensor.exposureTime},
    684      * {@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity}, and
    685      * {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration}). If one of the FLASH modes
    686      * is selected, the camera device's flash unit controls are
    687      * also overridden.</p>
    688      * <p>The FLASH modes are only available if the camera device
    689      * has a flash unit ({@link CameraCharacteristics#FLASH_INFO_AVAILABLE android.flash.info.available} is <code>true</code>).</p>
    690      * <p>If flash TORCH mode is desired, this field must be set to
    691      * ON or OFF, and {@link CaptureRequest#FLASH_MODE android.flash.mode} set to TORCH.</p>
    692      * <p>When set to any of the ON modes, the values chosen by the
    693      * camera device auto-exposure routine for the overridden
    694      * fields for a given capture will be available in its
    695      * CaptureResult.</p>
    696      * <p><b>Possible values:</b>
    697      * <ul>
    698      *   <li>{@link #CONTROL_AE_MODE_OFF OFF}</li>
    699      *   <li>{@link #CONTROL_AE_MODE_ON ON}</li>
    700      *   <li>{@link #CONTROL_AE_MODE_ON_AUTO_FLASH ON_AUTO_FLASH}</li>
    701      *   <li>{@link #CONTROL_AE_MODE_ON_ALWAYS_FLASH ON_ALWAYS_FLASH}</li>
    702      *   <li>{@link #CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE ON_AUTO_FLASH_REDEYE}</li>
    703      *   <li>{@link #CONTROL_AE_MODE_ON_EXTERNAL_FLASH ON_EXTERNAL_FLASH}</li>
    704      * </ul></p>
    705      * <p><b>Available values for this device:</b><br>
    706      * {@link CameraCharacteristics#CONTROL_AE_AVAILABLE_MODES android.control.aeAvailableModes}</p>
    707      * <p>This key is available on all devices.</p>
    708      *
    709      * @see CameraCharacteristics#CONTROL_AE_AVAILABLE_MODES
    710      * @see CaptureRequest#CONTROL_MODE
    711      * @see CameraCharacteristics#FLASH_INFO_AVAILABLE
    712      * @see CaptureRequest#FLASH_MODE
    713      * @see CaptureRequest#SENSOR_EXPOSURE_TIME
    714      * @see CaptureRequest#SENSOR_FRAME_DURATION
    715      * @see CaptureRequest#SENSOR_SENSITIVITY
    716      * @see #CONTROL_AE_MODE_OFF
    717      * @see #CONTROL_AE_MODE_ON
    718      * @see #CONTROL_AE_MODE_ON_AUTO_FLASH
    719      * @see #CONTROL_AE_MODE_ON_ALWAYS_FLASH
    720      * @see #CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE
    721      * @see #CONTROL_AE_MODE_ON_EXTERNAL_FLASH
    722      */
    723     @PublicKey
    724     public static final Key<Integer> CONTROL_AE_MODE =
    725             new Key<Integer>("android.control.aeMode", int.class);
    726 
    727     /**
    728      * <p>List of metering areas to use for auto-exposure adjustment.</p>
    729      * <p>Not available if {@link CameraCharacteristics#CONTROL_MAX_REGIONS_AE android.control.maxRegionsAe} is 0.
    730      * Otherwise will always be present.</p>
    731      * <p>The maximum number of regions supported by the device is determined by the value
    732      * of {@link CameraCharacteristics#CONTROL_MAX_REGIONS_AE android.control.maxRegionsAe}.</p>
    733      * <p>The coordinate system is based on the active pixel array,
    734      * with (0,0) being the top-left pixel in the active pixel array, and
    735      * ({@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.width - 1,
    736      * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.height - 1) being the
    737      * bottom-right pixel in the active pixel array.</p>
    738      * <p>The weight must be within <code>[0, 1000]</code>, and represents a weight
    739      * for every pixel in the area. This means that a large metering area
    740      * with the same weight as a smaller area will have more effect in
    741      * the metering result. Metering areas can partially overlap and the
    742      * camera device will add the weights in the overlap region.</p>
    743      * <p>The weights are relative to weights of other exposure metering regions, so if only one
    744      * region is used, all non-zero weights will have the same effect. A region with 0
    745      * weight is ignored.</p>
    746      * <p>If all regions have 0 weight, then no specific metering area needs to be used by the
    747      * camera device.</p>
    748      * <p>If the metering region is outside the used {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion} returned in
    749      * capture result metadata, the camera device will ignore the sections outside the crop
    750      * region and output only the intersection rectangle as the metering region in the result
    751      * metadata.  If the region is entirely outside the crop region, it will be ignored and
    752      * not reported in the result metadata.</p>
    753      * <p><b>Units</b>: Pixel coordinates within {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}</p>
    754      * <p><b>Range of valid values:</b><br>
    755      * Coordinates must be between <code>[(0,0), (width, height))</code> of
    756      * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}</p>
    757      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
    758      *
    759      * @see CameraCharacteristics#CONTROL_MAX_REGIONS_AE
    760      * @see CaptureRequest#SCALER_CROP_REGION
    761      * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
    762      */
    763     @PublicKey
    764     public static final Key<android.hardware.camera2.params.MeteringRectangle[]> CONTROL_AE_REGIONS =
    765             new Key<android.hardware.camera2.params.MeteringRectangle[]>("android.control.aeRegions", android.hardware.camera2.params.MeteringRectangle[].class);
    766 
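             // Illustrative sketch (not part of the original source): metering on the center
             // quarter of the active array with maximum weight. Assumes the device reports
             // CONTROL_MAX_REGIONS_AE > 0, and that `characteristics` and `builder` exist.
             //
             //     android.graphics.Rect active =
             //             characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
             //     MeteringRectangle region = new MeteringRectangle(
             //             active.width() / 4, active.height() / 4,   // top-left corner
             //             active.width() / 2, active.height() / 2,   // width, height
             //             MeteringRectangle.METERING_WEIGHT_MAX);
             //     builder.set(CaptureRequest.CONTROL_AE_REGIONS,
             //             new MeteringRectangle[] { region });
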
    767     /**
    768      * <p>Range over which the auto-exposure routine can
    769      * adjust the capture frame rate to maintain good
    770      * exposure.</p>
    771      * <p>Only constrains auto-exposure (AE) algorithm, not
    772      * manual control of {@link CaptureRequest#SENSOR_EXPOSURE_TIME android.sensor.exposureTime} and
    773      * {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration}.</p>
    774      * <p><b>Units</b>: Frames per second (FPS)</p>
    775      * <p><b>Range of valid values:</b><br>
    776      * Any of the entries in {@link CameraCharacteristics#CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES android.control.aeAvailableTargetFpsRanges}</p>
    777      * <p>This key is available on all devices.</p>
    778      *
    779      * @see CameraCharacteristics#CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES
    780      * @see CaptureRequest#SENSOR_EXPOSURE_TIME
    781      * @see CaptureRequest#SENSOR_FRAME_DURATION
    782      */
    783     @PublicKey
    784     public static final Key<android.util.Range<Integer>> CONTROL_AE_TARGET_FPS_RANGE =
    785             new Key<android.util.Range<Integer>>("android.control.aeTargetFpsRange", new TypeReference<android.util.Range<Integer>>() {{ }});
    786 
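             // Illustrative sketch (not part of the original source): restricting AE to a fixed
             // 30 fps frame rate when the device advertises such a range.
             //
             //     android.util.Range<Integer>[] ranges =
             //             characteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
             //     for (android.util.Range<Integer> r : ranges) {
             //         if (r.getLower() == 30 && r.getUpper() == 30) {
             //             builder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, r);
             //             break;
             //         }
             //     }
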
    787     /**
    788      * <p>Whether the camera device will trigger a precapture
    789      * metering sequence when it processes this request.</p>
    790      * <p>This entry is normally set to IDLE, or is not
    791      * included at all in the request settings. When included and
    792      * set to START, the camera device will trigger the auto-exposure (AE)
    793      * precapture metering sequence.</p>
    794      * <p>When set to CANCEL, the camera device will cancel any active
    795      * precapture metering trigger, and return to its initial AE state.
    796      * If a precapture metering sequence is already completed, and the camera
    797      * device has implicitly locked the AE for subsequent still capture, the
    798      * CANCEL trigger will unlock the AE and return to its initial AE state.</p>
    799      * <p>The precapture sequence should be triggered before starting a
    800      * high-quality still capture for final metering decisions to
    801      * be made, and for firing pre-capture flash pulses to estimate
    802      * scene brightness and required final capture flash power, when
    803      * the flash is enabled.</p>
    804      * <p>Normally, this entry should be set to START for only a
    805      * single request, and the application should wait until the
    806      * sequence completes before starting a new one.</p>
    807      * <p>When a precapture metering sequence is finished, the camera device
    808      * may lock the auto-exposure routine internally to be able to accurately expose the
    809      * subsequent still capture image (<code>{@link CaptureRequest#CONTROL_CAPTURE_INTENT android.control.captureIntent} == STILL_CAPTURE</code>).
    810      * For this case, the AE may not resume normal scan if no subsequent still capture is
    811      * submitted. To ensure that the AE routine restarts normal scan, the application should
    812      * submit a request with <code>{@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock} == true</code>, followed by a request
    813      * with <code>{@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock} == false</code>, if the application decides not to submit a
    814      * still capture request after the precapture sequence completes. Alternatively, for
    815      * API level 23 or newer devices, the CANCEL can be used to unlock the camera device
    816      * internally locked AE if the application doesn't submit a still capture request after
     817      * the AE precapture trigger. Note that the CANCEL option was added in API level 23, and must not
     818      * be used on devices with earlier API levels.</p>
    819      * <p>The exact effect of auto-exposure (AE) precapture trigger
    820      * depends on the current AE mode and state; see
    821      * {@link CaptureResult#CONTROL_AE_STATE android.control.aeState} for AE precapture state transition
    822      * details.</p>
    823      * <p>On LEGACY-level devices, the precapture trigger is not supported;
    824      * capturing a high-resolution JPEG image will automatically trigger a
    825      * precapture sequence before the high-resolution capture, including
    826      * potentially firing a pre-capture flash.</p>
    827      * <p>Using the precapture trigger and the auto-focus trigger {@link CaptureRequest#CONTROL_AF_TRIGGER android.control.afTrigger}
    828      * simultaneously is allowed. However, since these triggers often require cooperation between
     829      * the auto-focus and auto-exposure routines (for example, the flash may need to be enabled for a
    830      * focus sweep), the camera device may delay acting on a later trigger until the previous
    831      * trigger has been fully handled. This may lead to longer intervals between the trigger and
    832      * changes to {@link CaptureResult#CONTROL_AE_STATE android.control.aeState} indicating the start of the precapture sequence, for
    833      * example.</p>
    834      * <p>If both the precapture and the auto-focus trigger are activated on the same request, then
    835      * the camera device will complete them in the optimal order for that device.</p>
    836      * <p><b>Possible values:</b>
    837      * <ul>
    838      *   <li>{@link #CONTROL_AE_PRECAPTURE_TRIGGER_IDLE IDLE}</li>
    839      *   <li>{@link #CONTROL_AE_PRECAPTURE_TRIGGER_START START}</li>
    840      *   <li>{@link #CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL CANCEL}</li>
    841      * </ul></p>
    842      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
    843      * <p><b>Limited capability</b> -
    844      * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the
    845      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
    846      *
    847      * @see CaptureRequest#CONTROL_AE_LOCK
    848      * @see CaptureResult#CONTROL_AE_STATE
    849      * @see CaptureRequest#CONTROL_AF_TRIGGER
    850      * @see CaptureRequest#CONTROL_CAPTURE_INTENT
    851      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
    852      * @see #CONTROL_AE_PRECAPTURE_TRIGGER_IDLE
    853      * @see #CONTROL_AE_PRECAPTURE_TRIGGER_START
    854      * @see #CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL
    855      */
    856     @PublicKey
    857     public static final Key<Integer> CONTROL_AE_PRECAPTURE_TRIGGER =
    858             new Key<Integer>("android.control.aePrecaptureTrigger", int.class);
    859 
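             // Illustrative sketch (not part of the original source): firing the precapture
             // sequence in a single request, then watching CONTROL_AE_STATE leave PRECAPTURE
             // before issuing the still capture. Assumes `previewBuilder` mirrors the repeating
             // preview request on `session`.
             //
             //     previewBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
             //             CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
             //     session.capture(previewBuilder.build(), callback, handler);
             //     // Reset the trigger so the repeating request does not re-fire it.
             //     previewBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
             //             CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_IDLE);
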
    860     /**
    861      * <p>Current state of the auto-exposure (AE) algorithm.</p>
    862      * <p>Switching between or enabling AE modes ({@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode}) always
    863      * resets the AE state to INACTIVE. Similarly, switching between {@link CaptureRequest#CONTROL_MODE android.control.mode},
    864      * or {@link CaptureRequest#CONTROL_SCENE_MODE android.control.sceneMode} if <code>{@link CaptureRequest#CONTROL_MODE android.control.mode} == USE_SCENE_MODE</code> resets all
    865      * the algorithm states to INACTIVE.</p>
    866      * <p>The camera device can do several state transitions between two results, if it is
    867      * allowed by the state transition table. For example: INACTIVE may never actually be
    868      * seen in a result.</p>
    869      * <p>The state in the result is the state for this image (in sync with this image): if
    870      * AE state becomes CONVERGED, then the image data associated with this result should
    871      * be good to use.</p>
    872      * <p>Below are state transition tables for different AE modes.</p>
    873      * <table>
    874      * <thead>
    875      * <tr>
    876      * <th align="center">State</th>
    877      * <th align="center">Transition Cause</th>
    878      * <th align="center">New State</th>
    879      * <th align="center">Notes</th>
    880      * </tr>
    881      * </thead>
    882      * <tbody>
    883      * <tr>
    884      * <td align="center">INACTIVE</td>
    885      * <td align="center"></td>
    886      * <td align="center">INACTIVE</td>
    887      * <td align="center">Camera device auto exposure algorithm is disabled</td>
    888      * </tr>
    889      * </tbody>
    890      * </table>
    891      * <p>When {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} is AE_MODE_ON*:</p>
    892      * <table>
    893      * <thead>
    894      * <tr>
    895      * <th align="center">State</th>
    896      * <th align="center">Transition Cause</th>
    897      * <th align="center">New State</th>
    898      * <th align="center">Notes</th>
    899      * </tr>
    900      * </thead>
    901      * <tbody>
    902      * <tr>
    903      * <td align="center">INACTIVE</td>
    904      * <td align="center">Camera device initiates AE scan</td>
    905      * <td align="center">SEARCHING</td>
    906      * <td align="center">Values changing</td>
    907      * </tr>
    908      * <tr>
    909      * <td align="center">INACTIVE</td>
    910      * <td align="center">{@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock} is ON</td>
    911      * <td align="center">LOCKED</td>
    912      * <td align="center">Values locked</td>
    913      * </tr>
    914      * <tr>
    915      * <td align="center">SEARCHING</td>
    916      * <td align="center">Camera device finishes AE scan</td>
    917      * <td align="center">CONVERGED</td>
    918      * <td align="center">Good values, not changing</td>
    919      * </tr>
    920      * <tr>
    921      * <td align="center">SEARCHING</td>
    922      * <td align="center">Camera device finishes AE scan</td>
    923      * <td align="center">FLASH_REQUIRED</td>
    924      * <td align="center">Converged but too dark w/o flash</td>
    925      * </tr>
    926      * <tr>
    927      * <td align="center">SEARCHING</td>
    928      * <td align="center">{@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock} is ON</td>
    929      * <td align="center">LOCKED</td>
    930      * <td align="center">Values locked</td>
    931      * </tr>
    932      * <tr>
    933      * <td align="center">CONVERGED</td>
    934      * <td align="center">Camera device initiates AE scan</td>
    935      * <td align="center">SEARCHING</td>
    936      * <td align="center">Values changing</td>
    937      * </tr>
    938      * <tr>
    939      * <td align="center">CONVERGED</td>
    940      * <td align="center">{@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock} is ON</td>
    941      * <td align="center">LOCKED</td>
    942      * <td align="center">Values locked</td>
    943      * </tr>
    944      * <tr>
    945      * <td align="center">FLASH_REQUIRED</td>
    946      * <td align="center">Camera device initiates AE scan</td>
    947      * <td align="center">SEARCHING</td>
    948      * <td align="center">Values changing</td>
    949      * </tr>
    950      * <tr>
    951      * <td align="center">FLASH_REQUIRED</td>
    952      * <td align="center">{@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock} is ON</td>
    953      * <td align="center">LOCKED</td>
    954      * <td align="center">Values locked</td>
    955      * </tr>
    956      * <tr>
    957      * <td align="center">LOCKED</td>
    958      * <td align="center">{@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock} is OFF</td>
    959      * <td align="center">SEARCHING</td>
    960      * <td align="center">Values not good after unlock</td>
    961      * </tr>
    962      * <tr>
    963      * <td align="center">LOCKED</td>
    964      * <td align="center">{@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock} is OFF</td>
    965      * <td align="center">CONVERGED</td>
    966      * <td align="center">Values good after unlock</td>
    967      * </tr>
    968      * <tr>
    969      * <td align="center">LOCKED</td>
    970      * <td align="center">{@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock} is OFF</td>
    971      * <td align="center">FLASH_REQUIRED</td>
    972      * <td align="center">Exposure good, but too dark</td>
    973      * </tr>
    974      * <tr>
    975      * <td align="center">PRECAPTURE</td>
    976      * <td align="center">Sequence done. {@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock} is OFF</td>
    977      * <td align="center">CONVERGED</td>
    978      * <td align="center">Ready for high-quality capture</td>
    979      * </tr>
    980      * <tr>
    981      * <td align="center">PRECAPTURE</td>
    982      * <td align="center">Sequence done. {@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock} is ON</td>
    983      * <td align="center">LOCKED</td>
    984      * <td align="center">Ready for high-quality capture</td>
    985      * </tr>
    986      * <tr>
    987      * <td align="center">LOCKED</td>
    988      * <td align="center">aeLock is ON and aePrecaptureTrigger is START</td>
    989      * <td align="center">LOCKED</td>
    990      * <td align="center">Precapture trigger is ignored when AE is already locked</td>
    991      * </tr>
    992      * <tr>
    993      * <td align="center">LOCKED</td>
    994      * <td align="center">aeLock is ON and aePrecaptureTrigger is CANCEL</td>
    995      * <td align="center">LOCKED</td>
    996      * <td align="center">Precapture trigger is ignored when AE is already locked</td>
    997      * </tr>
    998      * <tr>
    999      * <td align="center">Any state (excluding LOCKED)</td>
   1000      * <td align="center">{@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger} is START</td>
   1001      * <td align="center">PRECAPTURE</td>
   1002      * <td align="center">Start AE precapture metering sequence</td>
   1003      * </tr>
   1004      * <tr>
   1005      * <td align="center">Any state (excluding LOCKED)</td>
   1006      * <td align="center">{@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger} is CANCEL</td>
   1007      * <td align="center">INACTIVE</td>
   1008      * <td align="center">Currently active precapture metering sequence is canceled</td>
   1009      * </tr>
   1010      * </tbody>
   1011      * </table>
   1012      * <p>If the camera device supports AE external flash mode (ON_EXTERNAL_FLASH is included in
   1013      * {@link CameraCharacteristics#CONTROL_AE_AVAILABLE_MODES android.control.aeAvailableModes}), {@link CaptureResult#CONTROL_AE_STATE android.control.aeState} must be FLASH_REQUIRED after
   1014      * the camera device finishes AE scan and it's too dark without flash.</p>
   1015      * <p>For the above table, the camera device may skip reporting any state changes that happen
   1016      * without application intervention (i.e. mode switch, trigger, locking). Any state that
   1017      * can be skipped in that manner is called a transient state.</p>
    1018      * <p>For example, for the above AE modes (AE_MODE_ON*), in addition to the state transitions
    1019      * listed in the above table, it is also legal for the camera device to skip one or more
    1020      * transient states between two results. See the table below for examples:</p>
   1021      * <table>
   1022      * <thead>
   1023      * <tr>
   1024      * <th align="center">State</th>
   1025      * <th align="center">Transition Cause</th>
   1026      * <th align="center">New State</th>
   1027      * <th align="center">Notes</th>
   1028      * </tr>
   1029      * </thead>
   1030      * <tbody>
   1031      * <tr>
   1032      * <td align="center">INACTIVE</td>
   1033      * <td align="center">Camera device finished AE scan</td>
   1034      * <td align="center">CONVERGED</td>
   1035      * <td align="center">Values are already good, transient states are skipped by camera device.</td>
   1036      * </tr>
   1037      * <tr>
   1038      * <td align="center">Any state (excluding LOCKED)</td>
   1039      * <td align="center">{@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger} is START, sequence done</td>
   1040      * <td align="center">FLASH_REQUIRED</td>
   1041      * <td align="center">Converged but too dark w/o flash after a precapture sequence, transient states are skipped by camera device.</td>
   1042      * </tr>
   1043      * <tr>
   1044      * <td align="center">Any state (excluding LOCKED)</td>
   1045      * <td align="center">{@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger} is START, sequence done</td>
   1046      * <td align="center">CONVERGED</td>
   1047      * <td align="center">Converged after a precapture sequence, transient states are skipped by camera device.</td>
   1048      * </tr>
   1049      * <tr>
   1050      * <td align="center">Any state (excluding LOCKED)</td>
   1051      * <td align="center">{@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger} is CANCEL, converged</td>
   1052      * <td align="center">FLASH_REQUIRED</td>
   1053      * <td align="center">Converged but too dark w/o flash after a precapture sequence is canceled, transient states are skipped by camera device.</td>
   1054      * </tr>
   1055      * <tr>
   1056      * <td align="center">Any state (excluding LOCKED)</td>
   1057      * <td align="center">{@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger} is CANCEL, converged</td>
   1058      * <td align="center">CONVERGED</td>
    1059      * <td align="center">Converged after a precapture sequence is canceled, transient states are skipped by camera device.</td>
   1060      * </tr>
   1061      * <tr>
   1062      * <td align="center">CONVERGED</td>
   1063      * <td align="center">Camera device finished AE scan</td>
   1064      * <td align="center">FLASH_REQUIRED</td>
   1065      * <td align="center">Converged but too dark w/o flash after a new scan, transient states are skipped by camera device.</td>
   1066      * </tr>
   1067      * <tr>
   1068      * <td align="center">FLASH_REQUIRED</td>
   1069      * <td align="center">Camera device finished AE scan</td>
   1070      * <td align="center">CONVERGED</td>
   1071      * <td align="center">Converged after a new scan, transient states are skipped by camera device.</td>
   1072      * </tr>
   1073      * </tbody>
   1074      * </table>
   1075      * <p><b>Possible values:</b>
   1076      * <ul>
   1077      *   <li>{@link #CONTROL_AE_STATE_INACTIVE INACTIVE}</li>
   1078      *   <li>{@link #CONTROL_AE_STATE_SEARCHING SEARCHING}</li>
   1079      *   <li>{@link #CONTROL_AE_STATE_CONVERGED CONVERGED}</li>
   1080      *   <li>{@link #CONTROL_AE_STATE_LOCKED LOCKED}</li>
   1081      *   <li>{@link #CONTROL_AE_STATE_FLASH_REQUIRED FLASH_REQUIRED}</li>
   1082      *   <li>{@link #CONTROL_AE_STATE_PRECAPTURE PRECAPTURE}</li>
   1083      * </ul></p>
   1084      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   1085      * <p><b>Limited capability</b> -
   1086      * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the
   1087      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
   1088      *
   1089      * @see CameraCharacteristics#CONTROL_AE_AVAILABLE_MODES
   1090      * @see CaptureRequest#CONTROL_AE_LOCK
   1091      * @see CaptureRequest#CONTROL_AE_MODE
   1092      * @see CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER
   1093      * @see CaptureResult#CONTROL_AE_STATE
   1094      * @see CaptureRequest#CONTROL_MODE
   1095      * @see CaptureRequest#CONTROL_SCENE_MODE
   1096      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
   1097      * @see #CONTROL_AE_STATE_INACTIVE
   1098      * @see #CONTROL_AE_STATE_SEARCHING
   1099      * @see #CONTROL_AE_STATE_CONVERGED
   1100      * @see #CONTROL_AE_STATE_LOCKED
   1101      * @see #CONTROL_AE_STATE_FLASH_REQUIRED
   1102      * @see #CONTROL_AE_STATE_PRECAPTURE
   1103      */
   1104     @PublicKey
   1105     public static final Key<Integer> CONTROL_AE_STATE =
   1106             new Key<Integer>("android.control.aeState", int.class);
   1107 
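    // Illustrative sketch (not part of this class): one common way to consume
    // android.control.aeState from a CameraCaptureSession.CaptureCallback before
    // issuing a still capture. The surrounding callback and the takePicture() helper
    // are assumptions for the example; the key and state constants are real.
    //
    //     @Override
    //     public void onCaptureCompleted(CameraCaptureSession session,
    //             CaptureRequest request, TotalCaptureResult result) {
    //         Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
    //         // The key is optional, so aeState may be null (e.g. on LEGACY devices).
    //         if (aeState == null
    //                 || aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED
    //                 || aeState == CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED) {
    //             takePicture(); // hypothetical helper that submits the still request
    //         }
    //     }
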
   1108     /**
   1109      * <p>Whether auto-focus (AF) is currently enabled, and what
   1110      * mode it is set to.</p>
   1111      * <p>Only effective if {@link CaptureRequest#CONTROL_MODE android.control.mode} = AUTO and the lens is not fixed focus
   1112      * (i.e. <code>{@link CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE android.lens.info.minimumFocusDistance} &gt; 0</code>). Also note that
   1113      * when {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} is OFF, the behavior of AF is device
   1114      * dependent. It is recommended to lock AF by using {@link CaptureRequest#CONTROL_AF_TRIGGER android.control.afTrigger} before
   1115      * setting {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} to OFF, or set AF mode to OFF when AE is OFF.</p>
   1116      * <p>If the lens is controlled by the camera device auto-focus algorithm,
   1117      * the camera device will report the current AF status in {@link CaptureResult#CONTROL_AF_STATE android.control.afState}
   1118      * in result metadata.</p>
   1119      * <p><b>Possible values:</b>
   1120      * <ul>
   1121      *   <li>{@link #CONTROL_AF_MODE_OFF OFF}</li>
   1122      *   <li>{@link #CONTROL_AF_MODE_AUTO AUTO}</li>
   1123      *   <li>{@link #CONTROL_AF_MODE_MACRO MACRO}</li>
   1124      *   <li>{@link #CONTROL_AF_MODE_CONTINUOUS_VIDEO CONTINUOUS_VIDEO}</li>
   1125      *   <li>{@link #CONTROL_AF_MODE_CONTINUOUS_PICTURE CONTINUOUS_PICTURE}</li>
   1126      *   <li>{@link #CONTROL_AF_MODE_EDOF EDOF}</li>
   1127      * </ul></p>
   1128      * <p><b>Available values for this device:</b><br>
   1129      * {@link CameraCharacteristics#CONTROL_AF_AVAILABLE_MODES android.control.afAvailableModes}</p>
   1130      * <p>This key is available on all devices.</p>
   1131      *
   1132      * @see CaptureRequest#CONTROL_AE_MODE
   1133      * @see CameraCharacteristics#CONTROL_AF_AVAILABLE_MODES
   1134      * @see CaptureResult#CONTROL_AF_STATE
   1135      * @see CaptureRequest#CONTROL_AF_TRIGGER
   1136      * @see CaptureRequest#CONTROL_MODE
   1137      * @see CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE
   1138      * @see #CONTROL_AF_MODE_OFF
   1139      * @see #CONTROL_AF_MODE_AUTO
   1140      * @see #CONTROL_AF_MODE_MACRO
   1141      * @see #CONTROL_AF_MODE_CONTINUOUS_VIDEO
   1142      * @see #CONTROL_AF_MODE_CONTINUOUS_PICTURE
   1143      * @see #CONTROL_AF_MODE_EDOF
   1144      */
   1145     @PublicKey
   1146     public static final Key<Integer> CONTROL_AF_MODE =
   1147             new Key<Integer>("android.control.afMode", int.class);
   1148 
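    // Illustrative sketch (assumptions noted): selecting continuous-picture AF for a
    // repeating preview request. CONTINUOUS_PICTURE must be listed in
    // android.control.afAvailableModes for this device; "device", "surface", "session",
    // "captureCallback" and "handler" are placeholders for already-configured objects.
    //
    //     CaptureRequest.Builder builder =
    //             device.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
    //     builder.addTarget(surface);
    //     builder.set(CaptureRequest.CONTROL_AF_MODE,
    //             CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
    //     session.setRepeatingRequest(builder.build(), captureCallback, handler);
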
   1149     /**
   1150      * <p>List of metering areas to use for auto-focus.</p>
   1151      * <p>Not available if {@link CameraCharacteristics#CONTROL_MAX_REGIONS_AF android.control.maxRegionsAf} is 0.
   1152      * Otherwise will always be present.</p>
   1153      * <p>The maximum number of focus areas supported by the device is determined by the value
   1154      * of {@link CameraCharacteristics#CONTROL_MAX_REGIONS_AF android.control.maxRegionsAf}.</p>
   1155      * <p>The coordinate system is based on the active pixel array,
   1156      * with (0,0) being the top-left pixel in the active pixel array, and
   1157      * ({@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.width - 1,
   1158      * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.height - 1) being the
   1159      * bottom-right pixel in the active pixel array.</p>
   1160      * <p>The weight must be within <code>[0, 1000]</code>, and represents a weight
   1161      * for every pixel in the area. This means that a large metering area
   1162      * with the same weight as a smaller area will have more effect in
   1163      * the metering result. Metering areas can partially overlap and the
   1164      * camera device will add the weights in the overlap region.</p>
   1165      * <p>The weights are relative to weights of other metering regions, so if only one region
   1166      * is used, all non-zero weights will have the same effect. A region with 0 weight is
   1167      * ignored.</p>
   1168      * <p>If all regions have 0 weight, then no specific metering area needs to be used by the
   1169      * camera device. The capture result will either be a zero weight region as well, or
   1170      * the region selected by the camera device as the focus area of interest.</p>
   1171      * <p>If the metering region is outside the used {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion} returned in
   1172      * capture result metadata, the camera device will ignore the sections outside the crop
   1173      * region and output only the intersection rectangle as the metering region in the result
   1174      * metadata. If the region is entirely outside the crop region, it will be ignored and
   1175      * not reported in the result metadata.</p>
   1176      * <p><b>Units</b>: Pixel coordinates within {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}</p>
   1177      * <p><b>Range of valid values:</b><br>
   1178      * Coordinates must be between <code>[(0,0), (width, height))</code> of
   1179      * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}</p>
   1180      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   1181      *
   1182      * @see CameraCharacteristics#CONTROL_MAX_REGIONS_AF
   1183      * @see CaptureRequest#SCALER_CROP_REGION
   1184      * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
   1185      */
   1186     @PublicKey
   1187     public static final Key<android.hardware.camera2.params.MeteringRectangle[]> CONTROL_AF_REGIONS =
   1188             new Key<android.hardware.camera2.params.MeteringRectangle[]>("android.control.afRegions", android.hardware.camera2.params.MeteringRectangle[].class);
   1189 
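    // Illustrative sketch: one maximum-weight AF metering region covering the center
    // quarter of the active array, in the coordinate system described above.
    // "characteristics" and "builder" are assumed to be a CameraCharacteristics and a
    // CaptureRequest.Builder for the same camera.
    //
    //     Rect active = characteristics.get(
    //             CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
    //     MeteringRectangle region = new MeteringRectangle(
    //             active.width() / 4, active.height() / 4,   // left, top
    //             active.width() / 2, active.height() / 2,   // width, height
    //             MeteringRectangle.METERING_WEIGHT_MAX);
    //     builder.set(CaptureRequest.CONTROL_AF_REGIONS,
    //             new MeteringRectangle[] { region });
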
   1190     /**
   1191      * <p>Whether the camera device will trigger autofocus for this request.</p>
   1192      * <p>This entry is normally set to IDLE, or is not
   1193      * included at all in the request settings.</p>
   1194      * <p>When included and set to START, the camera device will trigger the
   1195      * autofocus algorithm. If autofocus is disabled, this trigger has no effect.</p>
   1196      * <p>When set to CANCEL, the camera device will cancel any active trigger,
   1197      * and return to its initial AF state.</p>
   1198      * <p>Generally, applications should set this entry to START or CANCEL for only a
   1199      * single capture, and then return it to IDLE (or not set at all). Specifying
   1200      * START for multiple captures in a row means restarting the AF operation over
   1201      * and over again.</p>
   1202      * <p>See {@link CaptureResult#CONTROL_AF_STATE android.control.afState} for what the trigger means for each AF mode.</p>
   1203      * <p>Using the autofocus trigger and the precapture trigger {@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger}
   1204      * simultaneously is allowed. However, since these triggers often require cooperation between
    1205      * the auto-focus and auto-exposure routines (for example, the flash may need to be enabled for a
   1206      * focus sweep), the camera device may delay acting on a later trigger until the previous
   1207      * trigger has been fully handled. This may lead to longer intervals between the trigger and
   1208      * changes to {@link CaptureResult#CONTROL_AF_STATE android.control.afState}, for example.</p>
   1209      * <p><b>Possible values:</b>
   1210      * <ul>
   1211      *   <li>{@link #CONTROL_AF_TRIGGER_IDLE IDLE}</li>
   1212      *   <li>{@link #CONTROL_AF_TRIGGER_START START}</li>
   1213      *   <li>{@link #CONTROL_AF_TRIGGER_CANCEL CANCEL}</li>
   1214      * </ul></p>
   1215      * <p>This key is available on all devices.</p>
   1216      *
   1217      * @see CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER
   1218      * @see CaptureResult#CONTROL_AF_STATE
   1219      * @see #CONTROL_AF_TRIGGER_IDLE
   1220      * @see #CONTROL_AF_TRIGGER_START
   1221      * @see #CONTROL_AF_TRIGGER_CANCEL
   1222      */
   1223     @PublicKey
   1224     public static final Key<Integer> CONTROL_AF_TRIGGER =
   1225             new Key<Integer>("android.control.afTrigger", int.class);
   1226 
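    // Illustrative sketch: firing a one-shot AF trigger in a single capture and then
    // returning the trigger to IDLE, as recommended above. "previewBuilder", "session",
    // "captureCallback" and "handler" are assumed to exist.
    //
    //     previewBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
    //             CaptureRequest.CONTROL_AF_TRIGGER_START);
    //     session.capture(previewBuilder.build(), captureCallback, handler);
    //     // Clear the trigger so subsequent repeating requests do not restart AF.
    //     previewBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
    //             CaptureRequest.CONTROL_AF_TRIGGER_IDLE);
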
   1227     /**
   1228      * <p>Current state of auto-focus (AF) algorithm.</p>
   1229      * <p>Switching between or enabling AF modes ({@link CaptureRequest#CONTROL_AF_MODE android.control.afMode}) always
   1230      * resets the AF state to INACTIVE. Similarly, switching between {@link CaptureRequest#CONTROL_MODE android.control.mode},
   1231      * or {@link CaptureRequest#CONTROL_SCENE_MODE android.control.sceneMode} if <code>{@link CaptureRequest#CONTROL_MODE android.control.mode} == USE_SCENE_MODE</code> resets all
   1232      * the algorithm states to INACTIVE.</p>
   1233      * <p>The camera device can do several state transitions between two results, if it is
   1234      * allowed by the state transition table. For example: INACTIVE may never actually be
   1235      * seen in a result.</p>
   1236      * <p>The state in the result is the state for this image (in sync with this image): if
   1237      * AF state becomes FOCUSED, then the image data associated with this result should
   1238      * be sharp.</p>
   1239      * <p>Below are state transition tables for different AF modes.</p>
   1240      * <p>When {@link CaptureRequest#CONTROL_AF_MODE android.control.afMode} is AF_MODE_OFF or AF_MODE_EDOF:</p>
   1241      * <table>
   1242      * <thead>
   1243      * <tr>
   1244      * <th align="center">State</th>
   1245      * <th align="center">Transition Cause</th>
   1246      * <th align="center">New State</th>
   1247      * <th align="center">Notes</th>
   1248      * </tr>
   1249      * </thead>
   1250      * <tbody>
   1251      * <tr>
   1252      * <td align="center">INACTIVE</td>
   1253      * <td align="center"></td>
   1254      * <td align="center">INACTIVE</td>
   1255      * <td align="center">Never changes</td>
   1256      * </tr>
   1257      * </tbody>
   1258      * </table>
   1259      * <p>When {@link CaptureRequest#CONTROL_AF_MODE android.control.afMode} is AF_MODE_AUTO or AF_MODE_MACRO:</p>
   1260      * <table>
   1261      * <thead>
   1262      * <tr>
   1263      * <th align="center">State</th>
   1264      * <th align="center">Transition Cause</th>
   1265      * <th align="center">New State</th>
   1266      * <th align="center">Notes</th>
   1267      * </tr>
   1268      * </thead>
   1269      * <tbody>
   1270      * <tr>
   1271      * <td align="center">INACTIVE</td>
   1272      * <td align="center">AF_TRIGGER</td>
   1273      * <td align="center">ACTIVE_SCAN</td>
   1274      * <td align="center">Start AF sweep, Lens now moving</td>
   1275      * </tr>
   1276      * <tr>
   1277      * <td align="center">ACTIVE_SCAN</td>
   1278      * <td align="center">AF sweep done</td>
   1279      * <td align="center">FOCUSED_LOCKED</td>
   1280      * <td align="center">Focused, Lens now locked</td>
   1281      * </tr>
   1282      * <tr>
   1283      * <td align="center">ACTIVE_SCAN</td>
   1284      * <td align="center">AF sweep done</td>
   1285      * <td align="center">NOT_FOCUSED_LOCKED</td>
   1286      * <td align="center">Not focused, Lens now locked</td>
   1287      * </tr>
   1288      * <tr>
   1289      * <td align="center">ACTIVE_SCAN</td>
   1290      * <td align="center">AF_CANCEL</td>
   1291      * <td align="center">INACTIVE</td>
   1292      * <td align="center">Cancel/reset AF, Lens now locked</td>
   1293      * </tr>
   1294      * <tr>
   1295      * <td align="center">FOCUSED_LOCKED</td>
   1296      * <td align="center">AF_CANCEL</td>
   1297      * <td align="center">INACTIVE</td>
   1298      * <td align="center">Cancel/reset AF</td>
   1299      * </tr>
   1300      * <tr>
   1301      * <td align="center">FOCUSED_LOCKED</td>
   1302      * <td align="center">AF_TRIGGER</td>
   1303      * <td align="center">ACTIVE_SCAN</td>
   1304      * <td align="center">Start new sweep, Lens now moving</td>
   1305      * </tr>
   1306      * <tr>
   1307      * <td align="center">NOT_FOCUSED_LOCKED</td>
   1308      * <td align="center">AF_CANCEL</td>
   1309      * <td align="center">INACTIVE</td>
   1310      * <td align="center">Cancel/reset AF</td>
   1311      * </tr>
   1312      * <tr>
   1313      * <td align="center">NOT_FOCUSED_LOCKED</td>
   1314      * <td align="center">AF_TRIGGER</td>
   1315      * <td align="center">ACTIVE_SCAN</td>
   1316      * <td align="center">Start new sweep, Lens now moving</td>
   1317      * </tr>
   1318      * <tr>
   1319      * <td align="center">Any state</td>
   1320      * <td align="center">Mode change</td>
   1321      * <td align="center">INACTIVE</td>
   1322      * <td align="center"></td>
   1323      * </tr>
   1324      * </tbody>
   1325      * </table>
   1326      * <p>For the above table, the camera device may skip reporting any state changes that happen
   1327      * without application intervention (i.e. mode switch, trigger, locking). Any state that
   1328      * can be skipped in that manner is called a transient state.</p>
   1329      * <p>For example, for these AF modes (AF_MODE_AUTO and AF_MODE_MACRO), in addition to the
    1330      * state transitions listed in the above table, it is also legal for the camera device to skip
    1331      * one or more transient states between two results. See the table below for examples:</p>
   1332      * <table>
   1333      * <thead>
   1334      * <tr>
   1335      * <th align="center">State</th>
   1336      * <th align="center">Transition Cause</th>
   1337      * <th align="center">New State</th>
   1338      * <th align="center">Notes</th>
   1339      * </tr>
   1340      * </thead>
   1341      * <tbody>
   1342      * <tr>
   1343      * <td align="center">INACTIVE</td>
   1344      * <td align="center">AF_TRIGGER</td>
   1345      * <td align="center">FOCUSED_LOCKED</td>
   1346      * <td align="center">Focus is already good or good after a scan, lens is now locked.</td>
   1347      * </tr>
   1348      * <tr>
   1349      * <td align="center">INACTIVE</td>
   1350      * <td align="center">AF_TRIGGER</td>
   1351      * <td align="center">NOT_FOCUSED_LOCKED</td>
   1352      * <td align="center">Focus failed after a scan, lens is now locked.</td>
   1353      * </tr>
   1354      * <tr>
   1355      * <td align="center">FOCUSED_LOCKED</td>
   1356      * <td align="center">AF_TRIGGER</td>
   1357      * <td align="center">FOCUSED_LOCKED</td>
   1358      * <td align="center">Focus is already good or good after a scan, lens is now locked.</td>
   1359      * </tr>
   1360      * <tr>
   1361      * <td align="center">NOT_FOCUSED_LOCKED</td>
   1362      * <td align="center">AF_TRIGGER</td>
   1363      * <td align="center">FOCUSED_LOCKED</td>
    1364      * <td align="center">Focus is good after a scan, lens is now locked.</td>
   1365      * </tr>
   1366      * </tbody>
   1367      * </table>
   1368      * <p>When {@link CaptureRequest#CONTROL_AF_MODE android.control.afMode} is AF_MODE_CONTINUOUS_VIDEO:</p>
   1369      * <table>
   1370      * <thead>
   1371      * <tr>
   1372      * <th align="center">State</th>
   1373      * <th align="center">Transition Cause</th>
   1374      * <th align="center">New State</th>
   1375      * <th align="center">Notes</th>
   1376      * </tr>
   1377      * </thead>
   1378      * <tbody>
   1379      * <tr>
   1380      * <td align="center">INACTIVE</td>
   1381      * <td align="center">Camera device initiates new scan</td>
   1382      * <td align="center">PASSIVE_SCAN</td>
   1383      * <td align="center">Start AF scan, Lens now moving</td>
   1384      * </tr>
   1385      * <tr>
   1386      * <td align="center">INACTIVE</td>
   1387      * <td align="center">AF_TRIGGER</td>
   1388      * <td align="center">NOT_FOCUSED_LOCKED</td>
   1389      * <td align="center">AF state query, Lens now locked</td>
   1390      * </tr>
   1391      * <tr>
   1392      * <td align="center">PASSIVE_SCAN</td>
   1393      * <td align="center">Camera device completes current scan</td>
   1394      * <td align="center">PASSIVE_FOCUSED</td>
   1395      * <td align="center">End AF scan, Lens now locked</td>
   1396      * </tr>
   1397      * <tr>
   1398      * <td align="center">PASSIVE_SCAN</td>
   1399      * <td align="center">Camera device fails current scan</td>
   1400      * <td align="center">PASSIVE_UNFOCUSED</td>
   1401      * <td align="center">End AF scan, Lens now locked</td>
   1402      * </tr>
   1403      * <tr>
   1404      * <td align="center">PASSIVE_SCAN</td>
   1405      * <td align="center">AF_TRIGGER</td>
   1406      * <td align="center">FOCUSED_LOCKED</td>
   1407      * <td align="center">Immediate transition, if focus is good. Lens now locked</td>
   1408      * </tr>
   1409      * <tr>
   1410      * <td align="center">PASSIVE_SCAN</td>
   1411      * <td align="center">AF_TRIGGER</td>
   1412      * <td align="center">NOT_FOCUSED_LOCKED</td>
   1413      * <td align="center">Immediate transition, if focus is bad. Lens now locked</td>
   1414      * </tr>
   1415      * <tr>
   1416      * <td align="center">PASSIVE_SCAN</td>
   1417      * <td align="center">AF_CANCEL</td>
   1418      * <td align="center">INACTIVE</td>
   1419      * <td align="center">Reset lens position, Lens now locked</td>
   1420      * </tr>
   1421      * <tr>
   1422      * <td align="center">PASSIVE_FOCUSED</td>
   1423      * <td align="center">Camera device initiates new scan</td>
   1424      * <td align="center">PASSIVE_SCAN</td>
   1425      * <td align="center">Start AF scan, Lens now moving</td>
   1426      * </tr>
   1427      * <tr>
   1428      * <td align="center">PASSIVE_UNFOCUSED</td>
   1429      * <td align="center">Camera device initiates new scan</td>
   1430      * <td align="center">PASSIVE_SCAN</td>
   1431      * <td align="center">Start AF scan, Lens now moving</td>
   1432      * </tr>
   1433      * <tr>
   1434      * <td align="center">PASSIVE_FOCUSED</td>
   1435      * <td align="center">AF_TRIGGER</td>
   1436      * <td align="center">FOCUSED_LOCKED</td>
   1437      * <td align="center">Immediate transition, lens now locked</td>
   1438      * </tr>
   1439      * <tr>
   1440      * <td align="center">PASSIVE_UNFOCUSED</td>
   1441      * <td align="center">AF_TRIGGER</td>
   1442      * <td align="center">NOT_FOCUSED_LOCKED</td>
   1443      * <td align="center">Immediate transition, lens now locked</td>
   1444      * </tr>
   1445      * <tr>
   1446      * <td align="center">FOCUSED_LOCKED</td>
   1447      * <td align="center">AF_TRIGGER</td>
   1448      * <td align="center">FOCUSED_LOCKED</td>
   1449      * <td align="center">No effect</td>
   1450      * </tr>
   1451      * <tr>
   1452      * <td align="center">FOCUSED_LOCKED</td>
   1453      * <td align="center">AF_CANCEL</td>
   1454      * <td align="center">INACTIVE</td>
   1455      * <td align="center">Restart AF scan</td>
   1456      * </tr>
   1457      * <tr>
   1458      * <td align="center">NOT_FOCUSED_LOCKED</td>
   1459      * <td align="center">AF_TRIGGER</td>
   1460      * <td align="center">NOT_FOCUSED_LOCKED</td>
   1461      * <td align="center">No effect</td>
   1462      * </tr>
   1463      * <tr>
   1464      * <td align="center">NOT_FOCUSED_LOCKED</td>
   1465      * <td align="center">AF_CANCEL</td>
   1466      * <td align="center">INACTIVE</td>
   1467      * <td align="center">Restart AF scan</td>
   1468      * </tr>
   1469      * </tbody>
   1470      * </table>
   1471      * <p>When {@link CaptureRequest#CONTROL_AF_MODE android.control.afMode} is AF_MODE_CONTINUOUS_PICTURE:</p>
   1472      * <table>
   1473      * <thead>
   1474      * <tr>
   1475      * <th align="center">State</th>
   1476      * <th align="center">Transition Cause</th>
   1477      * <th align="center">New State</th>
   1478      * <th align="center">Notes</th>
   1479      * </tr>
   1480      * </thead>
   1481      * <tbody>
   1482      * <tr>
   1483      * <td align="center">INACTIVE</td>
   1484      * <td align="center">Camera device initiates new scan</td>
   1485      * <td align="center">PASSIVE_SCAN</td>
   1486      * <td align="center">Start AF scan, Lens now moving</td>
   1487      * </tr>
   1488      * <tr>
   1489      * <td align="center">INACTIVE</td>
   1490      * <td align="center">AF_TRIGGER</td>
   1491      * <td align="center">NOT_FOCUSED_LOCKED</td>
   1492      * <td align="center">AF state query, Lens now locked</td>
   1493      * </tr>
   1494      * <tr>
   1495      * <td align="center">PASSIVE_SCAN</td>
   1496      * <td align="center">Camera device completes current scan</td>
   1497      * <td align="center">PASSIVE_FOCUSED</td>
   1498      * <td align="center">End AF scan, Lens now locked</td>
   1499      * </tr>
   1500      * <tr>
   1501      * <td align="center">PASSIVE_SCAN</td>
   1502      * <td align="center">Camera device fails current scan</td>
   1503      * <td align="center">PASSIVE_UNFOCUSED</td>
   1504      * <td align="center">End AF scan, Lens now locked</td>
   1505      * </tr>
   1506      * <tr>
   1507      * <td align="center">PASSIVE_SCAN</td>
   1508      * <td align="center">AF_TRIGGER</td>
   1509      * <td align="center">FOCUSED_LOCKED</td>
   1510      * <td align="center">Eventual transition once the focus is good. Lens now locked</td>
   1511      * </tr>
   1512      * <tr>
   1513      * <td align="center">PASSIVE_SCAN</td>
   1514      * <td align="center">AF_TRIGGER</td>
   1515      * <td align="center">NOT_FOCUSED_LOCKED</td>
   1516      * <td align="center">Eventual transition if cannot find focus. Lens now locked</td>
   1517      * </tr>
   1518      * <tr>
   1519      * <td align="center">PASSIVE_SCAN</td>
   1520      * <td align="center">AF_CANCEL</td>
   1521      * <td align="center">INACTIVE</td>
   1522      * <td align="center">Reset lens position, Lens now locked</td>
   1523      * </tr>
   1524      * <tr>
   1525      * <td align="center">PASSIVE_FOCUSED</td>
   1526      * <td align="center">Camera device initiates new scan</td>
   1527      * <td align="center">PASSIVE_SCAN</td>
   1528      * <td align="center">Start AF scan, Lens now moving</td>
   1529      * </tr>
   1530      * <tr>
   1531      * <td align="center">PASSIVE_UNFOCUSED</td>
   1532      * <td align="center">Camera device initiates new scan</td>
   1533      * <td align="center">PASSIVE_SCAN</td>
   1534      * <td align="center">Start AF scan, Lens now moving</td>
   1535      * </tr>
   1536      * <tr>
   1537      * <td align="center">PASSIVE_FOCUSED</td>
   1538      * <td align="center">AF_TRIGGER</td>
   1539      * <td align="center">FOCUSED_LOCKED</td>
    1540      * <td align="center">Immediate transition. Lens now locked</td>
   1541      * </tr>
   1542      * <tr>
   1543      * <td align="center">PASSIVE_UNFOCUSED</td>
   1544      * <td align="center">AF_TRIGGER</td>
   1545      * <td align="center">NOT_FOCUSED_LOCKED</td>
    1546      * <td align="center">Immediate transition. Lens now locked</td>
   1547      * </tr>
   1548      * <tr>
   1549      * <td align="center">FOCUSED_LOCKED</td>
   1550      * <td align="center">AF_TRIGGER</td>
   1551      * <td align="center">FOCUSED_LOCKED</td>
   1552      * <td align="center">No effect</td>
   1553      * </tr>
   1554      * <tr>
   1555      * <td align="center">FOCUSED_LOCKED</td>
   1556      * <td align="center">AF_CANCEL</td>
   1557      * <td align="center">INACTIVE</td>
   1558      * <td align="center">Restart AF scan</td>
   1559      * </tr>
   1560      * <tr>
   1561      * <td align="center">NOT_FOCUSED_LOCKED</td>
   1562      * <td align="center">AF_TRIGGER</td>
   1563      * <td align="center">NOT_FOCUSED_LOCKED</td>
   1564      * <td align="center">No effect</td>
   1565      * </tr>
   1566      * <tr>
   1567      * <td align="center">NOT_FOCUSED_LOCKED</td>
   1568      * <td align="center">AF_CANCEL</td>
   1569      * <td align="center">INACTIVE</td>
   1570      * <td align="center">Restart AF scan</td>
   1571      * </tr>
   1572      * </tbody>
   1573      * </table>
    1574      * <p>When switching between AF_MODE_CONTINUOUS_* (CAF modes) and AF_MODE_AUTO/AF_MODE_MACRO
   1575      * (AUTO modes), the initial INACTIVE or PASSIVE_SCAN states may be skipped by the
   1576      * camera device. When a trigger is included in a mode switch request, the trigger
   1577      * will be evaluated in the context of the new mode in the request.
    1578      * See the table below for examples:</p>
   1579      * <table>
   1580      * <thead>
   1581      * <tr>
   1582      * <th align="center">State</th>
   1583      * <th align="center">Transition Cause</th>
   1584      * <th align="center">New State</th>
   1585      * <th align="center">Notes</th>
   1586      * </tr>
   1587      * </thead>
   1588      * <tbody>
   1589      * <tr>
   1590      * <td align="center">any state</td>
   1591      * <td align="center">CAF--&gt;AUTO mode switch</td>
   1592      * <td align="center">INACTIVE</td>
   1593      * <td align="center">Mode switch without trigger, initial state must be INACTIVE</td>
   1594      * </tr>
   1595      * <tr>
   1596      * <td align="center">any state</td>
   1597      * <td align="center">CAF--&gt;AUTO mode switch with AF_TRIGGER</td>
   1598      * <td align="center">trigger-reachable states from INACTIVE</td>
   1599      * <td align="center">Mode switch with trigger, INACTIVE is skipped</td>
   1600      * </tr>
   1601      * <tr>
   1602      * <td align="center">any state</td>
   1603      * <td align="center">AUTO--&gt;CAF mode switch</td>
   1604      * <td align="center">passively reachable states from INACTIVE</td>
   1605      * <td align="center">Mode switch without trigger, passive transient state is skipped</td>
   1606      * </tr>
   1607      * </tbody>
   1608      * </table>
   1609      * <p><b>Possible values:</b>
   1610      * <ul>
   1611      *   <li>{@link #CONTROL_AF_STATE_INACTIVE INACTIVE}</li>
   1612      *   <li>{@link #CONTROL_AF_STATE_PASSIVE_SCAN PASSIVE_SCAN}</li>
   1613      *   <li>{@link #CONTROL_AF_STATE_PASSIVE_FOCUSED PASSIVE_FOCUSED}</li>
   1614      *   <li>{@link #CONTROL_AF_STATE_ACTIVE_SCAN ACTIVE_SCAN}</li>
   1615      *   <li>{@link #CONTROL_AF_STATE_FOCUSED_LOCKED FOCUSED_LOCKED}</li>
   1616      *   <li>{@link #CONTROL_AF_STATE_NOT_FOCUSED_LOCKED NOT_FOCUSED_LOCKED}</li>
   1617      *   <li>{@link #CONTROL_AF_STATE_PASSIVE_UNFOCUSED PASSIVE_UNFOCUSED}</li>
   1618      * </ul></p>
   1619      * <p>This key is available on all devices.</p>
   1620      *
   1621      * @see CaptureRequest#CONTROL_AF_MODE
   1622      * @see CaptureRequest#CONTROL_MODE
   1623      * @see CaptureRequest#CONTROL_SCENE_MODE
   1624      * @see #CONTROL_AF_STATE_INACTIVE
   1625      * @see #CONTROL_AF_STATE_PASSIVE_SCAN
   1626      * @see #CONTROL_AF_STATE_PASSIVE_FOCUSED
   1627      * @see #CONTROL_AF_STATE_ACTIVE_SCAN
   1628      * @see #CONTROL_AF_STATE_FOCUSED_LOCKED
   1629      * @see #CONTROL_AF_STATE_NOT_FOCUSED_LOCKED
   1630      * @see #CONTROL_AF_STATE_PASSIVE_UNFOCUSED
   1631      */
   1632     @PublicKey
   1633     public static final Key<Integer> CONTROL_AF_STATE =
   1634             new Key<Integer>("android.control.afState", int.class);
   1635 
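    // Illustrative sketch: interpreting android.control.afState in onCaptureCompleted()
    // after an AF trigger. The runPrecaptureOrCapture() helper is hypothetical.
    //
    //     Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
    //     if (afState != null) {
    //         switch (afState) {
    //             case CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED:
    //             case CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
    //                 // Lens is locked (focused or not): safe to move on to capture.
    //                 runPrecaptureOrCapture();
    //                 break;
    //             default:
    //                 // Scan still in progress; wait for a later result.
    //                 break;
    //         }
    //     }
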
   1636     /**
   1637      * <p>Whether auto-white balance (AWB) is currently locked to its
   1638      * latest calculated values.</p>
   1639      * <p>When set to <code>true</code> (ON), the AWB algorithm is locked to its latest parameters,
   1640      * and will not change color balance settings until the lock is set to <code>false</code> (OFF).</p>
   1641      * <p>Since the camera device has a pipeline of in-flight requests, the settings that
   1642      * get locked do not necessarily correspond to the settings that were present in the
   1643      * latest capture result received from the camera device, since additional captures
   1644      * and AWB updates may have occurred even before the result was sent out. If an
   1645      * application is switching between automatic and manual control and wishes to eliminate
   1646      * any flicker during the switch, the following procedure is recommended:</p>
   1647      * <ol>
   1648      * <li>Starting in auto-AWB mode:</li>
   1649      * <li>Lock AWB</li>
   1650      * <li>Wait for the first result to be output that has the AWB locked</li>
   1651      * <li>Copy AWB settings from that result into a request, set the request to manual AWB</li>
   1652      * <li>Submit the capture request, proceed to run manual AWB as desired.</li>
   1653      * </ol>
   1654      * <p>Note that AWB lock is only meaningful when
   1655      * {@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode} is in the AUTO mode; in other modes,
   1656      * AWB is already fixed to a specific setting.</p>
   1657      * <p>Some LEGACY devices may not support ON; the value is then overridden to OFF.</p>
   1658      * <p>This key is available on all devices.</p>
   1659      *
   1660      * @see CaptureRequest#CONTROL_AWB_MODE
   1661      */
   1662     @PublicKey
   1663     public static final Key<Boolean> CONTROL_AWB_LOCK =
   1664             new Key<Boolean>("android.control.awbLock", boolean.class);
   1665 
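    // Illustrative sketch of the flicker-free handoff described above: lock AWB, wait in
    // onCaptureCompleted() for a result reporting the lock, copy its color correction
    // values into the request, then switch to manual white balance. Builder/session
    // plumbing ("previewBuilder", "session", "result", etc.) is assumed.
    //
    //     // 1. Lock AWB in the repeating request.
    //     previewBuilder.set(CaptureRequest.CONTROL_AWB_LOCK, true);
    //     session.setRepeatingRequest(previewBuilder.build(), captureCallback, handler);
    //
    //     // 2. Once a result reports CONTROL_AWB_LOCK == true, copy its values:
    //     RggbChannelVector gains = result.get(CaptureResult.COLOR_CORRECTION_GAINS);
    //     ColorSpaceTransform ccm = result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM);
    //
    //     // 3. Switch the request to manual white balance using those values.
    //     previewBuilder.set(CaptureRequest.CONTROL_AWB_MODE,
    //             CaptureRequest.CONTROL_AWB_MODE_OFF);
    //     previewBuilder.set(CaptureRequest.COLOR_CORRECTION_MODE,
    //             CaptureRequest.COLOR_CORRECTION_MODE_TRANSFORM_MATRIX);
    //     previewBuilder.set(CaptureRequest.COLOR_CORRECTION_GAINS, gains);
    //     previewBuilder.set(CaptureRequest.COLOR_CORRECTION_TRANSFORM, ccm);
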
   1666     /**
   1667      * <p>Whether auto-white balance (AWB) is currently setting the color
   1668      * transform fields, and what its illumination target
   1669      * is.</p>
   1670      * <p>This control is only effective if {@link CaptureRequest#CONTROL_MODE android.control.mode} is AUTO.</p>
   1671      * <p>When set to the ON mode, the camera device's auto-white balance
   1672      * routine is enabled, overriding the application's selected
   1673      * {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform}, {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} and
   1674      * {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode}. Note that when {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode}
    1675      * is OFF, the behavior of AWB is device dependent. It is recommended to
   1676      * also set AWB mode to OFF or lock AWB by using {@link CaptureRequest#CONTROL_AWB_LOCK android.control.awbLock} before
   1677      * setting AE mode to OFF.</p>
   1678      * <p>When set to the OFF mode, the camera device's auto-white balance
   1679      * routine is disabled. The application manually controls the white
   1680      * balance by {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform}, {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains}
   1681      * and {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode}.</p>
   1682      * <p>When set to any other modes, the camera device's auto-white
   1683      * balance routine is disabled. The camera device uses each
   1684      * particular illumination target for white balance
   1685      * adjustment. The application's values for
   1686      * {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform},
   1687      * {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} and
   1688      * {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} are ignored.</p>
   1689      * <p><b>Possible values:</b>
   1690      * <ul>
   1691      *   <li>{@link #CONTROL_AWB_MODE_OFF OFF}</li>
   1692      *   <li>{@link #CONTROL_AWB_MODE_AUTO AUTO}</li>
   1693      *   <li>{@link #CONTROL_AWB_MODE_INCANDESCENT INCANDESCENT}</li>
   1694      *   <li>{@link #CONTROL_AWB_MODE_FLUORESCENT FLUORESCENT}</li>
   1695      *   <li>{@link #CONTROL_AWB_MODE_WARM_FLUORESCENT WARM_FLUORESCENT}</li>
   1696      *   <li>{@link #CONTROL_AWB_MODE_DAYLIGHT DAYLIGHT}</li>
   1697      *   <li>{@link #CONTROL_AWB_MODE_CLOUDY_DAYLIGHT CLOUDY_DAYLIGHT}</li>
   1698      *   <li>{@link #CONTROL_AWB_MODE_TWILIGHT TWILIGHT}</li>
   1699      *   <li>{@link #CONTROL_AWB_MODE_SHADE SHADE}</li>
   1700      * </ul></p>
   1701      * <p><b>Available values for this device:</b><br>
   1702      * {@link CameraCharacteristics#CONTROL_AWB_AVAILABLE_MODES android.control.awbAvailableModes}</p>
   1703      * <p>This key is available on all devices.</p>
   1704      *
   1705      * @see CaptureRequest#COLOR_CORRECTION_GAINS
   1706      * @see CaptureRequest#COLOR_CORRECTION_MODE
   1707      * @see CaptureRequest#COLOR_CORRECTION_TRANSFORM
   1708      * @see CaptureRequest#CONTROL_AE_MODE
   1709      * @see CameraCharacteristics#CONTROL_AWB_AVAILABLE_MODES
   1710      * @see CaptureRequest#CONTROL_AWB_LOCK
   1711      * @see CaptureRequest#CONTROL_MODE
   1712      * @see #CONTROL_AWB_MODE_OFF
   1713      * @see #CONTROL_AWB_MODE_AUTO
   1714      * @see #CONTROL_AWB_MODE_INCANDESCENT
   1715      * @see #CONTROL_AWB_MODE_FLUORESCENT
   1716      * @see #CONTROL_AWB_MODE_WARM_FLUORESCENT
   1717      * @see #CONTROL_AWB_MODE_DAYLIGHT
   1718      * @see #CONTROL_AWB_MODE_CLOUDY_DAYLIGHT
   1719      * @see #CONTROL_AWB_MODE_TWILIGHT
   1720      * @see #CONTROL_AWB_MODE_SHADE
   1721      */
   1722     @PublicKey
   1723     public static final Key<Integer> CONTROL_AWB_MODE =
   1724             new Key<Integer>("android.control.awbMode", int.class);
   1725 
   1726     /**
   1727      * <p>List of metering areas to use for auto-white-balance illuminant
   1728      * estimation.</p>
   1729      * <p>Not available if {@link CameraCharacteristics#CONTROL_MAX_REGIONS_AWB android.control.maxRegionsAwb} is 0.
   1730      * Otherwise will always be present.</p>
   1731      * <p>The maximum number of regions supported by the device is determined by the value
   1732      * of {@link CameraCharacteristics#CONTROL_MAX_REGIONS_AWB android.control.maxRegionsAwb}.</p>
   1733      * <p>The coordinate system is based on the active pixel array,
   1734      * with (0,0) being the top-left pixel in the active pixel array, and
   1735      * ({@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.width - 1,
   1736      * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.height - 1) being the
   1737      * bottom-right pixel in the active pixel array.</p>
   1738      * <p>The weight must range from 0 to 1000, and represents a weight
   1739      * for every pixel in the area. This means that a large metering area
   1740      * with the same weight as a smaller area will have more effect in
   1741      * the metering result. Metering areas can partially overlap and the
   1742      * camera device will add the weights in the overlap region.</p>
   1743      * <p>The weights are relative to weights of other white balance metering regions, so if
   1744      * only one region is used, all non-zero weights will have the same effect. A region with
   1745      * 0 weight is ignored.</p>
   1746      * <p>If all regions have 0 weight, then no specific metering area needs to be used by the
   1747      * camera device.</p>
   1748      * <p>If the metering region is outside the used {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion} returned in
   1749      * capture result metadata, the camera device will ignore the sections outside the crop
   1750      * region and output only the intersection rectangle as the metering region in the result
   1751      * metadata.  If the region is entirely outside the crop region, it will be ignored and
   1752      * not reported in the result metadata.</p>
   1753      * <p><b>Units</b>: Pixel coordinates within {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}</p>
   1754      * <p><b>Range of valid values:</b><br>
   1755      * Coordinates must be between <code>[(0,0), (width, height))</code> of
   1756      * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}</p>
   1757      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   1758      *
   1759      * @see CameraCharacteristics#CONTROL_MAX_REGIONS_AWB
   1760      * @see CaptureRequest#SCALER_CROP_REGION
   1761      * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
   1762      */
   1763     @PublicKey
   1764     public static final Key<android.hardware.camera2.params.MeteringRectangle[]> CONTROL_AWB_REGIONS =
   1765             new Key<android.hardware.camera2.params.MeteringRectangle[]>("android.control.awbRegions", android.hardware.camera2.params.MeteringRectangle[].class);
   1766 
   1767     /**
   1768      * <p>Information to the camera device 3A (auto-exposure,
   1769      * auto-focus, auto-white balance) routines about the purpose
   1770      * of this capture, to help the camera device to decide optimal 3A
   1771      * strategy.</p>
   1772      * <p>This control (except for MANUAL) is only effective if
   1773      * <code>{@link CaptureRequest#CONTROL_MODE android.control.mode} != OFF</code> and any 3A routine is active.</p>
    1774      * <p>All intents are supported by all devices, except that:</p>
    1775      * <ul>
    1776      *   <li>ZERO_SHUTTER_LAG will be supported if {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} contains
    1777      * PRIVATE_REPROCESSING or YUV_REPROCESSING.</li>
    1778      *   <li>MANUAL will be supported if {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} contains MANUAL_SENSOR.</li>
    1779      *   <li>MOTION_TRACKING will be supported if {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} contains MOTION_TRACKING.</li>
    1780      * </ul>
   1781      * <p><b>Possible values:</b>
   1782      * <ul>
   1783      *   <li>{@link #CONTROL_CAPTURE_INTENT_CUSTOM CUSTOM}</li>
   1784      *   <li>{@link #CONTROL_CAPTURE_INTENT_PREVIEW PREVIEW}</li>
   1785      *   <li>{@link #CONTROL_CAPTURE_INTENT_STILL_CAPTURE STILL_CAPTURE}</li>
   1786      *   <li>{@link #CONTROL_CAPTURE_INTENT_VIDEO_RECORD VIDEO_RECORD}</li>
   1787      *   <li>{@link #CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT VIDEO_SNAPSHOT}</li>
   1788      *   <li>{@link #CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG ZERO_SHUTTER_LAG}</li>
   1789      *   <li>{@link #CONTROL_CAPTURE_INTENT_MANUAL MANUAL}</li>
   1790      *   <li>{@link #CONTROL_CAPTURE_INTENT_MOTION_TRACKING MOTION_TRACKING}</li>
   1791      * </ul></p>
   1792      * <p>This key is available on all devices.</p>
   1793      *
   1794      * @see CaptureRequest#CONTROL_MODE
   1795      * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
   1796      * @see #CONTROL_CAPTURE_INTENT_CUSTOM
   1797      * @see #CONTROL_CAPTURE_INTENT_PREVIEW
   1798      * @see #CONTROL_CAPTURE_INTENT_STILL_CAPTURE
   1799      * @see #CONTROL_CAPTURE_INTENT_VIDEO_RECORD
   1800      * @see #CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT
   1801      * @see #CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG
   1802      * @see #CONTROL_CAPTURE_INTENT_MANUAL
   1803      * @see #CONTROL_CAPTURE_INTENT_MOTION_TRACKING
   1804      */
   1805     @PublicKey
   1806     public static final Key<Integer> CONTROL_CAPTURE_INTENT =
   1807             new Key<Integer>("android.control.captureIntent", int.class);
   1808 
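    // Illustrative sketch: checking that this device supports the MANUAL intent (via the
    // MANUAL_SENSOR capability, per the note above) before setting it. "characteristics"
    // and "builder" are assumed objects for the same camera.
    //
    //     int[] caps = characteristics.get(
    //             CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
    //     boolean hasManualSensor = false;
    //     for (int c : caps) {
    //         if (c == CameraMetadata.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR) {
    //             hasManualSensor = true;
    //             break;
    //         }
    //     }
    //     if (hasManualSensor) {
    //         builder.set(CaptureRequest.CONTROL_CAPTURE_INTENT,
    //                 CaptureRequest.CONTROL_CAPTURE_INTENT_MANUAL);
    //     }
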
   1809     /**
   1810      * <p>Current state of auto-white balance (AWB) algorithm.</p>
   1811      * <p>Switching between or enabling AWB modes ({@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode}) always
   1812      * resets the AWB state to INACTIVE. Similarly, switching between {@link CaptureRequest#CONTROL_MODE android.control.mode},
   1813      * or {@link CaptureRequest#CONTROL_SCENE_MODE android.control.sceneMode} if <code>{@link CaptureRequest#CONTROL_MODE android.control.mode} == USE_SCENE_MODE</code> resets all
   1814      * the algorithm states to INACTIVE.</p>
   1815      * <p>The camera device can do several state transitions between two results, if it is
   1816      * allowed by the state transition table. So INACTIVE may never actually be seen in
   1817      * a result.</p>
   1818      * <p>The state in the result is the state for this image (in sync with this image): if
   1819      * AWB state becomes CONVERGED, then the image data associated with this result should
   1820      * be good to use.</p>
   1821      * <p>Below are state transition tables for different AWB modes.</p>
   1822      * <p>When <code>{@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode} != AWB_MODE_AUTO</code>:</p>
   1823      * <table>
   1824      * <thead>
   1825      * <tr>
   1826      * <th align="center">State</th>
   1827      * <th align="center">Transition Cause</th>
   1828      * <th align="center">New State</th>
   1829      * <th align="center">Notes</th>
   1830      * </tr>
   1831      * </thead>
   1832      * <tbody>
   1833      * <tr>
   1834      * <td align="center">INACTIVE</td>
   1835      * <td align="center"></td>
   1836      * <td align="center">INACTIVE</td>
   1837      * <td align="center">Camera device auto white balance algorithm is disabled</td>
   1838      * </tr>
   1839      * </tbody>
   1840      * </table>
   1841      * <p>When {@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode} is AWB_MODE_AUTO:</p>
   1842      * <table>
   1843      * <thead>
   1844      * <tr>
   1845      * <th align="center">State</th>
   1846      * <th align="center">Transition Cause</th>
   1847      * <th align="center">New State</th>
   1848      * <th align="center">Notes</th>
   1849      * </tr>
   1850      * </thead>
   1851      * <tbody>
   1852      * <tr>
   1853      * <td align="center">INACTIVE</td>
   1854      * <td align="center">Camera device initiates AWB scan</td>
   1855      * <td align="center">SEARCHING</td>
   1856      * <td align="center">Values changing</td>
   1857      * </tr>
   1858      * <tr>
   1859      * <td align="center">INACTIVE</td>
   1860      * <td align="center">{@link CaptureRequest#CONTROL_AWB_LOCK android.control.awbLock} is ON</td>
   1861      * <td align="center">LOCKED</td>
   1862      * <td align="center">Values locked</td>
   1863      * </tr>
   1864      * <tr>
   1865      * <td align="center">SEARCHING</td>
   1866      * <td align="center">Camera device finishes AWB scan</td>
   1867      * <td align="center">CONVERGED</td>
   1868      * <td align="center">Good values, not changing</td>
   1869      * </tr>
   1870      * <tr>
   1871      * <td align="center">SEARCHING</td>
   1872      * <td align="center">{@link CaptureRequest#CONTROL_AWB_LOCK android.control.awbLock} is ON</td>
   1873      * <td align="center">LOCKED</td>
   1874      * <td align="center">Values locked</td>
   1875      * </tr>
   1876      * <tr>
   1877      * <td align="center">CONVERGED</td>
   1878      * <td align="center">Camera device initiates AWB scan</td>
   1879      * <td align="center">SEARCHING</td>
   1880      * <td align="center">Values changing</td>
   1881      * </tr>
   1882      * <tr>
   1883      * <td align="center">CONVERGED</td>
   1884      * <td align="center">{@link CaptureRequest#CONTROL_AWB_LOCK android.control.awbLock} is ON</td>
   1885      * <td align="center">LOCKED</td>
   1886      * <td align="center">Values locked</td>
   1887      * </tr>
   1888      * <tr>
   1889      * <td align="center">LOCKED</td>
   1890      * <td align="center">{@link CaptureRequest#CONTROL_AWB_LOCK android.control.awbLock} is OFF</td>
   1891      * <td align="center">SEARCHING</td>
   1892      * <td align="center">Values not good after unlock</td>
   1893      * </tr>
   1894      * </tbody>
   1895      * </table>
   1896      * <p>For the above table, the camera device may skip reporting any state changes that happen
   1897      * without application intervention (i.e. mode switch, trigger, locking). Any state that
   1898      * can be skipped in that manner is called a transient state.</p>
   1899      * <p>For example, for this AWB mode (AWB_MODE_AUTO), in addition to the state transitions
    1900      * listed in the above table, it is also legal for the camera device to skip one or more
    1901      * transient states between two results. See the table below for examples:</p>
   1902      * <table>
   1903      * <thead>
   1904      * <tr>
   1905      * <th align="center">State</th>
   1906      * <th align="center">Transition Cause</th>
   1907      * <th align="center">New State</th>
   1908      * <th align="center">Notes</th>
   1909      * </tr>
   1910      * </thead>
   1911      * <tbody>
   1912      * <tr>
   1913      * <td align="center">INACTIVE</td>
   1914      * <td align="center">Camera device finished AWB scan</td>
   1915      * <td align="center">CONVERGED</td>
   1916      * <td align="center">Values are already good, transient states are skipped by camera device.</td>
   1917      * </tr>
   1918      * <tr>
   1919      * <td align="center">LOCKED</td>
   1920      * <td align="center">{@link CaptureRequest#CONTROL_AWB_LOCK android.control.awbLock} is OFF</td>
   1921      * <td align="center">CONVERGED</td>
   1922      * <td align="center">Values good after unlock, transient states are skipped by camera device.</td>
   1923      * </tr>
   1924      * </tbody>
   1925      * </table>
   1926      * <p><b>Possible values:</b>
   1927      * <ul>
   1928      *   <li>{@link #CONTROL_AWB_STATE_INACTIVE INACTIVE}</li>
   1929      *   <li>{@link #CONTROL_AWB_STATE_SEARCHING SEARCHING}</li>
   1930      *   <li>{@link #CONTROL_AWB_STATE_CONVERGED CONVERGED}</li>
   1931      *   <li>{@link #CONTROL_AWB_STATE_LOCKED LOCKED}</li>
   1932      * </ul></p>
   1933      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   1934      * <p><b>Limited capability</b> -
   1935      * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the
   1936      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
   1937      *
   1938      * @see CaptureRequest#CONTROL_AWB_LOCK
   1939      * @see CaptureRequest#CONTROL_AWB_MODE
   1940      * @see CaptureRequest#CONTROL_MODE
   1941      * @see CaptureRequest#CONTROL_SCENE_MODE
   1942      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
   1943      * @see #CONTROL_AWB_STATE_INACTIVE
   1944      * @see #CONTROL_AWB_STATE_SEARCHING
   1945      * @see #CONTROL_AWB_STATE_CONVERGED
   1946      * @see #CONTROL_AWB_STATE_LOCKED
   1947      */
   1948     @PublicKey
   1949     public static final Key<Integer> CONTROL_AWB_STATE =
   1950             new Key<Integer>("android.control.awbState", int.class);
   1951 
   1952     /**
   1953      * <p>A special color effect to apply.</p>
   1954      * <p>When this mode is set, a color effect will be applied
   1955      * to images produced by the camera device. The interpretation
   1956      * and implementation of these color effects is left to the
   1957      * implementor of the camera device, and should not be
   1958      * depended on to be consistent (or present) across all
   1959      * devices.</p>
   1960      * <p><b>Possible values:</b>
   1961      * <ul>
   1962      *   <li>{@link #CONTROL_EFFECT_MODE_OFF OFF}</li>
   1963      *   <li>{@link #CONTROL_EFFECT_MODE_MONO MONO}</li>
   1964      *   <li>{@link #CONTROL_EFFECT_MODE_NEGATIVE NEGATIVE}</li>
   1965      *   <li>{@link #CONTROL_EFFECT_MODE_SOLARIZE SOLARIZE}</li>
   1966      *   <li>{@link #CONTROL_EFFECT_MODE_SEPIA SEPIA}</li>
   1967      *   <li>{@link #CONTROL_EFFECT_MODE_POSTERIZE POSTERIZE}</li>
   1968      *   <li>{@link #CONTROL_EFFECT_MODE_WHITEBOARD WHITEBOARD}</li>
   1969      *   <li>{@link #CONTROL_EFFECT_MODE_BLACKBOARD BLACKBOARD}</li>
   1970      *   <li>{@link #CONTROL_EFFECT_MODE_AQUA AQUA}</li>
   1971      * </ul></p>
   1972      * <p><b>Available values for this device:</b><br>
   1973      * {@link CameraCharacteristics#CONTROL_AVAILABLE_EFFECTS android.control.availableEffects}</p>
   1974      * <p>This key is available on all devices.</p>
   1975      *
   1976      * @see CameraCharacteristics#CONTROL_AVAILABLE_EFFECTS
   1977      * @see #CONTROL_EFFECT_MODE_OFF
   1978      * @see #CONTROL_EFFECT_MODE_MONO
   1979      * @see #CONTROL_EFFECT_MODE_NEGATIVE
   1980      * @see #CONTROL_EFFECT_MODE_SOLARIZE
   1981      * @see #CONTROL_EFFECT_MODE_SEPIA
   1982      * @see #CONTROL_EFFECT_MODE_POSTERIZE
   1983      * @see #CONTROL_EFFECT_MODE_WHITEBOARD
   1984      * @see #CONTROL_EFFECT_MODE_BLACKBOARD
   1985      * @see #CONTROL_EFFECT_MODE_AQUA
   1986      */
   1987     @PublicKey
   1988     public static final Key<Integer> CONTROL_EFFECT_MODE =
   1989             new Key<Integer>("android.control.effectMode", int.class);
   1990 
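    // Illustrative sketch: enabling an effect only when the device advertises it in
    // android.control.availableEffects. "characteristics" and "builder" are assumed.
    //
    //     int[] effects = characteristics.get(
    //             CameraCharacteristics.CONTROL_AVAILABLE_EFFECTS);
    //     for (int effect : effects) {
    //         if (effect == CameraMetadata.CONTROL_EFFECT_MODE_SEPIA) {
    //             builder.set(CaptureRequest.CONTROL_EFFECT_MODE,
    //                     CameraMetadata.CONTROL_EFFECT_MODE_SEPIA);
    //             break;
    //         }
    //     }
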
   1991     /**
   1992      * <p>Overall mode of 3A (auto-exposure, auto-white-balance, auto-focus) control
   1993      * routines.</p>
   1994      * <p>This is a top-level 3A control switch. When set to OFF, all 3A control
   1995      * by the camera device is disabled. The application must set the fields for
   1996      * capture parameters itself.</p>
   1997      * <p>When set to AUTO, the individual algorithm controls in
   1998      * android.control.* are in effect, such as {@link CaptureRequest#CONTROL_AF_MODE android.control.afMode}.</p>
   1999      * <p>When set to USE_SCENE_MODE, the individual controls in
   2000      * android.control.* are mostly disabled, and the camera device
   2001      * implements one of the scene mode settings (such as ACTION,
   2002      * SUNSET, or PARTY) as it wishes. The camera device scene mode
   2003      * 3A settings are provided by {@link android.hardware.camera2.CaptureResult capture results}.</p>
    2004      * <p>When set to OFF_KEEP_STATE, it is similar to OFF mode, except that
    2005      * this frame will not be used by the camera device's background 3A statistics
    2006      * update, as if this frame had never been captured. This mode can be used in the scenario
   2007      * where the application doesn't want a 3A manual control capture to affect
   2008      * the subsequent auto 3A capture results.</p>
   2009      * <p><b>Possible values:</b>
   2010      * <ul>
   2011      *   <li>{@link #CONTROL_MODE_OFF OFF}</li>
   2012      *   <li>{@link #CONTROL_MODE_AUTO AUTO}</li>
   2013      *   <li>{@link #CONTROL_MODE_USE_SCENE_MODE USE_SCENE_MODE}</li>
   2014      *   <li>{@link #CONTROL_MODE_OFF_KEEP_STATE OFF_KEEP_STATE}</li>
   2015      * </ul></p>
   2016      * <p><b>Available values for this device:</b><br>
   2017      * {@link CameraCharacteristics#CONTROL_AVAILABLE_MODES android.control.availableModes}</p>
   2018      * <p>This key is available on all devices.</p>
   2019      *
   2020      * @see CaptureRequest#CONTROL_AF_MODE
   2021      * @see CameraCharacteristics#CONTROL_AVAILABLE_MODES
   2022      * @see #CONTROL_MODE_OFF
   2023      * @see #CONTROL_MODE_AUTO
   2024      * @see #CONTROL_MODE_USE_SCENE_MODE
   2025      * @see #CONTROL_MODE_OFF_KEEP_STATE
   2026      */
   2027     @PublicKey
   2028     public static final Key<Integer> CONTROL_MODE =
   2029             new Key<Integer>("android.control.mode", int.class);
   2030 
   2031     /**
   2032      * <p>Control for which scene mode is currently active.</p>
   2033      * <p>Scene modes are custom camera modes optimized for a certain set of conditions and
   2034      * capture settings.</p>
    2035      * <p>This is the mode that is active when
   2036      * <code>{@link CaptureRequest#CONTROL_MODE android.control.mode} == USE_SCENE_MODE</code>. Aside from FACE_PRIORITY, these modes will
   2037      * disable {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode}, {@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode}, and {@link CaptureRequest#CONTROL_AF_MODE android.control.afMode}
   2038      * while in use.</p>
   2039      * <p>The interpretation and implementation of these scene modes is left
   2040      * to the implementor of the camera device. Their behavior will not be
   2041      * consistent across all devices, and any given device may only implement
   2042      * a subset of these modes.</p>
   2043      * <p><b>Possible values:</b>
   2044      * <ul>
   2045      *   <li>{@link #CONTROL_SCENE_MODE_DISABLED DISABLED}</li>
   2046      *   <li>{@link #CONTROL_SCENE_MODE_FACE_PRIORITY FACE_PRIORITY}</li>
   2047      *   <li>{@link #CONTROL_SCENE_MODE_ACTION ACTION}</li>
   2048      *   <li>{@link #CONTROL_SCENE_MODE_PORTRAIT PORTRAIT}</li>
   2049      *   <li>{@link #CONTROL_SCENE_MODE_LANDSCAPE LANDSCAPE}</li>
   2050      *   <li>{@link #CONTROL_SCENE_MODE_NIGHT NIGHT}</li>
   2051      *   <li>{@link #CONTROL_SCENE_MODE_NIGHT_PORTRAIT NIGHT_PORTRAIT}</li>
   2052      *   <li>{@link #CONTROL_SCENE_MODE_THEATRE THEATRE}</li>
   2053      *   <li>{@link #CONTROL_SCENE_MODE_BEACH BEACH}</li>
   2054      *   <li>{@link #CONTROL_SCENE_MODE_SNOW SNOW}</li>
   2055      *   <li>{@link #CONTROL_SCENE_MODE_SUNSET SUNSET}</li>
   2056      *   <li>{@link #CONTROL_SCENE_MODE_STEADYPHOTO STEADYPHOTO}</li>
   2057      *   <li>{@link #CONTROL_SCENE_MODE_FIREWORKS FIREWORKS}</li>
   2058      *   <li>{@link #CONTROL_SCENE_MODE_SPORTS SPORTS}</li>
   2059      *   <li>{@link #CONTROL_SCENE_MODE_PARTY PARTY}</li>
   2060      *   <li>{@link #CONTROL_SCENE_MODE_CANDLELIGHT CANDLELIGHT}</li>
   2061      *   <li>{@link #CONTROL_SCENE_MODE_BARCODE BARCODE}</li>
   2062      *   <li>{@link #CONTROL_SCENE_MODE_HIGH_SPEED_VIDEO HIGH_SPEED_VIDEO}</li>
   2063      *   <li>{@link #CONTROL_SCENE_MODE_HDR HDR}</li>
   2064      * </ul></p>
   2065      * <p><b>Available values for this device:</b><br>
   2066      * {@link CameraCharacteristics#CONTROL_AVAILABLE_SCENE_MODES android.control.availableSceneModes}</p>
   2067      * <p>This key is available on all devices.</p>
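              * <p>A minimal sketch (assuming an existing request {@code builder}, a {@code result} from a
              * completed capture, and that NIGHT is listed in the device's available scene modes) might be:</p>
              * <pre><code>builder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_USE_SCENE_MODE);
              * builder.set(CaptureRequest.CONTROL_SCENE_MODE, CameraMetadata.CONTROL_SCENE_MODE_NIGHT);
              * // The scene mode actually in effect for a completed frame:
              * Integer sceneMode = result.get(CaptureResult.CONTROL_SCENE_MODE);
              * </code></pre>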
   2068      *
   2069      * @see CaptureRequest#CONTROL_AE_MODE
   2070      * @see CaptureRequest#CONTROL_AF_MODE
   2071      * @see CameraCharacteristics#CONTROL_AVAILABLE_SCENE_MODES
   2072      * @see CaptureRequest#CONTROL_AWB_MODE
   2073      * @see CaptureRequest#CONTROL_MODE
   2074      * @see #CONTROL_SCENE_MODE_DISABLED
   2075      * @see #CONTROL_SCENE_MODE_FACE_PRIORITY
   2076      * @see #CONTROL_SCENE_MODE_ACTION
   2077      * @see #CONTROL_SCENE_MODE_PORTRAIT
   2078      * @see #CONTROL_SCENE_MODE_LANDSCAPE
   2079      * @see #CONTROL_SCENE_MODE_NIGHT
   2080      * @see #CONTROL_SCENE_MODE_NIGHT_PORTRAIT
   2081      * @see #CONTROL_SCENE_MODE_THEATRE
   2082      * @see #CONTROL_SCENE_MODE_BEACH
   2083      * @see #CONTROL_SCENE_MODE_SNOW
   2084      * @see #CONTROL_SCENE_MODE_SUNSET
   2085      * @see #CONTROL_SCENE_MODE_STEADYPHOTO
   2086      * @see #CONTROL_SCENE_MODE_FIREWORKS
   2087      * @see #CONTROL_SCENE_MODE_SPORTS
   2088      * @see #CONTROL_SCENE_MODE_PARTY
   2089      * @see #CONTROL_SCENE_MODE_CANDLELIGHT
   2090      * @see #CONTROL_SCENE_MODE_BARCODE
   2091      * @see #CONTROL_SCENE_MODE_HIGH_SPEED_VIDEO
   2092      * @see #CONTROL_SCENE_MODE_HDR
   2093      */
   2094     @PublicKey
   2095     public static final Key<Integer> CONTROL_SCENE_MODE =
   2096             new Key<Integer>("android.control.sceneMode", int.class);
   2097 
   2098     /**
   2099      * <p>Whether video stabilization is
   2100      * active.</p>
   2101      * <p>Video stabilization automatically warps images from
   2102      * the camera in order to stabilize motion between consecutive frames.</p>
   2103      * <p>If enabled, video stabilization can modify the
   2104      * {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion} to keep the video stream stabilized.</p>
   2105      * <p>Switching between different video stabilization modes may take several
    2106      * frames to initialize; the camera device will report the current mode
    2107      * in capture result metadata. For example, when "ON" mode is requested,
    2108      * the video stabilization modes in the first several capture results may
    2109      * still be "OFF", and they will become "ON" when the initialization is
   2110      * done.</p>
   2111      * <p>In addition, not all recording sizes or frame rates may be supported for
   2112      * stabilization by a device that reports stabilization support. It is guaranteed
   2113      * that an output targeting a MediaRecorder or MediaCodec will be stabilized if
   2114      * the recording resolution is less than or equal to 1920 x 1080 (width less than
   2115      * or equal to 1920, height less than or equal to 1080), and the recording
   2116      * frame rate is less than or equal to 30fps.  At other sizes, the CaptureResult
   2117      * {@link CaptureRequest#CONTROL_VIDEO_STABILIZATION_MODE android.control.videoStabilizationMode} field will return
   2118      * OFF if the recording output is not stabilized, or if there are no output
   2119      * Surface types that can be stabilized.</p>
   2120      * <p>If a camera device supports both this mode and OIS
   2121      * ({@link CaptureRequest#LENS_OPTICAL_STABILIZATION_MODE android.lens.opticalStabilizationMode}), turning both modes on may
   2122      * produce undesirable interaction, so it is recommended not to enable
   2123      * both at the same time.</p>
   2124      * <p><b>Possible values:</b>
   2125      * <ul>
   2126      *   <li>{@link #CONTROL_VIDEO_STABILIZATION_MODE_OFF OFF}</li>
   2127      *   <li>{@link #CONTROL_VIDEO_STABILIZATION_MODE_ON ON}</li>
   2128      * </ul></p>
   2129      * <p>This key is available on all devices.</p>
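              * <p>Because enabling stabilization may take several frames to take effect, an application can
              * watch the capture results; a rough sketch (the {@code builder} and {@code result} names are
              * placeholders for objects the application already holds):</p>
              * <pre><code>builder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE,
              *         CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_ON);
              * // In the capture callback; may still read OFF for the first few frames:
              * Integer stabMode = result.get(CaptureResult.CONTROL_VIDEO_STABILIZATION_MODE);
              * if (stabMode != null) {
              *     boolean active = (stabMode == CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_ON);
              * }
              * </code></pre>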
   2130      *
   2131      * @see CaptureRequest#CONTROL_VIDEO_STABILIZATION_MODE
   2132      * @see CaptureRequest#LENS_OPTICAL_STABILIZATION_MODE
   2133      * @see CaptureRequest#SCALER_CROP_REGION
   2134      * @see #CONTROL_VIDEO_STABILIZATION_MODE_OFF
   2135      * @see #CONTROL_VIDEO_STABILIZATION_MODE_ON
   2136      */
   2137     @PublicKey
   2138     public static final Key<Integer> CONTROL_VIDEO_STABILIZATION_MODE =
   2139             new Key<Integer>("android.control.videoStabilizationMode", int.class);
   2140 
   2141     /**
   2142      * <p>The amount of additional sensitivity boost applied to output images
   2143      * after RAW sensor data is captured.</p>
   2144      * <p>Some camera devices support additional digital sensitivity boosting in the
   2145      * camera processing pipeline after sensor RAW image is captured.
   2146      * Such a boost will be applied to YUV/JPEG format output images but will not
    2147      * have an effect on RAW output formats like RAW_SENSOR, RAW10, RAW12 or RAW_OPAQUE.</p>
   2148      * <p>This key will be <code>null</code> for devices that do not support any RAW format
    2149      * outputs. For devices that do support RAW format outputs, this key will always be
   2150      * present, and if a device does not support post RAW sensitivity boost, it will
   2151      * list <code>100</code> in this key.</p>
   2152      * <p>If the camera device cannot apply the exact boost requested, it will reduce the
   2153      * boost to the nearest supported value.
   2154      * The final boost value used will be available in the output capture result.</p>
   2155      * <p>For devices that support post RAW sensitivity boost, the YUV/JPEG output images
    2156      * of such a device will have a total sensitivity of
    2157      * <code>{@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity} * {@link CaptureRequest#CONTROL_POST_RAW_SENSITIVITY_BOOST android.control.postRawSensitivityBoost} / 100</code>.
    2158      * The sensitivity of RAW format images will always be <code>{@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity}</code>.</p>
   2159      * <p>This control is only effective if {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} or {@link CaptureRequest#CONTROL_MODE android.control.mode} is set to
   2160      * OFF; otherwise the auto-exposure algorithm will override this value.</p>
   2161      * <p><b>Units</b>: ISO arithmetic units, the same as {@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity}</p>
   2162      * <p><b>Range of valid values:</b><br>
   2163      * {@link CameraCharacteristics#CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE android.control.postRawSensitivityBoostRange}</p>
   2164      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
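              * <p>As a worked example of the formula above, a sensitivity of 100 with a boost of 200 yields a
              * total YUV/JPEG sensitivity of 200. A sketch that reads both values back from a capture result
              * (the {@code result} object is assumed to exist already):</p>
              * <pre><code>Integer sensitivity = result.get(CaptureResult.SENSOR_SENSITIVITY);
              * Integer boost = result.get(CaptureResult.CONTROL_POST_RAW_SENSITIVITY_BOOST);
              * if (sensitivity != null) {
              *     int appliedBoost = (boost != null) ? boost : 100; // 100 means no boost applied
              *     int totalSensitivity = sensitivity * appliedBoost / 100;
              * }
              * </code></pre>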
   2165      *
   2166      * @see CaptureRequest#CONTROL_AE_MODE
   2167      * @see CaptureRequest#CONTROL_MODE
   2168      * @see CaptureRequest#CONTROL_POST_RAW_SENSITIVITY_BOOST
   2169      * @see CameraCharacteristics#CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE
   2170      * @see CaptureRequest#SENSOR_SENSITIVITY
   2171      */
   2172     @PublicKey
   2173     public static final Key<Integer> CONTROL_POST_RAW_SENSITIVITY_BOOST =
   2174             new Key<Integer>("android.control.postRawSensitivityBoost", int.class);
   2175 
   2176     /**
   2177      * <p>Allow camera device to enable zero-shutter-lag mode for requests with
   2178      * {@link CaptureRequest#CONTROL_CAPTURE_INTENT android.control.captureIntent} == STILL_CAPTURE.</p>
   2179      * <p>If enableZsl is <code>true</code>, the camera device may enable zero-shutter-lag mode for requests with
   2180      * STILL_CAPTURE capture intent. The camera device may use images captured in the past to
   2181      * produce output images for a zero-shutter-lag request. The result metadata including the
   2182      * {@link CaptureResult#SENSOR_TIMESTAMP android.sensor.timestamp} reflects the source frames used to produce output images.
   2183      * Therefore, the contents of the output images and the result metadata may be out of order
   2184      * compared to previous regular requests. enableZsl does not affect requests with other
   2185      * capture intents.</p>
   2186      * <p>For example, when requests are submitted in the following order:
   2187      *   Request A: enableZsl is ON, {@link CaptureRequest#CONTROL_CAPTURE_INTENT android.control.captureIntent} is PREVIEW
   2188      *   Request B: enableZsl is ON, {@link CaptureRequest#CONTROL_CAPTURE_INTENT android.control.captureIntent} is STILL_CAPTURE</p>
   2189      * <p>The output images for request B may have contents captured before the output images for
   2190      * request A, and the result metadata for request B may be older than the result metadata for
   2191      * request A.</p>
    2192      * <p>Note that even when enableZsl is <code>true</code>, output images are not guaranteed to be
    2193      * captured in the past for requests with STILL_CAPTURE capture intent.</p>
   2194      * <p>For applications targeting SDK versions O and newer, the value of enableZsl in
   2195      * TEMPLATE_STILL_CAPTURE template may be <code>true</code>. The value in other templates is always
   2196      * <code>false</code> if present.</p>
   2197      * <p>For applications targeting SDK versions older than O, the value of enableZsl in all
   2198      * capture templates is always <code>false</code> if present.</p>
   2199      * <p>For application-operated ZSL, use CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG template.</p>
   2200      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
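              * <p>A minimal sketch (assuming {@code stillBuilder} was created from
              * {@link android.hardware.camera2.CameraDevice#TEMPLATE_STILL_CAPTURE } elsewhere and
              * {@code result} comes from the completed still capture):</p>
              * <pre><code>stillBuilder.set(CaptureRequest.CONTROL_ENABLE_ZSL, true);
              * // After the capture completes, check whether ZSL stayed enabled:
              * Boolean zslOn = result.get(CaptureResult.CONTROL_ENABLE_ZSL);
              * </code></pre>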
   2201      *
   2202      * @see CaptureRequest#CONTROL_CAPTURE_INTENT
   2203      * @see CaptureResult#SENSOR_TIMESTAMP
   2204      */
   2205     @PublicKey
   2206     public static final Key<Boolean> CONTROL_ENABLE_ZSL =
   2207             new Key<Boolean>("android.control.enableZsl", boolean.class);
   2208 
   2209     /**
   2210      * <p>Whether a significant scene change is detected within the currently-set AF
   2211      * region(s).</p>
   2212      * <p>When the camera focus routine detects a change in the scene it is looking at,
   2213      * such as a large shift in camera viewpoint, significant motion in the scene, or a
   2214      * significant illumination change, this value will be set to DETECTED for a single capture
   2215      * result. Otherwise the value will be NOT_DETECTED. The threshold for detection is similar
   2216      * to what would trigger a new passive focus scan to begin in CONTINUOUS autofocus modes.</p>
   2217      * <p>This key will be available if the camera device advertises this key via {@link android.hardware.camera2.CameraCharacteristics#getAvailableCaptureResultKeys }.</p>
   2218      * <p><b>Possible values:</b>
   2219      * <ul>
   2220      *   <li>{@link #CONTROL_AF_SCENE_CHANGE_NOT_DETECTED NOT_DETECTED}</li>
   2221      *   <li>{@link #CONTROL_AF_SCENE_CHANGE_DETECTED DETECTED}</li>
   2222      * </ul></p>
   2223      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
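              * <p>A brief sketch of reading this key from a capture result (the {@code result} object is
              * assumed to exist already):</p>
              * <pre><code>Integer sceneChange = result.get(CaptureResult.CONTROL_AF_SCENE_CHANGE);
              * if (sceneChange != null) {
              *     boolean detected = (sceneChange == CameraMetadata.CONTROL_AF_SCENE_CHANGE_DETECTED);
              *     // e.g. refresh a focus indicator in the UI when detected is true
              * }
              * </code></pre>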
   2224      * @see #CONTROL_AF_SCENE_CHANGE_NOT_DETECTED
   2225      * @see #CONTROL_AF_SCENE_CHANGE_DETECTED
   2226      */
   2227     @PublicKey
   2228     public static final Key<Integer> CONTROL_AF_SCENE_CHANGE =
   2229             new Key<Integer>("android.control.afSceneChange", int.class);
   2230 
   2231     /**
   2232      * <p>Operation mode for edge
   2233      * enhancement.</p>
   2234      * <p>Edge enhancement improves sharpness and details in the captured image. OFF means
   2235      * no enhancement will be applied by the camera device.</p>
   2236      * <p>FAST/HIGH_QUALITY both mean camera device determined enhancement
   2237      * will be applied. HIGH_QUALITY mode indicates that the
   2238      * camera device will use the highest-quality enhancement algorithms,
   2239      * even if it slows down capture rate. FAST means the camera device will
   2240      * not slow down capture rate when applying edge enhancement. FAST may be the same as OFF if
   2241      * edge enhancement will slow down capture rate. Every output stream will have a similar
   2242      * amount of enhancement applied.</p>
   2243      * <p>ZERO_SHUTTER_LAG is meant to be used by applications that maintain a continuous circular
   2244      * buffer of high-resolution images during preview and reprocess image(s) from that buffer
   2245      * into a final capture when triggered by the user. In this mode, the camera device applies
   2246      * edge enhancement to low-resolution streams (below maximum recording resolution) to
   2247      * maximize preview quality, but does not apply edge enhancement to high-resolution streams,
   2248      * since those will be reprocessed later if necessary.</p>
   2249      * <p>For YUV_REPROCESSING, these FAST/HIGH_QUALITY modes both mean that the camera
   2250      * device will apply FAST/HIGH_QUALITY YUV-domain edge enhancement, respectively.
   2251      * The camera device may adjust its internal edge enhancement parameters for best
   2252      * image quality based on the {@link CaptureRequest#REPROCESS_EFFECTIVE_EXPOSURE_FACTOR android.reprocess.effectiveExposureFactor}, if it is set.</p>
   2253      * <p><b>Possible values:</b>
   2254      * <ul>
   2255      *   <li>{@link #EDGE_MODE_OFF OFF}</li>
   2256      *   <li>{@link #EDGE_MODE_FAST FAST}</li>
   2257      *   <li>{@link #EDGE_MODE_HIGH_QUALITY HIGH_QUALITY}</li>
   2258      *   <li>{@link #EDGE_MODE_ZERO_SHUTTER_LAG ZERO_SHUTTER_LAG}</li>
   2259      * </ul></p>
   2260      * <p><b>Available values for this device:</b><br>
   2261      * {@link CameraCharacteristics#EDGE_AVAILABLE_EDGE_MODES android.edge.availableEdgeModes}</p>
   2262      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   2263      * <p><b>Full capability</b> -
   2264      * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
   2265      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
   2266      *
   2267      * @see CameraCharacteristics#EDGE_AVAILABLE_EDGE_MODES
   2268      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
   2269      * @see CaptureRequest#REPROCESS_EFFECTIVE_EXPOSURE_FACTOR
   2270      * @see #EDGE_MODE_OFF
   2271      * @see #EDGE_MODE_FAST
   2272      * @see #EDGE_MODE_HIGH_QUALITY
   2273      * @see #EDGE_MODE_ZERO_SHUTTER_LAG
   2274      */
   2275     @PublicKey
   2276     public static final Key<Integer> EDGE_MODE =
   2277             new Key<Integer>("android.edge.mode", int.class);
   2278 
   2279     /**
    2280      * <p>The desired mode for the camera device's flash control.</p>
    2281      * <p>This control is only effective when a flash unit is available
   2282      * (<code>{@link CameraCharacteristics#FLASH_INFO_AVAILABLE android.flash.info.available} == true</code>).</p>
   2283      * <p>When this control is used, the {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} must be set to ON or OFF.
   2284      * Otherwise, the camera device auto-exposure related flash control (ON_AUTO_FLASH,
   2285      * ON_ALWAYS_FLASH, or ON_AUTO_FLASH_REDEYE) will override this control.</p>
   2286      * <p>When set to OFF, the camera device will not fire flash for this capture.</p>
   2287      * <p>When set to SINGLE, the camera device will fire flash regardless of the camera
    2288      * device's auto-exposure routine's result. When used in the still capture case, this
    2289      * control should be used along with the auto-exposure (AE) precapture metering sequence
    2290      * ({@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger}); otherwise, the image may be incorrectly exposed.</p>
   2291      * <p>When set to TORCH, the flash will be on continuously. This mode can be used
   2292      * for use cases such as preview, auto-focus assist, still capture, or video recording.</p>
   2293      * <p>The flash status will be reported by {@link CaptureResult#FLASH_STATE android.flash.state} in the capture result metadata.</p>
   2294      * <p><b>Possible values:</b>
   2295      * <ul>
   2296      *   <li>{@link #FLASH_MODE_OFF OFF}</li>
   2297      *   <li>{@link #FLASH_MODE_SINGLE SINGLE}</li>
   2298      *   <li>{@link #FLASH_MODE_TORCH TORCH}</li>
   2299      * </ul></p>
   2300      * <p>This key is available on all devices.</p>
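              * <p>For instance, a sketch that turns the torch on while keeping auto-exposure running (the
              * {@code builder} and {@code result} names stand in for objects the application already holds):</p>
              * <pre><code>builder.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_ON);
              * builder.set(CaptureRequest.FLASH_MODE, CameraMetadata.FLASH_MODE_TORCH);
              * // The flash status actually reported for a completed frame:
              * Integer flashState = result.get(CaptureResult.FLASH_STATE);
              * </code></pre>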
   2301      *
   2302      * @see CaptureRequest#CONTROL_AE_MODE
   2303      * @see CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER
   2304      * @see CameraCharacteristics#FLASH_INFO_AVAILABLE
   2305      * @see CaptureResult#FLASH_STATE
   2306      * @see #FLASH_MODE_OFF
   2307      * @see #FLASH_MODE_SINGLE
   2308      * @see #FLASH_MODE_TORCH
   2309      */
   2310     @PublicKey
   2311     public static final Key<Integer> FLASH_MODE =
   2312             new Key<Integer>("android.flash.mode", int.class);
   2313 
   2314     /**
   2315      * <p>Current state of the flash
   2316      * unit.</p>
    2317      * <p>When the camera device doesn't have a flash unit
   2318      * (i.e. <code>{@link CameraCharacteristics#FLASH_INFO_AVAILABLE android.flash.info.available} == false</code>), this state will always be UNAVAILABLE.
   2319      * Other states indicate the current flash status.</p>
   2320      * <p>In certain conditions, this will be available on LEGACY devices:</p>
   2321      * <ul>
   2322      * <li>Flash-less cameras always return UNAVAILABLE.</li>
   2323      * <li>Using {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} <code>==</code> ON_ALWAYS_FLASH
   2324      *    will always return FIRED.</li>
   2325      * <li>Using {@link CaptureRequest#FLASH_MODE android.flash.mode} <code>==</code> TORCH
   2326      *    will always return FIRED.</li>
   2327      * </ul>
   2328      * <p>In all other conditions the state will not be available on
   2329      * LEGACY devices (i.e. it will be <code>null</code>).</p>
   2330      * <p><b>Possible values:</b>
   2331      * <ul>
   2332      *   <li>{@link #FLASH_STATE_UNAVAILABLE UNAVAILABLE}</li>
   2333      *   <li>{@link #FLASH_STATE_CHARGING CHARGING}</li>
   2334      *   <li>{@link #FLASH_STATE_READY READY}</li>
   2335      *   <li>{@link #FLASH_STATE_FIRED FIRED}</li>
   2336      *   <li>{@link #FLASH_STATE_PARTIAL PARTIAL}</li>
   2337      * </ul></p>
   2338      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   2339      * <p><b>Limited capability</b> -
   2340      * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the
   2341      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
   2342      *
   2343      * @see CaptureRequest#CONTROL_AE_MODE
   2344      * @see CameraCharacteristics#FLASH_INFO_AVAILABLE
   2345      * @see CaptureRequest#FLASH_MODE
   2346      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
   2347      * @see #FLASH_STATE_UNAVAILABLE
   2348      * @see #FLASH_STATE_CHARGING
   2349      * @see #FLASH_STATE_READY
   2350      * @see #FLASH_STATE_FIRED
   2351      * @see #FLASH_STATE_PARTIAL
   2352      */
   2353     @PublicKey
   2354     public static final Key<Integer> FLASH_STATE =
   2355             new Key<Integer>("android.flash.state", int.class);
   2356 
   2357     /**
   2358      * <p>Operational mode for hot pixel correction.</p>
    2359      * <p>Hot pixel correction interpolates out, or otherwise removes, pixels
   2360      * that do not accurately measure the incoming light (i.e. pixels that
   2361      * are stuck at an arbitrary value or are oversensitive).</p>
   2362      * <p><b>Possible values:</b>
   2363      * <ul>
   2364      *   <li>{@link #HOT_PIXEL_MODE_OFF OFF}</li>
   2365      *   <li>{@link #HOT_PIXEL_MODE_FAST FAST}</li>
   2366      *   <li>{@link #HOT_PIXEL_MODE_HIGH_QUALITY HIGH_QUALITY}</li>
   2367      * </ul></p>
   2368      * <p><b>Available values for this device:</b><br>
   2369      * {@link CameraCharacteristics#HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES android.hotPixel.availableHotPixelModes}</p>
   2370      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   2371      *
   2372      * @see CameraCharacteristics#HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES
   2373      * @see #HOT_PIXEL_MODE_OFF
   2374      * @see #HOT_PIXEL_MODE_FAST
   2375      * @see #HOT_PIXEL_MODE_HIGH_QUALITY
   2376      */
   2377     @PublicKey
   2378     public static final Key<Integer> HOT_PIXEL_MODE =
   2379             new Key<Integer>("android.hotPixel.mode", int.class);
   2380 
   2381     /**
   2382      * <p>A location object to use when generating image GPS metadata.</p>
   2383      * <p>Setting a location object in a request will include the GPS coordinates of the location
   2384      * into any JPEG images captured based on the request. These coordinates can then be
   2385      * viewed by anyone who receives the JPEG image.</p>
   2386      * <p>This key is available on all devices.</p>
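              * <p>A short sketch of attaching a location to a JPEG capture (the {@code location} object is
              * assumed to have been obtained by the application, for example from a location provider, and
              * {@code builder} and {@code result} are assumed to exist already):</p>
              * <pre><code>builder.set(CaptureRequest.JPEG_GPS_LOCATION, location);
              * // The location that was embedded can be read back from the result:
              * android.location.Location embedded = result.get(CaptureResult.JPEG_GPS_LOCATION);
              * </code></pre>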
   2387      */
   2388     @PublicKey
   2389     @SyntheticKey
   2390     public static final Key<android.location.Location> JPEG_GPS_LOCATION =
   2391             new Key<android.location.Location>("android.jpeg.gpsLocation", android.location.Location.class);
   2392 
   2393     /**
   2394      * <p>GPS coordinates to include in output JPEG
   2395      * EXIF.</p>
   2396      * <p><b>Range of valid values:</b><br>
   2397      * (-180 - 180], [-90,90], [-inf, inf]</p>
   2398      * <p>This key is available on all devices.</p>
   2399      * @hide
   2400      */
   2401     public static final Key<double[]> JPEG_GPS_COORDINATES =
   2402             new Key<double[]>("android.jpeg.gpsCoordinates", double[].class);
   2403 
   2404     /**
   2405      * <p>32 characters describing GPS algorithm to
   2406      * include in EXIF.</p>
   2407      * <p><b>Units</b>: UTF-8 null-terminated string</p>
   2408      * <p>This key is available on all devices.</p>
   2409      * @hide
   2410      */
   2411     public static final Key<String> JPEG_GPS_PROCESSING_METHOD =
   2412             new Key<String>("android.jpeg.gpsProcessingMethod", String.class);
   2413 
   2414     /**
   2415      * <p>Time GPS fix was made to include in
   2416      * EXIF.</p>
   2417      * <p><b>Units</b>: UTC in seconds since January 1, 1970</p>
   2418      * <p>This key is available on all devices.</p>
   2419      * @hide
   2420      */
   2421     public static final Key<Long> JPEG_GPS_TIMESTAMP =
   2422             new Key<Long>("android.jpeg.gpsTimestamp", long.class);
   2423 
   2424     /**
   2425      * <p>The orientation for a JPEG image.</p>
   2426      * <p>The clockwise rotation angle in degrees, relative to the orientation
   2427      * to the camera, that the JPEG picture needs to be rotated by, to be viewed
   2428      * upright.</p>
   2429      * <p>Camera devices may either encode this value into the JPEG EXIF header, or
   2430      * rotate the image data to match this orientation. When the image data is rotated,
   2431      * the thumbnail data will also be rotated.</p>
   2432      * <p>Note that this orientation is relative to the orientation of the camera sensor, given
   2433      * by {@link CameraCharacteristics#SENSOR_ORIENTATION android.sensor.orientation}.</p>
   2434      * <p>To translate from the device orientation given by the Android sensor APIs for camera
   2435      * sensors which are not EXTERNAL, the following sample code may be used:</p>
   2436      * <pre><code>private int getJpegOrientation(CameraCharacteristics c, int deviceOrientation) {
   2437      *     if (deviceOrientation == android.view.OrientationEventListener.ORIENTATION_UNKNOWN) return 0;
   2438      *     int sensorOrientation = c.get(CameraCharacteristics.SENSOR_ORIENTATION);
   2439      *
   2440      *     // Round device orientation to a multiple of 90
   2441      *     deviceOrientation = (deviceOrientation + 45) / 90 * 90;
   2442      *
   2443      *     // Reverse device orientation for front-facing cameras
   2444      *     boolean facingFront = c.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT;
   2445      *     if (facingFront) deviceOrientation = -deviceOrientation;
   2446      *
   2447      *     // Calculate desired JPEG orientation relative to camera orientation to make
   2448      *     // the image upright relative to the device orientation
   2449      *     int jpegOrientation = (sensorOrientation + deviceOrientation + 360) % 360;
   2450      *
   2451      *     return jpegOrientation;
   2452      * }
   2453      * </code></pre>
   2454      * <p>For EXTERNAL cameras the sensor orientation will always be set to 0 and the facing will
    2455      * also be set to EXTERNAL. The above code is not relevant in such a case.</p>
   2456      * <p><b>Units</b>: Degrees in multiples of 90</p>
   2457      * <p><b>Range of valid values:</b><br>
   2458      * 0, 90, 180, 270</p>
   2459      * <p>This key is available on all devices.</p>
   2460      *
   2461      * @see CameraCharacteristics#SENSOR_ORIENTATION
   2462      */
   2463     @PublicKey
   2464     public static final Key<Integer> JPEG_ORIENTATION =
   2465             new Key<Integer>("android.jpeg.orientation", int.class);
   2466 
   2467     /**
   2468      * <p>Compression quality of the final JPEG
   2469      * image.</p>
   2470      * <p>85-95 is typical usage range.</p>
   2471      * <p><b>Range of valid values:</b><br>
   2472      * 1-100; larger is higher quality</p>
   2473      * <p>This key is available on all devices.</p>
   2474      */
   2475     @PublicKey
   2476     public static final Key<Byte> JPEG_QUALITY =
   2477             new Key<Byte>("android.jpeg.quality", byte.class);
   2478 
   2479     /**
   2480      * <p>Compression quality of JPEG
   2481      * thumbnail.</p>
   2482      * <p><b>Range of valid values:</b><br>
   2483      * 1-100; larger is higher quality</p>
   2484      * <p>This key is available on all devices.</p>
   2485      */
   2486     @PublicKey
   2487     public static final Key<Byte> JPEG_THUMBNAIL_QUALITY =
   2488             new Key<Byte>("android.jpeg.thumbnailQuality", byte.class);
   2489 
   2490     /**
   2491      * <p>Resolution of embedded JPEG thumbnail.</p>
    2492      * <p>When set to (0, 0), the JPEG EXIF will not contain a thumbnail,
   2493      * but the captured JPEG will still be a valid image.</p>
   2494      * <p>For best results, when issuing a request for a JPEG image, the thumbnail size selected
   2495      * should have the same aspect ratio as the main JPEG output.</p>
   2496      * <p>If the thumbnail image aspect ratio differs from the JPEG primary image aspect
   2497      * ratio, the camera device creates the thumbnail by cropping it from the primary image.
    2498      * For example, if the primary image has a 4:3 aspect ratio and the thumbnail image has a
   2499      * 16:9 aspect ratio, the primary image will be cropped vertically (letterbox) to
   2500      * generate the thumbnail image. The thumbnail image will always have a smaller Field
   2501      * Of View (FOV) than the primary image when aspect ratios differ.</p>
   2502      * <p>When an {@link CaptureRequest#JPEG_ORIENTATION android.jpeg.orientation} of non-zero degree is requested,
   2503      * the camera device will handle thumbnail rotation in one of the following ways:</p>
   2504      * <ul>
   2505      * <li>Set the {@link android.media.ExifInterface#TAG_ORIENTATION EXIF orientation flag}
   2506      *   and keep jpeg and thumbnail image data unrotated.</li>
   2507      * <li>Rotate the jpeg and thumbnail image data and not set
   2508      *   {@link android.media.ExifInterface#TAG_ORIENTATION EXIF orientation flag}. In this
    2509      *   case, LIMITED or FULL hardware level devices will report rotated thumbnail size in
   2510      *   capture result, so the width and height will be interchanged if 90 or 270 degree
    2511      *   orientation is requested. LEGACY devices will always report unrotated thumbnail
   2512      *   size.</li>
   2513      * </ul>
   2514      * <p><b>Range of valid values:</b><br>
   2515      * {@link CameraCharacteristics#JPEG_AVAILABLE_THUMBNAIL_SIZES android.jpeg.availableThumbnailSizes}</p>
   2516      * <p>This key is available on all devices.</p>
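              * <p>A rough sketch of picking a thumbnail size with the same aspect ratio as an already-chosen
              * main JPEG size (the {@code characteristics}, {@code jpegSize}, and {@code builder} names are
              * placeholders for objects the application already holds):</p>
              * <pre><code>android.util.Size[] sizes =
              *         characteristics.get(CameraCharacteristics.JPEG_AVAILABLE_THUMBNAIL_SIZES);
              * android.util.Size chosen = null;
              * for (android.util.Size s : sizes) {
              *     if (s.getWidth() == 0) continue; // (0, 0) means no thumbnail
              *     if (s.getWidth() * jpegSize.getHeight() == s.getHeight() * jpegSize.getWidth()) {
              *         chosen = s; // same aspect ratio as the main JPEG output
              *     }
              * }
              * if (chosen != null) {
              *     builder.set(CaptureRequest.JPEG_THUMBNAIL_SIZE, chosen);
              * }
              * </code></pre>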
   2517      *
   2518      * @see CameraCharacteristics#JPEG_AVAILABLE_THUMBNAIL_SIZES
   2519      * @see CaptureRequest#JPEG_ORIENTATION
   2520      */
   2521     @PublicKey
   2522     public static final Key<android.util.Size> JPEG_THUMBNAIL_SIZE =
   2523             new Key<android.util.Size>("android.jpeg.thumbnailSize", android.util.Size.class);
   2524 
   2525     /**
   2526      * <p>The desired lens aperture size, as a ratio of lens focal length to the
   2527      * effective aperture diameter.</p>
   2528      * <p>Setting this value is only supported on the camera devices that have a variable
   2529      * aperture lens.</p>
   2530      * <p>When this is supported and {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} is OFF,
   2531      * this can be set along with {@link CaptureRequest#SENSOR_EXPOSURE_TIME android.sensor.exposureTime},
   2532      * {@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity}, and {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration}
   2533      * to achieve manual exposure control.</p>
   2534      * <p>The requested aperture value may take several frames to reach the
   2535      * requested value; the camera device will report the current (intermediate)
   2536      * aperture size in capture result metadata while the aperture is changing.
   2537      * While the aperture is still changing, {@link CaptureResult#LENS_STATE android.lens.state} will be set to MOVING.</p>
   2538      * <p>When this is supported and {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} is one of
   2539      * the ON modes, this will be overridden by the camera device
   2540      * auto-exposure algorithm, the overridden values are then provided
   2541      * back to the user in the corresponding result.</p>
   2542      * <p><b>Units</b>: The f-number (f/N)</p>
   2543      * <p><b>Range of valid values:</b><br>
   2544      * {@link CameraCharacteristics#LENS_INFO_AVAILABLE_APERTURES android.lens.info.availableApertures}</p>
   2545      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   2546      * <p><b>Full capability</b> -
   2547      * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
   2548      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
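              * <p>An illustrative sketch of manual exposure with a fixed aperture (the {@code builder} and
              * {@code result} objects are assumed to exist, and the literal values below are examples that
              * must actually be supported by the device):</p>
              * <pre><code>builder.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_OFF);
              * builder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, 10000000L); // 10 ms, in nanoseconds
              * builder.set(CaptureRequest.SENSOR_SENSITIVITY, 200);         // ISO 200
              * builder.set(CaptureRequest.LENS_APERTURE, 2.0f);             // f/2.0, from availableApertures
              * // While the lens is still moving, the result reports the intermediate aperture:
              * Float currentAperture = result.get(CaptureResult.LENS_APERTURE);
              * </code></pre>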
   2549      *
   2550      * @see CaptureRequest#CONTROL_AE_MODE
   2551      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
   2552      * @see CameraCharacteristics#LENS_INFO_AVAILABLE_APERTURES
   2553      * @see CaptureResult#LENS_STATE
   2554      * @see CaptureRequest#SENSOR_EXPOSURE_TIME
   2555      * @see CaptureRequest#SENSOR_FRAME_DURATION
   2556      * @see CaptureRequest#SENSOR_SENSITIVITY
   2557      */
   2558     @PublicKey
   2559     public static final Key<Float> LENS_APERTURE =
   2560             new Key<Float>("android.lens.aperture", float.class);
   2561 
   2562     /**
   2563      * <p>The desired setting for the lens neutral density filter(s).</p>
   2564      * <p>This control will not be supported on most camera devices.</p>
   2565      * <p>Lens filters are typically used to lower the amount of light the
   2566      * sensor is exposed to (measured in steps of EV). As used here, an EV
    2567      * step is the standard logarithmic representation, which is
   2568      * non-negative, and inversely proportional to the amount of light
   2569      * hitting the sensor.  For example, setting this to 0 would result
   2570      * in no reduction of the incoming light, and setting this to 2 would
   2571      * mean that the filter is set to reduce incoming light by two stops
   2572      * (allowing 1/4 of the prior amount of light to the sensor).</p>
   2573      * <p>It may take several frames before the lens filter density changes
   2574      * to the requested value. While the filter density is still changing,
   2575      * {@link CaptureResult#LENS_STATE android.lens.state} will be set to MOVING.</p>
   2576      * <p><b>Units</b>: Exposure Value (EV)</p>
   2577      * <p><b>Range of valid values:</b><br>
   2578      * {@link CameraCharacteristics#LENS_INFO_AVAILABLE_FILTER_DENSITIES android.lens.info.availableFilterDensities}</p>
   2579      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   2580      * <p><b>Full capability</b> -
   2581      * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
   2582      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
   2583      *
   2584      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
   2585      * @see CameraCharacteristics#LENS_INFO_AVAILABLE_FILTER_DENSITIES
   2586      * @see CaptureResult#LENS_STATE
   2587      */
   2588     @PublicKey
   2589     public static final Key<Float> LENS_FILTER_DENSITY =
   2590             new Key<Float>("android.lens.filterDensity", float.class);
   2591 
   2592     /**
   2593      * <p>The desired lens focal length; used for optical zoom.</p>
   2594      * <p>This setting controls the physical focal length of the camera
   2595      * device's lens. Changing the focal length changes the field of
   2596      * view of the camera device, and is usually used for optical zoom.</p>
   2597      * <p>Like {@link CaptureRequest#LENS_FOCUS_DISTANCE android.lens.focusDistance} and {@link CaptureRequest#LENS_APERTURE android.lens.aperture}, this
   2598      * setting won't be applied instantaneously, and it may take several
   2599      * frames before the lens can change to the requested focal length.
   2600      * While the focal length is still changing, {@link CaptureResult#LENS_STATE android.lens.state} will
   2601      * be set to MOVING.</p>
   2602      * <p>Optical zoom will not be supported on most devices.</p>
   2603      * <p><b>Units</b>: Millimeters</p>
   2604      * <p><b>Range of valid values:</b><br>
   2605      * {@link CameraCharacteristics#LENS_INFO_AVAILABLE_FOCAL_LENGTHS android.lens.info.availableFocalLengths}</p>
   2606      * <p>This key is available on all devices.</p>
   2607      *
   2608      * @see CaptureRequest#LENS_APERTURE
   2609      * @see CaptureRequest#LENS_FOCUS_DISTANCE
   2610      * @see CameraCharacteristics#LENS_INFO_AVAILABLE_FOCAL_LENGTHS
   2611      * @see CaptureResult#LENS_STATE
   2612      */
   2613     @PublicKey
   2614     public static final Key<Float> LENS_FOCAL_LENGTH =
   2615             new Key<Float>("android.lens.focalLength", float.class);
   2616 
   2617     /**
   2618      * <p>Desired distance to plane of sharpest focus,
   2619      * measured from frontmost surface of the lens.</p>
    2620      * <p>Should be zero for fixed-focus cameras.</p>
   2621      * <p><b>Units</b>: See {@link CameraCharacteristics#LENS_INFO_FOCUS_DISTANCE_CALIBRATION android.lens.info.focusDistanceCalibration} for details</p>
   2622      * <p><b>Range of valid values:</b><br>
   2623      * &gt;= 0</p>
   2624      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   2625      * <p><b>Full capability</b> -
   2626      * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
   2627      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
   2628      *
   2629      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
   2630      * @see CameraCharacteristics#LENS_INFO_FOCUS_DISTANCE_CALIBRATION
   2631      */
   2632     @PublicKey
   2633     public static final Key<Float> LENS_FOCUS_DISTANCE =
   2634             new Key<Float>("android.lens.focusDistance", float.class);
   2635 
   2636     /**
   2637      * <p>The range of scene distances that are in
   2638      * sharp focus (depth of field).</p>
    2639      * <p>If variable focus is not supported, the camera device can still report a
    2640      * fixed depth of field range.</p>
   2641      * <p><b>Units</b>: A pair of focus distances in diopters: (near,
   2642      * far); see {@link CameraCharacteristics#LENS_INFO_FOCUS_DISTANCE_CALIBRATION android.lens.info.focusDistanceCalibration} for details.</p>
   2643      * <p><b>Range of valid values:</b><br>
   2644      * &gt;=0</p>
   2645      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   2646      * <p><b>Limited capability</b> -
   2647      * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the
   2648      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
   2649      *
   2650      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
   2651      * @see CameraCharacteristics#LENS_INFO_FOCUS_DISTANCE_CALIBRATION
   2652      */
   2653     @PublicKey
   2654     public static final Key<android.util.Pair<Float,Float>> LENS_FOCUS_RANGE =
   2655             new Key<android.util.Pair<Float,Float>>("android.lens.focusRange", new TypeReference<android.util.Pair<Float,Float>>() {{ }});
   2656 
   2657     /**
   2658      * <p>Sets whether the camera device uses optical image stabilization (OIS)
   2659      * when capturing images.</p>
   2660      * <p>OIS is used to compensate for motion blur due to small
   2661      * movements of the camera during capture. Unlike digital image
   2662      * stabilization ({@link CaptureRequest#CONTROL_VIDEO_STABILIZATION_MODE android.control.videoStabilizationMode}), OIS
   2663      * makes use of mechanical elements to stabilize the camera
   2664      * sensor, and thus allows for longer exposure times before
   2665      * camera shake becomes apparent.</p>
   2666      * <p>Switching between different optical stabilization modes may take several
    2667      * frames to initialize; the camera device will report the current mode in
    2668      * capture result metadata. For example, when "ON" mode is requested, the
    2669      * optical stabilization modes in the first several capture results may still
    2670      * be "OFF", and they will become "ON" when the initialization is done.</p>
   2671      * <p>If a camera device supports both OIS and digital image stabilization
   2672      * ({@link CaptureRequest#CONTROL_VIDEO_STABILIZATION_MODE android.control.videoStabilizationMode}), turning both modes on may produce undesirable
   2673      * interaction, so it is recommended not to enable both at the same time.</p>
   2674      * <p>Not all devices will support OIS; see
   2675      * {@link CameraCharacteristics#LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION android.lens.info.availableOpticalStabilization} for
   2676      * available controls.</p>
   2677      * <p><b>Possible values:</b>
   2678      * <ul>
   2679      *   <li>{@link #LENS_OPTICAL_STABILIZATION_MODE_OFF OFF}</li>
   2680      *   <li>{@link #LENS_OPTICAL_STABILIZATION_MODE_ON ON}</li>
   2681      * </ul></p>
   2682      * <p><b>Available values for this device:</b><br>
   2683      * {@link CameraCharacteristics#LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION android.lens.info.availableOpticalStabilization}</p>
   2684      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   2685      * <p><b>Limited capability</b> -
   2686      * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the
   2687      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
   2688      *
   2689      * @see CaptureRequest#CONTROL_VIDEO_STABILIZATION_MODE
   2690      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
   2691      * @see CameraCharacteristics#LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION
   2692      * @see #LENS_OPTICAL_STABILIZATION_MODE_OFF
   2693      * @see #LENS_OPTICAL_STABILIZATION_MODE_ON
   2694      */
   2695     @PublicKey
   2696     public static final Key<Integer> LENS_OPTICAL_STABILIZATION_MODE =
   2697             new Key<Integer>("android.lens.opticalStabilizationMode", int.class);
   2698 
   2699     /**
   2700      * <p>Current lens status.</p>
   2701      * <p>For lens parameters {@link CaptureRequest#LENS_FOCAL_LENGTH android.lens.focalLength}, {@link CaptureRequest#LENS_FOCUS_DISTANCE android.lens.focusDistance},
   2702      * {@link CaptureRequest#LENS_FILTER_DENSITY android.lens.filterDensity} and {@link CaptureRequest#LENS_APERTURE android.lens.aperture}, when changes are requested,
   2703      * they may take several frames to reach the requested values. This state indicates
   2704      * the current status of the lens parameters.</p>
   2705      * <p>When the state is STATIONARY, the lens parameters are not changing. This could be
   2706      * either because the parameters are all fixed, or because the lens has had enough
   2707      * time to reach the most recently-requested values.
    2708      * If none of these lens parameters are changeable for a camera device, as listed below:</p>
   2709      * <ul>
   2710      * <li>Fixed focus (<code>{@link CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE android.lens.info.minimumFocusDistance} == 0</code>), which means
   2711      * {@link CaptureRequest#LENS_FOCUS_DISTANCE android.lens.focusDistance} parameter will always be 0.</li>
   2712      * <li>Fixed focal length ({@link CameraCharacteristics#LENS_INFO_AVAILABLE_FOCAL_LENGTHS android.lens.info.availableFocalLengths} contains single value),
   2713      * which means the optical zoom is not supported.</li>
   2714      * <li>No ND filter ({@link CameraCharacteristics#LENS_INFO_AVAILABLE_FILTER_DENSITIES android.lens.info.availableFilterDensities} contains only 0).</li>
   2715      * <li>Fixed aperture ({@link CameraCharacteristics#LENS_INFO_AVAILABLE_APERTURES android.lens.info.availableApertures} contains single value).</li>
   2716      * </ul>
   2717      * <p>Then this state will always be STATIONARY.</p>
   2718      * <p>When the state is MOVING, it indicates that at least one of the lens parameters
   2719      * is changing.</p>
   2720      * <p><b>Possible values:</b>
   2721      * <ul>
   2722      *   <li>{@link #LENS_STATE_STATIONARY STATIONARY}</li>
   2723      *   <li>{@link #LENS_STATE_MOVING MOVING}</li>
   2724      * </ul></p>
   2725      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   2726      * <p><b>Limited capability</b> -
   2727      * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the
   2728      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
   2729      *
   2730      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
   2731      * @see CaptureRequest#LENS_APERTURE
   2732      * @see CaptureRequest#LENS_FILTER_DENSITY
   2733      * @see CaptureRequest#LENS_FOCAL_LENGTH
   2734      * @see CaptureRequest#LENS_FOCUS_DISTANCE
   2735      * @see CameraCharacteristics#LENS_INFO_AVAILABLE_APERTURES
   2736      * @see CameraCharacteristics#LENS_INFO_AVAILABLE_FILTER_DENSITIES
   2737      * @see CameraCharacteristics#LENS_INFO_AVAILABLE_FOCAL_LENGTHS
   2738      * @see CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE
   2739      * @see #LENS_STATE_STATIONARY
   2740      * @see #LENS_STATE_MOVING
   2741      */
   2742     @PublicKey
   2743     public static final Key<Integer> LENS_STATE =
   2744             new Key<Integer>("android.lens.state", int.class);
   2745 
   2746     /**
   2747      * <p>The orientation of the camera relative to the sensor
   2748      * coordinate system.</p>
   2749      * <p>The four coefficients that describe the quaternion
   2750      * rotation from the Android sensor coordinate system to a
   2751      * camera-aligned coordinate system where the X-axis is
   2752      * aligned with the long side of the image sensor, the Y-axis
   2753      * is aligned with the short side of the image sensor, and
   2754      * the Z-axis is aligned with the optical axis of the sensor.</p>
   2755      * <p>To convert from the quaternion coefficients <code>(x,y,z,w)</code>
   2756      * to the axis of rotation <code>(a_x, a_y, a_z)</code> and rotation
   2757      * amount <code>theta</code>, the following formulas can be used:</p>
   2758      * <pre><code> theta = 2 * acos(w)
   2759      * a_x = x / sin(theta/2)
   2760      * a_y = y / sin(theta/2)
   2761      * a_z = z / sin(theta/2)
   2762      * </code></pre>
   2763      * <p>To create a 3x3 rotation matrix that applies the rotation
   2764      * defined by this quaternion, the following matrix can be
   2765      * used:</p>
   2766      * <pre><code>R = [ 1 - 2y^2 - 2z^2,       2xy - 2zw,       2xz + 2yw,
   2767      *            2xy + 2zw, 1 - 2x^2 - 2z^2,       2yz - 2xw,
   2768      *            2xz - 2yw,       2yz + 2xw, 1 - 2x^2 - 2y^2 ]
   2769      * </code></pre>
   2770      * <p>This matrix can then be used to apply the rotation to a
   2771      *  column vector point with</p>
   2772      * <p><code>p' = Rp</code></p>
   2773      * <p>where <code>p</code> is in the device sensor coordinate system, and
   2774      *  <code>p'</code> is in the camera-oriented coordinate system.</p>
   2775      * <p><b>Units</b>:
   2776      * Quaternion coefficients</p>
   2777      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
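              * <p>A small sketch of the axis/angle conversion above in code (assuming a non-null value in an
              * existing {@code result}; the axis is undefined when {@code theta} is zero):</p>
              * <pre><code>float[] q = result.get(CaptureResult.LENS_POSE_ROTATION); // [x, y, z, w]
              * if (q != null) {
              *     double theta = 2 * Math.acos(q[3]);
              *     double s = Math.sin(theta / 2);
              *     double ax = q[0] / s;
              *     double ay = q[1] / s;
              *     double az = q[2] / s;
              * }
              * </code></pre>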
   2778      */
   2779     @PublicKey
   2780     public static final Key<float[]> LENS_POSE_ROTATION =
   2781             new Key<float[]>("android.lens.poseRotation", float[].class);
   2782 
   2783     /**
   2784      * <p>Position of the camera optical center.</p>
   2785      * <p>The position of the camera device's lens optical center,
   2786      * as a three-dimensional vector <code>(x,y,z)</code>.</p>
   2787      * <p>Prior to Android P, or when {@link CameraCharacteristics#LENS_POSE_REFERENCE android.lens.poseReference} is PRIMARY_CAMERA, this position
   2788      * is relative to the optical center of the largest camera device facing in the same
   2789      * direction as this camera, in the {@link android.hardware.SensorEvent Android sensor
   2790      * coordinate axes}. Note that only the axis definitions are shared with the sensor
   2791      * coordinate system, but not the origin.</p>
   2792      * <p>If this device is the largest or only camera device with a given facing, then this
   2793      * position will be <code>(0, 0, 0)</code>; a camera device with a lens optical center located 3 cm
   2794      * from the main sensor along the +X axis (to the right from the user's perspective) will
   2795      * report <code>(0.03, 0, 0)</code>.</p>
    2796      * <p>To transform pixel coordinates between two cameras facing the same direction, first
   2797      * the source camera {@link CameraCharacteristics#LENS_DISTORTION android.lens.distortion} must be corrected for.  Then the source
   2798      * camera {@link CameraCharacteristics#LENS_INTRINSIC_CALIBRATION android.lens.intrinsicCalibration} needs to be applied, followed by the
   2799      * {@link CameraCharacteristics#LENS_POSE_ROTATION android.lens.poseRotation} of the source camera, the translation of the source camera
   2800      * relative to the destination camera, the {@link CameraCharacteristics#LENS_POSE_ROTATION android.lens.poseRotation} of the destination
   2801      * camera, and finally the inverse of {@link CameraCharacteristics#LENS_INTRINSIC_CALIBRATION android.lens.intrinsicCalibration} of the destination
   2802      * camera. This obtains a radial-distortion-free coordinate in the destination camera pixel
   2803      * coordinates.</p>
   2804      * <p>To compare this against a real image from the destination camera, the destination camera
   2805      * image then needs to be corrected for radial distortion before comparison or sampling.</p>
   2806      * <p>When {@link CameraCharacteristics#LENS_POSE_REFERENCE android.lens.poseReference} is GYROSCOPE, then this position is relative to
   2807      * the center of the primary gyroscope on the device.</p>
   2808      * <p><b>Units</b>: Meters</p>
   2809      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   2810      *
   2811      * @see CameraCharacteristics#LENS_DISTORTION
   2812      * @see CameraCharacteristics#LENS_INTRINSIC_CALIBRATION
   2813      * @see CameraCharacteristics#LENS_POSE_REFERENCE
   2814      * @see CameraCharacteristics#LENS_POSE_ROTATION
   2815      */
   2816     @PublicKey
   2817     public static final Key<float[]> LENS_POSE_TRANSLATION =
   2818             new Key<float[]>("android.lens.poseTranslation", float[].class);
   2819 
   2820     /**
   2821      * <p>The parameters for this camera device's intrinsic
   2822      * calibration.</p>
   2823      * <p>The five calibration parameters that describe the
   2824      * transform from camera-centric 3D coordinates to sensor
   2825      * pixel coordinates:</p>
   2826      * <pre><code>[f_x, f_y, c_x, c_y, s]
   2827      * </code></pre>
   2828      * <p>Where <code>f_x</code> and <code>f_y</code> are the horizontal and vertical
   2829      * focal lengths, <code>[c_x, c_y]</code> is the position of the optical
   2830      * axis, and <code>s</code> is a skew parameter for the sensor plane not
   2831      * being aligned with the lens plane.</p>
   2832      * <p>These are typically used within a transformation matrix K:</p>
   2833      * <pre><code>K = [ f_x,   s, c_x,
   2834      *        0, f_y, c_y,
    2835      *        0,   0,   1 ]
   2836      * </code></pre>
   2837      * <p>which can then be combined with the camera pose rotation
   2838      * <code>R</code> and translation <code>t</code> ({@link CameraCharacteristics#LENS_POSE_ROTATION android.lens.poseRotation} and
    2839      * {@link CameraCharacteristics#LENS_POSE_TRANSLATION android.lens.poseTranslation}, respectively) to calculate the
   2840      * complete transform from world coordinates to pixel
   2841      * coordinates:</p>
   2842      * <pre><code>P = [ K 0   * [ R t
   2843      *      0 1 ]     0 1 ]
   2844      * </code></pre>
   2845      * <p>and with <code>p_w</code> being a point in the world coordinate system
   2846      * and <code>p_s</code> being a point in the camera active pixel array
   2847      * coordinate system, and with the mapping including the
   2848      * homogeneous division by z:</p>
   2849      * <pre><code> p_h = (x_h, y_h, z_h) = P p_w
   2850      * p_s = p_h / z_h
   2851      * </code></pre>
    2852      * <p>so <code>[x_s, y_s]</code> are the pixel coordinates of the world
   2853      * point, <code>z_s = 1</code>, and <code>w_s</code> is a measurement of disparity
   2854      * (depth) in pixel coordinates.</p>
   2855      * <p>Note that the coordinate system for this transform is the
   2856      * {@link CameraCharacteristics#SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE android.sensor.info.preCorrectionActiveArraySize} system,
   2857      * where <code>(0,0)</code> is the top-left of the
   2858      * preCorrectionActiveArraySize rectangle. Once the pose and
   2859      * intrinsic calibration transforms have been applied to a
   2860      * world point, then the {@link CameraCharacteristics#LENS_DISTORTION android.lens.distortion}
   2861      * transform needs to be applied, and the result adjusted to
   2862      * be in the {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize} coordinate
   2863      * system (where <code>(0, 0)</code> is the top-left of the
   2864      * activeArraySize rectangle), to determine the final pixel
   2865      * coordinate of the world point for processed (non-RAW)
   2866      * output buffers.</p>
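              * <p>As an illustrative sketch (not a part of this API), the five parameters can be read
              * from a capture result and assembled into <code>K</code>; the local names
              * <code>result</code> and <code>k</code> are assumptions:</p>
              * <pre><code>float[] calib = result.get(CaptureResult.LENS_INTRINSIC_CALIBRATION);
              * if (calib != null) {
              *     float fx = calib[0], fy = calib[1], cx = calib[2], cy = calib[3], s = calib[4];
              *     // Row-major 3x3 intrinsic matrix K, mapping camera-centric 3D coordinates
              *     // to pre-correction sensor pixel coordinates.
              *     float[] k = {
              *         fx,  s, cx,
              *          0, fy, cy,
              *          0,  0,  1 };
              * }
              * </code></pre>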
   2867      * <p><b>Units</b>:
   2868      * Pixels in the
   2869      * {@link CameraCharacteristics#SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE android.sensor.info.preCorrectionActiveArraySize}
   2870      * coordinate system.</p>
   2871      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   2872      *
   2873      * @see CameraCharacteristics#LENS_DISTORTION
   2874      * @see CameraCharacteristics#LENS_POSE_ROTATION
   2875      * @see CameraCharacteristics#LENS_POSE_TRANSLATION
   2876      * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
   2877      * @see CameraCharacteristics#SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE
   2878      */
   2879     @PublicKey
   2880     public static final Key<float[]> LENS_INTRINSIC_CALIBRATION =
   2881             new Key<float[]>("android.lens.intrinsicCalibration", float[].class);
   2882 
   2883     /**
   2884      * <p>The correction coefficients to correct for this camera device's
   2885      * radial and tangential lens distortion.</p>
   2886      * <p>Four radial distortion coefficients <code>[kappa_0, kappa_1, kappa_2,
   2887      * kappa_3]</code> and two tangential distortion coefficients
   2888      * <code>[kappa_4, kappa_5]</code> that can be used to correct the
   2889      * lens's geometric distortion with the mapping equations:</p>
   2890      * <pre><code> x_c = x_i * ( kappa_0 + kappa_1 * r^2 + kappa_2 * r^4 + kappa_3 * r^6 ) +
   2891      *        kappa_4 * (2 * x_i * y_i) + kappa_5 * ( r^2 + 2 * x_i^2 )
   2892      *  y_c = y_i * ( kappa_0 + kappa_1 * r^2 + kappa_2 * r^4 + kappa_3 * r^6 ) +
   2893      *        kappa_5 * (2 * x_i * y_i) + kappa_4 * ( r^2 + 2 * y_i^2 )
   2894      * </code></pre>
   2895      * <p>Here, <code>[x_c, y_c]</code> are the coordinates to sample in the
   2896      * input image that correspond to the pixel values in the
   2897      * corrected image at the coordinate <code>[x_i, y_i]</code>:</p>
   2898      * <pre><code> correctedImage(x_i, y_i) = sample_at(x_c, y_c, inputImage)
   2899      * </code></pre>
   2900      * <p>The pixel coordinates are defined in a normalized
   2901      * coordinate system related to the
   2902      * {@link CameraCharacteristics#LENS_INTRINSIC_CALIBRATION android.lens.intrinsicCalibration} calibration fields.
   2903      * Both <code>[x_i, y_i]</code> and <code>[x_c, y_c]</code> have <code>(0,0)</code> at the
   2904      * lens optical center <code>[c_x, c_y]</code>. The maximum magnitudes
   2905      * of both x and y coordinates are normalized to be 1 at the
   2906      * edge further from the optical center, so the range
   2907      * for both dimensions is <code>-1 &lt;= x &lt;= 1</code>.</p>
   2908      * <p>Finally, <code>r</code> represents the radial distance from the
   2909      * optical center, <code>r^2 = x_i^2 + y_i^2</code>, and its magnitude
   2910      * is therefore no larger than <code>|r| &lt;= sqrt(2)</code>.</p>
   2911      * <p>The distortion model used is the Brown-Conrady model.</p>
   2912      * <p><b>Units</b>:
   2913      * Unitless coefficients.</p>
   2914      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   2915      *
   2916      * @see CameraCharacteristics#LENS_INTRINSIC_CALIBRATION
   2917      * @deprecated
   2918      * <p>This field was inconsistently defined in terms of its
   2919      * normalization. Use {@link CameraCharacteristics#LENS_DISTORTION android.lens.distortion} instead.</p>
   2920      *
   2921      * @see CameraCharacteristics#LENS_DISTORTION
   2922 
   2923      */
   2924     @Deprecated
   2925     @PublicKey
   2926     public static final Key<float[]> LENS_RADIAL_DISTORTION =
   2927             new Key<float[]>("android.lens.radialDistortion", float[].class);
   2928 
   2929     /**
   2930      * <p>The correction coefficients to correct for this camera device's
   2931      * radial and tangential lens distortion.</p>
   2932      * <p>Replaces the deprecated {@link CameraCharacteristics#LENS_RADIAL_DISTORTION android.lens.radialDistortion} field, which was
   2933      * inconsistently defined.</p>
   2934      * <p>Three radial distortion coefficients <code>[kappa_1, kappa_2,
   2935      * kappa_3]</code> and two tangential distortion coefficients
   2936      * <code>[kappa_4, kappa_5]</code> that can be used to correct the
   2937      * lens's geometric distortion with the mapping equations:</p>
   2938      * <pre><code> x_c = x_i * ( 1 + kappa_1 * r^2 + kappa_2 * r^4 + kappa_3 * r^6 ) +
   2939      *        kappa_4 * (2 * x_i * y_i) + kappa_5 * ( r^2 + 2 * x_i^2 )
   2940      *  y_c = y_i * ( 1 + kappa_1 * r^2 + kappa_2 * r^4 + kappa_3 * r^6 ) +
   2941      *        kappa_5 * (2 * x_i * y_i) + kappa_4 * ( r^2 + 2 * y_i^2 )
   2942      * </code></pre>
   2943      * <p>Here, <code>[x_c, y_c]</code> are the coordinates to sample in the
   2944      * input image that correspond to the pixel values in the
   2945      * corrected image at the coordinate <code>[x_i, y_i]</code>:</p>
   2946      * <pre><code> correctedImage(x_i, y_i) = sample_at(x_c, y_c, inputImage)
   2947      * </code></pre>
   2948      * <p>The pixel coordinates are defined in a coordinate system
   2949      * related to the {@link CameraCharacteristics#LENS_INTRINSIC_CALIBRATION android.lens.intrinsicCalibration}
   2950      * calibration fields; see that entry for details of the mapping stages.
   2951      * Both <code>[x_i, y_i]</code> and <code>[x_c, y_c]</code>
   2952      * have <code>(0,0)</code> at the lens optical center <code>[c_x, c_y]</code>, and
   2953      * the range of the coordinates depends on the focal length
   2954      * terms of the intrinsic calibration.</p>
   2955      * <p>Finally, <code>r</code> represents the radial distance from the
   2956      * optical center, <code>r^2 = x_i^2 + y_i^2</code>.</p>
   2957      * <p>The distortion model used is the Brown-Conrady model.</p>
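              * <p>As an illustrative sketch (not a part of this API), the mapping can be applied to a
              * coordinate pair <code>[x_i, y_i]</code> in the intrinsic-calibration coordinate system;
              * the local names <code>dist</code>, <code>xi</code> and <code>yi</code> are assumptions:</p>
              * <pre><code>float[] dist = result.get(CaptureResult.LENS_DISTORTION);
              * if (dist != null) {
              *     float k1 = dist[0], k2 = dist[1], k3 = dist[2], k4 = dist[3], k5 = dist[4];
              *     float r2 = xi * xi + yi * yi;
              *     float radial = 1 + k1 * r2 + k2 * r2 * r2 + k3 * r2 * r2 * r2;
              *     float xc = xi * radial + k4 * (2 * xi * yi) + k5 * (r2 + 2 * xi * xi);
              *     float yc = yi * radial + k5 * (2 * xi * yi) + k4 * (r2 + 2 * yi * yi);
              *     // correctedImage(xi, yi) = sample_at(xc, yc, inputImage)
              * }
              * </code></pre>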
   2958      * <p><b>Units</b>:
   2959      * Unitless coefficients.</p>
   2960      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   2961      *
   2962      * @see CameraCharacteristics#LENS_INTRINSIC_CALIBRATION
   2963      * @see CameraCharacteristics#LENS_RADIAL_DISTORTION
   2964      */
   2965     @PublicKey
   2966     public static final Key<float[]> LENS_DISTORTION =
   2967             new Key<float[]>("android.lens.distortion", float[].class);
   2968 
   2969     /**
   2970      * <p>Mode of operation for the noise reduction algorithm.</p>
   2971      * <p>The noise reduction algorithm attempts to improve image quality by removing
   2972      * excessive noise added by the capture process, especially in dark conditions.</p>
   2973      * <p>OFF means no noise reduction will be applied by the camera device, for both raw and
   2974      * YUV domain.</p>
    2975      * <p>MINIMAL means that only sensor raw domain basic noise reduction is enabled, to remove
    2976      * demosaicing or other processing artifacts. For YUV_REPROCESSING, MINIMAL is the same as OFF.
    2977      * This mode is optional and may not be supported by all devices. The application should check
   2978      * {@link CameraCharacteristics#NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES android.noiseReduction.availableNoiseReductionModes} before using it.</p>
   2979      * <p>FAST/HIGH_QUALITY both mean camera device determined noise filtering
   2980      * will be applied. HIGH_QUALITY mode indicates that the camera device
   2981      * will use the highest-quality noise filtering algorithms,
   2982      * even if it slows down capture rate. FAST means the camera device will not
   2983      * slow down capture rate when applying noise filtering. FAST may be the same as MINIMAL if
   2984      * MINIMAL is listed, or the same as OFF if any noise filtering will slow down capture rate.
   2985      * Every output stream will have a similar amount of enhancement applied.</p>
   2986      * <p>ZERO_SHUTTER_LAG is meant to be used by applications that maintain a continuous circular
   2987      * buffer of high-resolution images during preview and reprocess image(s) from that buffer
   2988      * into a final capture when triggered by the user. In this mode, the camera device applies
   2989      * noise reduction to low-resolution streams (below maximum recording resolution) to maximize
   2990      * preview quality, but does not apply noise reduction to high-resolution streams, since
   2991      * those will be reprocessed later if necessary.</p>
   2992      * <p>For YUV_REPROCESSING, these FAST/HIGH_QUALITY modes both mean that the camera device
   2993      * will apply FAST/HIGH_QUALITY YUV domain noise reduction, respectively. The camera device
   2994      * may adjust the noise reduction parameters for best image quality based on the
   2995      * {@link CaptureRequest#REPROCESS_EFFECTIVE_EXPOSURE_FACTOR android.reprocess.effectiveExposureFactor} if it is set.</p>
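              * <p>As an illustrative sketch (not a part of this API), an application can verify that
              * MINIMAL is supported before requesting it; the local names <code>characteristics</code>
              * and <code>builder</code> are assumptions:</p>
              * <pre><code>int[] modes = characteristics.get(
              *         CameraCharacteristics.NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES);
              * boolean hasMinimal = false;
              * if (modes != null) {
              *     for (int mode : modes) {
              *         if (mode == CameraMetadata.NOISE_REDUCTION_MODE_MINIMAL) {
              *             hasMinimal = true;
              *         }
              *     }
              * }
              * builder.set(CaptureRequest.NOISE_REDUCTION_MODE,
              *         hasMinimal ? CameraMetadata.NOISE_REDUCTION_MODE_MINIMAL
              *                    : CameraMetadata.NOISE_REDUCTION_MODE_FAST);
              * </code></pre>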
   2996      * <p><b>Possible values:</b>
   2997      * <ul>
   2998      *   <li>{@link #NOISE_REDUCTION_MODE_OFF OFF}</li>
   2999      *   <li>{@link #NOISE_REDUCTION_MODE_FAST FAST}</li>
   3000      *   <li>{@link #NOISE_REDUCTION_MODE_HIGH_QUALITY HIGH_QUALITY}</li>
   3001      *   <li>{@link #NOISE_REDUCTION_MODE_MINIMAL MINIMAL}</li>
   3002      *   <li>{@link #NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG ZERO_SHUTTER_LAG}</li>
   3003      * </ul></p>
   3004      * <p><b>Available values for this device:</b><br>
   3005      * {@link CameraCharacteristics#NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES android.noiseReduction.availableNoiseReductionModes}</p>
   3006      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   3007      * <p><b>Full capability</b> -
   3008      * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
   3009      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
   3010      *
   3011      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
   3012      * @see CameraCharacteristics#NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES
   3013      * @see CaptureRequest#REPROCESS_EFFECTIVE_EXPOSURE_FACTOR
   3014      * @see #NOISE_REDUCTION_MODE_OFF
   3015      * @see #NOISE_REDUCTION_MODE_FAST
   3016      * @see #NOISE_REDUCTION_MODE_HIGH_QUALITY
   3017      * @see #NOISE_REDUCTION_MODE_MINIMAL
   3018      * @see #NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG
   3019      */
   3020     @PublicKey
   3021     public static final Key<Integer> NOISE_REDUCTION_MODE =
   3022             new Key<Integer>("android.noiseReduction.mode", int.class);
   3023 
   3024     /**
   3025      * <p>Whether a result given to the framework is the
   3026      * final one for the capture, or only a partial that contains a
   3027      * subset of the full set of dynamic metadata
   3028      * values.</p>
   3029      * <p>The entries in the result metadata buffers for a
   3030      * single capture may not overlap, except for this entry. The
   3031      * FINAL buffers must retain FIFO ordering relative to the
   3032      * requests that generate them, so the FINAL buffer for frame 3 must
   3033      * always be sent to the framework after the FINAL buffer for frame 2, and
   3034      * before the FINAL buffer for frame 4. PARTIAL buffers may be returned
   3035      * in any order relative to other frames, but all PARTIAL buffers for a given
   3036      * capture must arrive before the FINAL buffer for that capture. This entry may
   3037      * only be used by the camera device if quirks.usePartialResult is set to 1.</p>
   3038      * <p><b>Range of valid values:</b><br>
   3039      * Optional. Default value is FINAL.</p>
   3040      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   3041      * @deprecated
   3042      * <p>Not used in HALv3 or newer</p>
   3043 
   3044      * @hide
   3045      */
   3046     @Deprecated
   3047     public static final Key<Boolean> QUIRKS_PARTIAL_RESULT =
   3048             new Key<Boolean>("android.quirks.partialResult", boolean.class);
   3049 
   3050     /**
   3051      * <p>A frame counter set by the framework. This value monotonically
   3052      * increases with every new result (that is, each new result has a unique
   3053      * frameCount value).</p>
   3054      * <p>Reset on release()</p>
   3055      * <p><b>Units</b>: count of frames</p>
   3056      * <p><b>Range of valid values:</b><br>
   3057      * &gt; 0</p>
   3058      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   3059      * @deprecated
   3060      * <p>Not used in HALv3 or newer</p>
   3061 
   3062      * @hide
   3063      */
   3064     @Deprecated
   3065     public static final Key<Integer> REQUEST_FRAME_COUNT =
   3066             new Key<Integer>("android.request.frameCount", int.class);
   3067 
   3068     /**
   3069      * <p>An application-specified ID for the current
   3070      * request. Must be maintained unchanged in output
   3071      * frame</p>
   3072      * <p><b>Units</b>: arbitrary integer assigned by application</p>
   3073      * <p><b>Range of valid values:</b><br>
   3074      * Any int</p>
   3075      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   3076      * @hide
   3077      */
   3078     public static final Key<Integer> REQUEST_ID =
   3079             new Key<Integer>("android.request.id", int.class);
   3080 
   3081     /**
   3082      * <p>Specifies the number of pipeline stages the frame went
   3083      * through from when it was exposed to when the final completed result
   3084      * was available to the framework.</p>
   3085      * <p>Depending on what settings are used in the request, and
   3086      * what streams are configured, the data may undergo less processing,
   3087      * and some pipeline stages skipped.</p>
   3088      * <p>See {@link CameraCharacteristics#REQUEST_PIPELINE_MAX_DEPTH android.request.pipelineMaxDepth} for more details.</p>
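              * <p>As an illustrative sketch (not a part of this API), the per-frame depth can be compared
              * with the device maximum; the local names <code>result</code> and
              * <code>characteristics</code> are assumptions:</p>
              * <pre><code>Byte depth = result.get(CaptureResult.REQUEST_PIPELINE_DEPTH);
              * Byte maxDepth = characteristics.get(CameraCharacteristics.REQUEST_PIPELINE_MAX_DEPTH);
              * // For a completed result, depth is always &lt;= maxDepth.
              * </code></pre>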
   3089      * <p><b>Range of valid values:</b><br>
   3090      * &lt;= {@link CameraCharacteristics#REQUEST_PIPELINE_MAX_DEPTH android.request.pipelineMaxDepth}</p>
   3091      * <p>This key is available on all devices.</p>
   3092      *
   3093      * @see CameraCharacteristics#REQUEST_PIPELINE_MAX_DEPTH
   3094      */
   3095     @PublicKey
   3096     public static final Key<Byte> REQUEST_PIPELINE_DEPTH =
   3097             new Key<Byte>("android.request.pipelineDepth", byte.class);
   3098 
   3099     /**
   3100      * <p>The desired region of the sensor to read out for this capture.</p>
   3101      * <p>This control can be used to implement digital zoom.</p>
   3102      * <p>The crop region coordinate system is based off
   3103      * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}, with <code>(0, 0)</code> being the
   3104      * top-left corner of the sensor active array.</p>
   3105      * <p>Output streams use this rectangle to produce their output,
   3106      * cropping to a smaller region if necessary to maintain the
   3107      * stream's aspect ratio, then scaling the sensor input to
   3108      * match the output's configured resolution.</p>
   3109      * <p>The crop region is applied after the RAW to other color
   3110      * space (e.g. YUV) conversion. Since raw streams
   3111      * (e.g. RAW16) don't have the conversion stage, they are not
   3112      * croppable. The crop region will be ignored by raw streams.</p>
   3113      * <p>For non-raw streams, any additional per-stream cropping will
   3114      * be done to maximize the final pixel area of the stream.</p>
   3115      * <p>For example, if the crop region is set to a 4:3 aspect
   3116      * ratio, then 4:3 streams will use the exact crop
   3117      * region. 16:9 streams will further crop vertically
   3118      * (letterbox).</p>
   3119      * <p>Conversely, if the crop region is set to a 16:9, then 4:3
   3120      * outputs will crop horizontally (pillarbox), and 16:9
   3121      * streams will match exactly. These additional crops will
   3122      * be centered within the crop region.</p>
   3123      * <p>The width and height of the crop region cannot
   3124      * be set to be smaller than
   3125      * <code>floor( activeArraySize.width / {@link CameraCharacteristics#SCALER_AVAILABLE_MAX_DIGITAL_ZOOM android.scaler.availableMaxDigitalZoom} )</code> and
   3126      * <code>floor( activeArraySize.height / {@link CameraCharacteristics#SCALER_AVAILABLE_MAX_DIGITAL_ZOOM android.scaler.availableMaxDigitalZoom} )</code>, respectively.</p>
   3127      * <p>The camera device may adjust the crop region to account
   3128      * for rounding and other hardware requirements; the final
   3129      * crop region used will be included in the output capture
   3130      * result.</p>
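              * <p>As an illustrative sketch (not a part of this API), a centered 2x digital zoom can be
              * requested as follows; the local names <code>characteristics</code>, <code>zoom</code> and
              * <code>builder</code> are assumptions:</p>
              * <pre><code>Rect active = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
              * float zoom = 2.0f; // must not exceed android.scaler.availableMaxDigitalZoom
              * int cropW = (int) Math.floor(active.width() / zoom);
              * int cropH = (int) Math.floor(active.height() / zoom);
              * int left = active.left + (active.width() - cropW) / 2;
              * int top = active.top + (active.height() - cropH) / 2;
              * builder.set(CaptureRequest.SCALER_CROP_REGION,
              *         new Rect(left, top, left + cropW, top + cropH));
              * </code></pre>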
   3131      * <p><b>Units</b>: Pixel coordinates relative to
   3132      * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}</p>
   3133      * <p>This key is available on all devices.</p>
   3134      *
   3135      * @see CameraCharacteristics#SCALER_AVAILABLE_MAX_DIGITAL_ZOOM
   3136      * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
   3137      */
   3138     @PublicKey
   3139     public static final Key<android.graphics.Rect> SCALER_CROP_REGION =
   3140             new Key<android.graphics.Rect>("android.scaler.cropRegion", android.graphics.Rect.class);
   3141 
   3142     /**
   3143      * <p>Duration each pixel is exposed to
   3144      * light.</p>
   3145      * <p>If the sensor can't expose this exact duration, it will shorten the
   3146      * duration exposed to the nearest possible value (rather than expose longer).
   3147      * The final exposure time used will be available in the output capture result.</p>
   3148      * <p>This control is only effective if {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} or {@link CaptureRequest#CONTROL_MODE android.control.mode} is set to
   3149      * OFF; otherwise the auto-exposure algorithm will override this value.</p>
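              * <p>As an illustrative sketch (not a part of this API), a manual 10 ms exposure can be
              * requested within the advertised range; the local names <code>characteristics</code> and
              * <code>builder</code> are assumptions:</p>
              * <pre><code>Range&lt;Long&gt; range = characteristics.get(
              *         CameraCharacteristics.SENSOR_INFO_EXPOSURE_TIME_RANGE);
              * long exposureNs = Math.max(range.getLower(),
              *         Math.min(range.getUpper(), 10000000L)); // 10 ms in nanoseconds
              * builder.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_OFF);
              * builder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, exposureNs);
              * </code></pre>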
   3150      * <p><b>Units</b>: Nanoseconds</p>
   3151      * <p><b>Range of valid values:</b><br>
   3152      * {@link CameraCharacteristics#SENSOR_INFO_EXPOSURE_TIME_RANGE android.sensor.info.exposureTimeRange}</p>
   3153      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   3154      * <p><b>Full capability</b> -
   3155      * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
   3156      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
   3157      *
   3158      * @see CaptureRequest#CONTROL_AE_MODE
   3159      * @see CaptureRequest#CONTROL_MODE
   3160      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
   3161      * @see CameraCharacteristics#SENSOR_INFO_EXPOSURE_TIME_RANGE
   3162      */
   3163     @PublicKey
   3164     public static final Key<Long> SENSOR_EXPOSURE_TIME =
   3165             new Key<Long>("android.sensor.exposureTime", long.class);
   3166 
   3167     /**
   3168      * <p>Duration from start of frame exposure to
   3169      * start of next frame exposure.</p>
   3170      * <p>The maximum frame rate that can be supported by a camera subsystem is
   3171      * a function of many factors:</p>
   3172      * <ul>
   3173      * <li>Requested resolutions of output image streams</li>
   3174      * <li>Availability of binning / skipping modes on the imager</li>
   3175      * <li>The bandwidth of the imager interface</li>
   3176      * <li>The bandwidth of the various ISP processing blocks</li>
   3177      * </ul>
   3178      * <p>Since these factors can vary greatly between different ISPs and
   3179      * sensors, the camera abstraction tries to represent the bandwidth
   3180      * restrictions with as simple a model as possible.</p>
   3181      * <p>The model presented has the following characteristics:</p>
   3182      * <ul>
   3183      * <li>The image sensor is always configured to output the smallest
   3184      * resolution possible given the application's requested output stream
   3185      * sizes.  The smallest resolution is defined as being at least as large
   3186      * as the largest requested output stream size; the camera pipeline must
   3187      * never digitally upsample sensor data when the crop region covers the
   3188      * whole sensor. In general, this means that if only small output stream
   3189      * resolutions are configured, the sensor can provide a higher frame
   3190      * rate.</li>
   3191      * <li>Since any request may use any or all the currently configured
   3192      * output streams, the sensor and ISP must be configured to support
   3193      * scaling a single capture to all the streams at the same time.  This
   3194      * means the camera pipeline must be ready to produce the largest
   3195      * requested output size without any delay.  Therefore, the overall
   3196      * frame rate of a given configured stream set is governed only by the
   3197      * largest requested stream resolution.</li>
   3198      * <li>Using more than one output stream in a request does not affect the
   3199      * frame duration.</li>
   3200      * <li>Certain format-streams may need to do additional background processing
   3201      * before data is consumed/produced by that stream. These processors
   3202      * can run concurrently to the rest of the camera pipeline, but
   3203      * cannot process more than 1 capture at a time.</li>
   3204      * </ul>
   3205      * <p>The necessary information for the application, given the model above, is provided via
   3206      * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration }.
   3207      * These are used to determine the maximum frame rate / minimum frame duration that is
   3208      * possible for a given stream configuration.</p>
   3209      * <p>Specifically, the application can use the following rules to
   3210      * determine the minimum frame duration it can request from the camera
   3211      * device:</p>
   3212      * <ol>
   3213      * <li>Let the set of currently configured input/output streams be called <code>S</code>.</li>
   3214      * <li>Find the minimum frame durations for each stream in <code>S</code>, by looking it up in {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration }
   3215      * (with its respective size/format). Let this set of frame durations be called <code>F</code>.</li>
   3216      * <li>For any given request <code>R</code>, the minimum frame duration allowed for <code>R</code> is the maximum
   3217      * out of all values in <code>F</code>. Let the streams used in <code>R</code> be called <code>S_r</code>.</li>
   3218      * </ol>
   3219      * <p>If none of the streams in <code>S_r</code> have a stall time (listed in {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration }
   3220      * using its respective size/format), then the frame duration in <code>F</code> determines the steady
   3221      * state frame rate that the application will get if it uses <code>R</code> as a repeating request. Let
   3222      * this special kind of request be called <code>Rsimple</code>.</p>
   3223      * <p>A repeating request <code>Rsimple</code> can be <em>occasionally</em> interleaved by a single capture of a
   3224      * new request <code>Rstall</code> (which has at least one in-use stream with a non-0 stall time) and if
   3225      * <code>Rstall</code> has the same minimum frame duration this will not cause a frame rate loss if all
   3226      * buffers from the previous <code>Rstall</code> have already been delivered.</p>
   3227      * <p>For more details about stalling, see {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration }.</p>
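              * <p>As an illustrative sketch (not a part of this API), the minimum frame duration for one
              * configured output can be looked up as follows; the local names
              * <code>characteristics</code> and <code>previewSize</code> are assumptions:</p>
              * <pre><code>StreamConfigurationMap map = characteristics.get(
              *         CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
              * long minFrameDurationNs = map.getOutputMinFrameDuration(
              *         ImageFormat.YUV_420_888, previewSize);
              * // With multiple configured streams, the request's minimum frame duration is the
              * // maximum of the per-stream minimum durations (the set F above).
              * </code></pre>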
   3228      * <p>This control is only effective if {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} or {@link CaptureRequest#CONTROL_MODE android.control.mode} is set to
   3229      * OFF; otherwise the auto-exposure algorithm will override this value.</p>
   3230      * <p><b>Units</b>: Nanoseconds</p>
   3231      * <p><b>Range of valid values:</b><br>
   3232      * See {@link CameraCharacteristics#SENSOR_INFO_MAX_FRAME_DURATION android.sensor.info.maxFrameDuration}, {@link android.hardware.camera2.params.StreamConfigurationMap }.
   3233      * The duration is capped to <code>max(duration, exposureTime + overhead)</code>.</p>
   3234      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   3235      * <p><b>Full capability</b> -
   3236      * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
   3237      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
   3238      *
   3239      * @see CaptureRequest#CONTROL_AE_MODE
   3240      * @see CaptureRequest#CONTROL_MODE
   3241      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
   3242      * @see CameraCharacteristics#SENSOR_INFO_MAX_FRAME_DURATION
   3243      */
   3244     @PublicKey
   3245     public static final Key<Long> SENSOR_FRAME_DURATION =
   3246             new Key<Long>("android.sensor.frameDuration", long.class);
   3247 
   3248     /**
   3249      * <p>The amount of gain applied to sensor data
   3250      * before processing.</p>
   3251      * <p>The sensitivity is the standard ISO sensitivity value,
   3252      * as defined in ISO 12232:2006.</p>
   3253      * <p>The sensitivity must be within {@link CameraCharacteristics#SENSOR_INFO_SENSITIVITY_RANGE android.sensor.info.sensitivityRange}, and
    3254      * if it is less than {@link CameraCharacteristics#SENSOR_MAX_ANALOG_SENSITIVITY android.sensor.maxAnalogSensitivity}, the camera device
   3255      * is guaranteed to use only analog amplification for applying the gain.</p>
   3256      * <p>If the camera device cannot apply the exact sensitivity
   3257      * requested, it will reduce the gain to the nearest supported
   3258      * value. The final sensitivity used will be available in the
   3259      * output capture result.</p>
   3260      * <p>This control is only effective if {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} or {@link CaptureRequest#CONTROL_MODE android.control.mode} is set to
   3261      * OFF; otherwise the auto-exposure algorithm will override this value.</p>
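              * <p>As an illustrative sketch (not a part of this API), a manual sensitivity can be
              * clamped to the advertised range; the local names <code>characteristics</code> and
              * <code>builder</code> are assumptions:</p>
              * <pre><code>Range&lt;Integer&gt; range = characteristics.get(
              *         CameraCharacteristics.SENSOR_INFO_SENSITIVITY_RANGE);
              * Integer maxAnalog = characteristics.get(
              *         CameraCharacteristics.SENSOR_MAX_ANALOG_SENSITIVITY);
              * int iso = Math.max(range.getLower(), Math.min(range.getUpper(), 400));
              * builder.set(CaptureRequest.SENSOR_SENSITIVITY, iso);
              * // If iso is no larger than maxAnalog, only analog gain will be used.
              * </code></pre>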
   3262      * <p><b>Units</b>: ISO arithmetic units</p>
   3263      * <p><b>Range of valid values:</b><br>
   3264      * {@link CameraCharacteristics#SENSOR_INFO_SENSITIVITY_RANGE android.sensor.info.sensitivityRange}</p>
   3265      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   3266      * <p><b>Full capability</b> -
   3267      * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
   3268      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
   3269      *
   3270      * @see CaptureRequest#CONTROL_AE_MODE
   3271      * @see CaptureRequest#CONTROL_MODE
   3272      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
   3273      * @see CameraCharacteristics#SENSOR_INFO_SENSITIVITY_RANGE
   3274      * @see CameraCharacteristics#SENSOR_MAX_ANALOG_SENSITIVITY
   3275      */
   3276     @PublicKey
   3277     public static final Key<Integer> SENSOR_SENSITIVITY =
   3278             new Key<Integer>("android.sensor.sensitivity", int.class);
   3279 
   3280     /**
   3281      * <p>Time at start of exposure of first
   3282      * row of the image sensor active array, in nanoseconds.</p>
   3283      * <p>The timestamps are also included in all image
   3284      * buffers produced for the same capture, and will be identical
   3285      * on all the outputs.</p>
   3286      * <p>When {@link CameraCharacteristics#SENSOR_INFO_TIMESTAMP_SOURCE android.sensor.info.timestampSource} <code>==</code> UNKNOWN,
   3287      * the timestamps measure time since an unspecified starting point,
   3288      * and are monotonically increasing. They can be compared with the
   3289      * timestamps for other captures from the same camera device, but are
   3290      * not guaranteed to be comparable to any other time source.</p>
   3291      * <p>When {@link CameraCharacteristics#SENSOR_INFO_TIMESTAMP_SOURCE android.sensor.info.timestampSource} <code>==</code> REALTIME, the
   3292      * timestamps measure time in the same timebase as {@link android.os.SystemClock#elapsedRealtimeNanos }, and they can
   3293      * be compared to other timestamps from other subsystems that
   3294      * are using that base.</p>
   3295      * <p>For reprocessing, the timestamp will match the start of exposure of
   3296      * the input image, i.e. {@link CaptureResult#SENSOR_TIMESTAMP the
   3297      * timestamp} in the TotalCaptureResult that was used to create the
   3298      * reprocess capture request.</p>
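              * <p>As an illustrative sketch (not a part of this API), an application can check whether
              * the timestamp is comparable to {@link android.os.SystemClock#elapsedRealtimeNanos };
              * the local names <code>characteristics</code> and <code>result</code> are assumptions:</p>
              * <pre><code>Integer source = characteristics.get(
              *         CameraCharacteristics.SENSOR_INFO_TIMESTAMP_SOURCE);
              * boolean realtime = source != null
              *         &amp;&amp; source == CameraMetadata.SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME;
              * Long timestampNs = result.get(CaptureResult.SENSOR_TIMESTAMP);
              * </code></pre>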
   3299      * <p><b>Units</b>: Nanoseconds</p>
   3300      * <p><b>Range of valid values:</b><br>
   3301      * &gt; 0</p>
   3302      * <p>This key is available on all devices.</p>
   3303      *
   3304      * @see CameraCharacteristics#SENSOR_INFO_TIMESTAMP_SOURCE
   3305      */
   3306     @PublicKey
   3307     public static final Key<Long> SENSOR_TIMESTAMP =
   3308             new Key<Long>("android.sensor.timestamp", long.class);
   3309 
   3310     /**
   3311      * <p>The estimated camera neutral color in the native sensor colorspace at
   3312      * the time of capture.</p>
   3313      * <p>This value gives the neutral color point encoded as an RGB value in the
   3314      * native sensor color space.  The neutral color point indicates the
   3315      * currently estimated white point of the scene illumination.  It can be
   3316      * used to interpolate between the provided color transforms when
   3317      * processing raw sensor data.</p>
   3318      * <p>The order of the values is R, G, B; where R is in the lowest index.</p>
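              * <p>As an illustrative sketch (not a part of this API), the neutral point can be read as
              * floating-point values; the local name <code>result</code> is an assumption:</p>
              * <pre><code>Rational[] neutral = result.get(CaptureResult.SENSOR_NEUTRAL_COLOR_POINT);
              * if (neutral != null &amp;&amp; neutral.length == 3) {
              *     float r = neutral[0].floatValue();
              *     float g = neutral[1].floatValue();
              *     float b = neutral[2].floatValue();
              * }
              * </code></pre>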
   3319      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   3320      */
   3321     @PublicKey
   3322     public static final Key<Rational[]> SENSOR_NEUTRAL_COLOR_POINT =
   3323             new Key<Rational[]>("android.sensor.neutralColorPoint", Rational[].class);
   3324 
   3325     /**
   3326      * <p>Noise model coefficients for each CFA mosaic channel.</p>
   3327      * <p>This key contains two noise model coefficients for each CFA channel
   3328      * corresponding to the sensor amplification (S) and sensor readout
   3329      * noise (O).  These are given as pairs of coefficients for each channel
   3330      * in the same order as channels listed for the CFA layout key
   3331      * (see {@link CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT android.sensor.info.colorFilterArrangement}).  This is
   3332      * represented as an array of Pair&lt;Double, Double&gt;, where
   3333      * the first member of the Pair at index n is the S coefficient and the
   3334      * second member is the O coefficient for the nth color channel in the CFA.</p>
   3335      * <p>These coefficients are used in a two parameter noise model to describe
   3336      * the amount of noise present in the image for each CFA channel.  The
   3337      * noise model used here is:</p>
   3338      * <p>N(x) = sqrt(Sx + O)</p>
   3339      * <p>Where x represents the recorded signal of a CFA channel normalized to
    3340      * the range [0, 1], and S and O are the noise model coefficients for
   3341      * that channel.</p>
   3342      * <p>A more detailed description of the noise model can be found in the
   3343      * Adobe DNG specification for the NoiseProfile tag.</p>
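              * <p>As an illustrative sketch (not a part of this API), the model can be evaluated for one
              * channel; the local name <code>result</code> is an assumption:</p>
              * <pre><code>android.util.Pair&lt;Double, Double&gt;[] profile =
              *         result.get(CaptureResult.SENSOR_NOISE_PROFILE);
              * if (profile != null &amp;&amp; profile.length &gt; 0) {
              *     double s = profile[0].first;   // amplification coefficient S for channel 0
              *     double o = profile[0].second;  // readout noise coefficient O for channel 0
              *     double x = 0.5;                // normalized signal level in [0, 1]
              *     double noise = Math.sqrt(s * x + o);
              * }
              * </code></pre>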
   3344      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   3345      *
   3346      * @see CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT
   3347      */
   3348     @PublicKey
   3349     public static final Key<android.util.Pair<Double,Double>[]> SENSOR_NOISE_PROFILE =
   3350             new Key<android.util.Pair<Double,Double>[]>("android.sensor.noiseProfile", new TypeReference<android.util.Pair<Double,Double>[]>() {{ }});
   3351 
   3352     /**
   3353      * <p>The worst-case divergence between Bayer green channels.</p>
   3354      * <p>This value is an estimate of the worst case split between the
   3355      * Bayer green channels in the red and blue rows in the sensor color
   3356      * filter array.</p>
   3357      * <p>The green split is calculated as follows:</p>
   3358      * <ol>
   3359      * <li>A 5x5 pixel (or larger) window W within the active sensor array is
   3360      * chosen. The term 'pixel' here is taken to mean a group of 4 Bayer
   3361      * mosaic channels (R, Gr, Gb, B).  The location and size of the window
    3362      * chosen are implementation defined, and should be chosen to provide a
   3363      * green split estimate that is both representative of the entire image
   3364      * for this camera sensor, and can be calculated quickly.</li>
   3365      * <li>The arithmetic mean of the green channels from the red
   3366      * rows (mean_Gr) within W is computed.</li>
   3367      * <li>The arithmetic mean of the green channels from the blue
   3368      * rows (mean_Gb) within W is computed.</li>
   3369      * <li>The maximum ratio R of the two means is computed as follows:
   3370      * <code>R = max((mean_Gr + 1)/(mean_Gb + 1), (mean_Gb + 1)/(mean_Gr + 1))</code></li>
   3371      * </ol>
   3372      * <p>The ratio R is the green split divergence reported for this property,
   3373      * which represents how much the green channels differ in the mosaic
   3374      * pattern.  This value is typically used to determine the treatment of
   3375      * the green mosaic channels when demosaicing.</p>
   3376      * <p>The green split value can be roughly interpreted as follows:</p>
   3377      * <ul>
   3378      * <li>R &lt; 1.03 is a negligible split (&lt;3% divergence).</li>
    3379      * <li>1.03 &lt;= R &lt;= 1.20 will require some software
    3380      * correction to avoid demosaic errors (3-20% divergence).</li>
    3381      * <li>R &gt; 1.20 will require strong software correction to produce
    3382      * a usable image (&gt;20% divergence).</li>
   3383      * </ul>
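              * <p>As an illustrative sketch (not a part of this API), the interpretation above can be
              * applied directly; the local name <code>result</code> is an assumption:</p>
              * <pre><code>Float greenSplit = result.get(CaptureResult.SENSOR_GREEN_SPLIT);
              * if (greenSplit != null) {
              *     if (greenSplit &lt; 1.03f) {
              *         // negligible split; no special demosaic treatment needed
              *     } else if (greenSplit &lt;= 1.20f) {
              *         // moderate split; some software correction needed
              *     } else {
              *         // strong split; strong software correction needed
              *     }
              * }
              * </code></pre>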
   3384      * <p><b>Range of valid values:</b><br></p>
   3385      * <p>&gt;= 0</p>
   3386      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   3387      */
   3388     @PublicKey
   3389     public static final Key<Float> SENSOR_GREEN_SPLIT =
   3390             new Key<Float>("android.sensor.greenSplit", float.class);
   3391 
   3392     /**
   3393      * <p>A pixel <code>[R, G_even, G_odd, B]</code> that supplies the test pattern
   3394      * when {@link CaptureRequest#SENSOR_TEST_PATTERN_MODE android.sensor.testPatternMode} is SOLID_COLOR.</p>
   3395      * <p>Each color channel is treated as an unsigned 32-bit integer.
    3396      * The camera device then uses only the most significant bits of each
    3397      * channel, matching the bit depth of its Bayer raw sensor
    3398      * output.</p>
   3399      * <p>For example, a sensor with RAW10 Bayer output would use the
   3400      * 10 most significant bits from each color channel.</p>
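              * <p>As an illustrative sketch (not a part of this API), a solid green test frame can be
              * requested as follows; the local name <code>builder</code> is an assumption:</p>
              * <pre><code>builder.set(CaptureRequest.SENSOR_TEST_PATTERN_MODE,
              *         CameraMetadata.SENSOR_TEST_PATTERN_MODE_SOLID_COLOR);
              * // [R, G_even, G_odd, B]: zero red/blue, full-scale green channels
              * builder.set(CaptureRequest.SENSOR_TEST_PATTERN_DATA,
              *         new int[] { 0, 0xFFFFFFFF, 0xFFFFFFFF, 0 });
              * </code></pre>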
   3401      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   3402      *
   3403      * @see CaptureRequest#SENSOR_TEST_PATTERN_MODE
   3404      */
   3405     @PublicKey
   3406     public static final Key<int[]> SENSOR_TEST_PATTERN_DATA =
   3407             new Key<int[]>("android.sensor.testPatternData", int[].class);
   3408 
   3409     /**
   3410      * <p>When enabled, the sensor sends a test pattern instead of
   3411      * doing a real exposure from the camera.</p>
   3412      * <p>When a test pattern is enabled, all manual sensor controls specified
   3413      * by android.sensor.* will be ignored. All other controls should
   3414      * work as normal.</p>
   3415      * <p>For example, if manual flash is enabled, flash firing should still
    3416      * occur (and the test pattern should remain unmodified, since the flash
   3417      * would not actually affect it).</p>
   3418      * <p>Defaults to OFF.</p>
   3419      * <p><b>Possible values:</b>
   3420      * <ul>
   3421      *   <li>{@link #SENSOR_TEST_PATTERN_MODE_OFF OFF}</li>
   3422      *   <li>{@link #SENSOR_TEST_PATTERN_MODE_SOLID_COLOR SOLID_COLOR}</li>
   3423      *   <li>{@link #SENSOR_TEST_PATTERN_MODE_COLOR_BARS COLOR_BARS}</li>
   3424      *   <li>{@link #SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY COLOR_BARS_FADE_TO_GRAY}</li>
   3425      *   <li>{@link #SENSOR_TEST_PATTERN_MODE_PN9 PN9}</li>
   3426      *   <li>{@link #SENSOR_TEST_PATTERN_MODE_CUSTOM1 CUSTOM1}</li>
   3427      * </ul></p>
   3428      * <p><b>Available values for this device:</b><br>
   3429      * {@link CameraCharacteristics#SENSOR_AVAILABLE_TEST_PATTERN_MODES android.sensor.availableTestPatternModes}</p>
   3430      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   3431      *
   3432      * @see CameraCharacteristics#SENSOR_AVAILABLE_TEST_PATTERN_MODES
   3433      * @see #SENSOR_TEST_PATTERN_MODE_OFF
   3434      * @see #SENSOR_TEST_PATTERN_MODE_SOLID_COLOR
   3435      * @see #SENSOR_TEST_PATTERN_MODE_COLOR_BARS
   3436      * @see #SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY
   3437      * @see #SENSOR_TEST_PATTERN_MODE_PN9
   3438      * @see #SENSOR_TEST_PATTERN_MODE_CUSTOM1
   3439      */
   3440     @PublicKey
   3441     public static final Key<Integer> SENSOR_TEST_PATTERN_MODE =
   3442             new Key<Integer>("android.sensor.testPatternMode", int.class);
   3443 
   3444     /**
   3445      * <p>Duration between the start of first row exposure
   3446      * and the start of last row exposure.</p>
   3447      * <p>This is the exposure time skew between the first and last
   3448      * row exposure start times. The first row and the last row are
   3449      * the first and last rows inside of the
   3450      * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.</p>
   3451      * <p>For typical camera sensors that use rolling shutters, this is also equivalent
   3452      * to the frame readout time.</p>
   3453      * <p><b>Units</b>: Nanoseconds</p>
   3454      * <p><b>Range of valid values:</b><br>
   3455      * &gt;= 0 and &lt;
   3456      * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration }.</p>
   3457      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   3458      * <p><b>Limited capability</b> -
   3459      * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the
   3460      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
   3461      *
   3462      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
   3463      * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
   3464      */
   3465     @PublicKey
   3466     public static final Key<Long> SENSOR_ROLLING_SHUTTER_SKEW =
   3467             new Key<Long>("android.sensor.rollingShutterSkew", long.class);
   3468 
   3469     /**
   3470      * <p>A per-frame dynamic black level offset for each of the color filter
   3471      * arrangement (CFA) mosaic channels.</p>
   3472      * <p>Camera sensor black levels may vary dramatically for different
   3473      * capture settings (e.g. {@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity}). The fixed black
   3474      * level reported by {@link CameraCharacteristics#SENSOR_BLACK_LEVEL_PATTERN android.sensor.blackLevelPattern} may be too
   3475      * inaccurate to represent the actual value on a per-frame basis. The
   3476      * camera device internal pipeline relies on reliable black level values
   3477      * to process the raw images appropriately. To get the best image
   3478      * quality, the camera device may choose to estimate the per frame black
   3479      * level values either based on optically shielded black regions
   3480      * ({@link CameraCharacteristics#SENSOR_OPTICAL_BLACK_REGIONS android.sensor.opticalBlackRegions}) or its internal model.</p>
   3481      * <p>This key reports the camera device estimated per-frame zero light
   3482      * value for each of the CFA mosaic channels in the camera sensor. The
   3483      * {@link CameraCharacteristics#SENSOR_BLACK_LEVEL_PATTERN android.sensor.blackLevelPattern} may only represent a coarse
   3484      * approximation of the actual black level values. This value is the
   3485      * black level used in camera device internal image processing pipeline
   3486      * and generally more accurate than the fixed black level values.
   3487      * However, since they are estimated values by the camera device, they
   3488      * may not be as accurate as the black level values calculated from the
   3489      * optical black pixels reported by {@link CameraCharacteristics#SENSOR_OPTICAL_BLACK_REGIONS android.sensor.opticalBlackRegions}.</p>
   3490      * <p>The values are given in the same order as channels listed for the CFA
   3491      * layout key (see {@link CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT android.sensor.info.colorFilterArrangement}), i.e. the
   3492      * nth value given corresponds to the black level offset for the nth
   3493      * color channel listed in the CFA.</p>
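              * <p>As an illustrative sketch (not a part of this API), the per-channel black levels can
              * be read as follows; the local name <code>result</code> is an assumption:</p>
              * <pre><code>float[] blackLevels = result.get(CaptureResult.SENSOR_DYNAMIC_BLACK_LEVEL);
              * if (blackLevels != null) {
              *     // One entry per CFA channel, in android.sensor.info.colorFilterArrangement order;
              *     // use these values when linearizing RAW data for this frame.
              *     for (float level : blackLevels) {
              *         // ...
              *     }
              * }
              * </code></pre>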
   3494      * <p>This key will be available if {@link CameraCharacteristics#SENSOR_OPTICAL_BLACK_REGIONS android.sensor.opticalBlackRegions} is available or the
   3495      * camera device advertises this key via {@link android.hardware.camera2.CameraCharacteristics#getAvailableCaptureResultKeys }.</p>
   3496      * <p><b>Range of valid values:</b><br>
   3497      * &gt;= 0 for each.</p>
   3498      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   3499      *
   3500      * @see CameraCharacteristics#SENSOR_BLACK_LEVEL_PATTERN
   3501      * @see CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT
   3502      * @see CameraCharacteristics#SENSOR_OPTICAL_BLACK_REGIONS
   3503      * @see CaptureRequest#SENSOR_SENSITIVITY
   3504      */
   3505     @PublicKey
   3506     public static final Key<float[]> SENSOR_DYNAMIC_BLACK_LEVEL =
   3507             new Key<float[]>("android.sensor.dynamicBlackLevel", float[].class);
   3508 
   3509     /**
   3510      * <p>Maximum raw value output by sensor for this frame.</p>
   3511      * <p>Since the {@link CameraCharacteristics#SENSOR_BLACK_LEVEL_PATTERN android.sensor.blackLevelPattern} may change for different
   3512      * capture settings (e.g., {@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity}), the white
   3513      * level will change accordingly. This key is similar to
   3514      * {@link CameraCharacteristics#SENSOR_INFO_WHITE_LEVEL android.sensor.info.whiteLevel}, but specifies the camera device
   3515      * estimated white level for each frame.</p>
   3516      * <p>This key will be available if {@link CameraCharacteristics#SENSOR_OPTICAL_BLACK_REGIONS android.sensor.opticalBlackRegions} is
   3517      * available or the camera device advertises this key via
   3518      * {@link android.hardware.camera2.CameraCharacteristics#getAvailableCaptureRequestKeys }.</p>
   3519      * <p><b>Range of valid values:</b><br>
   3520      * &gt;= 0</p>
   3521      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   3522      *
   3523      * @see CameraCharacteristics#SENSOR_BLACK_LEVEL_PATTERN
   3524      * @see CameraCharacteristics#SENSOR_INFO_WHITE_LEVEL
   3525      * @see CameraCharacteristics#SENSOR_OPTICAL_BLACK_REGIONS
   3526      * @see CaptureRequest#SENSOR_SENSITIVITY
   3527      */
   3528     @PublicKey
   3529     public static final Key<Integer> SENSOR_DYNAMIC_WHITE_LEVEL =
   3530             new Key<Integer>("android.sensor.dynamicWhiteLevel", int.class);
   3531 
   3532     /**
   3533      * <p>Quality of lens shading correction applied
   3534      * to the image data.</p>
   3535      * <p>When set to OFF mode, no lens shading correction will be applied by the
   3536      * camera device, and an identity lens shading map data will be provided
    3537      * if <code>{@link CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE android.statistics.lensShadingMapMode} == ON</code>. For example, for a lens
    3538      * shading map with a size of <code>[ 4, 3 ]</code>,
   3539      * the output {@link CaptureResult#STATISTICS_LENS_SHADING_CORRECTION_MAP android.statistics.lensShadingCorrectionMap} for this case will be an identity
   3540      * map shown below:</p>
   3541      * <pre><code>[ 1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
   3542      *  1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
   3543      *  1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
   3544      *  1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
   3545      *  1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
   3546      *  1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0 ]
   3547      * </code></pre>
   3548      * <p>When set to other modes, lens shading correction will be applied by the camera
   3549      * device. Applications can request lens shading map data by setting
   3550      * {@link CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE android.statistics.lensShadingMapMode} to ON, and then the camera device will provide lens
   3551      * shading map data in {@link CaptureResult#STATISTICS_LENS_SHADING_CORRECTION_MAP android.statistics.lensShadingCorrectionMap}; the returned shading map
   3552      * data will be the one applied by the camera device for this capture request.</p>
   3553      * <p>The shading map data may depend on the auto-exposure (AE) and AWB statistics, therefore
   3554      * the reliability of the map data may be affected by the AE and AWB algorithms. When AE and
    3555      * AWB are in AUTO modes ({@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} <code>!=</code> OFF and {@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode} <code>!=</code>
   3556      * OFF), to get best results, it is recommended that the applications wait for the AE and AWB
   3557      * to be converged before using the returned shading map data.</p>
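              * <p>As an illustrative sketch (not a part of this API), an application can request shading
              * map output alongside a shading mode; the local name <code>builder</code> is an
              * assumption:</p>
              * <pre><code>builder.set(CaptureRequest.SHADING_MODE, CameraMetadata.SHADING_MODE_HIGH_QUALITY);
              * builder.set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE,
              *         CameraMetadata.STATISTICS_LENS_SHADING_MAP_MODE_ON);
              * // The map actually applied is then returned in
              * // CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP.
              * </code></pre>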
   3558      * <p><b>Possible values:</b>
   3559      * <ul>
   3560      *   <li>{@link #SHADING_MODE_OFF OFF}</li>
   3561      *   <li>{@link #SHADING_MODE_FAST FAST}</li>
   3562      *   <li>{@link #SHADING_MODE_HIGH_QUALITY HIGH_QUALITY}</li>
   3563      * </ul></p>
   3564      * <p><b>Available values for this device:</b><br>
   3565      * {@link CameraCharacteristics#SHADING_AVAILABLE_MODES android.shading.availableModes}</p>
   3566      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   3567      * <p><b>Full capability</b> -
   3568      * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
   3569      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
   3570      *
   3571      * @see CaptureRequest#CONTROL_AE_MODE
   3572      * @see CaptureRequest#CONTROL_AWB_MODE
   3573      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
   3574      * @see CameraCharacteristics#SHADING_AVAILABLE_MODES
   3575      * @see CaptureResult#STATISTICS_LENS_SHADING_CORRECTION_MAP
   3576      * @see CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE
   3577      * @see #SHADING_MODE_OFF
   3578      * @see #SHADING_MODE_FAST
   3579      * @see #SHADING_MODE_HIGH_QUALITY
   3580      */
   3581     @PublicKey
   3582     public static final Key<Integer> SHADING_MODE =
   3583             new Key<Integer>("android.shading.mode", int.class);
   3584 
   3585     /**
   3586      * <p>Operating mode for the face detector
   3587      * unit.</p>
   3588      * <p>Whether face detection is enabled, and whether it
   3589      * should output just the basic fields or the full set of
   3590      * fields.</p>
   3591      * <p><b>Possible values:</b>
   3592      * <ul>
   3593      *   <li>{@link #STATISTICS_FACE_DETECT_MODE_OFF OFF}</li>
   3594      *   <li>{@link #STATISTICS_FACE_DETECT_MODE_SIMPLE SIMPLE}</li>
   3595      *   <li>{@link #STATISTICS_FACE_DETECT_MODE_FULL FULL}</li>
   3596      * </ul></p>
   3597      * <p><b>Available values for this device:</b><br>
   3598      * {@link CameraCharacteristics#STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES android.statistics.info.availableFaceDetectModes}</p>
   3599      * <p>This key is available on all devices.</p>
   3600      *
   3601      * @see CameraCharacteristics#STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES
   3602      * @see #STATISTICS_FACE_DETECT_MODE_OFF
   3603      * @see #STATISTICS_FACE_DETECT_MODE_SIMPLE
   3604      * @see #STATISTICS_FACE_DETECT_MODE_FULL
   3605      */
   3606     @PublicKey
   3607     public static final Key<Integer> STATISTICS_FACE_DETECT_MODE =
   3608             new Key<Integer>("android.statistics.faceDetectMode", int.class);
   3609 
   3610     /**
   3611      * <p>List of unique IDs for detected faces.</p>
   3612      * <p>Each detected face is given a unique ID that is valid for as long as the face is visible
   3613      * to the camera device.  A face that leaves the field of view and later returns may be
   3614      * assigned a new ID.</p>
    3615      * <p>Only available if {@link CaptureRequest#STATISTICS_FACE_DETECT_MODE android.statistics.faceDetectMode} == FULL.</p>
    3616      * <p>This key is available on all devices.</p>
   3617      *
   3618      * @see CaptureRequest#STATISTICS_FACE_DETECT_MODE
   3619      * @hide
   3620      */
   3621     public static final Key<int[]> STATISTICS_FACE_IDS =
   3622             new Key<int[]>("android.statistics.faceIds", int[].class);
   3623 
   3624     /**
   3625      * <p>List of landmarks for detected
   3626      * faces.</p>
   3627      * <p>The coordinate system is that of {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}, with
   3628      * <code>(0, 0)</code> being the top-left pixel of the active array.</p>
    3629      * <p>Only available if {@link CaptureRequest#STATISTICS_FACE_DETECT_MODE android.statistics.faceDetectMode} == FULL.</p>
    3630      * <p>This key is available on all devices.</p>
   3631      *
   3632      * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
   3633      * @see CaptureRequest#STATISTICS_FACE_DETECT_MODE
   3634      * @hide
   3635      */
   3636     public static final Key<int[]> STATISTICS_FACE_LANDMARKS =
   3637             new Key<int[]>("android.statistics.faceLandmarks", int[].class);
   3638 
   3639     /**
   3640      * <p>List of the bounding rectangles for detected
   3641      * faces.</p>
   3642      * <p>The coordinate system is that of {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}, with
   3643      * <code>(0, 0)</code> being the top-left pixel of the active array.</p>
    3644      * <p>Only available if {@link CaptureRequest#STATISTICS_FACE_DETECT_MODE android.statistics.faceDetectMode} != OFF.</p>
    3645      * <p>This key is available on all devices.</p>
   3646      *
   3647      * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
   3648      * @see CaptureRequest#STATISTICS_FACE_DETECT_MODE
   3649      * @hide
   3650      */
   3651     public static final Key<android.graphics.Rect[]> STATISTICS_FACE_RECTANGLES =
   3652             new Key<android.graphics.Rect[]>("android.statistics.faceRectangles", android.graphics.Rect[].class);
   3653 
   3654     /**
   3655      * <p>List of the face confidence scores for
   3656      * detected faces</p>
   3657      * <p>Only available if {@link CaptureRequest#STATISTICS_FACE_DETECT_MODE android.statistics.faceDetectMode} != OFF.</p>
   3658      * <p><b>Range of valid values:</b><br>
   3659      * 1-100</p>
   3660      * <p>This key is available on all devices.</p>
   3661      *
   3662      * @see CaptureRequest#STATISTICS_FACE_DETECT_MODE
   3663      * @hide
   3664      */
   3665     public static final Key<byte[]> STATISTICS_FACE_SCORES =
   3666             new Key<byte[]>("android.statistics.faceScores", byte[].class);
   3667 
   3668     /**
   3669      * <p>List of the faces detected through camera face detection
   3670      * in this capture.</p>
   3671      * <p>Only available if {@link CaptureRequest#STATISTICS_FACE_DETECT_MODE android.statistics.faceDetectMode} <code>!=</code> OFF.</p>
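              * <p>As an illustrative sketch (not a part of this API), the detected faces can be read as
              * follows; the local name <code>result</code> is an assumption:</p>
              * <pre><code>Face[] faces = result.get(CaptureResult.STATISTICS_FACES);
              * if (faces != null) {
              *     for (Face face : faces) {
              *         Rect bounds = face.getBounds(); // in activeArraySize coordinates
              *         int score = face.getScore();    // 1-100
              *     }
              * }
              * </code></pre>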
   3672      * <p>This key is available on all devices.</p>
   3673      *
   3674      * @see CaptureRequest#STATISTICS_FACE_DETECT_MODE
   3675      */
   3676     @PublicKey
   3677     @SyntheticKey
   3678     public static final Key<android.hardware.camera2.params.Face[]> STATISTICS_FACES =
   3679             new Key<android.hardware.camera2.params.Face[]>("android.statistics.faces", android.hardware.camera2.params.Face[].class);
   3680 
   3681     /**
   3682      * <p>The shading map is a low-resolution floating-point map
   3683      * that lists the coefficients used to correct for vignetting, for each
   3684      * Bayer color channel.</p>
   3685      * <p>The map provided here is the same map that is used by the camera device to
   3686      * correct both color shading and vignetting for output non-RAW images.</p>
   3687      * <p>When there is no lens shading correction applied to RAW
   3688      * output images ({@link CameraCharacteristics#SENSOR_INFO_LENS_SHADING_APPLIED android.sensor.info.lensShadingApplied} <code>==</code>
   3689      * false), this map is the complete lens shading correction
   3690      * map; when there is some lens shading correction applied to
    3691      * the RAW output image ({@link CameraCharacteristics#SENSOR_INFO_LENS_SHADING_APPLIED android.sensor.info.lensShadingApplied} <code>==</code> true), this map reports the remaining lens shading
   3692      * correction map that needs to be applied to get shading
   3693      * corrected images that match the camera device's output for
   3694      * non-RAW formats.</p>
   3695      * <p>For a complete shading correction map, the least shaded
   3696      * section of the image will have a gain factor of 1; all
   3697      * other sections will have gains above 1.</p>
   3698      * <p>When {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} = TRANSFORM_MATRIX, the map
   3699      * will take into account the colorCorrection settings.</p>
   3700      * <p>The shading map is for the entire active pixel array, and is not
   3701      * affected by the crop region specified in the request. Each shading map
   3702      * entry is the value of the shading compensation map over a specific
   3703      * pixel on the sensor.  Specifically, with a (N x M) resolution shading
   3704      * map, and an active pixel array size (W x H), shading map entry
   3705      * (x,y)  (0 ... N-1, 0 ... M-1) is the value of the shading map at
   3706      * pixel ( ((W-1)/(N-1)) * x, ((H-1)/(M-1)) * y) for the four color channels.
   3707      * The map is assumed to be bilinearly interpolated between the sample points.</p>
   3708      * <p>The channel order is [R, Geven, Godd, B], where Geven is the green
   3709      * channel for the even rows of a Bayer pattern, and Godd is the odd rows.
   3710      * The shading map is stored in a fully interleaved format.</p>
   3711      * <p>The shading map will generally have on the order of 30-40 rows and columns,
   3712      * and will be smaller than 64x64.</p>
   3713      * <p>As an example, given a very small map defined as:</p>
   3714      * <pre><code>width,height = [ 4, 3 ]
   3715      * values =
   3716      * [ 1.3, 1.2, 1.15, 1.2,  1.2, 1.2, 1.15, 1.2,
   3717      *     1.1, 1.2, 1.2, 1.2,  1.3, 1.2, 1.3, 1.3,
   3718      *   1.2, 1.2, 1.25, 1.1,  1.1, 1.1, 1.1, 1.0,
   3719      *     1.0, 1.0, 1.0, 1.0,  1.2, 1.3, 1.25, 1.2,
   3720      *   1.3, 1.2, 1.2, 1.3,   1.2, 1.15, 1.1, 1.2,
   3721      *     1.2, 1.1, 1.0, 1.2,  1.3, 1.15, 1.2, 1.3 ]
   3722      * </code></pre>
    3723      * <p>The low-resolution shading map images for each channel are
   3724      * (displayed using nearest-neighbor interpolation):</p>
   3725      * <p><img alt="Red lens shading map" src="/reference/images/camera2/metadata/android.statistics.lensShadingMap/red_shading.png" />
   3726      * <img alt="Green (even rows) lens shading map" src="/reference/images/camera2/metadata/android.statistics.lensShadingMap/green_e_shading.png" />
   3727      * <img alt="Green (odd rows) lens shading map" src="/reference/images/camera2/metadata/android.statistics.lensShadingMap/green_o_shading.png" />
   3728      * <img alt="Blue lens shading map" src="/reference/images/camera2/metadata/android.statistics.lensShadingMap/blue_shading.png" /></p>
   3729      * <p>As a visualization only, inverting the full-color map to recover an
   3730      * image of a gray wall (using bicubic interpolation for visual quality) as captured by the sensor gives:</p>
   3731      * <p><img alt="Image of a uniform white wall (inverse shading map)" src="/reference/images/camera2/metadata/android.statistics.lensShadingMap/inv_shading.png" /></p>
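              * <p>A minimal sketch of reading the map from a result and sampling one gain factor
              * (assumes a {@link TotalCaptureResult} named {@code result}; the channel constant
              * comes from {@code RggbChannelVector}):</p>
              * <pre><code>
              * LensShadingMap map = result.get(CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP);
              * if (map != null) {
              *     int columns = map.getColumnCount();
              *     int rows = map.getRowCount();
              *     // Gain for the red channel at the top-left sample point
              *     float gain = map.getGainFactor(RggbChannelVector.RED, 0, 0);
              * }
              * </code></pre>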
   3732      * <p><b>Range of valid values:</b><br>
   3733      * Each gain factor is &gt;= 1</p>
   3734      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   3735      * <p><b>Full capability</b> -
   3736      * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
   3737      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
   3738      *
   3739      * @see CaptureRequest#COLOR_CORRECTION_MODE
   3740      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
   3741      * @see CameraCharacteristics#SENSOR_INFO_LENS_SHADING_APPLIED
   3742      */
   3743     @PublicKey
   3744     public static final Key<android.hardware.camera2.params.LensShadingMap> STATISTICS_LENS_SHADING_CORRECTION_MAP =
   3745             new Key<android.hardware.camera2.params.LensShadingMap>("android.statistics.lensShadingCorrectionMap", android.hardware.camera2.params.LensShadingMap.class);
   3746 
   3747     /**
   3748      * <p>The shading map is a low-resolution floating-point map
   3749      * that lists the coefficients used to correct for vignetting and color shading,
   3750      * for each Bayer color channel of RAW image data.</p>
   3751      * <p>The map provided here is the same map that is used by the camera device to
   3752      * correct both color shading and vignetting for output non-RAW images.</p>
   3753      * <p>When there is no lens shading correction applied to RAW
   3754      * output images ({@link CameraCharacteristics#SENSOR_INFO_LENS_SHADING_APPLIED android.sensor.info.lensShadingApplied} <code>==</code>
   3755      * false), this map is the complete lens shading correction
   3756      * map; when there is some lens shading correction applied to
    3757      * the RAW output image ({@link CameraCharacteristics#SENSOR_INFO_LENS_SHADING_APPLIED android.sensor.info.lensShadingApplied} <code>==</code> true), this map reports the remaining lens shading
   3758      * correction map that needs to be applied to get shading
   3759      * corrected images that match the camera device's output for
   3760      * non-RAW formats.</p>
   3761      * <p>For a complete shading correction map, the least shaded
   3762      * section of the image will have a gain factor of 1; all
   3763      * other sections will have gains above 1.</p>
   3764      * <p>When {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} = TRANSFORM_MATRIX, the map
   3765      * will take into account the colorCorrection settings.</p>
   3766      * <p>The shading map is for the entire active pixel array, and is not
   3767      * affected by the crop region specified in the request. Each shading map
   3768      * entry is the value of the shading compensation map over a specific
   3769      * pixel on the sensor.  Specifically, with a (N x M) resolution shading
   3770      * map, and an active pixel array size (W x H), shading map entry
   3771      * (x,y)  (0 ... N-1, 0 ... M-1) is the value of the shading map at
   3772      * pixel ( ((W-1)/(N-1)) * x, ((H-1)/(M-1)) * y) for the four color channels.
   3773      * The map is assumed to be bilinearly interpolated between the sample points.</p>
   3774      * <p>The channel order is [R, Geven, Godd, B], where Geven is the green
   3775      * channel for the even rows of a Bayer pattern, and Godd is the odd rows.
   3776      * The shading map is stored in a fully interleaved format, and its size
   3777      * is provided in the camera static metadata by android.lens.info.shadingMapSize.</p>
   3778      * <p>The shading map will generally have on the order of 30-40 rows and columns,
   3779      * and will be smaller than 64x64.</p>
   3780      * <p>As an example, given a very small map defined as:</p>
   3781      * <pre><code>android.lens.info.shadingMapSize = [ 4, 3 ]
   3782      * android.statistics.lensShadingMap =
   3783      * [ 1.3, 1.2, 1.15, 1.2,  1.2, 1.2, 1.15, 1.2,
   3784      *     1.1, 1.2, 1.2, 1.2,  1.3, 1.2, 1.3, 1.3,
   3785      *   1.2, 1.2, 1.25, 1.1,  1.1, 1.1, 1.1, 1.0,
   3786      *     1.0, 1.0, 1.0, 1.0,  1.2, 1.3, 1.25, 1.2,
   3787      *   1.3, 1.2, 1.2, 1.3,   1.2, 1.15, 1.1, 1.2,
   3788      *     1.2, 1.1, 1.0, 1.2,  1.3, 1.15, 1.2, 1.3 ]
   3789      * </code></pre>
    3790      * <p>The low-resolution shading map images for each channel are
   3791      * (displayed using nearest-neighbor interpolation):</p>
   3792      * <p><img alt="Red lens shading map" src="/reference/images/camera2/metadata/android.statistics.lensShadingMap/red_shading.png" />
   3793      * <img alt="Green (even rows) lens shading map" src="/reference/images/camera2/metadata/android.statistics.lensShadingMap/green_e_shading.png" />
   3794      * <img alt="Green (odd rows) lens shading map" src="/reference/images/camera2/metadata/android.statistics.lensShadingMap/green_o_shading.png" />
   3795      * <img alt="Blue lens shading map" src="/reference/images/camera2/metadata/android.statistics.lensShadingMap/blue_shading.png" /></p>
   3796      * <p>As a visualization only, inverting the full-color map to recover an
   3797      * image of a gray wall (using bicubic interpolation for visual quality)
   3798      * as captured by the sensor gives:</p>
   3799      * <p><img alt="Image of a uniform white wall (inverse shading map)" src="/reference/images/camera2/metadata/android.statistics.lensShadingMap/inv_shading.png" /></p>
   3800      * <p>Note that the RAW image data might be subject to lens shading
   3801      * correction not reported on this map. Query
    3802      * {@link CameraCharacteristics#SENSOR_INFO_LENS_SHADING_APPLIED android.sensor.info.lensShadingApplied} to see if RAW image data has been subject
    3803      * to lens shading correction. If {@link CameraCharacteristics#SENSOR_INFO_LENS_SHADING_APPLIED android.sensor.info.lensShadingApplied}
    3804      * is TRUE, the RAW image data is subject to partial or full lens shading
    3805      * correction. In the case that full lens shading correction is applied to RAW
   3806      * images, the gain factor map reported in this key will contain all 1.0 gains.
   3807      * In other words, the map reported in this key is the remaining lens shading
   3808      * that needs to be applied on the RAW image to get images without lens shading
   3809      * artifacts. See {@link CameraCharacteristics#REQUEST_MAX_NUM_OUTPUT_RAW android.request.maxNumOutputRaw} for a list of RAW image
   3810      * formats.</p>
   3811      * <p><b>Range of valid values:</b><br>
   3812      * Each gain factor is &gt;= 1</p>
   3813      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   3814      * <p><b>Full capability</b> -
   3815      * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
   3816      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
   3817      *
   3818      * @see CaptureRequest#COLOR_CORRECTION_MODE
   3819      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
   3820      * @see CameraCharacteristics#REQUEST_MAX_NUM_OUTPUT_RAW
   3821      * @see CameraCharacteristics#SENSOR_INFO_LENS_SHADING_APPLIED
   3822      * @hide
   3823      */
   3824     public static final Key<float[]> STATISTICS_LENS_SHADING_MAP =
   3825             new Key<float[]>("android.statistics.lensShadingMap", float[].class);
   3826 
   3827     /**
   3828      * <p>The best-fit color channel gains calculated
   3829      * by the camera device's statistics units for the current output frame.</p>
   3830      * <p>This may be different than the gains used for this frame,
   3831      * since statistics processing on data from a new frame
   3832      * typically completes after the transform has already been
   3833      * applied to that frame.</p>
   3834      * <p>The 4 channel gains are defined in Bayer domain,
   3835      * see {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} for details.</p>
   3836      * <p>This value should always be calculated by the auto-white balance (AWB) block,
   3837      * regardless of the android.control.* current values.</p>
   3838      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   3839      *
   3840      * @see CaptureRequest#COLOR_CORRECTION_GAINS
   3841      * @deprecated
   3842      * <p>Never fully implemented or specified; do not use</p>
    3843      *
   3844      * @hide
   3845      */
   3846     @Deprecated
   3847     public static final Key<float[]> STATISTICS_PREDICTED_COLOR_GAINS =
   3848             new Key<float[]>("android.statistics.predictedColorGains", float[].class);
   3849 
   3850     /**
   3851      * <p>The best-fit color transform matrix estimate
   3852      * calculated by the camera device's statistics units for the current
   3853      * output frame.</p>
   3854      * <p>The camera device will provide the estimate from its
   3855      * statistics unit on the white balance transforms to use
   3856      * for the next frame. These are the values the camera device believes
   3857      * are the best fit for the current output frame. This may
   3858      * be different than the transform used for this frame, since
   3859      * statistics processing on data from a new frame typically
   3860      * completes after the transform has already been applied to
   3861      * that frame.</p>
   3862      * <p>These estimates must be provided for all frames, even if
   3863      * capture settings and color transforms are set by the application.</p>
   3864      * <p>This value should always be calculated by the auto-white balance (AWB) block,
   3865      * regardless of the android.control.* current values.</p>
   3866      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   3867      * @deprecated
   3868      * <p>Never fully implemented or specified; do not use</p>
    3869      *
   3870      * @hide
   3871      */
   3872     @Deprecated
   3873     public static final Key<Rational[]> STATISTICS_PREDICTED_COLOR_TRANSFORM =
   3874             new Key<Rational[]>("android.statistics.predictedColorTransform", Rational[].class);
   3875 
   3876     /**
   3877      * <p>The camera device estimated scene illumination lighting
   3878      * frequency.</p>
   3879      * <p>Many light sources, such as most fluorescent lights, flicker at a rate
   3880      * that depends on the local utility power standards. This flicker must be
   3881      * accounted for by auto-exposure routines to avoid artifacts in captured images.
   3882      * The camera device uses this entry to tell the application what the scene
   3883      * illuminant frequency is.</p>
   3884      * <p>When manual exposure control is enabled
   3885      * (<code>{@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} == OFF</code> or <code>{@link CaptureRequest#CONTROL_MODE android.control.mode} ==
   3886      * OFF</code>), the {@link CaptureRequest#CONTROL_AE_ANTIBANDING_MODE android.control.aeAntibandingMode} doesn't perform
   3887      * antibanding, and the application can ensure it selects
   3888      * exposure times that do not cause banding issues by looking
   3889      * into this metadata field. See
   3890      * {@link CaptureRequest#CONTROL_AE_ANTIBANDING_MODE android.control.aeAntibandingMode} for more details.</p>
   3891      * <p>Reports NONE if there doesn't appear to be flickering illumination.</p>
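              * <p>For example, under manual exposure control an application might pick exposure
              * times that are integer multiples of the flicker period (illustrative sketch;
              * assumes a {@link TotalCaptureResult} named {@code result}):</p>
              * <pre><code>
              * Integer flicker = result.get(CaptureResult.STATISTICS_SCENE_FLICKER);
              * if (flicker != null &amp;&amp; flicker == CameraMetadata.STATISTICS_SCENE_FLICKER_50HZ) {
              *     // Prefer exposure times that are multiples of 10,000,000 ns (1/100 s)
              *     // to avoid banding under 50 Hz illumination.
              * }
              * </code></pre>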
   3892      * <p><b>Possible values:</b>
   3893      * <ul>
   3894      *   <li>{@link #STATISTICS_SCENE_FLICKER_NONE NONE}</li>
   3895      *   <li>{@link #STATISTICS_SCENE_FLICKER_50HZ 50HZ}</li>
   3896      *   <li>{@link #STATISTICS_SCENE_FLICKER_60HZ 60HZ}</li>
   3897      * </ul></p>
   3898      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   3899      * <p><b>Full capability</b> -
   3900      * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
   3901      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
   3902      *
   3903      * @see CaptureRequest#CONTROL_AE_ANTIBANDING_MODE
   3904      * @see CaptureRequest#CONTROL_AE_MODE
   3905      * @see CaptureRequest#CONTROL_MODE
   3906      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
   3907      * @see #STATISTICS_SCENE_FLICKER_NONE
   3908      * @see #STATISTICS_SCENE_FLICKER_50HZ
   3909      * @see #STATISTICS_SCENE_FLICKER_60HZ
   3910      */
   3911     @PublicKey
   3912     public static final Key<Integer> STATISTICS_SCENE_FLICKER =
   3913             new Key<Integer>("android.statistics.sceneFlicker", int.class);
   3914 
   3915     /**
   3916      * <p>Operating mode for hot pixel map generation.</p>
   3917      * <p>If set to <code>true</code>, a hot pixel map is returned in {@link CaptureResult#STATISTICS_HOT_PIXEL_MAP android.statistics.hotPixelMap}.
   3918      * If set to <code>false</code>, no hot pixel map will be returned.</p>
   3919      * <p><b>Range of valid values:</b><br>
   3920      * {@link CameraCharacteristics#STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES android.statistics.info.availableHotPixelMapModes}</p>
   3921      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   3922      *
   3923      * @see CaptureResult#STATISTICS_HOT_PIXEL_MAP
   3924      * @see CameraCharacteristics#STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES
   3925      */
   3926     @PublicKey
   3927     public static final Key<Boolean> STATISTICS_HOT_PIXEL_MAP_MODE =
   3928             new Key<Boolean>("android.statistics.hotPixelMapMode", boolean.class);
   3929 
   3930     /**
   3931      * <p>List of <code>(x, y)</code> coordinates of hot/defective pixels on the sensor.</p>
    3932      * <p>A coordinate <code>(x, y)</code> must lie between <code>(0, 0)</code> and
   3933      * <code>(width - 1, height - 1)</code> (inclusive), which are the top-left and
   3934      * bottom-right of the pixel array, respectively. The width and
   3935      * height dimensions are given in {@link CameraCharacteristics#SENSOR_INFO_PIXEL_ARRAY_SIZE android.sensor.info.pixelArraySize}.
   3936      * This may include hot pixels that lie outside of the active array
   3937      * bounds given by {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.</p>
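              * <p>A minimal sketch of requesting and reading the hot pixel map (assumes a
              * {@link CaptureRequest.Builder} named {@code builder} and a
              * {@link TotalCaptureResult} named {@code result}):</p>
              * <pre><code>
              * builder.set(CaptureRequest.STATISTICS_HOT_PIXEL_MAP_MODE, true);
              * // ... submit the request, then in the capture callback:
              * android.graphics.Point[] hotPixels = result.get(CaptureResult.STATISTICS_HOT_PIXEL_MAP);
              * if (hotPixels != null) {
              *     // Each Point is an (x, y) pixel array coordinate of a hot/defective pixel.
              * }
              * </code></pre>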
   3938      * <p><b>Range of valid values:</b><br></p>
   3939      * <p>n &lt;= number of pixels on the sensor.
   3940      * The <code>(x, y)</code> coordinates must be bounded by
   3941      * {@link CameraCharacteristics#SENSOR_INFO_PIXEL_ARRAY_SIZE android.sensor.info.pixelArraySize}.</p>
   3942      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   3943      *
   3944      * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
   3945      * @see CameraCharacteristics#SENSOR_INFO_PIXEL_ARRAY_SIZE
   3946      */
   3947     @PublicKey
   3948     public static final Key<android.graphics.Point[]> STATISTICS_HOT_PIXEL_MAP =
   3949             new Key<android.graphics.Point[]>("android.statistics.hotPixelMap", android.graphics.Point[].class);
   3950 
   3951     /**
   3952      * <p>Whether the camera device will output the lens
   3953      * shading map in output result metadata.</p>
   3954      * <p>When set to ON,
   3955      * android.statistics.lensShadingMap will be provided in
   3956      * the output result metadata.</p>
   3957      * <p>ON is always supported on devices with the RAW capability.</p>
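              * <p>For example (illustrative sketch; assumes a {@link CaptureRequest.Builder}
              * named {@code builder}):</p>
              * <pre><code>
              * builder.set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE,
              *         CameraMetadata.STATISTICS_LENS_SHADING_MAP_MODE_ON);
              * // The corresponding results will then contain
              * // CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP.
              * </code></pre>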
   3958      * <p><b>Possible values:</b>
   3959      * <ul>
   3960      *   <li>{@link #STATISTICS_LENS_SHADING_MAP_MODE_OFF OFF}</li>
   3961      *   <li>{@link #STATISTICS_LENS_SHADING_MAP_MODE_ON ON}</li>
   3962      * </ul></p>
   3963      * <p><b>Available values for this device:</b><br>
   3964      * {@link CameraCharacteristics#STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES android.statistics.info.availableLensShadingMapModes}</p>
   3965      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   3966      * <p><b>Full capability</b> -
   3967      * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
   3968      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
   3969      *
   3970      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
   3971      * @see CameraCharacteristics#STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES
   3972      * @see #STATISTICS_LENS_SHADING_MAP_MODE_OFF
   3973      * @see #STATISTICS_LENS_SHADING_MAP_MODE_ON
   3974      */
   3975     @PublicKey
   3976     public static final Key<Integer> STATISTICS_LENS_SHADING_MAP_MODE =
   3977             new Key<Integer>("android.statistics.lensShadingMapMode", int.class);
   3978 
   3979     /**
   3980      * <p>A control for selecting whether OIS position information is included in output
   3981      * result metadata.</p>
   3982      * <p><b>Possible values:</b>
   3983      * <ul>
   3984      *   <li>{@link #STATISTICS_OIS_DATA_MODE_OFF OFF}</li>
   3985      *   <li>{@link #STATISTICS_OIS_DATA_MODE_ON ON}</li>
   3986      * </ul></p>
   3987      * <p><b>Available values for this device:</b><br>
   3988      * {@link CameraCharacteristics#STATISTICS_INFO_AVAILABLE_OIS_DATA_MODES android.statistics.info.availableOisDataModes}</p>
   3989      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   3990      *
   3991      * @see CameraCharacteristics#STATISTICS_INFO_AVAILABLE_OIS_DATA_MODES
   3992      * @see #STATISTICS_OIS_DATA_MODE_OFF
   3993      * @see #STATISTICS_OIS_DATA_MODE_ON
   3994      */
   3995     @PublicKey
   3996     public static final Key<Integer> STATISTICS_OIS_DATA_MODE =
   3997             new Key<Integer>("android.statistics.oisDataMode", int.class);
   3998 
   3999     /**
   4000      * <p>An array of timestamps of OIS samples, in nanoseconds.</p>
   4001      * <p>The array contains the timestamps of OIS samples. The timestamps are in the same
   4002      * timebase as and comparable to {@link CaptureResult#SENSOR_TIMESTAMP android.sensor.timestamp}.</p>
   4003      * <p><b>Units</b>: nanoseconds</p>
   4004      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   4005      *
   4006      * @see CaptureResult#SENSOR_TIMESTAMP
   4007      * @hide
   4008      */
   4009     public static final Key<long[]> STATISTICS_OIS_TIMESTAMPS =
   4010             new Key<long[]>("android.statistics.oisTimestamps", long[].class);
   4011 
   4012     /**
   4013      * <p>An array of shifts of OIS samples, in x direction.</p>
    4014      * <p>The array contains the amount of shift in the x direction, in pixels, based on OIS samples.
    4015      * A positive value is a shift from left to right in the active array coordinate system. For
   4016      * example, if the optical center is (1000, 500) in active array coordinates, a shift of
   4017      * (3, 0) puts the new optical center at (1003, 500).</p>
   4018      * <p>The number of shifts must match the number of timestamps in
   4019      * android.statistics.oisTimestamps.</p>
   4020      * <p><b>Units</b>: Pixels in active array.</p>
   4021      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   4022      * @hide
   4023      */
   4024     public static final Key<float[]> STATISTICS_OIS_X_SHIFTS =
   4025             new Key<float[]>("android.statistics.oisXShifts", float[].class);
   4026 
   4027     /**
   4028      * <p>An array of shifts of OIS samples, in y direction.</p>
    4029      * <p>The array contains the amount of shift in the y direction, in pixels, based on OIS samples.
    4030      * A positive value is a shift from top to bottom in the active array coordinate system. For
   4031      * example, if the optical center is (1000, 500) in active array coordinates, a shift of
   4032      * (0, 5) puts the new optical center at (1000, 505).</p>
   4033      * <p>The number of shifts must match the number of timestamps in
   4034      * android.statistics.oisTimestamps.</p>
   4035      * <p><b>Units</b>: Pixels in active array.</p>
   4036      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   4037      * @hide
   4038      */
   4039     public static final Key<float[]> STATISTICS_OIS_Y_SHIFTS =
   4040             new Key<float[]>("android.statistics.oisYShifts", float[].class);
   4041 
   4042     /**
   4043      * <p>An array of OIS samples.</p>
    4044      * <p>Each OIS sample contains the timestamp and the amount of shift in the x and y directions,
    4045      * in pixels, of that sample.</p>
    4046      * <p>A positive value for a shift in the x direction is a shift from left to right in the active
    4047      * array coordinate system. For example, if the optical center is (1000, 500) in active array
    4048      * coordinates, a shift of (3, 0) puts the new optical center at (1003, 500).</p>
    4049      * <p>A positive value for a shift in the y direction is a shift from top to bottom in the active
    4050      * array coordinate system. For example, if the optical center is (1000, 500) in active array
   4051      * coordinates, a shift of (0, 5) puts the new optical center at (1000, 505).</p>
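              * <p>A minimal sketch of reading the OIS samples (assumes a {@link TotalCaptureResult}
              * named {@code result}, with OIS data enabled via
              * {@link CaptureRequest#STATISTICS_OIS_DATA_MODE android.statistics.oisDataMode}):</p>
              * <pre><code>
              * OisSample[] samples = result.get(CaptureResult.STATISTICS_OIS_SAMPLES);
              * if (samples != null) {
              *     for (OisSample sample : samples) {
              *         long timestampNs = sample.getTimestamp(); // same timebase as SENSOR_TIMESTAMP
              *         float xShift = sample.getXshift();        // pixels in active array
              *         float yShift = sample.getYshift();        // pixels in active array
              *     }
              * }
              * </code></pre>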
   4052      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   4053      */
   4054     @PublicKey
   4055     @SyntheticKey
   4056     public static final Key<android.hardware.camera2.params.OisSample[]> STATISTICS_OIS_SAMPLES =
   4057             new Key<android.hardware.camera2.params.OisSample[]>("android.statistics.oisSamples", android.hardware.camera2.params.OisSample[].class);
   4058 
   4059     /**
   4060      * <p>Tonemapping / contrast / gamma curve for the blue
   4061      * channel, to use when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} is
   4062      * CONTRAST_CURVE.</p>
   4063      * <p>See android.tonemap.curveRed for more details.</p>
   4064      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   4065      * <p><b>Full capability</b> -
   4066      * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
   4067      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
   4068      *
   4069      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
   4070      * @see CaptureRequest#TONEMAP_MODE
   4071      * @hide
   4072      */
   4073     public static final Key<float[]> TONEMAP_CURVE_BLUE =
   4074             new Key<float[]>("android.tonemap.curveBlue", float[].class);
   4075 
   4076     /**
   4077      * <p>Tonemapping / contrast / gamma curve for the green
   4078      * channel, to use when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} is
   4079      * CONTRAST_CURVE.</p>
   4080      * <p>See android.tonemap.curveRed for more details.</p>
   4081      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   4082      * <p><b>Full capability</b> -
   4083      * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
   4084      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
   4085      *
   4086      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
   4087      * @see CaptureRequest#TONEMAP_MODE
   4088      * @hide
   4089      */
   4090     public static final Key<float[]> TONEMAP_CURVE_GREEN =
   4091             new Key<float[]>("android.tonemap.curveGreen", float[].class);
   4092 
   4093     /**
   4094      * <p>Tonemapping / contrast / gamma curve for the red
   4095      * channel, to use when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} is
   4096      * CONTRAST_CURVE.</p>
   4097      * <p>Each channel's curve is defined by an array of control points:</p>
   4098      * <pre><code>android.tonemap.curveRed =
   4099      *   [ P0in, P0out, P1in, P1out, P2in, P2out, P3in, P3out, ..., PNin, PNout ]
   4100      * 2 &lt;= N &lt;= {@link CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS android.tonemap.maxCurvePoints}</code></pre>
   4101      * <p>These are sorted in order of increasing <code>Pin</code>; it is
   4102      * required that input values 0.0 and 1.0 are included in the list to
   4103      * define a complete mapping. For input values between control points,
   4104      * the camera device must linearly interpolate between the control
   4105      * points.</p>
   4106      * <p>Each curve can have an independent number of points, and the number
   4107      * of points can be less than max (that is, the request doesn't have to
   4108      * always provide a curve with number of points equivalent to
   4109      * {@link CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS android.tonemap.maxCurvePoints}).</p>
    4110      * <p>For devices with MONOCHROME capability, only the red channel is used. The green and blue
    4111      * channels are ignored.</p>
   4112      * <p>A few examples, and their corresponding graphical mappings; these
   4113      * only specify the red channel and the precision is limited to 4
   4114      * digits, for conciseness.</p>
   4115      * <p>Linear mapping:</p>
   4116      * <pre><code>android.tonemap.curveRed = [ 0, 0, 1.0, 1.0 ]
   4117      * </code></pre>
   4118      * <p><img alt="Linear mapping curve" src="/reference/images/camera2/metadata/android.tonemap.curveRed/linear_tonemap.png" /></p>
   4119      * <p>Invert mapping:</p>
   4120      * <pre><code>android.tonemap.curveRed = [ 0, 1.0, 1.0, 0 ]
   4121      * </code></pre>
   4122      * <p><img alt="Inverting mapping curve" src="/reference/images/camera2/metadata/android.tonemap.curveRed/inverse_tonemap.png" /></p>
   4123      * <p>Gamma 1/2.2 mapping, with 16 control points:</p>
   4124      * <pre><code>android.tonemap.curveRed = [
   4125      *   0.0000, 0.0000, 0.0667, 0.2920, 0.1333, 0.4002, 0.2000, 0.4812,
   4126      *   0.2667, 0.5484, 0.3333, 0.6069, 0.4000, 0.6594, 0.4667, 0.7072,
   4127      *   0.5333, 0.7515, 0.6000, 0.7928, 0.6667, 0.8317, 0.7333, 0.8685,
   4128      *   0.8000, 0.9035, 0.8667, 0.9370, 0.9333, 0.9691, 1.0000, 1.0000 ]
   4129      * </code></pre>
   4130      * <p><img alt="Gamma = 1/2.2 tonemapping curve" src="/reference/images/camera2/metadata/android.tonemap.curveRed/gamma_tonemap.png" /></p>
   4131      * <p>Standard sRGB gamma mapping, per IEC 61966-2-1:1999, with 16 control points:</p>
   4132      * <pre><code>android.tonemap.curveRed = [
   4133      *   0.0000, 0.0000, 0.0667, 0.2864, 0.1333, 0.4007, 0.2000, 0.4845,
   4134      *   0.2667, 0.5532, 0.3333, 0.6125, 0.4000, 0.6652, 0.4667, 0.7130,
   4135      *   0.5333, 0.7569, 0.6000, 0.7977, 0.6667, 0.8360, 0.7333, 0.8721,
   4136      *   0.8000, 0.9063, 0.8667, 0.9389, 0.9333, 0.9701, 1.0000, 1.0000 ]
   4137      * </code></pre>
   4138      * <p><img alt="sRGB tonemapping curve" src="/reference/images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png" /></p>
   4139      * <p><b>Range of valid values:</b><br>
   4140      * 0-1 on both input and output coordinates, normalized
   4141      * as a floating-point value such that 0 == black and 1 == white.</p>
   4142      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   4143      * <p><b>Full capability</b> -
   4144      * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
   4145      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
   4146      *
   4147      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
   4148      * @see CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS
   4149      * @see CaptureRequest#TONEMAP_MODE
   4150      * @hide
   4151      */
   4152     public static final Key<float[]> TONEMAP_CURVE_RED =
   4153             new Key<float[]>("android.tonemap.curveRed", float[].class);
   4154 
   4155     /**
   4156      * <p>Tonemapping / contrast / gamma curve to use when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode}
   4157      * is CONTRAST_CURVE.</p>
    4158      * <p>The tonemapCurve consists of three curves, one each for the red, green, and blue
    4159      * channels. The following examples use the red channel; the same logic applies to the
    4160      * green and blue channels.
   4161      * Each channel's curve is defined by an array of control points:</p>
   4162      * <pre><code>curveRed =
   4163      *   [ P0(in, out), P1(in, out), P2(in, out), P3(in, out), ..., PN(in, out) ]
   4164      * 2 &lt;= N &lt;= {@link CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS android.tonemap.maxCurvePoints}</code></pre>
   4165      * <p>These are sorted in order of increasing <code>Pin</code>; it is always
   4166      * guaranteed that input values 0.0 and 1.0 are included in the list to
   4167      * define a complete mapping. For input values between control points,
   4168      * the camera device must linearly interpolate between the control
   4169      * points.</p>
   4170      * <p>Each curve can have an independent number of points, and the number
   4171      * of points can be less than max (that is, the request doesn't have to
   4172      * always provide a curve with number of points equivalent to
   4173      * {@link CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS android.tonemap.maxCurvePoints}).</p>
    4174      * <p>For devices with MONOCHROME capability, only the red channel is used. The green and blue
    4175      * channels are ignored.</p>
   4176      * <p>A few examples, and their corresponding graphical mappings; these
   4177      * only specify the red channel and the precision is limited to 4
   4178      * digits, for conciseness.</p>
   4179      * <p>Linear mapping:</p>
   4180      * <pre><code>curveRed = [ (0, 0), (1.0, 1.0) ]
   4181      * </code></pre>
   4182      * <p><img alt="Linear mapping curve" src="/reference/images/camera2/metadata/android.tonemap.curveRed/linear_tonemap.png" /></p>
   4183      * <p>Invert mapping:</p>
   4184      * <pre><code>curveRed = [ (0, 1.0), (1.0, 0) ]
   4185      * </code></pre>
   4186      * <p><img alt="Inverting mapping curve" src="/reference/images/camera2/metadata/android.tonemap.curveRed/inverse_tonemap.png" /></p>
   4187      * <p>Gamma 1/2.2 mapping, with 16 control points:</p>
   4188      * <pre><code>curveRed = [
   4189      *   (0.0000, 0.0000), (0.0667, 0.2920), (0.1333, 0.4002), (0.2000, 0.4812),
   4190      *   (0.2667, 0.5484), (0.3333, 0.6069), (0.4000, 0.6594), (0.4667, 0.7072),
   4191      *   (0.5333, 0.7515), (0.6000, 0.7928), (0.6667, 0.8317), (0.7333, 0.8685),
   4192      *   (0.8000, 0.9035), (0.8667, 0.9370), (0.9333, 0.9691), (1.0000, 1.0000) ]
   4193      * </code></pre>
   4194      * <p><img alt="Gamma = 1/2.2 tonemapping curve" src="/reference/images/camera2/metadata/android.tonemap.curveRed/gamma_tonemap.png" /></p>
   4195      * <p>Standard sRGB gamma mapping, per IEC 61966-2-1:1999, with 16 control points:</p>
   4196      * <pre><code>curveRed = [
   4197      *   (0.0000, 0.0000), (0.0667, 0.2864), (0.1333, 0.4007), (0.2000, 0.4845),
   4198      *   (0.2667, 0.5532), (0.3333, 0.6125), (0.4000, 0.6652), (0.4667, 0.7130),
   4199      *   (0.5333, 0.7569), (0.6000, 0.7977), (0.6667, 0.8360), (0.7333, 0.8721),
   4200      *   (0.8000, 0.9063), (0.8667, 0.9389), (0.9333, 0.9701), (1.0000, 1.0000) ]
   4201      * </code></pre>
   4202      * <p><img alt="sRGB tonemapping curve" src="/reference/images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png" /></p>
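              * <p>A minimal sketch of reading back the curve actually used by the camera device
              * (assumes a {@link TotalCaptureResult} named {@code result}, for example with
              * {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} set to FAST or HIGH_QUALITY):</p>
              * <pre><code>
              * TonemapCurve curve = result.get(CaptureResult.TONEMAP_CURVE);
              * if (curve != null) {
              *     int points = curve.getPointCount(TonemapCurve.CHANNEL_RED);
              *     android.graphics.PointF first = curve.getPoint(TonemapCurve.CHANNEL_RED, 0); // (Pin, Pout)
              * }
              * </code></pre>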
   4203      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   4204      * <p><b>Full capability</b> -
   4205      * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
   4206      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
   4207      *
   4208      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
   4209      * @see CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS
   4210      * @see CaptureRequest#TONEMAP_MODE
   4211      */
   4212     @PublicKey
   4213     @SyntheticKey
   4214     public static final Key<android.hardware.camera2.params.TonemapCurve> TONEMAP_CURVE =
   4215             new Key<android.hardware.camera2.params.TonemapCurve>("android.tonemap.curve", android.hardware.camera2.params.TonemapCurve.class);
   4216 
   4217     /**
   4218      * <p>High-level global contrast/gamma/tonemapping control.</p>
   4219      * <p>When switching to an application-defined contrast curve by setting
   4220      * {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} to CONTRAST_CURVE, the curve is defined
   4221      * per-channel with a set of <code>(in, out)</code> points that specify the
   4222      * mapping from input high-bit-depth pixel value to the output
   4223      * low-bit-depth value.  Since the actual pixel ranges of both input
   4224      * and output may change depending on the camera pipeline, the values
   4225      * are specified by normalized floating-point numbers.</p>
   4226      * <p>More-complex color mapping operations such as 3D color look-up
   4227      * tables, selective chroma enhancement, or other non-linear color
   4228      * transforms will be disabled when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} is
   4229      * CONTRAST_CURVE.</p>
   4230      * <p>When using either FAST or HIGH_QUALITY, the camera device will
   4231      * emit its own tonemap curve in {@link CaptureRequest#TONEMAP_CURVE android.tonemap.curve}.
   4232      * These values are always available, and as close as possible to the
   4233      * actually used nonlinear/nonglobal transforms.</p>
   4234      * <p>If a request is sent with CONTRAST_CURVE with the camera device's
   4235      * provided curve in FAST or HIGH_QUALITY, the image's tonemap will be
   4236      * roughly the same.</p>
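              * <p>For example, an application could switch to its own linear curve like this
              * (illustrative sketch; assumes a {@link CaptureRequest.Builder} named {@code builder}):</p>
              * <pre><code>
              * float[] linear = { 0.0f, 0.0f, 1.0f, 1.0f }; // (Pin, Pout) pairs
              * TonemapCurve curve = new TonemapCurve(linear, linear, linear);
              * builder.set(CaptureRequest.TONEMAP_MODE, CameraMetadata.TONEMAP_MODE_CONTRAST_CURVE);
              * builder.set(CaptureRequest.TONEMAP_CURVE, curve);
              * </code></pre>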
   4237      * <p><b>Possible values:</b>
   4238      * <ul>
   4239      *   <li>{@link #TONEMAP_MODE_CONTRAST_CURVE CONTRAST_CURVE}</li>
   4240      *   <li>{@link #TONEMAP_MODE_FAST FAST}</li>
   4241      *   <li>{@link #TONEMAP_MODE_HIGH_QUALITY HIGH_QUALITY}</li>
   4242      *   <li>{@link #TONEMAP_MODE_GAMMA_VALUE GAMMA_VALUE}</li>
   4243      *   <li>{@link #TONEMAP_MODE_PRESET_CURVE PRESET_CURVE}</li>
   4244      * </ul></p>
   4245      * <p><b>Available values for this device:</b><br>
   4246      * {@link CameraCharacteristics#TONEMAP_AVAILABLE_TONE_MAP_MODES android.tonemap.availableToneMapModes}</p>
   4247      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   4248      * <p><b>Full capability</b> -
   4249      * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
   4250      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
   4251      *
   4252      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
   4253      * @see CameraCharacteristics#TONEMAP_AVAILABLE_TONE_MAP_MODES
   4254      * @see CaptureRequest#TONEMAP_CURVE
   4255      * @see CaptureRequest#TONEMAP_MODE
   4256      * @see #TONEMAP_MODE_CONTRAST_CURVE
   4257      * @see #TONEMAP_MODE_FAST
   4258      * @see #TONEMAP_MODE_HIGH_QUALITY
   4259      * @see #TONEMAP_MODE_GAMMA_VALUE
   4260      * @see #TONEMAP_MODE_PRESET_CURVE
   4261      */
   4262     @PublicKey
   4263     public static final Key<Integer> TONEMAP_MODE =
   4264             new Key<Integer>("android.tonemap.mode", int.class);
   4265 
   4266     /**
   4267      * <p>Tonemapping curve to use when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} is
   4268      * GAMMA_VALUE</p>
    4269      * <p>The tonemap curve will be defined by the following formula:</p>
    4270      * <pre><code>OUT = pow(IN, 1.0 / gamma)</code></pre>
    4271      * <p>where IN and OUT are the input and output pixel values scaled to the range [0.0, 1.0],
    4272      * pow is the power function, and gamma is the gamma value specified by this
    4273      * key.</p>
    4274      * <p>The same curve will be applied to all color channels. The camera device
    4275      * may clip the input gamma value to its supported range. The actual applied
    4276      * value will be returned in the capture result.</p>
   4277      * <p>The valid range of gamma value varies on different devices, but values
   4278      * within [1.0, 5.0] are guaranteed not to be clipped.</p>
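              * <p>For example (illustrative sketch; assumes a {@link CaptureRequest.Builder}
              * named {@code builder}):</p>
              * <pre><code>
              * builder.set(CaptureRequest.TONEMAP_MODE, CameraMetadata.TONEMAP_MODE_GAMMA_VALUE);
              * builder.set(CaptureRequest.TONEMAP_GAMMA, 2.2f);
              * // The value actually applied (possibly clipped) is reported back in
              * // CaptureResult.TONEMAP_GAMMA of the corresponding result.
              * </code></pre>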
   4279      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   4280      *
   4281      * @see CaptureRequest#TONEMAP_MODE
   4282      */
   4283     @PublicKey
   4284     public static final Key<Float> TONEMAP_GAMMA =
   4285             new Key<Float>("android.tonemap.gamma", float.class);
   4286 
   4287     /**
   4288      * <p>Tonemapping curve to use when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} is
   4289      * PRESET_CURVE</p>
    4290      * <p>The tonemap curve will be defined by the specified standard.</p>
   4291      * <p>sRGB (approximated by 16 control points):</p>
   4292      * <p><img alt="sRGB tonemapping curve" src="/reference/images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png" /></p>
   4293      * <p>Rec. 709 (approximated by 16 control points):</p>
   4294      * <p><img alt="Rec. 709 tonemapping curve" src="/reference/images/camera2/metadata/android.tonemap.curveRed/rec709_tonemap.png" /></p>
    4295      * <p>Note that the above figures show a 16-control-point approximation of the preset
    4296      * curves. Camera devices may apply a different approximation to the curve.</p>
   4297      * <p><b>Possible values:</b>
   4298      * <ul>
   4299      *   <li>{@link #TONEMAP_PRESET_CURVE_SRGB SRGB}</li>
   4300      *   <li>{@link #TONEMAP_PRESET_CURVE_REC709 REC709}</li>
   4301      * </ul></p>
   4302      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   4303      *
   4304      * @see CaptureRequest#TONEMAP_MODE
   4305      * @see #TONEMAP_PRESET_CURVE_SRGB
   4306      * @see #TONEMAP_PRESET_CURVE_REC709
   4307      */
   4308     @PublicKey
   4309     public static final Key<Integer> TONEMAP_PRESET_CURVE =
   4310             new Key<Integer>("android.tonemap.presetCurve", int.class);
   4311 
   4312     /**
   4313      * <p>This LED is nominally used to indicate to the user
   4314      * that the camera is powered on and may be streaming images back to the
   4315      * Application Processor. In certain rare circumstances, the OS may
   4316      * disable this when video is processed locally and not transmitted to
   4317      * any untrusted applications.</p>
   4318      * <p>In particular, the LED <em>must</em> always be on when the data could be
   4319      * transmitted off the device. The LED <em>should</em> always be on whenever
   4320      * data is stored locally on the device.</p>
   4321      * <p>The LED <em>may</em> be off if a trusted application is using the data that
   4322      * doesn't violate the above rules.</p>
   4323      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   4324      * @hide
   4325      */
   4326     public static final Key<Boolean> LED_TRANSMIT =
   4327             new Key<Boolean>("android.led.transmit", boolean.class);
   4328 
   4329     /**
   4330      * <p>Whether black-level compensation is locked
   4331      * to its current values, or is free to vary.</p>
   4332      * <p>Whether the black level offset was locked for this frame.  Should be
   4333      * ON if {@link CaptureRequest#BLACK_LEVEL_LOCK android.blackLevel.lock} was ON in the capture request, unless
   4334      * a change in other capture settings forced the camera device to
   4335      * perform a black level reset.</p>
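              * <p>For example (illustrative sketch; assumes a {@link CaptureRequest.Builder} named
              * {@code builder} and a {@link TotalCaptureResult} named {@code result}):</p>
              * <pre><code>
              * builder.set(CaptureRequest.BLACK_LEVEL_LOCK, true);
              * // ... later, in the capture callback, check whether the lock was honored
              * // for this frame:
              * Boolean locked = result.get(CaptureResult.BLACK_LEVEL_LOCK);
              * </code></pre>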
   4336      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   4337      * <p><b>Full capability</b> -
   4338      * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
   4339      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
   4340      *
   4341      * @see CaptureRequest#BLACK_LEVEL_LOCK
   4342      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
   4343      */
   4344     @PublicKey
   4345     public static final Key<Boolean> BLACK_LEVEL_LOCK =
   4346             new Key<Boolean>("android.blackLevel.lock", boolean.class);
   4347 
   4348     /**
   4349      * <p>The frame number corresponding to the last request
   4350      * with which the output result (metadata + buffers) has been fully
   4351      * synchronized.</p>
   4352      * <p>When a request is submitted to the camera device, there is usually a
   4353      * delay of several frames before the controls get applied. A camera
   4354      * device may either choose to account for this delay by implementing a
   4355      * pipeline and carefully submit well-timed atomic control updates, or
    4356      * pipeline and carefully submitting well-timed atomic control updates, or
   4357      * boundaries.</p>
   4358      * <p>In the latter case, whenever a request's settings change relative to
   4359      * the previous submitted request, the full set of changes may take
   4360      * multiple frame durations to fully take effect. Some settings may
   4361      * take effect sooner (in less frame durations) than others.</p>
   4362      * <p>While a set of control changes are being propagated, this value
   4363      * will be CONVERGING.</p>
   4364      * <p>Once it is fully known that a set of control changes have been
   4365      * finished propagating, and the resulting updated control settings
   4366      * have been read back by the camera device, this value will be set
   4367      * to a non-negative frame number (corresponding to the request to
    4368      * which the results have synchronized).</p>
   4369      * <p>Older camera device implementations may not have a way to detect
   4370      * when all camera controls have been applied, and will always set this
   4371      * value to UNKNOWN.</p>
   4372      * <p>FULL capability devices will always have this value set to the
   4373      * frame number of the request corresponding to this result.</p>
   4374      * <p><em>Further details</em>:</p>
   4375      * <ul>
   4376      * <li>Whenever a request differs from the last request, any future
   4377      * results not yet returned may have this value set to CONVERGING (this
   4378      * could include any in-progress captures not yet returned by the camera
   4379      * device, for more details see pipeline considerations below).</li>
   4380      * <li>Submitting a series of multiple requests that differ from the
   4381      * previous request (e.g. r1, r2, r3 s.t. r1 != r2 != r3)
   4382      * moves the new synchronization frame to the last non-repeating
   4383      * request (using the smallest frame number from the contiguous list of
   4384      * repeating requests).</li>
   4385      * <li>Submitting the same request repeatedly will not change this value
   4386      * to CONVERGING, if it was already a non-negative value.</li>
   4387      * <li>When this value changes to non-negative, that means that all of the
   4388      * metadata controls from the request have been applied, all of the
   4389      * metadata controls from the camera device have been read to the
   4390      * updated values (into the result), and all of the graphics buffers
   4391      * corresponding to this result are also synchronized to the request.</li>
   4392      * </ul>
   4393      * <p><em>Pipeline considerations</em>:</p>
   4394      * <p>Submitting a request with updated controls relative to the previously
   4395      * submitted requests may also invalidate the synchronization state
   4396      * of all the results corresponding to currently in-flight requests.</p>
   4397      * <p>In other words, results for this current request and up to
   4398      * {@link CameraCharacteristics#REQUEST_PIPELINE_MAX_DEPTH android.request.pipelineMaxDepth} prior requests may have their
   4399      * android.sync.frameNumber change to CONVERGING.</p>
   4400      * <p><b>Possible values:</b>
   4401      * <ul>
   4402      *   <li>{@link #SYNC_FRAME_NUMBER_CONVERGING CONVERGING}</li>
   4403      *   <li>{@link #SYNC_FRAME_NUMBER_UNKNOWN UNKNOWN}</li>
   4404      * </ul></p>
   4405      * <p><b>Available values for this device:</b><br>
   4406      * Either a non-negative value corresponding to a
   4407      * <code>frame_number</code>, or one of the two enums (CONVERGING / UNKNOWN).</p>
   4408      * <p>This key is available on all devices.</p>
   4409      *
   4410      * @see CameraCharacteristics#REQUEST_PIPELINE_MAX_DEPTH
   4411      * @see #SYNC_FRAME_NUMBER_CONVERGING
   4412      * @see #SYNC_FRAME_NUMBER_UNKNOWN
   4413      * @hide
   4414      */
   4415     public static final Key<Long> SYNC_FRAME_NUMBER =
   4416             new Key<Long>("android.sync.frameNumber", long.class);
   4417 
   4418     /**
   4419      * <p>The amount of exposure time increase factor applied to the original output
   4420      * frame by the application processing before sending for reprocessing.</p>
    4421      * <p>This is optional, and will be supported if the camera device supports the YUV_REPROCESSING
   4422      * capability ({@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} contains YUV_REPROCESSING).</p>
   4423      * <p>For some YUV reprocessing use cases, the application may choose to filter the original
   4424      * output frames to effectively reduce the noise to the same level as a frame that was
   4425      * captured with longer exposure time. To be more specific, assuming the original captured
   4426      * images were captured with a sensitivity of S and an exposure time of T, the model in
   4427      * the camera device is that the amount of noise in the image would be approximately what
   4428      * would be expected if the original capture parameters had been a sensitivity of
   4429      * S/effectiveExposureFactor and an exposure time of T*effectiveExposureFactor, rather
   4430      * than S and T respectively. If the captured images were processed by the application
   4431      * before being sent for reprocessing, then the application may have used image processing
   4432      * algorithms and/or multi-frame image fusion to reduce the noise in the
   4433      * application-processed images (input images). By using the effectiveExposureFactor
   4434      * control, the application can communicate to the camera device the actual noise level
   4435      * improvement in the application-processed image. With this information, the camera
   4436      * device can select appropriate noise reduction and edge enhancement parameters to avoid
   4437      * excessive noise reduction ({@link CaptureRequest#NOISE_REDUCTION_MODE android.noiseReduction.mode}) and insufficient edge
   4438      * enhancement ({@link CaptureRequest#EDGE_MODE android.edge.mode}) being applied to the reprocessed frames.</p>
    4439      * <p>For example, for the multi-frame image fusion use case, the application may fuse
    4440      * multiple output frames together into a final frame for reprocessing. When N images are
    4441      * fused into 1 image for reprocessing, the exposure time increase factor could be up to
    4442      * the square root of N (based on a simple photon shot noise model). The camera device will
   4443      * adjust the reprocessing noise reduction and edge enhancement parameters accordingly to
   4444      * produce the best quality images.</p>
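              * <p>For instance, if 4 frames are fused into one input frame, the noise reduction is
              * roughly what a doubled exposure time would give, so the application could report
              * (illustrative sketch; assumes a reprocess {@link CaptureRequest.Builder} named
              * {@code reprocessBuilder}):</p>
              * <pre><code>
              * int fusedFrameCount = 4;
              * float factor = (float) Math.sqrt(fusedFrameCount); // simple photon shot noise model
              * reprocessBuilder.set(CaptureRequest.REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, factor);
              * </code></pre>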
    4445      * <p>This is a relative factor; 1.0 indicates that the application hasn't processed the input
    4446      * buffer in a way that affects its effective exposure time.</p>
    4447      * <p>This control is only effective for YUV reprocessing capture requests. For noise
   4448      * reduction reprocessing, it is only effective when <code>{@link CaptureRequest#NOISE_REDUCTION_MODE android.noiseReduction.mode} != OFF</code>.
   4449      * Similarly, for edge enhancement reprocessing, it is only effective when
   4450      * <code>{@link CaptureRequest#EDGE_MODE android.edge.mode} != OFF</code>.</p>
   4451      * <p><b>Units</b>: Relative exposure time increase factor.</p>
   4452      * <p><b>Range of valid values:</b><br>
   4453      * &gt;= 1.0</p>
   4454      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   4455      * <p><b>Limited capability</b> -
   4456      * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the
   4457      * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
   4458      *
   4459      * @see CaptureRequest#EDGE_MODE
   4460      * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
   4461      * @see CaptureRequest#NOISE_REDUCTION_MODE
   4462      * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
   4463      */
   4464     @PublicKey
   4465     public static final Key<Float> REPROCESS_EFFECTIVE_EXPOSURE_FACTOR =
   4466             new Key<Float>("android.reprocess.effectiveExposureFactor", float.class);
   4467 
   4468     /**
   4469      * <p>Mode of operation for the lens distortion correction block.</p>
   4470      * <p>The lens distortion correction block attempts to improve image quality by fixing
   4471      * radial, tangential, or other geometric aberrations in the camera device's optics.  If
   4472      * available, the {@link CameraCharacteristics#LENS_DISTORTION android.lens.distortion} field documents the lens's distortion parameters.</p>
   4473      * <p>OFF means no distortion correction is done.</p>
   4474      * <p>FAST/HIGH_QUALITY both mean camera device determined distortion correction will be
   4475      * applied. HIGH_QUALITY mode indicates that the camera device will use the highest-quality
   4476      * correction algorithms, even if it slows down capture rate. FAST means the camera device
   4477      * will not slow down capture rate when applying correction. FAST may be the same as OFF if
   4478      * any correction at all would slow down capture rate.  Every output stream will have a
   4479      * similar amount of enhancement applied.</p>
   4480      * <p>The correction only applies to processed outputs such as YUV, JPEG, or DEPTH16; it is not
   4481      * applied to any RAW output.  Metadata coordinates such as face rectangles or metering
   4482      * regions are also not affected by correction.</p>
   4483      * <p>Applications enabling distortion correction need to pay extra attention when converting
   4484      * image coordinates between corrected output buffers and the sensor array. For example, if
   4485      * the app supports tap-to-focus and enables correction, it then has to apply the distortion
   4486      * model described in {@link CameraCharacteristics#LENS_DISTORTION android.lens.distortion} to the image buffer tap coordinates to properly
   4487      * calculate the tap position on the sensor active array to be used with
   4488      * {@link CaptureRequest#CONTROL_AF_REGIONS android.control.afRegions}. The same applies in reverse to detected face rectangles if
   4489      * they need to be drawn on top of the corrected output buffers.</p>
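              * <p>A minimal sketch of enabling the highest-quality correction when it is supported
              * (assumes a {@link CameraCharacteristics} named {@code characteristics} and a
              * {@link CaptureRequest.Builder} named {@code builder}):</p>
              * <pre><code>
              * int[] modes = characteristics.get(
              *         CameraCharacteristics.DISTORTION_CORRECTION_AVAILABLE_MODES);
              * if (modes != null) {
              *     for (int mode : modes) {
              *         if (mode == CameraMetadata.DISTORTION_CORRECTION_MODE_HIGH_QUALITY) {
              *             builder.set(CaptureRequest.DISTORTION_CORRECTION_MODE, mode);
              *         }
              *     }
              * }
              * </code></pre>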
   4490      * <p><b>Possible values:</b>
   4491      * <ul>
   4492      *   <li>{@link #DISTORTION_CORRECTION_MODE_OFF OFF}</li>
   4493      *   <li>{@link #DISTORTION_CORRECTION_MODE_FAST FAST}</li>
   4494      *   <li>{@link #DISTORTION_CORRECTION_MODE_HIGH_QUALITY HIGH_QUALITY}</li>
   4495      * </ul></p>
   4496      * <p><b>Available values for this device:</b><br>
   4497      * {@link CameraCharacteristics#DISTORTION_CORRECTION_AVAILABLE_MODES android.distortionCorrection.availableModes}</p>
   4498      * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
   4499      *
   4500      * @see CaptureRequest#CONTROL_AF_REGIONS
   4501      * @see CameraCharacteristics#DISTORTION_CORRECTION_AVAILABLE_MODES
   4502      * @see CameraCharacteristics#LENS_DISTORTION
   4503      * @see #DISTORTION_CORRECTION_MODE_OFF
   4504      * @see #DISTORTION_CORRECTION_MODE_FAST
   4505      * @see #DISTORTION_CORRECTION_MODE_HIGH_QUALITY
   4506      */
   4507     @PublicKey
   4508     public static final Key<Integer> DISTORTION_CORRECTION_MODE =
   4509             new Key<Integer>("android.distortionCorrection.mode", int.class);
   4510 
   4511     /*~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~
   4512      * End generated code
   4513      *~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~O@*/
   4514 
   4515 
   4516 
   4517 }
   4518