      1 /*
      2  * Copyright (C) 2013 The Android Open Source Project
      3  *
      4  * Licensed under the Apache License, Version 2.0 (the "License");
      5  * you may not use this file except in compliance with the License.
      6  * You may obtain a copy of the License at
      7  *
      8  *      http://www.apache.org/licenses/LICENSE-2.0
      9  *
     10  * Unless required by applicable law or agreed to in writing, software
     11  * distributed under the License is distributed on an "AS IS" BASIS,
     12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     13  * See the License for the specific language governing permissions and
     14  * limitations under the License.
     15  */
     16 
     17 #ifndef ANDROID_INCLUDE_CAMERA3_H
     18 #define ANDROID_INCLUDE_CAMERA3_H
     19 
     20 #include <system/camera_metadata.h>
     21 #include "camera_common.h"
     22 
     23 /**
     24  * Camera device HAL 3.4 [ CAMERA_DEVICE_API_VERSION_3_4 ]
     25  *
     26  * This is the current recommended version of the camera device HAL.
     27  *
     28  * Supports the android.hardware.Camera API, and as of v3.2, the
     29  * android.hardware.camera2 API as LIMITED or above hardware level.
     30  *
     31  * Camera devices that support this version of the HAL must return
     32  * CAMERA_DEVICE_API_VERSION_3_4 in camera_device_t.common.version and in
     33  * camera_info_t.device_version (from camera_module_t.get_camera_info).
     34  *
     35  * CAMERA_DEVICE_API_VERSION_3_3 and above:
     36  *    Camera modules that may contain version 3.3 or above devices must
     37  *    implement at least version 2.2 of the camera module interface (as defined
     38  *    by camera_module_t.common.module_api_version).
     39  *
     40  * CAMERA_DEVICE_API_VERSION_3_2:
     41  *    Camera modules that may contain version 3.2 devices must implement at
     42  *    least version 2.2 of the camera module interface (as defined by
     43  *    camera_module_t.common.module_api_version).
     44  *
     45  * <= CAMERA_DEVICE_API_VERSION_3_1:
     46  *    Camera modules that may contain version 3.1 (or 3.0) devices must
     47  *    implement at least version 2.0 of the camera module interface
     48  *    (as defined by camera_module_t.common.module_api_version).
     49  *
     50  * See camera_common.h for more versioning details.
     51  *
     52  * Documentation index:
     53  *   S1. Version history
     54  *   S2. Startup and operation sequencing
     55  *   S3. Operational modes
     56  *   S4. 3A modes and state machines
     57  *   S5. Cropping
     58  *   S6. Error management
     59  *   S7. Key Performance Indicator (KPI) glossary
     60  *   S8. Sample Use Cases
     61  *   S9. Notes on Controls and Metadata
     62  *   S10. Reprocessing flow and controls
     63  */
     64 
     65 /**
     66  * S1. Version history:
     67  *
     68  * 1.0: Initial Android camera HAL (Android 4.0) [camera.h]:
     69  *
     70  *   - Converted from C++ CameraHardwareInterface abstraction layer.
     71  *
     72  *   - Supports android.hardware.Camera API.
     73  *
     74  * 2.0: Initial release of expanded-capability HAL (Android 4.2) [camera2.h]:
     75  *
     76  *   - Sufficient for implementing existing android.hardware.Camera API.
     77  *
     78  *   - Allows for ZSL queue in camera service layer
     79  *
      80  *   - Not tested for any new features such as manual capture control, Bayer RAW
      81  *     capture, or reprocessing of RAW data.
     82  *
     83  * 3.0: First revision of expanded-capability HAL:
     84  *
     85  *   - Major version change since the ABI is completely different. No change to
     86  *     the required hardware capabilities or operational model from 2.0.
     87  *
     88  *   - Reworked input request and stream queue interfaces: Framework calls into
     89  *     HAL with next request and stream buffers already dequeued. Sync framework
     90  *     support is included, necessary for efficient implementations.
     91  *
     92  *   - Moved triggers into requests, most notifications into results.
     93  *
     94  *   - Consolidated all callbacks into framework into one structure, and all
     95  *     setup methods into a single initialize() call.
     96  *
     97  *   - Made stream configuration into a single call to simplify stream
     98  *     management. Bidirectional streams replace STREAM_FROM_STREAM construct.
     99  *
    100  *   - Limited mode semantics for older/limited hardware devices.
    101  *
    102  * 3.1: Minor revision of expanded-capability HAL:
    103  *
    104  *   - configure_streams passes consumer usage flags to the HAL.
    105  *
    106  *   - flush call to drop all in-flight requests/buffers as fast as possible.
    107  *
    108  * 3.2: Minor revision of expanded-capability HAL:
    109  *
    110  *   - Deprecates get_metadata_vendor_tag_ops.  Please use get_vendor_tag_ops
    111  *     in camera_common.h instead.
    112  *
    113  *   - register_stream_buffers deprecated. All gralloc buffers provided
    114  *     by framework to HAL in process_capture_request may be new at any time.
    115  *
    116  *   - add partial result support. process_capture_result may be called
    117  *     multiple times with a subset of the available result before the full
    118  *     result is available.
    119  *
     120  *   - add manual template to camera3_request_template. Applications may
    121  *     use this template to control the capture settings directly.
    122  *
    123  *   - Rework the bidirectional and input stream specifications.
    124  *
    125  *   - change the input buffer return path. The buffer is returned in
    126  *     process_capture_result instead of process_capture_request.
    127  *
    128  * 3.3: Minor revision of expanded-capability HAL:
    129  *
    130  *   - OPAQUE and YUV reprocessing API updates.
    131  *
    132  *   - Basic support for depth output buffers.
    133  *
    134  *   - Addition of data_space field to camera3_stream_t.
    135  *
    136  *   - Addition of rotation field to camera3_stream_t.
    137  *
    138  *   - Addition of camera3 stream configuration operation mode to camera3_stream_configuration_t
    139  *
    140  * 3.4: Minor additions to supported metadata and changes to data_space support
    141  *
    142  *   - Add ANDROID_SENSOR_OPAQUE_RAW_SIZE static metadata as mandatory if
    143  *     RAW_OPAQUE format is supported.
    144  *
    145  *   - Add ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE static metadata as
    146  *     mandatory if any RAW format is supported
    147  *
    148  *   - Switch camera3_stream_t data_space field to a more flexible definition,
    149  *     using the version 0 definition of dataspace encoding.
    150  *
    151  *   - General metadata additions which are available to use for HALv3.2 or
    152  *     newer:
    153  *     - ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3
    154  *     - ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST
    155  *     - ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE
    156  *     - ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
    157  *     - ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL
    158  *     - ANDROID_SENSOR_OPAQUE_RAW_SIZE
    159  *     - ANDROID_SENSOR_OPTICAL_BLACK_REGIONS
    160  */
    161 
    162 /**
    163  * S2. Startup and general expected operation sequence:
    164  *
    165  * 1. Framework calls camera_module_t->common.open(), which returns a
    166  *    hardware_device_t structure.
    167  *
    168  * 2. Framework inspects the hardware_device_t->version field, and instantiates
    169  *    the appropriate handler for that version of the camera hardware device. In
    170  *    case the version is CAMERA_DEVICE_API_VERSION_3_0, the device is cast to
    171  *    a camera3_device_t.
    172  *
    173  * 3. Framework calls camera3_device_t->ops->initialize() with the framework
    174  *    callback function pointers. This will only be called this one time after
    175  *    open(), before any other functions in the ops structure are called.
    176  *
    177  * 4. The framework calls camera3_device_t->ops->configure_streams() with a list
    178  *    of input/output streams to the HAL device.
    179  *
    180  * 5. <= CAMERA_DEVICE_API_VERSION_3_1:
    181  *
    182  *    The framework allocates gralloc buffers and calls
    183  *    camera3_device_t->ops->register_stream_buffers() for at least one of the
    184  *    output streams listed in configure_streams. The same stream is registered
    185  *    only once.
    186  *
    187  *    >= CAMERA_DEVICE_API_VERSION_3_2:
    188  *
    189  *    camera3_device_t->ops->register_stream_buffers() is not called and must
    190  *    be NULL.
    191  *
    192  * 6. The framework requests default settings for some number of use cases with
    193  *    calls to camera3_device_t->ops->construct_default_request_settings(). This
    194  *    may occur any time after step 3.
    195  *
    196  * 7. The framework constructs and sends the first capture request to the HAL,
    197  *    with settings based on one of the sets of default settings, and with at
    198  *    least one output stream, which has been registered earlier by the
    199  *    framework. This is sent to the HAL with
    200  *    camera3_device_t->ops->process_capture_request(). The HAL must block the
    201  *    return of this call until it is ready for the next request to be sent.
    202  *
    203  *    >= CAMERA_DEVICE_API_VERSION_3_2:
    204  *
    205  *    The buffer_handle_t provided in the camera3_stream_buffer_t array
    206  *    in the camera3_capture_request_t may be new and never-before-seen
    207  *    by the HAL on any given new request.
    208  *
    209  * 8. The framework continues to submit requests, and call
    210  *    construct_default_request_settings to get default settings buffers for
    211  *    other use cases.
    212  *
    213  *    <= CAMERA_DEVICE_API_VERSION_3_1:
    214  *
    215  *    The framework may call register_stream_buffers() at this time for
    216  *    not-yet-registered streams.
    217  *
    218  * 9. When the capture of a request begins (sensor starts exposing for the
    219  *    capture) or processing a reprocess request begins, the HAL
    220  *    calls camera3_callback_ops_t->notify() with the SHUTTER event, including
    221  *    the frame number and the timestamp for start of exposure. For a reprocess
    222  *    request, the timestamp must be the start of exposure of the input image
    223  *    which can be looked up with android.sensor.timestamp from
    224  *    camera3_capture_request_t.settings when process_capture_request() is
    225  *    called.
    226  *
    227  *    <= CAMERA_DEVICE_API_VERSION_3_1:
    228  *
    229  *    This notify call must be made before the first call to
    230  *    process_capture_result() for that frame number.
    231  *
    232  *    >= CAMERA_DEVICE_API_VERSION_3_2:
    233  *
    234  *    The camera3_callback_ops_t->notify() call with the SHUTTER event should
    235  *    be made as early as possible since the framework will be unable to
    236  *    deliver gralloc buffers to the application layer (for that frame) until
    237  *    it has a valid timestamp for the start of exposure (or the input image's
    238  *    start of exposure for a reprocess request).
    239  *
    240  *    Both partial metadata results and the gralloc buffers may be sent to the
    241  *    framework at any time before or after the SHUTTER event.
    242  *
    243  * 10. After some pipeline delay, the HAL begins to return completed captures to
    244  *    the framework with camera3_callback_ops_t->process_capture_result(). These
    245  *    are returned in the same order as the requests were submitted. Multiple
    246  *    requests can be in flight at once, depending on the pipeline depth of the
    247  *    camera HAL device.
    248  *
    249  *    >= CAMERA_DEVICE_API_VERSION_3_2:
    250  *
    251  *    Once a buffer is returned by process_capture_result as part of the
    252  *    camera3_stream_buffer_t array, and the fence specified by release_fence
    253  *    has been signaled (this is a no-op for -1 fences), the ownership of that
    254  *    buffer is considered to be transferred back to the framework. After that,
    255  *    the HAL must no longer retain that particular buffer, and the
    256  *    framework may clean up the memory for it immediately.
    257  *
    258  *    process_capture_result may be called multiple times for a single frame,
    259  *    each time with a new disjoint piece of metadata and/or set of gralloc
    260  *    buffers. The framework will accumulate these partial metadata results
    261  *    into one result.
    262  *
    263  *    In particular, it is legal for a process_capture_result to be called
    264  *    simultaneously for both a frame N and a frame N+1 as long as the
    265  *    above rule holds for gralloc buffers (both input and output).
    266  *
    267  * 11. After some time, the framework may stop submitting new requests, wait for
    268  *    the existing captures to complete (all buffers filled, all results
    269  *    returned), and then call configure_streams() again. This resets the camera
    270  *    hardware and pipeline for a new set of input/output streams. Some streams
    271  *    may be reused from the previous configuration; if these streams' buffers
    272  *    had already been registered with the HAL, they will not be registered
    273  *    again. The framework then continues from step 7, if at least one
    274  *    registered output stream remains (otherwise, step 5 is required first).
    275  *
    276  * 12. Alternatively, the framework may call camera3_device_t->common->close()
    277  *    to end the camera session. This may be called at any time when no other
    278  *    calls from the framework are active, although the call may block until all
    279  *    in-flight captures have completed (all results returned, all buffers
    280  *    filled). After the close call returns, no more calls to the
    281  *    camera3_callback_ops_t functions are allowed from the HAL. Once the
    282  *    close() call is underway, the framework may not call any other HAL device
    283  *    functions.
    284  *
    285  * 13. In case of an error or other asynchronous event, the HAL must call
    286  *    camera3_callback_ops_t->notify() with the appropriate error/event
    287  *    message. After returning from a fatal device-wide error notification, the
    288  *    HAL should act as if close() had been called on it. However, the HAL must
    289  *    either cancel or complete all outstanding captures before calling
    290  *    notify(), so that once notify() is called with a fatal error, the
    291  *    framework will not receive further callbacks from the device. Methods
    292  *    besides close() should return -ENODEV or NULL after the notify() method
    293  *    returns from a fatal error message.
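          *
          *    The following is a rough framework-side sketch of steps 1-7 above for a
          *    single output stream. It is illustrative only: the helper name and
          *    arguments are hypothetical, error paths are abbreviated, and gralloc
          *    buffer allocation (which produces out_buffer) is assumed to happen
          *    elsewhere.
          *
          *    static int open_and_start_capture(camera_module_t *module,
          *            const char *camera_id,
          *            const camera3_callback_ops_t *callbacks,
          *            camera3_stream_t *preview_stream,
          *            camera3_stream_buffer_t *out_buffer) {
          *        hw_device_t *device = NULL;
          *        // Step 1: open the camera device through the module methods.
          *        int res = module->common.methods->open(&module->common, camera_id,
          *                &device);
          *        if (res != 0) return res;
          *        // Step 2: check the version and cast to camera3_device_t.
          *        if (device->version < CAMERA_DEVICE_API_VERSION_3_0) return -EINVAL;
          *        camera3_device_t *cam = (camera3_device_t *) device;
          *        // Step 3: one-time initialize() with the framework callbacks.
          *        res = cam->ops->initialize(cam, callbacks);
          *        if (res != 0) return res;
          *        // Step 4: configure a single output stream.
          *        camera3_stream_t *streams[] = { preview_stream };
          *        camera3_stream_configuration_t config = {
          *            .num_streams = 1,
          *            .streams = streams,
          *            .operation_mode = CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE,
          *        };
          *        res = cam->ops->configure_streams(cam, &config);
          *        if (res != 0) return res;
          *        // Step 6: get default settings for the preview use case.
          *        const camera_metadata_t *settings =
          *                cam->ops->construct_default_request_settings(cam,
          *                        CAMERA3_TEMPLATE_PREVIEW);
          *        // Step 7: submit the first capture request with one output buffer.
          *        camera3_capture_request_t request = {
          *            .frame_number = 0,
          *            .settings = settings,
          *            .input_buffer = NULL,
          *            .num_output_buffers = 1,
          *            .output_buffers = out_buffer,
          *        };
          *        return cam->ops->process_capture_request(cam, &request);
          *    }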
    294  */
    295 
    296 /**
    297  * S3. Operational modes:
    298  *
    299  * The camera 3 HAL device can implement one of two possible operational modes;
    300  * limited and full. Full support is expected from new higher-end
    301  * devices. Limited mode has hardware requirements roughly in line with those
    302  * for a camera HAL device v1 implementation, and is expected from older or
    303  * inexpensive devices. Full is a strict superset of limited, and they share the
    304  * same essential operational flow, as documented above.
    305  *
    306  * The HAL must indicate its level of support with the
    307  * android.info.supportedHardwareLevel static metadata entry, with 0 indicating
    308  * limited mode, and 1 indicating full mode support.
    309  *
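          * As an illustration only, a framework-side check of this entry might look
          * like the sketch below; the helper name is hypothetical, and the static
          * metadata is assumed to have been obtained already through
          * camera_module_t.get_camera_info().
          *
          *    // Returns 1 for FULL (or better) support, 0 for LIMITED, and a
          *    // negative error code if the entry is missing.
          *    static int is_full_hal(const camera_metadata_t *static_info) {
          *        camera_metadata_ro_entry_t entry;
          *        int res = find_camera_metadata_ro_entry(static_info,
          *                ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL, &entry);
          *        if (res != 0 || entry.count == 0) return res ? res : -ENOENT;
          *        return entry.data.u8[0] >=
          *                ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
          *    }
          *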
    310  * Roughly speaking, limited-mode devices do not allow for application control
    311  * of capture settings (3A control only), high-rate capture of high-resolution
    312  * images, raw sensor readout, or support for YUV output streams above maximum
    313  * recording resolution (JPEG only for large images).
    314  *
    315  * ** Details of limited mode behavior:
    316  *
    317  * - Limited-mode devices do not need to implement accurate synchronization
    318  *   between capture request settings and the actual image data
    319  *   captured. Instead, changes to settings may take effect some time in the
    320  *   future, and possibly not for the same output frame for each settings
    321  *   entry. Rapid changes in settings may result in some settings never being
    322  *   used for a capture. However, captures that include high-resolution output
    323  *   buffers ( > 1080p ) have to use the settings as specified (but see below
    324  *   for processing rate).
    325  *
    326  * - Limited-mode devices do not need to support most of the
    327  *   settings/result/static info metadata. Specifically, only the following settings
    328  *   are expected to be consumed or produced by a limited-mode HAL device:
    329  *
    330  *   android.control.aeAntibandingMode (controls and dynamic)
    331  *   android.control.aeExposureCompensation (controls and dynamic)
    332  *   android.control.aeLock (controls and dynamic)
    333  *   android.control.aeMode (controls and dynamic)
    334  *   android.control.aeRegions (controls and dynamic)
    335  *   android.control.aeTargetFpsRange (controls and dynamic)
    336  *   android.control.aePrecaptureTrigger (controls and dynamic)
    337  *   android.control.afMode (controls and dynamic)
    338  *   android.control.afRegions (controls and dynamic)
    339  *   android.control.awbLock (controls and dynamic)
    340  *   android.control.awbMode (controls and dynamic)
    341  *   android.control.awbRegions (controls and dynamic)
    342  *   android.control.captureIntent (controls and dynamic)
    343  *   android.control.effectMode (controls and dynamic)
    344  *   android.control.mode (controls and dynamic)
    345  *   android.control.sceneMode (controls and dynamic)
    346  *   android.control.videoStabilizationMode (controls and dynamic)
    347  *   android.control.aeAvailableAntibandingModes (static)
    348  *   android.control.aeAvailableModes (static)
    349  *   android.control.aeAvailableTargetFpsRanges (static)
    350  *   android.control.aeCompensationRange (static)
    351  *   android.control.aeCompensationStep (static)
    352  *   android.control.afAvailableModes (static)
    353  *   android.control.availableEffects (static)
    354  *   android.control.availableSceneModes (static)
    355  *   android.control.availableVideoStabilizationModes (static)
    356  *   android.control.awbAvailableModes (static)
    357  *   android.control.maxRegions (static)
    358  *   android.control.sceneModeOverrides (static)
    359  *   android.control.aeState (dynamic)
    360  *   android.control.afState (dynamic)
    361  *   android.control.awbState (dynamic)
    362  *
    363  *   android.flash.mode (controls and dynamic)
    364  *   android.flash.info.available (static)
    365  *
    366  *   android.info.supportedHardwareLevel (static)
    367  *
    368  *   android.jpeg.gpsCoordinates (controls and dynamic)
    369  *   android.jpeg.gpsProcessingMethod (controls and dynamic)
    370  *   android.jpeg.gpsTimestamp (controls and dynamic)
    371  *   android.jpeg.orientation (controls and dynamic)
    372  *   android.jpeg.quality (controls and dynamic)
    373  *   android.jpeg.thumbnailQuality (controls and dynamic)
    374  *   android.jpeg.thumbnailSize (controls and dynamic)
    375  *   android.jpeg.availableThumbnailSizes (static)
    376  *   android.jpeg.maxSize (static)
    377  *
    378  *   android.lens.info.minimumFocusDistance (static)
    379  *
    380  *   android.request.id (controls and dynamic)
    381  *
    382  *   android.scaler.cropRegion (controls and dynamic)
    383  *   android.scaler.availableStreamConfigurations (static)
    384  *   android.scaler.availableMinFrameDurations (static)
    385  *   android.scaler.availableStallDurations (static)
    386  *   android.scaler.availableMaxDigitalZoom (static)
    387  *   android.scaler.maxDigitalZoom (static)
    388  *   android.scaler.croppingType (static)
    389  *
    390  *   android.sensor.orientation (static)
    391  *   android.sensor.timestamp (dynamic)
    392  *
    393  *   android.statistics.faceDetectMode (controls and dynamic)
    394  *   android.statistics.info.availableFaceDetectModes (static)
    395  *   android.statistics.faceIds (dynamic)
    396  *   android.statistics.faceLandmarks (dynamic)
    397  *   android.statistics.faceRectangles (dynamic)
    398  *   android.statistics.faceScores (dynamic)
    399  *
    400  *   android.sync.frameNumber (dynamic)
    401  *   android.sync.maxLatency (static)
    402  *
    403  * - Captures in limited mode that include high-resolution (> 1080p) output
    404  *   buffers may block in process_capture_request() until all the output buffers
    405  *   have been filled. A full-mode HAL device must process sequences of
    406  *   high-resolution requests at the rate indicated in the static metadata for
    407  *   that pixel format. The HAL must still call process_capture_result() to
    408  *   provide the output; the framework must simply be prepared for
    409  *   process_capture_request() to block until after process_capture_result() for
    410  *   that request completes for high-resolution captures for limited-mode
    411  *   devices.
    412  *
     413  *   - Full-mode devices must support the following additional capabilities:
     414  *   - 30fps at maximum resolution is preferred; more than 20fps is required.
    415  *   - Per frame control (android.sync.maxLatency == PER_FRAME_CONTROL).
    416  *   - Sensor manual control metadata. See MANUAL_SENSOR defined in
    417  *     android.request.availableCapabilities.
    418  *   - Post-processing manual control metadata. See MANUAL_POST_PROCESSING defined
    419  *     in android.request.availableCapabilities.
    420  *
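          * As an illustration, the MANUAL_SENSOR requirement above can be checked in
          * the static metadata as sketched below; the helper name is hypothetical.
          *
          *    static int has_manual_sensor(const camera_metadata_t *static_info) {
          *        camera_metadata_ro_entry_t entry;
          *        if (find_camera_metadata_ro_entry(static_info,
          *                ANDROID_REQUEST_AVAILABLE_CAPABILITIES, &entry) != 0)
          *            return 0;
          *        for (size_t i = 0; i < entry.count; i++) {
          *            if (entry.data.u8[i] ==
          *                    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)
          *                return 1;
          *        }
          *        return 0;
          *    }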
    421  */
    422 
    423 /**
    424  * S4. 3A modes and state machines:
    425  *
    426  * While the actual 3A algorithms are up to the HAL implementation, a high-level
    427  * state machine description is defined by the HAL interface, to allow the HAL
    428  * device and the framework to communicate about the current state of 3A, and to
    429  * trigger 3A events.
    430  *
    431  * When the device is opened, all the individual 3A states must be
    432  * STATE_INACTIVE. Stream configuration does not reset 3A. For example, locked
    433  * focus must be maintained across the configure() call.
    434  *
    435  * Triggering a 3A action involves simply setting the relevant trigger entry in
    436  * the settings for the next request to indicate start of trigger. For example,
    437  * the trigger for starting an autofocus scan is setting the entry
    438  * ANDROID_CONTROL_AF_TRIGGER to ANDROID_CONTROL_AF_TRIGGER_START for one
    439  * request, and cancelling an autofocus scan is triggered by setting
     440  * ANDROID_CONTROL_AF_TRIGGER to ANDROID_CONTROL_AF_TRIGGER_CANCEL. Otherwise,
    441  * the entry will not exist, or be set to ANDROID_CONTROL_AF_TRIGGER_IDLE. Each
    442  * request with a trigger entry set to a non-IDLE value will be treated as an
    443  * independent triggering event.
    444  *
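          * For illustration, a trigger can be set in a writable copy of the request
          * settings as sketched below; the helper name is hypothetical, and growing
          * the metadata buffer when add_camera_metadata_entry() runs out of capacity
          * is omitted. The AE precapture trigger is set the same way using
          * ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER.
          *
          *    static int request_af_scan(camera_metadata_t *settings) {
          *        uint8_t trigger = ANDROID_CONTROL_AF_TRIGGER_START;
          *        camera_metadata_entry_t entry;
          *        if (find_camera_metadata_entry(settings,
          *                ANDROID_CONTROL_AF_TRIGGER, &entry) == 0) {
          *            // Entry already present: overwrite it in place.
          *            return update_camera_metadata_entry(settings, entry.index,
          *                    &trigger, 1, NULL);
          *        }
          *        // Entry absent: append it.
          *        return add_camera_metadata_entry(settings,
          *                ANDROID_CONTROL_AF_TRIGGER, &trigger, 1);
          *    }
          *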
    445  * At the top level, 3A is controlled by the ANDROID_CONTROL_MODE setting, which
    446  * selects between no 3A (ANDROID_CONTROL_MODE_OFF), normal AUTO mode
    447  * (ANDROID_CONTROL_MODE_AUTO), and using the scene mode setting
    448  * (ANDROID_CONTROL_USE_SCENE_MODE).
    449  *
    450  * - In OFF mode, each of the individual AE/AF/AWB modes are effectively OFF,
    451  *   and none of the capture controls may be overridden by the 3A routines.
    452  *
    453  * - In AUTO mode, Auto-focus, auto-exposure, and auto-whitebalance all run
    454  *   their own independent algorithms, and have their own mode, state, and
    455  *   trigger metadata entries, as listed in the next section.
    456  *
    457  * - In USE_SCENE_MODE, the value of the ANDROID_CONTROL_SCENE_MODE entry must
    458  *   be used to determine the behavior of 3A routines. In SCENE_MODEs other than
    459  *   FACE_PRIORITY, the HAL must override the values of
     460  *   ANDROID_CONTROL_AE/AWB/AF_MODE to be the mode it prefers for the selected
     461  *   SCENE_MODE. For example, the HAL may prefer to use the CONTINUOUS_FOCUS AF
     462  *   mode for SCENE_MODE_NIGHT. Any user selection of AE/AWB/AF_MODE while a
     463  *   scene mode is active must be ignored for these scene modes.
    464  *
    465  * - For SCENE_MODE_FACE_PRIORITY, the AE/AWB/AF_MODE controls work as in
    466  *   ANDROID_CONTROL_MODE_AUTO, but the 3A routines must bias toward metering
    467  *   and focusing on any detected faces in the scene.
    468  *
    469  * S4.1. Auto-focus settings and result entries:
    470  *
    471  *  Main metadata entries:
    472  *
    473  *   ANDROID_CONTROL_AF_MODE: Control for selecting the current autofocus
    474  *      mode. Set by the framework in the request settings.
    475  *
    476  *     AF_MODE_OFF: AF is disabled; the framework/app directly controls lens
    477  *         position.
    478  *
    479  *     AF_MODE_AUTO: Single-sweep autofocus. No lens movement unless AF is
    480  *         triggered.
    481  *
    482  *     AF_MODE_MACRO: Single-sweep up-close autofocus. No lens movement unless
    483  *         AF is triggered.
    484  *
    485  *     AF_MODE_CONTINUOUS_VIDEO: Smooth continuous focusing, for recording
    486  *         video. Triggering immediately locks focus in current
     487  *         position. Canceling resumes continuous focusing.
    488  *
    489  *     AF_MODE_CONTINUOUS_PICTURE: Fast continuous focusing, for
    490  *        zero-shutter-lag still capture. Triggering locks focus once currently
    491  *        active sweep concludes. Canceling resumes continuous focusing.
    492  *
    493  *     AF_MODE_EDOF: Advanced extended depth of field focusing. There is no
    494  *        autofocus scan, so triggering one or canceling one has no effect.
    495  *        Images are focused automatically by the HAL.
    496  *
    497  *   ANDROID_CONTROL_AF_STATE: Dynamic metadata describing the current AF
    498  *       algorithm state, reported by the HAL in the result metadata.
    499  *
    500  *     AF_STATE_INACTIVE: No focusing has been done, or algorithm was
    501  *        reset. Lens is not moving. Always the state for MODE_OFF or MODE_EDOF.
    502  *        When the device is opened, it must start in this state.
    503  *
    504  *     AF_STATE_PASSIVE_SCAN: A continuous focus algorithm is currently scanning
    505  *        for good focus. The lens is moving.
    506  *
    507  *     AF_STATE_PASSIVE_FOCUSED: A continuous focus algorithm believes it is
    508  *        well focused. The lens is not moving. The HAL may spontaneously leave
    509  *        this state.
    510  *
    511  *     AF_STATE_PASSIVE_UNFOCUSED: A continuous focus algorithm believes it is
    512  *        not well focused. The lens is not moving. The HAL may spontaneously
    513  *        leave this state.
    514  *
    515  *     AF_STATE_ACTIVE_SCAN: A scan triggered by the user is underway.
    516  *
    517  *     AF_STATE_FOCUSED_LOCKED: The AF algorithm believes it is focused. The
    518  *        lens is not moving.
    519  *
    520  *     AF_STATE_NOT_FOCUSED_LOCKED: The AF algorithm has been unable to
    521  *        focus. The lens is not moving.
    522  *
    523  *   ANDROID_CONTROL_AF_TRIGGER: Control for starting an autofocus scan, the
    524  *       meaning of which is mode- and state- dependent. Set by the framework in
    525  *       the request settings.
    526  *
    527  *     AF_TRIGGER_IDLE: No current trigger.
    528  *
    529  *     AF_TRIGGER_START: Trigger start of AF scan. Effect is mode and state
    530  *         dependent.
    531  *
    532  *     AF_TRIGGER_CANCEL: Cancel current AF scan if any, and reset algorithm to
    533  *         default.
    534  *
    535  *  Additional metadata entries:
    536  *
    537  *   ANDROID_CONTROL_AF_REGIONS: Control for selecting the regions of the FOV
    538  *       that should be used to determine good focus. This applies to all AF
    539  *       modes that scan for focus. Set by the framework in the request
    540  *       settings.
    541  *
    542  * S4.2. Auto-exposure settings and result entries:
    543  *
    544  *  Main metadata entries:
    545  *
    546  *   ANDROID_CONTROL_AE_MODE: Control for selecting the current auto-exposure
    547  *       mode. Set by the framework in the request settings.
    548  *
    549  *     AE_MODE_OFF: Autoexposure is disabled; the user controls exposure, gain,
    550  *         frame duration, and flash.
    551  *
    552  *     AE_MODE_ON: Standard autoexposure, with flash control disabled. User may
    553  *         set flash to fire or to torch mode.
    554  *
    555  *     AE_MODE_ON_AUTO_FLASH: Standard autoexposure, with flash on at HAL's
    556  *         discretion for precapture and still capture. User control of flash
    557  *         disabled.
    558  *
    559  *     AE_MODE_ON_ALWAYS_FLASH: Standard autoexposure, with flash always fired
     560  *         for capture, and at HAL's discretion for precapture. User control of
    561  *         flash disabled.
    562  *
    563  *     AE_MODE_ON_AUTO_FLASH_REDEYE: Standard autoexposure, with flash on at
    564  *         HAL's discretion for precapture and still capture. Use a flash burst
    565  *         at end of precapture sequence to reduce redeye in the final
    566  *         picture. User control of flash disabled.
    567  *
    568  *   ANDROID_CONTROL_AE_STATE: Dynamic metadata describing the current AE
    569  *       algorithm state, reported by the HAL in the result metadata.
    570  *
    571  *     AE_STATE_INACTIVE: Initial AE state after mode switch. When the device is
    572  *         opened, it must start in this state.
    573  *
    574  *     AE_STATE_SEARCHING: AE is not converged to a good value, and is adjusting
    575  *         exposure parameters.
    576  *
    577  *     AE_STATE_CONVERGED: AE has found good exposure values for the current
    578  *         scene, and the exposure parameters are not changing. HAL may
     579  *         spontaneously leave this state to search for a better solution.
    580  *
    581  *     AE_STATE_LOCKED: AE has been locked with the AE_LOCK control. Exposure
    582  *         values are not changing.
    583  *
    584  *     AE_STATE_FLASH_REQUIRED: The HAL has converged exposure, but believes
    585  *         flash is required for a sufficiently bright picture. Used for
    586  *         determining if a zero-shutter-lag frame can be used.
    587  *
    588  *     AE_STATE_PRECAPTURE: The HAL is in the middle of a precapture
    589  *         sequence. Depending on AE mode, this mode may involve firing the
    590  *         flash for metering, or a burst of flash pulses for redeye reduction.
    591  *
    592  *   ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER: Control for starting a metering
    593  *       sequence before capturing a high-quality image. Set by the framework in
    594  *       the request settings.
    595  *
    596  *      PRECAPTURE_TRIGGER_IDLE: No current trigger.
    597  *
    598  *      PRECAPTURE_TRIGGER_START: Start a precapture sequence. The HAL should
    599  *         use the subsequent requests to measure good exposure/white balance
    600  *         for an upcoming high-resolution capture.
    601  *
    602  *  Additional metadata entries:
    603  *
    604  *   ANDROID_CONTROL_AE_LOCK: Control for locking AE controls to their current
    605  *       values
    606  *
    607  *   ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION: Control for adjusting AE
    608  *       algorithm target brightness point.
    609  *
    610  *   ANDROID_CONTROL_AE_TARGET_FPS_RANGE: Control for selecting the target frame
    611  *       rate range for the AE algorithm. The AE routine cannot change the frame
    612  *       rate to be outside these bounds.
    613  *
    614  *   ANDROID_CONTROL_AE_REGIONS: Control for selecting the regions of the FOV
    615  *       that should be used to determine good exposure levels. This applies to
    616  *       all AE modes besides OFF.
    617  *
    618  * S4.3. Auto-whitebalance settings and result entries:
    619  *
    620  *  Main metadata entries:
    621  *
    622  *   ANDROID_CONTROL_AWB_MODE: Control for selecting the current white-balance
    623  *       mode.
    624  *
    625  *     AWB_MODE_OFF: Auto-whitebalance is disabled. User controls color matrix.
    626  *
    627  *     AWB_MODE_AUTO: Automatic white balance is enabled; 3A controls color
    628  *        transform, possibly using more complex transforms than a simple
    629  *        matrix.
    630  *
    631  *     AWB_MODE_INCANDESCENT: Fixed white balance settings good for indoor
    632  *        incandescent (tungsten) lighting, roughly 2700K.
    633  *
    634  *     AWB_MODE_FLUORESCENT: Fixed white balance settings good for fluorescent
    635  *        lighting, roughly 5000K.
    636  *
    637  *     AWB_MODE_WARM_FLUORESCENT: Fixed white balance settings good for
    638  *        fluorescent lighting, roughly 3000K.
    639  *
    640  *     AWB_MODE_DAYLIGHT: Fixed white balance settings good for daylight,
    641  *        roughly 5500K.
    642  *
    643  *     AWB_MODE_CLOUDY_DAYLIGHT: Fixed white balance settings good for clouded
    644  *        daylight, roughly 6500K.
    645  *
    646  *     AWB_MODE_TWILIGHT: Fixed white balance settings good for
    647  *        near-sunset/sunrise, roughly 15000K.
    648  *
    649  *     AWB_MODE_SHADE: Fixed white balance settings good for areas indirectly
    650  *        lit by the sun, roughly 7500K.
    651  *
    652  *   ANDROID_CONTROL_AWB_STATE: Dynamic metadata describing the current AWB
    653  *       algorithm state, reported by the HAL in the result metadata.
    654  *
    655  *     AWB_STATE_INACTIVE: Initial AWB state after mode switch. When the device
    656  *         is opened, it must start in this state.
    657  *
    658  *     AWB_STATE_SEARCHING: AWB is not converged to a good value, and is
    659  *         changing color adjustment parameters.
    660  *
    661  *     AWB_STATE_CONVERGED: AWB has found good color adjustment values for the
    662  *         current scene, and the parameters are not changing. HAL may
     663  *         spontaneously leave this state to search for a better solution.
    664  *
    665  *     AWB_STATE_LOCKED: AWB has been locked with the AWB_LOCK control. Color
    666  *         adjustment values are not changing.
    667  *
    668  *  Additional metadata entries:
    669  *
    670  *   ANDROID_CONTROL_AWB_LOCK: Control for locking AWB color adjustments to
    671  *       their current values.
    672  *
    673  *   ANDROID_CONTROL_AWB_REGIONS: Control for selecting the regions of the FOV
    674  *       that should be used to determine good color balance. This applies only
    675  *       to auto-WB mode.
    676  *
    677  * S4.4. General state machine transition notes
    678  *
    679  *   Switching between AF, AE, or AWB modes always resets the algorithm's state
    680  *   to INACTIVE.  Similarly, switching between CONTROL_MODE or
    681  *   CONTROL_SCENE_MODE if CONTROL_MODE == USE_SCENE_MODE resets all the
    682  *   algorithm states to INACTIVE.
    683  *
    684  *   The tables below are per-mode.
    685  *
    686  * S4.5. AF state machines
    687  *
    688  *                       when enabling AF or changing AF mode
    689  *| state              | trans. cause  | new state          | notes            |
    690  *+--------------------+---------------+--------------------+------------------+
    691  *| Any                | AF mode change| INACTIVE           |                  |
    692  *+--------------------+---------------+--------------------+------------------+
    693  *
    694  *                            mode = AF_MODE_OFF or AF_MODE_EDOF
    695  *| state              | trans. cause  | new state          | notes            |
    696  *+--------------------+---------------+--------------------+------------------+
    697  *| INACTIVE           |               | INACTIVE           | Never changes    |
    698  *+--------------------+---------------+--------------------+------------------+
    699  *
    700  *                            mode = AF_MODE_AUTO or AF_MODE_MACRO
    701  *| state              | trans. cause  | new state          | notes            |
    702  *+--------------------+---------------+--------------------+------------------+
    703  *| INACTIVE           | AF_TRIGGER    | ACTIVE_SCAN        | Start AF sweep   |
    704  *|                    |               |                    | Lens now moving  |
    705  *+--------------------+---------------+--------------------+------------------+
    706  *| ACTIVE_SCAN        | AF sweep done | FOCUSED_LOCKED     | If AF successful |
    707  *|                    |               |                    | Lens now locked  |
    708  *+--------------------+---------------+--------------------+------------------+
     709  *| ACTIVE_SCAN        | AF sweep done | NOT_FOCUSED_LOCKED | If AF fails      |
    710  *|                    |               |                    | Lens now locked  |
    711  *+--------------------+---------------+--------------------+------------------+
    712  *| ACTIVE_SCAN        | AF_CANCEL     | INACTIVE           | Cancel/reset AF  |
    713  *|                    |               |                    | Lens now locked  |
    714  *+--------------------+---------------+--------------------+------------------+
    715  *| FOCUSED_LOCKED     | AF_CANCEL     | INACTIVE           | Cancel/reset AF  |
    716  *+--------------------+---------------+--------------------+------------------+
    717  *| FOCUSED_LOCKED     | AF_TRIGGER    | ACTIVE_SCAN        | Start new sweep  |
    718  *|                    |               |                    | Lens now moving  |
    719  *+--------------------+---------------+--------------------+------------------+
    720  *| NOT_FOCUSED_LOCKED | AF_CANCEL     | INACTIVE           | Cancel/reset AF  |
    721  *+--------------------+---------------+--------------------+------------------+
    722  *| NOT_FOCUSED_LOCKED | AF_TRIGGER    | ACTIVE_SCAN        | Start new sweep  |
    723  *|                    |               |                    | Lens now moving  |
    724  *+--------------------+---------------+--------------------+------------------+
    725  *| All states         | mode change   | INACTIVE           |                  |
    726  *+--------------------+---------------+--------------------+------------------+
    727  *
    728  *                            mode = AF_MODE_CONTINUOUS_VIDEO
    729  *| state              | trans. cause  | new state          | notes            |
    730  *+--------------------+---------------+--------------------+------------------+
    731  *| INACTIVE           | HAL initiates | PASSIVE_SCAN       | Start AF scan    |
    732  *|                    | new scan      |                    | Lens now moving  |
    733  *+--------------------+---------------+--------------------+------------------+
    734  *| INACTIVE           | AF_TRIGGER    | NOT_FOCUSED_LOCKED | AF state query   |
    735  *|                    |               |                    | Lens now locked  |
    736  *+--------------------+---------------+--------------------+------------------+
    737  *| PASSIVE_SCAN       | HAL completes | PASSIVE_FOCUSED    | End AF scan      |
    738  *|                    | current scan  |                    | Lens now locked  |
    739  *+--------------------+---------------+--------------------+------------------+
    740  *| PASSIVE_SCAN       | HAL fails     | PASSIVE_UNFOCUSED  | End AF scan      |
    741  *|                    | current scan  |                    | Lens now locked  |
    742  *+--------------------+---------------+--------------------+------------------+
    743  *| PASSIVE_SCAN       | AF_TRIGGER    | FOCUSED_LOCKED     | Immediate trans. |
    744  *|                    |               |                    | if focus is good |
    745  *|                    |               |                    | Lens now locked  |
    746  *+--------------------+---------------+--------------------+------------------+
    747  *| PASSIVE_SCAN       | AF_TRIGGER    | NOT_FOCUSED_LOCKED | Immediate trans. |
    748  *|                    |               |                    | if focus is bad  |
    749  *|                    |               |                    | Lens now locked  |
    750  *+--------------------+---------------+--------------------+------------------+
    751  *| PASSIVE_SCAN       | AF_CANCEL     | INACTIVE           | Reset lens       |
    752  *|                    |               |                    | position         |
    753  *|                    |               |                    | Lens now locked  |
    754  *+--------------------+---------------+--------------------+------------------+
    755  *| PASSIVE_FOCUSED    | HAL initiates | PASSIVE_SCAN       | Start AF scan    |
    756  *|                    | new scan      |                    | Lens now moving  |
    757  *+--------------------+---------------+--------------------+------------------+
    758  *| PASSIVE_UNFOCUSED  | HAL initiates | PASSIVE_SCAN       | Start AF scan    |
    759  *|                    | new scan      |                    | Lens now moving  |
    760  *+--------------------+---------------+--------------------+------------------+
    761  *| PASSIVE_FOCUSED    | AF_TRIGGER    | FOCUSED_LOCKED     | Immediate trans. |
    762  *|                    |               |                    | Lens now locked  |
    763  *+--------------------+---------------+--------------------+------------------+
    764  *| PASSIVE_UNFOCUSED  | AF_TRIGGER    | NOT_FOCUSED_LOCKED | Immediate trans. |
    765  *|                    |               |                    | Lens now locked  |
    766  *+--------------------+---------------+--------------------+------------------+
    767  *| FOCUSED_LOCKED     | AF_TRIGGER    | FOCUSED_LOCKED     | No effect        |
    768  *+--------------------+---------------+--------------------+------------------+
    769  *| FOCUSED_LOCKED     | AF_CANCEL     | INACTIVE           | Restart AF scan  |
    770  *+--------------------+---------------+--------------------+------------------+
    771  *| NOT_FOCUSED_LOCKED | AF_TRIGGER    | NOT_FOCUSED_LOCKED | No effect        |
    772  *+--------------------+---------------+--------------------+------------------+
    773  *| NOT_FOCUSED_LOCKED | AF_CANCEL     | INACTIVE           | Restart AF scan  |
    774  *+--------------------+---------------+--------------------+------------------+
    775  *
    776  *                            mode = AF_MODE_CONTINUOUS_PICTURE
    777  *| state              | trans. cause  | new state          | notes            |
    778  *+--------------------+---------------+--------------------+------------------+
    779  *| INACTIVE           | HAL initiates | PASSIVE_SCAN       | Start AF scan    |
    780  *|                    | new scan      |                    | Lens now moving  |
    781  *+--------------------+---------------+--------------------+------------------+
    782  *| INACTIVE           | AF_TRIGGER    | NOT_FOCUSED_LOCKED | AF state query   |
    783  *|                    |               |                    | Lens now locked  |
    784  *+--------------------+---------------+--------------------+------------------+
    785  *| PASSIVE_SCAN       | HAL completes | PASSIVE_FOCUSED    | End AF scan      |
    786  *|                    | current scan  |                    | Lens now locked  |
    787  *+--------------------+---------------+--------------------+------------------+
    788  *| PASSIVE_SCAN       | HAL fails     | PASSIVE_UNFOCUSED  | End AF scan      |
    789  *|                    | current scan  |                    | Lens now locked  |
    790  *+--------------------+---------------+--------------------+------------------+
    791  *| PASSIVE_SCAN       | AF_TRIGGER    | FOCUSED_LOCKED     | Eventual trans.  |
    792  *|                    |               |                    | once focus good  |
    793  *|                    |               |                    | Lens now locked  |
    794  *+--------------------+---------------+--------------------+------------------+
    795  *| PASSIVE_SCAN       | AF_TRIGGER    | NOT_FOCUSED_LOCKED | Eventual trans.  |
    796  *|                    |               |                    | if cannot focus  |
    797  *|                    |               |                    | Lens now locked  |
    798  *+--------------------+---------------+--------------------+------------------+
    799  *| PASSIVE_SCAN       | AF_CANCEL     | INACTIVE           | Reset lens       |
    800  *|                    |               |                    | position         |
    801  *|                    |               |                    | Lens now locked  |
    802  *+--------------------+---------------+--------------------+------------------+
    803  *| PASSIVE_FOCUSED    | HAL initiates | PASSIVE_SCAN       | Start AF scan    |
    804  *|                    | new scan      |                    | Lens now moving  |
    805  *+--------------------+---------------+--------------------+------------------+
    806  *| PASSIVE_UNFOCUSED  | HAL initiates | PASSIVE_SCAN       | Start AF scan    |
    807  *|                    | new scan      |                    | Lens now moving  |
    808  *+--------------------+---------------+--------------------+------------------+
    809  *| PASSIVE_FOCUSED    | AF_TRIGGER    | FOCUSED_LOCKED     | Immediate trans. |
    810  *|                    |               |                    | Lens now locked  |
    811  *+--------------------+---------------+--------------------+------------------+
    812  *| PASSIVE_UNFOCUSED  | AF_TRIGGER    | NOT_FOCUSED_LOCKED | Immediate trans. |
    813  *|                    |               |                    | Lens now locked  |
    814  *+--------------------+---------------+--------------------+------------------+
    815  *| FOCUSED_LOCKED     | AF_TRIGGER    | FOCUSED_LOCKED     | No effect        |
    816  *+--------------------+---------------+--------------------+------------------+
    817  *| FOCUSED_LOCKED     | AF_CANCEL     | INACTIVE           | Restart AF scan  |
    818  *+--------------------+---------------+--------------------+------------------+
    819  *| NOT_FOCUSED_LOCKED | AF_TRIGGER    | NOT_FOCUSED_LOCKED | No effect        |
    820  *+--------------------+---------------+--------------------+------------------+
    821  *| NOT_FOCUSED_LOCKED | AF_CANCEL     | INACTIVE           | Restart AF scan  |
    822  *+--------------------+---------------+--------------------+------------------+
    823  *
    824  * S4.6. AE and AWB state machines
    825  *
    826  *   The AE and AWB state machines are mostly identical. AE has additional
     827  *   FLASH_REQUIRED and PRECAPTURE states, so rows below that refer to those two
     828  *   states should be ignored for the AWB state machine.
    829  *
    830  *                  when enabling AE/AWB or changing AE/AWB mode
    831  *| state              | trans. cause  | new state          | notes            |
    832  *+--------------------+---------------+--------------------+------------------+
    833  *| Any                |  mode change  | INACTIVE           |                  |
    834  *+--------------------+---------------+--------------------+------------------+
    835  *
    836  *                            mode = AE_MODE_OFF / AWB mode not AUTO
    837  *| state              | trans. cause  | new state          | notes            |
    838  *+--------------------+---------------+--------------------+------------------+
    839  *| INACTIVE           |               | INACTIVE           | AE/AWB disabled  |
    840  *+--------------------+---------------+--------------------+------------------+
    841  *
    842  *                            mode = AE_MODE_ON_* / AWB_MODE_AUTO
    843  *| state              | trans. cause  | new state          | notes            |
    844  *+--------------------+---------------+--------------------+------------------+
    845  *| INACTIVE           | HAL initiates | SEARCHING          |                  |
    846  *|                    | AE/AWB scan   |                    |                  |
    847  *+--------------------+---------------+--------------------+------------------+
    848  *| INACTIVE           | AE/AWB_LOCK   | LOCKED             | values locked    |
    849  *|                    | on            |                    |                  |
    850  *+--------------------+---------------+--------------------+------------------+
    851  *| SEARCHING          | HAL finishes  | CONVERGED          | good values, not |
    852  *|                    | AE/AWB scan   |                    | changing         |
    853  *+--------------------+---------------+--------------------+------------------+
    854  *| SEARCHING          | HAL finishes  | FLASH_REQUIRED     | converged but too|
    855  *|                    | AE scan       |                    | dark w/o flash   |
    856  *+--------------------+---------------+--------------------+------------------+
    857  *| SEARCHING          | AE/AWB_LOCK   | LOCKED             | values locked    |
    858  *|                    | on            |                    |                  |
    859  *+--------------------+---------------+--------------------+------------------+
    860  *| CONVERGED          | HAL initiates | SEARCHING          | values locked    |
    861  *|                    | AE/AWB scan   |                    |                  |
    862  *+--------------------+---------------+--------------------+------------------+
    863  *| CONVERGED          | AE/AWB_LOCK   | LOCKED             | values locked    |
    864  *|                    | on            |                    |                  |
    865  *+--------------------+---------------+--------------------+------------------+
    866  *| FLASH_REQUIRED     | HAL initiates | SEARCHING          | values locked    |
    867  *|                    | AE/AWB scan   |                    |                  |
    868  *+--------------------+---------------+--------------------+------------------+
    869  *| FLASH_REQUIRED     | AE/AWB_LOCK   | LOCKED             | values locked    |
    870  *|                    | on            |                    |                  |
    871  *+--------------------+---------------+--------------------+------------------+
    872  *| LOCKED             | AE/AWB_LOCK   | SEARCHING          | values not good  |
    873  *|                    | off           |                    | after unlock     |
    874  *+--------------------+---------------+--------------------+------------------+
    875  *| LOCKED             | AE/AWB_LOCK   | CONVERGED          | values good      |
    876  *|                    | off           |                    | after unlock     |
    877  *+--------------------+---------------+--------------------+------------------+
    878  *| LOCKED             | AE_LOCK       | FLASH_REQUIRED     | exposure good,   |
    879  *|                    | off           |                    | but too dark     |
    880  *+--------------------+---------------+--------------------+------------------+
    881  *| All AE states      | PRECAPTURE_   | PRECAPTURE         | Start precapture |
    882  *|                    | START         |                    | sequence         |
    883  *+--------------------+---------------+--------------------+------------------+
    884  *| PRECAPTURE         | Sequence done.| CONVERGED          | Ready for high-  |
    885  *|                    | AE_LOCK off   |                    | quality capture  |
    886  *+--------------------+---------------+--------------------+------------------+
    887  *| PRECAPTURE         | Sequence done.| LOCKED             | Ready for high-  |
    888  *|                    | AE_LOCK on    |                    | quality capture  |
    889  *+--------------------+---------------+--------------------+------------------+
    890  *
    891  */
    892 
    893 /**
    894  * S5. Cropping:
    895  *
    896  * Cropping of the full pixel array (for digital zoom and other use cases where
    897  * a smaller FOV is desirable) is communicated through the
    898  * ANDROID_SCALER_CROP_REGION setting. This is a per-request setting, and can
    899  * change on a per-request basis, which is critical for implementing smooth
    900  * digital zoom.
    901  *
    902  * The region is defined as a rectangle (x, y, width, height), with (x, y)
    903  * describing the top-left corner of the rectangle. The rectangle is defined on
    904  * the coordinate system of the sensor active pixel array, with (0,0) being the
    905  * top-left pixel of the active pixel array. Therefore, the width and height
    906  * cannot be larger than the dimensions reported in the
    907  * ANDROID_SENSOR_ACTIVE_PIXEL_ARRAY static info field. The minimum allowed
    908  * width and height are reported by the HAL through the
    909  * ANDROID_SCALER_MAX_DIGITAL_ZOOM static info field, which describes the
    910  * maximum supported zoom factor. Therefore, the minimum crop region width and
    911  * height are:
    912  *
    913  * {width, height} =
    914  *    { floor(ANDROID_SENSOR_ACTIVE_PIXEL_ARRAY[0] /
    915  *        ANDROID_SCALER_MAX_DIGITAL_ZOOM),
    916  *      floor(ANDROID_SENSOR_ACTIVE_PIXEL_ARRAY[1] /
    917  *        ANDROID_SCALER_MAX_DIGITAL_ZOOM) }
    918  *
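          * For example, on the hypothetical 2000 x 1500 pixel-array sensor used in the
          * diagrams below, an ANDROID_SCALER_MAX_DIGITAL_ZOOM of 4.0 (a value assumed
          * here purely for illustration) would give a minimum crop region of
          * floor(2000 / 4.0) x floor(1500 / 4.0) = 500 x 375 pixels.
          *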
    919  * If the crop region needs to fulfill specific requirements (for example, it
    920  * needs to start on even coordinates, and its width/height needs to be even),
    921  * the HAL must do the necessary rounding and write out the final crop region
    922  * used in the output result metadata. Similarly, if the HAL implements video
    923  * stabilization, it must adjust the result crop region to describe the region
    924  * actually included in the output after video stabilization is applied. In
    925  * general, a camera-using application must be able to determine the field of
    926  * view it is receiving based on the crop region, the dimensions of the image
    927  * sensor, and the lens focal length.
    928  *
     929  * It is assumed that the cropping is applied after the conversion from raw data
     930  * to other color spaces. Raw streams (RAW16 and RAW_OPAQUE) don't have this
     931  * conversion stage, and are not croppable. Therefore, the crop region must be
     932  * ignored by the HAL for raw streams.
    933  *
    934  * Since the crop region applies to all non-raw streams, which may have different aspect
    935  * ratios than the crop region, the exact sensor region used for each stream may
    936  * be smaller than the crop region. Specifically, each stream should maintain
    937  * square pixels and its aspect ratio by minimally further cropping the defined
    938  * crop region. If the stream's aspect ratio is wider than the crop region, the
    939  * stream should be further cropped vertically, and if the stream's aspect ratio
    940  * is narrower than the crop region, the stream should be further cropped
    941  * horizontally.
    942  *
    943  * In all cases, the stream crop must be centered within the full crop region,
     944  * and each stream is only either cropped horizontally or vertically relative to
    945  * the full crop region, never both.
    946  *
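          * A minimal sketch of this rule is shown below; the helper name and the fixed
          * rounding (truncation toward zero, remainder split evenly) are illustrative
          * only, since the exact rounding behavior is left to the HAL.
          *
          *    // crop and out are {x, y, width, height} rectangles in active-array
          *    // coordinates; stream_w/stream_h are the stream's output dimensions.
          *    static void stream_crop(const int32_t crop[4],
          *            int32_t stream_w, int32_t stream_h, int32_t out[4]) {
          *        if ((int64_t) stream_w * crop[3] > (int64_t) stream_h * crop[2]) {
          *            // Stream is wider than the crop region: crop vertically.
          *            out[2] = crop[2];
          *            out[3] = (int32_t)((int64_t) crop[2] * stream_h / stream_w);
          *            out[0] = crop[0];
          *            out[1] = crop[1] + (crop[3] - out[3]) / 2;
          *        } else {
          *            // Stream is narrower (or equal): crop horizontally.
          *            out[3] = crop[3];
          *            out[2] = (int32_t)((int64_t) crop[3] * stream_w / stream_h);
          *            out[1] = crop[1];
          *            out[0] = crop[0] + (crop[2] - out[2]) / 2;
          *        }
          *    }
          *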
    947  * For example, if two streams are defined, a 640x480 stream (4:3 aspect), and a
    948  * 1280x720 stream (16:9 aspect), below demonstrates the expected output regions
    949  * for each stream for a few sample crop regions, on a hypothetical 3 MP (2000 x
    950  * 1500 pixel array) sensor.
    951  *
    952  * Crop region: (500, 375, 1000, 750) (4:3 aspect ratio)
    953  *
    954  *   640x480 stream crop: (500, 375, 1000, 750) (equal to crop region)
    955  *   1280x720 stream crop: (500, 469, 1000, 562) (marked with =)
    956  *
    957  * 0                   1000               2000
    958  * +---------+---------+---------+----------+
    959  * | Active pixel array                     |
    960  * |                                        |
    961  * |                                        |
    962  * +         +-------------------+          + 375
    963  * |         |                   |          |
    964  * |         O===================O          |
    965  * |         I 1280x720 stream   I          |
    966  * +         I                   I          + 750
    967  * |         I                   I          |
    968  * |         O===================O          |
    969  * |         |                   |          |
    970  * +         +-------------------+          + 1125
    971  * |          Crop region, 640x480 stream   |
    972  * |                                        |
    973  * |                                        |
    974  * +---------+---------+---------+----------+ 1500
    975  *
    976  * Crop region: (500, 375, 1333, 750) (16:9 aspect ratio)
    977  *
    978  *   640x480 stream crop: (666, 375, 1000, 750) (marked with =)
    979  *   1280x720 stream crop: (500, 375, 1333, 750) (equal to crop region)
    980  *
    981  * 0                   1000               2000
    982  * +---------+---------+---------+----------+
    983  * | Active pixel array                     |
    984  * |                                        |
    985  * |                                        |
    986  * +         +---O==================O---+   + 375
    987  * |         |   I 640x480 stream   I   |   |
    988  * |         |   I                  I   |   |
    989  * |         |   I                  I   |   |
    990  * +         |   I                  I   |   + 750
    991  * |         |   I                  I   |   |
    992  * |         |   I                  I   |   |
    993  * |         |   I                  I   |   |
    994  * +         +---O==================O---+   + 1125
    995  * |          Crop region, 1280x720 stream  |
    996  * |                                        |
    997  * |                                        |
    998  * +---------+---------+---------+----------+ 1500
    999  *
   1000  * Crop region: (500, 375, 750, 750) (1:1 aspect ratio)
   1001  *
   1002  *   640x480 stream crop: (500, 469, 750, 562) (marked with =)
    1003  *   1280x720 stream crop: (500, 543, 750, 414) (marked with #)
   1004  *
   1005  * 0                   1000               2000
   1006  * +---------+---------+---------+----------+
   1007  * | Active pixel array                     |
   1008  * |                                        |
   1009  * |                                        |
   1010  * +         +--------------+               + 375
   1011  * |         O==============O               |
   1012  * |         ################               |
   1013  * |         #              #               |
   1014  * +         #              #               + 750
   1015  * |         #              #               |
   1016  * |         ################ 1280x720      |
   1017  * |         O==============O 640x480       |
   1018  * +         +--------------+               + 1125
   1019  * |          Crop region                   |
   1020  * |                                        |
   1021  * |                                        |
   1022  * +---------+---------+---------+----------+ 1500
   1023  *
   1024  * And a final example, a 1024x1024 square aspect ratio stream instead of the
   1025  * 480p stream:
   1026  *
   1027  * Crop region: (500, 375, 1000, 750) (4:3 aspect ratio)
   1028  *
   1029  *   1024x1024 stream crop: (625, 375, 750, 750) (marked with #)
   1030  *   1280x720 stream crop: (500, 469, 1000, 562) (marked with =)
   1031  *
   1032  * 0                   1000               2000
   1033  * +---------+---------+---------+----------+
   1034  * | Active pixel array                     |
   1035  * |                                        |
   1036  * |              1024x1024 stream          |
   1037  * +         +--###############--+          + 375
   1038  * |         |  #             #  |          |
   1039  * |         O===================O          |
   1040  * |         I 1280x720 stream   I          |
   1041  * +         I                   I          + 750
   1042  * |         I                   I          |
   1043  * |         O===================O          |
   1044  * |         |  #             #  |          |
   1045  * +         +--###############--+          + 1125
   1046  * |          Crop region                   |
   1047  * |                                        |
   1048  * |                                        |
   1049  * +---------+---------+---------+----------+ 1500
   1050  *
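 * As an illustration only (not part of the HAL interface), a minimal sketch of
 * the per-stream crop computation described above, assuming integer pixel
 * coordinates and ignoring any HAL-specific rounding or alignment requirements:
 *
 *    // Compute the centered sub-rectangle of the requested crop region that
 *    // matches the output stream's aspect ratio, cropping in one dimension only.
 *    static void stream_crop(int32_t crop_x, int32_t crop_y,
 *                            int32_t crop_w, int32_t crop_h,
 *                            uint32_t stream_w, uint32_t stream_h,
 *                            int32_t out[4]) {
 *        int32_t w = crop_w, h = crop_h;
 *        if ((int64_t)stream_w * crop_h > (int64_t)stream_h * crop_w) {
 *            // Stream is wider than the crop region: crop vertically.
 *            h = (int32_t)((int64_t)crop_w * stream_h / stream_w);
 *        } else {
 *            // Stream is narrower than (or equal to) the crop region: crop horizontally.
 *            w = (int32_t)((int64_t)crop_h * stream_w / stream_h);
 *        }
 *        out[0] = crop_x + (crop_w - w) / 2;   // x offset, centered
 *        out[1] = crop_y + (crop_h - h) / 2;   // y offset, centered
 *        out[2] = w;
 *        out[3] = h;
 *    }
 *
 * For the first example above, stream_crop(500, 375, 1000, 750, 1280, 720, out)
 * yields (500, 469, 1000, 562), matching the 1280x720 stream crop in the diagram.
 *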
   1051  */
   1052 
   1053 /**
   1054  * S6. Error management:
   1055  *
   1056  * Camera HAL device ops functions that have a return value will all return
   1057  * -ENODEV / NULL in case of a serious error. This means the device cannot
   1058  * continue operation, and must be closed by the framework. Once this error is
   1059  * returned by some method, or if notify() is called with ERROR_DEVICE, only
   1060  * the close() method can be called successfully. All other methods will return
   1061  * -ENODEV / NULL.
   1062  *
    1063  * If a device op is called in the wrong sequence, for example if the framework
    1064  * calls configure_streams() before initialize(), the device must
   1065  * return -ENOSYS from the call, and do nothing.
   1066  *
   1067  * Transient errors in image capture must be reported through notify() as follows:
   1068  *
   1069  * - The failure of an entire capture to occur must be reported by the HAL by
   1070  *   calling notify() with ERROR_REQUEST. Individual errors for the result
   1071  *   metadata or the output buffers must not be reported in this case.
   1072  *
   1073  * - If the metadata for a capture cannot be produced, but some image buffers
   1074  *   were filled, the HAL must call notify() with ERROR_RESULT.
   1075  *
   1076  * - If an output image buffer could not be filled, but either the metadata was
   1077  *   produced or some other buffers were filled, the HAL must call notify() with
   1078  *   ERROR_BUFFER for each failed buffer.
   1079  *
   1080  * In each of these transient failure cases, the HAL must still call
   1081  * process_capture_result, with valid output and input (if an input buffer was
   1082  * submitted) buffer_handle_t. If the result metadata could not be produced, it
   1083  * should be NULL. If some buffers could not be filled, they must be returned with
   1084  * process_capture_result in the error state, their release fences must be set to
   1085  * the acquire fences passed by the framework, or -1 if they have been waited on by
   1086  * the HAL already.
   1087  *
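 * As an illustrative sketch only (the callback structures are defined later in
 * this header; locking, request bookkeeping, and the full result assembly are
 * omitted, and "cb", "frame_number" and "failed_buffer" are hypothetical HAL-side
 * names), reporting a single failed output buffer might look roughly like:
 *
 *    camera3_notify_msg_t msg = {
 *        .type = CAMERA3_MSG_ERROR,
 *        .message.error = {
 *            .frame_number = frame_number,
 *            .error_stream = failed_buffer->stream,
 *            .error_code   = CAMERA3_MSG_ERROR_BUFFER,
 *        },
 *    };
 *    cb->notify(cb, &msg);                      // cb is the camera3_callback_ops_t*
 *
 *    // The failed buffer is still returned via process_capture_result(), marked
 *    // as errored, with its release fence set to the not-yet-waited-on acquire
 *    // fence (or -1 if the HAL already waited on it).
 *    failed_buffer->status        = CAMERA3_BUFFER_STATUS_ERROR;
 *    failed_buffer->release_fence = failed_buffer->acquire_fence;
 *    failed_buffer->acquire_fence = -1;
 *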
   1088  * Invalid input arguments result in -EINVAL from the appropriate methods. In
   1089  * that case, the framework must act as if that call had never been made.
   1090  *
   1091  */
   1092 
   1093 /**
   1094  * S7. Key Performance Indicator (KPI) glossary:
   1095  *
   1096  * This includes some critical definitions that are used by KPI metrics.
   1097  *
   1098  * Pipeline Latency:
   1099  *  For a given capture request, the duration from the framework calling
    1100  *  process_capture_request to the HAL sending the capture result and all buffers
    1101  *  back via the process_capture_result call. To make the Pipeline Latency measure
   1102  *  independent of frame rate, it is measured by frame count.
   1103  *
    1104  *  For example, when the frame rate is 30 (fps), the frame duration (time interval
    1105  *  between adjacent frame capture times) is 33 (ms).
    1106  *  If it takes 5 frames for the framework to get the result and buffers back for
   1107  *  a given request, then the Pipeline Latency is 5 (frames), instead of
   1108  *  5 x 33 = 165 (ms).
   1109  *
   1110  *  The Pipeline Latency is determined by android.request.pipelineDepth and
   1111  *  android.request.pipelineMaxDepth, see their definitions for more details.
   1112  *
   1113  */
   1114 
   1115 /**
   1116  * S8. Sample Use Cases:
   1117  *
   1118  * This includes some typical use case examples the camera HAL may support.
   1119  *
   1120  * S8.1 Zero Shutter Lag (ZSL) with CAMERA3_STREAM_BIDIRECTIONAL stream.
   1121  *
   1122  *   For this use case, the bidirectional stream will be used by the framework as follows:
   1123  *
   1124  *   1. The framework includes a buffer from this stream as output buffer in a
   1125  *      request as normal.
   1126  *
   1127  *   2. Once the HAL device returns a filled output buffer to the framework,
   1128  *      the framework may do one of two things with the filled buffer:
   1129  *
   1130  *   2. a. The framework uses the filled data, and returns the now-used buffer
   1131  *         to the stream queue for reuse. This behavior exactly matches the
   1132  *         OUTPUT type of stream.
   1133  *
   1134  *   2. b. The framework wants to reprocess the filled data, and uses the
   1135  *         buffer as an input buffer for a request. Once the HAL device has
   1136  *         used the reprocessing buffer, it then returns it to the
   1137  *         framework. The framework then returns the now-used buffer to the
   1138  *         stream queue for reuse.
   1139  *
   1140  *   3. The HAL device will be given the buffer again as an output buffer for
   1141  *        a request at some future point.
   1142  *
    1143  *   For the ZSL use case, the pixel format for the bidirectional stream will be
   1144  *   HAL_PIXEL_FORMAT_RAW_OPAQUE or HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED if it
   1145  *   is listed in android.scaler.availableInputOutputFormatsMap. When
   1146  *   HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED is used, the gralloc
   1147  *   usage flags for the consumer endpoint will be set to GRALLOC_USAGE_HW_CAMERA_ZSL.
    1148  *   A configuration stream list that has a BIDIRECTIONAL stream used as input will
   1149  *   usually also have a distinct OUTPUT stream to get the reprocessing data. For example,
   1150  *   for the ZSL use case, the stream list might be configured with the following:
   1151  *
   1152  *     - A HAL_PIXEL_FORMAT_RAW_OPAQUE bidirectional stream is used
   1153  *       as input.
   1154  *     - And a HAL_PIXEL_FORMAT_BLOB (JPEG) output stream.
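 *
 *   As a rough illustration only (the stream structures are defined later in this
 *   header, and the sizes shown are hypothetical, device-specific choices), such a
 *   configuration passed to configure_streams() might contain:
 *
 *     camera3_stream_t zsl_bidir = {
 *         .stream_type = CAMERA3_STREAM_BIDIRECTIONAL,
 *         .width = 4000, .height = 3000,              // full-resolution ZSL buffers
 *         .format = HAL_PIXEL_FORMAT_RAW_OPAQUE,
 *     };
 *     camera3_stream_t jpeg_out = {
 *         .stream_type = CAMERA3_STREAM_OUTPUT,
 *         .width = 4000, .height = 3000,
 *         .format = HAL_PIXEL_FORMAT_BLOB,            // JPEG output
 *     };
 *     camera3_stream_t *streams[] = { &zsl_bidir, &jpeg_out };
 *     camera3_stream_configuration_t config = { .num_streams = 2, .streams = streams };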
   1155  *
   1156  * S8.2 ZSL (OPAQUE) reprocessing with CAMERA3_STREAM_INPUT stream.
   1157  *
   1158  * CAMERA_DEVICE_API_VERSION_3_3:
   1159  *   When OPAQUE_REPROCESSING capability is supported by the camera device, the INPUT stream
    1160  *   can be used for application/framework-implemented use cases like Zero Shutter Lag (ZSL).
   1161  *   This kind of stream will be used by the framework as follows:
   1162  *
   1163  *   1. Application/framework configures an opaque (RAW or YUV based) format output stream that is
   1164  *      used to produce the ZSL output buffers. The stream pixel format will be
   1165  *      HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED.
   1166  *
   1167  *   2. Application/framework configures an opaque format input stream that is used to
   1168  *      send the reprocessing ZSL buffers to the HAL. The stream pixel format will
   1169  *      also be HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED.
   1170  *
   1171  *   3. Application/framework configures a YUV/JPEG output stream that is used to receive the
   1172  *      reprocessed data. The stream pixel format will be YCbCr_420/HAL_PIXEL_FORMAT_BLOB.
   1173  *
   1174  *   4. Application/framework picks a ZSL buffer from the ZSL output stream when a ZSL capture is
   1175  *      issued by the application, and sends the data back as an input buffer in a
    1176  *      reprocessing request, then sends it to the HAL for reprocessing.
    1177  *
    1178  *   5. The HAL sends back the output YUV/JPEG result to the framework.
   1179  *
   1180  *   The HAL can select the actual opaque buffer format and configure the ISP pipeline
   1181  *   appropriately based on the HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED format and
   1182  *   the gralloc usage flag GRALLOC_USAGE_HW_CAMERA_ZSL.
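 *
 *   As an illustration only (sizes are hypothetical, device-specific choices, and the
 *   stream structures are defined later in this header), the three streams from steps
 *   1-3 might be configured as:
 *
 *     camera3_stream_t zsl_output = {
 *         .stream_type = CAMERA3_STREAM_OUTPUT,
 *         .width = 4000, .height = 3000,
 *         .format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,   // opaque ZSL output
 *     };
 *     camera3_stream_t zsl_input = {
 *         .stream_type = CAMERA3_STREAM_INPUT,
 *         .width = 4000, .height = 3000,
 *         .format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,   // reprocessing input
 *     };
 *     camera3_stream_t jpeg_output = {
 *         .stream_type = CAMERA3_STREAM_OUTPUT,
 *         .width = 4000, .height = 3000,
 *         .format = HAL_PIXEL_FORMAT_BLOB,                     // reprocessed JPEG
 *     };
 *     camera3_stream_t *streams[] = { &zsl_output, &zsl_input, &jpeg_output };
 *     camera3_stream_configuration_t config = { .num_streams = 3, .streams = streams };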
    1183  *
   1184  * S8.3 YUV reprocessing with CAMERA3_STREAM_INPUT stream.
   1185  *
   1186  *   When YUV reprocessing is supported by the HAL, the INPUT stream
   1187  *   can be used for the YUV reprocessing use cases like lucky-shot and image fusion.
   1188  *   This kind of stream will be used by the framework as follows:
   1189  *
    1190  *   1. Application/framework configures a YCbCr_420 format output stream that is
   1191  *      used to produce the output buffers.
   1192  *
    1193  *   2. Application/framework configures a YCbCr_420 format input stream that is used to
   1194  *      send the reprocessing YUV buffers to the HAL.
   1195  *
   1196  *   3. Application/framework configures a YUV/JPEG output stream that is used to receive the
   1197  *      reprocessed data. The stream pixel format will be YCbCr_420/HAL_PIXEL_FORMAT_BLOB.
   1198  *
   1199  *   4. Application/framework processes the output buffers (could be as simple as picking
   1200  *      an output buffer directly) from the output stream when a capture is issued, and sends
    1201  *      the data back as an input buffer in a reprocessing request, then sends it to the HAL
    1202  *      for reprocessing.
    1203  *
    1204  *   5. The HAL sends back the output YUV/JPEG result to the framework.
   1205  *
   1206  */
   1207 
   1208 /**
   1209  *   S9. Notes on Controls and Metadata
   1210  *
   1211  *   This section contains notes about the interpretation and usage of various metadata tags.
   1212  *
   1213  *   S9.1 HIGH_QUALITY and FAST modes.
   1214  *
   1215  *   Many camera post-processing blocks may be listed as having HIGH_QUALITY,
   1216  *   FAST, and OFF operating modes. These blocks will typically also have an
   1217  *   'available modes' tag representing which of these operating modes are
   1218  *   available on a given device. The general policy regarding implementing
   1219  *   these modes is as follows:
   1220  *
   1221  *   1. Operating mode controls of hardware blocks that cannot be disabled
   1222  *      must not list OFF in their corresponding 'available modes' tags.
   1223  *
   1224  *   2. OFF will always be included in their corresponding 'available modes'
   1225  *      tag if it is possible to disable that hardware block.
   1226  *
   1227  *   3. FAST must always be included in the 'available modes' tags for all
   1228  *      post-processing blocks supported on the device.  If a post-processing
   1229  *      block also has a slower and higher quality operating mode that does
   1230  *      not meet the framerate requirements for FAST mode, HIGH_QUALITY should
   1231  *      be included in the 'available modes' tag to represent this operating
   1232  *      mode.
   1233  */
   1234 
   1235 /**
   1236  *   S10. Reprocessing flow and controls
   1237  *
   1238  *   This section describes the OPAQUE and YUV reprocessing flow and controls. OPAQUE reprocessing
   1239  *   uses an opaque format that is not directly application-visible, and the application can
    1240  *   only select some of the output buffers and send them back to the HAL for reprocessing, while
    1241  *   YUV reprocessing gives the application the opportunity to process the buffers before reprocessing.
    1242  *
    1243  *   S8 gives the stream configurations for the typical reprocessing use cases;
    1244  *   this section specifies the buffer flow and controls in more detail.
   1245  *
   1246  *   S10.1 OPAQUE (typically for ZSL use case) reprocessing flow and controls
   1247  *
    1248  *   For the OPAQUE reprocessing (e.g. ZSL) use case, after the application creates the specific
    1249  *   output and input streams, the runtime buffer flow and controls are specified as below:
    1250  *
    1251  *   1. The application starts output streaming by sending repeating requests for output
    1252  *      opaque buffers and preview. The buffers are held in an application-maintained
    1253  *      circular buffer. The requests are based on the CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG
    1254  *      capture template, which should have all necessary settings that guarantee the output
    1255  *      frame rate is not slowed down relative to the sensor output frame rate.
   1256  *
   1257  *   2. When a capture is issued, the application selects one output buffer based
   1258  *      on application buffer selection logic, e.g. good AE and AF statistics etc.
    1259  *      The application then creates a reprocess request based on the capture result associated
    1260  *      with this selected buffer. The selected output buffer is now added to this reprocess
    1261  *      request as an input buffer; the output buffer of this reprocess request should be
    1262  *      either a JPEG output buffer or a YUV output buffer, or both, depending on the
    1263  *      application's choice (see the sketch after this list).
   1264  *
    1265  *   3. The application then alters the reprocess settings to get the best image quality. The HAL
    1266  *      must support, and only support, the controls below if it supports the OPAQUE_REPROCESSING capability:
    1267  *          - android.jpeg.* (if a JPEG buffer is included as one of the outputs)
   1268  *          - android.noiseReduction.mode (change to HIGH_QUALITY if it is supported)
   1269  *          - android.edge.mode (change to HIGH_QUALITY if it is supported)
   1270  *       All other controls must be ignored by the HAL.
    1271  *   4. The HAL processes the input buffer and returns the output buffers in the capture results
    1272  *      as normal.
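 *
 *   As an illustrative sketch only (buffer selection, metadata editing, and error
 *   handling are elided; "next_frame_number", "reprocess_settings", "zsl_buffer" and
 *   "jpeg_buffer" are hypothetical names), the reprocess request submitted in steps
 *   2-4 has roughly this shape:
 *
 *     camera3_capture_request_t request = {
 *         .frame_number       = next_frame_number,
 *         // Copy of the capture result metadata for the selected buffer, with only
 *         // android.jpeg.*, android.noiseReduction.mode and android.edge.mode changed.
 *         .settings           = reprocess_settings,
 *         .input_buffer       = &zsl_buffer,        // the selected opaque buffer
 *         .num_output_buffers = 1,
 *         .output_buffers     = &jpeg_buffer,       // reprocessed JPEG destination
 *     };
 *     dev->ops->process_capture_request(dev, &request);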
   1273  *
   1274  *   S10.2 YUV reprocessing flow and controls
   1275  *
    1276  *   The YUV reprocessing buffer flow is similar to OPAQUE reprocessing, with the differences below:
   1277  *
    1278  *   1. The application may want finer-granularity control of the intermediate YUV images
    1279  *      (before reprocessing). For example, the application may choose
    1280  *          - android.noiseReduction.mode == MINIMAL
    1281  *      to make sure that no YUV-domain noise reduction has been applied to the output YUV buffers,
    1282  *      so that it can do its own advanced noise reduction on them. For the OPAQUE reprocessing case,
    1283  *      this doesn't matter, as long as the final reprocessed image has the best quality.
    1284  *   2. The application may modify the YUV output buffer data. For example, for an image fusion
    1285  *      use case, multiple output images are merged together to improve the signal-to-noise
    1286  *      ratio (SNR), so the input buffer may be generated from multiple buffers by the application.
    1287  *      To avoid an excessive amount of noise reduction and an insufficient amount of edge enhancement
    1288  *      being applied to the input buffer, the application can hint to the HAL how much effective
    1289  *      exposure time improvement has been applied by the application; the HAL can then adjust the
    1290  *      noise reduction and edge enhancement parameters to get the best reprocessed image quality.
    1291  *      The tag below can be used for this purpose:
    1292  *          - android.reprocess.effectiveExposureFactor
    1293  *      The value would be the exposure time increase factor applied to the original output image;
    1294  *      for example, if there are N images merged, the exposure time increase factor would be up
    1295  *      to sqrt(N). See this tag's spec for more details.
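 *
 *      As a rough illustration (assuming the C metadata API from system/camera_metadata.h,
 *      and a hypothetical "reprocess_settings" buffer that does not yet contain this tag),
 *      merging N = 4 frames could be hinted as:
 *
 *          float factor = 2.0f;   // up to sqrt(4) = 2.0 for a 4-frame merge
 *          add_camera_metadata_entry(reprocess_settings,
 *                  ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, &factor, 1);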
   1296  *
   1297  *   S10.3 Reprocessing pipeline characteristics
   1298  *
    1299  *   The reprocessing pipeline has the following characteristics that differ from the normal
    1300  *   output pipeline:
   1301  *
   1302  *   1. The reprocessing result can be returned ahead of the pending normal output results. But
    1303  *      the FIFO ordering must be maintained for all reprocessing results. For example, suppose
    1304  *      the requests below (A stands for output requests, B stands for reprocessing requests)
    1305  *      are being processed by the HAL:
    1306  *          A1, A2, A3, A4, B1, A5, B2, A6...
    1307  *      The result of B1 can be returned before A1-A4, but the result of B2 must be returned after B1.
   1308  *   2. Single input rule: For a given reprocessing request, all output buffers must be from the
    1309  *      input buffer, rather than sensor output. For example, if a reprocess request includes both
    1310  *      JPEG and preview buffers, all output buffers must be produced from the input buffer
    1311  *      included by the reprocessing request, rather than from the sensor. The HAL must not output
    1312  *      preview buffers from the sensor while outputting the JPEG buffer from the input buffer.
    1313  *   3. The input buffer will be from camera output directly (ZSL case) or indirectly (image fusion
    1314  *      case). For the case where the buffer is modified, the size will remain the same. The HAL can
    1315  *      notify CAMERA3_MSG_ERROR_REQUEST if a buffer from an unknown source is sent.
   1316  *   4. Result as reprocessing request: The HAL can expect that a reprocessing request is a copy
   1317  *      of one of the output results with minor allowed setting changes. The HAL can notify
   1318  *      CAMERA3_MSG_ERROR_REQUEST if a request from unknown source is issued.
    1319  *   5. Output buffers may not be used as inputs across the configure_streams() boundary. This is
    1320  *      because an opaque stream like the ZSL output stream may have a different actual image size
    1321  *      inside the ZSL buffer, to save power and bandwidth for smaller-resolution JPEG capture.
    1322  *      The HAL may notify CAMERA3_MSG_ERROR_REQUEST if this case occurs.
    1323  *   6. Error reporting for reprocess requests during flush should follow the same rules specified
    1324  *      by the flush() method.
   1325  *
   1326  */
   1327 
   1328 __BEGIN_DECLS
   1329 
   1330 struct camera3_device;
   1331 
   1332 /**********************************************************************
   1333  *
   1334  * Camera3 stream and stream buffer definitions.
   1335  *
   1336  * These structs and enums define the handles and contents of the input and
   1337  * output streams connecting the HAL to various framework and application buffer
   1338  * consumers. Each stream is backed by a gralloc buffer queue.
   1339  *
   1340  */
   1341 
   1342 /**
   1343  * camera3_stream_type_t:
   1344  *
   1345  * The type of the camera stream, which defines whether the camera HAL device is
   1346  * the producer or the consumer for that stream, and how the buffers of the
   1347  * stream relate to the other streams.
   1348  */
   1349 typedef enum camera3_stream_type {
   1350     /**
   1351      * This stream is an output stream; the camera HAL device will be
   1352      * responsible for filling buffers from this stream with newly captured or
   1353      * reprocessed image data.
   1354      */
   1355     CAMERA3_STREAM_OUTPUT = 0,
   1356 
   1357     /**
   1358      * This stream is an input stream; the camera HAL device will be responsible
   1359      * for reading buffers from this stream and sending them through the camera
   1360      * processing pipeline, as if the buffer was a newly captured image from the
   1361      * imager.
   1362      *
   1363      * The pixel format for input stream can be any format reported by
   1364      * android.scaler.availableInputOutputFormatsMap. The pixel format of the
   1365      * output stream that is used to produce the reprocessing data may be any
   1366      * format reported by android.scaler.availableStreamConfigurations. The
    1367      * supported input/output stream combinations depend on the camera device
    1368      * capabilities; see android.scaler.availableInputOutputFormatsMap for
   1369      * stream map details.
   1370      *
   1371      * This kind of stream is generally used to reprocess data into higher
   1372      * quality images (that otherwise would cause a frame rate performance
   1373      * loss), or to do off-line reprocessing.
   1374      *
   1375      * CAMERA_DEVICE_API_VERSION_3_3:
   1376      *    The typical use cases are OPAQUE (typically ZSL) and YUV reprocessing,
   1377      *    see S8.2, S8.3 and S10 for more details.
   1378      */
   1379     CAMERA3_STREAM_INPUT = 1,
   1380 
   1381     /**
   1382      * This stream can be used for input and output. Typically, the stream is
   1383      * used as an output stream, but occasionally one already-filled buffer may
   1384      * be sent back to the HAL device for reprocessing.
   1385      *
   1386      * This kind of stream is meant generally for Zero Shutter Lag (ZSL)
   1387      * features, where copying the captured image from the output buffer to the
   1388      * reprocessing input buffer would be expensive. See S8.1 for more details.
   1389      *
   1390      * Note that the HAL will always be reprocessing data it produced.
   1391      *
   1392      */
   1393     CAMERA3_STREAM_BIDIRECTIONAL = 2,
   1394 
   1395     /**
   1396      * Total number of framework-defined stream types
   1397      */
   1398     CAMERA3_NUM_STREAM_TYPES
   1399 
   1400 } camera3_stream_type_t;
   1401 
   1402 /**
   1403  * camera3_stream_rotation_t:
   1404  *
    1405  * The required counterclockwise rotation of the camera stream.
   1406  */
   1407 typedef enum camera3_stream_rotation {
   1408     /* No rotation */
   1409     CAMERA3_STREAM_ROTATION_0 = 0,
   1410 
    1411     /* Rotate by 90 degrees counterclockwise */
   1412     CAMERA3_STREAM_ROTATION_90 = 1,
   1413 
    1414     /* Rotate by 180 degrees counterclockwise */
   1415     CAMERA3_STREAM_ROTATION_180 = 2,
   1416 
    1417     /* Rotate by 270 degrees counterclockwise */
   1418     CAMERA3_STREAM_ROTATION_270 = 3
   1419 } camera3_stream_rotation_t;
   1420 
   1421 /**
   1422  * camera3_stream_configuration_mode_t:
   1423  *
   1424  * This defines the general operation mode for the HAL (for a given stream configuration), where
   1425  * modes besides NORMAL have different semantics, and usually limit the generality of the API in
   1426  * exchange for higher performance in some particular area.
   1427  */
   1428 typedef enum camera3_stream_configuration_mode {
   1429     /**
   1430      * Normal stream configuration operation mode. This is the default camera operation mode,
   1431      * where all semantics of HAL APIs and metadata controls apply.
   1432      */
   1433     CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE = 0,
   1434 
   1435     /**
    1436      * Special constrained high speed operation mode for devices that cannot support high
   1437      * speed output in NORMAL mode. All streams in this configuration are operating at high speed
   1438      * mode and have different characteristics and limitations to achieve high speed output.
   1439      * The NORMAL mode can still be used for high speed output if the HAL can support high speed
   1440      * output while satisfying all the semantics of HAL APIs and metadata controls. It is
   1441      * recommended for the HAL to support high speed output in NORMAL mode (by advertising the high
   1442      * speed FPS ranges in android.control.aeAvailableTargetFpsRanges) if possible.
   1443      *
   1444      * This mode has below limitations/requirements:
   1445      *
   1446      *   1. The HAL must support up to 2 streams with sizes reported by
   1447      *      android.control.availableHighSpeedVideoConfigurations.
   1448      *   2. In this mode, the HAL is expected to output up to 120fps or higher. This mode must
   1449      *      support the targeted FPS range and size configurations reported by
   1450      *      android.control.availableHighSpeedVideoConfigurations.
   1451      *   3. The HAL must support HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED output stream format.
   1452      *   4. To achieve efficient high speed streaming, the HAL may have to aggregate
    1453      *      multiple frames together and send them to the camera device for processing, where the
    1454      *      request controls are the same for all the frames in this batch (batch mode). The HAL must
    1455      *      support the max batch size, and the max batch size requirements are defined by
   1456      *      android.control.availableHighSpeedVideoConfigurations.
   1457      *   5. In this mode, the HAL must override aeMode, awbMode, and afMode to ON, ON, and
   1458      *      CONTINUOUS_VIDEO, respectively. All post-processing block mode controls must be
   1459      *      overridden to be FAST. Therefore, no manual control of capture and post-processing
   1460      *      parameters is possible. All other controls operate the same as when
   1461      *      android.control.mode == AUTO. This means that all other android.control.* fields
   1462      *      must continue to work, such as
   1463      *
   1464      *      android.control.aeTargetFpsRange
   1465      *      android.control.aeExposureCompensation
   1466      *      android.control.aeLock
   1467      *      android.control.awbLock
   1468      *      android.control.effectMode
   1469      *      android.control.aeRegions
   1470      *      android.control.afRegions
   1471      *      android.control.awbRegions
   1472      *      android.control.afTrigger
   1473      *      android.control.aePrecaptureTrigger
   1474      *
   1475      *      Outside of android.control.*, the following controls must work:
   1476      *
   1477      *      android.flash.mode (TORCH mode only, automatic flash for still capture will not work
   1478      *      since aeMode is ON)
   1479      *      android.lens.opticalStabilizationMode (if it is supported)
   1480      *      android.scaler.cropRegion
   1481      *      android.statistics.faceDetectMode (if it is supported)
   1482      *   6. To reduce the amount of data passed across process boundaries at
   1483      *      high frame rate, within one batch, camera framework only propagates
   1484      *      the last shutter notify and the last capture results (including partial
   1485      *      results and final result) to the app. The shutter notifies and capture
   1486      *      results for the other requests in the batch are derived by
   1487      *      the camera framework. As a result, the HAL can return empty metadata
   1488      *      except for the last result in the batch.
   1489      *
   1490      * For more details about high speed stream requirements, see
   1491      * android.control.availableHighSpeedVideoConfigurations and CONSTRAINED_HIGH_SPEED_VIDEO
   1492      * capability defined in android.request.availableCapabilities.
   1493      *
   1494      * This mode only needs to be supported by HALs that include CONSTRAINED_HIGH_SPEED_VIDEO in
   1495      * the android.request.availableCapabilities static metadata.
   1496      */
   1497     CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE = 1,
   1498 
   1499     /**
   1500      * First value for vendor-defined stream configuration modes.
   1501      */
   1502     CAMERA3_VENDOR_STREAM_CONFIGURATION_MODE_START = 0x8000
   1503 } camera3_stream_configuration_mode_t;
   1504 
   1505 /**
   1506  * camera3_stream_t:
   1507  *
   1508  * A handle to a single camera input or output stream. A stream is defined by
   1509  * the framework by its buffer resolution and format, and additionally by the
   1510  * HAL with the gralloc usage flags and the maximum in-flight buffer count.
   1511  *
   1512  * The stream structures are owned by the framework, but pointers to a
   1513  * camera3_stream passed into the HAL by configure_streams() are valid until the
   1514  * end of the first subsequent configure_streams() call that _does not_ include
   1515  * that camera3_stream as an argument, or until the end of the close() call.
   1516  *
   1517  * All camera3_stream framework-controlled members are immutable once the
   1518  * camera3_stream is passed into configure_streams().  The HAL may only change
   1519  * the HAL-controlled parameters during a configure_streams() call, except for
   1520  * the contents of the private pointer.
   1521  *
   1522  * If a configure_streams() call returns a non-fatal error, all active streams
   1523  * remain valid as if configure_streams() had not been called.
   1524  *
   1525  * The endpoint of the stream is not visible to the camera HAL device.
   1526  * In DEVICE_API_VERSION_3_1, this was changed to share consumer usage flags
   1527  * on streams where the camera is a producer (OUTPUT and BIDIRECTIONAL stream
    1528  * types); see the usage field below.
   1529  */
   1530 typedef struct camera3_stream {
   1531 
   1532     /*****
   1533      * Set by framework before configure_streams()
   1534      */
   1535 
   1536     /**
   1537      * The type of the stream, one of the camera3_stream_type_t values.
   1538      */
   1539     int stream_type;
   1540 
   1541     /**
   1542      * The width in pixels of the buffers in this stream
   1543      */
   1544     uint32_t width;
   1545 
   1546     /**
   1547      * The height in pixels of the buffers in this stream
   1548      */
   1549     uint32_t height;
   1550 
   1551     /**
   1552      * The pixel format for the buffers in this stream. Format is a value from
   1553      * the HAL_PIXEL_FORMAT_* list in system/core/include/system/graphics.h, or
   1554      * from device-specific headers.
   1555      *
   1556      * If HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED is used, then the platform
   1557      * gralloc module will select a format based on the usage flags provided by
   1558      * the camera device and the other endpoint of the stream.
   1559      *
   1560      * <= CAMERA_DEVICE_API_VERSION_3_1:
   1561      *
   1562      * The camera HAL device must inspect the buffers handed to it in the
   1563      * subsequent register_stream_buffers() call to obtain the
   1564      * implementation-specific format details, if necessary.
   1565      *
   1566      * >= CAMERA_DEVICE_API_VERSION_3_2:
   1567      *
   1568      * register_stream_buffers() won't be called by the framework, so the HAL
   1569      * should configure the ISP and sensor pipeline based purely on the sizes,
   1570      * usage flags, and formats for the configured streams.
   1571      */
   1572     int format;
   1573 
   1574     /*****
   1575      * Set by HAL during configure_streams().
   1576      */
   1577 
   1578     /**
   1579      * The gralloc usage flags for this stream, as needed by the HAL. The usage
   1580      * flags are defined in gralloc.h (GRALLOC_USAGE_*), or in device-specific
   1581      * headers.
   1582      *
   1583      * For output streams, these are the HAL's producer usage flags. For input
   1584      * streams, these are the HAL's consumer usage flags. The usage flags from
   1585      * the producer and the consumer will be combined together and then passed
   1586      * to the platform gralloc HAL module for allocating the gralloc buffers for
   1587      * each stream.
   1588      *
   1589      * Version information:
   1590      *
   1591      * == CAMERA_DEVICE_API_VERSION_3_0:
   1592      *
   1593      *   No initial value guaranteed when passed via configure_streams().
   1594      *   HAL may not use this field as input, and must write over this field
   1595      *   with its usage flags.
   1596      *
   1597      * >= CAMERA_DEVICE_API_VERSION_3_1:
   1598      *
   1599      *   For stream_type OUTPUT and BIDIRECTIONAL, when passed via
   1600      *   configure_streams(), the initial value of this is the consumer's
   1601      *   usage flags.  The HAL may use these consumer flags to decide stream
   1602      *   configuration.
   1603      *   For stream_type INPUT, when passed via configure_streams(), the initial
   1604      *   value of this is 0.
   1605      *   For all streams passed via configure_streams(), the HAL must write
   1606      *   over this field with its usage flags.
   1607      *
    1608      *   From Android O, the usage flag for an output stream may be a bitwise
    1609      *   combination of usage flags for multiple consumers, for the purpose of
    1610      *   sharing one camera stream between those consumers. The HAL must fail the
    1611      *   configure_streams call with -EINVAL if the combined flags cannot be
    1612      *   supported due to incompatible buffer format, dataSpace, or other hardware
   1613      *   limitations.
   1614      */
   1615     uint32_t usage;
   1616 
   1617     /**
   1618      * The maximum number of buffers the HAL device may need to have dequeued at
   1619      * the same time. The HAL device may not have more buffers in-flight from
   1620      * this stream than this value.
   1621      */
   1622     uint32_t max_buffers;
   1623 
   1624     /**
   1625      * A handle to HAL-private information for the stream. Will not be inspected
   1626      * by the framework code.
   1627      */
   1628     void *priv;
   1629 
   1630     /**
   1631      * A field that describes the contents of the buffer. The format and buffer
   1632      * dimensions define the memory layout and structure of the stream buffers,
   1633      * while dataSpace defines the meaning of the data within the buffer.
   1634      *
   1635      * For most formats, dataSpace defines the color space of the image data.
   1636      * In addition, for some formats, dataSpace indicates whether image- or
   1637      * depth-based data is requested.  See system/core/include/system/graphics.h
   1638      * for details of formats and valid dataSpace values for each format.
   1639      *
   1640      * Version information:
   1641      *
   1642      * < CAMERA_DEVICE_API_VERSION_3_3:
   1643      *
   1644      *   Not defined and should not be accessed. dataSpace should be assumed to
   1645      *   be HAL_DATASPACE_UNKNOWN, and the appropriate color space, etc, should
   1646      *   be determined from the usage flags and the format.
   1647      *
   1648      * = CAMERA_DEVICE_API_VERSION_3_3:
   1649      *
   1650      *   Always set by the camera service. HAL must use this dataSpace to
   1651      *   configure the stream to the correct colorspace, or to select between
   1652      *   color and depth outputs if supported. The dataspace values are the
   1653      *   legacy definitions in graphics.h
   1654      *
   1655      * >= CAMERA_DEVICE_API_VERSION_3_4:
   1656      *
   1657      *   Always set by the camera service. HAL must use this dataSpace to
   1658      *   configure the stream to the correct colorspace, or to select between
   1659      *   color and depth outputs if supported. The dataspace values are set
   1660      *   using the V0 dataspace definitions in graphics.h
   1661      */
   1662     android_dataspace_t data_space;
   1663 
   1664     /**
   1665      * The required output rotation of the stream, one of
    1666      * the camera3_stream_rotation_t values. This must be inspected by the HAL along
    1667      * with the stream width and height. For example, if the rotation is 90 degrees
    1668      * and the stream width and height are 720 and 1280 respectively, the camera service
    1669      * will supply buffers of size 720x1280, and the HAL should capture a 1280x720 image
    1670      * and rotate it by 90 degrees counterclockwise. The rotation field is a
    1671      * no-op when the stream type is input. The camera HAL must ignore the rotation
    1672      * field for an input stream.
   1673      *
   1674      * <= CAMERA_DEVICE_API_VERSION_3_2:
   1675      *
   1676      *    Not defined and must not be accessed. HAL must not apply any rotation
   1677      *    on output images.
   1678      *
   1679      * >= CAMERA_DEVICE_API_VERSION_3_3:
   1680      *
   1681      *    Always set by camera service. HAL must inspect this field during stream
    1682      *    configuration and return -EINVAL if the HAL cannot perform such rotation.
   1683      *    HAL must always support CAMERA3_STREAM_ROTATION_0, so a
   1684      *    configure_streams() call must not fail for unsupported rotation if
   1685      *    rotation field of all streams is CAMERA3_STREAM_ROTATION_0.
   1686      *
   1687      */
   1688     int rotation;
   1689 
   1690     /* reserved for future use */
   1691     void *reserved[7];
   1692 
   1693 } camera3_stream_t;
   1694 
   1695 /**
   1696  * camera3_stream_configuration_t:
   1697  *
   1698  * A structure of stream definitions, used by configure_streams(). This
   1699  * structure defines all the output streams and the reprocessing input
   1700  * stream for the current camera use case.
   1701  */
   1702 typedef struct camera3_stream_configuration {
   1703     /**
   1704      * The total number of streams requested by the framework.  This includes
   1705      * both input and output streams. The number of streams will be at least 1,
   1706      * and there will be at least one output-capable stream.
   1707      */
   1708     uint32_t num_streams;
   1709 
   1710     /**
   1711      * An array of camera stream pointers, defining the input/output
   1712      * configuration for the camera HAL device.
   1713      *
   1714      * At most one input-capable stream may be defined (INPUT or BIDIRECTIONAL)
   1715      * in a single configuration.
   1716      *
   1717      * At least one output-capable stream must be defined (OUTPUT or
   1718      * BIDIRECTIONAL).
   1719      */
   1720     camera3_stream_t **streams;
   1721 
   1722     /**
   1723      * >= CAMERA_DEVICE_API_VERSION_3_3:
   1724      *
    1725      * The operation mode of streams in this configuration, one of the values
    1726      * defined in camera3_stream_configuration_mode_t.  The HAL can use this
   1727      * mode as an indicator to set the stream property (e.g.,
   1728      * camera3_stream->max_buffers) appropriately. For example, if the
   1729      * configuration is
   1730      * CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE, the HAL may
   1731      * want to set aside more buffers for batch mode operation (see
   1732      * android.control.availableHighSpeedVideoConfigurations for batch mode
   1733      * definition).
   1734      *
   1735      */
   1736     uint32_t operation_mode;
   1737 } camera3_stream_configuration_t;
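
/*
 * Illustration only, not part of the interface: inside a HAL's configure_streams()
 * implementation, the HAL-controlled fields of each camera3_stream_t are typically
 * filled in roughly as follows ("stream_list" is the camera3_stream_configuration_t
 * passed in; the usage flags and buffer count shown are hypothetical, device-specific
 * choices).
 *
 *     for (uint32_t i = 0; i < stream_list->num_streams; i++) {
 *         camera3_stream_t *s = stream_list->streams[i];
 *         if (s->stream_type == CAMERA3_STREAM_INPUT) {
 *             s->usage = GRALLOC_USAGE_HW_CAMERA_READ;    // HAL is the consumer
 *         } else {
 *             s->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;  // HAL is the producer
 *         }
 *         s->max_buffers = 4;   // example in-flight limit for this device
 *     }
 */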
   1738 
   1739 /**
   1740  * camera3_buffer_status_t:
   1741  *
   1742  * The current status of a single stream buffer.
   1743  */
   1744 typedef enum camera3_buffer_status {
   1745     /**
   1746      * The buffer is in a normal state, and can be used after waiting on its
   1747      * sync fence.
   1748      */
   1749     CAMERA3_BUFFER_STATUS_OK = 0,
   1750 
   1751     /**
   1752      * The buffer does not contain valid data, and the data in it should not be
   1753      * used. The sync fence must still be waited on before reusing the buffer.
   1754      */
   1755     CAMERA3_BUFFER_STATUS_ERROR = 1
   1756 
   1757 } camera3_buffer_status_t;
   1758 
   1759 /**
   1760  * camera3_stream_buffer_t:
   1761  *
   1762  * A single buffer from a camera3 stream. It includes a handle to its parent
    1763  * stream, the handle to the gralloc buffer itself, and sync fences.
   1764  *
   1765  * The buffer does not specify whether it is to be used for input or output;
   1766  * that is determined by its parent stream type and how the buffer is passed to
   1767  * the HAL device.
   1768  */
   1769 typedef struct camera3_stream_buffer {
   1770     /**
   1771      * The handle of the stream this buffer is associated with
   1772      */
   1773     camera3_stream_t *stream;
   1774 
   1775     /**
   1776      * The native handle to the buffer
   1777      */
   1778     buffer_handle_t *buffer;
   1779 
   1780     /**
   1781      * Current state of the buffer, one of the camera3_buffer_status_t
   1782      * values. The framework will not pass buffers to the HAL that are in an
   1783      * error state. In case a buffer could not be filled by the HAL, it must
   1784      * have its status set to CAMERA3_BUFFER_STATUS_ERROR when returned to the
   1785      * framework with process_capture_result().
   1786      */
   1787     int status;
   1788 
   1789     /**
   1790      * The acquire sync fence for this buffer. The HAL must wait on this fence
   1791      * fd before attempting to read from or write to this buffer.
   1792      *
    1793      * The framework may set this to -1 to indicate that no waiting is necessary
   1794      * for this buffer.
   1795      *
   1796      * When the HAL returns an output buffer to the framework with
   1797      * process_capture_result(), the acquire_fence must be set to -1. If the HAL
   1798      * never waits on the acquire_fence due to an error in filling a buffer,
   1799      * when calling process_capture_result() the HAL must set the release_fence
   1800      * of the buffer to be the acquire_fence passed to it by the framework. This
   1801      * will allow the framework to wait on the fence before reusing the buffer.
   1802      *
   1803      * For input buffers, the HAL must not change the acquire_fence field during
   1804      * the process_capture_request() call.
   1805      *
   1806      * >= CAMERA_DEVICE_API_VERSION_3_2:
   1807      *
   1808      * When the HAL returns an input buffer to the framework with
   1809      * process_capture_result(), the acquire_fence must be set to -1. If the HAL
    1810      * never waits on the input buffer acquire fence due to an error, the sync
   1811      * fences should be handled similarly to the way they are handled for output
   1812      * buffers.
   1813      */
   1814      int acquire_fence;
   1815 
   1816     /**
   1817      * The release sync fence for this buffer. The HAL must set this fence when
   1818      * returning buffers to the framework, or write -1 to indicate that no
   1819      * waiting is required for this buffer.
   1820      *
   1821      * For the output buffers, the fences must be set in the output_buffers
   1822      * array passed to process_capture_result().
   1823      *
   1824      * <= CAMERA_DEVICE_API_VERSION_3_1:
   1825      *
   1826      * For the input buffer, the release fence must be set by the
   1827      * process_capture_request() call.
   1828      *
   1829      * >= CAMERA_DEVICE_API_VERSION_3_2:
   1830      *
   1831      * For the input buffer, the fences must be set in the input_buffer
   1832      * passed to process_capture_result().
   1833      *
   1834      * After signaling the release_fence for this buffer, the HAL
   1835      * should not make any further attempts to access this buffer as the
   1836      * ownership has been fully transferred back to the framework.
   1837      *
   1838      * If a fence of -1 was specified then the ownership of this buffer
   1839      * is transferred back immediately upon the call of process_capture_result.
   1840      */
   1841     int release_fence;
   1842 
   1843 } camera3_stream_buffer_t;
   1844 
   1845 /**
   1846  * camera3_stream_buffer_set_t:
   1847  *
   1848  * The complete set of gralloc buffers for a stream. This structure is given to
   1849  * register_stream_buffers() to allow the camera HAL device to register/map/etc
   1850  * newly allocated stream buffers.
   1851  *
   1852  * >= CAMERA_DEVICE_API_VERSION_3_2:
   1853  *
   1854  * Deprecated (and not used). In particular,
   1855  * register_stream_buffers is also deprecated and will never be invoked.
   1856  *
   1857  */
   1858 typedef struct camera3_stream_buffer_set {
   1859     /**
   1860      * The stream handle for the stream these buffers belong to
   1861      */
   1862     camera3_stream_t *stream;
   1863 
   1864     /**
   1865      * The number of buffers in this stream. It is guaranteed to be at least
   1866      * stream->max_buffers.
   1867      */
   1868     uint32_t num_buffers;
   1869 
   1870     /**
   1871      * The array of gralloc buffer handles for this stream. If the stream format
   1872      * is set to HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, the camera HAL device
   1873      * should inspect the passed-in buffers to determine any platform-private
   1874      * pixel format information.
   1875      */
   1876     buffer_handle_t **buffers;
   1877 
   1878 } camera3_stream_buffer_set_t;
   1879 
   1880 /**
   1881  * camera3_jpeg_blob:
   1882  *
   1883  * Transport header for compressed JPEG buffers in output streams.
   1884  *
   1885  * To capture JPEG images, a stream is created using the pixel format
   1886  * HAL_PIXEL_FORMAT_BLOB. The buffer size for the stream is calculated by the
   1887  * framework, based on the static metadata field android.jpeg.maxSize. Since
   1888  * compressed JPEG images are of variable size, the HAL needs to include the
   1889  * final size of the compressed image using this structure inside the output
   1890  * stream buffer. The JPEG blob ID field must be set to CAMERA3_JPEG_BLOB_ID.
   1891  *
    1892  * The transport header should be at the end of the JPEG output stream buffer. That
    1893  * means the jpeg_blob_id must start at byte[buffer_size -
    1894  * sizeof(camera3_jpeg_blob)], where buffer_size is the size of the gralloc buffer.
    1895  * Any HAL using this transport header must account for it in android.jpeg.maxSize.
   1896  * The JPEG data itself starts at the beginning of the buffer and should be
   1897  * jpeg_size bytes long.
   1898  */
   1899 typedef struct camera3_jpeg_blob {
   1900     uint16_t jpeg_blob_id;
   1901     uint32_t jpeg_size;
   1902 } camera3_jpeg_blob_t;
   1903 
   1904 enum {
   1905     CAMERA3_JPEG_BLOB_ID = 0x00FF
   1906 };
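
/*
 * Illustration only, not part of the interface: after encoding, a HAL typically
 * appends the transport header at the very end of the gralloc buffer. Here "mapped",
 * "buffer_size" and "encoded_bytes" are hypothetical names for the CPU-mapped buffer
 * address, the gralloc-allocated buffer size, and the size of the compressed JPEG
 * data already written at the start of the buffer (assumes <string.h> for memcpy).
 *
 *     struct camera3_jpeg_blob blob;
 *     blob.jpeg_blob_id = CAMERA3_JPEG_BLOB_ID;
 *     blob.jpeg_size    = encoded_bytes;
 *     memcpy((uint8_t *)mapped + buffer_size - sizeof(blob), &blob, sizeof(blob));
 */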
   1907 
   1908 /**********************************************************************
   1909  *
   1910  * Message definitions for the HAL notify() callback.
   1911  *
   1912  * These definitions are used for the HAL notify callback, to signal
   1913  * asynchronous events from the HAL device to the Android framework.
   1914  *
   1915  */
   1916 
   1917 /**
   1918  * camera3_msg_type:
   1919  *
   1920  * Indicates the type of message sent, which specifies which member of the
   1921  * message union is valid.
   1922  *
   1923  */
   1924 typedef enum camera3_msg_type {
   1925     /**
   1926      * An error has occurred. camera3_notify_msg.message.error contains the
   1927      * error information.
   1928      */
   1929     CAMERA3_MSG_ERROR = 1,
   1930 
   1931     /**
    1932      * The exposure of a given request, or the processing of a reprocess request,
    1933      * has begun. camera3_notify_msg.message.shutter contains the information for
    1934      * the capture.
   1935      */
   1936     CAMERA3_MSG_SHUTTER = 2,
   1937 
   1938     /**
   1939      * Number of framework message types
   1940      */
   1941     CAMERA3_NUM_MESSAGES
   1942 
   1943 } camera3_msg_type_t;
   1944 
   1945 /**
   1946  * Defined error codes for CAMERA_MSG_ERROR
   1947  */
   1948 typedef enum camera3_error_msg_code {
   1949     /**
    1950      * A serious failure occurred. No further frames or buffer streams will
   1951      * be produced by the device. Device should be treated as closed. The
   1952      * client must reopen the device to use it again. The frame_number field
   1953      * is unused.
   1954      */
   1955     CAMERA3_MSG_ERROR_DEVICE = 1,
   1956 
   1957     /**
   1958      * An error has occurred in processing a request. No output (metadata or
   1959      * buffers) will be produced for this request. The frame_number field
   1960      * specifies which request has been dropped. Subsequent requests are
   1961      * unaffected, and the device remains operational.
   1962      */
   1963     CAMERA3_MSG_ERROR_REQUEST = 2,
   1964 
   1965     /**
   1966      * An error has occurred in producing an output result metadata buffer
   1967      * for a request, but output stream buffers for it will still be
   1968      * available. Subsequent requests are unaffected, and the device remains
   1969      * operational.  The frame_number field specifies the request for which
   1970      * result metadata won't be available.
   1971      */
   1972     CAMERA3_MSG_ERROR_RESULT = 3,
   1973 
   1974     /**
   1975      * An error has occurred in placing an output buffer into a stream for a
   1976      * request. The frame metadata and other buffers may still be
   1977      * available. Subsequent requests are unaffected, and the device remains
   1978      * operational. The frame_number field specifies the request for which the
   1979      * buffer was dropped, and error_stream contains a pointer to the stream
   1980      * that dropped the frame.
   1981      */
   1982     CAMERA3_MSG_ERROR_BUFFER = 4,
   1983 
   1984     /**
   1985      * Number of error types
   1986      */
   1987     CAMERA3_MSG_NUM_ERRORS
   1988 
   1989 } camera3_error_msg_code_t;
   1990 
   1991 /**
   1992  * camera3_error_msg_t:
   1993  *
   1994  * Message contents for CAMERA3_MSG_ERROR
   1995  */
   1996 typedef struct camera3_error_msg {
   1997     /**
   1998      * Frame number of the request the error applies to. 0 if the frame number
   1999      * isn't applicable to the error.
   2000      */
   2001     uint32_t frame_number;
   2002 
   2003     /**
   2004      * Pointer to the stream that had a failure. NULL if the stream isn't
   2005      * applicable to the error.
   2006      */
   2007     camera3_stream_t *error_stream;
   2008 
   2009     /**
    2010      * The code for this error; one of the camera3_error_msg_code_t enum values.
   2011      */
   2012     int error_code;
   2013 
   2014 } camera3_error_msg_t;
   2015 
   2016 /**
   2017  * camera3_shutter_msg_t:
   2018  *
   2019  * Message contents for CAMERA3_MSG_SHUTTER
   2020  */
   2021 typedef struct camera3_shutter_msg {
   2022     /**
   2023      * Frame number of the request that has begun exposure or reprocessing.
   2024      */
   2025     uint32_t frame_number;
   2026 
   2027     /**
   2028      * Timestamp for the start of capture. For a reprocess request, this must
   2029      * be input image's start of capture. This must match the capture result
   2030      * metadata's sensor exposure start timestamp.
   2031      */
   2032     uint64_t timestamp;
   2033 
   2034 } camera3_shutter_msg_t;
   2035 
   2036 /**
   2037  * camera3_notify_msg_t:
   2038  *
   2039  * The message structure sent to camera3_callback_ops_t.notify()
   2040  */
   2041 typedef struct camera3_notify_msg {
   2042 
   2043     /**
   2044      * The message type. One of camera3_notify_msg_type, or a private extension.
   2045      */
   2046     int type;
   2047 
   2048     union {
   2049         /**
   2050          * Error message contents. Valid if type is CAMERA3_MSG_ERROR
   2051          */
   2052         camera3_error_msg_t error;
   2053 
   2054         /**
   2055          * Shutter message contents. Valid if type is CAMERA3_MSG_SHUTTER
   2056          */
   2057         camera3_shutter_msg_t shutter;
   2058 
   2059         /**
   2060          * Generic message contents. Used to ensure a minimum size for custom
   2061          * message types.
   2062          */
   2063         uint8_t generic[32];
   2064     } message;
   2065 
   2066 } camera3_notify_msg_t;
   2067 
   2068 /**********************************************************************
   2069  *
   2070  * Capture request/result definitions for the HAL process_capture_request()
   2071  * method, and the process_capture_result() callback.
   2072  *
   2073  */
   2074 
   2075 /**
   2076  * camera3_request_template_t:
   2077  *
   2078  * Available template types for
   2079  * camera3_device_ops.construct_default_request_settings()
   2080  */
   2081 typedef enum camera3_request_template {
   2082     /**
   2083      * Standard camera preview operation with 3A on auto.
   2084      */
   2085     CAMERA3_TEMPLATE_PREVIEW = 1,
   2086 
   2087     /**
   2088      * Standard camera high-quality still capture with 3A and flash on auto.
   2089      */
   2090     CAMERA3_TEMPLATE_STILL_CAPTURE = 2,
   2091 
   2092     /**
   2093      * Standard video recording plus preview with 3A on auto, torch off.
   2094      */
   2095     CAMERA3_TEMPLATE_VIDEO_RECORD = 3,
   2096 
   2097     /**
   2098      * High-quality still capture while recording video. Application will
   2099      * include preview, video record, and full-resolution YUV or JPEG streams in
   2100      * request. Must not cause stuttering on video stream. 3A on auto.
   2101      */
   2102     CAMERA3_TEMPLATE_VIDEO_SNAPSHOT = 4,
   2103 
   2104     /**
   2105      * Zero-shutter-lag mode. Application will request preview and
   2106      * full-resolution data for each frame, and reprocess it to JPEG when a
   2107      * still image is requested by user. Settings should provide highest-quality
   2108      * full-resolution images without compromising preview frame rate. 3A on
   2109      * auto.
   2110      */
   2111     CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG = 5,
   2112 
   2113     /**
   2114      * A basic template for direct application control of capture
   2115      * parameters. All automatic control is disabled (auto-exposure, auto-white
   2116      * balance, auto-focus), and post-processing parameters are set to preview
   2117      * quality. The manual capture parameters (exposure, sensitivity, etc.)
   2118      * are set to reasonable defaults, but should be overridden by the
   2119      * application depending on the intended use case.
   2120      */
   2121     CAMERA3_TEMPLATE_MANUAL = 6,
   2122 
   2123     /* Total number of templates */
   2124     CAMERA3_TEMPLATE_COUNT,
   2125 
   2126     /**
   2127      * First value for vendor-defined request templates
   2128      */
   2129     CAMERA3_VENDOR_TEMPLATE_START = 0x40000000
   2130 
   2131 } camera3_request_template_t;
   2132 
   2133 /**
   2134  * camera3_capture_request_t:
   2135  *
   2136  * A single request for image capture/buffer reprocessing, sent to the Camera
   2137  * HAL device by the framework in process_capture_request().
   2138  *
   2139  * The request contains the settings to be used for this capture, and the set of
   2140  * output buffers to write the resulting image data in. It may optionally
   2141  * contain an input buffer, in which case the request is for reprocessing that
   2142  * input buffer instead of capturing a new image with the camera sensor. The
   2143  * capture is identified by the frame_number.
   2144  *
   2145  * In response, the camera HAL device must send a camera3_capture_result
   2146  * structure asynchronously to the framework, using the process_capture_result()
   2147  * callback.
   2148  */
   2149 typedef struct camera3_capture_request {
   2150     /**
   2151      * The frame number is an incrementing integer set by the framework to
   2152      * uniquely identify this capture. It needs to be returned in the result
   2153      * call, and is also used to identify the request in asynchronous
   2154      * notifications sent to camera3_callback_ops_t.notify().
   2155      */
   2156     uint32_t frame_number;
   2157 
   2158     /**
   2159      * The settings buffer contains the capture and processing parameters for
   2160      * the request. As a special case, a NULL settings buffer indicates that the
   2161      * settings are identical to the most-recently submitted capture request. A
   2162      * NULL buffer cannot be used as the first submitted request after a
   2163      * configure_streams() call.
   2164      */
   2165     const camera_metadata_t *settings;
   2166 
   2167     /**
   2168      * The input stream buffer to use for this request, if any.
   2169      *
   2170      * If input_buffer is NULL, then the request is for a new capture from the
   2171      * imager. If input_buffer is valid, the request is for reprocessing the
   2172      * image contained in input_buffer.
   2173      *
   2174      * In the latter case, the HAL must set the release_fence of the
   2175      * input_buffer to a valid sync fence, or to -1 if the HAL does not support
   2176      * sync, before process_capture_request() returns.
   2177      *
   2178      * The HAL is required to wait on the acquire sync fence of the input buffer
   2179      * before accessing it.
   2180      *
   2181      * <= CAMERA_DEVICE_API_VERSION_3_1:
   2182      *
   2183      * Any input buffer included here will have been registered with the HAL
   2184      * through register_stream_buffers() before its inclusion in a request.
   2185      *
   2186      * >= CAMERA_DEVICE_API_VERSION_3_2:
   2187      *
   2188      * The buffers will not have been pre-registered with the HAL.
   2189      * Subsequent requests may reuse buffers, or provide entirely new buffers.
   2190      */
   2191     camera3_stream_buffer_t *input_buffer;
   2192 
   2193     /**
   2194      * The number of output buffers for this capture request. Must be at least
   2195      * 1.
   2196      */
   2197     uint32_t num_output_buffers;
   2198 
   2199     /**
   2200      * An array of num_output_buffers stream buffers, to be filled with image
   2201      * data from this capture/reprocess. The HAL must wait on the acquire fences
   2202      * of each stream buffer before writing to them.
   2203      *
   2204      * The HAL takes ownership of the actual buffer_handle_t entries in
   2205      * output_buffers; the framework does not access them until they are
   2206      * returned in a camera3_capture_result_t.
   2207      *
   2208      * <= CAMERA_DEVICE_API_VERSION_3_1:
   2209      *
    2210      * All the buffers included here will have been registered with the HAL
   2211      * through register_stream_buffers() before their inclusion in a request.
   2212      *
   2213      * >= CAMERA_DEVICE_API_VERSION_3_2:
   2214      *
   2215      * Any or all of the buffers included here may be brand new in this
    2216      * request (having never before been seen by the HAL).
   2217      */
   2218     const camera3_stream_buffer_t *output_buffers;
   2219 
   2220 } camera3_capture_request_t;
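
/**
 * Illustrative sketch (not a normative part of this API): how the framework
 * side might assemble a simple single-stream capture request. The function
 * name, parameters, and the source of the buffer handle are assumptions for
 * the example; 'settings' is assumed to have come from
 * construct_default_request_settings().
 *
 *   #include <stddef.h>
 *   #include <hardware/camera3.h>
 *
 *   static int submit_preview_request(camera3_device_t *dev,
 *                                     const camera_metadata_t *settings,
 *                                     camera3_stream_t *preview_stream,
 *                                     buffer_handle_t *handle,
 *                                     uint32_t frame_number) {
 *       camera3_stream_buffer_t out;
 *       out.stream = preview_stream;
 *       out.buffer = handle;
 *       out.status = CAMERA3_BUFFER_STATUS_OK;
 *       out.acquire_fence = -1;    // buffer already safe to write
 *       out.release_fence = -1;
 *
 *       camera3_capture_request_t req;
 *       req.frame_number = frame_number;
 *       req.settings = settings;    // NULL would mean "same as last request"
 *       req.input_buffer = NULL;    // new capture, not a reprocess
 *       req.num_output_buffers = 1;
 *       req.output_buffers = &out;
 *
 *       return dev->ops->process_capture_request(dev, &req);
 *   }
 */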
   2221 
   2222 /**
   2223  * camera3_capture_result_t:
   2224  *
   2225  * The result of a single capture/reprocess by the camera HAL device. This is
   2226  * sent to the framework asynchronously with process_capture_result(), in
   2227  * response to a single capture request sent to the HAL with
   2228  * process_capture_request(). Multiple process_capture_result() calls may be
   2229  * performed by the HAL for each request.
   2230  *
   2231  * Each call, all with the same frame
   2232  * number, may contain some subset of the output buffers, and/or the result
   2233  * metadata. The metadata may only be provided once for a given frame number;
   2234  * all other calls must set the result metadata to NULL.
   2235  *
   2236  * The result structure contains the output metadata from this capture, and the
   2237  * set of output buffers that have been/will be filled for this capture. Each
   2238  * output buffer may come with a release sync fence that the framework will wait
   2239  * on before reading, in case the buffer has not yet been filled by the HAL.
   2240  *
   2241  * >= CAMERA_DEVICE_API_VERSION_3_2:
   2242  *
   2243  * The metadata may be provided multiple times for a single frame number. The
   2244  * framework will accumulate together the final result set by combining each
   2245  * partial result together into the total result set.
   2246  *
   2247  * If an input buffer is given in a request, the HAL must return it in one of
    2248  * the process_capture_result calls; that call may return just the input
    2249  * buffer, with no metadata and no output buffers. Its sync fences must be
    2250  * handled the same way as for output buffers.
   2251  *
   2252  *
   2253  * Performance considerations:
   2254  *
    2255  * Applications will also receive these partial results immediately, so sending
    2256  * partial results is a highly recommended performance optimization: it avoids
    2257  * waiting out the full pipeline latency before reporting values that are
    2258  * already known early in the pipeline.
    2259  *
    2260  * A typical use case is calculating the AF state halfway through the
    2261  * pipeline; by sending that state back to the framework immediately, the
    2262  * perceived auto-focus latency is roughly halved, improving responsiveness.
   2263  *
   2264  */
   2265 typedef struct camera3_capture_result {
   2266     /**
   2267      * The frame number is an incrementing integer set by the framework in the
   2268      * submitted request to uniquely identify this capture. It is also used to
   2269      * identify the request in asynchronous notifications sent to
   2270      * camera3_callback_ops_t.notify().
    2271      */
   2272     uint32_t frame_number;
   2273 
   2274     /**
   2275      * The result metadata for this capture. This contains information about the
   2276      * final capture parameters, the state of the capture and post-processing
   2277      * hardware, the state of the 3A algorithms, if enabled, and the output of
   2278      * any enabled statistics units.
   2279      *
   2280      * Only one call to process_capture_result() with a given frame_number may
   2281      * include the result metadata. All other calls for the same frame_number
   2282      * must set this to NULL.
   2283      *
   2284      * If there was an error producing the result metadata, result must be an
   2285      * empty metadata buffer, and notify() must be called with ERROR_RESULT.
   2286      *
   2287      * >= CAMERA_DEVICE_API_VERSION_3_2:
   2288      *
   2289      * Multiple calls to process_capture_result() with a given frame_number
   2290      * may include the result metadata.
   2291      *
   2292      * Partial metadata submitted should not include any metadata key returned
   2293      * in a previous partial result for a given frame. Each new partial result
   2294      * for that frame must also set a distinct partial_result value.
   2295      *
   2296      * If notify has been called with ERROR_RESULT, all further partial
   2297      * results for that frame are ignored by the framework.
   2298      */
   2299     const camera_metadata_t *result;
   2300 
   2301     /**
   2302      * The number of output buffers returned in this result structure. Must be
   2303      * less than or equal to the matching capture request's count. If this is
   2304      * less than the buffer count in the capture request, at least one more call
   2305      * to process_capture_result with the same frame_number must be made, to
   2306      * return the remaining output buffers to the framework. This may only be
   2307      * zero if the structure includes valid result metadata or an input buffer
   2308      * is returned in this result.
   2309      */
   2310     uint32_t num_output_buffers;
   2311 
   2312     /**
   2313      * The handles for the output stream buffers for this capture. They may not
   2314      * yet be filled at the time the HAL calls process_capture_result(); the
   2315      * framework will wait on the release sync fences provided by the HAL before
   2316      * reading the buffers.
   2317      *
   2318      * The HAL must set the stream buffer's release sync fence to a valid sync
   2319      * fd, or to -1 if the buffer has already been filled.
   2320      *
   2321      * If the HAL encounters an error while processing the buffer, and the
   2322      * buffer is not filled, the buffer's status field must be set to
   2323      * CAMERA3_BUFFER_STATUS_ERROR. If the HAL did not wait on the acquire fence
   2324      * before encountering the error, the acquire fence should be copied into
   2325      * the release fence, to allow the framework to wait on the fence before
   2326      * reusing the buffer.
   2327      *
   2328      * The acquire fence must be set to -1 for all output buffers.  If
   2329      * num_output_buffers is zero, this may be NULL. In that case, at least one
   2330      * more process_capture_result call must be made by the HAL to provide the
   2331      * output buffers.
   2332      *
   2333      * When process_capture_result is called with a new buffer for a frame,
   2334      * all previous frames' buffers for that corresponding stream must have been
   2335      * already delivered (the fences need not have yet been signaled).
   2336      *
   2337      * >= CAMERA_DEVICE_API_VERSION_3_2:
   2338      *
    2339      * Gralloc buffers for a frame may be sent to the framework before the
    2340      * corresponding SHUTTER notify() call.
   2341      *
   2342      * Performance considerations:
   2343      *
   2344      * Buffers delivered to the framework will not be dispatched to the
   2345      * application layer until a start of exposure timestamp has been received
   2346      * via a SHUTTER notify() call. It is highly recommended to
   2347      * dispatch that call as early as possible.
   2348      */
   2349      const camera3_stream_buffer_t *output_buffers;
   2350 
   2351      /**
   2352       * >= CAMERA_DEVICE_API_VERSION_3_2:
   2353       *
   2354       * The handle for the input stream buffer for this capture. It may not
   2355       * yet be consumed at the time the HAL calls process_capture_result(); the
   2356       * framework will wait on the release sync fences provided by the HAL before
   2357       * reusing the buffer.
   2358       *
   2359       * The HAL should handle the sync fences the same way they are done for
   2360       * output_buffers.
   2361       *
   2362       * Only one input buffer is allowed to be sent per request. Similarly to
   2363       * output buffers, the ordering of returned input buffers must be
   2364       * maintained by the HAL.
   2365       *
   2366       * Performance considerations:
   2367       *
   2368       * The input buffer should be returned as early as possible. If the HAL
   2369       * supports sync fences, it can call process_capture_result to hand it back
   2370       * with sync fences being set appropriately. If the sync fences are not
   2371       * supported, the buffer can only be returned when it is consumed, which
    2372       * may take a long time; the HAL may choose to copy this input buffer to make
   2373       * the buffer return sooner.
   2374       */
   2375       const camera3_stream_buffer_t *input_buffer;
   2376 
   2377      /**
   2378       * >= CAMERA_DEVICE_API_VERSION_3_2:
   2379       *
   2380       * In order to take advantage of partial results, the HAL must set the
   2381       * static metadata android.request.partialResultCount to the number of
   2382       * partial results it will send for each frame.
   2383       *
   2384       * Each new capture result with a partial result must set
   2385       * this field (partial_result) to a distinct inclusive value between
   2386       * 1 and android.request.partialResultCount.
   2387       *
    2388       * HALs not wishing to take advantage of this feature must not
    2389       * set android.request.partialResultCount or partial_result to a value
    2390       * other than 1.
   2391       *
   2392       * This value must be set to 0 when a capture result contains buffers only
   2393       * and no metadata.
   2394       */
   2395      uint32_t partial_result;
   2396 
   2397 } camera3_capture_result_t;
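
/**
 * Illustrative sketch (not a normative part of this API, >=
 * CAMERA_DEVICE_API_VERSION_3_2): returning one capture in two pieces, an
 * early metadata-only partial result followed by a final result carrying the
 * output buffers. The g_callbacks pointer and the metadata management are
 * assumed HAL-side details.
 *
 *   #include <string.h>
 *   #include <hardware/camera3.h>
 *
 *   static const camera3_callback_ops_t *g_callbacks; // saved in initialize()
 *
 *   static void send_early_3a_result(uint32_t frame_number,
 *                                    const camera_metadata_t *partial_3a) {
 *       camera3_capture_result_t res;
 *       memset(&res, 0, sizeof(res));
 *       res.frame_number = frame_number;
 *       res.result = partial_3a;    // e.g. just AE/AF state, known early
 *       res.partial_result = 1;     // first of partialResultCount pieces
 *       g_callbacks->process_capture_result(g_callbacks, &res);
 *   }
 *
 *   static void send_final_result(uint32_t frame_number,
 *                                 const camera_metadata_t *final_metadata,
 *                                 const camera3_stream_buffer_t *buffers,
 *                                 uint32_t buffer_count,
 *                                 uint32_t partial_result_count) {
 *       camera3_capture_result_t res;
 *       memset(&res, 0, sizeof(res));
 *       res.frame_number = frame_number;
 *       res.result = final_metadata;                // remaining metadata keys
 *       res.num_output_buffers = buffer_count;
 *       res.output_buffers = buffers;               // release fences set by HAL
 *       res.partial_result = partial_result_count;  // the last partial index
 *       g_callbacks->process_capture_result(g_callbacks, &res);
 *   }
 */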
   2398 
   2399 /**********************************************************************
   2400  *
   2401  * Callback methods for the HAL to call into the framework.
   2402  *
    2403  * These methods are used to return metadata and image buffers for completed
   2404  * or failed captures, and to notify the framework of asynchronous events such
   2405  * as errors.
   2406  *
   2407  * The framework will not call back into the HAL from within these callbacks,
   2408  * and these calls will not block for extended periods.
   2409  *
   2410  */
   2411 typedef struct camera3_callback_ops {
   2412 
   2413     /**
   2414      * process_capture_result:
   2415      *
   2416      * Send results from a completed capture to the framework.
   2417      * process_capture_result() may be invoked multiple times by the HAL in
   2418      * response to a single capture request. This allows, for example, the
   2419      * metadata and low-resolution buffers to be returned in one call, and
   2420      * post-processed JPEG buffers in a later call, once it is available. Each
   2421      * call must include the frame number of the request it is returning
   2422      * metadata or buffers for.
   2423      *
   2424      * A component (buffer or metadata) of the complete result may only be
   2425      * included in one process_capture_result call. A buffer for each stream,
   2426      * and the result metadata, must be returned by the HAL for each request in
   2427      * one of the process_capture_result calls, even in case of errors producing
   2428      * some of the output. A call to process_capture_result() with neither
    2429      * output buffers nor result metadata is not allowed.
   2430      *
   2431      * The order of returning metadata and buffers for a single result does not
   2432      * matter, but buffers for a given stream must be returned in FIFO order. So
   2433      * the buffer for request 5 for stream A must always be returned before the
   2434      * buffer for request 6 for stream A. This also applies to the result
   2435      * metadata; the metadata for request 5 must be returned before the metadata
   2436      * for request 6.
   2437      *
   2438      * However, different streams are independent of each other, so it is
   2439      * acceptable and expected that the buffer for request 5 for stream A may be
   2440      * returned after the buffer for request 6 for stream B is. And it is
   2441      * acceptable that the result metadata for request 6 for stream B is
   2442      * returned before the buffer for request 5 for stream A is.
   2443      *
    2444      * The HAL retains ownership of the result structure, which only needs to be
   2445      * valid to access during this call. The framework will copy whatever it
   2446      * needs before this call returns.
   2447      *
   2448      * The output buffers do not need to be filled yet; the framework will wait
   2449      * on the stream buffer release sync fence before reading the buffer
   2450      * data. Therefore, this method should be called by the HAL as soon as
    2451      * possible, even if some or all of the output buffers are still
   2452      * being filled. The HAL must include valid release sync fences into each
   2453      * output_buffers stream buffer entry, or -1 if that stream buffer is
   2454      * already filled.
   2455      *
   2456      * If the result buffer cannot be constructed for a request, the HAL should
   2457      * return an empty metadata buffer, but still provide the output buffers and
   2458      * their sync fences. In addition, notify() must be called with an
   2459      * ERROR_RESULT message.
   2460      *
   2461      * If an output buffer cannot be filled, its status field must be set to
    2462      * STATUS_ERROR. In addition, notify() must be called with an ERROR_BUFFER
   2463      * message.
   2464      *
   2465      * If the entire capture has failed, then this method still needs to be
   2466      * called to return the output buffers to the framework. All the buffer
   2467      * statuses should be STATUS_ERROR, and the result metadata should be an
    2468      * empty buffer. In addition, notify() must be called with an ERROR_REQUEST
   2469      * message. In this case, individual ERROR_RESULT/ERROR_BUFFER messages
   2470      * should not be sent.
   2471      *
   2472      * Performance requirements:
   2473      *
    2474      * This is a non-blocking call. The framework returns from this call within 5ms.
   2475      *
   2476      * The pipeline latency (see S7 for definition) should be less than or equal to
   2477      * 4 frame intervals, and must be less than or equal to 8 frame intervals.
   2478      *
   2479      */
   2480     void (*process_capture_result)(const struct camera3_callback_ops *,
   2481             const camera3_capture_result_t *result);
   2482 
   2483     /**
   2484      * notify:
   2485      *
   2486      * Asynchronous notification callback from the HAL, fired for various
    2487      * reasons. Use this only for information that is independent of frame capture,
    2488      * or that requires specific timing. The ownership of the message structure remains
   2489      * with the HAL, and the msg only needs to be valid for the duration of this
   2490      * call.
   2491      *
   2492      * Multiple threads may call notify() simultaneously.
   2493      *
   2494      * <= CAMERA_DEVICE_API_VERSION_3_1:
   2495      *
   2496      * The notification for the start of exposure for a given request must be
   2497      * sent by the HAL before the first call to process_capture_result() for
   2498      * that request is made.
   2499      *
   2500      * >= CAMERA_DEVICE_API_VERSION_3_2:
   2501      *
   2502      * Buffers delivered to the framework will not be dispatched to the
   2503      * application layer until a start of exposure timestamp (or input image's
   2504      * start of exposure timestamp for a reprocess request) has been received
   2505      * via a SHUTTER notify() call. It is highly recommended to dispatch this
   2506      * call as early as possible.
   2507      *
   2508      * ------------------------------------------------------------------------
   2509      * Performance requirements:
   2510      *
    2511      * This is a non-blocking call. The framework returns from this call within 5ms.
   2512      */
   2513     void (*notify)(const struct camera3_callback_ops *,
   2514             const camera3_notify_msg_t *msg);
   2515 
   2516 } camera3_callback_ops_t;
   2517 
   2518 /**********************************************************************
   2519  *
   2520  * Camera device operations
   2521  *
   2522  */
   2523 typedef struct camera3_device_ops {
   2524 
   2525     /**
   2526      * initialize:
   2527      *
   2528      * One-time initialization to pass framework callback function pointers to
   2529      * the HAL. Will be called once after a successful open() call, before any
   2530      * other functions are called on the camera3_device_ops structure.
   2531      *
   2532      * Performance requirements:
   2533      *
   2534      * This should be a non-blocking call. The HAL should return from this call
   2535      * in 5ms, and must return from this call in 10ms.
   2536      *
   2537      * Return values:
   2538      *
   2539      *  0:     On successful initialization
   2540      *
   2541      * -ENODEV: If initialization fails. Only close() can be called successfully
   2542      *          by the framework after this.
   2543      */
   2544     int (*initialize)(const struct camera3_device *,
   2545             const camera3_callback_ops_t *callback_ops);
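
/**
 * Illustrative sketch (not a normative part of this API): a minimal
 * initialize() implementation that simply stores the framework callbacks for
 * later use. The file-scope global is a simplifying assumption; a real HAL
 * would keep this in its per-device private state (camera3_device_t.priv).
 *
 *   #include <errno.h>
 *   #include <stddef.h>
 *   #include <hardware/camera3.h>
 *
 *   static const camera3_callback_ops_t *g_callbacks;
 *
 *   static int hal_initialize(const struct camera3_device *dev,
 *                             const camera3_callback_ops_t *callback_ops) {
 *       (void) dev;
 *       if (callback_ops == NULL)
 *           return -ENODEV;
 *       g_callbacks = callback_ops;
 *       return 0;
 *   }
 */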
   2546 
   2547     /**********************************************************************
   2548      * Stream management
   2549      */
   2550 
   2551     /**
   2552      * configure_streams:
   2553      *
   2554      * CAMERA_DEVICE_API_VERSION_3_0 only:
   2555      *
   2556      * Reset the HAL camera device processing pipeline and set up new input and
   2557      * output streams. This call replaces any existing stream configuration with
   2558      * the streams defined in the stream_list. This method will be called at
   2559      * least once after initialize() before a request is submitted with
   2560      * process_capture_request().
   2561      *
   2562      * The stream_list must contain at least one output-capable stream, and may
   2563      * not contain more than one input-capable stream.
   2564      *
   2565      * The stream_list may contain streams that are also in the currently-active
    2566      * set of streams (from the previous call to configure_streams()). These
   2567      * streams will already have valid values for usage, max_buffers, and the
   2568      * private pointer.
   2569      *
   2570      * If such a stream has already had its buffers registered,
   2571      * register_stream_buffers() will not be called again for the stream, and
   2572      * buffers from the stream can be immediately included in input requests.
   2573      *
   2574      * If the HAL needs to change the stream configuration for an existing
   2575      * stream due to the new configuration, it may rewrite the values of usage
   2576      * and/or max_buffers during the configure call.
   2577      *
   2578      * The framework will detect such a change, and will then reallocate the
   2579      * stream buffers, and call register_stream_buffers() again before using
   2580      * buffers from that stream in a request.
   2581      *
   2582      * If a currently-active stream is not included in stream_list, the HAL may
   2583      * safely remove any references to that stream. It will not be reused in a
   2584      * later configure() call by the framework, and all the gralloc buffers for
   2585      * it will be freed after the configure_streams() call returns.
   2586      *
   2587      * The stream_list structure is owned by the framework, and may not be
   2588      * accessed once this call completes. The address of an individual
   2589      * camera3_stream_t structure will remain valid for access by the HAL until
    2590      * the end of the first configure_streams() call which no longer includes
   2591      * that camera3_stream_t in the stream_list argument. The HAL may not change
   2592      * values in the stream structure outside of the private pointer, except for
   2593      * the usage and max_buffers members during the configure_streams() call
   2594      * itself.
   2595      *
    2596      * If the stream is new, the usage, max_buffers, and private pointer fields
   2597      * of the stream structure will all be set to 0. The HAL device must set
   2598      * these fields before the configure_streams() call returns. These fields
   2599      * are then used by the framework and the platform gralloc module to
   2600      * allocate the gralloc buffers for each stream.
   2601      *
   2602      * Before such a new stream can have its buffers included in a capture
   2603      * request, the framework will call register_stream_buffers() with that
   2604      * stream. However, the framework is not required to register buffers for
   2605      * _all_ streams before submitting a request. This allows for quick startup
   2606      * of (for example) a preview stream, with allocation for other streams
   2607      * happening later or concurrently.
   2608      *
   2609      * ------------------------------------------------------------------------
   2610      * CAMERA_DEVICE_API_VERSION_3_1 only:
   2611      *
   2612      * Reset the HAL camera device processing pipeline and set up new input and
   2613      * output streams. This call replaces any existing stream configuration with
   2614      * the streams defined in the stream_list. This method will be called at
   2615      * least once after initialize() before a request is submitted with
   2616      * process_capture_request().
   2617      *
   2618      * The stream_list must contain at least one output-capable stream, and may
   2619      * not contain more than one input-capable stream.
   2620      *
   2621      * The stream_list may contain streams that are also in the currently-active
    2622      * set of streams (from the previous call to configure_streams()). These
   2623      * streams will already have valid values for usage, max_buffers, and the
   2624      * private pointer.
   2625      *
   2626      * If such a stream has already had its buffers registered,
   2627      * register_stream_buffers() will not be called again for the stream, and
   2628      * buffers from the stream can be immediately included in input requests.
   2629      *
   2630      * If the HAL needs to change the stream configuration for an existing
   2631      * stream due to the new configuration, it may rewrite the values of usage
   2632      * and/or max_buffers during the configure call.
   2633      *
   2634      * The framework will detect such a change, and will then reallocate the
   2635      * stream buffers, and call register_stream_buffers() again before using
   2636      * buffers from that stream in a request.
   2637      *
   2638      * If a currently-active stream is not included in stream_list, the HAL may
   2639      * safely remove any references to that stream. It will not be reused in a
   2640      * later configure() call by the framework, and all the gralloc buffers for
   2641      * it will be freed after the configure_streams() call returns.
   2642      *
   2643      * The stream_list structure is owned by the framework, and may not be
   2644      * accessed once this call completes. The address of an individual
   2645      * camera3_stream_t structure will remain valid for access by the HAL until
    2646      * the end of the first configure_streams() call which no longer includes
   2647      * that camera3_stream_t in the stream_list argument. The HAL may not change
   2648      * values in the stream structure outside of the private pointer, except for
   2649      * the usage and max_buffers members during the configure_streams() call
   2650      * itself.
   2651      *
    2652      * If the stream is new, the max_buffers and private pointer fields of the
    2653      * stream structure will be set to 0. The usage will be set to the
   2654      * consumer usage flags. The HAL device must set these fields before the
   2655      * configure_streams() call returns. These fields are then used by the
   2656      * framework and the platform gralloc module to allocate the gralloc
   2657      * buffers for each stream.
   2658      *
   2659      * Before such a new stream can have its buffers included in a capture
   2660      * request, the framework will call register_stream_buffers() with that
   2661      * stream. However, the framework is not required to register buffers for
   2662      * _all_ streams before submitting a request. This allows for quick startup
   2663      * of (for example) a preview stream, with allocation for other streams
   2664      * happening later or concurrently.
   2665      *
   2666      * ------------------------------------------------------------------------
   2667      * >= CAMERA_DEVICE_API_VERSION_3_2:
   2668      *
   2669      * Reset the HAL camera device processing pipeline and set up new input and
   2670      * output streams. This call replaces any existing stream configuration with
   2671      * the streams defined in the stream_list. This method will be called at
   2672      * least once after initialize() before a request is submitted with
   2673      * process_capture_request().
   2674      *
   2675      * The stream_list must contain at least one output-capable stream, and may
   2676      * not contain more than one input-capable stream.
   2677      *
   2678      * The stream_list may contain streams that are also in the currently-active
    2679      * set of streams (from the previous call to configure_streams()). These
   2680      * streams will already have valid values for usage, max_buffers, and the
   2681      * private pointer.
   2682      *
   2683      * If the HAL needs to change the stream configuration for an existing
   2684      * stream due to the new configuration, it may rewrite the values of usage
   2685      * and/or max_buffers during the configure call.
   2686      *
   2687      * The framework will detect such a change, and may then reallocate the
   2688      * stream buffers before using buffers from that stream in a request.
   2689      *
   2690      * If a currently-active stream is not included in stream_list, the HAL may
   2691      * safely remove any references to that stream. It will not be reused in a
   2692      * later configure() call by the framework, and all the gralloc buffers for
   2693      * it will be freed after the configure_streams() call returns.
   2694      *
   2695      * The stream_list structure is owned by the framework, and may not be
   2696      * accessed once this call completes. The address of an individual
   2697      * camera3_stream_t structure will remain valid for access by the HAL until
    2698      * the end of the first configure_streams() call which no longer includes
   2699      * that camera3_stream_t in the stream_list argument. The HAL may not change
   2700      * values in the stream structure outside of the private pointer, except for
   2701      * the usage and max_buffers members during the configure_streams() call
   2702      * itself.
   2703      *
    2704      * If the stream is new, the max_buffers and private pointer fields of the
    2705      * stream structure will be set to 0. The usage will be set to the
   2706      * consumer usage flags. The HAL device must set these fields before the
   2707      * configure_streams() call returns. These fields are then used by the
   2708      * framework and the platform gralloc module to allocate the gralloc
   2709      * buffers for each stream.
   2710      *
   2711      * Newly allocated buffers may be included in a capture request at any time
   2712      * by the framework. Once a gralloc buffer is returned to the framework
   2713      * with process_capture_result (and its respective release_fence has been
   2714      * signaled) the framework may free or reuse it at any time.
   2715      *
   2716      * ------------------------------------------------------------------------
   2717      *
   2718      * Preconditions:
   2719      *
   2720      * The framework will only call this method when no captures are being
   2721      * processed. That is, all results have been returned to the framework, and
   2722      * all in-flight input and output buffers have been returned and their
   2723      * release sync fences have been signaled by the HAL. The framework will not
   2724      * submit new requests for capture while the configure_streams() call is
   2725      * underway.
   2726      *
   2727      * Postconditions:
   2728      *
   2729      * The HAL device must configure itself to provide maximum possible output
   2730      * frame rate given the sizes and formats of the output streams, as
   2731      * documented in the camera device's static metadata.
   2732      *
   2733      * Performance requirements:
   2734      *
   2735      * This call is expected to be heavyweight and possibly take several hundred
   2736      * milliseconds to complete, since it may require resetting and
   2737      * reconfiguring the image sensor and the camera processing pipeline.
   2738      * Nevertheless, the HAL device should attempt to minimize the
   2739      * reconfiguration delay to minimize the user-visible pauses during
   2740      * application operational mode changes (such as switching from still
   2741      * capture to video recording).
   2742      *
   2743      * The HAL should return from this call in 500ms, and must return from this
   2744      * call in 1000ms.
   2745      *
   2746      * Return values:
   2747      *
   2748      *  0:      On successful stream configuration
   2749      *
   2750      * -EINVAL: If the requested stream configuration is invalid. Some examples
   2751      *          of invalid stream configurations include:
   2752      *
   2753      *          - Including more than 1 input-capable stream (INPUT or
   2754      *            BIDIRECTIONAL)
   2755      *
   2756      *          - Not including any output-capable streams (OUTPUT or
   2757      *            BIDIRECTIONAL)
   2758      *
   2759      *          - Including streams with unsupported formats, or an unsupported
   2760      *            size for that format.
   2761      *
   2762      *          - Including too many output streams of a certain format.
   2763      *
   2764      *          - Unsupported rotation configuration (only applies to
   2765      *            devices with version >= CAMERA_DEVICE_API_VERSION_3_3)
   2766      *
   2767      *          - Stream sizes/formats don't satisfy the
   2768      *            camera3_stream_configuration_t->operation_mode requirements for non-NORMAL mode,
   2769      *            or the requested operation_mode is not supported by the HAL.
   2770      *            (only applies to devices with version >= CAMERA_DEVICE_API_VERSION_3_3)
   2771      *
   2772      *          Note that the framework submitting an invalid stream
   2773      *          configuration is not normal operation, since stream
   2774      *          configurations are checked before configure. An invalid
   2775      *          configuration means that a bug exists in the framework code, or
   2776      *          there is a mismatch between the HAL's static metadata and the
   2777      *          requirements on streams.
   2778      *
   2779      * -ENODEV: If there has been a fatal error and the device is no longer
   2780      *          operational. Only close() can be called successfully by the
   2781      *          framework after this error is returned.
   2782      */
   2783     int (*configure_streams)(const struct camera3_device *,
   2784             camera3_stream_configuration_t *stream_list);
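
/**
 * Illustrative sketch (not a normative part of this API, >=
 * CAMERA_DEVICE_API_VERSION_3_2): the general shape of a configure_streams()
 * implementation that validates the stream set and then fills in usage and
 * max_buffers for each stream before returning. The specific usage flag and
 * the buffer count of 4 are assumptions for the example, not requirements.
 *
 *   #include <errno.h>
 *   #include <stddef.h>
 *   #include <hardware/camera3.h>
 *   #include <hardware/gralloc.h>
 *
 *   static int hal_configure_streams(const struct camera3_device *dev,
 *                                    camera3_stream_configuration_t *stream_list) {
 *       (void) dev;
 *       if (stream_list == NULL || stream_list->num_streams == 0 ||
 *           stream_list->streams == NULL)
 *           return -EINVAL;
 *
 *       uint32_t inputs = 0, outputs = 0;
 *       for (uint32_t i = 0; i < stream_list->num_streams; i++) {
 *           camera3_stream_t *s = stream_list->streams[i];
 *           if (s->stream_type == CAMERA3_STREAM_INPUT ||
 *               s->stream_type == CAMERA3_STREAM_BIDIRECTIONAL)
 *               inputs++;
 *           if (s->stream_type == CAMERA3_STREAM_OUTPUT ||
 *               s->stream_type == CAMERA3_STREAM_BIDIRECTIONAL)
 *               outputs++;
 *       }
 *       if (inputs > 1 || outputs < 1)
 *           return -EINVAL;
 *
 *       for (uint32_t i = 0; i < stream_list->num_streams; i++) {
 *           camera3_stream_t *s = stream_list->streams[i];
 *           if (s->stream_type != CAMERA3_STREAM_INPUT)
 *               s->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;  // assumed flag
 *           s->max_buffers = 4;                             // assumed depth
 *       }
 *       return 0;
 *   }
 */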
   2785 
   2786     /**
   2787      * register_stream_buffers:
   2788      *
   2789      * >= CAMERA_DEVICE_API_VERSION_3_2:
   2790      *
   2791      * DEPRECATED. This will not be called and must be set to NULL.
   2792      *
   2793      * <= CAMERA_DEVICE_API_VERSION_3_1:
   2794      *
   2795      * Register buffers for a given stream with the HAL device. This method is
   2796      * called by the framework after a new stream is defined by
   2797      * configure_streams, and before buffers from that stream are included in a
   2798      * capture request. If the same stream is listed in a subsequent
   2799      * configure_streams() call, register_stream_buffers will _not_ be called
   2800      * again for that stream.
   2801      *
   2802      * The framework does not need to register buffers for all configured
   2803      * streams before it submits the first capture request. This allows quick
   2804      * startup for preview (or similar use cases) while other streams are still
   2805      * being allocated.
   2806      *
   2807      * This method is intended to allow the HAL device to map or otherwise
   2808      * prepare the buffers for later use. The buffers passed in will already be
   2809      * locked for use. At the end of the call, all the buffers must be ready to
   2810      * be returned to the stream.  The buffer_set argument is only valid for the
   2811      * duration of this call.
   2812      *
   2813      * If the stream format was set to HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
   2814      * the camera HAL should inspect the passed-in buffers here to determine any
   2815      * platform-private pixel format information.
   2816      *
   2817      * Performance requirements:
   2818      *
   2819      * This should be a non-blocking call. The HAL should return from this call
   2820      * in 1ms, and must return from this call in 5ms.
   2821      *
   2822      * Return values:
   2823      *
   2824      *  0:      On successful registration of the new stream buffers
   2825      *
   2826      * -EINVAL: If the stream_buffer_set does not refer to a valid active
   2827      *          stream, or if the buffers array is invalid.
   2828      *
   2829      * -ENOMEM: If there was a failure in registering the buffers. The framework
   2830      *          must consider all the stream buffers to be unregistered, and can
   2831      *          try to register again later.
   2832      *
   2833      * -ENODEV: If there is a fatal error, and the device is no longer
   2834      *          operational. Only close() can be called successfully by the
   2835      *          framework after this error is returned.
   2836      */
   2837     int (*register_stream_buffers)(const struct camera3_device *,
   2838             const camera3_stream_buffer_set_t *buffer_set);
   2839 
   2840     /**********************************************************************
   2841      * Request creation and submission
   2842      */
   2843 
   2844     /**
   2845      * construct_default_request_settings:
   2846      *
   2847      * Create capture settings for standard camera use cases.
   2848      *
   2849      * The device must return a settings buffer that is configured to meet the
   2850      * requested use case, which must be one of the CAMERA3_TEMPLATE_*
   2851      * enums. All request control fields must be included.
   2852      *
   2853      * The HAL retains ownership of this structure, but the pointer to the
   2854      * structure must be valid until the device is closed. The framework and the
   2855      * HAL may not modify the buffer once it is returned by this call. The same
   2856      * buffer may be returned for subsequent calls for the same template, or for
   2857      * other templates.
   2858      *
   2859      * Performance requirements:
   2860      *
   2861      * This should be a non-blocking call. The HAL should return from this call
   2862      * in 1ms, and must return from this call in 5ms.
   2863      *
   2864      * Return values:
   2865      *
   2866      *   Valid metadata: On successful creation of a default settings
   2867      *                   buffer.
   2868      *
   2869      *   NULL:           In case of a fatal error. After this is returned, only
   2870      *                   the close() method can be called successfully by the
   2871      *                   framework.
   2872      */
   2873     const camera_metadata_t* (*construct_default_request_settings)(
   2874             const struct camera3_device *,
   2875             int type);
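
/**
 * Illustrative sketch (not a normative part of this API): framework-side use
 * of the call above. The returned metadata remains owned by the HAL and must
 * not be modified or freed by the caller; NULL indicates a fatal error.
 *
 *   #include <hardware/camera3.h>
 *
 *   static const camera_metadata_t *get_preview_defaults(camera3_device_t *dev) {
 *       return dev->ops->construct_default_request_settings(
 *               dev, CAMERA3_TEMPLATE_PREVIEW);
 *   }
 */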
   2876 
   2877     /**
   2878      * process_capture_request:
   2879      *
   2880      * Send a new capture request to the HAL. The HAL should not return from
   2881      * this call until it is ready to accept the next request to process. Only
   2882      * one call to process_capture_request() will be made at a time by the
   2883      * framework, and the calls will all be from the same thread. The next call
   2884      * to process_capture_request() will be made as soon as a new request and
   2885      * its associated buffers are available. In a normal preview scenario, this
   2886      * means the function will be called again by the framework almost
   2887      * instantly.
   2888      *
   2889      * The actual request processing is asynchronous, with the results of
   2890      * capture being returned by the HAL through the process_capture_result()
   2891      * call. This call requires the result metadata to be available, but output
   2892      * buffers may simply provide sync fences to wait on. Multiple requests are
   2893      * expected to be in flight at once, to maintain full output frame rate.
   2894      *
   2895      * The framework retains ownership of the request structure. It is only
   2896      * guaranteed to be valid during this call. The HAL device must make copies
   2897      * of the information it needs to retain for the capture processing. The HAL
   2898      * is responsible for waiting on and closing the buffers' fences and
   2899      * returning the buffer handles to the framework.
   2900      *
   2901      * The HAL must write the file descriptor for the input buffer's release
   2902      * sync fence into input_buffer->release_fence, if input_buffer is not
   2903      * NULL. If the HAL returns -1 for the input buffer release sync fence, the
   2904      * framework is free to immediately reuse the input buffer. Otherwise, the
   2905      * framework will wait on the sync fence before refilling and reusing the
   2906      * input buffer.
   2907      *
   2908      * >= CAMERA_DEVICE_API_VERSION_3_2:
   2909      *
   2910      * The input/output buffers provided by the framework in each request
    2911      * may be brand new (having never before been seen by the HAL).
   2912      *
   2913      * ------------------------------------------------------------------------
   2914      * Performance considerations:
   2915      *
   2916      * Handling a new buffer should be extremely lightweight and there should be
   2917      * no frame rate degradation or frame jitter introduced.
   2918      *
   2919      * This call must return fast enough to ensure that the requested frame
   2920      * rate can be sustained, especially for streaming cases (post-processing
    2921      * quality settings set to FAST). The HAL should return from this call in 1
   2922      * frame interval, and must return from this call in 4 frame intervals.
   2923      *
   2924      * Return values:
   2925      *
   2926      *  0:      On a successful start to processing the capture request
   2927      *
   2928      * -EINVAL: If the input is malformed (the settings are NULL when not
   2929      *          allowed, there are 0 output buffers, etc) and capture processing
   2930      *          cannot start. Failures during request processing should be
   2931      *          handled by calling camera3_callback_ops_t.notify(). In case of
   2932      *          this error, the framework will retain responsibility for the
   2933      *          stream buffers' fences and the buffer handles; the HAL should
   2934      *          not close the fences or return these buffers with
   2935      *          process_capture_result.
   2936      *
   2937      * -ENODEV: If the camera device has encountered a serious error. After this
   2938      *          error is returned, only the close() method can be successfully
   2939      *          called by the framework.
   2940      *
   2941      */
   2942     int (*process_capture_request)(const struct camera3_device *,
   2943             camera3_capture_request_t *request);
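
/**
 * Illustrative sketch (not a normative part of this API): the ownership rule
 * above in practice. The request structure is only valid for the duration of
 * the call, so the HAL copies what it needs before returning. The in-flight
 * record type and the enqueue_for_capture() helper are assumptions for the
 * example.
 *
 *   #include <errno.h>
 *   #include <stdlib.h>
 *   #include <string.h>
 *   #include <hardware/camera3.h>
 *
 *   typedef struct {
 *       uint32_t frame_number;
 *       uint32_t num_output_buffers;
 *       camera3_stream_buffer_t *output_buffers;  // deep copy of the array
 *   } inflight_request_t;
 *
 *   // Hypothetical helper: hands the copied request to the HAL's capture
 *   // pipeline, which later returns buffers via process_capture_result().
 *   void enqueue_for_capture(inflight_request_t *req);
 *
 *   static int hal_process_capture_request(const struct camera3_device *dev,
 *                                          camera3_capture_request_t *request) {
 *       (void) dev;
 *       if (request == NULL || request->num_output_buffers == 0 ||
 *           request->output_buffers == NULL)
 *           return -EINVAL;
 *
 *       inflight_request_t *copy = calloc(1, sizeof(*copy));
 *       if (copy == NULL)
 *           return -ENODEV;  // treat allocation failure as a fatal error
 *       copy->frame_number = request->frame_number;
 *       copy->num_output_buffers = request->num_output_buffers;
 *       copy->output_buffers = calloc(request->num_output_buffers,
 *                                     sizeof(camera3_stream_buffer_t));
 *       if (copy->output_buffers == NULL) {
 *           free(copy);
 *           return -ENODEV;
 *       }
 *       memcpy(copy->output_buffers, request->output_buffers,
 *              request->num_output_buffers * sizeof(camera3_stream_buffer_t));
 *
 *       enqueue_for_capture(copy);
 *       return 0;
 *   }
 */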
   2944 
   2945     /**********************************************************************
   2946      * Miscellaneous methods
   2947      */
   2948 
   2949     /**
   2950      * get_metadata_vendor_tag_ops:
   2951      *
   2952      * Get methods to query for vendor extension metadata tag information. The
   2953      * HAL should fill in all the vendor tag operation methods, or leave ops
   2954      * unchanged if no vendor tags are defined.
   2955      *
   2956      * The definition of vendor_tag_query_ops_t can be found in
   2957      * system/media/camera/include/system/camera_metadata.h.
   2958      *
   2959      * >= CAMERA_DEVICE_API_VERSION_3_2:
   2960      *    DEPRECATED. This function has been deprecated and should be set to
   2961      *    NULL by the HAL.  Please implement get_vendor_tag_ops in camera_common.h
   2962      *    instead.
   2963      */
   2964     void (*get_metadata_vendor_tag_ops)(const struct camera3_device*,
   2965             vendor_tag_query_ops_t* ops);
   2966 
   2967     /**
   2968      * dump:
   2969      *
   2970      * Print out debugging state for the camera device. This will be called by
   2971      * the framework when the camera service is asked for a debug dump, which
   2972      * happens when using the dumpsys tool, or when capturing a bugreport.
   2973      *
   2974      * The passed-in file descriptor can be used to write debugging text using
   2975      * dprintf() or write(). The text should be in ASCII encoding only.
   2976      *
   2977      * Performance requirements:
   2978      *
   2979      * This must be a non-blocking call. The HAL should return from this call
    2980      * in 1ms, and must return from this call in 10ms. This call must avoid
   2981      * deadlocks, as it may be called at any point during camera operation.
   2982      * Any synchronization primitives used (such as mutex locks or semaphores)
   2983      * should be acquired with a timeout.
   2984      */
   2985     void (*dump)(const struct camera3_device *, int fd);
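
/**
 * Illustrative sketch (not a normative part of this API): dump() can simply
 * dprintf() readable state to the provided descriptor. The counter shown is a
 * hypothetical piece of HAL bookkeeping.
 *
 *   #include <stdio.h>
 *   #include <hardware/camera3.h>
 *
 *   static int g_inflight_count;  // hypothetical HAL bookkeeping
 *
 *   static void hal_dump(const struct camera3_device *dev, int fd) {
 *       (void) dev;
 *       dprintf(fd, "Example camera HAL:\n");
 *       dprintf(fd, "  in-flight requests: %d\n", g_inflight_count);
 *   }
 */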
   2986 
   2987     /**
   2988      * flush:
   2989      *
   2990      * Flush all currently in-process captures and all buffers in the pipeline
   2991      * on the given device. The framework will use this to dump all state as
   2992      * quickly as possible in order to prepare for a configure_streams() call.
   2993      *
   2994      * No buffers are required to be successfully returned, so every buffer
   2995      * held at the time of flush() (whether successfully filled or not) may be
   2996      * returned with CAMERA3_BUFFER_STATUS_ERROR. Note the HAL is still allowed
   2997      * to return valid (CAMERA3_BUFFER_STATUS_OK) buffers during this call,
   2998      * provided they are successfully filled.
   2999      *
   3000      * All requests currently in the HAL are expected to be returned as soon as
   3001      * possible.  Not-in-process requests should return errors immediately. Any
   3002      * interruptible hardware blocks should be stopped, and any uninterruptible
   3003      * blocks should be waited on.
   3004      *
   3005      * flush() may be called concurrently to process_capture_request(), with the expectation that
   3006      * process_capture_request will return quickly and the request submitted in that
   3007      * process_capture_request call is treated like all other in-flight requests.  Due to
   3008      * concurrency issues, it is possible that from the HAL's point of view, a
   3009      * process_capture_request() call may be started after flush has been invoked but has not
   3010      * returned yet. If such a call happens before flush() returns, the HAL should treat the new
   3011      * capture request like other in-flight pending requests (see #4 below).
   3012      *
   3013      * More specifically, the HAL must follow below requirements for various cases:
   3014      *
   3015      * 1. For captures that are too late for the HAL to cancel/stop, and will be
   3016      *    completed normally by the HAL; i.e. the HAL can send shutter/notify and
   3017      *    process_capture_result and buffers as normal.
   3018      *
   3019      * 2. For pending requests that have not done any processing, the HAL must call notify
   3020      *    CAMERA3_MSG_ERROR_REQUEST, and return all the output buffers with
   3021      *    process_capture_result in the error state (CAMERA3_BUFFER_STATUS_ERROR).
   3022      *    The HAL must not place the release fence into an error state, instead,
   3023      *    the release fences must be set to the acquire fences passed by the framework,
   3024      *    or -1 if they have been waited on by the HAL already. This is also the path
   3025      *    to follow for any captures for which the HAL already called notify() with
   3026      *    CAMERA3_MSG_SHUTTER but won't be producing any metadata/valid buffers for.
   3027      *    After CAMERA3_MSG_ERROR_REQUEST, for a given frame, only process_capture_results with
    3028      *    buffers in CAMERA3_BUFFER_STATUS_ERROR are allowed. No further notify() calls
    3029      *    or process_capture_result calls with non-null metadata are allowed.
   3030      *
   3031      * 3. For partially completed pending requests that will not have all the output
   3032      *    buffers or perhaps missing metadata, the HAL should follow below:
   3033      *
   3034      *    3.1. Call notify with CAMERA3_MSG_ERROR_RESULT if some of the expected result
   3035      *    metadata (i.e. one or more partial metadata) won't be available for the capture.
   3036      *
   3037      *    3.2. Call notify with CAMERA3_MSG_ERROR_BUFFER for every buffer that won't
   3038      *         be produced for the capture.
   3039      *
    3040      *    3.3. Call notify with CAMERA3_MSG_SHUTTER with the capture timestamp before
   3041      *         any buffers/metadata are returned with process_capture_result.
   3042      *
    3043      *    3.4. For captures that will produce some results, the HAL must not call
   3044      *        CAMERA3_MSG_ERROR_REQUEST, since that indicates complete failure.
   3045      *
   3046      *    3.5. Valid buffers/metadata should be passed to the framework as normal.
   3047      *
   3048      *    3.6. Failed buffers should be returned to the framework as described for case 2.
   3049      *         But failed buffers do not have to follow the strict ordering valid buffers do,
   3050      *         and may be out-of-order with respect to valid buffers. For example, if buffers
   3051      *         A, B, C, D, E are sent, D and E are failed, then A, E, B, D, C is an acceptable
   3052      *         return order.
   3053      *
   3054      *    3.7. For fully-missing metadata, calling CAMERA3_MSG_ERROR_RESULT is sufficient, no
   3055      *         need to call process_capture_result with NULL metadata or equivalent.
   3056      *
   3057      * 4. If a flush() is invoked while a process_capture_request() invocation is active, that
   3058      *    process call should return as soon as possible. In addition, if a process_capture_request()
   3059      *    call is made after flush() has been invoked but before flush() has returned, the
   3060      *    capture request provided by the late process_capture_request call should be treated like
   3061      *    a pending request in case #2 above.
   3062      *
   3063      * flush() should only return when there are no more outstanding buffers or
   3064      * requests left in the HAL. The framework may call configure_streams (as
   3065      * the HAL state is now quiesced) or may issue new requests.
   3066      *
   3067      * Note that it's sufficient to only support fully-succeeded and fully-failed result cases.
   3068      * However, it is highly desirable to support the partial failure cases as well, as it
   3069      * could help improve the flush call overall performance.
   3070      *
   3071      * Performance requirements:
   3072      *
   3073      * The HAL should return from this call in 100ms, and must return from this
    3074      * call in 1000ms. This call must not be blocked for longer than the pipeline
    3075      * latency (see S7 for definition).
   3076      *
   3077      * Version information:
   3078      *
    3079      *   Only available if device version >= CAMERA_DEVICE_API_VERSION_3_1.
   3080      *
   3081      * Return values:
   3082      *
   3083      *  0:      On a successful flush of the camera HAL.
   3084      *
   3085      * -EINVAL: If the input is malformed (the device is not valid).
   3086      *
   3087      * -ENODEV: If the camera device has encountered a serious error. After this
   3088      *          error is returned, only the close() method can be successfully
   3089      *          called by the framework.
   3090      */
   3091     int (*flush)(const struct camera3_device *);
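
/**
 * Illustrative sketch (not a normative part of this API): handling case #2
 * above during flush(). Each pending request that never started processing
 * gets an ERROR_REQUEST notify, and its output buffers are returned with
 * status CAMERA3_BUFFER_STATUS_ERROR, with the un-waited acquire fence handed
 * back as the release fence. The surrounding bookkeeping is assumed to exist
 * elsewhere in the HAL.
 *
 *   #include <string.h>
 *   #include <hardware/camera3.h>
 *
 *   static void fail_pending_request(const camera3_callback_ops_t *cb,
 *                                    uint32_t frame_number,
 *                                    camera3_stream_buffer_t *buffers,
 *                                    uint32_t count) {
 *       camera3_notify_msg_t msg;
 *       memset(&msg, 0, sizeof(msg));
 *       msg.type = CAMERA3_MSG_ERROR;
 *       msg.message.error.frame_number = frame_number;
 *       msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
 *       cb->notify(cb, &msg);
 *
 *       for (uint32_t i = 0; i < count; i++) {
 *           buffers[i].status = CAMERA3_BUFFER_STATUS_ERROR;
 *           buffers[i].release_fence = buffers[i].acquire_fence;
 *           buffers[i].acquire_fence = -1;
 *       }
 *
 *       camera3_capture_result_t res;
 *       memset(&res, 0, sizeof(res));
 *       res.frame_number = frame_number;
 *       res.num_output_buffers = count;
 *       res.output_buffers = buffers;
 *       res.partial_result = 0;  // buffers only, no metadata
 *       cb->process_capture_result(cb, &res);
 *   }
 */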
   3092 
   3093     /* reserved for future use */
   3094     void *reserved[8];
   3095 } camera3_device_ops_t;
   3096 
   3097 /**********************************************************************
   3098  *
   3099  * Camera device definition
   3100  *
   3101  */
   3102 typedef struct camera3_device {
   3103     /**
    3104      * common.version must equal CAMERA_DEVICE_API_VERSION_3_4 to identify this
    3105      * device as implementing version 3.4 of the camera device HAL.
   3106      *
   3107      * Performance requirements:
   3108      *
   3109      * Camera open (common.module->common.methods->open) should return in 200ms, and must return
   3110      * in 500ms.
   3111      * Camera close (common.close) should return in 200ms, and must return in 500ms.
   3112      *
   3113      */
   3114     hw_device_t common;
   3115     camera3_device_ops_t *ops;
   3116     void *priv;
   3117 } camera3_device_t;
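
/**
 * Illustrative sketch (not a normative part of this API): how a client of
 * this HAL might open a camera device through the camera module and perform
 * the first step of the S2 startup sequence. The camera id string "0" and the
 * error handling are assumptions for the example.
 *
 *   #include <stddef.h>
 *   #include <hardware/camera_common.h>
 *   #include <hardware/camera3.h>
 *
 *   static camera3_device_t *open_camera(camera_module_t *module,
 *                                        const camera3_callback_ops_t *cb) {
 *       hw_device_t *dev = NULL;
 *       if (module->common.methods->open(&module->common, "0", &dev) != 0)
 *           return NULL;
 *
 *       camera3_device_t *cam = (camera3_device_t *) dev;
 *       if (cam->ops->initialize(cam, cb) != 0) {
 *           cam->common.close(&cam->common);
 *           return NULL;
 *       }
 *       return cam;  // ready for configure_streams() and capture requests
 *   }
 */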
   3118 
   3119 __END_DECLS
   3120 
    3121 #endif /* ANDROID_INCLUDE_CAMERA3_H */
   3122