Home | History | Annotate | Download | only in 3.2
      1 /*
      2  * Copyright (C) 2016 The Android Open Source Project
      3  *
      4  * Licensed under the Apache License, Version 2.0 (the "License");
      5  * you may not use this file except in compliance with the License.
      6  * You may obtain a copy of the License at
      7  *
      8  *      http://www.apache.org/licenses/LICENSE-2.0
      9  *
     10  * Unless required by applicable law or agreed to in writing, software
     11  * distributed under the License is distributed on an "AS IS" BASIS,
     12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     13  * See the License for the specific language governing permissions and
     14  * limitations under the License.
     15  */
     16 
     17 package android.hardware.camera.device@3.2;
     18 
     19 import android.hardware.graphics.common@1.0::types;
     20 
/** Raw camera metadata buffer, as defined by the camera metadata specification. */
typedef vec<uint8_t> CameraMetadata;

/** Bitfield of graphics-common BufferUsage flags (gralloc usage). */
typedef bitfield<BufferUsage> BufferUsageFlags;

/** Bitfield of graphics-common Dataspace values describing buffer contents. */
typedef bitfield<Dataspace> DataspaceFlags;
     24 
/**
 * StreamType:
 *
 * The type of the camera stream, which defines whether the camera HAL device is
 * the producer or the consumer for that stream, and how the buffers of the
 * stream relate to the other streams.
 */
enum StreamType : uint32_t {
    /**
     * This stream is an output stream; the camera HAL device must fill buffers
     * from this stream with newly captured or reprocessed image data.
     */
    OUTPUT = 0,

    /**
     * This stream is an input stream; the camera HAL device must read buffers
     * from this stream and send them through the camera processing pipeline,
     * as if the buffer was a newly captured image from the imager.
     *
     * The pixel format for input stream can be any format reported by
     * android.scaler.availableInputOutputFormatsMap. The pixel format of the
     * output stream that is used to produce the reprocessing data may be any
     * format reported by android.scaler.availableStreamConfigurations. The
     * supported input/output stream combinations depend on the camera device
     * capabilities, see android.scaler.availableInputOutputFormatsMap for
     * stream map details.
     *
     * This kind of stream is generally used to reprocess data into higher
     * quality images (that otherwise would cause a frame rate performance
     * loss), or to do off-line reprocessing.
     *
     * The typical use cases are OPAQUE (typically ZSL) and YUV reprocessing,
     * see S8.2, S8.3 and S10 for more details.
     */
    INPUT = 1

};
     62 
/**
 * StreamRotation:
 *
 * The required counterclockwise rotation of camera stream.
 */
enum StreamRotation : uint32_t  {
    /** No rotation */
    ROTATION_0 = 0,

    /** Rotate by 90 degrees counterclockwise */
    ROTATION_90 = 1,

    /** Rotate by 180 degrees counterclockwise */
    ROTATION_180 = 2,

    /** Rotate by 270 degrees counterclockwise */
    ROTATION_270 = 3

};
     82 
/**
 * StreamConfigurationMode:
 *
 * This defines the general operation mode for the HAL (for a given stream
 * configuration) where modes besides NORMAL have different semantics, and
 * usually limit the generality of the API in exchange for higher performance in
 * some particular area.
 */
enum StreamConfigurationMode : uint32_t {
    /**
     * Normal stream configuration operation mode. This is the default camera
     * operation mode, where all semantics of HAL APIs and metadata controls
     * apply.
     */
    NORMAL_MODE = 0,

    /**
     * Special constrained high speed operation mode for devices that can not
     * support high speed output in NORMAL mode. All streams in this
     * configuration are operating at high speed mode and have different
     * characteristics and limitations to achieve high speed output. The NORMAL
     * mode can still be used for high speed output if the HAL can support high
     * speed output while satisfying all the semantics of HAL APIs and metadata
     * controls. It is recommended for the HAL to support high speed output in
     * NORMAL mode (by advertising the high speed FPS ranges in
     * android.control.aeAvailableTargetFpsRanges) if possible.
     *
     * This mode has below limitations/requirements:
     *
     *   1. The HAL must support up to 2 streams with sizes reported by
     *      android.control.availableHighSpeedVideoConfigurations.
     *   2. In this mode, the HAL is expected to output up to 120fps or
     *      higher. This mode must support the targeted FPS range and size
     *      configurations reported by
     *      android.control.availableHighSpeedVideoConfigurations.
     *   3. The HAL must support IMPLEMENTATION_DEFINED output
     *      stream format.
     *   4. To achieve efficient high speed streaming, the HAL may have to
     *      aggregate multiple frames together and send to camera device for
     *      processing where the request controls are same for all the frames in
     *      this batch (batch mode). The HAL must support max batch size and the
     *      max batch size requirements defined by
     *      android.control.availableHighSpeedVideoConfigurations.
     *   5. In this mode, the HAL must override aeMode, awbMode, and afMode to
     *      ON, ON, and CONTINUOUS_VIDEO, respectively. All post-processing
     *      block mode controls must be overridden to be FAST. Therefore, no
     *      manual control of capture and post-processing parameters is
     *      possible. All other controls operate the same as when
     *      android.control.mode == AUTO. This means that all other
     *      android.control.* fields must continue to work, such as
     *
     *      android.control.aeTargetFpsRange
     *      android.control.aeExposureCompensation
     *      android.control.aeLock
     *      android.control.awbLock
     *      android.control.effectMode
     *      android.control.aeRegions
     *      android.control.afRegions
     *      android.control.awbRegions
     *      android.control.afTrigger
     *      android.control.aePrecaptureTrigger
     *
     *      Outside of android.control.*, the following controls must work:
     *
     *      android.flash.mode (TORCH mode only, automatic flash for still
     *          capture must not work since aeMode is ON)
     *      android.lens.opticalStabilizationMode (if it is supported)
     *      android.scaler.cropRegion
     *      android.statistics.faceDetectMode (if it is supported)
     *   6. To reduce the amount of data passed across process boundaries at
     *      high frame rate, within one batch, camera framework only propagates
     *      the last shutter notify and the last capture results (including partial
     *      results and final result) to the app. The shutter notifies and capture
     *      results for the other requests in the batch are derived by
     *      the camera framework. As a result, the HAL can return empty metadata
     *      except for the last result in the batch.
     *
     * For more details about high speed stream requirements, see
     * android.control.availableHighSpeedVideoConfigurations and
     * CONSTRAINED_HIGH_SPEED_VIDEO capability defined in
     * android.request.availableCapabilities.
     *
     * This mode only needs to be supported by HALs that include
     * CONSTRAINED_HIGH_SPEED_VIDEO in the android.request.availableCapabilities
     * static metadata.
     */
    CONSTRAINED_HIGH_SPEED_MODE = 1,

    /**
     * A set of vendor-defined operating modes, for custom default camera
     * application features that can't be implemented in the fully flexible fashion
     * required for NORMAL_MODE.
     */
    VENDOR_MODE_0 = 0x8000,
    VENDOR_MODE_1,
    VENDOR_MODE_2,
    VENDOR_MODE_3,
    VENDOR_MODE_4,
    VENDOR_MODE_5,
    VENDOR_MODE_6,
    VENDOR_MODE_7
};
    185 
/**
 * Stream:
 *
 * A descriptor for a single camera input or output stream. A stream is defined
 * by the framework by its buffer resolution and format, and additionally by the
 * HAL with the gralloc usage flags and the maximum in-flight buffer count.
 *
 * If a configureStreams() call returns a non-fatal error, all active streams
 * remain valid as if configureStreams() had not been called.
 *
 */
struct Stream {
    /**
     * Stream ID - a nonnegative integer identifier for a stream.
     *
     * The identical stream ID must reference the same stream, with the same
     * width/height/format, across consecutive calls to configureStreams.
     *
     * If previously-used stream ID is not used in a new call to
     * configureStreams, then that stream is no longer active. Such a stream ID
     * may be reused in a future configureStreams with a new
     * width/height/format.
     *
     */
    int32_t id;

    /**
     * The type of the stream (input vs output, etc).
     */
    StreamType streamType;

    /**
     * The width in pixels of the buffers in this stream
     */
    uint32_t width;

    /**
     * The height in pixels of the buffers in this stream
     */
    uint32_t height;

    /**
     * The pixel format for the buffers in this stream.
     *
     * If IMPLEMENTATION_DEFINED is used, then the platform
     * gralloc module must select a format based on the usage flags provided by
     * the camera device and the other endpoint of the stream.
     *
     */
    android.hardware.graphics.common@1.0::PixelFormat format;

    /**
     * The gralloc usage flags for this stream, as needed by the consumer of
     * the stream.
     *
     * The usage flags from the producer and the consumer must be combined
     * together and then passed to the platform gralloc HAL module for
     * allocating the gralloc buffers for each stream.
     *
     * The HAL may use these consumer flags to decide stream configuration. For
     * streamType INPUT, the value of this field is always 0. For all streams
     * passed via configureStreams(), the HAL must set its own
     * additional usage flags in its output HalStreamConfiguration.
     *
     * The usage flag for an output stream may be bitwise combination of usage
     * flags for multiple consumers, for the purpose of sharing one camera
     * stream between those consumers. The HAL must fail configureStreams call
     * with ILLEGAL_ARGUMENT if the combined flags cannot be supported due to
     * incompatible buffer format, dataSpace, or other hardware limitations.
     */
    BufferUsageFlags usage;

    /**
     * A field that describes the contents of the buffer. The format and buffer
     * dimensions define the memory layout and structure of the stream buffers,
     * while dataSpace defines the meaning of the data within the buffer.
     *
     * For most formats, dataSpace defines the color space of the image data.
     * In addition, for some formats, dataSpace indicates whether image- or
     * depth-based data is requested. See
     * android.hardware.graphics.common@1.0::types for details of formats and
     * valid dataSpace values for each format.
     *
     * The HAL must use this dataSpace to configure the stream to the correct
     * colorspace, or to select between color and depth outputs if
     * supported. The dataspace values are set using the V0 dataspace
     * definitions.
     */
    DataspaceFlags dataSpace;

    /**
     * The required output rotation of the stream.
     *
     * This must be inspected by HAL along with stream width and height. For
     * example, if the rotation is 90 degree and the stream width and height is
     * 720 and 1280 respectively, camera service must supply buffers of size
     * 720x1280, and HAL must capture a 1280x720 image and rotate the image by
     * 90 degree counterclockwise. The rotation field must be ignored when the
     * stream type is input.
     *
     * The HAL must inspect this field during stream configuration and return
     * IllegalArgument if HAL cannot perform such rotation. HAL must always
     * support ROTATION_0, so a configureStreams() call must not fail for
     * unsupported rotation if rotation field of all streams is ROTATION_0.
     *
     */
    StreamRotation rotation;

};
    295 
/**
 * StreamConfiguration:
 *
 * A structure of stream definitions, used by configureStreams(). This
 * structure defines all the output streams and the reprocessing input
 * stream for the current camera use case.
 */
struct StreamConfiguration {
    /**
     * An array of camera stream pointers, defining the input/output
     * configuration for the camera HAL device.
     *
     * At most one input-capable stream may be defined.
     * At least one output-capable stream must be defined.
     */
    vec<Stream> streams;

    /**
     * The operation mode of streams in this configuration. The HAL can use this
     * mode as an indicator to set the stream property (e.g.,
     * HalStream::maxBuffers) appropriately. For example, if the
     * configuration is
     * CONSTRAINED_HIGH_SPEED_MODE, the HAL may
     * want to set aside more buffers for batch mode operation (see
     * android.control.availableHighSpeedVideoConfigurations for batch mode
     * definition).
     *
     */
    StreamConfigurationMode operationMode;

};
    327 
/**
 * HalStream:
 *
 * The camera HAL's response to each requested stream configuration.
 *
 * The HAL may specify the desired format, maximum buffers, and
 * usage flags for each stream.
 *
 */
struct HalStream {
    /**
     * Stream ID - a nonnegative integer identifier for a stream.
     *
     * The ID must be one of the stream IDs passed into configureStreams.
     */
    int32_t id;

    /**
     * An override pixel format for the buffers in this stream.
     *
     * The HAL must respect the requested format in Stream unless it is
     * IMPLEMENTATION_DEFINED, in which case the override format here must be
     * used by the client instead, for this stream. This allows cross-platform
     * HALs to use a standard format since IMPLEMENTATION_DEFINED formats often
     * require device-specific information. In all other cases, the
     * overrideFormat must match the requested format.
     *
     * When HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED is used, then the platform
     * gralloc module must select a format based on the usage flags provided by
     * the camera device and the other endpoint of the stream.
     */
    android.hardware.graphics.common@1.0::PixelFormat overrideFormat;

    /**
     * The gralloc usage flags for this stream, as needed by the HAL.
     *
     * For output streams, these are the HAL's producer usage flags. For input
     * streams, these are the HAL's consumer usage flags. The usage flags from
     * the producer and the consumer must be combined together and then passed
     * to the platform graphics allocator HAL for allocating the gralloc buffers
     * for each stream.
     *
     * If the stream's type is INPUT, then producerUsage must be 0, and
     * consumerUsage must be set. For other types, producerUsage must be set,
     * and consumerUsage must be 0.
     */
    BufferUsageFlags producerUsage;
    BufferUsageFlags consumerUsage;

    /**
     * The maximum number of buffers the HAL device may need to have dequeued at
     * the same time. The HAL device may not have more buffers in-flight from
     * this stream than this value.
     */
    uint32_t maxBuffers;

};
    385 
/**
 * HalStreamConfiguration:
 *
 * A structure of stream definitions, returned by configureStreams(). This
 * structure defines the HAL's desired parameters for each stream.
 *
 * All streams that were defined in the input to configureStreams() must have a
 * corresponding entry in this structure when returned by configureStreams().
 */
struct HalStreamConfiguration {
    /** One entry per stream in the StreamConfiguration passed to configureStreams(). */
    vec<HalStream> streams;
};
    398 
/**
 * BufferStatus:
 *
 * The current status of a single stream buffer.
 */
enum BufferStatus : uint32_t {
    /**
     * The buffer is in a normal state, and can be used after waiting on its
     * sync fence.
     */
    OK = 0,

    /**
     * The buffer does not contain valid data, and the data in it must not be
     * used. The sync fence must still be waited on before reusing the buffer.
     */
    ERROR = 1
};
    417 
/**
 * StreamBuffer:
 *
 * A single buffer from a camera3 stream. It includes a handle to its parent
 * stream, the handle to the gralloc buffer itself, and sync fences
 *
 * The buffer does not specify whether it is to be used for input or output;
 * that is determined by its parent stream type and how the buffer is passed to
 * the HAL device.
 */
struct StreamBuffer {
    /**
     * The ID of the stream this buffer is associated with. -1 indicates an
     * invalid (empty) StreamBuffer, in which case buffer must also point to
     * null and bufferId must be 0.
     */
    int32_t streamId;

    /**
     * The unique ID of the buffer within this StreamBuffer. 0 indicates this
     * StreamBuffer contains no buffer.
     * For StreamBuffers sent to the HAL in a CaptureRequest, this ID uniquely
     * identifies a buffer. When a buffer is sent to HAL for the first time,
     * both bufferId and buffer handle must be filled. HAL must keep track of
     * the mapping between bufferId and corresponding buffer until the
     * corresponding stream is removed from stream configuration or until camera
     * device session is closed. After the first time a buffer is introduced to
     * HAL, in the future camera service must refer to the same buffer using
     * only bufferId, and keep the buffer handle null.
     */
    uint64_t bufferId;

    /**
     * The graphics buffer handle to the buffer.
     *
     * For StreamBuffers sent to the HAL in a CaptureRequest, if the bufferId
     * is not seen by the HAL before, this buffer handle is guaranteed to be a
     * valid handle to a graphics buffer, with dimensions and format matching
     * that of the stream. If the bufferId has been sent to the HAL before, this
     * buffer handle must be null and HAL must look up the actual buffer handle
     * to use from its own bufferId to buffer handle map.
     *
     * For StreamBuffers returned in a CaptureResult, this must be null, since
     * the handle to the buffer is already known to the client (since the client
     * sent it in the matching CaptureRequest), and the handle can be identified
     * by the combination of frame number and stream ID.
     */
    handle buffer;

    /**
     * Current state of the buffer. The framework must not pass buffers to the
     * HAL that are in an error state. In case a buffer could not be filled by
     * the HAL, it must have its status set to ERROR when returned to the
     * framework with processCaptureResult().
     */
    BufferStatus status;

    /**
     * The acquire sync fence for this buffer. The HAL must wait on this fence
     * fd before attempting to read from or write to this buffer.
     *
     * In a buffer included in a CaptureRequest, the client may set this to null
     * to indicate that no waiting is necessary for this buffer.
     *
     * When the HAL returns an input or output buffer to the framework with
     * processCaptureResult(), the acquireFence must be set to null. If the HAL
     * never waits on the acquireFence due to an error in filling or reading a
     * buffer, when calling processCaptureResult() the HAL must set the
     * releaseFence of the buffer to be the acquireFence passed to it by the
     * client. This allows the client to wait on the fence before reusing the
     * buffer.
     */
    handle acquireFence;

    /**
     * The release sync fence for this buffer. The HAL must set this to a valid
     * fence fd when returning the input buffer or output buffers to the client
     * in a CaptureResult, or set it to null to indicate that no waiting is
     * required for this buffer.
     *
     * The client must set this to be null for all buffers included in a
     * processCaptureRequest call.
     *
     * After signaling the releaseFence for this buffer, the HAL
     * must not make any further attempts to access this buffer as the
     * ownership has been fully transferred back to the client.
     *
     * If this is null, then the ownership of this buffer is transferred back
     * immediately upon the call of processCaptureResult.
     */
    handle releaseFence;

};
    511 
/**
 * CameraBlob:
 *
 * Transport header for camera blob types; generally compressed JPEG buffers in
 * output streams.
 *
 * To capture JPEG images, a stream is created using the pixel format
 * HAL_PIXEL_FORMAT_BLOB and dataspace HAL_DATASPACE_V0_JFIF. The buffer size
 * for the stream is calculated by the framework, based on the static metadata
 * field android.jpeg.maxSize. Since compressed JPEG images are of variable
 * size, the HAL needs to include the final size of the compressed image using
 * this structure inside the output stream buffer. The camera blob ID field must
 * be set to CameraBlobId::JPEG.
 *
 * The transport header must be at the end of the JPEG output stream
 * buffer. That means the jpegBlobId must start at byte[buffer_size -
 * sizeof(CameraBlob)], where the buffer_size is the size of gralloc
 * buffer. Any HAL using this transport header must account for it in
 * android.jpeg.maxSize. The JPEG data itself starts at the beginning of the
 * buffer and must be blobSize bytes long.
 */
enum CameraBlobId : uint16_t {
    JPEG = 0x00FF,
};

struct CameraBlob {
    /** The kind of data in the buffer; must be CameraBlobId::JPEG for JPEG buffers. */
    CameraBlobId blobId;

    /** Size in bytes of the blob data, which starts at the beginning of the buffer. */
    uint32_t blobSize;
};
    542 
/**
 * MsgType:
 *
 * Indicates the type of message sent, which specifies which member of the
 * message union is valid.
 *
 */
enum MsgType : uint32_t {
    /**
     * An error has occurred. NotifyMsg::Message::Error contains the
     * error information.
     */
    ERROR = 1,

    /**
     * The exposure of a given request or processing a reprocess request has
     * begun. NotifyMsg::Message::Shutter contains the information about
     * the capture.
     */
    SHUTTER = 2
};
    564 
/**
 * Defined error codes for MsgType::ERROR
 */
enum ErrorCode : uint32_t {
    /**
     * A serious failure occurred. No further frames or buffer streams must
     * be produced by the device. Device must be treated as closed. The
     * client must reopen the device to use it again. The frameNumber field
     * is unused.
     */
    ERROR_DEVICE = 1,

    /**
     * An error has occurred in processing a request. No output (metadata or
     * buffers) must be produced for this request. The frameNumber field
     * specifies which request has been dropped. Subsequent requests are
     * unaffected, and the device remains operational.
     */
    ERROR_REQUEST = 2,

    /**
     * An error has occurred in producing an output result metadata buffer
     * for a request, but output stream buffers for it must still be
     * available. Subsequent requests are unaffected, and the device remains
     * operational. The frameNumber field specifies the request for which
     * result metadata won't be available.
     */
    ERROR_RESULT = 3,

    /**
     * An error has occurred in placing an output buffer into a stream for a
     * request. The frame metadata and other buffers may still be
     * available. Subsequent requests are unaffected, and the device remains
     * operational. The frameNumber field specifies the request for which the
     * buffer was dropped, and errorStreamId indicates the stream
     * that dropped the frame.
     */
    ERROR_BUFFER = 4,
};
    604 
/**
 * ErrorMsg:
 *
 * Message contents for MsgType::ERROR
 */
struct ErrorMsg {
    /**
     * Frame number of the request the error applies to. 0 if the frame number
     * isn't applicable to the error.
     */
    uint32_t frameNumber;

    /**
     * Pointer to the stream that had a failure. -1 if the stream isn't
     * applicable to the error.
     */
    int32_t errorStreamId;

    /**
     * The code for this error.
     */
    ErrorCode errorCode;

};
    629 
/**
 * ShutterMsg:
 *
 * Message contents for MsgType::SHUTTER
 */
struct ShutterMsg {
    /**
     * Frame number of the request that has begun exposure or reprocessing.
     */
    uint32_t frameNumber;

    /**
     * Timestamp for the start of capture. For a reprocess request, this must
     * be input image's start of capture. This must match the capture result
     * metadata's sensor exposure start timestamp.
     */
    uint64_t timestamp;

};
    649 
/**
 * NotifyMsg:
 *
 * The message structure sent to ICameraDevice3Callback::notify()
 */
struct NotifyMsg {
    /**
     * The message type.
     */
    MsgType type;

    union Message {
        /**
         * Error message contents. Valid if type is MsgType::ERROR
         */
        ErrorMsg error;

        /**
         * Shutter message contents. Valid if type is MsgType::SHUTTER
         */
        ShutterMsg shutter;
    } msg;

};
    674 
/**
 * RequestTemplate:
 *
 * Available template types for
 * ICameraDevice::constructDefaultRequestSettings()
 */
enum RequestTemplate : uint32_t {
    /**
     * Standard camera preview operation with 3A on auto.
     */
    PREVIEW = 1,

    /**
     * Standard camera high-quality still capture with 3A and flash on auto.
     */
    STILL_CAPTURE = 2,

    /**
     * Standard video recording plus preview with 3A on auto, torch off.
     */
    VIDEO_RECORD = 3,

    /**
     * High-quality still capture while recording video. Applications typically
     * include preview, video record, and full-resolution YUV or JPEG streams in
     * request. Must not cause stuttering on video stream. 3A on auto.
     */
    VIDEO_SNAPSHOT = 4,

    /**
     * Zero-shutter-lag mode. Application typically request preview and
     * full-resolution data for each frame, and reprocess it to JPEG when a
     * still image is requested by user. Settings must provide highest-quality
     * full-resolution images without compromising preview frame rate. 3A on
     * auto.
     */
    ZERO_SHUTTER_LAG = 5,

    /**
     * A basic template for direct application control of capture
     * parameters. All automatic control is disabled (auto-exposure, auto-white
     * balance, auto-focus), and post-processing parameters are set to preview
     * quality. The manual capture parameters (exposure, sensitivity, etc.)
     * are set to reasonable defaults, but may be overridden by the
     * application depending on the intended use case.
     */
    MANUAL = 6,

    /**
     * First value for vendor-defined request templates
     */
    VENDOR_TEMPLATE_START = 0x40000000,

};
    729 
    730 /**
    731  * CaptureRequest:
    732  *
    733  * A single request for image capture/buffer reprocessing, sent to the Camera
    734  * HAL device by the framework in processCaptureRequest().
    735  *
    736  * The request contains the settings to be used for this capture, and the set of
    737  * output buffers to write the resulting image data in. It may optionally
    738  * contain an input buffer, in which case the request is for reprocessing that
    739  * input buffer instead of capturing a new image with the camera sensor. The
    740  * capture is identified by the frameNumber.
    741  *
    742  * In response, the camera HAL device must send a CaptureResult
    743  * structure asynchronously to the framework, using the processCaptureResult()
    744  * callback.
    745  */
    746 struct CaptureRequest {
    747     /**
    748      * The frame number is an incrementing integer set by the framework to
    749      * uniquely identify this capture. It needs to be returned in the result
    750      * call, and is also used to identify the request in asynchronous
    751      * notifications sent to ICameraDevice3Callback::notify().
    752      */
    753     uint32_t frameNumber;
    754 
    755     /**
    756      * If non-zero, read settings from request queue instead
    757      * (see ICameraDeviceSession.getCaptureRequestMetadataQueue).
    758      * If zero, read settings from .settings field.
    759      */
    760     uint64_t fmqSettingsSize;
    761 
    762     /**
    763      * If fmqSettingsSize is zero,
    764      * the settings buffer contains the capture and processing parameters for
    765      * the request. As a special case, an empty settings buffer indicates that
    766      * the settings are identical to the most-recently submitted capture
    767      * request. An empty buffer cannot be used as the first submitted request
    768      * after a configureStreams() call.
    769      *
    770      * This field must be used if fmqSettingsSize is zero. It must not be used
    771      * if fmqSettingsSize is non-zero.
    772      */
    773     CameraMetadata settings;
    774 
    775     /**
    776      * The input stream buffer to use for this request, if any.
    777      *
    778      * An invalid inputBuffer is signified by a null inputBuffer::buffer, in
    779      * which case the value of all other members of inputBuffer must be ignored.
    780      *
    781      * If inputBuffer is invalid, then the request is for a new capture from the
    782      * imager. If inputBuffer is valid, the request is for reprocessing the
    783      * image contained in inputBuffer, and the HAL must release the inputBuffer
    784      * back to the client in a subsequent processCaptureResult call.
    785      *
    786      * The HAL is required to wait on the acquire sync fence of the input buffer
    787      * before accessing it.
    788      *
    789      */
    790     StreamBuffer inputBuffer;
    791 
    792     /**
    793      * An array of at least 1 stream buffers, to be filled with image
    794      * data from this capture/reprocess. The HAL must wait on the acquire fences
    795      * of each stream buffer before writing to them.
    796      *
    797      * The HAL takes ownership of the handles in outputBuffers; the client
    798      * must not access them until they are returned in a CaptureResult.
    799      *
    800      * Any or all of the buffers included here may be brand new in this
    801      * request (having never before been seen by the HAL).
    802      */
    803     vec<StreamBuffer> outputBuffers;
    804 
    805 };
    806 
    807 /**
    808  * CaptureResult:
    809  *
    810  * The result of a single capture/reprocess by the camera HAL device. This is
    811  * sent to the framework asynchronously with processCaptureResult(), in
    812  * response to a single capture request sent to the HAL with
    813  * processCaptureRequest(). Multiple processCaptureResult() calls may be
    814  * performed by the HAL for each request.
    815  *
    816  * Each call, all with the same frame
    817  * number, may contain some subset of the output buffers, and/or the result
    818  * metadata.
    819  *
    820  * The result structure contains the output metadata from this capture, and the
    821  * set of output buffers that have been/will be filled for this capture. Each
    822  * output buffer may come with a release sync fence that the framework must wait
    823  * on before reading, in case the buffer has not yet been filled by the HAL.
    824  *
    825  * The metadata may be provided multiple times for a single frame number. The
    826  * framework must accumulate together the final result set by combining each
    827  * partial result together into the total result set.
    828  *
    829  * If an input buffer is given in a request, the HAL must return it in one of
    830  * the processCaptureResult calls, and the call may be to just return the
    831  * input buffer, without metadata and output buffers; the sync fences must be
    832  * handled the same way they are done for output buffers.
    833  *
    834  * Performance considerations:
    835  *
    836  * Applications receive these partial results immediately, so sending partial
    837  * results is a highly recommended performance optimization to avoid the total
    838  * pipeline latency before sending the results for what is known very early on
    839  * in the pipeline.
    840  *
    841  * A typical use case might be calculating the AF state halfway through the
    842  * pipeline; by sending the state back to the framework immediately, we get a
    843  * 50% performance increase and perceived responsiveness of the auto-focus.
    844  *
    845  */
    846 struct CaptureResult {
    847     /**
    848      * The frame number is an incrementing integer set by the framework in the
    849      * submitted request to uniquely identify this capture. It is also used to
    850      * identify the request in asynchronous notifications sent to
    851      * ICameraDevice3Callback::notify().
    852      */
    853     uint32_t frameNumber;
    854 
    855     /**
    856      * If non-zero, read result from result queue instead
    857      * (see ICameraDeviceSession.getCaptureResultMetadataQueue).
    858      * If zero, read result from .result field.
    859      */
    860     uint64_t fmqResultSize;
    861 
    862     /**
    863      * The result metadata for this capture. This contains information about the
    864      * final capture parameters, the state of the capture and post-processing
    865      * hardware, the state of the 3A algorithms, if enabled, and the output of
    866      * any enabled statistics units.
    867      *
    868      * If there was an error producing the result metadata, result must be an
    869      * empty metadata buffer, and notify() must be called with
    870      * ErrorCode::ERROR_RESULT.
    871      *
    872      * Multiple calls to processCaptureResult() with a given frameNumber
    873      * may include (partial) result metadata.
    874      *
    875      * Partial metadata submitted must not include any metadata key returned
    876      * in a previous partial result for a given frame. Each new partial result
    877      * for that frame must also set a distinct partialResult value.
    878      *
    879      * If notify has been called with ErrorCode::ERROR_RESULT, all further
    880      * partial results for that frame are ignored by the framework.
    881      */
    882     CameraMetadata result;
    883 
    884     /**
    885      * The completed output stream buffers for this capture.
    886      *
    887      * They may not yet be filled at the time the HAL calls
    888      * processCaptureResult(); the framework must wait on the release sync
    889      * fences provided by the HAL before reading the buffers.
    890      *
    891      * The StreamBuffer::buffer handle must be null for all returned buffers;
    892      * the client must cache the handle and look it up via the combination of
    893      * frame number and stream ID.
    894      *
    895      * The number of output buffers returned must be less than or equal to the
    896      * matching capture request's count. If this is less than the buffer count
    897      * in the capture request, at least one more call to processCaptureResult
    898      * with the same frameNumber must be made, to return the remaining output
    899      * buffers to the framework. This may only be zero if the structure includes
    900      * valid result metadata or an input buffer is returned in this result.
    901      *
    902      * The HAL must set the stream buffer's release sync fence to a valid sync
    903      * fd, or to null if the buffer has already been filled.
    904      *
    905      * If the HAL encounters an error while processing the buffer, and the
    906      * buffer is not filled, the buffer's status field must be set to ERROR. If
    907      * the HAL did not wait on the acquire fence before encountering the error,
    908      * the acquire fence must be copied into the release fence, to allow the
    909      * framework to wait on the fence before reusing the buffer.
    910      *
    911      * The acquire fence must be set to null for all output buffers.
    912      *
    913      * This vector may be empty; if so, at least one other processCaptureResult
    914      * call must be made (or have been made) by the HAL to provide the filled
    915      * output buffers.
    916      *
    917      * When processCaptureResult is called with a new buffer for a frame,
    918      * all previous frames' buffers for that corresponding stream must have been
    919      * already delivered (the fences need not have yet been signaled).
    920      *
    921      * Buffers for a frame may be sent to framework before the corresponding
    922      * SHUTTER-notify call is made by the HAL.
    923      *
    924      * Performance considerations:
    925      *
    926      * Buffers delivered to the framework are not dispatched to the
    927      * application layer until a start of exposure timestamp has been received
    928      * via a SHUTTER notify() call. It is highly recommended to
    929      * dispatch that call as early as possible.
    930      */
    931     vec<StreamBuffer> outputBuffers;
    932 
    933     /**
    934      * The handle for the input stream buffer for this capture, if any.
    935      *
    936      * It may not yet be consumed at the time the HAL calls
    937      * processCaptureResult(); the framework must wait on the release sync fence
    938      * provided by the HAL before reusing the buffer.
    939      *
    940      * The HAL must handle the sync fences the same way they are done for
    941      * outputBuffers.
    942      *
    943      * Only one input buffer is allowed to be sent per request. Similarly to
    944      * output buffers, the ordering of returned input buffers must be
    945      * maintained by the HAL.
    946      *
    947      * Performance considerations:
    948      *
    949      * The input buffer should be returned as early as possible. If the HAL
    950      * supports sync fences, it can call processCaptureResult to hand it back
    951      * with sync fences being set appropriately. If the sync fences are not
    952      * supported, the buffer can only be returned when it is consumed, which
    953      * may take a long time; the HAL may choose to copy this input buffer to make
    954      * the buffer return sooner.
    955      */
    956     StreamBuffer inputBuffer;
    957 
    958     /**
    959      * In order to take advantage of partial results, the HAL must set the
    960      * static metadata android.request.partialResultCount to the number of
    961      * partial results it sends for each frame.
    962      *
    963      * Each new capture result with a partial result must set
    964      * this field to a distinct inclusive value between
    965      * 1 and android.request.partialResultCount.
    966      *
    967      * HALs not wishing to take advantage of this feature must not
    968      * set an android.request.partialResultCount or partialResult to a value
    969      * other than 1.
    970      *
    971      * This value must be set to 0 when a capture result contains buffers only
    972      * and no metadata.
    973      */
    974     uint32_t partialResult;
    975 
    976 };
    977 
    978 /**
    979  * BufferCache:
    980  *
    981  * A list of cached bufferIds associated with a certain stream.
    982  * Buffers are passed between camera service and camera HAL via bufferId except
    983  * the first time a new buffer is being passed to HAL in CaptureRequest. Camera
    984  * service and camera HAL therefore need to maintain a cached map of bufferId
    985  * and corresponding native handle.
    986  *
    987  */
    988 struct BufferCache {
    989     /**
    990      * The ID of the stream this list is associated with.
    991      */
    992     int32_t streamId;
    993 
    994     /**
    995      * A cached buffer ID associated with streamId.
    996      */
    997     uint64_t bufferId;
    998 };
    999