1 /* 2 * Copyright (C) 2013 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17 #ifndef ANDROID_INCLUDE_CAMERA3_H 18 #define ANDROID_INCLUDE_CAMERA3_H 19 20 #include <system/camera_metadata.h> 21 #include "camera_common.h" 22 23 /** 24 * Camera device HAL 3.4 [ CAMERA_DEVICE_API_VERSION_3_4 ] 25 * 26 * This is the current recommended version of the camera device HAL. 27 * 28 * Supports the android.hardware.Camera API, and as of v3.2, the 29 * android.hardware.camera2 API as LIMITED or above hardware level. 30 * 31 * Camera devices that support this version of the HAL must return 32 * CAMERA_DEVICE_API_VERSION_3_4 in camera_device_t.common.version and in 33 * camera_info_t.device_version (from camera_module_t.get_camera_info). 34 * 35 * CAMERA_DEVICE_API_VERSION_3_3 and above: 36 * Camera modules that may contain version 3.3 or above devices must 37 * implement at least version 2.2 of the camera module interface (as defined 38 * by camera_module_t.common.module_api_version). 39 * 40 * CAMERA_DEVICE_API_VERSION_3_2: 41 * Camera modules that may contain version 3.2 devices must implement at 42 * least version 2.2 of the camera module interface (as defined by 43 * camera_module_t.common.module_api_version). 44 * 45 * <= CAMERA_DEVICE_API_VERSION_3_1: 46 * Camera modules that may contain version 3.1 (or 3.0) devices must 47 * implement at least version 2.0 of the camera module interface 48 * (as defined by camera_module_t.common.module_api_version). 49 * 50 * See camera_common.h for more versioning details. 51 * 52 * Documentation index: 53 * S1. Version history 54 * S2. Startup and operation sequencing 55 * S3. Operational modes 56 * S4. 3A modes and state machines 57 * S5. Cropping 58 * S6. Error management 59 * S7. Key Performance Indicator (KPI) glossary 60 * S8. Sample Use Cases 61 * S9. Notes on Controls and Metadata 62 * S10. Reprocessing flow and controls 63 */ 64 65 /** 66 * S1. Version history: 67 * 68 * 1.0: Initial Android camera HAL (Android 4.0) [camera.h]: 69 * 70 * - Converted from C++ CameraHardwareInterface abstraction layer. 71 * 72 * - Supports android.hardware.Camera API. 73 * 74 * 2.0: Initial release of expanded-capability HAL (Android 4.2) [camera2.h]: 75 * 76 * - Sufficient for implementing existing android.hardware.Camera API. 77 * 78 * - Allows for ZSL queue in camera service layer 79 * 80 * - Not tested for any new features such manual capture control, Bayer RAW 81 * capture, reprocessing of RAW data. 82 * 83 * 3.0: First revision of expanded-capability HAL: 84 * 85 * - Major version change since the ABI is completely different. No change to 86 * the required hardware capabilities or operational model from 2.0. 87 * 88 * - Reworked input request and stream queue interfaces: Framework calls into 89 * HAL with next request and stream buffers already dequeued. Sync framework 90 * support is included, necessary for efficient implementations. 91 * 92 * - Moved triggers into requests, most notifications into results. 
93 * 94 * - Consolidated all callbacks into framework into one structure, and all 95 * setup methods into a single initialize() call. 96 * 97 * - Made stream configuration into a single call to simplify stream 98 * management. Bidirectional streams replace STREAM_FROM_STREAM construct. 99 * 100 * - Limited mode semantics for older/limited hardware devices. 101 * 102 * 3.1: Minor revision of expanded-capability HAL: 103 * 104 * - configure_streams passes consumer usage flags to the HAL. 105 * 106 * - flush call to drop all in-flight requests/buffers as fast as possible. 107 * 108 * 3.2: Minor revision of expanded-capability HAL: 109 * 110 * - Deprecates get_metadata_vendor_tag_ops. Please use get_vendor_tag_ops 111 * in camera_common.h instead. 112 * 113 * - register_stream_buffers deprecated. All gralloc buffers provided 114 * by framework to HAL in process_capture_request may be new at any time. 115 * 116 * - add partial result support. process_capture_result may be called 117 * multiple times with a subset of the available result before the full 118 * result is available. 119 * 120 * - add manual template to camera3_request_template. The applications may 121 * use this template to control the capture settings directly. 122 * 123 * - Rework the bidirectional and input stream specifications. 124 * 125 * - change the input buffer return path. The buffer is returned in 126 * process_capture_result instead of process_capture_request. 127 * 128 * 3.3: Minor revision of expanded-capability HAL: 129 * 130 * - OPAQUE and YUV reprocessing API updates. 131 * 132 * - Basic support for depth output buffers. 133 * 134 * - Addition of data_space field to camera3_stream_t. 135 * 136 * - Addition of rotation field to camera3_stream_t. 137 * 138 * - Addition of camera3 stream configuration operation mode to camera3_stream_configuration_t 139 * 140 * 3.4: Minor additions to supported metadata and changes to data_space support 141 * 142 * - Add ANDROID_SENSOR_OPAQUE_RAW_SIZE static metadata as mandatory if 143 * RAW_OPAQUE format is supported. 144 * 145 * - Add ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE static metadata as 146 * mandatory if any RAW format is supported 147 * 148 * - Switch camera3_stream_t data_space field to a more flexible definition, 149 * using the version 0 definition of dataspace encoding. 150 * 151 * - General metadata additions which are available to use for HALv3.2 or 152 * newer: 153 * - ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3 154 * - ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST 155 * - ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE 156 * - ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL 157 * - ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL 158 * - ANDROID_SENSOR_OPAQUE_RAW_SIZE 159 * - ANDROID_SENSOR_OPTICAL_BLACK_REGIONS 160 */ 161 162 /** 163 * S2. Startup and general expected operation sequence: 164 * 165 * 1. Framework calls camera_module_t->common.open(), which returns a 166 * hardware_device_t structure. 167 * 168 * 2. Framework inspects the hardware_device_t->version field, and instantiates 169 * the appropriate handler for that version of the camera hardware device. In 170 * case the version is CAMERA_DEVICE_API_VERSION_3_0, the device is cast to 171 * a camera3_device_t. 172 * 173 * 3. Framework calls camera3_device_t->ops->initialize() with the framework 174 * callback function pointers. This will only be called this one time after 175 * open(), before any other functions in the ops structure are called. 176 * 177 * 4. 
The framework calls camera3_device_t->ops->configure_streams() with a list 178 * of input/output streams to the HAL device. 179 * 180 * 5. <= CAMERA_DEVICE_API_VERSION_3_1: 181 * 182 * The framework allocates gralloc buffers and calls 183 * camera3_device_t->ops->register_stream_buffers() for at least one of the 184 * output streams listed in configure_streams. The same stream is registered 185 * only once. 186 * 187 * >= CAMERA_DEVICE_API_VERSION_3_2: 188 * 189 * camera3_device_t->ops->register_stream_buffers() is not called and must 190 * be NULL. 191 * 192 * 6. The framework requests default settings for some number of use cases with 193 * calls to camera3_device_t->ops->construct_default_request_settings(). This 194 * may occur any time after step 3. 195 * 196 * 7. The framework constructs and sends the first capture request to the HAL, 197 * with settings based on one of the sets of default settings, and with at 198 * least one output stream, which has been registered earlier by the 199 * framework. This is sent to the HAL with 200 * camera3_device_t->ops->process_capture_request(). The HAL must block the 201 * return of this call until it is ready for the next request to be sent. 202 * 203 * >= CAMERA_DEVICE_API_VERSION_3_2: 204 * 205 * The buffer_handle_t provided in the camera3_stream_buffer_t array 206 * in the camera3_capture_request_t may be new and never-before-seen 207 * by the HAL on any given new request. 208 * 209 * 8. The framework continues to submit requests, and call 210 * construct_default_request_settings to get default settings buffers for 211 * other use cases. 212 * 213 * <= CAMERA_DEVICE_API_VERSION_3_1: 214 * 215 * The framework may call register_stream_buffers() at this time for 216 * not-yet-registered streams. 217 * 218 * 9. When the capture of a request begins (sensor starts exposing for the 219 * capture) or processing a reprocess request begins, the HAL 220 * calls camera3_callback_ops_t->notify() with the SHUTTER event, including 221 * the frame number and the timestamp for start of exposure. For a reprocess 222 * request, the timestamp must be the start of exposure of the input image 223 * which can be looked up with android.sensor.timestamp from 224 * camera3_capture_request_t.settings when process_capture_request() is 225 * called. 226 * 227 * <= CAMERA_DEVICE_API_VERSION_3_1: 228 * 229 * This notify call must be made before the first call to 230 * process_capture_result() for that frame number. 231 * 232 * >= CAMERA_DEVICE_API_VERSION_3_2: 233 * 234 * The camera3_callback_ops_t->notify() call with the SHUTTER event should 235 * be made as early as possible since the framework will be unable to 236 * deliver gralloc buffers to the application layer (for that frame) until 237 * it has a valid timestamp for the start of exposure (or the input image's 238 * start of exposure for a reprocess request). 239 * 240 * Both partial metadata results and the gralloc buffers may be sent to the 241 * framework at any time before or after the SHUTTER event. 242 * 243 * 10. After some pipeline delay, the HAL begins to return completed captures to 244 * the framework with camera3_callback_ops_t->process_capture_result(). These 245 * are returned in the same order as the requests were submitted. Multiple 246 * requests can be in flight at once, depending on the pipeline depth of the 247 * camera HAL device. 
248 * 249 * >= CAMERA_DEVICE_API_VERSION_3_2: 250 * 251 * Once a buffer is returned by process_capture_result as part of the 252 * camera3_stream_buffer_t array, and the fence specified by release_fence 253 * has been signaled (this is a no-op for -1 fences), the ownership of that 254 * buffer is considered to be transferred back to the framework. After that, 255 * the HAL must no longer retain that particular buffer, and the 256 * framework may clean up the memory for it immediately. 257 * 258 * process_capture_result may be called multiple times for a single frame, 259 * each time with a new disjoint piece of metadata and/or set of gralloc 260 * buffers. The framework will accumulate these partial metadata results 261 * into one result. 262 * 263 * In particular, it is legal for a process_capture_result to be called 264 * simultaneously for both a frame N and a frame N+1 as long as the 265 * above rule holds for gralloc buffers (both input and output). 266 * 267 * 11. After some time, the framework may stop submitting new requests, wait for 268 * the existing captures to complete (all buffers filled, all results 269 * returned), and then call configure_streams() again. This resets the camera 270 * hardware and pipeline for a new set of input/output streams. Some streams 271 * may be reused from the previous configuration; if these streams' buffers 272 * had already been registered with the HAL, they will not be registered 273 * again. The framework then continues from step 7, if at least one 274 * registered output stream remains (otherwise, step 5 is required first). 275 * 276 * 12. Alternatively, the framework may call camera3_device_t->common->close() 277 * to end the camera session. This may be called at any time when no other 278 * calls from the framework are active, although the call may block until all 279 * in-flight captures have completed (all results returned, all buffers 280 * filled). After the close call returns, no more calls to the 281 * camera3_callback_ops_t functions are allowed from the HAL. Once the 282 * close() call is underway, the framework may not call any other HAL device 283 * functions. 284 * 285 * 13. In case of an error or other asynchronous event, the HAL must call 286 * camera3_callback_ops_t->notify() with the appropriate error/event 287 * message. After returning from a fatal device-wide error notification, the 288 * HAL should act as if close() had been called on it. However, the HAL must 289 * either cancel or complete all outstanding captures before calling 290 * notify(), so that once notify() is called with a fatal error, the 291 * framework will not receive further callbacks from the device. Methods 292 * besides close() should return -ENODEV or NULL after the notify() method 293 * returns from a fatal error message. 294 */ 295 296 /** 297 * S3. Operational modes: 298 * 299 * The camera 3 HAL device can implement one of two possible operational modes; 300 * limited and full. Full support is expected from new higher-end 301 * devices. Limited mode has hardware requirements roughly in line with those 302 * for a camera HAL device v1 implementation, and is expected from older or 303 * inexpensive devices. Full is a strict superset of limited, and they share the 304 * same essential operational flow, as documented above. 305 * 306 * The HAL must indicate its level of support with the 307 * android.info.supportedHardwareLevel static metadata entry, with 0 indicating 308 * limited mode, and 1 indicating full mode support. 
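 *
 * For illustration only (this helper is not part of the HAL interface, and its
 * name is hypothetical): the framework side can read the advertised level from
 * the static metadata returned through camera_module_t.get_camera_info, using
 * the accessors from system/camera_metadata.h. The sketch only mirrors the
 * limited/full distinction described above:
 *
 *     #include <stdbool.h>
 *     #include <system/camera_metadata.h>
 *     #include <system/camera_metadata_tags.h>
 *     #include <hardware/camera_common.h>
 *
 *     static bool example_is_full_hal(const camera_info_t *info) {
 *         camera_metadata_ro_entry_t entry;
 *         if (find_camera_metadata_ro_entry(info->static_camera_characteristics,
 *                 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL, &entry) != 0 ||
 *                 entry.count == 0) {
 *             return false;  // treat a missing entry as limited mode
 *         }
 *         return entry.data.u8[0] == ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
 *     }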
309 * 310 * Roughly speaking, limited-mode devices do not allow for application control 311 * of capture settings (3A control only), high-rate capture of high-resolution 312 * images, raw sensor readout, or support for YUV output streams above maximum 313 * recording resolution (JPEG only for large images). 314 * 315 * ** Details of limited mode behavior: 316 * 317 * - Limited-mode devices do not need to implement accurate synchronization 318 * between capture request settings and the actual image data 319 * captured. Instead, changes to settings may take effect some time in the 320 * future, and possibly not for the same output frame for each settings 321 * entry. Rapid changes in settings may result in some settings never being 322 * used for a capture. However, captures that include high-resolution output 323 * buffers ( > 1080p ) have to use the settings as specified (but see below 324 * for processing rate). 325 * 326 * - Limited-mode devices do not need to support most of the 327 * settings/result/static info metadata. Specifically, only the following settings 328 * are expected to be consumed or produced by a limited-mode HAL device: 329 * 330 * android.control.aeAntibandingMode (controls and dynamic) 331 * android.control.aeExposureCompensation (controls and dynamic) 332 * android.control.aeLock (controls and dynamic) 333 * android.control.aeMode (controls and dynamic) 334 * android.control.aeRegions (controls and dynamic) 335 * android.control.aeTargetFpsRange (controls and dynamic) 336 * android.control.aePrecaptureTrigger (controls and dynamic) 337 * android.control.afMode (controls and dynamic) 338 * android.control.afRegions (controls and dynamic) 339 * android.control.awbLock (controls and dynamic) 340 * android.control.awbMode (controls and dynamic) 341 * android.control.awbRegions (controls and dynamic) 342 * android.control.captureIntent (controls and dynamic) 343 * android.control.effectMode (controls and dynamic) 344 * android.control.mode (controls and dynamic) 345 * android.control.sceneMode (controls and dynamic) 346 * android.control.videoStabilizationMode (controls and dynamic) 347 * android.control.aeAvailableAntibandingModes (static) 348 * android.control.aeAvailableModes (static) 349 * android.control.aeAvailableTargetFpsRanges (static) 350 * android.control.aeCompensationRange (static) 351 * android.control.aeCompensationStep (static) 352 * android.control.afAvailableModes (static) 353 * android.control.availableEffects (static) 354 * android.control.availableSceneModes (static) 355 * android.control.availableVideoStabilizationModes (static) 356 * android.control.awbAvailableModes (static) 357 * android.control.maxRegions (static) 358 * android.control.sceneModeOverrides (static) 359 * android.control.aeState (dynamic) 360 * android.control.afState (dynamic) 361 * android.control.awbState (dynamic) 362 * 363 * android.flash.mode (controls and dynamic) 364 * android.flash.info.available (static) 365 * 366 * android.info.supportedHardwareLevel (static) 367 * 368 * android.jpeg.gpsCoordinates (controls and dynamic) 369 * android.jpeg.gpsProcessingMethod (controls and dynamic) 370 * android.jpeg.gpsTimestamp (controls and dynamic) 371 * android.jpeg.orientation (controls and dynamic) 372 * android.jpeg.quality (controls and dynamic) 373 * android.jpeg.thumbnailQuality (controls and dynamic) 374 * android.jpeg.thumbnailSize (controls and dynamic) 375 * android.jpeg.availableThumbnailSizes (static) 376 * android.jpeg.maxSize (static) 377 * 378 * 
 *      android.lens.info.minimumFocusDistance (static)
 *
 *      android.request.id (controls and dynamic)
 *
 *      android.scaler.cropRegion (controls and dynamic)
 *      android.scaler.availableStreamConfigurations (static)
 *      android.scaler.availableMinFrameDurations (static)
 *      android.scaler.availableStallDurations (static)
 *      android.scaler.availableMaxDigitalZoom (static)
 *      android.scaler.maxDigitalZoom (static)
 *      android.scaler.croppingType (static)
 *
 *      android.sensor.orientation (static)
 *      android.sensor.timestamp (dynamic)
 *
 *      android.statistics.faceDetectMode (controls and dynamic)
 *      android.statistics.info.availableFaceDetectModes (static)
 *      android.statistics.faceIds (dynamic)
 *      android.statistics.faceLandmarks (dynamic)
 *      android.statistics.faceRectangles (dynamic)
 *      android.statistics.faceScores (dynamic)
 *
 *      android.sync.frameNumber (dynamic)
 *      android.sync.maxLatency (static)
 *
 * - Captures in limited mode that include high-resolution (> 1080p) output
 *   buffers may block in process_capture_request() until all the output buffers
 *   have been filled. A full-mode HAL device must process sequences of
 *   high-resolution requests at the rate indicated in the static metadata for
 *   that pixel format. The HAL must still call process_capture_result() to
 *   provide the output; the framework must simply be prepared for
 *   process_capture_request() to block until after process_capture_result() for
 *   that request completes for high-resolution captures for limited-mode
 *   devices.
 *
 * - Full-mode devices must support the following additional capabilities:
 *   - 30fps at maximum resolution is preferred; more than 20fps is required.
 *   - Per-frame control (android.sync.maxLatency == PER_FRAME_CONTROL).
 *   - Sensor manual control metadata. See MANUAL_SENSOR defined in
 *     android.request.availableCapabilities.
 *   - Post-processing manual control metadata. See MANUAL_POST_PROCESSING defined
 *     in android.request.availableCapabilities.
 *
 */

/**
 * S4. 3A modes and state machines:
 *
 * While the actual 3A algorithms are up to the HAL implementation, a high-level
 * state machine description is defined by the HAL interface, to allow the HAL
 * device and the framework to communicate about the current state of 3A, and to
 * trigger 3A events.
 *
 * When the device is opened, all the individual 3A states must be
 * STATE_INACTIVE. Stream configuration does not reset 3A. For example, locked
 * focus must be maintained across the configure() call.
 *
 * Triggering a 3A action involves simply setting the relevant trigger entry in
 * the settings for the next request to indicate the start of the trigger. For
 * example, the trigger for starting an autofocus scan is setting the entry
 * ANDROID_CONTROL_AF_TRIGGER to ANDROID_CONTROL_AF_TRIGGER_START for one
 * request, and cancelling an autofocus scan is triggered by setting
 * ANDROID_CONTROL_AF_TRIGGER to ANDROID_CONTROL_AF_TRIGGER_CANCEL. Otherwise,
 * the entry will not exist, or be set to ANDROID_CONTROL_AF_TRIGGER_IDLE. Each
 * request with a trigger entry set to a non-IDLE value will be treated as an
 * independent triggering event.
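 *
 * For illustration only (this helper is not part of the HAL interface, and its
 * name is hypothetical): a HAL implementation might read the per-request AF
 * trigger from the request settings as sketched below, using the accessors
 * from system/camera_metadata.h. A NULL settings buffer means "reuse the most
 * recently submitted settings", which a real implementation would track; the
 * sketch simply treats it as no new trigger.
 *
 *     #include <stdint.h>
 *     #include <system/camera_metadata.h>
 *     #include <system/camera_metadata_tags.h>
 *     #include <hardware/camera3.h>
 *
 *     // Return the AF trigger for this request, or IDLE if no trigger entry
 *     // is present in the (possibly NULL) per-request settings.
 *     static uint8_t example_get_af_trigger(
 *             const camera3_capture_request_t *request) {
 *         camera_metadata_ro_entry_t entry;
 *         if (request->settings == NULL ||
 *                 find_camera_metadata_ro_entry(request->settings,
 *                         ANDROID_CONTROL_AF_TRIGGER, &entry) != 0 ||
 *                 entry.count == 0) {
 *             return ANDROID_CONTROL_AF_TRIGGER_IDLE;
 *         }
 *         return entry.data.u8[0];  // IDLE, START, or CANCEL
 *     }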
 *
 * At the top level, 3A is controlled by the ANDROID_CONTROL_MODE setting, which
 * selects between no 3A (ANDROID_CONTROL_MODE_OFF), normal AUTO mode
 * (ANDROID_CONTROL_MODE_AUTO), and using the scene mode setting
 * (ANDROID_CONTROL_MODE_USE_SCENE_MODE).
 *
 * - In OFF mode, each of the individual AE/AF/AWB modes is effectively OFF,
 *   and none of the capture controls may be overridden by the 3A routines.
 *
 * - In AUTO mode, auto-focus, auto-exposure, and auto-whitebalance all run
 *   their own independent algorithms, and have their own mode, state, and
 *   trigger metadata entries, as listed in the next section.
 *
 * - In USE_SCENE_MODE, the value of the ANDROID_CONTROL_SCENE_MODE entry must
 *   be used to determine the behavior of 3A routines. In SCENE_MODEs other than
 *   FACE_PRIORITY, the HAL must override the values of
 *   ANDROID_CONTROL_AE/AWB/AF_MODE to be the mode it prefers for the selected
 *   SCENE_MODE. For example, the HAL may prefer SCENE_MODE_NIGHT to use
 *   CONTINUOUS_FOCUS AF mode. Any user selection of AE/AWB/AF_MODE must be
 *   ignored for these scene modes.
 *
 * - For SCENE_MODE_FACE_PRIORITY, the AE/AWB/AF_MODE controls work as in
 *   ANDROID_CONTROL_MODE_AUTO, but the 3A routines must bias toward metering
 *   and focusing on any detected faces in the scene.
 *
 * S4.1. Auto-focus settings and result entries:
 *
 *  Main metadata entries:
 *
 *   ANDROID_CONTROL_AF_MODE: Control for selecting the current autofocus
 *      mode. Set by the framework in the request settings.
 *
 *     AF_MODE_OFF: AF is disabled; the framework/app directly controls lens
 *        position.
 *
 *     AF_MODE_AUTO: Single-sweep autofocus. No lens movement unless AF is
 *        triggered.
 *
 *     AF_MODE_MACRO: Single-sweep up-close autofocus. No lens movement unless
 *        AF is triggered.
 *
 *     AF_MODE_CONTINUOUS_VIDEO: Smooth continuous focusing, for recording
 *        video. Triggering immediately locks focus in current
 *        position. Canceling resumes continuous focusing.
 *
 *     AF_MODE_CONTINUOUS_PICTURE: Fast continuous focusing, for
 *        zero-shutter-lag still capture. Triggering locks focus once currently
 *        active sweep concludes. Canceling resumes continuous focusing.
 *
 *     AF_MODE_EDOF: Advanced extended depth of field focusing. There is no
 *        autofocus scan, so triggering one or canceling one has no effect.
 *        Images are focused automatically by the HAL.
 *
 *   ANDROID_CONTROL_AF_STATE: Dynamic metadata describing the current AF
 *      algorithm state, reported by the HAL in the result metadata.
 *
 *     AF_STATE_INACTIVE: No focusing has been done, or algorithm was
 *        reset. Lens is not moving. Always the state for MODE_OFF or MODE_EDOF.
 *        When the device is opened, it must start in this state.
 *
 *     AF_STATE_PASSIVE_SCAN: A continuous focus algorithm is currently scanning
 *        for good focus. The lens is moving.
 *
 *     AF_STATE_PASSIVE_FOCUSED: A continuous focus algorithm believes it is
 *        well focused. The lens is not moving. The HAL may spontaneously leave
 *        this state.
 *
 *     AF_STATE_PASSIVE_UNFOCUSED: A continuous focus algorithm believes it is
 *        not well focused. The lens is not moving. The HAL may spontaneously
 *        leave this state.
 *
 *     AF_STATE_ACTIVE_SCAN: A scan triggered by the user is underway.
 *
 *     AF_STATE_FOCUSED_LOCKED: The AF algorithm believes it is focused.
The 518 * lens is not moving. 519 * 520 * AF_STATE_NOT_FOCUSED_LOCKED: The AF algorithm has been unable to 521 * focus. The lens is not moving. 522 * 523 * ANDROID_CONTROL_AF_TRIGGER: Control for starting an autofocus scan, the 524 * meaning of which is mode- and state- dependent. Set by the framework in 525 * the request settings. 526 * 527 * AF_TRIGGER_IDLE: No current trigger. 528 * 529 * AF_TRIGGER_START: Trigger start of AF scan. Effect is mode and state 530 * dependent. 531 * 532 * AF_TRIGGER_CANCEL: Cancel current AF scan if any, and reset algorithm to 533 * default. 534 * 535 * Additional metadata entries: 536 * 537 * ANDROID_CONTROL_AF_REGIONS: Control for selecting the regions of the FOV 538 * that should be used to determine good focus. This applies to all AF 539 * modes that scan for focus. Set by the framework in the request 540 * settings. 541 * 542 * S4.2. Auto-exposure settings and result entries: 543 * 544 * Main metadata entries: 545 * 546 * ANDROID_CONTROL_AE_MODE: Control for selecting the current auto-exposure 547 * mode. Set by the framework in the request settings. 548 * 549 * AE_MODE_OFF: Autoexposure is disabled; the user controls exposure, gain, 550 * frame duration, and flash. 551 * 552 * AE_MODE_ON: Standard autoexposure, with flash control disabled. User may 553 * set flash to fire or to torch mode. 554 * 555 * AE_MODE_ON_AUTO_FLASH: Standard autoexposure, with flash on at HAL's 556 * discretion for precapture and still capture. User control of flash 557 * disabled. 558 * 559 * AE_MODE_ON_ALWAYS_FLASH: Standard autoexposure, with flash always fired 560 * for capture, and at HAL's discretion for precapture.. User control of 561 * flash disabled. 562 * 563 * AE_MODE_ON_AUTO_FLASH_REDEYE: Standard autoexposure, with flash on at 564 * HAL's discretion for precapture and still capture. Use a flash burst 565 * at end of precapture sequence to reduce redeye in the final 566 * picture. User control of flash disabled. 567 * 568 * ANDROID_CONTROL_AE_STATE: Dynamic metadata describing the current AE 569 * algorithm state, reported by the HAL in the result metadata. 570 * 571 * AE_STATE_INACTIVE: Initial AE state after mode switch. When the device is 572 * opened, it must start in this state. 573 * 574 * AE_STATE_SEARCHING: AE is not converged to a good value, and is adjusting 575 * exposure parameters. 576 * 577 * AE_STATE_CONVERGED: AE has found good exposure values for the current 578 * scene, and the exposure parameters are not changing. HAL may 579 * spontaneously leave this state to search for better solution. 580 * 581 * AE_STATE_LOCKED: AE has been locked with the AE_LOCK control. Exposure 582 * values are not changing. 583 * 584 * AE_STATE_FLASH_REQUIRED: The HAL has converged exposure, but believes 585 * flash is required for a sufficiently bright picture. Used for 586 * determining if a zero-shutter-lag frame can be used. 587 * 588 * AE_STATE_PRECAPTURE: The HAL is in the middle of a precapture 589 * sequence. Depending on AE mode, this mode may involve firing the 590 * flash for metering, or a burst of flash pulses for redeye reduction. 591 * 592 * ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER: Control for starting a metering 593 * sequence before capturing a high-quality image. Set by the framework in 594 * the request settings. 595 * 596 * PRECAPTURE_TRIGGER_IDLE: No current trigger. 597 * 598 * PRECAPTURE_TRIGGER_START: Start a precapture sequence. 
The HAL should 599 * use the subsequent requests to measure good exposure/white balance 600 * for an upcoming high-resolution capture. 601 * 602 * Additional metadata entries: 603 * 604 * ANDROID_CONTROL_AE_LOCK: Control for locking AE controls to their current 605 * values 606 * 607 * ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION: Control for adjusting AE 608 * algorithm target brightness point. 609 * 610 * ANDROID_CONTROL_AE_TARGET_FPS_RANGE: Control for selecting the target frame 611 * rate range for the AE algorithm. The AE routine cannot change the frame 612 * rate to be outside these bounds. 613 * 614 * ANDROID_CONTROL_AE_REGIONS: Control for selecting the regions of the FOV 615 * that should be used to determine good exposure levels. This applies to 616 * all AE modes besides OFF. 617 * 618 * S4.3. Auto-whitebalance settings and result entries: 619 * 620 * Main metadata entries: 621 * 622 * ANDROID_CONTROL_AWB_MODE: Control for selecting the current white-balance 623 * mode. 624 * 625 * AWB_MODE_OFF: Auto-whitebalance is disabled. User controls color matrix. 626 * 627 * AWB_MODE_AUTO: Automatic white balance is enabled; 3A controls color 628 * transform, possibly using more complex transforms than a simple 629 * matrix. 630 * 631 * AWB_MODE_INCANDESCENT: Fixed white balance settings good for indoor 632 * incandescent (tungsten) lighting, roughly 2700K. 633 * 634 * AWB_MODE_FLUORESCENT: Fixed white balance settings good for fluorescent 635 * lighting, roughly 5000K. 636 * 637 * AWB_MODE_WARM_FLUORESCENT: Fixed white balance settings good for 638 * fluorescent lighting, roughly 3000K. 639 * 640 * AWB_MODE_DAYLIGHT: Fixed white balance settings good for daylight, 641 * roughly 5500K. 642 * 643 * AWB_MODE_CLOUDY_DAYLIGHT: Fixed white balance settings good for clouded 644 * daylight, roughly 6500K. 645 * 646 * AWB_MODE_TWILIGHT: Fixed white balance settings good for 647 * near-sunset/sunrise, roughly 15000K. 648 * 649 * AWB_MODE_SHADE: Fixed white balance settings good for areas indirectly 650 * lit by the sun, roughly 7500K. 651 * 652 * ANDROID_CONTROL_AWB_STATE: Dynamic metadata describing the current AWB 653 * algorithm state, reported by the HAL in the result metadata. 654 * 655 * AWB_STATE_INACTIVE: Initial AWB state after mode switch. When the device 656 * is opened, it must start in this state. 657 * 658 * AWB_STATE_SEARCHING: AWB is not converged to a good value, and is 659 * changing color adjustment parameters. 660 * 661 * AWB_STATE_CONVERGED: AWB has found good color adjustment values for the 662 * current scene, and the parameters are not changing. HAL may 663 * spontaneously leave this state to search for better solution. 664 * 665 * AWB_STATE_LOCKED: AWB has been locked with the AWB_LOCK control. Color 666 * adjustment values are not changing. 667 * 668 * Additional metadata entries: 669 * 670 * ANDROID_CONTROL_AWB_LOCK: Control for locking AWB color adjustments to 671 * their current values. 672 * 673 * ANDROID_CONTROL_AWB_REGIONS: Control for selecting the regions of the FOV 674 * that should be used to determine good color balance. This applies only 675 * to auto-WB mode. 676 * 677 * S4.4. General state machine transition notes 678 * 679 * Switching between AF, AE, or AWB modes always resets the algorithm's state 680 * to INACTIVE. Similarly, switching between CONTROL_MODE or 681 * CONTROL_SCENE_MODE if CONTROL_MODE == USE_SCENE_MODE resets all the 682 * algorithm states to INACTIVE. 683 * 684 * The tables below are per-mode. 685 * 686 * S4.5. 
AF state machines
 *
 *                       when enabling AF or changing AF mode
 *| state              | trans. cause  | new state          | notes            |
 *+--------------------+---------------+--------------------+------------------+
 *| Any                | AF mode change| INACTIVE           |                  |
 *+--------------------+---------------+--------------------+------------------+
 *
 *                       mode = AF_MODE_OFF or AF_MODE_EDOF
 *| state              | trans. cause  | new state          | notes            |
 *+--------------------+---------------+--------------------+------------------+
 *| INACTIVE           |               | INACTIVE           | Never changes    |
 *+--------------------+---------------+--------------------+------------------+
 *
 *                       mode = AF_MODE_AUTO or AF_MODE_MACRO
 *| state              | trans. cause  | new state          | notes            |
 *+--------------------+---------------+--------------------+------------------+
 *| INACTIVE           | AF_TRIGGER    | ACTIVE_SCAN        | Start AF sweep   |
 *|                    |               |                    | Lens now moving  |
 *+--------------------+---------------+--------------------+------------------+
 *| ACTIVE_SCAN        | AF sweep done | FOCUSED_LOCKED     | If AF successful |
 *|                    |               |                    | Lens now locked  |
 *+--------------------+---------------+--------------------+------------------+
 *| ACTIVE_SCAN        | AF sweep done | NOT_FOCUSED_LOCKED | If AF unsuccessful|
 *|                    |               |                    | Lens now locked  |
 *+--------------------+---------------+--------------------+------------------+
 *| ACTIVE_SCAN        | AF_CANCEL     | INACTIVE           | Cancel/reset AF  |
 *|                    |               |                    | Lens now locked  |
 *+--------------------+---------------+--------------------+------------------+
 *| FOCUSED_LOCKED     | AF_CANCEL     | INACTIVE           | Cancel/reset AF  |
 *+--------------------+---------------+--------------------+------------------+
 *| FOCUSED_LOCKED     | AF_TRIGGER    | ACTIVE_SCAN        | Start new sweep  |
 *|                    |               |                    | Lens now moving  |
 *+--------------------+---------------+--------------------+------------------+
 *| NOT_FOCUSED_LOCKED | AF_CANCEL     | INACTIVE           | Cancel/reset AF  |
 *+--------------------+---------------+--------------------+------------------+
 *| NOT_FOCUSED_LOCKED | AF_TRIGGER    | ACTIVE_SCAN        | Start new sweep  |
 *|                    |               |                    | Lens now moving  |
 *+--------------------+---------------+--------------------+------------------+
 *| All states         | mode change   | INACTIVE           |                  |
 *+--------------------+---------------+--------------------+------------------+
 *
 *                       mode = AF_MODE_CONTINUOUS_VIDEO
 *| state              | trans. cause  | new state          | notes            |
 *+--------------------+---------------+--------------------+------------------+
 *| INACTIVE           | HAL initiates | PASSIVE_SCAN       | Start AF scan    |
 *|                    | new scan      |                    | Lens now moving  |
 *+--------------------+---------------+--------------------+------------------+
 *| INACTIVE           | AF_TRIGGER    | NOT_FOCUSED_LOCKED | AF state query   |
 *|                    |               |                    | Lens now locked  |
 *+--------------------+---------------+--------------------+------------------+
 *| PASSIVE_SCAN       | HAL completes | PASSIVE_FOCUSED    | End AF scan      |
 *|                    | current scan  |                    | Lens now locked  |
 *+--------------------+---------------+--------------------+------------------+
 *| PASSIVE_SCAN       | HAL fails     | PASSIVE_UNFOCUSED  | End AF scan      |
 *|                    | current scan  |                    | Lens now locked  |
 *+--------------------+---------------+--------------------+------------------+
 *| PASSIVE_SCAN       | AF_TRIGGER    | FOCUSED_LOCKED     | Immediate trans.
| 744 *| | | | if focus is good | 745 *| | | | Lens now locked | 746 *+--------------------+---------------+--------------------+------------------+ 747 *| PASSIVE_SCAN | AF_TRIGGER | NOT_FOCUSED_LOCKED | Immediate trans. | 748 *| | | | if focus is bad | 749 *| | | | Lens now locked | 750 *+--------------------+---------------+--------------------+------------------+ 751 *| PASSIVE_SCAN | AF_CANCEL | INACTIVE | Reset lens | 752 *| | | | position | 753 *| | | | Lens now locked | 754 *+--------------------+---------------+--------------------+------------------+ 755 *| PASSIVE_FOCUSED | HAL initiates | PASSIVE_SCAN | Start AF scan | 756 *| | new scan | | Lens now moving | 757 *+--------------------+---------------+--------------------+------------------+ 758 *| PASSIVE_UNFOCUSED | HAL initiates | PASSIVE_SCAN | Start AF scan | 759 *| | new scan | | Lens now moving | 760 *+--------------------+---------------+--------------------+------------------+ 761 *| PASSIVE_FOCUSED | AF_TRIGGER | FOCUSED_LOCKED | Immediate trans. | 762 *| | | | Lens now locked | 763 *+--------------------+---------------+--------------------+------------------+ 764 *| PASSIVE_UNFOCUSED | AF_TRIGGER | NOT_FOCUSED_LOCKED | Immediate trans. | 765 *| | | | Lens now locked | 766 *+--------------------+---------------+--------------------+------------------+ 767 *| FOCUSED_LOCKED | AF_TRIGGER | FOCUSED_LOCKED | No effect | 768 *+--------------------+---------------+--------------------+------------------+ 769 *| FOCUSED_LOCKED | AF_CANCEL | INACTIVE | Restart AF scan | 770 *+--------------------+---------------+--------------------+------------------+ 771 *| NOT_FOCUSED_LOCKED | AF_TRIGGER | NOT_FOCUSED_LOCKED | No effect | 772 *+--------------------+---------------+--------------------+------------------+ 773 *| NOT_FOCUSED_LOCKED | AF_CANCEL | INACTIVE | Restart AF scan | 774 *+--------------------+---------------+--------------------+------------------+ 775 * 776 * mode = AF_MODE_CONTINUOUS_PICTURE 777 *| state | trans. cause | new state | notes | 778 *+--------------------+---------------+--------------------+------------------+ 779 *| INACTIVE | HAL initiates | PASSIVE_SCAN | Start AF scan | 780 *| | new scan | | Lens now moving | 781 *+--------------------+---------------+--------------------+------------------+ 782 *| INACTIVE | AF_TRIGGER | NOT_FOCUSED_LOCKED | AF state query | 783 *| | | | Lens now locked | 784 *+--------------------+---------------+--------------------+------------------+ 785 *| PASSIVE_SCAN | HAL completes | PASSIVE_FOCUSED | End AF scan | 786 *| | current scan | | Lens now locked | 787 *+--------------------+---------------+--------------------+------------------+ 788 *| PASSIVE_SCAN | HAL fails | PASSIVE_UNFOCUSED | End AF scan | 789 *| | current scan | | Lens now locked | 790 *+--------------------+---------------+--------------------+------------------+ 791 *| PASSIVE_SCAN | AF_TRIGGER | FOCUSED_LOCKED | Eventual trans. | 792 *| | | | once focus good | 793 *| | | | Lens now locked | 794 *+--------------------+---------------+--------------------+------------------+ 795 *| PASSIVE_SCAN | AF_TRIGGER | NOT_FOCUSED_LOCKED | Eventual trans. 
| 796 *| | | | if cannot focus | 797 *| | | | Lens now locked | 798 *+--------------------+---------------+--------------------+------------------+ 799 *| PASSIVE_SCAN | AF_CANCEL | INACTIVE | Reset lens | 800 *| | | | position | 801 *| | | | Lens now locked | 802 *+--------------------+---------------+--------------------+------------------+ 803 *| PASSIVE_FOCUSED | HAL initiates | PASSIVE_SCAN | Start AF scan | 804 *| | new scan | | Lens now moving | 805 *+--------------------+---------------+--------------------+------------------+ 806 *| PASSIVE_UNFOCUSED | HAL initiates | PASSIVE_SCAN | Start AF scan | 807 *| | new scan | | Lens now moving | 808 *+--------------------+---------------+--------------------+------------------+ 809 *| PASSIVE_FOCUSED | AF_TRIGGER | FOCUSED_LOCKED | Immediate trans. | 810 *| | | | Lens now locked | 811 *+--------------------+---------------+--------------------+------------------+ 812 *| PASSIVE_UNFOCUSED | AF_TRIGGER | NOT_FOCUSED_LOCKED | Immediate trans. | 813 *| | | | Lens now locked | 814 *+--------------------+---------------+--------------------+------------------+ 815 *| FOCUSED_LOCKED | AF_TRIGGER | FOCUSED_LOCKED | No effect | 816 *+--------------------+---------------+--------------------+------------------+ 817 *| FOCUSED_LOCKED | AF_CANCEL | INACTIVE | Restart AF scan | 818 *+--------------------+---------------+--------------------+------------------+ 819 *| NOT_FOCUSED_LOCKED | AF_TRIGGER | NOT_FOCUSED_LOCKED | No effect | 820 *+--------------------+---------------+--------------------+------------------+ 821 *| NOT_FOCUSED_LOCKED | AF_CANCEL | INACTIVE | Restart AF scan | 822 *+--------------------+---------------+--------------------+------------------+ 823 * 824 * S4.6. AE and AWB state machines 825 * 826 * The AE and AWB state machines are mostly identical. AE has additional 827 * FLASH_REQUIRED and PRECAPTURE states. So rows below that refer to those two 828 * states should be ignored for the AWB state machine. 829 * 830 * when enabling AE/AWB or changing AE/AWB mode 831 *| state | trans. cause | new state | notes | 832 *+--------------------+---------------+--------------------+------------------+ 833 *| Any | mode change | INACTIVE | | 834 *+--------------------+---------------+--------------------+------------------+ 835 * 836 * mode = AE_MODE_OFF / AWB mode not AUTO 837 *| state | trans. cause | new state | notes | 838 *+--------------------+---------------+--------------------+------------------+ 839 *| INACTIVE | | INACTIVE | AE/AWB disabled | 840 *+--------------------+---------------+--------------------+------------------+ 841 * 842 * mode = AE_MODE_ON_* / AWB_MODE_AUTO 843 *| state | trans. 
cause | new state | notes | 844 *+--------------------+---------------+--------------------+------------------+ 845 *| INACTIVE | HAL initiates | SEARCHING | | 846 *| | AE/AWB scan | | | 847 *+--------------------+---------------+--------------------+------------------+ 848 *| INACTIVE | AE/AWB_LOCK | LOCKED | values locked | 849 *| | on | | | 850 *+--------------------+---------------+--------------------+------------------+ 851 *| SEARCHING | HAL finishes | CONVERGED | good values, not | 852 *| | AE/AWB scan | | changing | 853 *+--------------------+---------------+--------------------+------------------+ 854 *| SEARCHING | HAL finishes | FLASH_REQUIRED | converged but too| 855 *| | AE scan | | dark w/o flash | 856 *+--------------------+---------------+--------------------+------------------+ 857 *| SEARCHING | AE/AWB_LOCK | LOCKED | values locked | 858 *| | on | | | 859 *+--------------------+---------------+--------------------+------------------+ 860 *| CONVERGED | HAL initiates | SEARCHING | values locked | 861 *| | AE/AWB scan | | | 862 *+--------------------+---------------+--------------------+------------------+ 863 *| CONVERGED | AE/AWB_LOCK | LOCKED | values locked | 864 *| | on | | | 865 *+--------------------+---------------+--------------------+------------------+ 866 *| FLASH_REQUIRED | HAL initiates | SEARCHING | values locked | 867 *| | AE/AWB scan | | | 868 *+--------------------+---------------+--------------------+------------------+ 869 *| FLASH_REQUIRED | AE/AWB_LOCK | LOCKED | values locked | 870 *| | on | | | 871 *+--------------------+---------------+--------------------+------------------+ 872 *| LOCKED | AE/AWB_LOCK | SEARCHING | values not good | 873 *| | off | | after unlock | 874 *+--------------------+---------------+--------------------+------------------+ 875 *| LOCKED | AE/AWB_LOCK | CONVERGED | values good | 876 *| | off | | after unlock | 877 *+--------------------+---------------+--------------------+------------------+ 878 *| LOCKED | AE_LOCK | FLASH_REQUIRED | exposure good, | 879 *| | off | | but too dark | 880 *+--------------------+---------------+--------------------+------------------+ 881 *| All AE states | PRECAPTURE_ | PRECAPTURE | Start precapture | 882 *| | START | | sequence | 883 *+--------------------+---------------+--------------------+------------------+ 884 *| PRECAPTURE | Sequence done.| CONVERGED | Ready for high- | 885 *| | AE_LOCK off | | quality capture | 886 *+--------------------+---------------+--------------------+------------------+ 887 *| PRECAPTURE | Sequence done.| LOCKED | Ready for high- | 888 *| | AE_LOCK on | | quality capture | 889 *+--------------------+---------------+--------------------+------------------+ 890 * 891 */ 892 893 /** 894 * S5. Cropping: 895 * 896 * Cropping of the full pixel array (for digital zoom and other use cases where 897 * a smaller FOV is desirable) is communicated through the 898 * ANDROID_SCALER_CROP_REGION setting. This is a per-request setting, and can 899 * change on a per-request basis, which is critical for implementing smooth 900 * digital zoom. 901 * 902 * The region is defined as a rectangle (x, y, width, height), with (x, y) 903 * describing the top-left corner of the rectangle. The rectangle is defined on 904 * the coordinate system of the sensor active pixel array, with (0,0) being the 905 * top-left pixel of the active pixel array. 
Therefore, the width and height
 * cannot be larger than the dimensions reported in the
 * ANDROID_SENSOR_ACTIVE_PIXEL_ARRAY static info field. The minimum allowed
 * width and height are reported by the HAL through the
 * ANDROID_SCALER_MAX_DIGITAL_ZOOM static info field, which describes the
 * maximum supported zoom factor. Therefore, the minimum crop region width and
 * height are:
 *
 *   {width, height} =
 *      { floor(ANDROID_SENSOR_ACTIVE_PIXEL_ARRAY[0] /
 *          ANDROID_SCALER_MAX_DIGITAL_ZOOM),
 *        floor(ANDROID_SENSOR_ACTIVE_PIXEL_ARRAY[1] /
 *          ANDROID_SCALER_MAX_DIGITAL_ZOOM) }
 *
 * If the crop region needs to fulfill specific requirements (for example, it
 * needs to start on even coordinates, and its width/height needs to be even),
 * the HAL must do the necessary rounding and write out the final crop region
 * used in the output result metadata. Similarly, if the HAL implements video
 * stabilization, it must adjust the result crop region to describe the region
 * actually included in the output after video stabilization is applied. In
 * general, a camera-using application must be able to determine the field of
 * view it is receiving based on the crop region, the dimensions of the image
 * sensor, and the lens focal length.
 *
 * It is assumed that the cropping is applied after the raw-to-color-space
 * conversion. Raw streams (RAW16 and RAW_OPAQUE) don't have this conversion stage,
 * and are not croppable. Therefore, the crop region must be ignored by the HAL
 * for raw streams.
 *
 * Since the crop region applies to all non-raw streams, which may have different aspect
 * ratios than the crop region, the exact sensor region used for each stream may
 * be smaller than the crop region. Specifically, each stream should maintain
 * square pixels and its aspect ratio by minimally further cropping the defined
 * crop region. If the stream's aspect ratio is wider than the crop region, the
 * stream should be further cropped vertically, and if the stream's aspect ratio
 * is narrower than the crop region, the stream should be further cropped
 * horizontally.
 *
 * In all cases, the stream crop must be centered within the full crop region,
 * and each stream is only either cropped horizontally or vertically relative to
 * the full crop region, never both.
 *
 * For example, if two streams are defined, a 640x480 stream (4:3 aspect), and a
 * 1280x720 stream (16:9 aspect), the examples below demonstrate the expected
 * output regions for each stream for a few sample crop regions, on a
 * hypothetical 3 MP (2000 x 1500 pixel array) sensor.
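 *
 * As an arithmetic illustration of the rule above (not part of the HAL
 * interface; the struct and function names are hypothetical), the per-stream
 * crop could be computed as sketched below. The sample regions in the diagrams
 * that follow use the same math.
 *
 *     #include <stdint.h>
 *
 *     typedef struct { int32_t x, y, w, h; } example_region_t;
 *
 *     // Minimally crop 'crop' to the output aspect ratio, keeping it centered.
 *     static example_region_t example_stream_crop(example_region_t crop,
 *             int32_t out_w, int32_t out_h) {
 *         example_region_t s = crop;
 *         // Output wider than crop region <=> out_w * crop.h > out_h * crop.w
 *         if ((int64_t)out_w * crop.h > (int64_t)out_h * crop.w) {
 *             s.h = crop.w * out_h / out_w;        // crop vertically
 *             s.y = crop.y + (crop.h - s.h) / 2;   // keep centered
 *         } else {
 *             s.w = crop.h * out_w / out_h;        // crop horizontally
 *             s.x = crop.x + (crop.w - s.w) / 2;
 *         }
 *         return s;
 *     }
 *
 *     // For the crop region (500, 375, 1000, 750), this gives
 *     // (500, 375, 1000, 750) for a 640x480 stream and (500, 469, 1000, 562)
 *     // for a 1280x720 stream, matching the first example below.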
951 * 952 * Crop region: (500, 375, 1000, 750) (4:3 aspect ratio) 953 * 954 * 640x480 stream crop: (500, 375, 1000, 750) (equal to crop region) 955 * 1280x720 stream crop: (500, 469, 1000, 562) (marked with =) 956 * 957 * 0 1000 2000 958 * +---------+---------+---------+----------+ 959 * | Active pixel array | 960 * | | 961 * | | 962 * + +-------------------+ + 375 963 * | | | | 964 * | O===================O | 965 * | I 1280x720 stream I | 966 * + I I + 750 967 * | I I | 968 * | O===================O | 969 * | | | | 970 * + +-------------------+ + 1125 971 * | Crop region, 640x480 stream | 972 * | | 973 * | | 974 * +---------+---------+---------+----------+ 1500 975 * 976 * Crop region: (500, 375, 1333, 750) (16:9 aspect ratio) 977 * 978 * 640x480 stream crop: (666, 375, 1000, 750) (marked with =) 979 * 1280x720 stream crop: (500, 375, 1333, 750) (equal to crop region) 980 * 981 * 0 1000 2000 982 * +---------+---------+---------+----------+ 983 * | Active pixel array | 984 * | | 985 * | | 986 * + +---O==================O---+ + 375 987 * | | I 640x480 stream I | | 988 * | | I I | | 989 * | | I I | | 990 * + | I I | + 750 991 * | | I I | | 992 * | | I I | | 993 * | | I I | | 994 * + +---O==================O---+ + 1125 995 * | Crop region, 1280x720 stream | 996 * | | 997 * | | 998 * +---------+---------+---------+----------+ 1500 999 * 1000 * Crop region: (500, 375, 750, 750) (1:1 aspect ratio) 1001 * 1002 * 640x480 stream crop: (500, 469, 750, 562) (marked with =) 1003 * 1280x720 stream crop: (500, 543, 750, 414) (marged with #) 1004 * 1005 * 0 1000 2000 1006 * +---------+---------+---------+----------+ 1007 * | Active pixel array | 1008 * | | 1009 * | | 1010 * + +--------------+ + 375 1011 * | O==============O | 1012 * | ################ | 1013 * | # # | 1014 * + # # + 750 1015 * | # # | 1016 * | ################ 1280x720 | 1017 * | O==============O 640x480 | 1018 * + +--------------+ + 1125 1019 * | Crop region | 1020 * | | 1021 * | | 1022 * +---------+---------+---------+----------+ 1500 1023 * 1024 * And a final example, a 1024x1024 square aspect ratio stream instead of the 1025 * 480p stream: 1026 * 1027 * Crop region: (500, 375, 1000, 750) (4:3 aspect ratio) 1028 * 1029 * 1024x1024 stream crop: (625, 375, 750, 750) (marked with #) 1030 * 1280x720 stream crop: (500, 469, 1000, 562) (marked with =) 1031 * 1032 * 0 1000 2000 1033 * +---------+---------+---------+----------+ 1034 * | Active pixel array | 1035 * | | 1036 * | 1024x1024 stream | 1037 * + +--###############--+ + 375 1038 * | | # # | | 1039 * | O===================O | 1040 * | I 1280x720 stream I | 1041 * + I I + 750 1042 * | I I | 1043 * | O===================O | 1044 * | | # # | | 1045 * + +--###############--+ + 1125 1046 * | Crop region | 1047 * | | 1048 * | | 1049 * +---------+---------+---------+----------+ 1500 1050 * 1051 */ 1052 1053 /** 1054 * S6. Error management: 1055 * 1056 * Camera HAL device ops functions that have a return value will all return 1057 * -ENODEV / NULL in case of a serious error. This means the device cannot 1058 * continue operation, and must be closed by the framework. Once this error is 1059 * returned by some method, or if notify() is called with ERROR_DEVICE, only 1060 * the close() method can be called successfully. All other methods will return 1061 * -ENODEV / NULL. 1062 * 1063 * If a device op is called in the wrong sequence, for example if the framework 1064 * calls configure_streams() is called before initialize(), the device must 1065 * return -ENOSYS from the call, and do nothing. 
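 *
 * For illustration only (a sketch, not a required implementation): a HAL might
 * enforce this error convention with a guard at the top of each device op. The
 * example_camera wrapper struct and its fields are hypothetical; only the
 * camera3_device_t and camera3_stream_configuration_t types and the priv field
 * come from this header.
 *
 *     #include <errno.h>
 *     #include <stdbool.h>
 *     #include <stddef.h>
 *     #include <hardware/camera3.h>
 *
 *     struct example_camera {          // hypothetical per-device bookkeeping
 *         camera3_device_t device;     // device.priv points back at this
 *         bool initialized;            // set once initialize() has succeeded
 *         bool fatal_error;            // set after a notify(ERROR_DEVICE)
 *     };
 *
 *     static int example_configure_streams(const struct camera3_device *dev,
 *             camera3_stream_configuration_t *stream_list) {
 *         struct example_camera *cam = (struct example_camera *) dev->priv;
 *         if (cam->fatal_error) {
 *             return -ENODEV;  // after a fatal error, only close() is useful
 *         }
 *         if (!cam->initialized) {
 *             return -ENOSYS;  // called before initialize(): wrong sequence
 *         }
 *         if (stream_list == NULL || stream_list->num_streams == 0) {
 *             return -EINVAL;  // invalid arguments; act as if never called
 *         }
 *         // ... normal stream configuration ...
 *         return 0;
 *     }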
1066 * 1067 * Transient errors in image capture must be reported through notify() as follows: 1068 * 1069 * - The failure of an entire capture to occur must be reported by the HAL by 1070 * calling notify() with ERROR_REQUEST. Individual errors for the result 1071 * metadata or the output buffers must not be reported in this case. 1072 * 1073 * - If the metadata for a capture cannot be produced, but some image buffers 1074 * were filled, the HAL must call notify() with ERROR_RESULT. 1075 * 1076 * - If an output image buffer could not be filled, but either the metadata was 1077 * produced or some other buffers were filled, the HAL must call notify() with 1078 * ERROR_BUFFER for each failed buffer. 1079 * 1080 * In each of these transient failure cases, the HAL must still call 1081 * process_capture_result, with valid output and input (if an input buffer was 1082 * submitted) buffer_handle_t. If the result metadata could not be produced, it 1083 * should be NULL. If some buffers could not be filled, they must be returned with 1084 * process_capture_result in the error state, their release fences must be set to 1085 * the acquire fences passed by the framework, or -1 if they have been waited on by 1086 * the HAL already. 1087 * 1088 * Invalid input arguments result in -EINVAL from the appropriate methods. In 1089 * that case, the framework must act as if that call had never been made. 1090 * 1091 */ 1092 1093 /** 1094 * S7. Key Performance Indicator (KPI) glossary: 1095 * 1096 * This includes some critical definitions that are used by KPI metrics. 1097 * 1098 * Pipeline Latency: 1099 * For a given capture request, the duration from the framework calling 1100 * process_capture_request to the HAL sending capture result and all buffers 1101 * back by process_capture_result call. To make the Pipeline Latency measure 1102 * independent of frame rate, it is measured by frame count. 1103 * 1104 * For example, when frame rate is 30 (fps), the frame duration (time interval 1105 * between adjacent frame capture time) is 33 (ms). 1106 * If it takes 5 frames for framework to get the result and buffers back for 1107 * a given request, then the Pipeline Latency is 5 (frames), instead of 1108 * 5 x 33 = 165 (ms). 1109 * 1110 * The Pipeline Latency is determined by android.request.pipelineDepth and 1111 * android.request.pipelineMaxDepth, see their definitions for more details. 1112 * 1113 */ 1114 1115 /** 1116 * S8. Sample Use Cases: 1117 * 1118 * This includes some typical use case examples the camera HAL may support. 1119 * 1120 * S8.1 Zero Shutter Lag (ZSL) with CAMERA3_STREAM_BIDIRECTIONAL stream. 1121 * 1122 * For this use case, the bidirectional stream will be used by the framework as follows: 1123 * 1124 * 1. The framework includes a buffer from this stream as output buffer in a 1125 * request as normal. 1126 * 1127 * 2. Once the HAL device returns a filled output buffer to the framework, 1128 * the framework may do one of two things with the filled buffer: 1129 * 1130 * 2. a. The framework uses the filled data, and returns the now-used buffer 1131 * to the stream queue for reuse. This behavior exactly matches the 1132 * OUTPUT type of stream. 1133 * 1134 * 2. b. The framework wants to reprocess the filled data, and uses the 1135 * buffer as an input buffer for a request. Once the HAL device has 1136 * used the reprocessing buffer, it then returns it to the 1137 * framework. The framework then returns the now-used buffer to the 1138 * stream queue for reuse. 1139 * 1140 * 3. 
The HAL device will be given the buffer again as an output buffer for 1141 * a request at some future point. 1142 * 1143 * For ZSL use case, the pixel format for bidirectional stream will be 1144 * HAL_PIXEL_FORMAT_RAW_OPAQUE or HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED if it 1145 * is listed in android.scaler.availableInputOutputFormatsMap. When 1146 * HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED is used, the gralloc 1147 * usage flags for the consumer endpoint will be set to GRALLOC_USAGE_HW_CAMERA_ZSL. 1148 * A configuration stream list that has BIDIRECTIONAL stream used as input, will 1149 * usually also have a distinct OUTPUT stream to get the reprocessing data. For example, 1150 * for the ZSL use case, the stream list might be configured with the following: 1151 * 1152 * - A HAL_PIXEL_FORMAT_RAW_OPAQUE bidirectional stream is used 1153 * as input. 1154 * - And a HAL_PIXEL_FORMAT_BLOB (JPEG) output stream. 1155 * 1156 * S8.2 ZSL (OPAQUE) reprocessing with CAMERA3_STREAM_INPUT stream. 1157 * 1158 * CAMERA_DEVICE_API_VERSION_3_3: 1159 * When OPAQUE_REPROCESSING capability is supported by the camera device, the INPUT stream 1160 * can be used for application/framework implemented use case like Zero Shutter Lag (ZSL). 1161 * This kind of stream will be used by the framework as follows: 1162 * 1163 * 1. Application/framework configures an opaque (RAW or YUV based) format output stream that is 1164 * used to produce the ZSL output buffers. The stream pixel format will be 1165 * HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED. 1166 * 1167 * 2. Application/framework configures an opaque format input stream that is used to 1168 * send the reprocessing ZSL buffers to the HAL. The stream pixel format will 1169 * also be HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED. 1170 * 1171 * 3. Application/framework configures a YUV/JPEG output stream that is used to receive the 1172 * reprocessed data. The stream pixel format will be YCbCr_420/HAL_PIXEL_FORMAT_BLOB. 1173 * 1174 * 4. Application/framework picks a ZSL buffer from the ZSL output stream when a ZSL capture is 1175 * issued by the application, and sends the data back as an input buffer in a 1176 * reprocessing request, then sends to the HAL for reprocessing. 1177 * 1178 * 5. The HAL sends back the output YUV/JPEG result to framework. 1179 * 1180 * The HAL can select the actual opaque buffer format and configure the ISP pipeline 1181 * appropriately based on the HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED format and 1182 * the gralloc usage flag GRALLOC_USAGE_HW_CAMERA_ZSL. 1183 1184 * S8.3 YUV reprocessing with CAMERA3_STREAM_INPUT stream. 1185 * 1186 * When YUV reprocessing is supported by the HAL, the INPUT stream 1187 * can be used for the YUV reprocessing use cases like lucky-shot and image fusion. 1188 * This kind of stream will be used by the framework as follows: 1189 * 1190 * 1. Application/framework configures an YCbCr_420 format output stream that is 1191 * used to produce the output buffers. 1192 * 1193 * 2. Application/framework configures an YCbCr_420 format input stream that is used to 1194 * send the reprocessing YUV buffers to the HAL. 1195 * 1196 * 3. Application/framework configures a YUV/JPEG output stream that is used to receive the 1197 * reprocessed data. The stream pixel format will be YCbCr_420/HAL_PIXEL_FORMAT_BLOB. 1198 * 1199 * 4. 
Application/framework processes the output buffers (this could be as simple as picking
1200 * an output buffer directly) from the output stream when a capture is issued, and sends
1201 * the data back as an input buffer in a reprocessing request, which is then sent to the HAL
1202 * for reprocessing.
1203 *
1204 * 5. The HAL sends the output YUV/JPEG result back to the framework.
1205 *
1206 */
1207
1208 /**
1209 * S9. Notes on Controls and Metadata
1210 *
1211 * This section contains notes about the interpretation and usage of various metadata tags.
1212 *
1213 * S9.1 HIGH_QUALITY and FAST modes.
1214 *
1215 * Many camera post-processing blocks may be listed as having HIGH_QUALITY,
1216 * FAST, and OFF operating modes. These blocks will typically also have an
1217 * 'available modes' tag representing which of these operating modes are
1218 * available on a given device. The general policy regarding implementing
1219 * these modes is as follows:
1220 *
1221 * 1. Operating mode controls of hardware blocks that cannot be disabled
1222 * must not list OFF in their corresponding 'available modes' tags.
1223 *
1224 * 2. OFF will always be included in the corresponding 'available modes'
1225 * tag if it is possible to disable that hardware block.
1226 *
1227 * 3. FAST must always be included in the 'available modes' tags for all
1228 * post-processing blocks supported on the device. If a post-processing
1229 * block also has a slower, higher-quality operating mode that does
1230 * not meet the framerate requirements for FAST mode, HIGH_QUALITY should
1231 * be included in the 'available modes' tag to represent this operating
1232 * mode.
1233 */
1234
1235 /**
1236 * S10. Reprocessing flow and controls
1237 *
1238 * This section describes the OPAQUE and YUV reprocessing flow and controls. OPAQUE reprocessing
1239 * uses an opaque format that is not directly application-visible, and the application can
1240 * only select some of the output buffers and send them back to the HAL for reprocessing, while YUV
1241 * reprocessing gives the application the opportunity to process the buffers before reprocessing.
1242 *
1243 * S8 gives the stream configurations for the typical reprocessing use cases;
1244 * this section specifies the buffer flow and controls in more detail.
1245 *
1246 * S10.1 OPAQUE (typically for ZSL use case) reprocessing flow and controls
1247 *
1248 * For the OPAQUE reprocessing (e.g. ZSL) use case, after the application creates the specific
1249 * output and input streams, the runtime buffer flow and controls are specified as below:
1250 *
1251 * 1. The application starts output streaming by sending repeating requests for output
1252 * opaque buffers and preview. The buffers are held in an application-
1253 * maintained circular buffer. The requests are based on the CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG
1254 * capture template, which should have all necessary settings to guarantee that the output
1255 * frame rate is not slowed down relative to the sensor output frame rate.
1256 *
1257 * 2. When a capture is issued, the application selects one output buffer based
1258 * on the application's buffer selection logic, e.g. good AE and AF statistics, etc.
1259 * The application then creates a reprocess request based on the capture result associated
1260 * with this selected buffer. The selected output buffer is now added to this reprocess
1261 * request as an input buffer; the output buffer of this reprocess request should be
1262 * either a JPEG output buffer or a YUV output buffer, or both, depending on the application's
1263 * choice.
1264 *
1265 * 3.
The application then alters the reprocess settings to get the best image quality. The HAL must
1266 * support, and must only support, the below controls if the HAL supports the OPAQUE_REPROCESSING capability:
1267 * - android.jpeg.* (if a JPEG buffer is included as one of the outputs)
1268 * - android.noiseReduction.mode (change to HIGH_QUALITY if it is supported)
1269 * - android.edge.mode (change to HIGH_QUALITY if it is supported)
1270 * All other controls must be ignored by the HAL.
1271 * 4. The HAL processes the input buffer and returns the output buffers in the capture results
1272 * as normal.
1273 *
1274 * S10.2 YUV reprocessing flow and controls
1275 *
1276 * The YUV reprocessing buffer flow is similar to OPAQUE reprocessing, with the below differences:
1277 *
1278 * 1. The application may want finer-granularity control of the intermediate YUV images
1279 * (before reprocessing). For example, the application may choose
1280 * - android.noiseReduction.mode == MINIMAL
1281 * to make sure that no YUV-domain noise reduction has been applied to the output YUV buffers;
1282 * it can then do its own advanced noise reduction on them. For the OPAQUE reprocessing case, this
1283 * doesn't matter, as long as the final reprocessed image has the best quality.
1284 * 2. The application may modify the YUV output buffer data, for example for an image fusion use
1285 * case, where multiple output images are merged together to improve the signal-to-noise
1286 * ratio (SNR). The input buffer may be generated from multiple buffers by the application.
1287 * To avoid an excessive amount of noise reduction and an insufficient amount of edge enhancement
1288 * being applied to the input buffer, the application can hint to the HAL how much effective
1289 * exposure time improvement has been done by the application; the HAL can then adjust the
1290 * noise reduction and edge enhancement parameters to get the best reprocessed image quality.
1291 * The below tag can be used for this purpose:
1292 * - android.reprocess.effectiveExposureFactor
1293 * The value is the exposure time increase factor applied to the original output image;
1294 * for example, if N images are merged, the exposure time increase factor would be up
1295 * to sqrt(N). See this tag's spec for more details.
1296 *
1297 * S10.3 Reprocessing pipeline characteristics
1298 *
1299 * The reprocessing pipeline has the below characteristics, compared with the normal output
1300 * pipeline:
1301 *
1302 * 1. The reprocessing result can be returned ahead of the pending normal output results, but
1303 * FIFO ordering must be maintained for all reprocessing results. For example, if the
1304 * below requests (A stands for output requests, B stands for reprocessing requests) are
1305 * being processed by the HAL:
1306 * A1, A2, A3, A4, B1, A5, B2, A6...
1307 * the result of B1 can be returned before A1-A4, but the result of B2 must be returned after B1.
1308 * 2. Single input rule: For a given reprocessing request, all output buffers must be produced from the
1309 * input buffer, rather than from the sensor output. For example, if a reprocess request includes both
1310 * JPEG and preview buffers, all output buffers must be produced from the input buffer
1311 * included in the reprocessing request, rather than from the sensor. The HAL must not output preview
1312 * buffers from the sensor while outputting the JPEG buffer from the input buffer.
1313 * 3. The input buffer will come from the camera output directly (ZSL case) or indirectly (image fusion
1314 * case). For the case where the buffer is modified, the size will remain the same. The HAL can
1315 * notify CAMERA3_MSG_ERROR_REQUEST if a buffer from an unknown source is sent.
1316 * 4. Result as reprocessing request: The HAL can expect that a reprocessing request is a copy
1317 * of one of the output results with minor allowed setting changes. The HAL can notify
1318 * CAMERA3_MSG_ERROR_REQUEST if a request from an unknown source is issued (see the sketch below).
1319 * 5. Output buffers may not be used as inputs across the configure stream boundary. This is
1320 * because an opaque stream like the ZSL output stream may have a different actual image size
1321 * inside the ZSL buffer, to save power and bandwidth for smaller-resolution JPEG capture.
1322 * The HAL may notify CAMERA3_MSG_ERROR_REQUEST if this case occurs.
1323 * 6. Error reporting for reprocess requests during a flush should follow the same rules specified
1324 * by the flush() method.
1325 *
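 * For illustration only, the checks above might be implemented in the HAL's
 * process_capture_request() roughly as follows. This is a non-normative
 * sketch: the hal_ctx type and the helpers process_normal_capture(),
 * is_known_reprocess_source(), notify_error(), apply_reprocess_controls() and
 * queue_reprocess_job() are hypothetical and not part of this API.
 *
 *     // Sketch: screen an incoming reprocess request before queuing it.
 *     static int handle_request(struct hal_ctx *hal,
 *                               camera3_capture_request_t *req) {
 *         if (req->input_buffer == NULL)
 *             return process_normal_capture(hal, req);   // not a reprocess
 *
 *         if (!is_known_reprocess_source(hal, req)) {
 *             // Unknown input buffer or settings: drop the whole request.
 *             notify_error(hal, req->frame_number, CAMERA3_MSG_ERROR_REQUEST);
 *             // Its buffers are still handed back through
 *             // process_capture_result(), in the error state.
 *             return 0;
 *         }
 *
 *         // For OPAQUE reprocessing, honor only android.jpeg.*,
 *         // android.noiseReduction.mode and android.edge.mode; all other
 *         // controls are ignored.
 *         apply_reprocess_controls(hal, req->settings);
 *         return queue_reprocess_job(hal, req);
 *     }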
1326 */
1327
1328 __BEGIN_DECLS
1329
1330 struct camera3_device;
1331
1332 /**********************************************************************
1333 *
1334 * Camera3 stream and stream buffer definitions.
1335 *
1336 * These structs and enums define the handles and contents of the input and
1337 * output streams connecting the HAL to various framework and application buffer
1338 * consumers. Each stream is backed by a gralloc buffer queue.
1339 *
1340 */
1341
1342 /**
1343 * camera3_stream_type_t:
1344 *
1345 * The type of the camera stream, which defines whether the camera HAL device is
1346 * the producer or the consumer for that stream, and how the buffers of the
1347 * stream relate to the other streams.
1348 */
1349 typedef enum camera3_stream_type {
1350 /**
1351 * This stream is an output stream; the camera HAL device will be
1352 * responsible for filling buffers from this stream with newly captured or
1353 * reprocessed image data.
1354 */
1355 CAMERA3_STREAM_OUTPUT = 0,
1356
1357 /**
1358 * This stream is an input stream; the camera HAL device will be responsible
1359 * for reading buffers from this stream and sending them through the camera
1360 * processing pipeline, as if the buffer was a newly captured image from the
1361 * imager.
1362 *
1363 * The pixel format for an input stream can be any format reported by
1364 * android.scaler.availableInputOutputFormatsMap. The pixel format of the
1365 * output stream that is used to produce the reprocessing data may be any
1366 * format reported by android.scaler.availableStreamConfigurations. The
1367 * supported input/output stream combinations depend on the camera device
1368 * capabilities; see android.scaler.availableInputOutputFormatsMap for
1369 * stream map details.
1370 *
1371 * This kind of stream is generally used to reprocess data into higher
1372 * quality images (that otherwise would cause a frame rate performance
1373 * loss), or to do off-line reprocessing.
1374 *
1375 * CAMERA_DEVICE_API_VERSION_3_3:
1376 * The typical use cases are OPAQUE (typically ZSL) and YUV reprocessing;
1377 * see S8.2, S8.3 and S10 for more details.
1378 */
1379 CAMERA3_STREAM_INPUT = 1,
1380
1381 /**
1382 * This stream can be used for input and output. Typically, the stream is
1383 * used as an output stream, but occasionally one already-filled buffer may
1384 * be sent back to the HAL device for reprocessing.
1385 *
1386 * This kind of stream is meant generally for Zero Shutter Lag (ZSL)
1387 * features, where copying the captured image from the output buffer to the
1388 * reprocessing input buffer would be expensive. See S8.1 for more details.
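 *
 * For illustration only, a HAL might recognize the ZSL use of such a stream
 * during configure_streams() roughly as follows. This is a non-normative
 * sketch; stream is assumed to be one entry of stream_list->streams, and the
 * chosen max_buffers value is illustrative only:
 *
 *     // Sketch: detect a ZSL bidirectional stream from its format and the
 *     // consumer usage flags provided by the framework (>= v3.1), then set
 *     // the HAL-controlled fields.
 *     if (stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
 *         stream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED &&
 *         (stream->usage & GRALLOC_USAGE_HW_CAMERA_ZSL) ==
 *                 GRALLOC_USAGE_HW_CAMERA_ZSL) {
 *         stream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE |
 *                          GRALLOC_USAGE_HW_CAMERA_READ;
 *         stream->max_buffers = 4;   // illustrative value only
 *     }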
1389 * 1390 * Note that the HAL will always be reprocessing data it produced. 1391 * 1392 */ 1393 CAMERA3_STREAM_BIDIRECTIONAL = 2, 1394 1395 /** 1396 * Total number of framework-defined stream types 1397 */ 1398 CAMERA3_NUM_STREAM_TYPES 1399 1400 } camera3_stream_type_t; 1401 1402 /** 1403 * camera3_stream_rotation_t: 1404 * 1405 * The required counterclockwise rotation of camera stream. 1406 */ 1407 typedef enum camera3_stream_rotation { 1408 /* No rotation */ 1409 CAMERA3_STREAM_ROTATION_0 = 0, 1410 1411 /* Rotate by 90 degree counterclockwise */ 1412 CAMERA3_STREAM_ROTATION_90 = 1, 1413 1414 /* Rotate by 180 degree counterclockwise */ 1415 CAMERA3_STREAM_ROTATION_180 = 2, 1416 1417 /* Rotate by 270 degree counterclockwise */ 1418 CAMERA3_STREAM_ROTATION_270 = 3 1419 } camera3_stream_rotation_t; 1420 1421 /** 1422 * camera3_stream_configuration_mode_t: 1423 * 1424 * This defines the general operation mode for the HAL (for a given stream configuration), where 1425 * modes besides NORMAL have different semantics, and usually limit the generality of the API in 1426 * exchange for higher performance in some particular area. 1427 */ 1428 typedef enum camera3_stream_configuration_mode { 1429 /** 1430 * Normal stream configuration operation mode. This is the default camera operation mode, 1431 * where all semantics of HAL APIs and metadata controls apply. 1432 */ 1433 CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE = 0, 1434 1435 /** 1436 * Special constrained high speed operation mode for devices that can not support high 1437 * speed output in NORMAL mode. All streams in this configuration are operating at high speed 1438 * mode and have different characteristics and limitations to achieve high speed output. 1439 * The NORMAL mode can still be used for high speed output if the HAL can support high speed 1440 * output while satisfying all the semantics of HAL APIs and metadata controls. It is 1441 * recommended for the HAL to support high speed output in NORMAL mode (by advertising the high 1442 * speed FPS ranges in android.control.aeAvailableTargetFpsRanges) if possible. 1443 * 1444 * This mode has below limitations/requirements: 1445 * 1446 * 1. The HAL must support up to 2 streams with sizes reported by 1447 * android.control.availableHighSpeedVideoConfigurations. 1448 * 2. In this mode, the HAL is expected to output up to 120fps or higher. This mode must 1449 * support the targeted FPS range and size configurations reported by 1450 * android.control.availableHighSpeedVideoConfigurations. 1451 * 3. The HAL must support HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED output stream format. 1452 * 4. To achieve efficient high speed streaming, the HAL may have to aggregate 1453 * multiple frames together and send to camera device for processing where the request 1454 * controls are same for all the frames in this batch (batch mode). The HAL must support 1455 * max batch size and the max batch size requirements defined by 1456 * android.control.availableHighSpeedVideoConfigurations. 1457 * 5. In this mode, the HAL must override aeMode, awbMode, and afMode to ON, ON, and 1458 * CONTINUOUS_VIDEO, respectively. All post-processing block mode controls must be 1459 * overridden to be FAST. Therefore, no manual control of capture and post-processing 1460 * parameters is possible. All other controls operate the same as when 1461 * android.control.mode == AUTO. 
This means that all other android.control.* fields 1462 * must continue to work, such as 1463 * 1464 * android.control.aeTargetFpsRange 1465 * android.control.aeExposureCompensation 1466 * android.control.aeLock 1467 * android.control.awbLock 1468 * android.control.effectMode 1469 * android.control.aeRegions 1470 * android.control.afRegions 1471 * android.control.awbRegions 1472 * android.control.afTrigger 1473 * android.control.aePrecaptureTrigger 1474 * 1475 * Outside of android.control.*, the following controls must work: 1476 * 1477 * android.flash.mode (TORCH mode only, automatic flash for still capture will not work 1478 * since aeMode is ON) 1479 * android.lens.opticalStabilizationMode (if it is supported) 1480 * android.scaler.cropRegion 1481 * android.statistics.faceDetectMode (if it is supported) 1482 * 1483 * For more details about high speed stream requirements, see 1484 * android.control.availableHighSpeedVideoConfigurations and CONSTRAINED_HIGH_SPEED_VIDEO 1485 * capability defined in android.request.availableCapabilities. 1486 * 1487 * This mode only needs to be supported by HALs that include CONSTRAINED_HIGH_SPEED_VIDEO in 1488 * the android.request.availableCapabilities static metadata. 1489 */ 1490 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE = 1, 1491 1492 /** 1493 * First value for vendor-defined stream configuration modes. 1494 */ 1495 CAMERA3_VENDOR_STREAM_CONFIGURATION_MODE_START = 0x8000 1496 } camera3_stream_configuration_mode_t; 1497 1498 /** 1499 * camera3_stream_t: 1500 * 1501 * A handle to a single camera input or output stream. A stream is defined by 1502 * the framework by its buffer resolution and format, and additionally by the 1503 * HAL with the gralloc usage flags and the maximum in-flight buffer count. 1504 * 1505 * The stream structures are owned by the framework, but pointers to a 1506 * camera3_stream passed into the HAL by configure_streams() are valid until the 1507 * end of the first subsequent configure_streams() call that _does not_ include 1508 * that camera3_stream as an argument, or until the end of the close() call. 1509 * 1510 * All camera3_stream framework-controlled members are immutable once the 1511 * camera3_stream is passed into configure_streams(). The HAL may only change 1512 * the HAL-controlled parameters during a configure_streams() call, except for 1513 * the contents of the private pointer. 1514 * 1515 * If a configure_streams() call returns a non-fatal error, all active streams 1516 * remain valid as if configure_streams() had not been called. 1517 * 1518 * The endpoint of the stream is not visible to the camera HAL device. 1519 * In DEVICE_API_VERSION_3_1, this was changed to share consumer usage flags 1520 * on streams where the camera is a producer (OUTPUT and BIDIRECTIONAL stream 1521 * types) see the usage field below. 1522 */ 1523 typedef struct camera3_stream { 1524 1525 /***** 1526 * Set by framework before configure_streams() 1527 */ 1528 1529 /** 1530 * The type of the stream, one of the camera3_stream_type_t values. 1531 */ 1532 int stream_type; 1533 1534 /** 1535 * The width in pixels of the buffers in this stream 1536 */ 1537 uint32_t width; 1538 1539 /** 1540 * The height in pixels of the buffers in this stream 1541 */ 1542 uint32_t height; 1543 1544 /** 1545 * The pixel format for the buffers in this stream. Format is a value from 1546 * the HAL_PIXEL_FORMAT_* list in system/core/include/system/graphics.h, or 1547 * from device-specific headers. 
1548 * 1549 * If HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED is used, then the platform 1550 * gralloc module will select a format based on the usage flags provided by 1551 * the camera device and the other endpoint of the stream. 1552 * 1553 * <= CAMERA_DEVICE_API_VERSION_3_1: 1554 * 1555 * The camera HAL device must inspect the buffers handed to it in the 1556 * subsequent register_stream_buffers() call to obtain the 1557 * implementation-specific format details, if necessary. 1558 * 1559 * >= CAMERA_DEVICE_API_VERSION_3_2: 1560 * 1561 * register_stream_buffers() won't be called by the framework, so the HAL 1562 * should configure the ISP and sensor pipeline based purely on the sizes, 1563 * usage flags, and formats for the configured streams. 1564 */ 1565 int format; 1566 1567 /***** 1568 * Set by HAL during configure_streams(). 1569 */ 1570 1571 /** 1572 * The gralloc usage flags for this stream, as needed by the HAL. The usage 1573 * flags are defined in gralloc.h (GRALLOC_USAGE_*), or in device-specific 1574 * headers. 1575 * 1576 * For output streams, these are the HAL's producer usage flags. For input 1577 * streams, these are the HAL's consumer usage flags. The usage flags from 1578 * the producer and the consumer will be combined together and then passed 1579 * to the platform gralloc HAL module for allocating the gralloc buffers for 1580 * each stream. 1581 * 1582 * Version information: 1583 * 1584 * == CAMERA_DEVICE_API_VERSION_3_0: 1585 * 1586 * No initial value guaranteed when passed via configure_streams(). 1587 * HAL may not use this field as input, and must write over this field 1588 * with its usage flags. 1589 * 1590 * >= CAMERA_DEVICE_API_VERSION_3_1: 1591 * 1592 * For stream_type OUTPUT and BIDIRECTIONAL, when passed via 1593 * configure_streams(), the initial value of this is the consumer's 1594 * usage flags. The HAL may use these consumer flags to decide stream 1595 * configuration. 1596 * For stream_type INPUT, when passed via configure_streams(), the initial 1597 * value of this is 0. 1598 * For all streams passed via configure_streams(), the HAL must write 1599 * over this field with its usage flags. 1600 */ 1601 uint32_t usage; 1602 1603 /** 1604 * The maximum number of buffers the HAL device may need to have dequeued at 1605 * the same time. The HAL device may not have more buffers in-flight from 1606 * this stream than this value. 1607 */ 1608 uint32_t max_buffers; 1609 1610 /** 1611 * A handle to HAL-private information for the stream. Will not be inspected 1612 * by the framework code. 1613 */ 1614 void *priv; 1615 1616 /** 1617 * A field that describes the contents of the buffer. The format and buffer 1618 * dimensions define the memory layout and structure of the stream buffers, 1619 * while dataSpace defines the meaning of the data within the buffer. 1620 * 1621 * For most formats, dataSpace defines the color space of the image data. 1622 * In addition, for some formats, dataSpace indicates whether image- or 1623 * depth-based data is requested. See system/core/include/system/graphics.h 1624 * for details of formats and valid dataSpace values for each format. 1625 * 1626 * Version information: 1627 * 1628 * < CAMERA_DEVICE_API_VERSION_3_3: 1629 * 1630 * Not defined and should not be accessed. dataSpace should be assumed to 1631 * be HAL_DATASPACE_UNKNOWN, and the appropriate color space, etc, should 1632 * be determined from the usage flags and the format. 1633 * 1634 * = CAMERA_DEVICE_API_VERSION_3_3: 1635 * 1636 * Always set by the camera service. 
HAL must use this dataSpace to
1637 * configure the stream to the correct colorspace, or to select between
1638 * color and depth outputs if supported. The dataspace values are the
1639 * legacy definitions in graphics.h.
1640 *
1641 * >= CAMERA_DEVICE_API_VERSION_3_4:
1642 *
1643 * Always set by the camera service. HAL must use this dataSpace to
1644 * configure the stream to the correct colorspace, or to select between
1645 * color and depth outputs if supported. The dataspace values are set
1646 * using the V0 dataspace definitions in graphics.h.
1647 */
1648 android_dataspace_t data_space;
1649
1650 /**
1651 * The required output rotation of the stream, one of
1652 * the camera3_stream_rotation_t values. This must be inspected by the HAL along
1653 * with the stream width and height. For example, if the rotation is 90 degrees
1654 * and the stream width and height are 720 and 1280 respectively, the camera service
1655 * will supply buffers of size 720x1280, and the HAL should capture a 1280x720 image
1656 * and rotate the image by 90 degrees counterclockwise. The rotation field is
1657 * a no-op when the stream type is input. The camera HAL must ignore the rotation
1658 * field for an input stream.
1659 *
1660 * <= CAMERA_DEVICE_API_VERSION_3_2:
1661 *
1662 * Not defined and must not be accessed. HAL must not apply any rotation
1663 * on output images.
1664 *
1665 * >= CAMERA_DEVICE_API_VERSION_3_3:
1666 *
1667 * Always set by the camera service. The HAL must inspect this field during stream
1668 * configuration and return -EINVAL if the HAL cannot perform such a rotation.
1669 * The HAL must always support CAMERA3_STREAM_ROTATION_0, so a
1670 * configure_streams() call must not fail for unsupported rotation if the
1671 * rotation field of all streams is CAMERA3_STREAM_ROTATION_0.
1672 *
1673 */
1674 int rotation;
1675
1676 /* reserved for future use */
1677 void *reserved[7];
1678
1679 } camera3_stream_t;
1680
1681 /**
1682 * camera3_stream_configuration_t:
1683 *
1684 * A structure of stream definitions, used by configure_streams(). This
1685 * structure defines all the output streams and the reprocessing input
1686 * stream for the current camera use case.
1687 */
1688 typedef struct camera3_stream_configuration {
1689 /**
1690 * The total number of streams requested by the framework. This includes
1691 * both input and output streams. The number of streams will be at least 1,
1692 * and there will be at least one output-capable stream.
1693 */
1694 uint32_t num_streams;
1695
1696 /**
1697 * An array of camera stream pointers, defining the input/output
1698 * configuration for the camera HAL device.
1699 *
1700 * At most one input-capable stream may be defined (INPUT or BIDIRECTIONAL)
1701 * in a single configuration.
1702 *
1703 * At least one output-capable stream must be defined (OUTPUT or
1704 * BIDIRECTIONAL).
1705 */
1706 camera3_stream_t **streams;
1707
1708 /**
1709 * >= CAMERA_DEVICE_API_VERSION_3_3:
1710 *
1711 * The operation mode of the streams in this configuration, one of the values
1712 * defined in camera3_stream_configuration_mode_t. The HAL can use this
1713 * mode as an indicator to set the stream properties (e.g.,
1714 * camera3_stream->max_buffers) appropriately. For example, if the
1715 * configuration is
1716 * CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE, the HAL may
1717 * want to set aside more buffers for batch mode operation (see
1718 * android.control.availableHighSpeedVideoConfigurations for the batch mode
1719 * definition).
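 *
 * For illustration only, a HAL's configure_streams() implementation might
 * branch on this field roughly as follows (a non-normative sketch; dev and
 * stream_list are the configure_streams() arguments, and the helpers
 * configure_normal() and configure_high_speed_batching() are hypothetical):
 *
 *     // Sketch: choose a configuration path based on operation_mode.
 *     switch (stream_list->operation_mode) {
 *     case CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE:
 *         return configure_normal(dev, stream_list);
 *     case CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE:
 *         // e.g. raise max_buffers on each stream to cover batched requests
 *         return configure_high_speed_batching(dev, stream_list);
 *     default:
 *         // Unsupported (e.g. unknown vendor) configuration modes
 *         return -EINVAL;
 *     }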
1720 * 1721 */ 1722 uint32_t operation_mode; 1723 } camera3_stream_configuration_t; 1724 1725 /** 1726 * camera3_buffer_status_t: 1727 * 1728 * The current status of a single stream buffer. 1729 */ 1730 typedef enum camera3_buffer_status { 1731 /** 1732 * The buffer is in a normal state, and can be used after waiting on its 1733 * sync fence. 1734 */ 1735 CAMERA3_BUFFER_STATUS_OK = 0, 1736 1737 /** 1738 * The buffer does not contain valid data, and the data in it should not be 1739 * used. The sync fence must still be waited on before reusing the buffer. 1740 */ 1741 CAMERA3_BUFFER_STATUS_ERROR = 1 1742 1743 } camera3_buffer_status_t; 1744 1745 /** 1746 * camera3_stream_buffer_t: 1747 * 1748 * A single buffer from a camera3 stream. It includes a handle to its parent 1749 * stream, the handle to the gralloc buffer itself, and sync fences 1750 * 1751 * The buffer does not specify whether it is to be used for input or output; 1752 * that is determined by its parent stream type and how the buffer is passed to 1753 * the HAL device. 1754 */ 1755 typedef struct camera3_stream_buffer { 1756 /** 1757 * The handle of the stream this buffer is associated with 1758 */ 1759 camera3_stream_t *stream; 1760 1761 /** 1762 * The native handle to the buffer 1763 */ 1764 buffer_handle_t *buffer; 1765 1766 /** 1767 * Current state of the buffer, one of the camera3_buffer_status_t 1768 * values. The framework will not pass buffers to the HAL that are in an 1769 * error state. In case a buffer could not be filled by the HAL, it must 1770 * have its status set to CAMERA3_BUFFER_STATUS_ERROR when returned to the 1771 * framework with process_capture_result(). 1772 */ 1773 int status; 1774 1775 /** 1776 * The acquire sync fence for this buffer. The HAL must wait on this fence 1777 * fd before attempting to read from or write to this buffer. 1778 * 1779 * The framework may be set to -1 to indicate that no waiting is necessary 1780 * for this buffer. 1781 * 1782 * When the HAL returns an output buffer to the framework with 1783 * process_capture_result(), the acquire_fence must be set to -1. If the HAL 1784 * never waits on the acquire_fence due to an error in filling a buffer, 1785 * when calling process_capture_result() the HAL must set the release_fence 1786 * of the buffer to be the acquire_fence passed to it by the framework. This 1787 * will allow the framework to wait on the fence before reusing the buffer. 1788 * 1789 * For input buffers, the HAL must not change the acquire_fence field during 1790 * the process_capture_request() call. 1791 * 1792 * >= CAMERA_DEVICE_API_VERSION_3_2: 1793 * 1794 * When the HAL returns an input buffer to the framework with 1795 * process_capture_result(), the acquire_fence must be set to -1. If the HAL 1796 * never waits on input buffer acquire fence due to an error, the sync 1797 * fences should be handled similarly to the way they are handled for output 1798 * buffers. 1799 */ 1800 int acquire_fence; 1801 1802 /** 1803 * The release sync fence for this buffer. The HAL must set this fence when 1804 * returning buffers to the framework, or write -1 to indicate that no 1805 * waiting is required for this buffer. 1806 * 1807 * For the output buffers, the fences must be set in the output_buffers 1808 * array passed to process_capture_result(). 1809 * 1810 * <= CAMERA_DEVICE_API_VERSION_3_1: 1811 * 1812 * For the input buffer, the release fence must be set by the 1813 * process_capture_request() call. 
1814 *
1815 * >= CAMERA_DEVICE_API_VERSION_3_2:
1816 *
1817 * For the input buffer, the fences must be set in the input_buffer
1818 * passed to process_capture_result().
1819 *
1820 * After signaling the release_fence for this buffer, the HAL
1821 * should not make any further attempts to access this buffer, as the
1822 * ownership has been fully transferred back to the framework.
1823 *
1824 * If a fence of -1 was specified, then the ownership of this buffer
1825 * is transferred back immediately upon the call of process_capture_result().
1826 */
1827 int release_fence;
1828
1829 } camera3_stream_buffer_t;
1830
1831 /**
1832 * camera3_stream_buffer_set_t:
1833 *
1834 * The complete set of gralloc buffers for a stream. This structure is given to
1835 * register_stream_buffers() to allow the camera HAL device to register/map/etc.
1836 * newly allocated stream buffers.
1837 *
1838 * >= CAMERA_DEVICE_API_VERSION_3_2:
1839 *
1840 * Deprecated (and not used). In particular,
1841 * register_stream_buffers is also deprecated and will never be invoked.
1842 *
1843 */
1844 typedef struct camera3_stream_buffer_set {
1845 /**
1846 * The stream handle for the stream these buffers belong to
1847 */
1848 camera3_stream_t *stream;
1849
1850 /**
1851 * The number of buffers in this stream. It is guaranteed to be at least
1852 * stream->max_buffers.
1853 */
1854 uint32_t num_buffers;
1855
1856 /**
1857 * The array of gralloc buffer handles for this stream. If the stream format
1858 * is set to HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, the camera HAL device
1859 * should inspect the passed-in buffers to determine any platform-private
1860 * pixel format information.
1861 */
1862 buffer_handle_t **buffers;
1863
1864 } camera3_stream_buffer_set_t;
1865
1866 /**
1867 * camera3_jpeg_blob:
1868 *
1869 * Transport header for compressed JPEG buffers in output streams.
1870 *
1871 * To capture JPEG images, a stream is created using the pixel format
1872 * HAL_PIXEL_FORMAT_BLOB. The buffer size for the stream is calculated by the
1873 * framework, based on the static metadata field android.jpeg.maxSize. Since
1874 * compressed JPEG images are of variable size, the HAL needs to include the
1875 * final size of the compressed image using this structure inside the output
1876 * stream buffer. The JPEG blob ID field must be set to CAMERA3_JPEG_BLOB_ID.
1877 *
1878 * The transport header should be at the end of the JPEG output stream buffer. That
1879 * means the jpeg_blob_id must start at byte[buffer_size -
1880 * sizeof(camera3_jpeg_blob)], where buffer_size is the size of the gralloc buffer.
1881 * Any HAL using this transport header must account for it in android.jpeg.maxSize.
1882 * The JPEG data itself starts at the beginning of the buffer and should be
1883 * jpeg_size bytes long.
1884 */
1885 typedef struct camera3_jpeg_blob {
1886 uint16_t jpeg_blob_id;
1887 uint32_t jpeg_size;
1888 } camera3_jpeg_blob_t;
1889
1890 enum {
1891 CAMERA3_JPEG_BLOB_ID = 0x00FF
1892 };
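/*
 * For illustration only, a HAL might append the transport header to a filled
 * BLOB buffer roughly as follows. This is a non-normative sketch: jpeg_buf is
 * assumed to be the mapped CPU address of the gralloc buffer, buffer_size the
 * size of that gralloc buffer, and encoded_size the actual compressed size
 * produced by the encoder. memcpy() is from <string.h>.
 *
 *     static void write_jpeg_blob_trailer(uint8_t *jpeg_buf, size_t buffer_size,
 *                                         uint32_t encoded_size) {
 *         camera3_jpeg_blob_t blob;
 *         blob.jpeg_blob_id = CAMERA3_JPEG_BLOB_ID;
 *         blob.jpeg_size = encoded_size;
 *         // The trailer starts at byte[buffer_size - sizeof(camera3_jpeg_blob)];
 *         // the JPEG data itself occupies the start of the buffer.
 *         memcpy(jpeg_buf + buffer_size - sizeof(camera3_jpeg_blob_t),
 *                &blob, sizeof(blob));
 *     }
 */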
1893
1894 /**********************************************************************
1895 *
1896 * Message definitions for the HAL notify() callback.
1897 *
1898 * These definitions are used for the HAL notify callback, to signal
1899 * asynchronous events from the HAL device to the Android framework.
1900 *
1901 */
1902
1903 /**
1904 * camera3_msg_type:
1905 *
1906 * Indicates the type of message sent, which specifies which member of the
1907 * message union is valid.
1908 *
1909 */
1910 typedef enum camera3_msg_type {
1911 /**
1912 * An error has occurred. camera3_notify_msg.message.error contains the
1913 * error information.
1914 */
1915 CAMERA3_MSG_ERROR = 1,
1916
1917 /**
1918 * The exposure of a given request or the processing of a reprocess request has
1919 * begun. camera3_notify_msg.message.shutter contains the information about
1920 * the capture.
1921 */
1922 CAMERA3_MSG_SHUTTER = 2,
1923
1924 /**
1925 * Number of framework message types
1926 */
1927 CAMERA3_NUM_MESSAGES
1928
1929 } camera3_msg_type_t;
1930
1931 /**
1932 * Defined error codes for CAMERA3_MSG_ERROR
1933 */
1934 typedef enum camera3_error_msg_code {
1935 /**
1936 * A serious failure occurred. No further frames or buffer streams will
1937 * be produced by the device. The device should be treated as closed. The
1938 * client must reopen the device to use it again. The frame_number field
1939 * is unused.
1940 */
1941 CAMERA3_MSG_ERROR_DEVICE = 1,
1942
1943 /**
1944 * An error has occurred in processing a request. No output (metadata or
1945 * buffers) will be produced for this request. The frame_number field
1946 * specifies which request has been dropped. Subsequent requests are
1947 * unaffected, and the device remains operational.
1948 */
1949 CAMERA3_MSG_ERROR_REQUEST = 2,
1950
1951 /**
1952 * An error has occurred in producing an output result metadata buffer
1953 * for a request, but output stream buffers for it will still be
1954 * available. Subsequent requests are unaffected, and the device remains
1955 * operational. The frame_number field specifies the request for which
1956 * result metadata won't be available.
1957 */
1958 CAMERA3_MSG_ERROR_RESULT = 3,
1959
1960 /**
1961 * An error has occurred in placing an output buffer into a stream for a
1962 * request. The frame metadata and other buffers may still be
1963 * available. Subsequent requests are unaffected, and the device remains
1964 * operational. The frame_number field specifies the request for which the
1965 * buffer was dropped, and error_stream contains a pointer to the stream
1966 * that dropped the frame.
1967 */
1968 CAMERA3_MSG_ERROR_BUFFER = 4,
1969
1970 /**
1971 * Number of error types
1972 */
1973 CAMERA3_MSG_NUM_ERRORS
1974
1975 } camera3_error_msg_code_t;
1976
1977 /**
1978 * camera3_error_msg_t:
1979 *
1980 * Message contents for CAMERA3_MSG_ERROR
1981 */
1982 typedef struct camera3_error_msg {
1983 /**
1984 * Frame number of the request the error applies to. 0 if the frame number
1985 * isn't applicable to the error.
1986 */
1987 uint32_t frame_number;
1988
1989 /**
1990 * Pointer to the stream that had a failure. NULL if the stream isn't
1991 * applicable to the error.
1992 */
1993 camera3_stream_t *error_stream;
1994
1995 /**
1996 * The code for this error; one of the CAMERA3_MSG_ERROR_* enum values.
1997 */
1998 int error_code;
1999
2000 } camera3_error_msg_t;
2001
2002 /**
2003 * camera3_shutter_msg_t:
2004 *
2005 * Message contents for CAMERA3_MSG_SHUTTER
2006 */
2007 typedef struct camera3_shutter_msg {
2008 /**
2009 * Frame number of the request that has begun exposure or reprocessing.
2010 */
2011 uint32_t frame_number;
2012
2013 /**
2014 * Timestamp for the start of capture. For a reprocess request, this must
2015 * be the input image's start of capture. This must match the capture result
2016 * metadata's sensor exposure start timestamp.
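 *
 * For illustration only, a HAL might send the shutter notification for a
 * request roughly as follows (a non-normative sketch; callback_ops is the
 * pointer saved from initialize(), request is the pending
 * camera3_capture_request_t, and start_ns is assumed to be the
 * start-of-exposure timestamp, in nanoseconds, that will also be reported in
 * the result metadata):
 *
 *     camera3_notify_msg_t msg;
 *     msg.type = CAMERA3_MSG_SHUTTER;
 *     msg.message.shutter.frame_number = request->frame_number;
 *     msg.message.shutter.timestamp = start_ns;
 *     callback_ops->notify(callback_ops, &msg);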
2017 */ 2018 uint64_t timestamp; 2019 2020 } camera3_shutter_msg_t; 2021 2022 /** 2023 * camera3_notify_msg_t: 2024 * 2025 * The message structure sent to camera3_callback_ops_t.notify() 2026 */ 2027 typedef struct camera3_notify_msg { 2028 2029 /** 2030 * The message type. One of camera3_notify_msg_type, or a private extension. 2031 */ 2032 int type; 2033 2034 union { 2035 /** 2036 * Error message contents. Valid if type is CAMERA3_MSG_ERROR 2037 */ 2038 camera3_error_msg_t error; 2039 2040 /** 2041 * Shutter message contents. Valid if type is CAMERA3_MSG_SHUTTER 2042 */ 2043 camera3_shutter_msg_t shutter; 2044 2045 /** 2046 * Generic message contents. Used to ensure a minimum size for custom 2047 * message types. 2048 */ 2049 uint8_t generic[32]; 2050 } message; 2051 2052 } camera3_notify_msg_t; 2053 2054 /********************************************************************** 2055 * 2056 * Capture request/result definitions for the HAL process_capture_request() 2057 * method, and the process_capture_result() callback. 2058 * 2059 */ 2060 2061 /** 2062 * camera3_request_template_t: 2063 * 2064 * Available template types for 2065 * camera3_device_ops.construct_default_request_settings() 2066 */ 2067 typedef enum camera3_request_template { 2068 /** 2069 * Standard camera preview operation with 3A on auto. 2070 */ 2071 CAMERA3_TEMPLATE_PREVIEW = 1, 2072 2073 /** 2074 * Standard camera high-quality still capture with 3A and flash on auto. 2075 */ 2076 CAMERA3_TEMPLATE_STILL_CAPTURE = 2, 2077 2078 /** 2079 * Standard video recording plus preview with 3A on auto, torch off. 2080 */ 2081 CAMERA3_TEMPLATE_VIDEO_RECORD = 3, 2082 2083 /** 2084 * High-quality still capture while recording video. Application will 2085 * include preview, video record, and full-resolution YUV or JPEG streams in 2086 * request. Must not cause stuttering on video stream. 3A on auto. 2087 */ 2088 CAMERA3_TEMPLATE_VIDEO_SNAPSHOT = 4, 2089 2090 /** 2091 * Zero-shutter-lag mode. Application will request preview and 2092 * full-resolution data for each frame, and reprocess it to JPEG when a 2093 * still image is requested by user. Settings should provide highest-quality 2094 * full-resolution images without compromising preview frame rate. 3A on 2095 * auto. 2096 */ 2097 CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG = 5, 2098 2099 /** 2100 * A basic template for direct application control of capture 2101 * parameters. All automatic control is disabled (auto-exposure, auto-white 2102 * balance, auto-focus), and post-processing parameters are set to preview 2103 * quality. The manual capture parameters (exposure, sensitivity, etc.) 2104 * are set to reasonable defaults, but should be overridden by the 2105 * application depending on the intended use case. 2106 */ 2107 CAMERA3_TEMPLATE_MANUAL = 6, 2108 2109 /* Total number of templates */ 2110 CAMERA3_TEMPLATE_COUNT, 2111 2112 /** 2113 * First value for vendor-defined request templates 2114 */ 2115 CAMERA3_VENDOR_TEMPLATE_START = 0x40000000 2116 2117 } camera3_request_template_t; 2118 2119 /** 2120 * camera3_capture_request_t: 2121 * 2122 * A single request for image capture/buffer reprocessing, sent to the Camera 2123 * HAL device by the framework in process_capture_request(). 2124 * 2125 * The request contains the settings to be used for this capture, and the set of 2126 * output buffers to write the resulting image data in. 
It may optionally 2127 * contain an input buffer, in which case the request is for reprocessing that 2128 * input buffer instead of capturing a new image with the camera sensor. The 2129 * capture is identified by the frame_number. 2130 * 2131 * In response, the camera HAL device must send a camera3_capture_result 2132 * structure asynchronously to the framework, using the process_capture_result() 2133 * callback. 2134 */ 2135 typedef struct camera3_capture_request { 2136 /** 2137 * The frame number is an incrementing integer set by the framework to 2138 * uniquely identify this capture. It needs to be returned in the result 2139 * call, and is also used to identify the request in asynchronous 2140 * notifications sent to camera3_callback_ops_t.notify(). 2141 */ 2142 uint32_t frame_number; 2143 2144 /** 2145 * The settings buffer contains the capture and processing parameters for 2146 * the request. As a special case, a NULL settings buffer indicates that the 2147 * settings are identical to the most-recently submitted capture request. A 2148 * NULL buffer cannot be used as the first submitted request after a 2149 * configure_streams() call. 2150 */ 2151 const camera_metadata_t *settings; 2152 2153 /** 2154 * The input stream buffer to use for this request, if any. 2155 * 2156 * If input_buffer is NULL, then the request is for a new capture from the 2157 * imager. If input_buffer is valid, the request is for reprocessing the 2158 * image contained in input_buffer. 2159 * 2160 * In the latter case, the HAL must set the release_fence of the 2161 * input_buffer to a valid sync fence, or to -1 if the HAL does not support 2162 * sync, before process_capture_request() returns. 2163 * 2164 * The HAL is required to wait on the acquire sync fence of the input buffer 2165 * before accessing it. 2166 * 2167 * <= CAMERA_DEVICE_API_VERSION_3_1: 2168 * 2169 * Any input buffer included here will have been registered with the HAL 2170 * through register_stream_buffers() before its inclusion in a request. 2171 * 2172 * >= CAMERA_DEVICE_API_VERSION_3_2: 2173 * 2174 * The buffers will not have been pre-registered with the HAL. 2175 * Subsequent requests may reuse buffers, or provide entirely new buffers. 2176 */ 2177 camera3_stream_buffer_t *input_buffer; 2178 2179 /** 2180 * The number of output buffers for this capture request. Must be at least 2181 * 1. 2182 */ 2183 uint32_t num_output_buffers; 2184 2185 /** 2186 * An array of num_output_buffers stream buffers, to be filled with image 2187 * data from this capture/reprocess. The HAL must wait on the acquire fences 2188 * of each stream buffer before writing to them. 2189 * 2190 * The HAL takes ownership of the actual buffer_handle_t entries in 2191 * output_buffers; the framework does not access them until they are 2192 * returned in a camera3_capture_result_t. 2193 * 2194 * <= CAMERA_DEVICE_API_VERSION_3_1: 2195 * 2196 * All the buffers included here will have been registered with the HAL 2197 * through register_stream_buffers() before their inclusion in a request. 2198 * 2199 * >= CAMERA_DEVICE_API_VERSION_3_2: 2200 * 2201 * Any or all of the buffers included here may be brand new in this 2202 * request (having never before seen by the HAL). 2203 */ 2204 const camera3_stream_buffer_t *output_buffers; 2205 2206 } camera3_capture_request_t; 2207 2208 /** 2209 * camera3_capture_result_t: 2210 * 2211 * The result of a single capture/reprocess by the camera HAL device. 
This is
2212 * sent to the framework asynchronously with process_capture_result(), in
2213 * response to a single capture request sent to the HAL with
2214 * process_capture_request(). Multiple process_capture_result() calls may be
2215 * performed by the HAL for each request.
2216 *
2217 * Each call, all with the same frame
2218 * number, may contain some subset of the output buffers, and/or the result
2219 * metadata. The metadata may only be provided once for a given frame number;
2220 * all other calls must set the result metadata to NULL.
2221 *
2222 * The result structure contains the output metadata from this capture, and the
2223 * set of output buffers that have been/will be filled for this capture. Each
2224 * output buffer may come with a release sync fence that the framework will wait
2225 * on before reading, in case the buffer has not yet been filled by the HAL.
2226 *
2227 * >= CAMERA_DEVICE_API_VERSION_3_2:
2228 *
2229 * The metadata may be provided multiple times for a single frame number. The
2230 * framework will accumulate the final result set by combining each
2231 * partial result into the total result set.
2232 *
2233 * If an input buffer is given in a request, the HAL must return it in one of
2234 * the process_capture_result calls, and the call may be to just return the input
2235 * buffer, without metadata or output buffers; the sync fences must be handled
2236 * the same way they are done for output buffers.
2237 *
2238 *
2239 * Performance considerations:
2240 *
2241 * Applications will also receive these partial results immediately, so sending
2242 * partial results is a highly recommended performance optimization: it avoids
2243 * paying the total pipeline latency before reporting results that are known very
2244 * early on in the pipeline.
2245 *
2246 * A typical use case might be calculating the AF state halfway through the
2247 * pipeline; by sending the state back to the framework immediately, we get a
2248 * 50% performance increase in the perceived responsiveness of the auto-focus.
2249 *
2250 */
2251 typedef struct camera3_capture_result {
2252 /**
2253 * The frame number is an incrementing integer set by the framework in the
2254 * submitted request to uniquely identify this capture. It is also used to
2255 * identify the request in asynchronous notifications sent to
2256 * camera3_callback_ops_t.notify().
2257 */
2258 uint32_t frame_number;
2259
2260 /**
2261 * The result metadata for this capture. This contains information about the
2262 * final capture parameters, the state of the capture and post-processing
2263 * hardware, the state of the 3A algorithms, if enabled, and the output of
2264 * any enabled statistics units.
2265 *
2266 * Only one call to process_capture_result() with a given frame_number may
2267 * include the result metadata. All other calls for the same frame_number
2268 * must set this to NULL.
2269 *
2270 * If there was an error producing the result metadata, result must be an
2271 * empty metadata buffer, and notify() must be called with ERROR_RESULT.
2272 *
2273 * >= CAMERA_DEVICE_API_VERSION_3_2:
2274 *
2275 * Multiple calls to process_capture_result() with a given frame_number
2276 * may include the result metadata.
2277 *
2278 * Partial metadata submitted should not include any metadata key returned
2279 * in a previous partial result for a given frame. Each new partial result
2280 * for that frame must also set a distinct partial_result value.
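 *
 * For illustration only, a HAL that advertises an
 * android.request.partialResultCount of 2 might return the early 3A metadata
 * first and the remaining keys with the buffers later, roughly as follows
 * (a non-normative sketch; ops, frame_number, threeA_metadata, full_metadata,
 * num_buffers and buffers are hypothetical HAL-owned values):
 *
 *     camera3_capture_result_t partial = {0};
 *     partial.frame_number = frame_number;
 *     partial.result = threeA_metadata;        // e.g. just the 3A state keys
 *     partial.num_output_buffers = 0;          // buffers come later
 *     partial.partial_result = 1;
 *     ops->process_capture_result(ops, &partial);
 *
 *     camera3_capture_result_t final_result = {0};
 *     final_result.frame_number = frame_number;
 *     final_result.result = full_metadata;     // must not repeat partial 1 keys
 *     final_result.num_output_buffers = num_buffers;
 *     final_result.output_buffers = buffers;
 *     final_result.partial_result = 2;
 *     ops->process_capture_result(ops, &final_result);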
2281 * 2282 * If notify has been called with ERROR_RESULT, all further partial 2283 * results for that frame are ignored by the framework. 2284 */ 2285 const camera_metadata_t *result; 2286 2287 /** 2288 * The number of output buffers returned in this result structure. Must be 2289 * less than or equal to the matching capture request's count. If this is 2290 * less than the buffer count in the capture request, at least one more call 2291 * to process_capture_result with the same frame_number must be made, to 2292 * return the remaining output buffers to the framework. This may only be 2293 * zero if the structure includes valid result metadata or an input buffer 2294 * is returned in this result. 2295 */ 2296 uint32_t num_output_buffers; 2297 2298 /** 2299 * The handles for the output stream buffers for this capture. They may not 2300 * yet be filled at the time the HAL calls process_capture_result(); the 2301 * framework will wait on the release sync fences provided by the HAL before 2302 * reading the buffers. 2303 * 2304 * The HAL must set the stream buffer's release sync fence to a valid sync 2305 * fd, or to -1 if the buffer has already been filled. 2306 * 2307 * If the HAL encounters an error while processing the buffer, and the 2308 * buffer is not filled, the buffer's status field must be set to 2309 * CAMERA3_BUFFER_STATUS_ERROR. If the HAL did not wait on the acquire fence 2310 * before encountering the error, the acquire fence should be copied into 2311 * the release fence, to allow the framework to wait on the fence before 2312 * reusing the buffer. 2313 * 2314 * The acquire fence must be set to -1 for all output buffers. If 2315 * num_output_buffers is zero, this may be NULL. In that case, at least one 2316 * more process_capture_result call must be made by the HAL to provide the 2317 * output buffers. 2318 * 2319 * When process_capture_result is called with a new buffer for a frame, 2320 * all previous frames' buffers for that corresponding stream must have been 2321 * already delivered (the fences need not have yet been signaled). 2322 * 2323 * >= CAMERA_DEVICE_API_VERSION_3_2: 2324 * 2325 * Gralloc buffers for a frame may be sent to framework before the 2326 * corresponding SHUTTER-notify. 2327 * 2328 * Performance considerations: 2329 * 2330 * Buffers delivered to the framework will not be dispatched to the 2331 * application layer until a start of exposure timestamp has been received 2332 * via a SHUTTER notify() call. It is highly recommended to 2333 * dispatch that call as early as possible. 2334 */ 2335 const camera3_stream_buffer_t *output_buffers; 2336 2337 /** 2338 * >= CAMERA_DEVICE_API_VERSION_3_2: 2339 * 2340 * The handle for the input stream buffer for this capture. It may not 2341 * yet be consumed at the time the HAL calls process_capture_result(); the 2342 * framework will wait on the release sync fences provided by the HAL before 2343 * reusing the buffer. 2344 * 2345 * The HAL should handle the sync fences the same way they are done for 2346 * output_buffers. 2347 * 2348 * Only one input buffer is allowed to be sent per request. Similarly to 2349 * output buffers, the ordering of returned input buffers must be 2350 * maintained by the HAL. 2351 * 2352 * Performance considerations: 2353 * 2354 * The input buffer should be returned as early as possible. If the HAL 2355 * supports sync fences, it can call process_capture_result to hand it back 2356 * with sync fences being set appropriately. 
If the sync fences are not
2357 * supported, the buffer can only be returned when it is consumed, which
2358 * may take a long time; the HAL may choose to copy this input buffer to make
2359 * the buffer return sooner.
2360 */
2361 const camera3_stream_buffer_t *input_buffer;
2362
2363 /**
2364 * >= CAMERA_DEVICE_API_VERSION_3_2:
2365 *
2366 * In order to take advantage of partial results, the HAL must set the
2367 * static metadata android.request.partialResultCount to the number of
2368 * partial results it will send for each frame.
2369 *
2370 * Each new capture result with a partial result must set
2371 * this field (partial_result) to a distinct inclusive value between
2372 * 1 and android.request.partialResultCount.
2373 *
2374 * HALs not wishing to take advantage of this feature must not
2375 * set android.request.partialResultCount or partial_result to a value
2376 * other than 1.
2377 *
2378 * This value must be set to 0 when a capture result contains buffers only
2379 * and no metadata.
2380 */
2381 uint32_t partial_result;
2382
2383 } camera3_capture_result_t;
2384
2385 /**********************************************************************
2386 *
2387 * Callback methods for the HAL to call into the framework.
2388 *
2389 * These methods are used to return metadata and image buffers for completed
2390 * or failed captures, and to notify the framework of asynchronous events such
2391 * as errors.
2392 *
2393 * The framework will not call back into the HAL from within these callbacks,
2394 * and these calls will not block for extended periods.
2395 *
2396 */
2397 typedef struct camera3_callback_ops {
2398
2399 /**
2400 * process_capture_result:
2401 *
2402 * Send results from a completed capture to the framework.
2403 * process_capture_result() may be invoked multiple times by the HAL in
2404 * response to a single capture request. This allows, for example, the
2405 * metadata and low-resolution buffers to be returned in one call, and
2406 * post-processed JPEG buffers in a later call, once they are available. Each
2407 * call must include the frame number of the request it is returning
2408 * metadata or buffers for.
2409 *
2410 * A component (buffer or metadata) of the complete result may only be
2411 * included in one process_capture_result call. A buffer for each stream,
2412 * and the result metadata, must be returned by the HAL for each request in
2413 * one of the process_capture_result calls, even in case of errors producing
2414 * some of the output. A call to process_capture_result() with neither
2415 * output buffers nor result metadata is not allowed.
2416 *
2417 * The order of returning metadata and buffers for a single result does not
2418 * matter, but buffers for a given stream must be returned in FIFO order. So
2419 * the buffer for request 5 for stream A must always be returned before the
2420 * buffer for request 6 for stream A. This also applies to the result
2421 * metadata; the metadata for request 5 must be returned before the metadata
2422 * for request 6.
2423 *
2424 * However, different streams are independent of each other, so it is
2425 * acceptable and expected that the buffer for request 5 for stream A may be
2426 * returned after the buffer for request 6 for stream B is. And it is
2427 * acceptable that the result metadata for request 6 for stream B is
2428 * returned before the buffer for request 5 for stream A is.
2429 *
2430 * The HAL retains ownership of the result structure, which only needs to be
2431 * valid to access during this call.
The framework will copy whatever it
2432 * needs before this call returns.
2433 *
2434 * The output buffers do not need to be filled yet; the framework will wait
2435 * on the stream buffer release sync fence before reading the buffer
2436 * data. Therefore, this method should be called by the HAL as soon as
2437 * possible, even if some or all of the output buffers are still
2438 * being filled. The HAL must include valid release sync fences in each
2439 * output_buffers stream buffer entry, or -1 if that stream buffer is
2440 * already filled.
2441 *
2442 * If the result buffer cannot be constructed for a request, the HAL should
2443 * return an empty metadata buffer, but still provide the output buffers and
2444 * their sync fences. In addition, notify() must be called with an
2445 * ERROR_RESULT message.
2446 *
2447 * If an output buffer cannot be filled, its status field must be set to
2448 * STATUS_ERROR. In addition, notify() must be called with an ERROR_BUFFER
2449 * message.
2450 *
2451 * If the entire capture has failed, then this method still needs to be
2452 * called to return the output buffers to the framework. All the buffer
2453 * statuses should be STATUS_ERROR, and the result metadata should be an
2454 * empty buffer. In addition, notify() must be called with an ERROR_REQUEST
2455 * message. In this case, individual ERROR_RESULT/ERROR_BUFFER messages
2456 * should not be sent.
2457 *
2458 * Performance requirements:
2459 *
2460 * This is a non-blocking call. The framework will return from this call within 5ms.
2461 *
2462 * The pipeline latency (see S7 for definition) should be less than or equal to
2463 * 4 frame intervals, and must be less than or equal to 8 frame intervals.
2464 *
2465 */
2466 void (*process_capture_result)(const struct camera3_callback_ops *,
2467 const camera3_capture_result_t *result);
2468
2469 /**
2470 * notify:
2471 *
2472 * Asynchronous notification callback from the HAL, fired for various
2473 * reasons. Only for information independent of frame capture, or that
2474 * requires specific timing. The ownership of the message structure remains
2475 * with the HAL, and the msg only needs to be valid for the duration of this
2476 * call.
2477 *
2478 * Multiple threads may call notify() simultaneously.
2479 *
2480 * <= CAMERA_DEVICE_API_VERSION_3_1:
2481 *
2482 * The notification for the start of exposure for a given request must be
2483 * sent by the HAL before the first call to process_capture_result() for
2484 * that request is made.
2485 *
2486 * >= CAMERA_DEVICE_API_VERSION_3_2:
2487 *
2488 * Buffers delivered to the framework will not be dispatched to the
2489 * application layer until a start of exposure timestamp (or the input image's
2490 * start of exposure timestamp for a reprocess request) has been received
2491 * via a SHUTTER notify() call. It is highly recommended to dispatch this
2492 * call as early as possible.
2493 *
2494 * ------------------------------------------------------------------------
2495 * Performance requirements:
2496 *
2497 * This is a non-blocking call. The framework will return from this call within 5ms.
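 *
 * For illustration only, a HAL that fails to fill one output buffer might
 * report it roughly as follows (a non-normative sketch; ops is the saved
 * camera3_callback_ops_t pointer, frame_number identifies the request, and
 * buf is the failed camera3_stream_buffer_t copied from the request, whose
 * acquire fence the HAL never waited on):
 *
 *     // Signal the buffer error for this frame...
 *     camera3_notify_msg_t msg;
 *     msg.type = CAMERA3_MSG_ERROR;
 *     msg.message.error.frame_number = frame_number;
 *     msg.message.error.error_stream = buf.stream;
 *     msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
 *     ops->notify(ops, &msg);
 *
 *     // ...then still return the buffer via process_capture_result(), in the
 *     // error state, handing the unwaited acquire fence back as the release
 *     // fence so the framework can wait on it before reusing the buffer.
 *     buf.status = CAMERA3_BUFFER_STATUS_ERROR;
 *     buf.release_fence = buf.acquire_fence;
 *     buf.acquire_fence = -1;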
2498 */ 2499 void (*notify)(const struct camera3_callback_ops *, 2500 const camera3_notify_msg_t *msg); 2501 2502 } camera3_callback_ops_t; 2503 2504 /********************************************************************** 2505 * 2506 * Camera device operations 2507 * 2508 */ 2509 typedef struct camera3_device_ops { 2510 2511 /** 2512 * initialize: 2513 * 2514 * One-time initialization to pass framework callback function pointers to 2515 * the HAL. Will be called once after a successful open() call, before any 2516 * other functions are called on the camera3_device_ops structure. 2517 * 2518 * Performance requirements: 2519 * 2520 * This should be a non-blocking call. The HAL should return from this call 2521 * in 5ms, and must return from this call in 10ms. 2522 * 2523 * Return values: 2524 * 2525 * 0: On successful initialization 2526 * 2527 * -ENODEV: If initialization fails. Only close() can be called successfully 2528 * by the framework after this. 2529 */ 2530 int (*initialize)(const struct camera3_device *, 2531 const camera3_callback_ops_t *callback_ops); 2532 2533 /********************************************************************** 2534 * Stream management 2535 */ 2536 2537 /** 2538 * configure_streams: 2539 * 2540 * CAMERA_DEVICE_API_VERSION_3_0 only: 2541 * 2542 * Reset the HAL camera device processing pipeline and set up new input and 2543 * output streams. This call replaces any existing stream configuration with 2544 * the streams defined in the stream_list. This method will be called at 2545 * least once after initialize() before a request is submitted with 2546 * process_capture_request(). 2547 * 2548 * The stream_list must contain at least one output-capable stream, and may 2549 * not contain more than one input-capable stream. 2550 * 2551 * The stream_list may contain streams that are also in the currently-active 2552 * set of streams (from the previous call to configure_stream()). These 2553 * streams will already have valid values for usage, max_buffers, and the 2554 * private pointer. 2555 * 2556 * If such a stream has already had its buffers registered, 2557 * register_stream_buffers() will not be called again for the stream, and 2558 * buffers from the stream can be immediately included in input requests. 2559 * 2560 * If the HAL needs to change the stream configuration for an existing 2561 * stream due to the new configuration, it may rewrite the values of usage 2562 * and/or max_buffers during the configure call. 2563 * 2564 * The framework will detect such a change, and will then reallocate the 2565 * stream buffers, and call register_stream_buffers() again before using 2566 * buffers from that stream in a request. 2567 * 2568 * If a currently-active stream is not included in stream_list, the HAL may 2569 * safely remove any references to that stream. It will not be reused in a 2570 * later configure() call by the framework, and all the gralloc buffers for 2571 * it will be freed after the configure_streams() call returns. 2572 * 2573 * The stream_list structure is owned by the framework, and may not be 2574 * accessed once this call completes. The address of an individual 2575 * camera3_stream_t structure will remain valid for access by the HAL until 2576 * the end of the first configure_stream() call which no longer includes 2577 * that camera3_stream_t in the stream_list argument. 
The HAL may not change 2578 * values in the stream structure outside of the private pointer, except for 2579 * the usage and max_buffers members during the configure_streams() call 2580 * itself. 2581 * 2582 * If the stream is new, the usage, max_buffers, and private pointer fields 2583 * of the stream structure will all be set to 0. The HAL device must set 2584 * these fields before the configure_streams() call returns. These fields 2585 * are then used by the framework and the platform gralloc module to 2586 * allocate the gralloc buffers for each stream. 2587 * 2588 * Before such a new stream can have its buffers included in a capture 2589 * request, the framework will call register_stream_buffers() with that 2590 * stream. However, the framework is not required to register buffers for 2591 * _all_ streams before submitting a request. This allows for quick startup 2592 * of (for example) a preview stream, with allocation for other streams 2593 * happening later or concurrently. 2594 * 2595 * ------------------------------------------------------------------------ 2596 * CAMERA_DEVICE_API_VERSION_3_1 only: 2597 * 2598 * Reset the HAL camera device processing pipeline and set up new input and 2599 * output streams. This call replaces any existing stream configuration with 2600 * the streams defined in the stream_list. This method will be called at 2601 * least once after initialize() before a request is submitted with 2602 * process_capture_request(). 2603 * 2604 * The stream_list must contain at least one output-capable stream, and may 2605 * not contain more than one input-capable stream. 2606 * 2607 * The stream_list may contain streams that are also in the currently-active 2608 * set of streams (from the previous call to configure_streams()). These 2609 * streams will already have valid values for usage, max_buffers, and the 2610 * private pointer. 2611 * 2612 * If such a stream has already had its buffers registered, 2613 * register_stream_buffers() will not be called again for the stream, and 2614 * buffers from the stream can be immediately included in input requests. 2615 * 2616 * If the HAL needs to change the stream configuration for an existing 2617 * stream due to the new configuration, it may rewrite the values of usage 2618 * and/or max_buffers during the configure call. 2619 * 2620 * The framework will detect such a change, and will then reallocate the 2621 * stream buffers, and call register_stream_buffers() again before using 2622 * buffers from that stream in a request. 2623 * 2624 * If a currently-active stream is not included in stream_list, the HAL may 2625 * safely remove any references to that stream. It will not be reused in a 2626 * later configure_streams() call by the framework, and all the gralloc buffers for 2627 * it will be freed after the configure_streams() call returns. 2628 * 2629 * The stream_list structure is owned by the framework, and may not be 2630 * accessed once this call completes. The address of an individual 2631 * camera3_stream_t structure will remain valid for access by the HAL until 2632 * the end of the first configure_streams() call which no longer includes 2633 * that camera3_stream_t in the stream_list argument. The HAL may not change 2634 * values in the stream structure outside of the private pointer, except for 2635 * the usage and max_buffers members during the configure_streams() call 2636 * itself. 2637 * 2638 * If the stream is new, the max_buffers and private pointer fields of the 2639 * stream structure will all be set to 0.
The usage will be set to the 2640 * consumer usage flags. The HAL device must set these fields before the 2641 * configure_streams() call returns. These fields are then used by the 2642 * framework and the platform gralloc module to allocate the gralloc 2643 * buffers for each stream. 2644 * 2645 * Before such a new stream can have its buffers included in a capture 2646 * request, the framework will call register_stream_buffers() with that 2647 * stream. However, the framework is not required to register buffers for 2648 * _all_ streams before submitting a request. This allows for quick startup 2649 * of (for example) a preview stream, with allocation for other streams 2650 * happening later or concurrently. 2651 * 2652 * ------------------------------------------------------------------------ 2653 * >= CAMERA_DEVICE_API_VERSION_3_2: 2654 * 2655 * Reset the HAL camera device processing pipeline and set up new input and 2656 * output streams. This call replaces any existing stream configuration with 2657 * the streams defined in the stream_list. This method will be called at 2658 * least once after initialize() before a request is submitted with 2659 * process_capture_request(). 2660 * 2661 * The stream_list must contain at least one output-capable stream, and may 2662 * not contain more than one input-capable stream. 2663 * 2664 * The stream_list may contain streams that are also in the currently-active 2665 * set of streams (from the previous call to configure_streams()). These 2666 * streams will already have valid values for usage, max_buffers, and the 2667 * private pointer. 2668 * 2669 * If the HAL needs to change the stream configuration for an existing 2670 * stream due to the new configuration, it may rewrite the values of usage 2671 * and/or max_buffers during the configure call. 2672 * 2673 * The framework will detect such a change, and may then reallocate the 2674 * stream buffers before using buffers from that stream in a request. 2675 * 2676 * If a currently-active stream is not included in stream_list, the HAL may 2677 * safely remove any references to that stream. It will not be reused in a 2678 * later configure_streams() call by the framework, and all the gralloc buffers for 2679 * it will be freed after the configure_streams() call returns. 2680 * 2681 * The stream_list structure is owned by the framework, and may not be 2682 * accessed once this call completes. The address of an individual 2683 * camera3_stream_t structure will remain valid for access by the HAL until 2684 * the end of the first configure_streams() call which no longer includes 2685 * that camera3_stream_t in the stream_list argument. The HAL may not change 2686 * values in the stream structure outside of the private pointer, except for 2687 * the usage and max_buffers members during the configure_streams() call 2688 * itself. 2689 * 2690 * If the stream is new, the max_buffers and private pointer fields of the 2691 * stream structure will all be set to 0. The usage will be set to the 2692 * consumer usage flags. The HAL device must set these fields before the 2693 * configure_streams() call returns. These fields are then used by the 2694 * framework and the platform gralloc module to allocate the gralloc 2695 * buffers for each stream. 2696 * 2697 * Newly allocated buffers may be included in a capture request at any time 2698 * by the framework.
Once a gralloc buffer is returned to the framework 2699 * with process_capture_result (and its respective release_fence has been 2700 * signaled) the framework may free or reuse it at any time. 2701 * 2702 * ------------------------------------------------------------------------ 2703 * 2704 * Preconditions: 2705 * 2706 * The framework will only call this method when no captures are being 2707 * processed. That is, all results have been returned to the framework, and 2708 * all in-flight input and output buffers have been returned and their 2709 * release sync fences have been signaled by the HAL. The framework will not 2710 * submit new requests for capture while the configure_streams() call is 2711 * underway. 2712 * 2713 * Postconditions: 2714 * 2715 * The HAL device must configure itself to provide maximum possible output 2716 * frame rate given the sizes and formats of the output streams, as 2717 * documented in the camera device's static metadata. 2718 * 2719 * Performance requirements: 2720 * 2721 * This call is expected to be heavyweight and possibly take several hundred 2722 * milliseconds to complete, since it may require resetting and 2723 * reconfiguring the image sensor and the camera processing pipeline. 2724 * Nevertheless, the HAL device should attempt to minimize the 2725 * reconfiguration delay to minimize the user-visible pauses during 2726 * application operational mode changes (such as switching from still 2727 * capture to video recording). 2728 * 2729 * The HAL should return from this call in 500ms, and must return from this 2730 * call in 1000ms. 2731 * 2732 * Return values: 2733 * 2734 * 0: On successful stream configuration 2735 * 2736 * -EINVAL: If the requested stream configuration is invalid. Some examples 2737 * of invalid stream configurations include: 2738 * 2739 * - Including more than 1 input-capable stream (INPUT or 2740 * BIDIRECTIONAL) 2741 * 2742 * - Not including any output-capable streams (OUTPUT or 2743 * BIDIRECTIONAL) 2744 * 2745 * - Including streams with unsupported formats, or an unsupported 2746 * size for that format. 2747 * 2748 * - Including too many output streams of a certain format. 2749 * 2750 * - Unsupported rotation configuration (only applies to 2751 * devices with version >= CAMERA_DEVICE_API_VERSION_3_3) 2752 * 2753 * - Stream sizes/formats don't satisfy the 2754 * camera3_stream_configuration_t->operation_mode requirements for non-NORMAL mode, 2755 * or the requested operation_mode is not supported by the HAL. 2756 * (only applies to devices with version >= CAMERA_DEVICE_API_VERSION_3_3) 2757 * 2758 * Note that the framework submitting an invalid stream 2759 * configuration is not normal operation, since stream 2760 * configurations are checked before configure. An invalid 2761 * configuration means that a bug exists in the framework code, or 2762 * there is a mismatch between the HAL's static metadata and the 2763 * requirements on streams. 2764 * 2765 * -ENODEV: If there has been a fatal error and the device is no longer 2766 * operational. Only close() can be called successfully by the 2767 * framework after this error is returned. 2768 */ 2769 int (*configure_streams)(const struct camera3_device *, 2770 camera3_stream_configuration_t *stream_list); 2771 2772 /** 2773 * register_stream_buffers: 2774 * 2775 * >= CAMERA_DEVICE_API_VERSION_3_2: 2776 * 2777 * DEPRECATED. This will not be called and must be set to NULL. 
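 *
 * Illustrative sketch (not part of this header): a HAL targeting
 * CAMERA_DEVICE_API_VERSION_3_2 or newer simply leaves this entry NULL in its
 * ops table. The example_* handler names below are hypothetical.
 *
 *    static camera3_device_ops_t example_camera3_ops = {
 *        .initialize                  = example_initialize,
 *        .configure_streams           = example_configure_streams,
 *        .register_stream_buffers     = NULL,  // deprecated for >= 3.2
 *        .construct_default_request_settings =
 *                example_construct_default_request_settings,
 *        .process_capture_request     = example_process_capture_request,
 *        .get_metadata_vendor_tag_ops = NULL,  // deprecated for >= 3.2
 *        .dump                        = example_dump,
 *        .flush                       = example_flush,
 *    };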
2778 * 2779 * <= CAMERA_DEVICE_API_VERSION_3_1: 2780 * 2781 * Register buffers for a given stream with the HAL device. This method is 2782 * called by the framework after a new stream is defined by 2783 * configure_streams, and before buffers from that stream are included in a 2784 * capture request. If the same stream is listed in a subsequent 2785 * configure_streams() call, register_stream_buffers will _not_ be called 2786 * again for that stream. 2787 * 2788 * The framework does not need to register buffers for all configured 2789 * streams before it submits the first capture request. This allows quick 2790 * startup for preview (or similar use cases) while other streams are still 2791 * being allocated. 2792 * 2793 * This method is intended to allow the HAL device to map or otherwise 2794 * prepare the buffers for later use. The buffers passed in will already be 2795 * locked for use. At the end of the call, all the buffers must be ready to 2796 * be returned to the stream. The buffer_set argument is only valid for the 2797 * duration of this call. 2798 * 2799 * If the stream format was set to HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2800 * the camera HAL should inspect the passed-in buffers here to determine any 2801 * platform-private pixel format information. 2802 * 2803 * Performance requirements: 2804 * 2805 * This should be a non-blocking call. The HAL should return from this call 2806 * in 1ms, and must return from this call in 5ms. 2807 * 2808 * Return values: 2809 * 2810 * 0: On successful registration of the new stream buffers 2811 * 2812 * -EINVAL: If the stream_buffer_set does not refer to a valid active 2813 * stream, or if the buffers array is invalid. 2814 * 2815 * -ENOMEM: If there was a failure in registering the buffers. The framework 2816 * must consider all the stream buffers to be unregistered, and can 2817 * try to register again later. 2818 * 2819 * -ENODEV: If there is a fatal error, and the device is no longer 2820 * operational. Only close() can be called successfully by the 2821 * framework after this error is returned. 2822 */ 2823 int (*register_stream_buffers)(const struct camera3_device *, 2824 const camera3_stream_buffer_set_t *buffer_set); 2825 2826 /********************************************************************** 2827 * Request creation and submission 2828 */ 2829 2830 /** 2831 * construct_default_request_settings: 2832 * 2833 * Create capture settings for standard camera use cases. 2834 * 2835 * The device must return a settings buffer that is configured to meet the 2836 * requested use case, which must be one of the CAMERA3_TEMPLATE_* 2837 * enums. All request control fields must be included. 2838 * 2839 * The HAL retains ownership of this structure, but the pointer to the 2840 * structure must be valid until the device is closed. The framework and the 2841 * HAL may not modify the buffer once it is returned by this call. The same 2842 * buffer may be returned for subsequent calls for the same template, or for 2843 * other templates. 2844 * 2845 * Performance requirements: 2846 * 2847 * This should be a non-blocking call. The HAL should return from this call 2848 * in 1ms, and must return from this call in 5ms. 2849 * 2850 * Return values: 2851 * 2852 * Valid metadata: On successful creation of a default settings 2853 * buffer. 2854 * 2855 * NULL: In case of a fatal error. After this is returned, only 2856 * the close() method can be called successfully by the 2857 * framework. 
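 *
 * Illustrative sketch (not part of this header): since the same buffer may be
 * returned for repeated calls and ownership stays with the HAL, a HAL may
 * lazily build and then cache one settings buffer per template. The hal_ctx_t
 * type and example_build_template() helper are hypothetical.
 *
 *    static const camera_metadata_t* example_construct_default_request_settings(
 *            const struct camera3_device *dev, int type) {
 *        hal_ctx_t *ctx = (hal_ctx_t *) dev->priv;
 *        // Sketch only: assumes type is a standard CAMERA3_TEMPLATE_* value.
 *        if (ctx->default_settings[type] == NULL) {
 *            // Build a buffer with every request control field populated for
 *            // this use case; it stays valid until the device is closed.
 *            ctx->default_settings[type] = example_build_template(ctx, type);
 *        }
 *        return ctx->default_settings[type];
 *    }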
2858 */ 2859 const camera_metadata_t* (*construct_default_request_settings)( 2860 const struct camera3_device *, 2861 int type); 2862 2863 /** 2864 * process_capture_request: 2865 * 2866 * Send a new capture request to the HAL. The HAL should not return from 2867 * this call until it is ready to accept the next request to process. Only 2868 * one call to process_capture_request() will be made at a time by the 2869 * framework, and the calls will all be from the same thread. The next call 2870 * to process_capture_request() will be made as soon as a new request and 2871 * its associated buffers are available. In a normal preview scenario, this 2872 * means the function will be called again by the framework almost 2873 * instantly. 2874 * 2875 * The actual request processing is asynchronous, with the results of 2876 * capture being returned by the HAL through the process_capture_result() 2877 * call. This call requires the result metadata to be available, but output 2878 * buffers may simply provide sync fences to wait on. Multiple requests are 2879 * expected to be in flight at once, to maintain full output frame rate. 2880 * 2881 * The framework retains ownership of the request structure. It is only 2882 * guaranteed to be valid during this call. The HAL device must make copies 2883 * of the information it needs to retain for the capture processing. The HAL 2884 * is responsible for waiting on and closing the buffers' fences and 2885 * returning the buffer handles to the framework. 2886 * 2887 * The HAL must write the file descriptor for the input buffer's release 2888 * sync fence into input_buffer->release_fence, if input_buffer is not 2889 * NULL. If the HAL returns -1 for the input buffer release sync fence, the 2890 * framework is free to immediately reuse the input buffer. Otherwise, the 2891 * framework will wait on the sync fence before refilling and reusing the 2892 * input buffer. 2893 * 2894 * >= CAMERA_DEVICE_API_VERSION_3_2: 2895 * 2896 * The input/output buffers provided by the framework in each request 2897 * may be brand new (having never before been seen by the HAL). 2898 * 2899 * ------------------------------------------------------------------------ 2900 * Performance considerations: 2901 * 2902 * Handling a new buffer should be extremely lightweight and there should be 2903 * no frame rate degradation or frame jitter introduced. 2904 * 2905 * This call must return fast enough to ensure that the requested frame 2906 * rate can be sustained, especially for streaming cases (post-processing 2907 * quality settings set to FAST). The HAL should return from this call in 1 2908 * frame interval, and must return from this call in 4 frame intervals. 2909 * 2910 * Return values: 2911 * 2912 * 0: On a successful start to processing the capture request 2913 * 2914 * -EINVAL: If the input is malformed (the settings are NULL when not 2915 * allowed, there are 0 output buffers, etc.) and capture processing 2916 * cannot start. Failures during request processing should be 2917 * handled by calling camera3_callback_ops_t.notify(). In case of 2918 * this error, the framework will retain responsibility for the 2919 * stream buffers' fences and the buffer handles; the HAL should 2920 * not close the fences or return these buffers with 2921 * process_capture_result. 2922 * 2923 * -ENODEV: If the camera device has encountered a serious error. After this 2924 * error is returned, only the close() method can be successfully 2925 * called by the framework.
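 *
 * Illustrative sketch (not part of this header): because the request structure
 * is only valid during the call, the HAL copies what it needs and queues the
 * work for its own pipeline before returning. The hal_ctx_t and
 * hal_pending_capture_t types, and the hal_copy_request() and
 * hal_enqueue_capture() helpers, are hypothetical.
 *
 *    static int example_process_capture_request(
 *            const struct camera3_device *dev,
 *            camera3_capture_request_t *request) {
 *        hal_ctx_t *ctx = (hal_ctx_t *) dev->priv;
 *        if (request == NULL || request->output_buffers == NULL ||
 *                request->num_output_buffers == 0 ||
 *                (request->settings == NULL && !ctx->have_previous_settings)) {
 *            // Malformed request: the framework keeps responsibility for the
 *            // buffer handles and fences in this case.
 *            return -EINVAL;
 *        }
 *        // Copy the settings, frame number, and stream buffer entries; the
 *        // HAL now owns waiting on and closing the acquire fences.
 *        hal_pending_capture_t *capture = hal_copy_request(ctx, request);
 *        if (capture == NULL)
 *            return -ENODEV;
 *        // Processing is asynchronous; results are delivered later through
 *        // process_capture_result() and notify().
 *        hal_enqueue_capture(ctx, capture);
 *        return 0;
 *    }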
2926 * 2927 */ 2928 int (*process_capture_request)(const struct camera3_device *, 2929 camera3_capture_request_t *request); 2930 2931 /********************************************************************** 2932 * Miscellaneous methods 2933 */ 2934 2935 /** 2936 * get_metadata_vendor_tag_ops: 2937 * 2938 * Get methods to query for vendor extension metadata tag information. The 2939 * HAL should fill in all the vendor tag operation methods, or leave ops 2940 * unchanged if no vendor tags are defined. 2941 * 2942 * The definition of vendor_tag_query_ops_t can be found in 2943 * system/media/camera/include/system/camera_metadata.h. 2944 * 2945 * >= CAMERA_DEVICE_API_VERSION_3_2: 2946 * DEPRECATED. This function has been deprecated and should be set to 2947 * NULL by the HAL. Please implement get_vendor_tag_ops in camera_common.h 2948 * instead. 2949 */ 2950 void (*get_metadata_vendor_tag_ops)(const struct camera3_device*, 2951 vendor_tag_query_ops_t* ops); 2952 2953 /** 2954 * dump: 2955 * 2956 * Print out debugging state for the camera device. This will be called by 2957 * the framework when the camera service is asked for a debug dump, which 2958 * happens when using the dumpsys tool, or when capturing a bugreport. 2959 * 2960 * The passed-in file descriptor can be used to write debugging text using 2961 * dprintf() or write(). The text should be in ASCII encoding only. 2962 * 2963 * Performance requirements: 2964 * 2965 * This must be a non-blocking call. The HAL should return from this call 2966 * in 1ms, and must return from this call in 10ms. This call must avoid 2967 * deadlocks, as it may be called at any point during camera operation. 2968 * Any synchronization primitives used (such as mutex locks or semaphores) 2969 * should be acquired with a timeout. 2970 */ 2971 void (*dump)(const struct camera3_device *, int fd); 2972 2973 /** 2974 * flush: 2975 * 2976 * Flush all currently in-process captures and all buffers in the pipeline 2977 * on the given device. The framework will use this to dump all state as 2978 * quickly as possible in order to prepare for a configure_streams() call. 2979 * 2980 * No buffers are required to be successfully returned, so every buffer 2981 * held at the time of flush() (whether successfully filled or not) may be 2982 * returned with CAMERA3_BUFFER_STATUS_ERROR. Note the HAL is still allowed 2983 * to return valid (CAMERA3_BUFFER_STATUS_OK) buffers during this call, 2984 * provided they are successfully filled. 2985 * 2986 * All requests currently in the HAL are expected to be returned as soon as 2987 * possible. Not-in-process requests should return errors immediately. Any 2988 * interruptible hardware blocks should be stopped, and any uninterruptible 2989 * blocks should be waited on. 2990 * 2991 * flush() may be called concurrently with process_capture_request(), with the expectation that 2992 * process_capture_request will return quickly and the request submitted in that 2993 * process_capture_request call is treated like all other in-flight requests. Due to 2994 * concurrency issues, it is possible that from the HAL's point of view, a 2995 * process_capture_request() call may be started after flush has been invoked but has not 2996 * returned yet. If such a call happens before flush() returns, the HAL should treat the new 2997 * capture request like other in-flight pending requests (see #4 below). 2998 * 2999 * More specifically, the HAL must follow the requirements below for the various cases: 3000 * 3001 * 1.
For captures that are too late for the HAL to cancel/stop and that will therefore be 3002 * completed normally by the HAL, no special handling is needed; i.e. the HAL can send the shutter notify() and 3003 * process_capture_result calls and buffers as normal. 3004 * 3005 * 2. For pending requests that have not done any processing, the HAL must call notify() with 3006 * CAMERA3_MSG_ERROR_REQUEST, and return all the output buffers with 3007 * process_capture_result in the error state (CAMERA3_BUFFER_STATUS_ERROR). 3008 * The HAL must not place the release fences into an error state; instead, 3009 * the release fences must be set to the acquire fences passed by the framework, 3010 * or -1 if they have already been waited on by the HAL (see the sketch following 3011 * this list). This is also the path to follow for any captures for which the HAL already called notify() with 3012 * CAMERA3_MSG_SHUTTER but will not be producing any metadata or valid buffers. 3013 * After CAMERA3_MSG_ERROR_REQUEST, for a given frame, only process_capture_result calls with 3014 * buffers in CAMERA3_BUFFER_STATUS_ERROR are allowed. No further notify() calls or 3015 * process_capture_result calls with non-null metadata are allowed. 3016 * 3017 * 3. For partially completed pending requests that will be missing some of the output 3018 * buffers or some of the result metadata, the HAL should do the following: 3019 * 3020 * 3.1. Call notify with CAMERA3_MSG_ERROR_RESULT if some of the expected result 3021 * metadata (i.e. one or more partial metadata) won't be available for the capture. 3022 * 3023 * 3.2. Call notify with CAMERA3_MSG_ERROR_BUFFER for every buffer that won't 3024 * be produced for the capture. 3025 * 3026 * 3.3. Call notify with CAMERA3_MSG_SHUTTER with the capture timestamp before 3027 * any buffers/metadata are returned with process_capture_result. 3028 * 3029 * 3.4. For captures that will produce some results, the HAL must not call 3030 * CAMERA3_MSG_ERROR_REQUEST, since that indicates complete failure. 3031 * 3032 * 3.5. Valid buffers/metadata should be passed to the framework as normal. 3033 * 3034 * 3.6. Failed buffers should be returned to the framework as described for case 2. 3035 * However, failed buffers do not have to follow the strict ordering that valid buffers do, 3036 * and may be out-of-order with respect to valid buffers. For example, if buffers 3037 * A, B, C, D, E are sent and D and E have failed, then A, E, B, D, C is an acceptable 3038 * return order. 3039 * 3040 * 3.7. For fully-missing metadata, calling CAMERA3_MSG_ERROR_RESULT is sufficient; there is no 3041 * need to call process_capture_result with NULL metadata or equivalent. 3042 * 3043 * 4. If a flush() is invoked while a process_capture_request() invocation is active, that 3044 * process_capture_request() call should return as soon as possible. In addition, if a process_capture_request() 3045 * call is made after flush() has been invoked but before flush() has returned, the 3046 * capture request provided by the late process_capture_request call should be treated like 3047 * a pending request in case #2 above. 3048 * 3049 * flush() should only return when there are no more outstanding buffers or 3050 * requests left in the HAL. The framework may call configure_streams (as 3051 * the HAL state is now quiesced) or may issue new requests. 3052 * 3053 * Note that it's sufficient to only support fully-succeeded and fully-failed result cases. 3054 * However, it is highly desirable to support the partial failure cases as well, as 3055 * doing so can improve the overall performance of the flush call.
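 *
 * Illustrative sketch (not part of this header): how a HAL might fail out one
 * pending, unprocessed request during flush(), following case 2 above. The
 * hal_ctx_t and hal_pending_capture_t types are hypothetical HAL-internal
 * structures holding the callback pointers and the copied request state.
 *
 *    static void example_fail_pending_capture(hal_ctx_t *ctx,
 *                                             hal_pending_capture_t *capture) {
 *        camera3_notify_msg_t msg;
 *        memset(&msg, 0, sizeof(msg));
 *        msg.type = CAMERA3_MSG_ERROR;
 *        msg.message.error.frame_number = capture->frame_number;
 *        msg.message.error.error_stream = NULL;
 *        msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
 *        ctx->callback_ops->notify(ctx->callback_ops, &msg);
 *
 *        for (uint32_t i = 0; i < capture->num_output_buffers; i++) {
 *            capture->output_buffers[i].status = CAMERA3_BUFFER_STATUS_ERROR;
 *            // Hand the un-waited acquire fence back as the release fence,
 *            // or -1 if the HAL has already waited on it.
 *            capture->output_buffers[i].release_fence =
 *                    capture->output_buffers[i].acquire_fence;
 *            capture->output_buffers[i].acquire_fence = -1;
 *        }
 *
 *        camera3_capture_result_t result;
 *        memset(&result, 0, sizeof(result));
 *        result.frame_number = capture->frame_number;
 *        result.result = NULL;  // no metadata is returned for the failed request
 *        result.num_output_buffers = capture->num_output_buffers;
 *        result.output_buffers = capture->output_buffers;
 *        ctx->callback_ops->process_capture_result(ctx->callback_ops, &result);
 *    }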
3056 * 3057 * Performance requirements: 3058 * 3059 * The HAL should return from this call in 100ms, and must return from this 3060 * call in 1000ms. Additionally, this call must not block for longer than the pipeline 3061 * latency (see S7 for definition). 3062 * 3063 * Version information: 3064 * 3065 * Only available if device version >= CAMERA_DEVICE_API_VERSION_3_1. 3066 * 3067 * Return values: 3068 * 3069 * 0: On a successful flush of the camera HAL. 3070 * 3071 * -EINVAL: If the input is malformed (the device is not valid). 3072 * 3073 * -ENODEV: If the camera device has encountered a serious error. After this 3074 * error is returned, only the close() method can be successfully 3075 * called by the framework. 3076 */ 3077 int (*flush)(const struct camera3_device *); 3078 3079 /* reserved for future use */ 3080 void *reserved[8]; 3081 } camera3_device_ops_t; 3082 3083 /********************************************************************** 3084 * 3085 * Camera device definition 3086 * 3087 */ 3088 typedef struct camera3_device { 3089 /** 3090 * common.version must equal the CAMERA_DEVICE_API_VERSION_3_x value for the HAL 3091 * version this device implements (CAMERA_DEVICE_API_VERSION_3_4 for this version of the header). 3092 * 3093 * Performance requirements: 3094 * 3095 * Camera open (common.module->common.methods->open) should return in 200ms, and must return 3096 * in 500ms. 3097 * Camera close (common.close) should return in 200ms, and must return in 500ms. 3098 * 3099 */ 3100 hw_device_t common; 3101 camera3_device_ops_t *ops; 3102 void *priv; 3103 } camera3_device_t; 3104 3105 __END_DECLS 3106 3107 #endif /* ANDROID_INCLUDE_CAMERA3_H */ 3108