1 /* 2 * Copyright (C) 2013 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17 #ifndef ANDROID_INCLUDE_CAMERA3_H 18 #define ANDROID_INCLUDE_CAMERA3_H 19 20 #include <system/camera_metadata.h> 21 #include "camera_common.h" 22 23 /** 24 * Camera device HAL 3.3 [ CAMERA_DEVICE_API_VERSION_3_3 ] 25 * 26 * This is the current recommended version of the camera device HAL. 27 * 28 * Supports the android.hardware.Camera API, and as of v3.2, the 29 * android.hardware.camera2 API in LIMITED or FULL modes. 30 * 31 * Camera devices that support this version of the HAL must return 32 * CAMERA_DEVICE_API_VERSION_3_3 in camera_device_t.common.version and in 33 * camera_info_t.device_version (from camera_module_t.get_camera_info). 34 * 35 * CAMERA_DEVICE_API_VERSION_3_3: 36 * Camera modules that may contain version 3.3 devices must implement at 37 * least version 2.2 of the camera module interface (as defined by 38 * camera_module_t.common.module_api_version). 39 * 40 * CAMERA_DEVICE_API_VERSION_3_2: 41 * Camera modules that may contain version 3.2 devices must implement at 42 * least version 2.2 of the camera module interface (as defined by 43 * camera_module_t.common.module_api_version). 44 * 45 * <= CAMERA_DEVICE_API_VERSION_3_1: 46 * Camera modules that may contain version 3.1 (or 3.0) devices must 47 * implement at least version 2.0 of the camera module interface 48 * (as defined by camera_module_t.common.module_api_version). 49 * 50 * See camera_common.h for more versioning details. 51 * 52 * Documentation index: 53 * S1. Version history 54 * S2. Startup and operation sequencing 55 * S3. Operational modes 56 * S4. 3A modes and state machines 57 * S5. Cropping 58 * S6. Error management 59 * S7. Key Performance Indicator (KPI) glossary 60 * S8. Sample Use Cases 61 * S9. Notes on Controls and Metadata 62 * S10. Reprocessing flow and controls 63 */ 64 65 /** 66 * S1. Version history: 67 * 68 * 1.0: Initial Android camera HAL (Android 4.0) [camera.h]: 69 * 70 * - Converted from C++ CameraHardwareInterface abstraction layer. 71 * 72 * - Supports android.hardware.Camera API. 73 * 74 * 2.0: Initial release of expanded-capability HAL (Android 4.2) [camera2.h]: 75 * 76 * - Sufficient for implementing existing android.hardware.Camera API. 77 * 78 * - Allows for ZSL queue in camera service layer 79 * 80 * - Not tested for any new features such manual capture control, Bayer RAW 81 * capture, reprocessing of RAW data. 82 * 83 * 3.0: First revision of expanded-capability HAL: 84 * 85 * - Major version change since the ABI is completely different. No change to 86 * the required hardware capabilities or operational model from 2.0. 87 * 88 * - Reworked input request and stream queue interfaces: Framework calls into 89 * HAL with next request and stream buffers already dequeued. Sync framework 90 * support is included, necessary for efficient implementations. 91 * 92 * - Moved triggers into requests, most notifications into results. 
93 * 94 * - Consolidated all callbacks into framework into one structure, and all 95 * setup methods into a single initialize() call. 96 * 97 * - Made stream configuration into a single call to simplify stream 98 * management. Bidirectional streams replace STREAM_FROM_STREAM construct. 99 * 100 * - Limited mode semantics for older/limited hardware devices. 101 * 102 * 3.1: Minor revision of expanded-capability HAL: 103 * 104 * - configure_streams passes consumer usage flags to the HAL. 105 * 106 * - flush call to drop all in-flight requests/buffers as fast as possible. 107 * 108 * 3.2: Minor revision of expanded-capability HAL: 109 * 110 * - Deprecates get_metadata_vendor_tag_ops. Please use get_vendor_tag_ops 111 * in camera_common.h instead. 112 * 113 * - register_stream_buffers deprecated. All gralloc buffers provided 114 * by framework to HAL in process_capture_request may be new at any time. 115 * 116 * - add partial result support. process_capture_result may be called 117 * multiple times with a subset of the available result before the full 118 * result is available. 119 * 120 * - add manual template to camera3_request_template. The applications may 121 * use this template to control the capture settings directly. 122 * 123 * - Rework the bidirectional and input stream specifications. 124 * 125 * - change the input buffer return path. The buffer is returned in 126 * process_capture_result instead of process_capture_request. 127 * 128 * 3.3: Minor revision of expanded-capability HAL: 129 * 130 * - OPAQUE and YUV reprocessing API updates. 131 * 132 * - Basic support for depth output buffers. 133 * 134 * - Addition of data_space field to camera3_stream_t. 135 * 136 * - Addition of rotation field to camera3_stream_t. 137 * 138 * - Addition of camera3 stream configuration operation mode to camera3_stream_configuration_t 139 * 140 */ 141 142 /** 143 * S2. Startup and general expected operation sequence: 144 * 145 * 1. Framework calls camera_module_t->common.open(), which returns a 146 * hardware_device_t structure. 147 * 148 * 2. Framework inspects the hardware_device_t->version field, and instantiates 149 * the appropriate handler for that version of the camera hardware device. In 150 * case the version is CAMERA_DEVICE_API_VERSION_3_0, the device is cast to 151 * a camera3_device_t. 152 * 153 * 3. Framework calls camera3_device_t->ops->initialize() with the framework 154 * callback function pointers. This will only be called this one time after 155 * open(), before any other functions in the ops structure are called. 156 * 157 * 4. The framework calls camera3_device_t->ops->configure_streams() with a list 158 * of input/output streams to the HAL device. 159 * 160 * 5. <= CAMERA_DEVICE_API_VERSION_3_1: 161 * 162 * The framework allocates gralloc buffers and calls 163 * camera3_device_t->ops->register_stream_buffers() for at least one of the 164 * output streams listed in configure_streams. The same stream is registered 165 * only once. 166 * 167 * >= CAMERA_DEVICE_API_VERSION_3_2: 168 * 169 * camera3_device_t->ops->register_stream_buffers() is not called and must 170 * be NULL. 171 * 172 * 6. The framework requests default settings for some number of use cases with 173 * calls to camera3_device_t->ops->construct_default_request_settings(). This 174 * may occur any time after step 3. 175 * 176 * 7. 
The framework constructs and sends the first capture request to the HAL, 177 * with settings based on one of the sets of default settings, and with at 178 * least one output stream, which has been registered earlier by the 179 * framework. This is sent to the HAL with 180 * camera3_device_t->ops->process_capture_request(). The HAL must block the 181 * return of this call until it is ready for the next request to be sent. 182 * 183 * >= CAMERA_DEVICE_API_VERSION_3_2: 184 * 185 * The buffer_handle_t provided in the camera3_stream_buffer_t array 186 * in the camera3_capture_request_t may be new and never-before-seen 187 * by the HAL on any given new request. 188 * 189 * 8. The framework continues to submit requests, and call 190 * construct_default_request_settings to get default settings buffers for 191 * other use cases. 192 * 193 * <= CAMERA_DEVICE_API_VERSION_3_1: 194 * 195 * The framework may call register_stream_buffers() at this time for 196 * not-yet-registered streams. 197 * 198 * 9. When the capture of a request begins (sensor starts exposing for the 199 * capture) or processing a reprocess request begins, the HAL 200 * calls camera3_callback_ops_t->notify() with the SHUTTER event, including 201 * the frame number and the timestamp for start of exposure. For a reprocess 202 * request, the timestamp must be the start of exposure of the input image 203 * which can be looked up with android.sensor.timestamp from 204 * camera3_capture_request_t.settings when process_capture_request() is 205 * called. 206 * 207 * <= CAMERA_DEVICE_API_VERSION_3_1: 208 * 209 * This notify call must be made before the first call to 210 * process_capture_result() for that frame number. 211 * 212 * >= CAMERA_DEVICE_API_VERSION_3_2: 213 * 214 * The camera3_callback_ops_t->notify() call with the SHUTTER event should 215 * be made as early as possible since the framework will be unable to 216 * deliver gralloc buffers to the application layer (for that frame) until 217 * it has a valid timestamp for the start of exposure (or the input image's 218 * start of exposure for a reprocess request). 219 * 220 * Both partial metadata results and the gralloc buffers may be sent to the 221 * framework at any time before or after the SHUTTER event. 222 * 223 * 10. After some pipeline delay, the HAL begins to return completed captures to 224 * the framework with camera3_callback_ops_t->process_capture_result(). These 225 * are returned in the same order as the requests were submitted. Multiple 226 * requests can be in flight at once, depending on the pipeline depth of the 227 * camera HAL device. 228 * 229 * >= CAMERA_DEVICE_API_VERSION_3_2: 230 * 231 * Once a buffer is returned by process_capture_result as part of the 232 * camera3_stream_buffer_t array, and the fence specified by release_fence 233 * has been signaled (this is a no-op for -1 fences), the ownership of that 234 * buffer is considered to be transferred back to the framework. After that, 235 * the HAL must no longer retain that particular buffer, and the 236 * framework may clean up the memory for it immediately. 237 * 238 * process_capture_result may be called multiple times for a single frame, 239 * each time with a new disjoint piece of metadata and/or set of gralloc 240 * buffers. The framework will accumulate these partial metadata results 241 * into one result. 
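 *
 *      As an illustration only (this is not a normative part of the API), a
 *      HAL that advertises android.request.partialResultCount == 2 and has its
 *      3A metadata ready early might return results as in the sketch below.
 *      The helper name and its parameters are hypothetical:
 *
 *          void emit_results(const camera3_callback_ops_t *cb, uint32_t frame,
 *                            camera_metadata_t *early_3a_meta,
 *                            camera_metadata_t *final_meta,
 *                            camera3_stream_buffer_t *buffers,
 *                            uint32_t num_buffers) {
 *              // Early, metadata-only partial result: a disjoint subset of
 *              // the full result metadata, with no buffers attached.
 *              camera3_capture_result_t partial = {
 *                  .frame_number = frame,
 *                  .result = early_3a_meta,
 *                  .num_output_buffers = 0,
 *                  .output_buffers = NULL,
 *                  .input_buffer = NULL,
 *                  .partial_result = 1,
 *              };
 *              cb->process_capture_result(cb, &partial);
 *
 *              // Final result: the remaining metadata plus the filled
 *              // buffers. partial_result == partialResultCount marks it as
 *              // the final metadata piece for this frame.
 *              camera3_capture_result_t final_result = {
 *                  .frame_number = frame,
 *                  .result = final_meta,
 *                  .num_output_buffers = num_buffers,
 *                  .output_buffers = buffers,
 *                  .input_buffer = NULL,
 *                  .partial_result = 2,
 *              };
 *              cb->process_capture_result(cb, &final_result);
 *          }
 *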
242 * 243 * In particular, it is legal for a process_capture_result to be called 244 * simultaneously for both a frame N and a frame N+1 as long as the 245 * above rule holds for gralloc buffers (both input and output). 246 * 247 * 11. After some time, the framework may stop submitting new requests, wait for 248 * the existing captures to complete (all buffers filled, all results 249 * returned), and then call configure_streams() again. This resets the camera 250 * hardware and pipeline for a new set of input/output streams. Some streams 251 * may be reused from the previous configuration; if these streams' buffers 252 * had already been registered with the HAL, they will not be registered 253 * again. The framework then continues from step 7, if at least one 254 * registered output stream remains (otherwise, step 5 is required first). 255 * 256 * 12. Alternatively, the framework may call camera3_device_t->common->close() 257 * to end the camera session. This may be called at any time when no other 258 * calls from the framework are active, although the call may block until all 259 * in-flight captures have completed (all results returned, all buffers 260 * filled). After the close call returns, no more calls to the 261 * camera3_callback_ops_t functions are allowed from the HAL. Once the 262 * close() call is underway, the framework may not call any other HAL device 263 * functions. 264 * 265 * 13. In case of an error or other asynchronous event, the HAL must call 266 * camera3_callback_ops_t->notify() with the appropriate error/event 267 * message. After returning from a fatal device-wide error notification, the 268 * HAL should act as if close() had been called on it. However, the HAL must 269 * either cancel or complete all outstanding captures before calling 270 * notify(), so that once notify() is called with a fatal error, the 271 * framework will not receive further callbacks from the device. Methods 272 * besides close() should return -ENODEV or NULL after the notify() method 273 * returns from a fatal error message. 274 */ 275 276 /** 277 * S3. Operational modes: 278 * 279 * The camera 3 HAL device can implement one of two possible operational modes; 280 * limited and full. Full support is expected from new higher-end 281 * devices. Limited mode has hardware requirements roughly in line with those 282 * for a camera HAL device v1 implementation, and is expected from older or 283 * inexpensive devices. Full is a strict superset of limited, and they share the 284 * same essential operational flow, as documented above. 285 * 286 * The HAL must indicate its level of support with the 287 * android.info.supportedHardwareLevel static metadata entry, with 0 indicating 288 * limited mode, and 1 indicating full mode support. 289 * 290 * Roughly speaking, limited-mode devices do not allow for application control 291 * of capture settings (3A control only), high-rate capture of high-resolution 292 * images, raw sensor readout, or support for YUV output streams above maximum 293 * recording resolution (JPEG only for large images). 294 * 295 * ** Details of limited mode behavior: 296 * 297 * - Limited-mode devices do not need to implement accurate synchronization 298 * between capture request settings and the actual image data 299 * captured. Instead, changes to settings may take effect some time in the 300 * future, and possibly not for the same output frame for each settings 301 * entry. Rapid changes in settings may result in some settings never being 302 * used for a capture. 
However, captures that include high-resolution output 303 * buffers ( > 1080p ) have to use the settings as specified (but see below 304 * for processing rate). 305 * 306 * - Limited-mode devices do not need to support most of the 307 * settings/result/static info metadata. Specifically, only the following settings 308 * are expected to be consumed or produced by a limited-mode HAL device: 309 * 310 * android.control.aeAntibandingMode (controls and dynamic) 311 * android.control.aeExposureCompensation (controls and dynamic) 312 * android.control.aeLock (controls and dynamic) 313 * android.control.aeMode (controls and dynamic) 314 * android.control.aeRegions (controls and dynamic) 315 * android.control.aeTargetFpsRange (controls and dynamic) 316 * android.control.aePrecaptureTrigger (controls and dynamic) 317 * android.control.afMode (controls and dynamic) 318 * android.control.afRegions (controls and dynamic) 319 * android.control.awbLock (controls and dynamic) 320 * android.control.awbMode (controls and dynamic) 321 * android.control.awbRegions (controls and dynamic) 322 * android.control.captureIntent (controls and dynamic) 323 * android.control.effectMode (controls and dynamic) 324 * android.control.mode (controls and dynamic) 325 * android.control.sceneMode (controls and dynamic) 326 * android.control.videoStabilizationMode (controls and dynamic) 327 * android.control.aeAvailableAntibandingModes (static) 328 * android.control.aeAvailableModes (static) 329 * android.control.aeAvailableTargetFpsRanges (static) 330 * android.control.aeCompensationRange (static) 331 * android.control.aeCompensationStep (static) 332 * android.control.afAvailableModes (static) 333 * android.control.availableEffects (static) 334 * android.control.availableSceneModes (static) 335 * android.control.availableVideoStabilizationModes (static) 336 * android.control.awbAvailableModes (static) 337 * android.control.maxRegions (static) 338 * android.control.sceneModeOverrides (static) 339 * android.control.aeState (dynamic) 340 * android.control.afState (dynamic) 341 * android.control.awbState (dynamic) 342 * 343 * android.flash.mode (controls and dynamic) 344 * android.flash.info.available (static) 345 * 346 * android.info.supportedHardwareLevel (static) 347 * 348 * android.jpeg.gpsCoordinates (controls and dynamic) 349 * android.jpeg.gpsProcessingMethod (controls and dynamic) 350 * android.jpeg.gpsTimestamp (controls and dynamic) 351 * android.jpeg.orientation (controls and dynamic) 352 * android.jpeg.quality (controls and dynamic) 353 * android.jpeg.thumbnailQuality (controls and dynamic) 354 * android.jpeg.thumbnailSize (controls and dynamic) 355 * android.jpeg.availableThumbnailSizes (static) 356 * android.jpeg.maxSize (static) 357 * 358 * android.lens.info.minimumFocusDistance (static) 359 * 360 * android.request.id (controls and dynamic) 361 * 362 * android.scaler.cropRegion (controls and dynamic) 363 * android.scaler.availableStreamConfigurations (static) 364 * android.scaler.availableMinFrameDurations (static) 365 * android.scaler.availableStallDurations (static) 366 * android.scaler.availableMaxDigitalZoom (static) 367 * android.scaler.maxDigitalZoom (static) 368 * android.scaler.croppingType (static) 369 * 370 * android.sensor.orientation (static) 371 * android.sensor.timestamp (dynamic) 372 * 373 * android.statistics.faceDetectMode (controls and dynamic) 374 * android.statistics.info.availableFaceDetectModes (static) 375 * android.statistics.faceIds (dynamic) 376 * android.statistics.faceLandmarks 
(dynamic) 377 * android.statistics.faceRectangles (dynamic) 378 * android.statistics.faceScores (dynamic) 379 * 380 * android.sync.frameNumber (dynamic) 381 * android.sync.maxLatency (static) 382 * 383 * - Captures in limited mode that include high-resolution (> 1080p) output 384 * buffers may block in process_capture_request() until all the output buffers 385 * have been filled. A full-mode HAL device must process sequences of 386 * high-resolution requests at the rate indicated in the static metadata for 387 * that pixel format. The HAL must still call process_capture_result() to 388 * provide the output; the framework must simply be prepared for 389 * process_capture_request() to block until after process_capture_result() for 390 * that request completes for high-resolution captures for limited-mode 391 * devices. 392 * 393 * - Full-mode devices must support the following additional capabilities: 394 * - 30fps at maximum resolution is preferred, more than 20fps is required. 395 * - Per-frame control (android.sync.maxLatency == PER_FRAME_CONTROL). 396 * - Sensor manual control metadata. See MANUAL_SENSOR defined in 397 * android.request.availableCapabilities. 398 * - Post-processing manual control metadata. See MANUAL_POST_PROCESSING defined 399 * in android.request.availableCapabilities. 400 * 401 */ 402 403 /** 404 * S4. 3A modes and state machines: 405 * 406 * While the actual 3A algorithms are up to the HAL implementation, a high-level 407 * state machine description is defined by the HAL interface, to allow the HAL 408 * device and the framework to communicate about the current state of 3A, and to 409 * trigger 3A events. 410 * 411 * When the device is opened, all the individual 3A states must be 412 * STATE_INACTIVE. Stream configuration does not reset 3A. For example, locked 413 * focus must be maintained across the configure() call. 414 * 415 * Triggering a 3A action involves simply setting the relevant trigger entry in 416 * the settings for the next request to indicate start of trigger. For example, 417 * the trigger for starting an autofocus scan is setting the entry 418 * ANDROID_CONTROL_AF_TRIGGER to ANDROID_CONTROL_AF_TRIGGER_START for one 419 * request, and cancelling an autofocus scan is triggered by setting 420 * ANDROID_CONTROL_AF_TRIGGER to ANDROID_CONTROL_AF_TRIGGER_CANCEL. Otherwise, 421 * the entry will not exist, or be set to ANDROID_CONTROL_AF_TRIGGER_IDLE. Each 422 * request with a trigger entry set to a non-IDLE value will be treated as an 423 * independent triggering event. 424 * 425 * At the top level, 3A is controlled by the ANDROID_CONTROL_MODE setting, which 426 * selects between no 3A (ANDROID_CONTROL_MODE_OFF), normal AUTO mode 427 * (ANDROID_CONTROL_MODE_AUTO), and using the scene mode setting 428 * (ANDROID_CONTROL_MODE_USE_SCENE_MODE). 429 * 430 * - In OFF mode, each of the individual AE/AF/AWB modes is effectively OFF, 431 * and none of the capture controls may be overridden by the 3A routines. 432 * 433 * - In AUTO mode, auto-focus, auto-exposure, and auto-whitebalance all run 434 * their own independent algorithms, and have their own mode, state, and 435 * trigger metadata entries, as listed in the next section. 436 * 437 * - In USE_SCENE_MODE, the value of the ANDROID_CONTROL_SCENE_MODE entry must 438 * be used to determine the behavior of 3A routines. In SCENE_MODEs other than 439 * FACE_PRIORITY, the HAL must override the values of 440 * ANDROID_CONTROL_AE/AWB/AF_MODE to be the mode it prefers for the selected 441 * SCENE_MODE.
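 *
 *      A minimal sketch of such an override (illustrative only; the helper
 *      name is hypothetical, and the metadata accessors come from
 *      system/camera_metadata.h):
 *
 *          static void override_3a_modes_for_scene(camera_metadata_t *result) {
 *              camera_metadata_entry_t e;
 *              if (find_camera_metadata_entry(result,
 *                      ANDROID_CONTROL_SCENE_MODE, &e) != 0 ||
 *                      e.data.u8[0] == ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY) {
 *                  return; // AUTO-like behavior, no override
 *              }
 *              // Report the AF mode the HAL actually ran for this scene mode,
 *              // ignoring whatever AF mode the application requested.
 *              uint8_t af_mode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
 *              if (find_camera_metadata_entry(result,
 *                      ANDROID_CONTROL_AF_MODE, &e) == 0) {
 *                  update_camera_metadata_entry(result, e.index, &af_mode, 1,
 *                          NULL);
 *              }
 *              // ... repeat for ANDROID_CONTROL_AE_MODE and AWB_MODE.
 *          }
 *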
For example, the HAL may prefer SCENE_MODE_NIGHT to use 442 * CONTINUOUS_FOCUS AF mode. Any user selection of AE/AWB/AF_MODE when scene 443 * must be ignored for these scene modes. 444 * 445 * - For SCENE_MODE_FACE_PRIORITY, the AE/AWB/AF_MODE controls work as in 446 * ANDROID_CONTROL_MODE_AUTO, but the 3A routines must bias toward metering 447 * and focusing on any detected faces in the scene. 448 * 449 * S4.1. Auto-focus settings and result entries: 450 * 451 * Main metadata entries: 452 * 453 * ANDROID_CONTROL_AF_MODE: Control for selecting the current autofocus 454 * mode. Set by the framework in the request settings. 455 * 456 * AF_MODE_OFF: AF is disabled; the framework/app directly controls lens 457 * position. 458 * 459 * AF_MODE_AUTO: Single-sweep autofocus. No lens movement unless AF is 460 * triggered. 461 * 462 * AF_MODE_MACRO: Single-sweep up-close autofocus. No lens movement unless 463 * AF is triggered. 464 * 465 * AF_MODE_CONTINUOUS_VIDEO: Smooth continuous focusing, for recording 466 * video. Triggering immediately locks focus in current 467 * position. Canceling resumes cotinuous focusing. 468 * 469 * AF_MODE_CONTINUOUS_PICTURE: Fast continuous focusing, for 470 * zero-shutter-lag still capture. Triggering locks focus once currently 471 * active sweep concludes. Canceling resumes continuous focusing. 472 * 473 * AF_MODE_EDOF: Advanced extended depth of field focusing. There is no 474 * autofocus scan, so triggering one or canceling one has no effect. 475 * Images are focused automatically by the HAL. 476 * 477 * ANDROID_CONTROL_AF_STATE: Dynamic metadata describing the current AF 478 * algorithm state, reported by the HAL in the result metadata. 479 * 480 * AF_STATE_INACTIVE: No focusing has been done, or algorithm was 481 * reset. Lens is not moving. Always the state for MODE_OFF or MODE_EDOF. 482 * When the device is opened, it must start in this state. 483 * 484 * AF_STATE_PASSIVE_SCAN: A continuous focus algorithm is currently scanning 485 * for good focus. The lens is moving. 486 * 487 * AF_STATE_PASSIVE_FOCUSED: A continuous focus algorithm believes it is 488 * well focused. The lens is not moving. The HAL may spontaneously leave 489 * this state. 490 * 491 * AF_STATE_PASSIVE_UNFOCUSED: A continuous focus algorithm believes it is 492 * not well focused. The lens is not moving. The HAL may spontaneously 493 * leave this state. 494 * 495 * AF_STATE_ACTIVE_SCAN: A scan triggered by the user is underway. 496 * 497 * AF_STATE_FOCUSED_LOCKED: The AF algorithm believes it is focused. The 498 * lens is not moving. 499 * 500 * AF_STATE_NOT_FOCUSED_LOCKED: The AF algorithm has been unable to 501 * focus. The lens is not moving. 502 * 503 * ANDROID_CONTROL_AF_TRIGGER: Control for starting an autofocus scan, the 504 * meaning of which is mode- and state- dependent. Set by the framework in 505 * the request settings. 506 * 507 * AF_TRIGGER_IDLE: No current trigger. 508 * 509 * AF_TRIGGER_START: Trigger start of AF scan. Effect is mode and state 510 * dependent. 511 * 512 * AF_TRIGGER_CANCEL: Cancel current AF scan if any, and reset algorithm to 513 * default. 514 * 515 * Additional metadata entries: 516 * 517 * ANDROID_CONTROL_AF_REGIONS: Control for selecting the regions of the FOV 518 * that should be used to determine good focus. This applies to all AF 519 * modes that scan for focus. Set by the framework in the request 520 * settings. 521 * 522 * S4.2. 
Auto-exposure settings and result entries: 523 * 524 * Main metadata entries: 525 * 526 * ANDROID_CONTROL_AE_MODE: Control for selecting the current auto-exposure 527 * mode. Set by the framework in the request settings. 528 * 529 * AE_MODE_OFF: Autoexposure is disabled; the user controls exposure, gain, 530 * frame duration, and flash. 531 * 532 * AE_MODE_ON: Standard autoexposure, with flash control disabled. User may 533 * set flash to fire or to torch mode. 534 * 535 * AE_MODE_ON_AUTO_FLASH: Standard autoexposure, with flash on at HAL's 536 * discretion for precapture and still capture. User control of flash 537 * disabled. 538 * 539 * AE_MODE_ON_ALWAYS_FLASH: Standard autoexposure, with flash always fired 540 * for capture, and at HAL's discretion for precapture.. User control of 541 * flash disabled. 542 * 543 * AE_MODE_ON_AUTO_FLASH_REDEYE: Standard autoexposure, with flash on at 544 * HAL's discretion for precapture and still capture. Use a flash burst 545 * at end of precapture sequence to reduce redeye in the final 546 * picture. User control of flash disabled. 547 * 548 * ANDROID_CONTROL_AE_STATE: Dynamic metadata describing the current AE 549 * algorithm state, reported by the HAL in the result metadata. 550 * 551 * AE_STATE_INACTIVE: Initial AE state after mode switch. When the device is 552 * opened, it must start in this state. 553 * 554 * AE_STATE_SEARCHING: AE is not converged to a good value, and is adjusting 555 * exposure parameters. 556 * 557 * AE_STATE_CONVERGED: AE has found good exposure values for the current 558 * scene, and the exposure parameters are not changing. HAL may 559 * spontaneously leave this state to search for better solution. 560 * 561 * AE_STATE_LOCKED: AE has been locked with the AE_LOCK control. Exposure 562 * values are not changing. 563 * 564 * AE_STATE_FLASH_REQUIRED: The HAL has converged exposure, but believes 565 * flash is required for a sufficiently bright picture. Used for 566 * determining if a zero-shutter-lag frame can be used. 567 * 568 * AE_STATE_PRECAPTURE: The HAL is in the middle of a precapture 569 * sequence. Depending on AE mode, this mode may involve firing the 570 * flash for metering, or a burst of flash pulses for redeye reduction. 571 * 572 * ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER: Control for starting a metering 573 * sequence before capturing a high-quality image. Set by the framework in 574 * the request settings. 575 * 576 * PRECAPTURE_TRIGGER_IDLE: No current trigger. 577 * 578 * PRECAPTURE_TRIGGER_START: Start a precapture sequence. The HAL should 579 * use the subsequent requests to measure good exposure/white balance 580 * for an upcoming high-resolution capture. 581 * 582 * Additional metadata entries: 583 * 584 * ANDROID_CONTROL_AE_LOCK: Control for locking AE controls to their current 585 * values 586 * 587 * ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION: Control for adjusting AE 588 * algorithm target brightness point. 589 * 590 * ANDROID_CONTROL_AE_TARGET_FPS_RANGE: Control for selecting the target frame 591 * rate range for the AE algorithm. The AE routine cannot change the frame 592 * rate to be outside these bounds. 593 * 594 * ANDROID_CONTROL_AE_REGIONS: Control for selecting the regions of the FOV 595 * that should be used to determine good exposure levels. This applies to 596 * all AE modes besides OFF. 597 * 598 * S4.3. Auto-whitebalance settings and result entries: 599 * 600 * Main metadata entries: 601 * 602 * ANDROID_CONTROL_AWB_MODE: Control for selecting the current white-balance 603 * mode. 
604 * 605 * AWB_MODE_OFF: Auto-whitebalance is disabled. User controls color matrix. 606 * 607 * AWB_MODE_AUTO: Automatic white balance is enabled; 3A controls color 608 * transform, possibly using more complex transforms than a simple 609 * matrix. 610 * 611 * AWB_MODE_INCANDESCENT: Fixed white balance settings good for indoor 612 * incandescent (tungsten) lighting, roughly 2700K. 613 * 614 * AWB_MODE_FLUORESCENT: Fixed white balance settings good for fluorescent 615 * lighting, roughly 5000K. 616 * 617 * AWB_MODE_WARM_FLUORESCENT: Fixed white balance settings good for 618 * fluorescent lighting, roughly 3000K. 619 * 620 * AWB_MODE_DAYLIGHT: Fixed white balance settings good for daylight, 621 * roughly 5500K. 622 * 623 * AWB_MODE_CLOUDY_DAYLIGHT: Fixed white balance settings good for clouded 624 * daylight, roughly 6500K. 625 * 626 * AWB_MODE_TWILIGHT: Fixed white balance settings good for 627 * near-sunset/sunrise, roughly 15000K. 628 * 629 * AWB_MODE_SHADE: Fixed white balance settings good for areas indirectly 630 * lit by the sun, roughly 7500K. 631 * 632 * ANDROID_CONTROL_AWB_STATE: Dynamic metadata describing the current AWB 633 * algorithm state, reported by the HAL in the result metadata. 634 * 635 * AWB_STATE_INACTIVE: Initial AWB state after mode switch. When the device 636 * is opened, it must start in this state. 637 * 638 * AWB_STATE_SEARCHING: AWB is not converged to a good value, and is 639 * changing color adjustment parameters. 640 * 641 * AWB_STATE_CONVERGED: AWB has found good color adjustment values for the 642 * current scene, and the parameters are not changing. HAL may 643 * spontaneously leave this state to search for better solution. 644 * 645 * AWB_STATE_LOCKED: AWB has been locked with the AWB_LOCK control. Color 646 * adjustment values are not changing. 647 * 648 * Additional metadata entries: 649 * 650 * ANDROID_CONTROL_AWB_LOCK: Control for locking AWB color adjustments to 651 * their current values. 652 * 653 * ANDROID_CONTROL_AWB_REGIONS: Control for selecting the regions of the FOV 654 * that should be used to determine good color balance. This applies only 655 * to auto-WB mode. 656 * 657 * S4.4. General state machine transition notes 658 * 659 * Switching between AF, AE, or AWB modes always resets the algorithm's state 660 * to INACTIVE. Similarly, switching between CONTROL_MODE or 661 * CONTROL_SCENE_MODE if CONTROL_MODE == USE_SCENE_MODE resets all the 662 * algorithm states to INACTIVE. 663 * 664 * The tables below are per-mode. 665 * 666 * S4.5. AF state machines 667 * 668 * when enabling AF or changing AF mode 669 *| state | trans. cause | new state | notes | 670 *+--------------------+---------------+--------------------+------------------+ 671 *| Any | AF mode change| INACTIVE | | 672 *+--------------------+---------------+--------------------+------------------+ 673 * 674 * mode = AF_MODE_OFF or AF_MODE_EDOF 675 *| state | trans. cause | new state | notes | 676 *+--------------------+---------------+--------------------+------------------+ 677 *| INACTIVE | | INACTIVE | Never changes | 678 *+--------------------+---------------+--------------------+------------------+ 679 * 680 * mode = AF_MODE_AUTO or AF_MODE_MACRO 681 *| state | trans. 
cause | new state | notes | 682 *+--------------------+---------------+--------------------+------------------+ 683 *| INACTIVE | AF_TRIGGER | ACTIVE_SCAN | Start AF sweep | 684 *| | | | Lens now moving | 685 *+--------------------+---------------+--------------------+------------------+ 686 *| ACTIVE_SCAN | AF sweep done | FOCUSED_LOCKED | If AF successful | 687 *| | | | Lens now locked | 688 *+--------------------+---------------+--------------------+------------------+ 689 *| ACTIVE_SCAN | AF sweep done | NOT_FOCUSED_LOCKED | If AF successful | 690 *| | | | Lens now locked | 691 *+--------------------+---------------+--------------------+------------------+ 692 *| ACTIVE_SCAN | AF_CANCEL | INACTIVE | Cancel/reset AF | 693 *| | | | Lens now locked | 694 *+--------------------+---------------+--------------------+------------------+ 695 *| FOCUSED_LOCKED | AF_CANCEL | INACTIVE | Cancel/reset AF | 696 *+--------------------+---------------+--------------------+------------------+ 697 *| FOCUSED_LOCKED | AF_TRIGGER | ACTIVE_SCAN | Start new sweep | 698 *| | | | Lens now moving | 699 *+--------------------+---------------+--------------------+------------------+ 700 *| NOT_FOCUSED_LOCKED | AF_CANCEL | INACTIVE | Cancel/reset AF | 701 *+--------------------+---------------+--------------------+------------------+ 702 *| NOT_FOCUSED_LOCKED | AF_TRIGGER | ACTIVE_SCAN | Start new sweep | 703 *| | | | Lens now moving | 704 *+--------------------+---------------+--------------------+------------------+ 705 *| All states | mode change | INACTIVE | | 706 *+--------------------+---------------+--------------------+------------------+ 707 * 708 * mode = AF_MODE_CONTINUOUS_VIDEO 709 *| state | trans. cause | new state | notes | 710 *+--------------------+---------------+--------------------+------------------+ 711 *| INACTIVE | HAL initiates | PASSIVE_SCAN | Start AF scan | 712 *| | new scan | | Lens now moving | 713 *+--------------------+---------------+--------------------+------------------+ 714 *| INACTIVE | AF_TRIGGER | NOT_FOCUSED_LOCKED | AF state query | 715 *| | | | Lens now locked | 716 *+--------------------+---------------+--------------------+------------------+ 717 *| PASSIVE_SCAN | HAL completes | PASSIVE_FOCUSED | End AF scan | 718 *| | current scan | | Lens now locked | 719 *+--------------------+---------------+--------------------+------------------+ 720 *| PASSIVE_SCAN | HAL fails | PASSIVE_UNFOCUSED | End AF scan | 721 *| | current scan | | Lens now locked | 722 *+--------------------+---------------+--------------------+------------------+ 723 *| PASSIVE_SCAN | AF_TRIGGER | FOCUSED_LOCKED | Immediate trans. | 724 *| | | | if focus is good | 725 *| | | | Lens now locked | 726 *+--------------------+---------------+--------------------+------------------+ 727 *| PASSIVE_SCAN | AF_TRIGGER | NOT_FOCUSED_LOCKED | Immediate trans. 
| 728 *| | | | if focus is bad | 729 *| | | | Lens now locked | 730 *+--------------------+---------------+--------------------+------------------+ 731 *| PASSIVE_SCAN | AF_CANCEL | INACTIVE | Reset lens | 732 *| | | | position | 733 *| | | | Lens now locked | 734 *+--------------------+---------------+--------------------+------------------+ 735 *| PASSIVE_FOCUSED | HAL initiates | PASSIVE_SCAN | Start AF scan | 736 *| | new scan | | Lens now moving | 737 *+--------------------+---------------+--------------------+------------------+ 738 *| PASSIVE_UNFOCUSED | HAL initiates | PASSIVE_SCAN | Start AF scan | 739 *| | new scan | | Lens now moving | 740 *+--------------------+---------------+--------------------+------------------+ 741 *| PASSIVE_FOCUSED | AF_TRIGGER | FOCUSED_LOCKED | Immediate trans. | 742 *| | | | Lens now locked | 743 *+--------------------+---------------+--------------------+------------------+ 744 *| PASSIVE_UNFOCUSED | AF_TRIGGER | NOT_FOCUSED_LOCKED | Immediate trans. | 745 *| | | | Lens now locked | 746 *+--------------------+---------------+--------------------+------------------+ 747 *| FOCUSED_LOCKED | AF_TRIGGER | FOCUSED_LOCKED | No effect | 748 *+--------------------+---------------+--------------------+------------------+ 749 *| FOCUSED_LOCKED | AF_CANCEL | INACTIVE | Restart AF scan | 750 *+--------------------+---------------+--------------------+------------------+ 751 *| NOT_FOCUSED_LOCKED | AF_TRIGGER | NOT_FOCUSED_LOCKED | No effect | 752 *+--------------------+---------------+--------------------+------------------+ 753 *| NOT_FOCUSED_LOCKED | AF_CANCEL | INACTIVE | Restart AF scan | 754 *+--------------------+---------------+--------------------+------------------+ 755 * 756 * mode = AF_MODE_CONTINUOUS_PICTURE 757 *| state | trans. cause | new state | notes | 758 *+--------------------+---------------+--------------------+------------------+ 759 *| INACTIVE | HAL initiates | PASSIVE_SCAN | Start AF scan | 760 *| | new scan | | Lens now moving | 761 *+--------------------+---------------+--------------------+------------------+ 762 *| INACTIVE | AF_TRIGGER | NOT_FOCUSED_LOCKED | AF state query | 763 *| | | | Lens now locked | 764 *+--------------------+---------------+--------------------+------------------+ 765 *| PASSIVE_SCAN | HAL completes | PASSIVE_FOCUSED | End AF scan | 766 *| | current scan | | Lens now locked | 767 *+--------------------+---------------+--------------------+------------------+ 768 *| PASSIVE_SCAN | HAL fails | PASSIVE_UNFOCUSED | End AF scan | 769 *| | current scan | | Lens now locked | 770 *+--------------------+---------------+--------------------+------------------+ 771 *| PASSIVE_SCAN | AF_TRIGGER | FOCUSED_LOCKED | Eventual trans. | 772 *| | | | once focus good | 773 *| | | | Lens now locked | 774 *+--------------------+---------------+--------------------+------------------+ 775 *| PASSIVE_SCAN | AF_TRIGGER | NOT_FOCUSED_LOCKED | Eventual trans. 
| 776 *| | | | if cannot focus | 777 *| | | | Lens now locked | 778 *+--------------------+---------------+--------------------+------------------+ 779 *| PASSIVE_SCAN | AF_CANCEL | INACTIVE | Reset lens | 780 *| | | | position | 781 *| | | | Lens now locked | 782 *+--------------------+---------------+--------------------+------------------+ 783 *| PASSIVE_FOCUSED | HAL initiates | PASSIVE_SCAN | Start AF scan | 784 *| | new scan | | Lens now moving | 785 *+--------------------+---------------+--------------------+------------------+ 786 *| PASSIVE_UNFOCUSED | HAL initiates | PASSIVE_SCAN | Start AF scan | 787 *| | new scan | | Lens now moving | 788 *+--------------------+---------------+--------------------+------------------+ 789 *| PASSIVE_FOCUSED | AF_TRIGGER | FOCUSED_LOCKED | Immediate trans. | 790 *| | | | Lens now locked | 791 *+--------------------+---------------+--------------------+------------------+ 792 *| PASSIVE_UNFOCUSED | AF_TRIGGER | NOT_FOCUSED_LOCKED | Immediate trans. | 793 *| | | | Lens now locked | 794 *+--------------------+---------------+--------------------+------------------+ 795 *| FOCUSED_LOCKED | AF_TRIGGER | FOCUSED_LOCKED | No effect | 796 *+--------------------+---------------+--------------------+------------------+ 797 *| FOCUSED_LOCKED | AF_CANCEL | INACTIVE | Restart AF scan | 798 *+--------------------+---------------+--------------------+------------------+ 799 *| NOT_FOCUSED_LOCKED | AF_TRIGGER | NOT_FOCUSED_LOCKED | No effect | 800 *+--------------------+---------------+--------------------+------------------+ 801 *| NOT_FOCUSED_LOCKED | AF_CANCEL | INACTIVE | Restart AF scan | 802 *+--------------------+---------------+--------------------+------------------+ 803 * 804 * S4.6. AE and AWB state machines 805 * 806 * The AE and AWB state machines are mostly identical. AE has additional 807 * FLASH_REQUIRED and PRECAPTURE states. So rows below that refer to those two 808 * states should be ignored for the AWB state machine. 809 * 810 * when enabling AE/AWB or changing AE/AWB mode 811 *| state | trans. cause | new state | notes | 812 *+--------------------+---------------+--------------------+------------------+ 813 *| Any | mode change | INACTIVE | | 814 *+--------------------+---------------+--------------------+------------------+ 815 * 816 * mode = AE_MODE_OFF / AWB mode not AUTO 817 *| state | trans. cause | new state | notes | 818 *+--------------------+---------------+--------------------+------------------+ 819 *| INACTIVE | | INACTIVE | AE/AWB disabled | 820 *+--------------------+---------------+--------------------+------------------+ 821 * 822 * mode = AE_MODE_ON_* / AWB_MODE_AUTO 823 *| state | trans. 
cause | new state | notes | 824 *+--------------------+---------------+--------------------+------------------+ 825 *| INACTIVE | HAL initiates | SEARCHING | | 826 *| | AE/AWB scan | | | 827 *+--------------------+---------------+--------------------+------------------+ 828 *| INACTIVE | AE/AWB_LOCK | LOCKED | values locked | 829 *| | on | | | 830 *+--------------------+---------------+--------------------+------------------+ 831 *| SEARCHING | HAL finishes | CONVERGED | good values, not | 832 *| | AE/AWB scan | | changing | 833 *+--------------------+---------------+--------------------+------------------+ 834 *| SEARCHING | HAL finishes | FLASH_REQUIRED | converged but too| 835 *| | AE scan | | dark w/o flash | 836 *+--------------------+---------------+--------------------+------------------+ 837 *| SEARCHING | AE/AWB_LOCK | LOCKED | values locked | 838 *| | on | | | 839 *+--------------------+---------------+--------------------+------------------+ 840 *| CONVERGED | HAL initiates | SEARCHING | values locked | 841 *| | AE/AWB scan | | | 842 *+--------------------+---------------+--------------------+------------------+ 843 *| CONVERGED | AE/AWB_LOCK | LOCKED | values locked | 844 *| | on | | | 845 *+--------------------+---------------+--------------------+------------------+ 846 *| FLASH_REQUIRED | HAL initiates | SEARCHING | values locked | 847 *| | AE/AWB scan | | | 848 *+--------------------+---------------+--------------------+------------------+ 849 *| FLASH_REQUIRED | AE/AWB_LOCK | LOCKED | values locked | 850 *| | on | | | 851 *+--------------------+---------------+--------------------+------------------+ 852 *| LOCKED | AE/AWB_LOCK | SEARCHING | values not good | 853 *| | off | | after unlock | 854 *+--------------------+---------------+--------------------+------------------+ 855 *| LOCKED | AE/AWB_LOCK | CONVERGED | values good | 856 *| | off | | after unlock | 857 *+--------------------+---------------+--------------------+------------------+ 858 *| LOCKED | AE_LOCK | FLASH_REQUIRED | exposure good, | 859 *| | off | | but too dark | 860 *+--------------------+---------------+--------------------+------------------+ 861 *| All AE states | PRECAPTURE_ | PRECAPTURE | Start precapture | 862 *| | START | | sequence | 863 *+--------------------+---------------+--------------------+------------------+ 864 *| PRECAPTURE | Sequence done.| CONVERGED | Ready for high- | 865 *| | AE_LOCK off | | quality capture | 866 *+--------------------+---------------+--------------------+------------------+ 867 *| PRECAPTURE | Sequence done.| LOCKED | Ready for high- | 868 *| | AE_LOCK on | | quality capture | 869 *+--------------------+---------------+--------------------+------------------+ 870 * 871 */ 872 873 /** 874 * S5. Cropping: 875 * 876 * Cropping of the full pixel array (for digital zoom and other use cases where 877 * a smaller FOV is desirable) is communicated through the 878 * ANDROID_SCALER_CROP_REGION setting. This is a per-request setting, and can 879 * change on a per-request basis, which is critical for implementing smooth 880 * digital zoom. 881 * 882 * The region is defined as a rectangle (x, y, width, height), with (x, y) 883 * describing the top-left corner of the rectangle. The rectangle is defined on 884 * the coordinate system of the sensor active pixel array, with (0,0) being the 885 * top-left pixel of the active pixel array. 
Therefore, the width and height 886 * cannot be larger than the dimensions reported in the 887 * ANDROID_SENSOR_ACTIVE_PIXEL_ARRAY static info field. The minimum allowed 888 * width and height are reported by the HAL through the 889 * ANDROID_SCALER_MAX_DIGITAL_ZOOM static info field, which describes the 890 * maximum supported zoom factor. Therefore, the minimum crop region width and 891 * height are: 892 * 893 * {width, height} = 894 * { floor(ANDROID_SENSOR_ACTIVE_PIXEL_ARRAY[0] / 895 * ANDROID_SCALER_MAX_DIGITAL_ZOOM), 896 * floor(ANDROID_SENSOR_ACTIVE_PIXEL_ARRAY[1] / 897 * ANDROID_SCALER_MAX_DIGITAL_ZOOM) } 898 * 899 * If the crop region needs to fulfill specific requirements (for example, it 900 * needs to start on even coordinates, and its width/height needs to be even), 901 * the HAL must do the necessary rounding and write out the final crop region 902 * used in the output result metadata. Similarly, if the HAL implements video 903 * stabilization, it must adjust the result crop region to describe the region 904 * actually included in the output after video stabilization is applied. In 905 * general, a camera-using application must be able to determine the field of 906 * view it is receiving based on the crop region, the dimensions of the image 907 * sensor, and the lens focal length. 908 * 909 * It is assumed that the cropping is applied after raw to other color space 910 * conversion. Raw streams (RAW16 and RAW_OPAQUE) don't have this conversion stage, 911 * and are not croppable. Therefore, the crop region must be ignored by the HAL 912 * for raw streams. 913 * 914 * Since the crop region applies to all non-raw streams, which may have different aspect 915 * ratios than the crop region, the exact sensor region used for each stream may 916 * be smaller than the crop region. Specifically, each stream should maintain 917 * square pixels and its aspect ratio by minimally further cropping the defined 918 * crop region. If the stream's aspect ratio is wider than the crop region, the 919 * stream should be further cropped vertically, and if the stream's aspect ratio 920 * is narrower than the crop region, the stream should be further cropped 921 * horizontally. 922 * 923 * In all cases, the stream crop must be centered within the full crop region, 924 * and each stream is only either cropped horizontally or vertical relative to 925 * the full crop region, never both. 926 * 927 * For example, if two streams are defined, a 640x480 stream (4:3 aspect), and a 928 * 1280x720 stream (16:9 aspect), below demonstrates the expected output regions 929 * for each stream for a few sample crop regions, on a hypothetical 3 MP (2000 x 930 * 1500 pixel array) sensor. 
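 *
 * As an illustration only, a sketch of this aspect-ratio matching, which
 * approximately reproduces the stream crops in the examples that follow
 * (exact rounding behavior is up to the HAL, and the helper name is
 * hypothetical). crop[] and out[] are {x, y, width, height}:
 *
 *     static void stream_crop(const int32_t crop[4],
 *                             int32_t stream_w, int32_t stream_h,
 *                             int32_t out[4]) {
 *         if ((int64_t)stream_w * crop[3] > (int64_t)stream_h * crop[2]) {
 *             // Stream is wider than the crop region: crop further
 *             // vertically and center the result.
 *             out[2] = crop[2];
 *             out[3] = (int32_t)(((int64_t)crop[2] * stream_h) / stream_w);
 *             out[0] = crop[0];
 *             out[1] = crop[1] + (crop[3] - out[3]) / 2;
 *         } else {
 *             // Stream is narrower than (or equal to) the crop region: crop
 *             // further horizontally and center the result.
 *             out[3] = crop[3];
 *             out[2] = (int32_t)(((int64_t)crop[3] * stream_w) / stream_h);
 *             out[1] = crop[1];
 *             out[0] = crop[0] + (crop[2] - out[2]) / 2;
 *         }
 *     }
 *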
931 * 932 * Crop region: (500, 375, 1000, 750) (4:3 aspect ratio) 933 * 934 * 640x480 stream crop: (500, 375, 1000, 750) (equal to crop region) 935 * 1280x720 stream crop: (500, 469, 1000, 562) (marked with =) 936 * 937 * 0 1000 2000 938 * +---------+---------+---------+----------+ 939 * | Active pixel array | 940 * | | 941 * | | 942 * + +-------------------+ + 375 943 * | | | | 944 * | O===================O | 945 * | I 1280x720 stream I | 946 * + I I + 750 947 * | I I | 948 * | O===================O | 949 * | | | | 950 * + +-------------------+ + 1125 951 * | Crop region, 640x480 stream | 952 * | | 953 * | | 954 * +---------+---------+---------+----------+ 1500 955 * 956 * Crop region: (500, 375, 1333, 750) (16:9 aspect ratio) 957 * 958 * 640x480 stream crop: (666, 375, 1000, 750) (marked with =) 959 * 1280x720 stream crop: (500, 375, 1333, 750) (equal to crop region) 960 * 961 * 0 1000 2000 962 * +---------+---------+---------+----------+ 963 * | Active pixel array | 964 * | | 965 * | | 966 * + +---O==================O---+ + 375 967 * | | I 640x480 stream I | | 968 * | | I I | | 969 * | | I I | | 970 * + | I I | + 750 971 * | | I I | | 972 * | | I I | | 973 * | | I I | | 974 * + +---O==================O---+ + 1125 975 * | Crop region, 1280x720 stream | 976 * | | 977 * | | 978 * +---------+---------+---------+----------+ 1500 979 * 980 * Crop region: (500, 375, 750, 750) (1:1 aspect ratio) 981 * 982 * 640x480 stream crop: (500, 469, 750, 562) (marked with =) 983 * 1280x720 stream crop: (500, 543, 750, 414) (marged with #) 984 * 985 * 0 1000 2000 986 * +---------+---------+---------+----------+ 987 * | Active pixel array | 988 * | | 989 * | | 990 * + +--------------+ + 375 991 * | O==============O | 992 * | ################ | 993 * | # # | 994 * + # # + 750 995 * | # # | 996 * | ################ 1280x720 | 997 * | O==============O 640x480 | 998 * + +--------------+ + 1125 999 * | Crop region | 1000 * | | 1001 * | | 1002 * +---------+---------+---------+----------+ 1500 1003 * 1004 * And a final example, a 1024x1024 square aspect ratio stream instead of the 1005 * 480p stream: 1006 * 1007 * Crop region: (500, 375, 1000, 750) (4:3 aspect ratio) 1008 * 1009 * 1024x1024 stream crop: (625, 375, 750, 750) (marked with #) 1010 * 1280x720 stream crop: (500, 469, 1000, 562) (marked with =) 1011 * 1012 * 0 1000 2000 1013 * +---------+---------+---------+----------+ 1014 * | Active pixel array | 1015 * | | 1016 * | 1024x1024 stream | 1017 * + +--###############--+ + 375 1018 * | | # # | | 1019 * | O===================O | 1020 * | I 1280x720 stream I | 1021 * + I I + 750 1022 * | I I | 1023 * | O===================O | 1024 * | | # # | | 1025 * + +--###############--+ + 1125 1026 * | Crop region | 1027 * | | 1028 * | | 1029 * +---------+---------+---------+----------+ 1500 1030 * 1031 */ 1032 1033 /** 1034 * S6. Error management: 1035 * 1036 * Camera HAL device ops functions that have a return value will all return 1037 * -ENODEV / NULL in case of a serious error. This means the device cannot 1038 * continue operation, and must be closed by the framework. Once this error is 1039 * returned by some method, or if notify() is called with ERROR_DEVICE, only 1040 * the close() method can be called successfully. All other methods will return 1041 * -ENODEV / NULL. 1042 * 1043 * If a device op is called in the wrong sequence, for example if the framework 1044 * calls configure_streams() is called before initialize(), the device must 1045 * return -ENOSYS from the call, and do nothing. 
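 *
 * For illustration only, a guard pattern a HAL might use at the top of each
 * device op; the state structure and its fields are hypothetical, while the
 * -ENOSYS / -ENODEV return codes are the normative part described above:
 *
 *     static int check_call_allowed(const struct example_hal_state *state,
 *                                   int requires_initialize) {
 *         if (state->fatal_error_notified) {
 *             return -ENODEV;  // after ERROR_DEVICE, only close() may succeed
 *         }
 *         if (requires_initialize && !state->initialized) {
 *             return -ENOSYS;  // called out of sequence; do nothing
 *         }
 *         return 0;
 *     }
 *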
1046 * 1047 * Transient errors in image capture must be reported through notify() as follows: 1048 * 1049 * - The failure of an entire capture to occur must be reported by the HAL by 1050 * calling notify() with ERROR_REQUEST. Individual errors for the result 1051 * metadata or the output buffers must not be reported in this case. 1052 * 1053 * - If the metadata for a capture cannot be produced, but some image buffers 1054 * were filled, the HAL must call notify() with ERROR_RESULT. 1055 * 1056 * - If an output image buffer could not be filled, but either the metadata was 1057 * produced or some other buffers were filled, the HAL must call notify() with 1058 * ERROR_BUFFER for each failed buffer. 1059 * 1060 * In each of these transient failure cases, the HAL must still call 1061 * process_capture_result, with valid output and input (if an input buffer was 1062 * submitted) buffer_handle_t. If the result metadata could not be produced, it 1063 * should be NULL. If some buffers could not be filled, they must be returned with 1064 * process_capture_result in the error state, their release fences must be set to 1065 * the acquire fences passed by the framework, or -1 if they have been waited on by 1066 * the HAL already. 1067 * 1068 * Invalid input arguments result in -EINVAL from the appropriate methods. In 1069 * that case, the framework must act as if that call had never been made. 1070 * 1071 */ 1072 1073 /** 1074 * S7. Key Performance Indicator (KPI) glossary: 1075 * 1076 * This includes some critical definitions that are used by KPI metrics. 1077 * 1078 * Pipeline Latency: 1079 * For a given capture request, the duration from the framework calling 1080 * process_capture_request to the HAL sending capture result and all buffers 1081 * back by process_capture_result call. To make the Pipeline Latency measure 1082 * independent of frame rate, it is measured by frame count. 1083 * 1084 * For example, when frame rate is 30 (fps), the frame duration (time interval 1085 * between adjacent frame capture time) is 33 (ms). 1086 * If it takes 5 frames for framework to get the result and buffers back for 1087 * a given request, then the Pipeline Latency is 5 (frames), instead of 1088 * 5 x 33 = 165 (ms). 1089 * 1090 * The Pipeline Latency is determined by android.request.pipelineDepth and 1091 * android.request.pipelineMaxDepth, see their definitions for more details. 1092 * 1093 */ 1094 1095 /** 1096 * S8. Sample Use Cases: 1097 * 1098 * This includes some typical use case examples the camera HAL may support. 1099 * 1100 * S8.1 Zero Shutter Lag (ZSL) with CAMERA3_STREAM_BIDIRECTIONAL stream. 1101 * 1102 * For this use case, the bidirectional stream will be used by the framework as follows: 1103 * 1104 * 1. The framework includes a buffer from this stream as output buffer in a 1105 * request as normal. 1106 * 1107 * 2. Once the HAL device returns a filled output buffer to the framework, 1108 * the framework may do one of two things with the filled buffer: 1109 * 1110 * 2. a. The framework uses the filled data, and returns the now-used buffer 1111 * to the stream queue for reuse. This behavior exactly matches the 1112 * OUTPUT type of stream. 1113 * 1114 * 2. b. The framework wants to reprocess the filled data, and uses the 1115 * buffer as an input buffer for a request. Once the HAL device has 1116 * used the reprocessing buffer, it then returns it to the 1117 * framework. The framework then returns the now-used buffer to the 1118 * stream queue for reuse. 1119 * 1120 * 3. 
The HAL device will be given the buffer again as an output buffer for 1121 * a request at some future point. 1122 * 1123 * For ZSL use case, the pixel format for bidirectional stream will be 1124 * HAL_PIXEL_FORMAT_RAW_OPAQUE or HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED if it 1125 * is listed in android.scaler.availableInputOutputFormatsMap. When 1126 * HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED is used, the gralloc 1127 * usage flags for the consumer endpoint will be set to GRALLOC_USAGE_HW_CAMERA_ZSL. 1128 * A configuration stream list that has BIDIRECTIONAL stream used as input, will 1129 * usually also have a distinct OUTPUT stream to get the reprocessing data. For example, 1130 * for the ZSL use case, the stream list might be configured with the following: 1131 * 1132 * - A HAL_PIXEL_FORMAT_RAW_OPAQUE bidirectional stream is used 1133 * as input. 1134 * - And a HAL_PIXEL_FORMAT_BLOB (JPEG) output stream. 1135 * 1136 * S8.2 ZSL (OPAQUE) reprocessing with CAMERA3_STREAM_INPUT stream. 1137 * 1138 * CAMERA_DEVICE_API_VERSION_3_3: 1139 * When OPAQUE_REPROCESSING capability is supported by the camera device, the INPUT stream 1140 * can be used for application/framework implemented use case like Zero Shutter Lag (ZSL). 1141 * This kind of stream will be used by the framework as follows: 1142 * 1143 * 1. Application/framework configures an opaque (RAW or YUV based) format output stream that is 1144 * used to produce the ZSL output buffers. The stream pixel format will be 1145 * HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED. 1146 * 1147 * 2. Application/framework configures an opaque format input stream that is used to 1148 * send the reprocessing ZSL buffers to the HAL. The stream pixel format will 1149 * also be HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED. 1150 * 1151 * 3. Application/framework configures a YUV/JPEG output stream that is used to receive the 1152 * reprocessed data. The stream pixel format will be YCbCr_420/HAL_PIXEL_FORMAT_BLOB. 1153 * 1154 * 4. Application/framework picks a ZSL buffer from the ZSL output stream when a ZSL capture is 1155 * issued by the application, and sends the data back as an input buffer in a 1156 * reprocessing request, then sends to the HAL for reprocessing. 1157 * 1158 * 5. The HAL sends back the output YUV/JPEG result to framework. 1159 * 1160 * The HAL can select the actual opaque buffer format and configure the ISP pipeline 1161 * appropriately based on the HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED format and 1162 * the gralloc usage flag GRALLOC_USAGE_HW_CAMERA_ZSL. 1163 1164 * S8.3 YUV reprocessing with CAMERA3_STREAM_INPUT stream. 1165 * 1166 * When YUV reprocessing is supported by the HAL, the INPUT stream 1167 * can be used for the YUV reprocessing use cases like lucky-shot and image fusion. 1168 * This kind of stream will be used by the framework as follows: 1169 * 1170 * 1. Application/framework configures an YCbCr_420 format output stream that is 1171 * used to produce the output buffers. 1172 * 1173 * 2. Application/framework configures an YCbCr_420 format input stream that is used to 1174 * send the reprocessing YUV buffers to the HAL. 1175 * 1176 * 3. Application/framework configures a YUV/JPEG output stream that is used to receive the 1177 * reprocessed data. The stream pixel format will be YCbCr_420/HAL_PIXEL_FORMAT_BLOB. 1178 * 1179 * 4. 
Application/framework processes the output buffers (could be as simple as picking 1180 * an output buffer directly) from the output stream when a capture is issued, and sends 1181 * the data back as an input buffer in a reprocessing request, then sends to the HAL 1182 * for reprocessing. 1183 * 1184 * 5. The HAL sends back the output YUV/JPEG result to framework. 1185 * 1186 */ 1187 1188 /** 1189 * S9. Notes on Controls and Metadata 1190 * 1191 * This section contains notes about the interpretation and usage of various metadata tags. 1192 * 1193 * S9.1 HIGH_QUALITY and FAST modes. 1194 * 1195 * Many camera post-processing blocks may be listed as having HIGH_QUALITY, 1196 * FAST, and OFF operating modes. These blocks will typically also have an 1197 * 'available modes' tag representing which of these operating modes are 1198 * available on a given device. The general policy regarding implementing 1199 * these modes is as follows: 1200 * 1201 * 1. Operating mode controls of hardware blocks that cannot be disabled 1202 * must not list OFF in their corresponding 'available modes' tags. 1203 * 1204 * 2. OFF will always be included in their corresponding 'available modes' 1205 * tag if it is possible to disable that hardware block. 1206 * 1207 * 3. FAST must always be included in the 'available modes' tags for all 1208 * post-processing blocks supported on the device. If a post-processing 1209 * block also has a slower and higher quality operating mode that does 1210 * not meet the framerate requirements for FAST mode, HIGH_QUALITY should 1211 * be included in the 'available modes' tag to represent this operating 1212 * mode. 1213 */ 1214 1215 /** 1216 * S10. Reprocessing flow and controls 1217 * 1218 * This section describes the OPAQUE and YUV reprocessing flow and controls. OPAQUE reprocessing 1219 * uses an opaque format that is not directly application-visible, and the application can 1220 * only select some of the output buffers and send back to HAL for reprocessing, while YUV 1221 * reprocessing gives the application opportunity to process the buffers before reprocessing. 1222 * 1223 * S8 gives the stream configurations for the typical reprocessing uses cases, 1224 * this section specifies the buffer flow and controls in more details. 1225 * 1226 * S10.1 OPAQUE (typically for ZSL use case) reprocessing flow and controls 1227 * 1228 * For OPAQUE reprocessing (e.g. ZSL) use case, after the application creates the specific 1229 * output and input streams, runtime buffer flow and controls are specified as below: 1230 * 1231 * 1. Application starts output streaming by sending repeating requests for output 1232 * opaque buffers and preview. The buffers are held by an application 1233 * maintained circular buffer. The requests are based on CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG 1234 * capture template, which should have all necessary settings that guarantee output 1235 * frame rate is not slowed down relative to sensor output frame rate. 1236 * 1237 * 2. When a capture is issued, the application selects one output buffer based 1238 * on application buffer selection logic, e.g. good AE and AF statistics etc. 1239 * Application then creates an reprocess request based on the capture result associated 1240 * with this selected buffer. The selected output buffer is now added to this reprocess 1241 * request as an input buffer, the output buffer of this reprocess request should be 1242 * either JPEG output buffer or YUV output buffer, or both, depending on the application 1243 * choice. 1244 * 1245 * 3. 
The application then alters the reprocess settings to get the best image quality. If the HAL 1246 * supports the OPAQUE_REPROCESSING capability, it must support, and must support only, the controls below: 1247 * - android.jpeg.* (if a JPEG buffer is included as one of the outputs) 1248 * - android.noiseReduction.mode (change to HIGH_QUALITY if it is supported) 1249 * - android.edge.mode (change to HIGH_QUALITY if it is supported) 1250 * All other controls must be ignored by the HAL. 1251 * 4. The HAL processes the input buffer and returns the output buffers in the capture results 1252 * as normal. 1253 * 1254 * S10.2 YUV reprocessing flow and controls 1255 * 1256 * The YUV reprocessing buffer flow is similar to that of OPAQUE reprocessing, with the following differences: 1257 * 1258 * 1. The application may want finer-granularity control of the intermediate YUV images 1259 * (before reprocessing). For example, the application may choose 1260 * - android.noiseReduction.mode == MINIMAL 1261 * to make sure that no YUV-domain noise reduction has been applied to the output YUV buffers, 1262 * so that it can do its own advanced noise reduction on them. For the OPAQUE reprocessing case, this 1263 * doesn't matter, as long as the final reprocessed image has the best quality. 1264 * 2. The application may modify the YUV output buffer data. For example, in the image fusion use 1265 * case, multiple output images are merged together to improve the signal-to-noise 1266 * ratio (SNR), and the input buffer may be generated from multiple buffers by the application. 1267 * To avoid an excessive amount of noise reduction and an insufficient amount of edge enhancement 1268 * being applied to the input buffer, the application can hint to the HAL how much effective 1269 * exposure time improvement has been applied by the application; the HAL can then adjust the 1270 * noise reduction and edge enhancement parameters to get the best reprocessed image quality. 1271 * The tag below can be used for this purpose: 1272 * - android.reprocess.effectiveExposureFactor 1273 * The value is the exposure time increase factor applied to the original output image; 1274 * for example, if N images are merged, the exposure time increase factor would be up 1275 * to sqrt(N). See this tag's spec for more details. 1276 * 1277 * S10.3 Reprocessing pipeline characteristics 1278 * 1279 * The reprocessing pipeline has the following characteristics that differ from the normal output 1280 * pipeline: 1281 * 1282 * 1. The reprocessing result can be returned ahead of the pending normal output results, but 1283 * the FIFO ordering must be maintained for all reprocessing results. For example, if the 1284 * following requests (A stands for output requests, B stands for reprocessing requests) are 1285 * being processed by the HAL: 1286 * A1, A2, A3, A4, B1, A5, B2, A6... 1287 * the result of B1 can be returned before A1-A4, but the result of B2 must be returned after B1. 1288 * 2. Single input rule: For a given reprocessing request, all output buffers must be from the 1289 * input buffer, rather than from sensor output. For example, if a reprocess request includes both 1290 * JPEG and preview buffers, all output buffers must be produced from the input buffer 1291 * included in the reprocessing request, rather than from the sensor. The HAL must not output preview 1292 * buffers from the sensor while outputting the JPEG buffer from the input buffer. 1293 * 3. The input buffer will be from the camera output directly (ZSL case) or indirectly (image fusion 1294 * case). For the case where the buffer is modified, the size will remain the same.
The HAL can 1295 * notify CAMERA3_MSG_ERROR_REQUEST if a buffer from an unknown source is sent. 1296 * 4. Result as reprocessing request: The HAL can expect that a reprocessing request is a copy 1297 * of one of the output results with minor allowed setting changes. The HAL can notify 1298 * CAMERA3_MSG_ERROR_REQUEST if a request from an unknown source is issued. 1299 * 5. Output buffers may not be used as inputs across a configure_streams() boundary. This is 1300 * because an opaque stream like the ZSL output stream may have a different actual image size 1301 * inside the ZSL buffer, to save power and bandwidth for smaller-resolution JPEG capture. 1302 * The HAL may notify CAMERA3_MSG_ERROR_REQUEST if this case occurs. 1303 * 6. Error reporting for reprocess requests during flush should follow the same rules specified 1304 * by the flush() method. 1305 * 1306 */ 1307 1308 __BEGIN_DECLS 1309 1310 struct camera3_device; 1311 1312 /********************************************************************** 1313 * 1314 * Camera3 stream and stream buffer definitions. 1315 * 1316 * These structs and enums define the handles and contents of the input and 1317 * output streams connecting the HAL to various framework and application buffer 1318 * consumers. Each stream is backed by a gralloc buffer queue. 1319 * 1320 */ 1321 1322 /** 1323 * camera3_stream_type_t: 1324 * 1325 * The type of the camera stream, which defines whether the camera HAL device is 1326 * the producer or the consumer for that stream, and how the buffers of the 1327 * stream relate to the other streams. 1328 */ 1329 typedef enum camera3_stream_type { 1330 /** 1331 * This stream is an output stream; the camera HAL device will be 1332 * responsible for filling buffers from this stream with newly captured or 1333 * reprocessed image data. 1334 */ 1335 CAMERA3_STREAM_OUTPUT = 0, 1336 1337 /** 1338 * This stream is an input stream; the camera HAL device will be responsible 1339 * for reading buffers from this stream and sending them through the camera 1340 * processing pipeline, as if the buffer was a newly captured image from the 1341 * imager. 1342 * 1343 * The pixel format for an input stream can be any format reported by 1344 * android.scaler.availableInputOutputFormatsMap. The pixel format of the 1345 * output stream that is used to produce the reprocessing data may be any 1346 * format reported by android.scaler.availableStreamConfigurations. The 1347 * supported input/output stream combinations depend on the camera device 1348 * capabilities; see android.scaler.availableInputOutputFormatsMap for 1349 * stream map details. 1350 * 1351 * This kind of stream is generally used to reprocess data into higher 1352 * quality images (that otherwise would cause a frame rate performance 1353 * loss), or to do off-line reprocessing. 1354 * 1355 * CAMERA_DEVICE_API_VERSION_3_3: 1356 * The typical use cases are OPAQUE (typically ZSL) and YUV reprocessing; 1357 * see S8.2, S8.3 and S10 for more details. 1358 */ 1359 CAMERA3_STREAM_INPUT = 1, 1360 1361 /** 1362 * This stream can be used for input and output. Typically, the stream is 1363 * used as an output stream, but occasionally one already-filled buffer may 1364 * be sent back to the HAL device for reprocessing. 1365 * 1366 * This kind of stream is meant generally for Zero Shutter Lag (ZSL) 1367 * features, where copying the captured image from the output buffer to the 1368 * reprocessing input buffer would be expensive. See S8.1 for more details.
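 *
 * As a purely illustrative sketch (the stream sizes below are arbitrary, and the
 * exact formats depend on android.scaler.availableInputOutputFormatsMap; this is
 * not a requirement of the API), a framework-side ZSL configuration built around
 * this stream type might carry data like:
 *
 *     camera3_stream_t zsl = {
 *         .stream_type = CAMERA3_STREAM_BIDIRECTIONAL,
 *         .width = 3264, .height = 2448,          // example 8MP size
 *         .format = HAL_PIXEL_FORMAT_RAW_OPAQUE,
 *     };
 *     camera3_stream_t jpeg = {
 *         .stream_type = CAMERA3_STREAM_OUTPUT,
 *         .width = 3264, .height = 2448,
 *         .format = HAL_PIXEL_FORMAT_BLOB,        // JPEG output
 *     };
 *     camera3_stream_t *streams[] = { &zsl, &jpeg };
 *     camera3_stream_configuration_t config = {
 *         .num_streams = 2,
 *         .streams = streams,
 *         // remaining fields (operation_mode, per-stream data_space,
 *         // rotation, usage, max_buffers) omitted for brevity
 *     };
 *     // 'config' is then passed to camera3_device_ops.configure_streams().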
1369 * 1370 * Note that the HAL will always be reprocessing data it produced. 1371 * 1372 */ 1373 CAMERA3_STREAM_BIDIRECTIONAL = 2, 1374 1375 /** 1376 * Total number of framework-defined stream types 1377 */ 1378 CAMERA3_NUM_STREAM_TYPES 1379 1380 } camera3_stream_type_t; 1381 1382 /** 1383 * camera3_stream_rotation_t: 1384 * 1385 * The required counterclockwise rotation of camera stream. 1386 */ 1387 typedef enum camera3_stream_rotation { 1388 /* No rotation */ 1389 CAMERA3_STREAM_ROTATION_0 = 0, 1390 1391 /* Rotate by 90 degree counterclockwise */ 1392 CAMERA3_STREAM_ROTATION_90 = 1, 1393 1394 /* Rotate by 180 degree counterclockwise */ 1395 CAMERA3_STREAM_ROTATION_180 = 2, 1396 1397 /* Rotate by 270 degree counterclockwise */ 1398 CAMERA3_STREAM_ROTATION_270 = 3 1399 } camera3_stream_rotation_t; 1400 1401 /** 1402 * camera3_stream_configuration_mode_t: 1403 * 1404 * This defines the general operation mode for the HAL (for a given stream configuration), where 1405 * modes besides NORMAL have different semantics, and usually limit the generality of the API in 1406 * exchange for higher performance in some particular area. 1407 */ 1408 typedef enum camera3_stream_configuration_mode { 1409 /** 1410 * Normal stream configuration operation mode. This is the default camera operation mode, 1411 * where all semantics of HAL APIs and metadata controls apply. 1412 */ 1413 CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE = 0, 1414 1415 /** 1416 * Special constrained high speed operation mode for devices that can not support high 1417 * speed output in NORMAL mode. All streams in this configuration are operating at high speed 1418 * mode and have different characteristics and limitations to achieve high speed output. 1419 * The NORMAL mode can still be used for high speed output if the HAL can support high speed 1420 * output while satisfying all the semantics of HAL APIs and metadata controls. It is 1421 * recommended for the HAL to support high speed output in NORMAL mode (by advertising the high 1422 * speed FPS ranges in android.control.aeAvailableTargetFpsRanges) if possible. 1423 * 1424 * This mode has below limitations/requirements: 1425 * 1426 * 1. The HAL must support up to 2 streams with sizes reported by 1427 * android.control.availableHighSpeedVideoConfigurations. 1428 * 2. In this mode, the HAL is expected to output up to 120fps or higher. This mode must 1429 * support the targeted FPS range and size configurations reported by 1430 * android.control.availableHighSpeedVideoConfigurations. 1431 * 3. The HAL must support HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED output stream format. 1432 * 4. To achieve efficient high speed streaming, the HAL may have to aggregate 1433 * multiple frames together and send to camera device for processing where the request 1434 * controls are same for all the frames in this batch (batch mode). The HAL must support 1435 * max batch size and the max batch size requirements defined by 1436 * android.control.availableHighSpeedVideoConfigurations. 1437 * 5. In this mode, the HAL must override aeMode, awbMode, and afMode to ON, ON, and 1438 * CONTINUOUS_VIDEO, respectively. All post-processing block mode controls must be 1439 * overridden to be FAST. Therefore, no manual control of capture and post-processing 1440 * parameters is possible. All other controls operate the same as when 1441 * android.control.mode == AUTO. 
This means that all other android.control.* fields 1442 * must continue to work, such as 1443 * 1444 * android.control.aeTargetFpsRange 1445 * android.control.aeExposureCompensation 1446 * android.control.aeLock 1447 * android.control.awbLock 1448 * android.control.effectMode 1449 * android.control.aeRegions 1450 * android.control.afRegions 1451 * android.control.awbRegions 1452 * android.control.afTrigger 1453 * android.control.aePrecaptureTrigger 1454 * 1455 * Outside of android.control.*, the following controls must work: 1456 * 1457 * android.flash.mode (TORCH mode only, automatic flash for still capture will not work 1458 * since aeMode is ON) 1459 * android.lens.opticalStabilizationMode (if it is supported) 1460 * android.scaler.cropRegion 1461 * android.statistics.faceDetectMode (if it is supported) 1462 * 1463 * For more details about high speed stream requirements, see 1464 * android.control.availableHighSpeedVideoConfigurations and CONSTRAINED_HIGH_SPEED_VIDEO 1465 * capability defined in android.request.availableCapabilities. 1466 * 1467 * This mode only needs to be supported by HALs that include CONSTRAINED_HIGH_SPEED_VIDEO in 1468 * the android.request.availableCapabilities static metadata. 1469 */ 1470 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE = 1, 1471 1472 /** 1473 * First value for vendor-defined stream configuration modes. 1474 */ 1475 CAMERA3_VENDOR_STREAM_CONFIGURATION_MODE_START = 0x8000 1476 } camera3_stream_configuration_mode_t; 1477 1478 /** 1479 * camera3_stream_t: 1480 * 1481 * A handle to a single camera input or output stream. A stream is defined by 1482 * the framework by its buffer resolution and format, and additionally by the 1483 * HAL with the gralloc usage flags and the maximum in-flight buffer count. 1484 * 1485 * The stream structures are owned by the framework, but pointers to a 1486 * camera3_stream passed into the HAL by configure_streams() are valid until the 1487 * end of the first subsequent configure_streams() call that _does not_ include 1488 * that camera3_stream as an argument, or until the end of the close() call. 1489 * 1490 * All camera3_stream framework-controlled members are immutable once the 1491 * camera3_stream is passed into configure_streams(). The HAL may only change 1492 * the HAL-controlled parameters during a configure_streams() call, except for 1493 * the contents of the private pointer. 1494 * 1495 * If a configure_streams() call returns a non-fatal error, all active streams 1496 * remain valid as if configure_streams() had not been called. 1497 * 1498 * The endpoint of the stream is not visible to the camera HAL device. 1499 * In DEVICE_API_VERSION_3_1, this was changed to share consumer usage flags 1500 * on streams where the camera is a producer (OUTPUT and BIDIRECTIONAL stream 1501 * types) see the usage field below. 1502 */ 1503 typedef struct camera3_stream { 1504 1505 /***** 1506 * Set by framework before configure_streams() 1507 */ 1508 1509 /** 1510 * The type of the stream, one of the camera3_stream_type_t values. 1511 */ 1512 int stream_type; 1513 1514 /** 1515 * The width in pixels of the buffers in this stream 1516 */ 1517 uint32_t width; 1518 1519 /** 1520 * The height in pixels of the buffers in this stream 1521 */ 1522 uint32_t height; 1523 1524 /** 1525 * The pixel format for the buffers in this stream. Format is a value from 1526 * the HAL_PIXEL_FORMAT_* list in system/core/include/system/graphics.h, or 1527 * from device-specific headers. 
1528 * 1529 * If HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED is used, then the platform 1530 * gralloc module will select a format based on the usage flags provided by 1531 * the camera device and the other endpoint of the stream. 1532 * 1533 * <= CAMERA_DEVICE_API_VERSION_3_1: 1534 * 1535 * The camera HAL device must inspect the buffers handed to it in the 1536 * subsequent register_stream_buffers() call to obtain the 1537 * implementation-specific format details, if necessary. 1538 * 1539 * >= CAMERA_DEVICE_API_VERSION_3_2: 1540 * 1541 * register_stream_buffers() won't be called by the framework, so the HAL 1542 * should configure the ISP and sensor pipeline based purely on the sizes, 1543 * usage flags, and formats for the configured streams. 1544 */ 1545 int format; 1546 1547 /***** 1548 * Set by HAL during configure_streams(). 1549 */ 1550 1551 /** 1552 * The gralloc usage flags for this stream, as needed by the HAL. The usage 1553 * flags are defined in gralloc.h (GRALLOC_USAGE_*), or in device-specific 1554 * headers. 1555 * 1556 * For output streams, these are the HAL's producer usage flags. For input 1557 * streams, these are the HAL's consumer usage flags. The usage flags from 1558 * the producer and the consumer will be combined together and then passed 1559 * to the platform gralloc HAL module for allocating the gralloc buffers for 1560 * each stream. 1561 * 1562 * Version information: 1563 * 1564 * == CAMERA_DEVICE_API_VERSION_3_0: 1565 * 1566 * No initial value guaranteed when passed via configure_streams(). 1567 * HAL may not use this field as input, and must write over this field 1568 * with its usage flags. 1569 * 1570 * >= CAMERA_DEVICE_API_VERSION_3_1: 1571 * 1572 * For stream_type OUTPUT and BIDIRECTIONAL, when passed via 1573 * configure_streams(), the initial value of this is the consumer's 1574 * usage flags. The HAL may use these consumer flags to decide stream 1575 * configuration. 1576 * For stream_type INPUT, when passed via configure_streams(), the initial 1577 * value of this is 0. 1578 * For all streams passed via configure_streams(), the HAL must write 1579 * over this field with its usage flags. 1580 */ 1581 uint32_t usage; 1582 1583 /** 1584 * The maximum number of buffers the HAL device may need to have dequeued at 1585 * the same time. The HAL device may not have more buffers in-flight from 1586 * this stream than this value. 1587 */ 1588 uint32_t max_buffers; 1589 1590 /** 1591 * A handle to HAL-private information for the stream. Will not be inspected 1592 * by the framework code. 1593 */ 1594 void *priv; 1595 1596 /** 1597 * A field that describes the contents of the buffer. The format and buffer 1598 * dimensions define the memory layout and structure of the stream buffers, 1599 * while dataSpace defines the meaning of the data within the buffer. 1600 * 1601 * For most formats, dataSpace defines the color space of the image data. 1602 * In addition, for some formats, dataSpace indicates whether image- or 1603 * depth-based data is requested. See system/core/include/system/graphics.h 1604 * for details of formats and valid dataSpace values for each format. 1605 * 1606 * Version information: 1607 * 1608 * < CAMERA_DEVICE_API_VERSION_3_3: 1609 * 1610 * Not defined and should not be accessed. dataSpace should be assumed to 1611 * be HAL_DATASPACE_UNKNOWN, and the appropriate color space, etc, should 1612 * be determined from the usage flags and the format. 1613 * 1614 * >= CAMERA_DEVICE_API_VERSION_3_3: 1615 * 1616 * Always set by the camera service. 
HAL must use this dataSpace to 1617 * configure the stream to the correct colorspace, or to select between 1618 * color and depth outputs if supported. 1619 */ 1620 android_dataspace_t data_space; 1621 1622 /** 1623 * The required output rotation of the stream, one of 1624 * the camera3_stream_rotation_t values. This must be inspected by the HAL along 1625 * with the stream width and height. For example, if the rotation is 90 degrees 1626 * and the stream width and height are 720 and 1280 respectively, the camera service 1627 * will supply buffers of size 720x1280, and the HAL should capture a 1280x720 image 1628 * and rotate the image by 90 degrees counterclockwise. The rotation field is 1629 * a no-op when the stream type is input. The camera HAL must ignore the rotation 1630 * field for an input stream. 1631 * 1632 * <= CAMERA_DEVICE_API_VERSION_3_2: 1633 * 1634 * Not defined and must not be accessed. HAL must not apply any rotation 1635 * on output images. 1636 * 1637 * >= CAMERA_DEVICE_API_VERSION_3_3: 1638 * 1639 * Always set by the camera service. The HAL must inspect this field during stream 1640 * configuration and return -EINVAL if the HAL cannot perform such a rotation. 1641 * The HAL must always support CAMERA3_STREAM_ROTATION_0, so a 1642 * configure_streams() call must not fail for unsupported rotation if the 1643 * rotation field of all streams is CAMERA3_STREAM_ROTATION_0. 1644 * 1645 */ 1646 int rotation; 1647 1648 /* reserved for future use */ 1649 void *reserved[7]; 1650 1651 } camera3_stream_t; 1652 1653 /** 1654 * camera3_stream_configuration_t: 1655 * 1656 * A structure of stream definitions, used by configure_streams(). This 1657 * structure defines all the output streams and the reprocessing input 1658 * stream for the current camera use case. 1659 */ 1660 typedef struct camera3_stream_configuration { 1661 /** 1662 * The total number of streams requested by the framework. This includes 1663 * both input and output streams. The number of streams will be at least 1, 1664 * and there will be at least one output-capable stream. 1665 */ 1666 uint32_t num_streams; 1667 1668 /** 1669 * An array of camera stream pointers, defining the input/output 1670 * configuration for the camera HAL device. 1671 * 1672 * At most one input-capable stream may be defined (INPUT or BIDIRECTIONAL) 1673 * in a single configuration. 1674 * 1675 * At least one output-capable stream must be defined (OUTPUT or 1676 * BIDIRECTIONAL). 1677 */ 1678 camera3_stream_t **streams; 1679 1680 /** 1681 * >= CAMERA_DEVICE_API_VERSION_3_3: 1682 * 1683 * The operation mode of streams in this configuration, one of the values defined in 1684 * camera3_stream_configuration_mode_t. 1685 * The HAL can use this mode as an indicator to set the stream property (e.g., 1686 * camera3_stream->max_buffers) appropriately. For example, if the configuration is 1687 * CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE, the HAL may want to set aside more 1688 * buffers for batch mode operation (see android.control.availableHighSpeedVideoConfigurations 1689 * for the batch mode definition). 1690 * 1691 */ 1692 uint32_t operation_mode; 1693 } camera3_stream_configuration_t; 1694 1695 /** 1696 * camera3_buffer_status_t: 1697 * 1698 * The current status of a single stream buffer. 1699 */ 1700 typedef enum camera3_buffer_status { 1701 /** 1702 * The buffer is in a normal state, and can be used after waiting on its 1703 * sync fence.
1704 */ 1705 CAMERA3_BUFFER_STATUS_OK = 0, 1706 1707 /** 1708 * The buffer does not contain valid data, and the data in it should not be 1709 * used. The sync fence must still be waited on before reusing the buffer. 1710 */ 1711 CAMERA3_BUFFER_STATUS_ERROR = 1 1712 1713 } camera3_buffer_status_t; 1714 1715 /** 1716 * camera3_stream_buffer_t: 1717 * 1718 * A single buffer from a camera3 stream. It includes a handle to its parent 1719 * stream, the handle to the gralloc buffer itself, and sync fences 1720 * 1721 * The buffer does not specify whether it is to be used for input or output; 1722 * that is determined by its parent stream type and how the buffer is passed to 1723 * the HAL device. 1724 */ 1725 typedef struct camera3_stream_buffer { 1726 /** 1727 * The handle of the stream this buffer is associated with 1728 */ 1729 camera3_stream_t *stream; 1730 1731 /** 1732 * The native handle to the buffer 1733 */ 1734 buffer_handle_t *buffer; 1735 1736 /** 1737 * Current state of the buffer, one of the camera3_buffer_status_t 1738 * values. The framework will not pass buffers to the HAL that are in an 1739 * error state. In case a buffer could not be filled by the HAL, it must 1740 * have its status set to CAMERA3_BUFFER_STATUS_ERROR when returned to the 1741 * framework with process_capture_result(). 1742 */ 1743 int status; 1744 1745 /** 1746 * The acquire sync fence for this buffer. The HAL must wait on this fence 1747 * fd before attempting to read from or write to this buffer. 1748 * 1749 * The framework may be set to -1 to indicate that no waiting is necessary 1750 * for this buffer. 1751 * 1752 * When the HAL returns an output buffer to the framework with 1753 * process_capture_result(), the acquire_fence must be set to -1. If the HAL 1754 * never waits on the acquire_fence due to an error in filling a buffer, 1755 * when calling process_capture_result() the HAL must set the release_fence 1756 * of the buffer to be the acquire_fence passed to it by the framework. This 1757 * will allow the framework to wait on the fence before reusing the buffer. 1758 * 1759 * For input buffers, the HAL must not change the acquire_fence field during 1760 * the process_capture_request() call. 1761 * 1762 * >= CAMERA_DEVICE_API_VERSION_3_2: 1763 * 1764 * When the HAL returns an input buffer to the framework with 1765 * process_capture_result(), the acquire_fence must be set to -1. If the HAL 1766 * never waits on input buffer acquire fence due to an error, the sync 1767 * fences should be handled similarly to the way they are handled for output 1768 * buffers. 1769 */ 1770 int acquire_fence; 1771 1772 /** 1773 * The release sync fence for this buffer. The HAL must set this fence when 1774 * returning buffers to the framework, or write -1 to indicate that no 1775 * waiting is required for this buffer. 1776 * 1777 * For the output buffers, the fences must be set in the output_buffers 1778 * array passed to process_capture_result(). 1779 * 1780 * <= CAMERA_DEVICE_API_VERSION_3_1: 1781 * 1782 * For the input buffer, the release fence must be set by the 1783 * process_capture_request() call. 1784 * 1785 * >= CAMERA_DEVICE_API_VERSION_3_2: 1786 * 1787 * For the input buffer, the fences must be set in the input_buffer 1788 * passed to process_capture_result(). 1789 * 1790 * After signaling the release_fence for this buffer, the HAL 1791 * should not make any further attempts to access this buffer as the 1792 * ownership has been fully transferred back to the framework. 
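 *
 * As an illustrative sketch only (the surrounding result plumbing is omitted,
 * and 'request_buffer' is a placeholder name), a HAL that fails to fill a
 * buffer without ever waiting on its acquire fence would hand that fence back
 * as the release fence when returning the buffer:
 *
 *     camera3_stream_buffer_t out = *request_buffer;  // copy from the request
 *     out.status = CAMERA3_BUFFER_STATUS_ERROR;        // buffer was not filled
 *     out.release_fence = out.acquire_fence;  // framework can still wait on it
 *     out.acquire_fence = -1;                 // must be -1 when returned
 *     // 'out' is then placed in the output_buffers array of a
 *     // camera3_capture_result_t passed to process_capture_result().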
1793 * 1794 * If a fence of -1 was specified then the ownership of this buffer 1795 * is transferred back immediately upon the call of process_capture_result. 1796 */ 1797 int release_fence; 1798 1799 } camera3_stream_buffer_t; 1800 1801 /** 1802 * camera3_stream_buffer_set_t: 1803 * 1804 * The complete set of gralloc buffers for a stream. This structure is given to 1805 * register_stream_buffers() to allow the camera HAL device to register/map/etc 1806 * newly allocated stream buffers. 1807 * 1808 * >= CAMERA_DEVICE_API_VERSION_3_2: 1809 * 1810 * Deprecated (and not used). In particular, 1811 * register_stream_buffers is also deprecated and will never be invoked. 1812 * 1813 */ 1814 typedef struct camera3_stream_buffer_set { 1815 /** 1816 * The stream handle for the stream these buffers belong to 1817 */ 1818 camera3_stream_t *stream; 1819 1820 /** 1821 * The number of buffers in this stream. It is guaranteed to be at least 1822 * stream->max_buffers. 1823 */ 1824 uint32_t num_buffers; 1825 1826 /** 1827 * The array of gralloc buffer handles for this stream. If the stream format 1828 * is set to HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, the camera HAL device 1829 * should inspect the passed-in buffers to determine any platform-private 1830 * pixel format information. 1831 */ 1832 buffer_handle_t **buffers; 1833 1834 } camera3_stream_buffer_set_t; 1835 1836 /** 1837 * camera3_jpeg_blob: 1838 * 1839 * Transport header for compressed JPEG buffers in output streams. 1840 * 1841 * To capture JPEG images, a stream is created using the pixel format 1842 * HAL_PIXEL_FORMAT_BLOB. The buffer size for the stream is calculated by the 1843 * framework, based on the static metadata field android.jpeg.maxSize. Since 1844 * compressed JPEG images are of variable size, the HAL needs to include the 1845 * final size of the compressed image using this structure inside the output 1846 * stream buffer. The JPEG blob ID field must be set to CAMERA3_JPEG_BLOB_ID. 1847 * 1848 * Transport header should be at the end of the JPEG output stream buffer. That 1849 * means the jpeg_blob_id must start at byte[buffer_size - 1850 * sizeof(camera3_jpeg_blob)], where the buffer_size is the size of gralloc buffer. 1851 * Any HAL using this transport header must account for it in android.jpeg.maxSize 1852 * The JPEG data itself starts at the beginning of the buffer and should be 1853 * jpeg_size bytes long. 1854 */ 1855 typedef struct camera3_jpeg_blob { 1856 uint16_t jpeg_blob_id; 1857 uint32_t jpeg_size; 1858 } camera3_jpeg_blob_t; 1859 1860 enum { 1861 CAMERA3_JPEG_BLOB_ID = 0x00FF 1862 }; 1863 1864 /********************************************************************** 1865 * 1866 * Message definitions for the HAL notify() callback. 1867 * 1868 * These definitions are used for the HAL notify callback, to signal 1869 * asynchronous events from the HAL device to the Android framework. 1870 * 1871 */ 1872 1873 /** 1874 * camera3_msg_type: 1875 * 1876 * Indicates the type of message sent, which specifies which member of the 1877 * message union is valid. 1878 * 1879 */ 1880 typedef enum camera3_msg_type { 1881 /** 1882 * An error has occurred. camera3_notify_msg.message.error contains the 1883 * error information. 1884 */ 1885 CAMERA3_MSG_ERROR = 1, 1886 1887 /** 1888 * The exposure of a given request or processing a reprocess request has 1889 * begun. camera3_notify_msg.message.shutter contains the information 1890 * the capture. 
1891 */ 1892 CAMERA3_MSG_SHUTTER = 2, 1893 1894 /** 1895 * Number of framework message types 1896 */ 1897 CAMERA3_NUM_MESSAGES 1898 1899 } camera3_msg_type_t; 1900 1901 /** 1902 * Defined error codes for CAMERA_MSG_ERROR 1903 */ 1904 typedef enum camera3_error_msg_code { 1905 /** 1906 * A serious failure occured. No further frames or buffer streams will 1907 * be produced by the device. Device should be treated as closed. The 1908 * client must reopen the device to use it again. The frame_number field 1909 * is unused. 1910 */ 1911 CAMERA3_MSG_ERROR_DEVICE = 1, 1912 1913 /** 1914 * An error has occurred in processing a request. No output (metadata or 1915 * buffers) will be produced for this request. The frame_number field 1916 * specifies which request has been dropped. Subsequent requests are 1917 * unaffected, and the device remains operational. 1918 */ 1919 CAMERA3_MSG_ERROR_REQUEST = 2, 1920 1921 /** 1922 * An error has occurred in producing an output result metadata buffer 1923 * for a request, but output stream buffers for it will still be 1924 * available. Subsequent requests are unaffected, and the device remains 1925 * operational. The frame_number field specifies the request for which 1926 * result metadata won't be available. 1927 */ 1928 CAMERA3_MSG_ERROR_RESULT = 3, 1929 1930 /** 1931 * An error has occurred in placing an output buffer into a stream for a 1932 * request. The frame metadata and other buffers may still be 1933 * available. Subsequent requests are unaffected, and the device remains 1934 * operational. The frame_number field specifies the request for which the 1935 * buffer was dropped, and error_stream contains a pointer to the stream 1936 * that dropped the frame.u 1937 */ 1938 CAMERA3_MSG_ERROR_BUFFER = 4, 1939 1940 /** 1941 * Number of error types 1942 */ 1943 CAMERA3_MSG_NUM_ERRORS 1944 1945 } camera3_error_msg_code_t; 1946 1947 /** 1948 * camera3_error_msg_t: 1949 * 1950 * Message contents for CAMERA3_MSG_ERROR 1951 */ 1952 typedef struct camera3_error_msg { 1953 /** 1954 * Frame number of the request the error applies to. 0 if the frame number 1955 * isn't applicable to the error. 1956 */ 1957 uint32_t frame_number; 1958 1959 /** 1960 * Pointer to the stream that had a failure. NULL if the stream isn't 1961 * applicable to the error. 1962 */ 1963 camera3_stream_t *error_stream; 1964 1965 /** 1966 * The code for this error; one of the CAMERA_MSG_ERROR enum values. 1967 */ 1968 int error_code; 1969 1970 } camera3_error_msg_t; 1971 1972 /** 1973 * camera3_shutter_msg_t: 1974 * 1975 * Message contents for CAMERA3_MSG_SHUTTER 1976 */ 1977 typedef struct camera3_shutter_msg { 1978 /** 1979 * Frame number of the request that has begun exposure or reprocessing. 1980 */ 1981 uint32_t frame_number; 1982 1983 /** 1984 * Timestamp for the start of capture. For a reprocess request, this must 1985 * be input image's start of capture. This must match the capture result 1986 * metadata's sensor exposure start timestamp. 1987 */ 1988 uint64_t timestamp; 1989 1990 } camera3_shutter_msg_t; 1991 1992 /** 1993 * camera3_notify_msg_t: 1994 * 1995 * The message structure sent to camera3_callback_ops_t.notify() 1996 */ 1997 typedef struct camera3_notify_msg { 1998 1999 /** 2000 * The message type. One of camera3_notify_msg_type, or a private extension. 2001 */ 2002 int type; 2003 2004 union { 2005 /** 2006 * Error message contents. Valid if type is CAMERA3_MSG_ERROR 2007 */ 2008 camera3_error_msg_t error; 2009 2010 /** 2011 * Shutter message contents. 
Valid if type is CAMERA3_MSG_SHUTTER 2012 */ 2013 camera3_shutter_msg_t shutter; 2014 2015 /** 2016 * Generic message contents. Used to ensure a minimum size for custom 2017 * message types. 2018 */ 2019 uint8_t generic[32]; 2020 } message; 2021 2022 } camera3_notify_msg_t; 2023 2024 /********************************************************************** 2025 * 2026 * Capture request/result definitions for the HAL process_capture_request() 2027 * method, and the process_capture_result() callback. 2028 * 2029 */ 2030 2031 /** 2032 * camera3_request_template_t: 2033 * 2034 * Available template types for 2035 * camera3_device_ops.construct_default_request_settings() 2036 */ 2037 typedef enum camera3_request_template { 2038 /** 2039 * Standard camera preview operation with 3A on auto. 2040 */ 2041 CAMERA3_TEMPLATE_PREVIEW = 1, 2042 2043 /** 2044 * Standard camera high-quality still capture with 3A and flash on auto. 2045 */ 2046 CAMERA3_TEMPLATE_STILL_CAPTURE = 2, 2047 2048 /** 2049 * Standard video recording plus preview with 3A on auto, torch off. 2050 */ 2051 CAMERA3_TEMPLATE_VIDEO_RECORD = 3, 2052 2053 /** 2054 * High-quality still capture while recording video. Application will 2055 * include preview, video record, and full-resolution YUV or JPEG streams in 2056 * request. Must not cause stuttering on video stream. 3A on auto. 2057 */ 2058 CAMERA3_TEMPLATE_VIDEO_SNAPSHOT = 4, 2059 2060 /** 2061 * Zero-shutter-lag mode. Application will request preview and 2062 * full-resolution data for each frame, and reprocess it to JPEG when a 2063 * still image is requested by user. Settings should provide highest-quality 2064 * full-resolution images without compromising preview frame rate. 3A on 2065 * auto. 2066 */ 2067 CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG = 5, 2068 2069 /** 2070 * A basic template for direct application control of capture 2071 * parameters. All automatic control is disabled (auto-exposure, auto-white 2072 * balance, auto-focus), and post-processing parameters are set to preview 2073 * quality. The manual capture parameters (exposure, sensitivity, etc.) 2074 * are set to reasonable defaults, but should be overridden by the 2075 * application depending on the intended use case. 2076 */ 2077 CAMERA3_TEMPLATE_MANUAL = 6, 2078 2079 /* Total number of templates */ 2080 CAMERA3_TEMPLATE_COUNT, 2081 2082 /** 2083 * First value for vendor-defined request templates 2084 */ 2085 CAMERA3_VENDOR_TEMPLATE_START = 0x40000000 2086 2087 } camera3_request_template_t; 2088 2089 /** 2090 * camera3_capture_request_t: 2091 * 2092 * A single request for image capture/buffer reprocessing, sent to the Camera 2093 * HAL device by the framework in process_capture_request(). 2094 * 2095 * The request contains the settings to be used for this capture, and the set of 2096 * output buffers to write the resulting image data in. It may optionally 2097 * contain an input buffer, in which case the request is for reprocessing that 2098 * input buffer instead of capturing a new image with the camera sensor. The 2099 * capture is identified by the frame_number. 2100 * 2101 * In response, the camera HAL device must send a camera3_capture_result 2102 * structure asynchronously to the framework, using the process_capture_result() 2103 * callback. 2104 */ 2105 typedef struct camera3_capture_request { 2106 /** 2107 * The frame number is an incrementing integer set by the framework to 2108 * uniquely identify this capture. 
It needs to be returned in the result 2109 * call, and is also used to identify the request in asynchronous 2110 * notifications sent to camera3_callback_ops_t.notify(). 2111 */ 2112 uint32_t frame_number; 2113 2114 /** 2115 * The settings buffer contains the capture and processing parameters for 2116 * the request. As a special case, a NULL settings buffer indicates that the 2117 * settings are identical to the most-recently submitted capture request. A 2118 * NULL buffer cannot be used as the first submitted request after a 2119 * configure_streams() call. 2120 */ 2121 const camera_metadata_t *settings; 2122 2123 /** 2124 * The input stream buffer to use for this request, if any. 2125 * 2126 * If input_buffer is NULL, then the request is for a new capture from the 2127 * imager. If input_buffer is valid, the request is for reprocessing the 2128 * image contained in input_buffer. 2129 * 2130 * In the latter case, the HAL must set the release_fence of the 2131 * input_buffer to a valid sync fence, or to -1 if the HAL does not support 2132 * sync, before process_capture_request() returns. 2133 * 2134 * The HAL is required to wait on the acquire sync fence of the input buffer 2135 * before accessing it. 2136 * 2137 * <= CAMERA_DEVICE_API_VERSION_3_1: 2138 * 2139 * Any input buffer included here will have been registered with the HAL 2140 * through register_stream_buffers() before its inclusion in a request. 2141 * 2142 * >= CAMERA_DEVICE_API_VERSION_3_2: 2143 * 2144 * The buffers will not have been pre-registered with the HAL. 2145 * Subsequent requests may reuse buffers, or provide entirely new buffers. 2146 */ 2147 camera3_stream_buffer_t *input_buffer; 2148 2149 /** 2150 * The number of output buffers for this capture request. Must be at least 2151 * 1. 2152 */ 2153 uint32_t num_output_buffers; 2154 2155 /** 2156 * An array of num_output_buffers stream buffers, to be filled with image 2157 * data from this capture/reprocess. The HAL must wait on the acquire fences 2158 * of each stream buffer before writing to them. 2159 * 2160 * The HAL takes ownership of the actual buffer_handle_t entries in 2161 * output_buffers; the framework does not access them until they are 2162 * returned in a camera3_capture_result_t. 2163 * 2164 * <= CAMERA_DEVICE_API_VERSION_3_1: 2165 * 2166 * All the buffers included here will have been registered with the HAL 2167 * through register_stream_buffers() before their inclusion in a request. 2168 * 2169 * >= CAMERA_DEVICE_API_VERSION_3_2: 2170 * 2171 * Any or all of the buffers included here may be brand new in this 2172 * request (having never before seen by the HAL). 2173 */ 2174 const camera3_stream_buffer_t *output_buffers; 2175 2176 } camera3_capture_request_t; 2177 2178 /** 2179 * camera3_capture_result_t: 2180 * 2181 * The result of a single capture/reprocess by the camera HAL device. This is 2182 * sent to the framework asynchronously with process_capture_result(), in 2183 * response to a single capture request sent to the HAL with 2184 * process_capture_request(). Multiple process_capture_result() calls may be 2185 * performed by the HAL for each request. 2186 * 2187 * Each call, all with the same frame 2188 * number, may contain some subset of the output buffers, and/or the result 2189 * metadata. The metadata may only be provided once for a given frame number; 2190 * all other calls must set the result metadata to NULL. 
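 *
 * For illustration only (not a requirement of this API; names such as
 * 'callbacks', 'metadata', and 'jpeg_buffer' are placeholders), a HAL might
 * return the metadata for a frame as soon as it is ready, and a slower JPEG
 * buffer in a later call:
 *
 *     camera3_capture_result_t meta = {
 *         .frame_number = frame_number,
 *         .result = metadata,            // full result metadata, sent once
 *         .num_output_buffers = 0,
 *         .output_buffers = NULL,
 *     };
 *     callbacks->process_capture_result(callbacks, &meta);
 *
 *     // ... later, once the JPEG stream buffer has a release fence ...
 *     camera3_capture_result_t bufs = {
 *         .frame_number = frame_number,
 *         .result = NULL,                // metadata already sent above
 *         .num_output_buffers = 1,
 *         .output_buffers = &jpeg_buffer,
 *     };
 *     callbacks->process_capture_result(callbacks, &bufs);
 *
 *     // (For >= CAMERA_DEVICE_API_VERSION_3_2, partial_result handling is
 *     // omitted here for brevity; see the partial_result field below.)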
2191 * 2192 * The result structure contains the output metadata from this capture, and the 2193 * set of output buffers that have been/will be filled for this capture. Each 2194 * output buffer may come with a release sync fence that the framework will wait 2195 * on before reading, in case the buffer has not yet been filled by the HAL. 2196 * 2197 * >= CAMERA_DEVICE_API_VERSION_3_2: 2198 * 2199 * The metadata may be provided multiple times for a single frame number. The 2200 * framework will accumulate together the final result set by combining each 2201 * partial result together into the total result set. 2202 * 2203 * If an input buffer is given in a request, the HAL must return it in one of 2204 * the process_capture_result calls, and the call may be to just return the input 2205 * buffer, without metadata and output buffers; the sync fences must be handled 2206 * the same way they are done for output buffers. 2207 * 2208 * 2209 * Performance considerations: 2210 * 2211 * Applications will also receive these partial results immediately, so sending 2212 * partial results is a highly recommended performance optimization to avoid 2213 * the total pipeline latency before sending the results for what is known very 2214 * early on in the pipeline. 2215 * 2216 * A typical use case might be calculating the AF state halfway through the 2217 * pipeline; by sending the state back to the framework immediately, we get a 2218 * 50% performance increase and perceived responsiveness of the auto-focus. 2219 * 2220 */ 2221 typedef struct camera3_capture_result { 2222 /** 2223 * The frame number is an incrementing integer set by the framework in the 2224 * submitted request to uniquely identify this capture. It is also used to 2225 * identify the request in asynchronous notifications sent to 2226 * camera3_callback_ops_t.notify(). 2227 */ 2228 uint32_t frame_number; 2229 2230 /** 2231 * The result metadata for this capture. This contains information about the 2232 * final capture parameters, the state of the capture and post-processing 2233 * hardware, the state of the 3A algorithms, if enabled, and the output of 2234 * any enabled statistics units. 2235 * 2236 * Only one call to process_capture_result() with a given frame_number may 2237 * include the result metadata. All other calls for the same frame_number 2238 * must set this to NULL. 2239 * 2240 * If there was an error producing the result metadata, result must be an 2241 * empty metadata buffer, and notify() must be called with ERROR_RESULT. 2242 * 2243 * >= CAMERA_DEVICE_API_VERSION_3_2: 2244 * 2245 * Multiple calls to process_capture_result() with a given frame_number 2246 * may include the result metadata. 2247 * 2248 * Partial metadata submitted should not include any metadata key returned 2249 * in a previous partial result for a given frame. Each new partial result 2250 * for that frame must also set a distinct partial_result value. 2251 * 2252 * If notify has been called with ERROR_RESULT, all further partial 2253 * results for that frame are ignored by the framework. 2254 */ 2255 const camera_metadata_t *result; 2256 2257 /** 2258 * The number of output buffers returned in this result structure. Must be 2259 * less than or equal to the matching capture request's count. If this is 2260 * less than the buffer count in the capture request, at least one more call 2261 * to process_capture_result with the same frame_number must be made, to 2262 * return the remaining output buffers to the framework. 
This may only be 2263 * zero if the structure includes valid result metadata or an input buffer 2264 * is returned in this result. 2265 */ 2266 uint32_t num_output_buffers; 2267 2268 /** 2269 * The handles for the output stream buffers for this capture. They may not 2270 * yet be filled at the time the HAL calls process_capture_result(); the 2271 * framework will wait on the release sync fences provided by the HAL before 2272 * reading the buffers. 2273 * 2274 * The HAL must set the stream buffer's release sync fence to a valid sync 2275 * fd, or to -1 if the buffer has already been filled. 2276 * 2277 * If the HAL encounters an error while processing the buffer, and the 2278 * buffer is not filled, the buffer's status field must be set to 2279 * CAMERA3_BUFFER_STATUS_ERROR. If the HAL did not wait on the acquire fence 2280 * before encountering the error, the acquire fence should be copied into 2281 * the release fence, to allow the framework to wait on the fence before 2282 * reusing the buffer. 2283 * 2284 * The acquire fence must be set to -1 for all output buffers. If 2285 * num_output_buffers is zero, this may be NULL. In that case, at least one 2286 * more process_capture_result call must be made by the HAL to provide the 2287 * output buffers. 2288 * 2289 * When process_capture_result is called with a new buffer for a frame, 2290 * all previous frames' buffers for that corresponding stream must have been 2291 * already delivered (the fences need not have yet been signaled). 2292 * 2293 * >= CAMERA_DEVICE_API_VERSION_3_2: 2294 * 2295 * Gralloc buffers for a frame may be sent to framework before the 2296 * corresponding SHUTTER-notify. 2297 * 2298 * Performance considerations: 2299 * 2300 * Buffers delivered to the framework will not be dispatched to the 2301 * application layer until a start of exposure timestamp has been received 2302 * via a SHUTTER notify() call. It is highly recommended to 2303 * dispatch that call as early as possible. 2304 */ 2305 const camera3_stream_buffer_t *output_buffers; 2306 2307 /** 2308 * >= CAMERA_DEVICE_API_VERSION_3_2: 2309 * 2310 * The handle for the input stream buffer for this capture. It may not 2311 * yet be consumed at the time the HAL calls process_capture_result(); the 2312 * framework will wait on the release sync fences provided by the HAL before 2313 * reusing the buffer. 2314 * 2315 * The HAL should handle the sync fences the same way they are done for 2316 * output_buffers. 2317 * 2318 * Only one input buffer is allowed to be sent per request. Similarly to 2319 * output buffers, the ordering of returned input buffers must be 2320 * maintained by the HAL. 2321 * 2322 * Performance considerations: 2323 * 2324 * The input buffer should be returned as early as possible. If the HAL 2325 * supports sync fences, it can call process_capture_result to hand it back 2326 * with sync fences being set appropriately. If the sync fences are not 2327 * supported, the buffer can only be returned when it is consumed, which 2328 * may take long time; the HAL may choose to copy this input buffer to make 2329 * the buffer return sooner. 2330 */ 2331 const camera3_stream_buffer_t *input_buffer; 2332 2333 /** 2334 * >= CAMERA_DEVICE_API_VERSION_3_2: 2335 * 2336 * In order to take advantage of partial results, the HAL must set the 2337 * static metadata android.request.partialResultCount to the number of 2338 * partial results it will send for each frame. 
2339 * 2340 * Each new capture result with a partial result must set 2341 * this field (partial_result) to a distinct inclusive value between 2342 * 1 and android.request.partialResultCount. 2343 * 2344 * HALs not wishing to take advantage of this feature must not 2345 * set an android.request.partialResultCount or partial_result to a value 2346 * other than 1. 2347 * 2348 * This value must be set to 0 when a capture result contains buffers only 2349 * and no metadata. 2350 */ 2351 uint32_t partial_result; 2352 2353 } camera3_capture_result_t; 2354 2355 /********************************************************************** 2356 * 2357 * Callback methods for the HAL to call into the framework. 2358 * 2359 * These methods are used to return metadata and image buffers for a completed 2360 * or failed captures, and to notify the framework of asynchronous events such 2361 * as errors. 2362 * 2363 * The framework will not call back into the HAL from within these callbacks, 2364 * and these calls will not block for extended periods. 2365 * 2366 */ 2367 typedef struct camera3_callback_ops { 2368 2369 /** 2370 * process_capture_result: 2371 * 2372 * Send results from a completed capture to the framework. 2373 * process_capture_result() may be invoked multiple times by the HAL in 2374 * response to a single capture request. This allows, for example, the 2375 * metadata and low-resolution buffers to be returned in one call, and 2376 * post-processed JPEG buffers in a later call, once it is available. Each 2377 * call must include the frame number of the request it is returning 2378 * metadata or buffers for. 2379 * 2380 * A component (buffer or metadata) of the complete result may only be 2381 * included in one process_capture_result call. A buffer for each stream, 2382 * and the result metadata, must be returned by the HAL for each request in 2383 * one of the process_capture_result calls, even in case of errors producing 2384 * some of the output. A call to process_capture_result() with neither 2385 * output buffers or result metadata is not allowed. 2386 * 2387 * The order of returning metadata and buffers for a single result does not 2388 * matter, but buffers for a given stream must be returned in FIFO order. So 2389 * the buffer for request 5 for stream A must always be returned before the 2390 * buffer for request 6 for stream A. This also applies to the result 2391 * metadata; the metadata for request 5 must be returned before the metadata 2392 * for request 6. 2393 * 2394 * However, different streams are independent of each other, so it is 2395 * acceptable and expected that the buffer for request 5 for stream A may be 2396 * returned after the buffer for request 6 for stream B is. And it is 2397 * acceptable that the result metadata for request 6 for stream B is 2398 * returned before the buffer for request 5 for stream A is. 2399 * 2400 * The HAL retains ownership of result structure, which only needs to be 2401 * valid to access during this call. The framework will copy whatever it 2402 * needs before this call returns. 2403 * 2404 * The output buffers do not need to be filled yet; the framework will wait 2405 * on the stream buffer release sync fence before reading the buffer 2406 * data. Therefore, this method should be called by the HAL as soon as 2407 * possible, even if some or all of the output buffers are still in 2408 * being filled. 
The HAL must include valid release sync fences into each 2409 * output_buffers stream buffer entry, or -1 if that stream buffer is 2410 * already filled. 2411 * 2412 * If the result buffer cannot be constructed for a request, the HAL should 2413 * return an empty metadata buffer, but still provide the output buffers and 2414 * their sync fences. In addition, notify() must be called with an 2415 * ERROR_RESULT message. 2416 * 2417 * If an output buffer cannot be filled, its status field must be set to 2418 * STATUS_ERROR. In addition, notify() must be called with a ERROR_BUFFER 2419 * message. 2420 * 2421 * If the entire capture has failed, then this method still needs to be 2422 * called to return the output buffers to the framework. All the buffer 2423 * statuses should be STATUS_ERROR, and the result metadata should be an 2424 * empty buffer. In addition, notify() must be called with a ERROR_REQUEST 2425 * message. In this case, individual ERROR_RESULT/ERROR_BUFFER messages 2426 * should not be sent. 2427 * 2428 * Performance requirements: 2429 * 2430 * This is a non-blocking call. The framework will return this call in 5ms. 2431 * 2432 * The pipeline latency (see S7 for definition) should be less than or equal to 2433 * 4 frame intervals, and must be less than or equal to 8 frame intervals. 2434 * 2435 */ 2436 void (*process_capture_result)(const struct camera3_callback_ops *, 2437 const camera3_capture_result_t *result); 2438 2439 /** 2440 * notify: 2441 * 2442 * Asynchronous notification callback from the HAL, fired for various 2443 * reasons. Only for information independent of frame capture, or that 2444 * require specific timing. The ownership of the message structure remains 2445 * with the HAL, and the msg only needs to be valid for the duration of this 2446 * call. 2447 * 2448 * Multiple threads may call notify() simultaneously. 2449 * 2450 * <= CAMERA_DEVICE_API_VERSION_3_1: 2451 * 2452 * The notification for the start of exposure for a given request must be 2453 * sent by the HAL before the first call to process_capture_result() for 2454 * that request is made. 2455 * 2456 * >= CAMERA_DEVICE_API_VERSION_3_2: 2457 * 2458 * Buffers delivered to the framework will not be dispatched to the 2459 * application layer until a start of exposure timestamp (or input image's 2460 * start of exposure timestamp for a reprocess request) has been received 2461 * via a SHUTTER notify() call. It is highly recommended to dispatch this 2462 * call as early as possible. 2463 * 2464 * ------------------------------------------------------------------------ 2465 * Performance requirements: 2466 * 2467 * This is a non-blocking call. The framework will return this call in 5ms. 2468 */ 2469 void (*notify)(const struct camera3_callback_ops *, 2470 const camera3_notify_msg_t *msg); 2471 2472 } camera3_callback_ops_t; 2473 2474 /********************************************************************** 2475 * 2476 * Camera device operations 2477 * 2478 */ 2479 typedef struct camera3_device_ops { 2480 2481 /** 2482 * initialize: 2483 * 2484 * One-time initialization to pass framework callback function pointers to 2485 * the HAL. Will be called once after a successful open() call, before any 2486 * other functions are called on the camera3_device_ops structure. 2487 * 2488 * Performance requirements: 2489 * 2490 * This should be a non-blocking call. The HAL should return from this call 2491 * in 5ms, and must return from this call in 10ms. 
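 *
 * For illustration only, a minimal HAL-side implementation might simply
 * record the callback pointers for later use (the names used here are
 * placeholders, not part of this API):
 *
 *     static const camera3_callback_ops_t *g_callbacks;
 *
 *     static int hal_initialize(const struct camera3_device *dev,
 *                               const camera3_callback_ops_t *callback_ops) {
 *         (void) dev;
 *         g_callbacks = callback_ops;   // used later to invoke
 *                                       // process_capture_result() and notify()
 *         return 0;
 *     }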
2492 * 2493 * Return values: 2494 * 2495 * 0: On successful initialization 2496 * 2497 * -ENODEV: If initialization fails. Only close() can be called successfully 2498 * by the framework after this. 2499 */ 2500 int (*initialize)(const struct camera3_device *, 2501 const camera3_callback_ops_t *callback_ops); 2502 2503 /********************************************************************** 2504 * Stream management 2505 */ 2506 2507 /** 2508 * configure_streams: 2509 * 2510 * CAMERA_DEVICE_API_VERSION_3_0 only: 2511 * 2512 * Reset the HAL camera device processing pipeline and set up new input and 2513 * output streams. This call replaces any existing stream configuration with 2514 * the streams defined in the stream_list. This method will be called at 2515 * least once after initialize() before a request is submitted with 2516 * process_capture_request(). 2517 * 2518 * The stream_list must contain at least one output-capable stream, and may 2519 * not contain more than one input-capable stream. 2520 * 2521 * The stream_list may contain streams that are also in the currently-active 2522 * set of streams (from the previous call to configure_stream()). These 2523 * streams will already have valid values for usage, max_buffers, and the 2524 * private pointer. 2525 * 2526 * If such a stream has already had its buffers registered, 2527 * register_stream_buffers() will not be called again for the stream, and 2528 * buffers from the stream can be immediately included in input requests. 2529 * 2530 * If the HAL needs to change the stream configuration for an existing 2531 * stream due to the new configuration, it may rewrite the values of usage 2532 * and/or max_buffers during the configure call. 2533 * 2534 * The framework will detect such a change, and will then reallocate the 2535 * stream buffers, and call register_stream_buffers() again before using 2536 * buffers from that stream in a request. 2537 * 2538 * If a currently-active stream is not included in stream_list, the HAL may 2539 * safely remove any references to that stream. It will not be reused in a 2540 * later configure() call by the framework, and all the gralloc buffers for 2541 * it will be freed after the configure_streams() call returns. 2542 * 2543 * The stream_list structure is owned by the framework, and may not be 2544 * accessed once this call completes. The address of an individual 2545 * camera3_stream_t structure will remain valid for access by the HAL until 2546 * the end of the first configure_stream() call which no longer includes 2547 * that camera3_stream_t in the stream_list argument. The HAL may not change 2548 * values in the stream structure outside of the private pointer, except for 2549 * the usage and max_buffers members during the configure_streams() call 2550 * itself. 2551 * 2552 * If the stream is new, the usage, max_buffer, and private pointer fields 2553 * of the stream structure will all be set to 0. The HAL device must set 2554 * these fields before the configure_streams() call returns. These fields 2555 * are then used by the framework and the platform gralloc module to 2556 * allocate the gralloc buffers for each stream. 2557 * 2558 * Before such a new stream can have its buffers included in a capture 2559 * request, the framework will call register_stream_buffers() with that 2560 * stream. However, the framework is not required to register buffers for 2561 * _all_ streams before submitting a request. 
This allows for quick startup 2562 * of (for example) a preview stream, with allocation for other streams 2563 * happening later or concurrently. 2564 * 2565 * ------------------------------------------------------------------------ 2566 * CAMERA_DEVICE_API_VERSION_3_1 only: 2567 * 2568 * Reset the HAL camera device processing pipeline and set up new input and 2569 * output streams. This call replaces any existing stream configuration with 2570 * the streams defined in the stream_list. This method will be called at 2571 * least once after initialize() before a request is submitted with 2572 * process_capture_request(). 2573 * 2574 * The stream_list must contain at least one output-capable stream, and may 2575 * not contain more than one input-capable stream. 2576 * 2577 * The stream_list may contain streams that are also in the currently-active 2578 * set of streams (from the previous call to configure_stream()). These 2579 * streams will already have valid values for usage, max_buffers, and the 2580 * private pointer. 2581 * 2582 * If such a stream has already had its buffers registered, 2583 * register_stream_buffers() will not be called again for the stream, and 2584 * buffers from the stream can be immediately included in input requests. 2585 * 2586 * If the HAL needs to change the stream configuration for an existing 2587 * stream due to the new configuration, it may rewrite the values of usage 2588 * and/or max_buffers during the configure call. 2589 * 2590 * The framework will detect such a change, and will then reallocate the 2591 * stream buffers, and call register_stream_buffers() again before using 2592 * buffers from that stream in a request. 2593 * 2594 * If a currently-active stream is not included in stream_list, the HAL may 2595 * safely remove any references to that stream. It will not be reused in a 2596 * later configure() call by the framework, and all the gralloc buffers for 2597 * it will be freed after the configure_streams() call returns. 2598 * 2599 * The stream_list structure is owned by the framework, and may not be 2600 * accessed once this call completes. The address of an individual 2601 * camera3_stream_t structure will remain valid for access by the HAL until 2602 * the end of the first configure_stream() call which no longer includes 2603 * that camera3_stream_t in the stream_list argument. The HAL may not change 2604 * values in the stream structure outside of the private pointer, except for 2605 * the usage and max_buffers members during the configure_streams() call 2606 * itself. 2607 * 2608 * If the stream is new, max_buffer, and private pointer fields of the 2609 * stream structure will all be set to 0. The usage will be set to the 2610 * consumer usage flags. The HAL device must set these fields before the 2611 * configure_streams() call returns. These fields are then used by the 2612 * framework and the platform gralloc module to allocate the gralloc 2613 * buffers for each stream. 2614 * 2615 * Before such a new stream can have its buffers included in a capture 2616 * request, the framework will call register_stream_buffers() with that 2617 * stream. However, the framework is not required to register buffers for 2618 * _all_ streams before submitting a request. This allows for quick startup 2619 * of (for example) a preview stream, with allocation for other streams 2620 * happening later or concurrently. 
     *
     * ------------------------------------------------------------------------
     * >= CAMERA_DEVICE_API_VERSION_3_2:
     *
     * Reset the HAL camera device processing pipeline and set up new input and
     * output streams. This call replaces any existing stream configuration with
     * the streams defined in the stream_list. This method will be called at
     * least once after initialize() before a request is submitted with
     * process_capture_request().
     *
     * The stream_list must contain at least one output-capable stream, and may
     * not contain more than one input-capable stream.
     *
     * The stream_list may contain streams that are also in the currently-active
     * set of streams (from the previous call to configure_streams()). These
     * streams will already have valid values for usage, max_buffers, and the
     * private pointer.
     *
     * If the HAL needs to change the stream configuration for an existing
     * stream due to the new configuration, it may rewrite the values of usage
     * and/or max_buffers during the configure call.
     *
     * The framework will detect such a change, and may then reallocate the
     * stream buffers before using buffers from that stream in a request.
     *
     * If a currently-active stream is not included in stream_list, the HAL may
     * safely remove any references to that stream. It will not be reused in a
     * later configure() call by the framework, and all the gralloc buffers for
     * it will be freed after the configure_streams() call returns.
     *
     * The stream_list structure is owned by the framework, and may not be
     * accessed once this call completes. The address of an individual
     * camera3_stream_t structure will remain valid for access by the HAL until
     * the end of the first configure_streams() call which no longer includes
     * that camera3_stream_t in the stream_list argument. The HAL may not change
     * values in the stream structure outside of the private pointer, except for
     * the usage and max_buffers members during the configure_streams() call
     * itself.
     *
     * If the stream is new, the max_buffers and private pointer fields of the
     * stream structure will be set to 0. The usage will be set to the
     * consumer usage flags. The HAL device must set these fields before the
     * configure_streams() call returns. These fields are then used by the
     * framework and the platform gralloc module to allocate the gralloc
     * buffers for each stream.
     *
     * Newly allocated buffers may be included in a capture request at any time
     * by the framework. Once a gralloc buffer is returned to the framework
     * with process_capture_result (and its respective release_fence has been
     * signaled) the framework may free or reuse it at any time.
     *
     * ------------------------------------------------------------------------
     *
     * Preconditions:
     *
     * The framework will only call this method when no captures are being
     * processed. That is, all results have been returned to the framework, and
     * all in-flight input and output buffers have been returned and their
     * release sync fences have been signaled by the HAL. The framework will not
     * submit new requests for capture while the configure_streams() call is
     * underway.
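     *
     * Because register_stream_buffers() is no longer called for
     * >= CAMERA_DEVICE_API_VERSION_3_2, a HAL typically prepares gralloc
     * buffers lazily the first time it sees them in a request. The following
     * is only a hypothetical sketch of such per-buffer bookkeeping; the
     * struct and helper names are invented and not part of this interface:
     *
     *   // Hypothetical per-stream state, stored behind camera3_stream_t::priv.
     *   struct example_stream_ctx;
     *
     *   static void example_prepare_buffer(camera3_stream_t *stream,
     *           buffer_handle_t *buffer) {
     *       struct example_stream_ctx *ctx =
     *               (struct example_stream_ctx *)stream->priv;
     *       if (!example_ctx_contains(ctx, buffer)) {
     *           // First time this gralloc buffer is seen: import/map it now,
     *           // since there is no up-front registration step in >= v3.2.
     *           example_ctx_import(ctx, buffer);
     *       }
     *   }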
     *
     * Postconditions:
     *
     * The HAL device must configure itself to provide maximum possible output
     * frame rate given the sizes and formats of the output streams, as
     * documented in the camera device's static metadata.
     *
     * Performance requirements:
     *
     * This call is expected to be heavyweight and possibly take several hundred
     * milliseconds to complete, since it may require resetting and
     * reconfiguring the image sensor and the camera processing pipeline.
     * Nevertheless, the HAL device should attempt to minimize the
     * reconfiguration delay to minimize the user-visible pauses during
     * application operational mode changes (such as switching from still
     * capture to video recording).
     *
     * The HAL should return from this call in 500ms, and must return from this
     * call in 1000ms.
     *
     * Return values:
     *
     *  0:      On successful stream configuration
     *
     * -EINVAL: If the requested stream configuration is invalid. Some examples
     *          of invalid stream configurations include:
     *
     *          - Including more than 1 input-capable stream (INPUT or
     *            BIDIRECTIONAL)
     *
     *          - Not including any output-capable streams (OUTPUT or
     *            BIDIRECTIONAL)
     *
     *          - Including streams with unsupported formats, or an unsupported
     *            size for that format.
     *
     *          - Including too many output streams of a certain format.
     *
     *          - Unsupported rotation configuration (only applies to
     *            devices with version >= CAMERA_DEVICE_API_VERSION_3_3)
     *
     *          - Stream sizes/formats don't satisfy the
     *            camera3_stream_configuration_t->operation_mode requirements
     *            for non-NORMAL mode, or the requested operation_mode is not
     *            supported by the HAL. (only applies to devices with version
     *            >= CAMERA_DEVICE_API_VERSION_3_3)
     *
     *          Note that the framework submitting an invalid stream
     *          configuration is not normal operation, since stream
     *          configurations are checked before configure. An invalid
     *          configuration means that a bug exists in the framework code, or
     *          there is a mismatch between the HAL's static metadata and the
     *          requirements on streams.
     *
     * -ENODEV: If there has been a fatal error and the device is no longer
     *          operational. Only close() can be called successfully by the
     *          framework after this error is returned.
     */
    int (*configure_streams)(const struct camera3_device *,
            camera3_stream_configuration_t *stream_list);

    /**
     * register_stream_buffers:
     *
     * >= CAMERA_DEVICE_API_VERSION_3_2:
     *
     * DEPRECATED. This will not be called and must be set to NULL.
     *
     * <= CAMERA_DEVICE_API_VERSION_3_1:
     *
     * Register buffers for a given stream with the HAL device. This method is
     * called by the framework after a new stream is defined by
     * configure_streams, and before buffers from that stream are included in a
     * capture request. If the same stream is listed in a subsequent
     * configure_streams() call, register_stream_buffers will _not_ be called
     * again for that stream.
     *
     * The framework does not need to register buffers for all configured
     * streams before it submits the first capture request. This allows quick
     * startup for preview (or similar use cases) while other streams are still
     * being allocated.
     *
     * This method is intended to allow the HAL device to map or otherwise
     * prepare the buffers for later use. The buffers passed in will already be
     * locked for use. At the end of the call, all the buffers must be ready to
     * be returned to the stream. The buffer_set argument is only valid for the
     * duration of this call.
     *
     * If the stream format was set to HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
     * the camera HAL should inspect the passed-in buffers here to determine any
     * platform-private pixel format information.
     *
     * Performance requirements:
     *
     * This should be a non-blocking call. The HAL should return from this call
     * in 1ms, and must return from this call in 5ms.
     *
     * Return values:
     *
     *  0:      On successful registration of the new stream buffers
     *
     * -EINVAL: If the stream_buffer_set does not refer to a valid active
     *          stream, or if the buffers array is invalid.
     *
     * -ENOMEM: If there was a failure in registering the buffers. The framework
     *          must consider all the stream buffers to be unregistered, and can
     *          try to register again later.
     *
     * -ENODEV: If there is a fatal error, and the device is no longer
     *          operational. Only close() can be called successfully by the
     *          framework after this error is returned.
     */
    int (*register_stream_buffers)(const struct camera3_device *,
            const camera3_stream_buffer_set_t *buffer_set);

    /**********************************************************************
     * Request creation and submission
     */

    /**
     * construct_default_request_settings:
     *
     * Create capture settings for standard camera use cases.
     *
     * The device must return a settings buffer that is configured to meet the
     * requested use case, which must be one of the CAMERA3_TEMPLATE_*
     * enums. All request control fields must be included.
     *
     * The HAL retains ownership of this structure, but the pointer to the
     * structure must be valid until the device is closed. The framework and the
     * HAL may not modify the buffer once it is returned by this call. The same
     * buffer may be returned for subsequent calls for the same template, or for
     * other templates.
     *
     * Performance requirements:
     *
     * This should be a non-blocking call. The HAL should return from this call
     * in 1ms, and must return from this call in 5ms.
     *
     * Return values:
     *
     *  Valid metadata: On successful creation of a default settings
     *                  buffer.
     *
     *  NULL:           In case of a fatal error. After this is returned, only
     *                  the close() method can be called successfully by the
     *                  framework.
     */
    const camera_metadata_t* (*construct_default_request_settings)(
            const struct camera3_device *,
            int type);

    /**
     * process_capture_request:
     *
     * Send a new capture request to the HAL. The HAL should not return from
     * this call until it is ready to accept the next request to process. Only
     * one call to process_capture_request() will be made at a time by the
     * framework, and the calls will all be from the same thread. The next call
     * to process_capture_request() will be made as soon as a new request and
     * its associated buffers are available. In a normal preview scenario, this
     * means the function will be called again by the framework almost
     * instantly.
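     *
     * As an illustration of this call model only (a hypothetical sketch; the
     * HAL is free to structure its internals differently, and the helper names
     * are invented), a HAL commonly copies what it needs from the request,
     * hands it to an internal pipeline thread, and returns once it can accept
     * another request:
     *
     *   static int example_process_capture_request(const struct camera3_device *dev,
     *           camera3_capture_request_t *request) {
     *       // The request structure is only valid during this call, so copy
     *       // the settings, buffers, and fences that are needed later.
     *       struct example_pending *p = example_copy_request(request);
     *       if (p == NULL)
     *           return -EINVAL;
     *       // Block only until the pipeline has room for one more request,
     *       // so the framework's single request thread is paced naturally.
     *       example_pipeline_enqueue(p);  // results arrive asynchronously
     *       return 0;                     // via process_capture_result()
     *   }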
     *
     * The actual request processing is asynchronous, with the results of
     * capture being returned by the HAL through the process_capture_result()
     * call. This call requires the result metadata to be available, but output
     * buffers may simply provide sync fences to wait on. Multiple requests are
     * expected to be in flight at once, to maintain full output frame rate.
     *
     * The framework retains ownership of the request structure. It is only
     * guaranteed to be valid during this call. The HAL device must make copies
     * of the information it needs to retain for the capture processing. The HAL
     * is responsible for waiting on and closing the buffers' fences and
     * returning the buffer handles to the framework.
     *
     * The HAL must write the file descriptor for the input buffer's release
     * sync fence into input_buffer->release_fence, if input_buffer is not
     * NULL. If the HAL returns -1 for the input buffer release sync fence, the
     * framework is free to immediately reuse the input buffer. Otherwise, the
     * framework will wait on the sync fence before refilling and reusing the
     * input buffer.
     *
     * >= CAMERA_DEVICE_API_VERSION_3_2:
     *
     * The input/output buffers provided by the framework in each request
     * may be brand new (having never before been seen by the HAL).
     *
     * ------------------------------------------------------------------------
     * Performance considerations:
     *
     * Handling a new buffer should be extremely lightweight and there should be
     * no frame rate degradation or frame jitter introduced.
     *
     * This call must return fast enough to ensure that the requested frame
     * rate can be sustained, especially for streaming cases (post-processing
     * quality settings set to FAST). The HAL should return from this call in 1
     * frame interval, and must return from this call in 4 frame intervals.
     *
     * Return values:
     *
     *  0:      On a successful start to processing the capture request
     *
     * -EINVAL: If the input is malformed (the settings are NULL when not
     *          allowed, there are 0 output buffers, etc.) and capture processing
     *          cannot start. Failures during request processing should be
     *          handled by calling camera3_callback_ops_t.notify(). In case of
     *          this error, the framework will retain responsibility for the
     *          stream buffers' fences and the buffer handles; the HAL should
     *          not close the fences or return these buffers with
     *          process_capture_result.
     *
     * -ENODEV: If the camera device has encountered a serious error. After this
     *          error is returned, only the close() method can be successfully
     *          called by the framework.
     *
     */
    int (*process_capture_request)(const struct camera3_device *,
            camera3_capture_request_t *request);

    /**********************************************************************
     * Miscellaneous methods
     */

    /**
     * get_metadata_vendor_tag_ops:
     *
     * Get methods to query for vendor extension metadata tag information. The
     * HAL should fill in all the vendor tag operation methods, or leave ops
     * unchanged if no vendor tags are defined.
     *
     * The definition of vendor_tag_query_ops_t can be found in
     * system/media/camera/include/system/camera_metadata.h.
     *
     * >= CAMERA_DEVICE_API_VERSION_3_2:
     *    DEPRECATED. This function has been deprecated and should be set to
     *    NULL by the HAL.
     *    Please implement get_vendor_tag_ops in camera_common.h instead.
     */
    void (*get_metadata_vendor_tag_ops)(const struct camera3_device*,
            vendor_tag_query_ops_t* ops);

    /**
     * dump:
     *
     * Print out debugging state for the camera device. This will be called by
     * the framework when the camera service is asked for a debug dump, which
     * happens when using the dumpsys tool, or when capturing a bugreport.
     *
     * The passed-in file descriptor can be used to write debugging text using
     * dprintf() or write(). The text should be in ASCII encoding only.
     *
     * Performance requirements:
     *
     * This must be a non-blocking call. The HAL should return from this call
     * in 1ms, and must return from this call in 10ms. This call must avoid
     * deadlocks, as it may be called at any point during camera operation.
     * Any synchronization primitives used (such as mutex locks or semaphores)
     * should be acquired with a timeout.
     */
    void (*dump)(const struct camera3_device *, int fd);

    /**
     * flush:
     *
     * Flush all currently in-process captures and all buffers in the pipeline
     * on the given device. The framework will use this to dump all state as
     * quickly as possible in order to prepare for a configure_streams() call.
     *
     * No buffers are required to be successfully returned, so every buffer
     * held at the time of flush() (whether successfully filled or not) may be
     * returned with CAMERA3_BUFFER_STATUS_ERROR. Note the HAL is still allowed
     * to return valid (CAMERA3_BUFFER_STATUS_OK) buffers during this call,
     * provided they are successfully filled.
     *
     * All requests currently in the HAL are expected to be returned as soon as
     * possible. Not-in-process requests should return errors immediately. Any
     * interruptible hardware blocks should be stopped, and any uninterruptible
     * blocks should be waited on.
     *
     * flush() may be called concurrently to process_capture_request(), with the
     * expectation that process_capture_request will return quickly and that the
     * request submitted in that process_capture_request call is treated like
     * all other in-flight requests. Due to concurrency issues, it is possible
     * that from the HAL's point of view, a process_capture_request() call may
     * be started after flush has been invoked but has not returned yet. If such
     * a call happens before flush() returns, the HAL should treat the new
     * capture request like other in-flight pending requests (see #4 below).
     *
     * More specifically, the HAL must follow the requirements below for the
     * various cases:
     *
     * 1. For captures that are too late for the HAL to cancel/stop, and that
     *    will be completed normally by the HAL: the HAL can send shutter/notify
     *    and process_capture_result with buffers as normal.
     *
     * 2. For pending requests that have not done any processing, the HAL must
     *    call notify with CAMERA3_MSG_ERROR_REQUEST, and return all the output
     *    buffers with process_capture_result in the error state
     *    (CAMERA3_BUFFER_STATUS_ERROR). The HAL must not place the release
     *    fence into an error state; instead, the release fences must be set to
     *    the acquire fences passed by the framework, or -1 if they have already
     *    been waited on by the HAL.
     *    This is also the path to follow for any captures for which the HAL
     *    already called notify() with CAMERA3_MSG_SHUTTER but won't be
     *    producing any metadata/valid buffers. After CAMERA3_MSG_ERROR_REQUEST,
     *    for a given frame, only process_capture_result calls with buffers in
     *    CAMERA3_BUFFER_STATUS_ERROR are allowed. No further notify() calls or
     *    process_capture_result calls with non-null metadata are allowed.
     *
     * 3. For partially completed pending requests that will not produce all the
     *    output buffers, or that may be missing some metadata, the HAL should
     *    do the following:
     *
     *    3.1. Call notify with CAMERA3_MSG_ERROR_RESULT if some of the expected
     *         result metadata (i.e. one or more partial metadata) won't be
     *         available for the capture.
     *
     *    3.2. Call notify with CAMERA3_MSG_ERROR_BUFFER for every buffer that
     *         won't be produced for the capture.
     *
     *    3.3. Call notify with CAMERA3_MSG_SHUTTER with the capture timestamp
     *         before any buffers/metadata are returned with
     *         process_capture_result.
     *
     *    3.4. For captures that will produce some results, the HAL must not
     *         call CAMERA3_MSG_ERROR_REQUEST, since that indicates complete
     *         failure.
     *
     *    3.5. Valid buffers/metadata should be passed to the framework as
     *         normal.
     *
     *    3.6. Failed buffers should be returned to the framework as described
     *         for case 2. But failed buffers do not have to follow the strict
     *         ordering valid buffers do, and may be out-of-order with respect
     *         to valid buffers. For example, if buffers A, B, C, D, E are sent,
     *         and D and E fail, then A, E, B, D, C is an acceptable return
     *         order.
     *
     *    3.7. For fully-missing metadata, calling CAMERA3_MSG_ERROR_RESULT is
     *         sufficient; there is no need to call process_capture_result with
     *         NULL metadata or equivalent.
     *
     * 4. If a flush() is invoked while a process_capture_request() invocation
     *    is active, that process call should return as soon as possible. In
     *    addition, if a process_capture_request() call is made after flush()
     *    has been invoked but before flush() has returned, the capture request
     *    provided by the late process_capture_request call should be treated
     *    like a pending request in case #2 above.
     *
     * flush() should only return when there are no more outstanding buffers or
     * requests left in the HAL. The framework may call configure_streams (as
     * the HAL state is now quiesced) or may issue new requests.
     *
     * Note that it is sufficient to support only the fully-succeeded and
     * fully-failed result cases. However, it is highly desirable to support
     * the partial failure cases as well, as doing so can improve the overall
     * performance of the flush call.
     *
     * Performance requirements:
     *
     * The HAL should return from this call in 100ms, and must return from this
     * call in 1000ms. This call must not be blocked for longer than the
     * pipeline latency (see S7 for the definition).
     *
     * Version information:
     *
     *   Only available if device version >= CAMERA_DEVICE_API_VERSION_3_1.
     *
     * Return values:
     *
     *  0:      On a successful flush of the camera HAL.
     *
     * -EINVAL: If the input is malformed (the device is not valid).
     *
     * -ENODEV: If the camera device has encountered a serious error. After this
     *          error is returned, only the close() method can be successfully
     *          called by the framework.
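     *
     * As a purely illustrative sketch of case #2 above (the "pending" state
     * tracking is hypothetical and not part of this interface), a request that
     * never started processing could be returned during flush() roughly like
     * this:
     *
     *   // Notify the framework that the whole request failed.
     *   camera3_notify_msg_t msg = {
     *       .type = CAMERA3_MSG_ERROR,
     *       .message.error = {
     *           .frame_number = pending->frame_number,   // hypothetical bookkeeping
     *           .error_stream = NULL,
     *           .error_code = CAMERA3_MSG_ERROR_REQUEST,
     *       },
     *   };
     *   callback_ops->notify(callback_ops, &msg);
     *
     *   // Return every output buffer in the error state; the release fences
     *   // are set to the unwaited acquire fences (or -1 if already waited on).
     *   camera3_capture_result_t result = {
     *       .frame_number = pending->frame_number,
     *       .result = NULL,
     *       .num_output_buffers = pending->num_buffers,
     *       .output_buffers = pending->errored_buffers,  // status CAMERA3_BUFFER_STATUS_ERROR
     *   };
     *   callback_ops->process_capture_result(callback_ops, &result);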
     */
    int (*flush)(const struct camera3_device *);

    /* reserved for future use */
    void *reserved[8];
} camera3_device_ops_t;

/**********************************************************************
 *
 * Camera device definition
 *
 */
typedef struct camera3_device {
    /**
     * common.version must equal CAMERA_DEVICE_API_VERSION_3_0 to identify this
     * device as implementing version 3.0 of the camera device HAL.
     *
     * Performance requirements:
     *
     * Camera open (common.module->common.methods->open) should return in 200ms,
     * and must return in 500ms.
     * Camera close (common.close) should return in 200ms, and must return in
     * 500ms.
     *
     */
    hw_device_t common;
    camera3_device_ops_t *ops;
    void *priv;
} camera3_device_t;

__END_DECLS

#endif /* #ifdef ANDROID_INCLUDE_CAMERA3_H */