1 /* 2 * Copyright (C) 2013 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17 /* 18 * Contains implementation of a class EmulatedFakeCamera3 that encapsulates 19 * functionality of an advanced fake camera. 20 */ 21 22 #include <inttypes.h> 23 24 //#define LOG_NDEBUG 0 25 //#define LOG_NNDEBUG 0 26 #define LOG_TAG "EmulatedCamera_FakeCamera3" 27 #include <cutils/properties.h> 28 #include <utils/Log.h> 29 30 #include "EmulatedFakeCamera3.h" 31 #include "EmulatedCameraFactory.h" 32 #include <ui/Fence.h> 33 #include <ui/Rect.h> 34 #include <ui/GraphicBufferMapper.h> 35 #include "gralloc_cb.h" 36 37 #include "fake-pipeline2/Sensor.h" 38 #include "fake-pipeline2/JpegCompressor.h" 39 #include <cmath> 40 41 #include <vector> 42 43 #if defined(LOG_NNDEBUG) && LOG_NNDEBUG == 0 44 #define ALOGVV ALOGV 45 #else 46 #define ALOGVV(...) ((void)0) 47 #endif 48 49 namespace android { 50 51 /** 52 * Constants for camera capabilities 53 */ 54 55 const int64_t USEC = 1000LL; 56 const int64_t MSEC = USEC * 1000LL; 57 const int64_t SEC = MSEC * 1000LL; 58 59 const int32_t EmulatedFakeCamera3::kAvailableFormats[] = { 60 HAL_PIXEL_FORMAT_RAW16, 61 HAL_PIXEL_FORMAT_BLOB, 62 HAL_PIXEL_FORMAT_RGBA_8888, 63 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 64 // These are handled by YCbCr_420_888 65 // HAL_PIXEL_FORMAT_YV12, 66 // HAL_PIXEL_FORMAT_YCrCb_420_SP, 67 HAL_PIXEL_FORMAT_YCbCr_420_888, 68 HAL_PIXEL_FORMAT_Y16 69 }; 70 71 /** 72 * 3A constants 73 */ 74 75 // Default exposure and gain targets for different scenarios 76 const nsecs_t EmulatedFakeCamera3::kNormalExposureTime = 10 * MSEC; 77 const nsecs_t EmulatedFakeCamera3::kFacePriorityExposureTime = 30 * MSEC; 78 const int EmulatedFakeCamera3::kNormalSensitivity = 100; 79 const int EmulatedFakeCamera3::kFacePrioritySensitivity = 400; 80 const float EmulatedFakeCamera3::kExposureTrackRate = 0.1; 81 const int EmulatedFakeCamera3::kPrecaptureMinFrames = 10; 82 const int EmulatedFakeCamera3::kStableAeMaxFrames = 100; 83 const float EmulatedFakeCamera3::kExposureWanderMin = -2; 84 const float EmulatedFakeCamera3::kExposureWanderMax = 1; 85 86 /** 87 * Camera device lifecycle methods 88 */ 89 90 EmulatedFakeCamera3::EmulatedFakeCamera3(int cameraId, bool facingBack, 91 struct hw_module_t* module) : 92 EmulatedCamera3(cameraId, module), 93 mFacingBack(facingBack) { 94 ALOGI("Constructing emulated fake camera 3: ID %d, facing %s", 95 mCameraID, facingBack ? 
"back" : "front"); 96 97 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) { 98 mDefaultTemplates[i] = NULL; 99 } 100 101 } 102 103 EmulatedFakeCamera3::~EmulatedFakeCamera3() { 104 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) { 105 if (mDefaultTemplates[i] != NULL) { 106 free_camera_metadata(mDefaultTemplates[i]); 107 } 108 } 109 } 110 111 status_t EmulatedFakeCamera3::Initialize() { 112 ALOGV("%s: E", __FUNCTION__); 113 status_t res; 114 115 if (mStatus != STATUS_ERROR) { 116 ALOGE("%s: Already initialized!", __FUNCTION__); 117 return INVALID_OPERATION; 118 } 119 120 res = getCameraCapabilities(); 121 if (res != OK) { 122 ALOGE("%s: Unable to get camera capabilities: %s (%d)", 123 __FUNCTION__, strerror(-res), res); 124 return res; 125 } 126 127 res = constructStaticInfo(); 128 if (res != OK) { 129 ALOGE("%s: Unable to allocate static info: %s (%d)", 130 __FUNCTION__, strerror(-res), res); 131 return res; 132 } 133 134 return EmulatedCamera3::Initialize(); 135 } 136 137 status_t EmulatedFakeCamera3::connectCamera(hw_device_t** device) { 138 ALOGV("%s: E", __FUNCTION__); 139 Mutex::Autolock l(mLock); 140 status_t res; 141 142 if (mStatus != STATUS_CLOSED) { 143 ALOGE("%s: Can't connect in state %d", __FUNCTION__, mStatus); 144 return INVALID_OPERATION; 145 } 146 147 mSensor = new Sensor(); 148 mSensor->setSensorListener(this); 149 150 res = mSensor->startUp(); 151 if (res != NO_ERROR) return res; 152 153 mReadoutThread = new ReadoutThread(this); 154 mJpegCompressor = new JpegCompressor(); 155 156 res = mReadoutThread->run("EmuCam3::readoutThread"); 157 if (res != NO_ERROR) return res; 158 159 // Initialize fake 3A 160 161 mControlMode = ANDROID_CONTROL_MODE_AUTO; 162 mFacePriority = false; 163 mAeMode = ANDROID_CONTROL_AE_MODE_ON; 164 mAfMode = ANDROID_CONTROL_AF_MODE_AUTO; 165 mAwbMode = ANDROID_CONTROL_AWB_MODE_AUTO; 166 mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE; 167 mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE; 168 mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE; 169 mAeCounter = 0; 170 mAeTargetExposureTime = kNormalExposureTime; 171 mAeCurrentExposureTime = kNormalExposureTime; 172 mAeCurrentSensitivity = kNormalSensitivity; 173 174 return EmulatedCamera3::connectCamera(device); 175 } 176 177 status_t EmulatedFakeCamera3::closeCamera() { 178 ALOGV("%s: E", __FUNCTION__); 179 status_t res; 180 { 181 Mutex::Autolock l(mLock); 182 if (mStatus == STATUS_CLOSED) return OK; 183 184 res = mSensor->shutDown(); 185 if (res != NO_ERROR) { 186 ALOGE("%s: Unable to shut down sensor: %d", __FUNCTION__, res); 187 return res; 188 } 189 mSensor.clear(); 190 191 mReadoutThread->requestExit(); 192 } 193 194 mReadoutThread->join(); 195 196 { 197 Mutex::Autolock l(mLock); 198 // Clear out private stream information 199 for (StreamIterator s = mStreams.begin(); s != mStreams.end(); s++) { 200 PrivateStreamInfo *privStream = 201 static_cast<PrivateStreamInfo*>((*s)->priv); 202 delete privStream; 203 (*s)->priv = NULL; 204 } 205 mStreams.clear(); 206 mReadoutThread.clear(); 207 } 208 209 return EmulatedCamera3::closeCamera(); 210 } 211 212 status_t EmulatedFakeCamera3::getCameraInfo(struct camera_info *info) { 213 info->facing = mFacingBack ? 
CAMERA_FACING_BACK : CAMERA_FACING_FRONT; 214 info->orientation = gEmulatedCameraFactory.getFakeCameraOrientation(); 215 return EmulatedCamera3::getCameraInfo(info); 216 } 217 218 /** 219 * Camera3 interface methods 220 */ 221 222 status_t EmulatedFakeCamera3::configureStreams( 223 camera3_stream_configuration *streamList) { 224 Mutex::Autolock l(mLock); 225 ALOGV("%s: %d streams", __FUNCTION__, streamList->num_streams); 226 227 if (mStatus != STATUS_OPEN && mStatus != STATUS_READY) { 228 ALOGE("%s: Cannot configure streams in state %d", 229 __FUNCTION__, mStatus); 230 return NO_INIT; 231 } 232 233 /** 234 * Sanity-check input list. 235 */ 236 if (streamList == NULL) { 237 ALOGE("%s: NULL stream configuration", __FUNCTION__); 238 return BAD_VALUE; 239 } 240 241 if (streamList->streams == NULL) { 242 ALOGE("%s: NULL stream list", __FUNCTION__); 243 return BAD_VALUE; 244 } 245 246 if (streamList->num_streams < 1) { 247 ALOGE("%s: Bad number of streams requested: %d", __FUNCTION__, 248 streamList->num_streams); 249 return BAD_VALUE; 250 } 251 252 camera3_stream_t *inputStream = NULL; 253 for (size_t i = 0; i < streamList->num_streams; i++) { 254 camera3_stream_t *newStream = streamList->streams[i]; 255 256 if (newStream == NULL) { 257 ALOGE("%s: Stream index %zu was NULL", 258 __FUNCTION__, i); 259 return BAD_VALUE; 260 } 261 262 ALOGV("%s: Stream %p (id %zu), type %d, usage 0x%x, format 0x%x", 263 __FUNCTION__, newStream, i, newStream->stream_type, 264 newStream->usage, 265 newStream->format); 266 267 if (newStream->stream_type == CAMERA3_STREAM_INPUT || 268 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) { 269 if (inputStream != NULL) { 270 271 ALOGE("%s: Multiple input streams requested!", __FUNCTION__); 272 return BAD_VALUE; 273 } 274 inputStream = newStream; 275 } 276 277 bool validFormat = false; 278 for (size_t f = 0; 279 f < sizeof(kAvailableFormats)/sizeof(kAvailableFormats[0]); 280 f++) { 281 if (newStream->format == kAvailableFormats[f]) { 282 validFormat = true; 283 break; 284 } 285 } 286 if (!validFormat) { 287 ALOGE("%s: Unsupported stream format 0x%x requested", 288 __FUNCTION__, newStream->format); 289 return BAD_VALUE; 290 } 291 } 292 mInputStream = inputStream; 293 294 /** 295 * Initially mark all existing streams as not alive 296 */ 297 for (StreamIterator s = mStreams.begin(); s != mStreams.end(); ++s) { 298 PrivateStreamInfo *privStream = 299 static_cast<PrivateStreamInfo*>((*s)->priv); 300 privStream->alive = false; 301 } 302 303 /** 304 * Find new streams and mark still-alive ones 305 */ 306 for (size_t i = 0; i < streamList->num_streams; i++) { 307 camera3_stream_t *newStream = streamList->streams[i]; 308 if (newStream->priv == NULL) { 309 // New stream, construct info 310 PrivateStreamInfo *privStream = new PrivateStreamInfo(); 311 privStream->alive = true; 312 313 newStream->max_buffers = kMaxBufferCount; 314 newStream->priv = privStream; 315 mStreams.push_back(newStream); 316 } else { 317 // Existing stream, mark as still alive. 
318 PrivateStreamInfo *privStream = 319 static_cast<PrivateStreamInfo*>(newStream->priv); 320 privStream->alive = true; 321 } 322 // Always update usage and max buffers 323 newStream->max_buffers = kMaxBufferCount; 324 switch (newStream->stream_type) { 325 case CAMERA3_STREAM_OUTPUT: 326 newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE; 327 break; 328 case CAMERA3_STREAM_INPUT: 329 newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ; 330 break; 331 case CAMERA3_STREAM_BIDIRECTIONAL: 332 newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ | 333 GRALLOC_USAGE_HW_CAMERA_WRITE; 334 break; 335 } 336 } 337 338 /** 339 * Reap the dead streams 340 */ 341 for (StreamIterator s = mStreams.begin(); s != mStreams.end();) { 342 PrivateStreamInfo *privStream = 343 static_cast<PrivateStreamInfo*>((*s)->priv); 344 if (!privStream->alive) { 345 (*s)->priv = NULL; 346 delete privStream; 347 s = mStreams.erase(s); 348 } else { 349 ++s; 350 } 351 } 352 353 /** 354 * Can't reuse settings across configure call 355 */ 356 mPrevSettings.clear(); 357 358 return OK; 359 } 360 361 status_t EmulatedFakeCamera3::registerStreamBuffers( 362 const camera3_stream_buffer_set *bufferSet) { 363 ALOGV("%s: E", __FUNCTION__); 364 Mutex::Autolock l(mLock); 365 366 // Should not be called in HAL versions >= 3.2 367 368 ALOGE("%s: Should not be invoked on new HALs!", 369 __FUNCTION__); 370 return NO_INIT; 371 } 372 373 const camera_metadata_t* EmulatedFakeCamera3::constructDefaultRequestSettings( 374 int type) { 375 ALOGV("%s: E", __FUNCTION__); 376 Mutex::Autolock l(mLock); 377 378 if (type < 0 || type >= CAMERA3_TEMPLATE_COUNT) { 379 ALOGE("%s: Unknown request settings template: %d", 380 __FUNCTION__, type); 381 return NULL; 382 } 383 384 if (!hasCapability(BACKWARD_COMPATIBLE) && type != CAMERA3_TEMPLATE_PREVIEW) { 385 ALOGE("%s: Template %d not supported w/o BACKWARD_COMPATIBLE capability", 386 __FUNCTION__, type); 387 return NULL; 388 } 389 390 /** 391 * Cache is not just an optimization - pointer returned has to live at 392 * least as long as the camera device instance does. 
393 */ 394 if (mDefaultTemplates[type] != NULL) { 395 return mDefaultTemplates[type]; 396 } 397 398 CameraMetadata settings; 399 400 /** android.request */ 401 402 static const uint8_t metadataMode = ANDROID_REQUEST_METADATA_MODE_FULL; 403 settings.update(ANDROID_REQUEST_METADATA_MODE, &metadataMode, 1); 404 405 static const int32_t id = 0; 406 settings.update(ANDROID_REQUEST_ID, &id, 1); 407 408 static const int32_t frameCount = 0; 409 settings.update(ANDROID_REQUEST_FRAME_COUNT, &frameCount, 1); 410 411 /** android.lens */ 412 413 static const float focalLength = 5.0f; 414 settings.update(ANDROID_LENS_FOCAL_LENGTH, &focalLength, 1); 415 416 if (hasCapability(BACKWARD_COMPATIBLE)) { 417 static const float focusDistance = 0; 418 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focusDistance, 1); 419 420 static const float aperture = 2.8f; 421 settings.update(ANDROID_LENS_APERTURE, &aperture, 1); 422 423 static const float filterDensity = 0; 424 settings.update(ANDROID_LENS_FILTER_DENSITY, &filterDensity, 1); 425 426 static const uint8_t opticalStabilizationMode = 427 ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF; 428 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, 429 &opticalStabilizationMode, 1); 430 431 // FOCUS_RANGE set only in frame 432 } 433 434 /** android.sensor */ 435 436 if (hasCapability(MANUAL_SENSOR)) { 437 static const int64_t exposureTime = 10 * MSEC; 438 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &exposureTime, 1); 439 440 static const int64_t frameDuration = 33333333L; // 1/30 s 441 settings.update(ANDROID_SENSOR_FRAME_DURATION, &frameDuration, 1); 442 443 static const int32_t sensitivity = 100; 444 settings.update(ANDROID_SENSOR_SENSITIVITY, &sensitivity, 1); 445 } 446 447 // TIMESTAMP set only in frame 448 449 /** android.flash */ 450 451 if (hasCapability(BACKWARD_COMPATIBLE)) { 452 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF; 453 settings.update(ANDROID_FLASH_MODE, &flashMode, 1); 454 455 static const uint8_t flashPower = 10; 456 settings.update(ANDROID_FLASH_FIRING_POWER, &flashPower, 1); 457 458 static const int64_t firingTime = 0; 459 settings.update(ANDROID_FLASH_FIRING_TIME, &firingTime, 1); 460 } 461 462 /** Processing block modes */ 463 if (hasCapability(MANUAL_POST_PROCESSING)) { 464 uint8_t hotPixelMode = 0; 465 uint8_t demosaicMode = 0; 466 uint8_t noiseMode = 0; 467 uint8_t shadingMode = 0; 468 uint8_t colorMode = 0; 469 uint8_t tonemapMode = 0; 470 uint8_t edgeMode = 0; 471 switch (type) { 472 case CAMERA3_TEMPLATE_STILL_CAPTURE: 473 // fall-through 474 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT: 475 // fall-through 476 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG: 477 hotPixelMode = ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY; 478 demosaicMode = ANDROID_DEMOSAIC_MODE_HIGH_QUALITY; 479 noiseMode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY; 480 shadingMode = ANDROID_SHADING_MODE_HIGH_QUALITY; 481 colorMode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY; 482 tonemapMode = ANDROID_TONEMAP_MODE_HIGH_QUALITY; 483 edgeMode = ANDROID_EDGE_MODE_HIGH_QUALITY; 484 break; 485 case CAMERA3_TEMPLATE_PREVIEW: 486 // fall-through 487 case CAMERA3_TEMPLATE_VIDEO_RECORD: 488 // fall-through 489 default: 490 hotPixelMode = ANDROID_HOT_PIXEL_MODE_FAST; 491 demosaicMode = ANDROID_DEMOSAIC_MODE_FAST; 492 noiseMode = ANDROID_NOISE_REDUCTION_MODE_FAST; 493 shadingMode = ANDROID_SHADING_MODE_FAST; 494 colorMode = ANDROID_COLOR_CORRECTION_MODE_FAST; 495 tonemapMode = ANDROID_TONEMAP_MODE_FAST; 496 edgeMode = ANDROID_EDGE_MODE_FAST; 497 break; 498 } 499 
settings.update(ANDROID_HOT_PIXEL_MODE, &hotPixelMode, 1); 500 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1); 501 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noiseMode, 1); 502 settings.update(ANDROID_SHADING_MODE, &shadingMode, 1); 503 settings.update(ANDROID_COLOR_CORRECTION_MODE, &colorMode, 1); 504 settings.update(ANDROID_TONEMAP_MODE, &tonemapMode, 1); 505 settings.update(ANDROID_EDGE_MODE, &edgeMode, 1); 506 } 507 508 /** android.colorCorrection */ 509 510 if (hasCapability(MANUAL_POST_PROCESSING)) { 511 static const camera_metadata_rational colorTransform[9] = { 512 {1,1}, {0,1}, {0,1}, 513 {0,1}, {1,1}, {0,1}, 514 {0,1}, {0,1}, {1,1} 515 }; 516 settings.update(ANDROID_COLOR_CORRECTION_TRANSFORM, colorTransform, 9); 517 518 static const float colorGains[4] = { 519 1.0f, 1.0f, 1.0f, 1.0f 520 }; 521 settings.update(ANDROID_COLOR_CORRECTION_GAINS, colorGains, 4); 522 } 523 524 /** android.tonemap */ 525 526 if (hasCapability(MANUAL_POST_PROCESSING)) { 527 static const float tonemapCurve[4] = { 528 0.f, 0.f, 529 1.f, 1.f 530 }; 531 settings.update(ANDROID_TONEMAP_CURVE_RED, tonemapCurve, 4); 532 settings.update(ANDROID_TONEMAP_CURVE_GREEN, tonemapCurve, 4); 533 settings.update(ANDROID_TONEMAP_CURVE_BLUE, tonemapCurve, 4); 534 } 535 536 /** android.scaler */ 537 if (hasCapability(BACKWARD_COMPATIBLE)) { 538 static const int32_t cropRegion[4] = { 539 0, 0, (int32_t)Sensor::kResolution[0], (int32_t)Sensor::kResolution[1] 540 }; 541 settings.update(ANDROID_SCALER_CROP_REGION, cropRegion, 4); 542 } 543 544 /** android.jpeg */ 545 if (hasCapability(BACKWARD_COMPATIBLE)) { 546 static const uint8_t jpegQuality = 80; 547 settings.update(ANDROID_JPEG_QUALITY, &jpegQuality, 1); 548 549 static const int32_t thumbnailSize[2] = { 550 640, 480 551 }; 552 settings.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSize, 2); 553 554 static const uint8_t thumbnailQuality = 80; 555 settings.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &thumbnailQuality, 1); 556 557 static const double gpsCoordinates[2] = { 558 0, 0 559 }; 560 settings.update(ANDROID_JPEG_GPS_COORDINATES, gpsCoordinates, 2); 561 562 static const uint8_t gpsProcessingMethod[32] = "None"; 563 settings.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, gpsProcessingMethod, 32); 564 565 static const int64_t gpsTimestamp = 0; 566 settings.update(ANDROID_JPEG_GPS_TIMESTAMP, &gpsTimestamp, 1); 567 568 static const int32_t jpegOrientation = 0; 569 settings.update(ANDROID_JPEG_ORIENTATION, &jpegOrientation, 1); 570 } 571 572 /** android.stats */ 573 574 if (hasCapability(BACKWARD_COMPATIBLE)) { 575 static const uint8_t faceDetectMode = 576 ANDROID_STATISTICS_FACE_DETECT_MODE_OFF; 577 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1); 578 579 static const uint8_t hotPixelMapMode = 580 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF; 581 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1); 582 } 583 584 // faceRectangles, faceScores, faceLandmarks, faceIds, histogram, 585 // sharpnessMap only in frames 586 587 /** android.control */ 588 589 uint8_t controlIntent = 0; 590 switch (type) { 591 case CAMERA3_TEMPLATE_PREVIEW: 592 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW; 593 break; 594 case CAMERA3_TEMPLATE_STILL_CAPTURE: 595 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE; 596 break; 597 case CAMERA3_TEMPLATE_VIDEO_RECORD: 598 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD; 599 break; 600 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT: 601 controlIntent = 
ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT; 602 break; 603 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG: 604 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG; 605 break; 606 case CAMERA3_TEMPLATE_MANUAL: 607 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL; 608 break; 609 default: 610 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM; 611 break; 612 } 613 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1); 614 615 const uint8_t controlMode = (type == CAMERA3_TEMPLATE_MANUAL) ? 616 ANDROID_CONTROL_MODE_OFF : 617 ANDROID_CONTROL_MODE_AUTO; 618 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1); 619 620 int32_t aeTargetFpsRange[2] = { 621 5, 30 622 }; 623 if (type == CAMERA3_TEMPLATE_VIDEO_RECORD || type == CAMERA3_TEMPLATE_VIDEO_SNAPSHOT) { 624 aeTargetFpsRange[0] = 30; 625 } 626 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, aeTargetFpsRange, 2); 627 628 if (hasCapability(BACKWARD_COMPATIBLE)) { 629 630 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF; 631 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1); 632 633 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; 634 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1); 635 636 const uint8_t aeMode = (type == CAMERA3_TEMPLATE_MANUAL) ? 637 ANDROID_CONTROL_AE_MODE_OFF : 638 ANDROID_CONTROL_AE_MODE_ON; 639 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1); 640 641 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF; 642 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1); 643 644 static const int32_t controlRegions[5] = { 645 0, 0, 0, 0, 0 646 }; 647 settings.update(ANDROID_CONTROL_AE_REGIONS, controlRegions, 5); 648 649 static const int32_t aeExpCompensation = 0; 650 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &aeExpCompensation, 1); 651 652 653 static const uint8_t aeAntibandingMode = 654 ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO; 655 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &aeAntibandingMode, 1); 656 657 static const uint8_t aePrecaptureTrigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE; 658 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &aePrecaptureTrigger, 1); 659 660 const uint8_t awbMode = (type == CAMERA3_TEMPLATE_MANUAL) ? 
661 ANDROID_CONTROL_AWB_MODE_OFF : 662 ANDROID_CONTROL_AWB_MODE_AUTO; 663 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1); 664 665 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF; 666 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1); 667 668 uint8_t afMode = 0; 669 switch (type) { 670 case CAMERA3_TEMPLATE_PREVIEW: 671 afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE; 672 break; 673 case CAMERA3_TEMPLATE_STILL_CAPTURE: 674 afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE; 675 break; 676 case CAMERA3_TEMPLATE_VIDEO_RECORD: 677 afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO; 678 break; 679 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT: 680 afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO; 681 break; 682 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG: 683 afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE; 684 break; 685 case CAMERA3_TEMPLATE_MANUAL: 686 afMode = ANDROID_CONTROL_AF_MODE_OFF; 687 break; 688 default: 689 afMode = ANDROID_CONTROL_AF_MODE_AUTO; 690 break; 691 } 692 settings.update(ANDROID_CONTROL_AF_MODE, &afMode, 1); 693 694 settings.update(ANDROID_CONTROL_AF_REGIONS, controlRegions, 5); 695 696 static const uint8_t afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE; 697 settings.update(ANDROID_CONTROL_AF_TRIGGER, &afTrigger, 1); 698 699 static const uint8_t vstabMode = 700 ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF; 701 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vstabMode, 1); 702 703 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF; 704 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1); 705 706 static const uint8_t lensShadingMapMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF; 707 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMapMode, 1); 708 709 static const uint8_t aberrationMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST; 710 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &aberrationMode, 1); 711 712 static const int32_t testPatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF; 713 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testPatternMode, 1); 714 } 715 716 mDefaultTemplates[type] = settings.release(); 717 718 return mDefaultTemplates[type]; 719 } 720 721 status_t EmulatedFakeCamera3::processCaptureRequest( 722 camera3_capture_request *request) { 723 724 Mutex::Autolock l(mLock); 725 status_t res; 726 727 /** Validation */ 728 729 if (mStatus < STATUS_READY) { 730 ALOGE("%s: Can't submit capture requests in state %d", __FUNCTION__, 731 mStatus); 732 return INVALID_OPERATION; 733 } 734 735 if (request == NULL) { 736 ALOGE("%s: NULL request!", __FUNCTION__); 737 return BAD_VALUE; 738 } 739 740 uint32_t frameNumber = request->frame_number; 741 742 if (request->settings == NULL && mPrevSettings.isEmpty()) { 743 ALOGE("%s: Request %d: NULL settings for first request after" 744 "configureStreams()", __FUNCTION__, frameNumber); 745 return BAD_VALUE; 746 } 747 748 if (request->input_buffer != NULL && 749 request->input_buffer->stream != mInputStream) { 750 ALOGE("%s: Request %d: Input buffer not from input stream!", 751 __FUNCTION__, frameNumber); 752 ALOGV("%s: Bad stream %p, expected: %p", 753 __FUNCTION__, request->input_buffer->stream, 754 mInputStream); 755 ALOGV("%s: Bad stream type %d, expected stream type %d", 756 __FUNCTION__, request->input_buffer->stream->stream_type, 757 mInputStream ? 
mInputStream->stream_type : -1); 758 759 return BAD_VALUE; 760 } 761 762 if (request->num_output_buffers < 1 || request->output_buffers == NULL) { 763 ALOGE("%s: Request %d: No output buffers provided!", 764 __FUNCTION__, frameNumber); 765 return BAD_VALUE; 766 } 767 768 // Validate all buffers, starting with input buffer if it's given 769 770 ssize_t idx; 771 const camera3_stream_buffer_t *b; 772 if (request->input_buffer != NULL) { 773 idx = -1; 774 b = request->input_buffer; 775 } else { 776 idx = 0; 777 b = request->output_buffers; 778 } 779 do { 780 PrivateStreamInfo *priv = 781 static_cast<PrivateStreamInfo*>(b->stream->priv); 782 if (priv == NULL) { 783 ALOGE("%s: Request %d: Buffer %zu: Unconfigured stream!", 784 __FUNCTION__, frameNumber, idx); 785 return BAD_VALUE; 786 } 787 if (!priv->alive) { 788 ALOGE("%s: Request %d: Buffer %zu: Dead stream!", 789 __FUNCTION__, frameNumber, idx); 790 return BAD_VALUE; 791 } 792 if (b->status != CAMERA3_BUFFER_STATUS_OK) { 793 ALOGE("%s: Request %d: Buffer %zu: Status not OK!", 794 __FUNCTION__, frameNumber, idx); 795 return BAD_VALUE; 796 } 797 if (b->release_fence != -1) { 798 ALOGE("%s: Request %d: Buffer %zu: Has a release fence!", 799 __FUNCTION__, frameNumber, idx); 800 return BAD_VALUE; 801 } 802 if (b->buffer == NULL) { 803 ALOGE("%s: Request %d: Buffer %zu: NULL buffer handle!", 804 __FUNCTION__, frameNumber, idx); 805 return BAD_VALUE; 806 } 807 idx++; 808 b = &(request->output_buffers[idx]); 809 } while (idx < (ssize_t)request->num_output_buffers); 810 811 // TODO: Validate settings parameters 812 813 /** 814 * Start processing this request 815 */ 816 817 mStatus = STATUS_ACTIVE; 818 819 CameraMetadata settings; 820 821 if (request->settings == NULL) { 822 settings.acquire(mPrevSettings); 823 } else { 824 settings = request->settings; 825 } 826 827 res = process3A(settings); 828 if (res != OK) { 829 return res; 830 } 831 832 // TODO: Handle reprocessing 833 834 /** 835 * Get ready for sensor config 836 */ 837 838 nsecs_t exposureTime; 839 nsecs_t frameDuration; 840 uint32_t sensitivity; 841 bool needJpeg = false; 842 camera_metadata_entry_t entry; 843 844 entry = settings.find(ANDROID_SENSOR_EXPOSURE_TIME); 845 exposureTime = (entry.count > 0) ? entry.data.i64[0] : Sensor::kExposureTimeRange[0]; 846 entry = settings.find(ANDROID_SENSOR_FRAME_DURATION); 847 frameDuration = (entry.count > 0)? entry.data.i64[0] : Sensor::kFrameDurationRange[0]; 848 entry = settings.find(ANDROID_SENSOR_SENSITIVITY); 849 sensitivity = (entry.count > 0) ? entry.data.i32[0] : Sensor::kSensitivityRange[0]; 850 851 if (exposureTime > frameDuration) { 852 frameDuration = exposureTime + Sensor::kMinVerticalBlank; 853 settings.update(ANDROID_SENSOR_FRAME_DURATION, &frameDuration, 1); 854 } 855 856 Buffers *sensorBuffers = new Buffers(); 857 HalBufferVector *buffers = new HalBufferVector(); 858 859 sensorBuffers->setCapacity(request->num_output_buffers); 860 buffers->setCapacity(request->num_output_buffers); 861 862 // Process all the buffers we got for output, constructing internal buffer 863 // structures for them, and lock them for writing. 
864 for (size_t i = 0; i < request->num_output_buffers; i++) { 865 const camera3_stream_buffer &srcBuf = request->output_buffers[i]; 866 const cb_handle_t *privBuffer = 867 static_cast<const cb_handle_t*>(*srcBuf.buffer); 868 StreamBuffer destBuf; 869 destBuf.streamId = kGenericStreamId; 870 destBuf.width = srcBuf.stream->width; 871 destBuf.height = srcBuf.stream->height; 872 destBuf.format = privBuffer->format; // Use real private format 873 destBuf.stride = srcBuf.stream->width; // TODO: query from gralloc 874 destBuf.dataSpace = srcBuf.stream->data_space; 875 destBuf.buffer = srcBuf.buffer; 876 877 if (destBuf.format == HAL_PIXEL_FORMAT_BLOB) { 878 needJpeg = true; 879 } 880 881 // Wait on fence 882 sp<Fence> bufferAcquireFence = new Fence(srcBuf.acquire_fence); 883 res = bufferAcquireFence->wait(kFenceTimeoutMs); 884 if (res == TIMED_OUT) { 885 ALOGE("%s: Request %d: Buffer %zu: Fence timed out after %d ms", 886 __FUNCTION__, frameNumber, i, kFenceTimeoutMs); 887 } 888 if (res == OK) { 889 // Lock buffer for writing 890 const Rect rect(destBuf.width, destBuf.height); 891 if (srcBuf.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) { 892 if (privBuffer->format == HAL_PIXEL_FORMAT_YCrCb_420_SP) { 893 android_ycbcr ycbcr = android_ycbcr(); 894 res = GraphicBufferMapper::get().lockYCbCr( 895 *(destBuf.buffer), 896 GRALLOC_USAGE_HW_CAMERA_WRITE, rect, 897 &ycbcr); 898 // This is only valid because we know that emulator's 899 // YCbCr_420_888 is really contiguous NV21 under the hood 900 destBuf.img = static_cast<uint8_t*>(ycbcr.y); 901 } else { 902 ALOGE("Unexpected private format for flexible YUV: 0x%x", 903 privBuffer->format); 904 res = INVALID_OPERATION; 905 } 906 } else { 907 res = GraphicBufferMapper::get().lock(*(destBuf.buffer), 908 GRALLOC_USAGE_HW_CAMERA_WRITE, rect, 909 (void**)&(destBuf.img)); 910 } 911 if (res != OK) { 912 ALOGE("%s: Request %d: Buffer %zu: Unable to lock buffer", 913 __FUNCTION__, frameNumber, i); 914 } 915 } 916 917 if (res != OK) { 918 // Either waiting or locking failed. Unlock locked buffers and bail 919 // out. 920 for (size_t j = 0; j < i; j++) { 921 GraphicBufferMapper::get().unlock( 922 *(request->output_buffers[i].buffer)); 923 } 924 return NO_INIT; 925 } 926 927 sensorBuffers->push_back(destBuf); 928 buffers->push_back(srcBuf); 929 } 930 931 /** 932 * Wait for JPEG compressor to not be busy, if needed 933 */ 934 if (needJpeg) { 935 bool ready = mJpegCompressor->waitForDone(kFenceTimeoutMs); 936 if (!ready) { 937 ALOGE("%s: Timeout waiting for JPEG compression to complete!", 938 __FUNCTION__); 939 return NO_INIT; 940 } 941 } 942 943 /** 944 * Wait until the in-flight queue has room 945 */ 946 res = mReadoutThread->waitForReadout(); 947 if (res != OK) { 948 ALOGE("%s: Timeout waiting for previous requests to complete!", 949 __FUNCTION__); 950 return NO_INIT; 951 } 952 953 /** 954 * Wait until sensor's ready. This waits for lengthy amounts of time with 955 * mLock held, but the interface spec is that no other calls may by done to 956 * the HAL by the framework while process_capture_request is happening. 
957 */ 958 int syncTimeoutCount = 0; 959 while(!mSensor->waitForVSync(kSyncWaitTimeout)) { 960 if (mStatus == STATUS_ERROR) { 961 return NO_INIT; 962 } 963 if (syncTimeoutCount == kMaxSyncTimeoutCount) { 964 ALOGE("%s: Request %d: Sensor sync timed out after %" PRId64 " ms", 965 __FUNCTION__, frameNumber, 966 kSyncWaitTimeout * kMaxSyncTimeoutCount / 1000000); 967 return NO_INIT; 968 } 969 syncTimeoutCount++; 970 } 971 972 /** 973 * Configure sensor and queue up the request to the readout thread 974 */ 975 mSensor->setExposureTime(exposureTime); 976 mSensor->setFrameDuration(frameDuration); 977 mSensor->setSensitivity(sensitivity); 978 mSensor->setDestinationBuffers(sensorBuffers); 979 mSensor->setFrameNumber(request->frame_number); 980 981 ReadoutThread::Request r; 982 r.frameNumber = request->frame_number; 983 r.settings = settings; 984 r.sensorBuffers = sensorBuffers; 985 r.buffers = buffers; 986 987 mReadoutThread->queueCaptureRequest(r); 988 ALOGVV("%s: Queued frame %d", __FUNCTION__, request->frame_number); 989 990 // Cache the settings for next time 991 mPrevSettings.acquire(settings); 992 993 return OK; 994 } 995 996 status_t EmulatedFakeCamera3::flush() { 997 ALOGW("%s: Not implemented; ignored", __FUNCTION__); 998 return OK; 999 } 1000 1001 /** Debug methods */ 1002 1003 void EmulatedFakeCamera3::dump(int fd) { 1004 1005 } 1006 1007 /** 1008 * Private methods 1009 */ 1010 1011 status_t EmulatedFakeCamera3::getCameraCapabilities() { 1012 1013 const char *key = mFacingBack ? "qemu.sf.back_camera_caps" : "qemu.sf.front_camera_caps"; 1014 1015 /* Defined by 'qemu.sf.*_camera_caps' boot property: if the 1016 * property doesn't exist, it is assumed to list FULL. */ 1017 char prop[PROPERTY_VALUE_MAX]; 1018 if (property_get(key, prop, NULL) > 0) { 1019 char *saveptr = nullptr; 1020 char *cap = strtok_r(prop, " ,", &saveptr); 1021 while (cap != NULL) { 1022 for (int i = 0; i < NUM_CAPABILITIES; i++) { 1023 if (!strcasecmp(cap, sAvailableCapabilitiesStrings[i])) { 1024 mCapabilities.add(static_cast<AvailableCapabilities>(i)); 1025 break; 1026 } 1027 } 1028 cap = strtok_r(NULL, " ,", &saveptr); 1029 } 1030 if (mCapabilities.size() == 0) { 1031 ALOGE("qemu.sf.back_camera_caps had no valid capabilities: %s", prop); 1032 } 1033 } 1034 // Default to FULL_LEVEL plus RAW if nothing is defined 1035 if (mCapabilities.size() == 0) { 1036 mCapabilities.add(FULL_LEVEL); 1037 mCapabilities.add(RAW); 1038 } 1039 1040 // Add level-based caps 1041 if (hasCapability(FULL_LEVEL)) { 1042 mCapabilities.add(BURST_CAPTURE); 1043 mCapabilities.add(READ_SENSOR_SETTINGS); 1044 mCapabilities.add(MANUAL_SENSOR); 1045 mCapabilities.add(MANUAL_POST_PROCESSING); 1046 }; 1047 1048 // Backwards-compatible is required for most other caps 1049 // Not required for DEPTH_OUTPUT, though. 
1050 if (hasCapability(BURST_CAPTURE) || 1051 hasCapability(READ_SENSOR_SETTINGS) || 1052 hasCapability(RAW) || 1053 hasCapability(MANUAL_SENSOR) || 1054 hasCapability(MANUAL_POST_PROCESSING) || 1055 hasCapability(PRIVATE_REPROCESSING) || 1056 hasCapability(YUV_REPROCESSING) || 1057 hasCapability(CONSTRAINED_HIGH_SPEED_VIDEO)) { 1058 mCapabilities.add(BACKWARD_COMPATIBLE); 1059 } 1060 1061 ALOGI("Camera %d capabilities:", mCameraID); 1062 for (size_t i = 0; i < mCapabilities.size(); i++) { 1063 ALOGI(" %s", sAvailableCapabilitiesStrings[mCapabilities[i]]); 1064 } 1065 1066 return OK; 1067 } 1068 1069 bool EmulatedFakeCamera3::hasCapability(AvailableCapabilities cap) { 1070 ssize_t idx = mCapabilities.indexOf(cap); 1071 return idx >= 0; 1072 } 1073 1074 status_t EmulatedFakeCamera3::constructStaticInfo() { 1075 1076 CameraMetadata info; 1077 Vector<int32_t> availableCharacteristicsKeys; 1078 status_t res; 1079 1080 #define ADD_STATIC_ENTRY(name, varptr, count) \ 1081 availableCharacteristicsKeys.add(name); \ 1082 res = info.update(name, varptr, count); \ 1083 if (res != OK) return res 1084 1085 // android.sensor 1086 1087 if (hasCapability(MANUAL_SENSOR)) { 1088 1089 ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, 1090 Sensor::kExposureTimeRange, 2); 1091 1092 ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION, 1093 &Sensor::kFrameDurationRange[1], 1); 1094 1095 ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, 1096 Sensor::kSensitivityRange, 1097 sizeof(Sensor::kSensitivityRange) 1098 /sizeof(int32_t)); 1099 1100 ADD_STATIC_ENTRY(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY, 1101 &Sensor::kSensitivityRange[1], 1); 1102 } 1103 1104 static const float sensorPhysicalSize[2] = {3.20f, 2.40f}; // mm 1105 ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_PHYSICAL_SIZE, 1106 sensorPhysicalSize, 2); 1107 1108 ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, 1109 (int32_t*)Sensor::kResolution, 2); 1110 1111 ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, 1112 (int32_t*)Sensor::kActiveArray, 4); 1113 1114 static const int32_t orientation = 90; // Aligned with 'long edge' 1115 ADD_STATIC_ENTRY(ANDROID_SENSOR_ORIENTATION, &orientation, 1); 1116 1117 static const uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME; 1118 ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE, ×tampSource, 1); 1119 1120 if (hasCapability(RAW)) { 1121 ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT, 1122 &Sensor::kColorFilterArrangement, 1); 1123 1124 ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_WHITE_LEVEL, 1125 (int32_t*)&Sensor::kMaxRawValue, 1); 1126 1127 static const int32_t blackLevelPattern[4] = { 1128 (int32_t)Sensor::kBlackLevel, (int32_t)Sensor::kBlackLevel, 1129 (int32_t)Sensor::kBlackLevel, (int32_t)Sensor::kBlackLevel 1130 }; 1131 ADD_STATIC_ENTRY(ANDROID_SENSOR_BLACK_LEVEL_PATTERN, 1132 blackLevelPattern, sizeof(blackLevelPattern)/sizeof(int32_t)); 1133 } 1134 1135 if (hasCapability(BACKWARD_COMPATIBLE)) { 1136 static const int32_t availableTestPatternModes[] = { 1137 ANDROID_SENSOR_TEST_PATTERN_MODE_OFF 1138 }; 1139 ADD_STATIC_ENTRY(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES, 1140 availableTestPatternModes, sizeof(availableTestPatternModes)/sizeof(int32_t)); 1141 } 1142 1143 // android.lens 1144 1145 static const float focalLength = 3.30f; // mm 1146 ADD_STATIC_ENTRY(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS, 1147 &focalLength, 1); 1148 1149 if (hasCapability(BACKWARD_COMPATIBLE)) { 1150 // 5 cm min focus distance for back camera, infinity (fixed focus) for front 
1151 const float minFocusDistance = mFacingBack ? 1.0/0.05 : 0.0; 1152 ADD_STATIC_ENTRY(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE, 1153 &minFocusDistance, 1); 1154 1155 // 5 m hyperfocal distance for back camera, infinity (fixed focus) for front 1156 const float hyperFocalDistance = mFacingBack ? 1.0/5.0 : 0.0; 1157 ADD_STATIC_ENTRY(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, 1158 &minFocusDistance, 1); 1159 1160 static const float aperture = 2.8f; 1161 ADD_STATIC_ENTRY(ANDROID_LENS_INFO_AVAILABLE_APERTURES, 1162 &aperture, 1); 1163 static const float filterDensity = 0; 1164 ADD_STATIC_ENTRY(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES, 1165 &filterDensity, 1); 1166 static const uint8_t availableOpticalStabilization = 1167 ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF; 1168 ADD_STATIC_ENTRY(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION, 1169 &availableOpticalStabilization, 1); 1170 1171 static const int32_t lensShadingMapSize[] = {1, 1}; 1172 ADD_STATIC_ENTRY(ANDROID_LENS_INFO_SHADING_MAP_SIZE, lensShadingMapSize, 1173 sizeof(lensShadingMapSize)/sizeof(int32_t)); 1174 1175 static const uint8_t lensFocusCalibration = 1176 ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE; 1177 ADD_STATIC_ENTRY(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION, &lensFocusCalibration, 1); 1178 } 1179 1180 if (hasCapability(DEPTH_OUTPUT)) { 1181 // These could be included for non-DEPTH capability as well, but making this variable for 1182 // testing coverage 1183 1184 // 90 degree rotation to align with long edge of a phone device that's by default portrait 1185 static const float qO[] = { 0.707107f, 0.f, 0.f, 0.707107f}; 1186 1187 // Either a 180-degree rotation for back-facing, or no rotation for front-facing 1188 const float qF[] = {0, (mFacingBack ? 1.f : 0.f), 0, (mFacingBack ? 0.f : 1.f)}; 1189 1190 // Quarternion product, orientation change then facing 1191 const float lensPoseRotation[] = {qO[0]*qF[0] - qO[1]*qF[1] - qO[2]*qF[2] - qO[3]*qF[3], 1192 qO[0]*qF[1] + qO[1]*qF[0] + qO[2]*qF[3] - qO[3]*qF[2], 1193 qO[0]*qF[2] + qO[2]*qF[0] + qO[1]*qF[3] - qO[3]*qF[1], 1194 qO[0]*qF[3] + qO[3]*qF[0] + qO[1]*qF[2] - qO[2]*qF[1]}; 1195 1196 ADD_STATIC_ENTRY(ANDROID_LENS_POSE_ROTATION, lensPoseRotation, 1197 sizeof(lensPoseRotation)/sizeof(float)); 1198 1199 // Only one camera facing each way, so 0 translation needed to the center of the 'main' 1200 // camera 1201 static const float lensPoseTranslation[] = {0.f, 0.f, 0.f}; 1202 1203 ADD_STATIC_ENTRY(ANDROID_LENS_POSE_TRANSLATION, lensPoseTranslation, 1204 sizeof(lensPoseTranslation)/sizeof(float)); 1205 1206 // Intrinsics are 'ideal' (f_x, f_y, c_x, c_y, s) match focal length and active array size 1207 float f_x = focalLength * Sensor::kActiveArray[2] / sensorPhysicalSize[0]; 1208 float f_y = focalLength * Sensor::kActiveArray[3] / sensorPhysicalSize[1]; 1209 float c_x = Sensor::kActiveArray[2] / 2.f; 1210 float c_y = Sensor::kActiveArray[3] / 2.f; 1211 float s = 0.f; 1212 const float lensIntrinsics[] = { f_x, f_y, c_x, c_y, s }; 1213 1214 ADD_STATIC_ENTRY(ANDROID_LENS_INTRINSIC_CALIBRATION, lensIntrinsics, 1215 sizeof(lensIntrinsics)/sizeof(float)); 1216 1217 // No radial or tangential distortion 1218 1219 float lensRadialDistortion[] = {1.0f, 0.f, 0.f, 0.f, 0.f, 0.f}; 1220 1221 ADD_STATIC_ENTRY(ANDROID_LENS_RADIAL_DISTORTION, lensRadialDistortion, 1222 sizeof(lensRadialDistortion)/sizeof(float)); 1223 1224 } 1225 1226 1227 static const uint8_t lensFacing = mFacingBack ? 
1228 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT; 1229 ADD_STATIC_ENTRY(ANDROID_LENS_FACING, &lensFacing, 1); 1230 1231 // android.flash 1232 1233 static const uint8_t flashAvailable = 0; 1234 ADD_STATIC_ENTRY(ANDROID_FLASH_INFO_AVAILABLE, &flashAvailable, 1); 1235 1236 // android.tonemap 1237 1238 if (hasCapability(MANUAL_POST_PROCESSING)) { 1239 static const int32_t tonemapCurvePoints = 128; 1240 ADD_STATIC_ENTRY(ANDROID_TONEMAP_MAX_CURVE_POINTS, &tonemapCurvePoints, 1); 1241 1242 static const uint8_t availableToneMapModes[] = { 1243 ANDROID_TONEMAP_MODE_CONTRAST_CURVE, ANDROID_TONEMAP_MODE_FAST, 1244 ANDROID_TONEMAP_MODE_HIGH_QUALITY 1245 }; 1246 ADD_STATIC_ENTRY(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES, availableToneMapModes, 1247 sizeof(availableToneMapModes)); 1248 } 1249 1250 // android.scaler 1251 1252 const std::vector<int32_t> availableStreamConfigurationsBasic = { 1253 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 320, 240, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT, 1254 HAL_PIXEL_FORMAT_YCbCr_420_888, 320, 240, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT, 1255 HAL_PIXEL_FORMAT_RGBA_8888, 320, 240, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT, 1256 HAL_PIXEL_FORMAT_BLOB, 640, 480, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT 1257 }; 1258 const std::vector<int32_t> availableStreamConfigurationsRaw = { 1259 HAL_PIXEL_FORMAT_RAW16, 640, 480, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT 1260 }; 1261 const std::vector<int32_t> availableStreamConfigurationsBurst = { 1262 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 640, 480, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT, 1263 HAL_PIXEL_FORMAT_YCbCr_420_888, 640, 480, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT, 1264 HAL_PIXEL_FORMAT_RGBA_8888, 640, 480, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT 1265 }; 1266 1267 std::vector<int32_t> availableStreamConfigurations; 1268 1269 if (hasCapability(BACKWARD_COMPATIBLE)) { 1270 availableStreamConfigurations.insert(availableStreamConfigurations.end(), 1271 availableStreamConfigurationsBasic.begin(), 1272 availableStreamConfigurationsBasic.end()); 1273 } 1274 if (hasCapability(RAW)) { 1275 availableStreamConfigurations.insert(availableStreamConfigurations.end(), 1276 availableStreamConfigurationsRaw.begin(), 1277 availableStreamConfigurationsRaw.end()); 1278 } 1279 if (hasCapability(BURST_CAPTURE)) { 1280 availableStreamConfigurations.insert(availableStreamConfigurations.end(), 1281 availableStreamConfigurationsBurst.begin(), 1282 availableStreamConfigurationsBurst.end()); 1283 } 1284 1285 if (availableStreamConfigurations.size() > 0) { 1286 ADD_STATIC_ENTRY(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, 1287 &availableStreamConfigurations[0], 1288 availableStreamConfigurations.size()); 1289 } 1290 1291 const std::vector<int64_t> availableMinFrameDurationsBasic = { 1292 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 320, 240, Sensor::kFrameDurationRange[0], 1293 HAL_PIXEL_FORMAT_YCbCr_420_888, 320, 240, Sensor::kFrameDurationRange[0], 1294 HAL_PIXEL_FORMAT_RGBA_8888, 320, 240, Sensor::kFrameDurationRange[0], 1295 HAL_PIXEL_FORMAT_BLOB, 640, 480, Sensor::kFrameDurationRange[0] 1296 }; 1297 const std::vector<int64_t> availableMinFrameDurationsRaw = { 1298 HAL_PIXEL_FORMAT_RAW16, 640, 480, Sensor::kFrameDurationRange[0] 1299 }; 1300 const std::vector<int64_t> availableMinFrameDurationsBurst = { 1301 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 640, 480, Sensor::kFrameDurationRange[0], 1302 HAL_PIXEL_FORMAT_YCbCr_420_888, 640, 
480, Sensor::kFrameDurationRange[0], 1303 HAL_PIXEL_FORMAT_RGBA_8888, 640, 480, Sensor::kFrameDurationRange[0], 1304 }; 1305 1306 std::vector<int64_t> availableMinFrameDurations; 1307 1308 if (hasCapability(BACKWARD_COMPATIBLE)) { 1309 availableMinFrameDurations.insert(availableMinFrameDurations.end(), 1310 availableMinFrameDurationsBasic.begin(), 1311 availableMinFrameDurationsBasic.end()); 1312 } 1313 if (hasCapability(RAW)) { 1314 availableMinFrameDurations.insert(availableMinFrameDurations.end(), 1315 availableMinFrameDurationsRaw.begin(), 1316 availableMinFrameDurationsRaw.end()); 1317 } 1318 if (hasCapability(BURST_CAPTURE)) { 1319 availableMinFrameDurations.insert(availableMinFrameDurations.end(), 1320 availableMinFrameDurationsBurst.begin(), 1321 availableMinFrameDurationsBurst.end()); 1322 } 1323 1324 if (availableMinFrameDurations.size() > 0) { 1325 ADD_STATIC_ENTRY(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, 1326 &availableMinFrameDurations[0], 1327 availableMinFrameDurations.size()); 1328 } 1329 1330 const std::vector<int64_t> availableStallDurationsBasic = { 1331 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 320, 240, 0, 1332 HAL_PIXEL_FORMAT_YCbCr_420_888, 320, 240, 0, 1333 HAL_PIXEL_FORMAT_RGBA_8888, 320, 240, 0, 1334 HAL_PIXEL_FORMAT_BLOB, 640, 480, Sensor::kFrameDurationRange[0] 1335 }; 1336 const std::vector<int64_t> availableStallDurationsRaw = { 1337 HAL_PIXEL_FORMAT_RAW16, 640, 480, Sensor::kFrameDurationRange[0] 1338 }; 1339 const std::vector<int64_t> availableStallDurationsBurst = { 1340 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 640, 480, 0, 1341 HAL_PIXEL_FORMAT_YCbCr_420_888, 640, 480, 0, 1342 HAL_PIXEL_FORMAT_RGBA_8888, 640, 480, 0 1343 }; 1344 1345 std::vector<int64_t> availableStallDurations; 1346 1347 if (hasCapability(BACKWARD_COMPATIBLE)) { 1348 availableStallDurations.insert(availableStallDurations.end(), 1349 availableStallDurationsBasic.begin(), 1350 availableStallDurationsBasic.end()); 1351 } 1352 if (hasCapability(RAW)) { 1353 availableStallDurations.insert(availableStallDurations.end(), 1354 availableStallDurationsRaw.begin(), 1355 availableStallDurationsRaw.end()); 1356 } 1357 if (hasCapability(BURST_CAPTURE)) { 1358 availableStallDurations.insert(availableStallDurations.end(), 1359 availableStallDurationsBurst.begin(), 1360 availableStallDurationsBurst.end()); 1361 } 1362 1363 if (availableStallDurations.size() > 0) { 1364 ADD_STATIC_ENTRY(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS, 1365 &availableStallDurations[0], 1366 availableStallDurations.size()); 1367 } 1368 1369 if (hasCapability(BACKWARD_COMPATIBLE)) { 1370 static const uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_FREEFORM; 1371 ADD_STATIC_ENTRY(ANDROID_SCALER_CROPPING_TYPE, 1372 &croppingType, 1); 1373 1374 static const float maxZoom = 10; 1375 ADD_STATIC_ENTRY(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, 1376 &maxZoom, 1); 1377 } 1378 1379 // android.jpeg 1380 1381 if (hasCapability(BACKWARD_COMPATIBLE)) { 1382 static const int32_t jpegThumbnailSizes[] = { 1383 0, 0, 1384 160, 120, 1385 320, 240 1386 }; 1387 ADD_STATIC_ENTRY(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, 1388 jpegThumbnailSizes, sizeof(jpegThumbnailSizes)/sizeof(int32_t)); 1389 1390 static const int32_t jpegMaxSize = JpegCompressor::kMaxJpegSize; 1391 ADD_STATIC_ENTRY(ANDROID_JPEG_MAX_SIZE, &jpegMaxSize, 1); 1392 } 1393 1394 // android.stats 1395 1396 if (hasCapability(BACKWARD_COMPATIBLE)) { 1397 static const uint8_t availableFaceDetectModes[] = { 1398 ANDROID_STATISTICS_FACE_DETECT_MODE_OFF, 1399 
ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, 1400 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL 1401 }; 1402 ADD_STATIC_ENTRY(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES, 1403 availableFaceDetectModes, 1404 sizeof(availableFaceDetectModes)); 1405 1406 static const int32_t maxFaceCount = 8; 1407 ADD_STATIC_ENTRY(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, 1408 &maxFaceCount, 1); 1409 1410 1411 static const uint8_t availableShadingMapModes[] = { 1412 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF 1413 }; 1414 ADD_STATIC_ENTRY(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES, 1415 availableShadingMapModes, sizeof(availableShadingMapModes)); 1416 } 1417 1418 // android.sync 1419 1420 static const int32_t maxLatency = 1421 hasCapability(FULL_LEVEL) ? ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL : 3; 1422 ADD_STATIC_ENTRY(ANDROID_SYNC_MAX_LATENCY, &maxLatency, 1); 1423 1424 // android.control 1425 1426 if (hasCapability(BACKWARD_COMPATIBLE)) { 1427 static const uint8_t availableControlModes[] = { 1428 ANDROID_CONTROL_MODE_OFF, ANDROID_CONTROL_MODE_AUTO, ANDROID_CONTROL_MODE_USE_SCENE_MODE 1429 }; 1430 ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_MODES, 1431 availableControlModes, sizeof(availableControlModes)); 1432 } else { 1433 static const uint8_t availableControlModes[] = { 1434 ANDROID_CONTROL_MODE_AUTO 1435 }; 1436 ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_MODES, 1437 availableControlModes, sizeof(availableControlModes)); 1438 } 1439 1440 static const uint8_t availableSceneModes[] = { 1441 hasCapability(BACKWARD_COMPATIBLE) ? 1442 ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY : 1443 ANDROID_CONTROL_SCENE_MODE_DISABLED 1444 }; 1445 ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_SCENE_MODES, 1446 availableSceneModes, sizeof(availableSceneModes)); 1447 1448 if (hasCapability(BACKWARD_COMPATIBLE)) { 1449 static const uint8_t availableEffects[] = { 1450 ANDROID_CONTROL_EFFECT_MODE_OFF 1451 }; 1452 ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_EFFECTS, 1453 availableEffects, sizeof(availableEffects)); 1454 } 1455 1456 if (hasCapability(BACKWARD_COMPATIBLE)) { 1457 static const int32_t max3aRegions[] = {/*AE*/ 1,/*AWB*/ 0,/*AF*/ 1}; 1458 ADD_STATIC_ENTRY(ANDROID_CONTROL_MAX_REGIONS, 1459 max3aRegions, sizeof(max3aRegions)/sizeof(max3aRegions[0])); 1460 1461 static const uint8_t availableAeModes[] = { 1462 ANDROID_CONTROL_AE_MODE_OFF, 1463 ANDROID_CONTROL_AE_MODE_ON 1464 }; 1465 ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_AVAILABLE_MODES, 1466 availableAeModes, sizeof(availableAeModes)); 1467 1468 static const camera_metadata_rational exposureCompensationStep = { 1469 1, 3 1470 }; 1471 ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_COMPENSATION_STEP, 1472 &exposureCompensationStep, 1); 1473 1474 int32_t exposureCompensationRange[] = {-9, 9}; 1475 ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_COMPENSATION_RANGE, 1476 exposureCompensationRange, 1477 sizeof(exposureCompensationRange)/sizeof(int32_t)); 1478 } 1479 1480 static const int32_t availableTargetFpsRanges[] = { 1481 5, 30, 15, 30, 15, 15, 30, 30 1482 }; 1483 ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, 1484 availableTargetFpsRanges, 1485 sizeof(availableTargetFpsRanges)/sizeof(int32_t)); 1486 1487 if (hasCapability(BACKWARD_COMPATIBLE)) { 1488 static const uint8_t availableAntibandingModes[] = { 1489 ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF, 1490 ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO 1491 }; 1492 ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES, 1493 availableAntibandingModes, sizeof(availableAntibandingModes)); 1494 } 1495 1496 static const 
uint8_t aeLockAvailable = hasCapability(BACKWARD_COMPATIBLE) ? 1497 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE; 1498 1499 ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_LOCK_AVAILABLE, 1500 &aeLockAvailable, 1); 1501 1502 if (hasCapability(BACKWARD_COMPATIBLE)) { 1503 static const uint8_t availableAwbModes[] = { 1504 ANDROID_CONTROL_AWB_MODE_OFF, 1505 ANDROID_CONTROL_AWB_MODE_AUTO, 1506 ANDROID_CONTROL_AWB_MODE_INCANDESCENT, 1507 ANDROID_CONTROL_AWB_MODE_FLUORESCENT, 1508 ANDROID_CONTROL_AWB_MODE_DAYLIGHT, 1509 ANDROID_CONTROL_AWB_MODE_SHADE 1510 }; 1511 ADD_STATIC_ENTRY(ANDROID_CONTROL_AWB_AVAILABLE_MODES, 1512 availableAwbModes, sizeof(availableAwbModes)); 1513 } 1514 1515 static const uint8_t awbLockAvailable = hasCapability(BACKWARD_COMPATIBLE) ? 1516 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE; 1517 1518 ADD_STATIC_ENTRY(ANDROID_CONTROL_AWB_LOCK_AVAILABLE, 1519 &awbLockAvailable, 1); 1520 1521 static const uint8_t availableAfModesBack[] = { 1522 ANDROID_CONTROL_AF_MODE_OFF, 1523 ANDROID_CONTROL_AF_MODE_AUTO, 1524 ANDROID_CONTROL_AF_MODE_MACRO, 1525 ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO, 1526 ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE 1527 }; 1528 1529 static const uint8_t availableAfModesFront[] = { 1530 ANDROID_CONTROL_AF_MODE_OFF 1531 }; 1532 1533 if (mFacingBack && hasCapability(BACKWARD_COMPATIBLE)) { 1534 ADD_STATIC_ENTRY(ANDROID_CONTROL_AF_AVAILABLE_MODES, 1535 availableAfModesBack, sizeof(availableAfModesBack)); 1536 } else { 1537 ADD_STATIC_ENTRY(ANDROID_CONTROL_AF_AVAILABLE_MODES, 1538 availableAfModesFront, sizeof(availableAfModesFront)); 1539 } 1540 1541 static const uint8_t availableVstabModes[] = { 1542 ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF 1543 }; 1544 ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES, 1545 availableVstabModes, sizeof(availableVstabModes)); 1546 1547 // android.colorCorrection 1548 1549 if (hasCapability(BACKWARD_COMPATIBLE)) { 1550 static const uint8_t availableAberrationModes[] = { 1551 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF, 1552 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST, 1553 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY 1554 }; 1555 ADD_STATIC_ENTRY(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES, 1556 availableAberrationModes, sizeof(availableAberrationModes)); 1557 } else { 1558 static const uint8_t availableAberrationModes[] = { 1559 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF, 1560 }; 1561 ADD_STATIC_ENTRY(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES, 1562 availableAberrationModes, sizeof(availableAberrationModes)); 1563 } 1564 // android.edge 1565 1566 if (hasCapability(BACKWARD_COMPATIBLE)) { 1567 static const uint8_t availableEdgeModes[] = { 1568 ANDROID_EDGE_MODE_OFF, ANDROID_EDGE_MODE_FAST, ANDROID_EDGE_MODE_HIGH_QUALITY 1569 }; 1570 ADD_STATIC_ENTRY(ANDROID_EDGE_AVAILABLE_EDGE_MODES, 1571 availableEdgeModes, sizeof(availableEdgeModes)); 1572 } else { 1573 static const uint8_t availableEdgeModes[] = { 1574 ANDROID_EDGE_MODE_OFF 1575 }; 1576 ADD_STATIC_ENTRY(ANDROID_EDGE_AVAILABLE_EDGE_MODES, 1577 availableEdgeModes, sizeof(availableEdgeModes)); 1578 } 1579 1580 // android.info 1581 1582 static const uint8_t supportedHardwareLevel = 1583 hasCapability(FULL_LEVEL) ? 
ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL : 1584 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED; 1585 ADD_STATIC_ENTRY(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL, 1586 &supportedHardwareLevel, 1587 /*count*/1); 1588 1589 // android.noiseReduction 1590 1591 if (hasCapability(BACKWARD_COMPATIBLE)) { 1592 static const uint8_t availableNoiseReductionModes[] = { 1593 ANDROID_NOISE_REDUCTION_MODE_OFF, 1594 ANDROID_NOISE_REDUCTION_MODE_FAST, 1595 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY 1596 }; 1597 ADD_STATIC_ENTRY(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES, 1598 availableNoiseReductionModes, sizeof(availableNoiseReductionModes)); 1599 } else { 1600 static const uint8_t availableNoiseReductionModes[] = { 1601 ANDROID_NOISE_REDUCTION_MODE_OFF, 1602 }; 1603 ADD_STATIC_ENTRY(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES, 1604 availableNoiseReductionModes, sizeof(availableNoiseReductionModes)); 1605 } 1606 1607 // android.depth 1608 1609 if (hasCapability(DEPTH_OUTPUT)) { 1610 1611 static const int32_t maxDepthSamples = 100; 1612 ADD_STATIC_ENTRY(ANDROID_DEPTH_MAX_DEPTH_SAMPLES, 1613 &maxDepthSamples, 1); 1614 1615 static const int32_t availableDepthStreamConfigurations[] = { 1616 HAL_PIXEL_FORMAT_Y16, 160, 120, ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_OUTPUT, 1617 HAL_PIXEL_FORMAT_BLOB, maxDepthSamples,1, ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_OUTPUT 1618 }; 1619 ADD_STATIC_ENTRY(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS, 1620 availableDepthStreamConfigurations, 1621 sizeof(availableDepthStreamConfigurations)/sizeof(int32_t)); 1622 1623 static const int64_t availableDepthMinFrameDurations[] = { 1624 HAL_PIXEL_FORMAT_Y16, 160, 120, Sensor::kFrameDurationRange[0], 1625 HAL_PIXEL_FORMAT_BLOB, maxDepthSamples,1, Sensor::kFrameDurationRange[0] 1626 }; 1627 ADD_STATIC_ENTRY(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS, 1628 availableDepthMinFrameDurations, 1629 sizeof(availableDepthMinFrameDurations)/sizeof(int64_t)); 1630 1631 static const int64_t availableDepthStallDurations[] = { 1632 HAL_PIXEL_FORMAT_Y16, 160, 120, Sensor::kFrameDurationRange[0], 1633 HAL_PIXEL_FORMAT_BLOB, maxDepthSamples,1, Sensor::kFrameDurationRange[0] 1634 }; 1635 ADD_STATIC_ENTRY(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS, 1636 availableDepthStallDurations, 1637 sizeof(availableDepthStallDurations)/sizeof(int64_t)); 1638 1639 uint8_t depthIsExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE; 1640 ADD_STATIC_ENTRY(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, 1641 &depthIsExclusive, 1); 1642 } 1643 1644 // android.shading 1645 1646 if (hasCapability(BACKWARD_COMPATIBLE)) { 1647 static const uint8_t availableShadingModes[] = { 1648 ANDROID_SHADING_MODE_OFF, ANDROID_SHADING_MODE_FAST, ANDROID_SHADING_MODE_HIGH_QUALITY 1649 }; 1650 ADD_STATIC_ENTRY(ANDROID_SHADING_AVAILABLE_MODES, availableShadingModes, 1651 sizeof(availableShadingModes)); 1652 } else { 1653 static const uint8_t availableShadingModes[] = { 1654 ANDROID_SHADING_MODE_OFF 1655 }; 1656 ADD_STATIC_ENTRY(ANDROID_SHADING_AVAILABLE_MODES, availableShadingModes, 1657 sizeof(availableShadingModes)); 1658 } 1659 1660 // android.request 1661 1662 static const int32_t maxNumOutputStreams[] = { 1663 kMaxRawStreamCount, kMaxProcessedStreamCount, kMaxJpegStreamCount 1664 }; 1665 ADD_STATIC_ENTRY(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, maxNumOutputStreams, 3); 1666 1667 static const uint8_t maxPipelineDepth = kMaxBufferCount; 1668 ADD_STATIC_ENTRY(ANDROID_REQUEST_PIPELINE_MAX_DEPTH, &maxPipelineDepth, 1); 1669 1670 static const 
    // android.shading

    if (hasCapability(BACKWARD_COMPATIBLE)) {
        static const uint8_t availableShadingModes[] = {
            ANDROID_SHADING_MODE_OFF, ANDROID_SHADING_MODE_FAST, ANDROID_SHADING_MODE_HIGH_QUALITY
        };
        ADD_STATIC_ENTRY(ANDROID_SHADING_AVAILABLE_MODES, availableShadingModes,
                sizeof(availableShadingModes));
    } else {
        static const uint8_t availableShadingModes[] = {
            ANDROID_SHADING_MODE_OFF
        };
        ADD_STATIC_ENTRY(ANDROID_SHADING_AVAILABLE_MODES, availableShadingModes,
                sizeof(availableShadingModes));
    }

    // android.request

    static const int32_t maxNumOutputStreams[] = {
            kMaxRawStreamCount, kMaxProcessedStreamCount, kMaxJpegStreamCount
    };
    ADD_STATIC_ENTRY(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, maxNumOutputStreams, 3);

    static const uint8_t maxPipelineDepth = kMaxBufferCount;
    ADD_STATIC_ENTRY(ANDROID_REQUEST_PIPELINE_MAX_DEPTH, &maxPipelineDepth, 1);

    static const int32_t partialResultCount = 1;
    ADD_STATIC_ENTRY(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
            &partialResultCount, /*count*/1);

    SortedVector<uint8_t> caps;
    for (size_t i = 0; i < mCapabilities.size(); i++) {
        switch (mCapabilities[i]) {
            case BACKWARD_COMPATIBLE:
                caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
                break;
            case MANUAL_SENSOR:
                caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
                break;
            case MANUAL_POST_PROCESSING:
                caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
                break;
            case RAW:
                caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
                break;
            case PRIVATE_REPROCESSING:
                caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
                break;
            case READ_SENSOR_SETTINGS:
                caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
                break;
            case BURST_CAPTURE:
                caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
                break;
            case YUV_REPROCESSING:
                caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
                break;
            case DEPTH_OUTPUT:
                caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT);
                break;
            case CONSTRAINED_HIGH_SPEED_VIDEO:
                caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
                break;
            default:
                // Ignore LEVELs
                break;
        }
    }
    ADD_STATIC_ENTRY(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, caps.array(), caps.size());

    // Scan a default request template for included request keys
    Vector<int32_t> availableRequestKeys;
    const camera_metadata_t *previewRequest =
        constructDefaultRequestSettings(CAMERA3_TEMPLATE_PREVIEW);
    for (size_t i = 0; i < get_camera_metadata_entry_count(previewRequest); i++) {
        camera_metadata_ro_entry_t entry;
        get_camera_metadata_ro_entry(previewRequest, i, &entry);
        availableRequestKeys.add(entry.tag);
    }
    ADD_STATIC_ENTRY(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, availableRequestKeys.array(),
            availableRequestKeys.size());

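    // Deriving the available request keys from the preview template above keeps
    // ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS in sync automatically: any key that
    // a default request template populates is assumed to be a key this HAL
    // accepts in capture requests.
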
    // Add a few more result keys. Must be kept up to date with the code below
    // that adds these result entries.

    Vector<int32_t> availableResultKeys(availableRequestKeys);
    if (hasCapability(BACKWARD_COMPATIBLE)) {
        availableResultKeys.add(ANDROID_CONTROL_AE_STATE);
        availableResultKeys.add(ANDROID_CONTROL_AF_STATE);
        availableResultKeys.add(ANDROID_CONTROL_AWB_STATE);
        availableResultKeys.add(ANDROID_FLASH_STATE);
        availableResultKeys.add(ANDROID_LENS_STATE);
        availableResultKeys.add(ANDROID_LENS_FOCUS_RANGE);
        availableResultKeys.add(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW);
        availableResultKeys.add(ANDROID_STATISTICS_SCENE_FLICKER);
    }

    if (hasCapability(DEPTH_OUTPUT)) {
        availableResultKeys.add(ANDROID_LENS_POSE_ROTATION);
        availableResultKeys.add(ANDROID_LENS_POSE_TRANSLATION);
        availableResultKeys.add(ANDROID_LENS_INTRINSIC_CALIBRATION);
        availableResultKeys.add(ANDROID_LENS_RADIAL_DISTORTION);
    }

    availableResultKeys.add(ANDROID_REQUEST_PIPELINE_DEPTH);
    availableResultKeys.add(ANDROID_SENSOR_TIMESTAMP);

    ADD_STATIC_ENTRY(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS, availableResultKeys.array(),
            availableResultKeys.size());

    // Needs to be last, to collect all the keys set
    availableCharacteristicsKeys.add(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS);
    info.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
            availableCharacteristicsKeys);

    mCameraInfo = info.release();

#undef ADD_STATIC_ENTRY
    return OK;
}

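/**
 * Fake 3A implementation.
 *
 * process3A() is the entry point: it dispatches on ANDROID_CONTROL_MODE
 * (OFF forces all 3A states to INACTIVE; USE_SCENE_MODE only supports
 * FACE_PRIORITY), runs the per-algorithm simulations doFakeAE(), doFakeAF(),
 * and doFakeAWB(), and then writes the resulting states and exposure values
 * back into the request settings via update3A(). A rough sketch of the
 * per-request usage (illustrative only; the actual call site lives in the
 * request-processing path of this HAL):
 *
 *     CameraMetadata settings(request_settings);
 *     status_t res = process3A(settings);  // mutates settings with 3A results
 *     if (res != OK) {
 *         // flag an error for this capture request
 *     }
 */
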
status_t EmulatedFakeCamera3::process3A(CameraMetadata &settings) {
    /**
     * Extract top-level 3A controls
     */
    status_t res;

    bool facePriority = false;

    camera_metadata_entry e;

    e = settings.find(ANDROID_CONTROL_MODE);
    if (e.count == 0) {
        ALOGE("%s: No control mode entry!", __FUNCTION__);
        return BAD_VALUE;
    }
    uint8_t controlMode = e.data.u8[0];

    if (controlMode == ANDROID_CONTROL_MODE_OFF) {
        mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
        mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
        mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
        update3A(settings);
        return OK;
    } else if (controlMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
        if (!hasCapability(BACKWARD_COMPATIBLE)) {
            ALOGE("%s: Can't use scene mode when BACKWARD_COMPATIBLE not supported!",
                    __FUNCTION__);
            return BAD_VALUE;
        }

        e = settings.find(ANDROID_CONTROL_SCENE_MODE);
        if (e.count == 0) {
            ALOGE("%s: No scene mode entry!", __FUNCTION__);
            return BAD_VALUE;
        }
        uint8_t sceneMode = e.data.u8[0];

        switch (sceneMode) {
            case ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY:
                mFacePriority = true;
                break;
            default:
                ALOGE("%s: Emulator doesn't support scene mode %d",
                        __FUNCTION__, sceneMode);
                return BAD_VALUE;
        }
    } else {
        mFacePriority = false;
    }

    // controlMode == AUTO or sceneMode == FACE_PRIORITY
    // Process individual 3A controls

    res = doFakeAE(settings);
    if (res != OK) return res;

    res = doFakeAF(settings);
    if (res != OK) return res;

    res = doFakeAWB(settings);
    if (res != OK) return res;

    update3A(settings);
    return OK;
}

status_t EmulatedFakeCamera3::doFakeAE(CameraMetadata &settings) {
    camera_metadata_entry e;

    e = settings.find(ANDROID_CONTROL_AE_MODE);
    if (e.count == 0 && hasCapability(BACKWARD_COMPATIBLE)) {
        ALOGE("%s: No AE mode entry!", __FUNCTION__);
        return BAD_VALUE;
    }
    uint8_t aeMode = (e.count > 0) ? e.data.u8[0] : (uint8_t)ANDROID_CONTROL_AE_MODE_ON;

    switch (aeMode) {
        case ANDROID_CONTROL_AE_MODE_OFF:
            // AE is OFF
            mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
            return OK;
        case ANDROID_CONTROL_AE_MODE_ON:
            // OK for AUTO modes
            break;
        default:
            // Mostly silently ignore unsupported modes
            ALOGV("%s: Emulator doesn't support AE mode %d, assuming ON",
                    __FUNCTION__, aeMode);
            break;
    }

    e = settings.find(ANDROID_CONTROL_AE_LOCK);
    bool aeLocked = (e.count > 0) ? (e.data.u8[0] == ANDROID_CONTROL_AE_LOCK_ON) : false;

    e = settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER);
    bool precaptureTrigger = false;
    if (e.count != 0) {
        precaptureTrigger =
                (e.data.u8[0] == ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_START);
    }

    if (precaptureTrigger) {
        ALOGV("%s: Pre capture trigger = %d", __FUNCTION__, precaptureTrigger);
    } else if (e.count > 0) {
        ALOGV("%s: Pre capture trigger was present? %zu",
                __FUNCTION__,
                e.count);
    }

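    /**
     * The simulated AE below converges exposure with a simple first-order
     * tracking step: each frame, the current exposure time moves a fixed
     * fraction (kExposureTrackRate) of the remaining distance toward the
     * target, and convergence is declared once the gap falls under 10% of
     * the target. Once stable for more than kStableAeMaxFrames frames, the
     * target is perturbed by a factor of 2^x with x drawn uniformly from
     * [kExposureWanderMin, kExposureWanderMax], which restarts a search.
     */
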
    if (precaptureTrigger || mAeState == ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
        // Run precapture sequence
        if (mAeState != ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
            mAeCounter = 0;
        }

        if (mFacePriority) {
            mAeTargetExposureTime = kFacePriorityExposureTime;
        } else {
            mAeTargetExposureTime = kNormalExposureTime;
        }

        if (mAeCounter > kPrecaptureMinFrames &&
                (mAeTargetExposureTime - mAeCurrentExposureTime) <
                mAeTargetExposureTime / 10) {
            // Done with precapture
            mAeCounter = 0;
            mAeState = aeLocked ? ANDROID_CONTROL_AE_STATE_LOCKED :
                    ANDROID_CONTROL_AE_STATE_CONVERGED;
        } else {
            // Converge some more
            mAeCurrentExposureTime +=
                    (mAeTargetExposureTime - mAeCurrentExposureTime) *
                    kExposureTrackRate;
            mAeCounter++;
            mAeState = ANDROID_CONTROL_AE_STATE_PRECAPTURE;
        }

    } else if (!aeLocked) {
        // Run standard occasional AE scan
        switch (mAeState) {
            case ANDROID_CONTROL_AE_STATE_CONVERGED:
            case ANDROID_CONTROL_AE_STATE_INACTIVE:
                mAeCounter++;
                if (mAeCounter > kStableAeMaxFrames) {
                    mAeTargetExposureTime =
                            mFacePriority ? kFacePriorityExposureTime :
                            kNormalExposureTime;
                    float exposureStep = ((double)rand() / RAND_MAX) *
                            (kExposureWanderMax - kExposureWanderMin) +
                            kExposureWanderMin;
                    mAeTargetExposureTime *= std::pow(2, exposureStep);
                    mAeState = ANDROID_CONTROL_AE_STATE_SEARCHING;
                }
                break;
            case ANDROID_CONTROL_AE_STATE_SEARCHING:
                mAeCurrentExposureTime +=
                        (mAeTargetExposureTime - mAeCurrentExposureTime) *
                        kExposureTrackRate;
                if (abs(mAeTargetExposureTime - mAeCurrentExposureTime) <
                        mAeTargetExposureTime / 10) {
                    // Close enough
                    mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED;
                    mAeCounter = 0;
                }
                break;
            case ANDROID_CONTROL_AE_STATE_LOCKED:
                mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED;
                mAeCounter = 0;
                break;
            default:
                ALOGE("%s: Emulator in unexpected AE state %d",
                        __FUNCTION__, mAeState);
                return INVALID_OPERATION;
        }
    } else {
        // AE is locked
        mAeState = ANDROID_CONTROL_AE_STATE_LOCKED;
    }

    return OK;
}

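/**
 * The fake AF below is a per-frame state machine: it advances at most one
 * state per captured frame, and focus outcomes are randomized via rand() % 3,
 * so roughly two out of three sweeps end in FOCUSED_LOCKED rather than
 * NOT_FOCUSED_LOCKED. Front-facing cameras only accept AF_MODE_OFF.
 */
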
status_t EmulatedFakeCamera3::doFakeAF(CameraMetadata &settings) {
    camera_metadata_entry e;

    e = settings.find(ANDROID_CONTROL_AF_MODE);
    if (e.count == 0 && hasCapability(BACKWARD_COMPATIBLE)) {
        ALOGE("%s: No AF mode entry!", __FUNCTION__);
        return BAD_VALUE;
    }
    uint8_t afMode = (e.count > 0) ? e.data.u8[0] : (uint8_t)ANDROID_CONTROL_AF_MODE_OFF;

    e = settings.find(ANDROID_CONTROL_AF_TRIGGER);
    typedef camera_metadata_enum_android_control_af_trigger af_trigger_t;
    af_trigger_t afTrigger;
    if (e.count != 0) {
        afTrigger = static_cast<af_trigger_t>(e.data.u8[0]);

        ALOGV("%s: AF trigger set to 0x%x", __FUNCTION__, afTrigger);
        ALOGV("%s: AF mode is 0x%x", __FUNCTION__, afMode);
    } else {
        afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
    }

    switch (afMode) {
        case ANDROID_CONTROL_AF_MODE_OFF:
            mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
            return OK;
        case ANDROID_CONTROL_AF_MODE_AUTO:
        case ANDROID_CONTROL_AF_MODE_MACRO:
        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
            if (!mFacingBack) {
                ALOGE("%s: Front camera doesn't support AF mode %d",
                        __FUNCTION__, afMode);
                return BAD_VALUE;
            }
            // OK, transitions are handled below
            break;
        default:
            ALOGE("%s: Emulator doesn't support AF mode %d",
                    __FUNCTION__, afMode);
            return BAD_VALUE;
    }

    bool afModeChanged = mAfMode != afMode;
    mAfMode = afMode;

    /**
     * Simulate AF triggers. Transition at most 1 state per frame.
     * - Focusing always succeeds (goes into locked, or PASSIVE_SCAN).
     */

    bool afTriggerStart = false;
    bool afTriggerCancel = false;
    switch (afTrigger) {
        case ANDROID_CONTROL_AF_TRIGGER_IDLE:
            break;
        case ANDROID_CONTROL_AF_TRIGGER_START:
            afTriggerStart = true;
            break;
        case ANDROID_CONTROL_AF_TRIGGER_CANCEL:
            afTriggerCancel = true;
            // Cancel trigger always transitions into INACTIVE
            mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;

            ALOGV("%s: AF State transition to STATE_INACTIVE", __FUNCTION__);

            // Stay in 'inactive' until at least the next frame
            return OK;
        default:
            ALOGE("%s: Unknown af trigger value %d", __FUNCTION__, afTrigger);
            return BAD_VALUE;
    }

    // If we get down here, we're either in an autofocus mode
    // or in a continuous focus mode (and no other modes)

    int oldAfState = mAfState;
    switch (mAfState) {
        case ANDROID_CONTROL_AF_STATE_INACTIVE:
            if (afTriggerStart) {
                switch (afMode) {
                    case ANDROID_CONTROL_AF_MODE_AUTO:
                        // fall-through
                    case ANDROID_CONTROL_AF_MODE_MACRO:
                        mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
                        break;
                    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
                        // fall-through
                    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
                        mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
                        break;
                }
            } else {
                // At least one frame stays in INACTIVE
                if (!afModeChanged) {
                    switch (afMode) {
                        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
                            // fall-through
                        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
                            mAfState = ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN;
                            break;
                    }
                }
            }
            break;
        case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
            /**
             * When the AF trigger is activated, the algorithm should finish
             * its PASSIVE_SCAN if active, and then transition into AF_FOCUSED
             * or AF_NOT_FOCUSED as appropriate
             */
            if (afTriggerStart) {
                // Randomly transition to focused or not focused
                if (rand() % 3) {
                    mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
                } else {
                    mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
                }
            }
            /**
             * When the AF trigger is not involved, the AF algorithm should
             * start in INACTIVE state, and then transition into PASSIVE_SCAN
             * and PASSIVE_FOCUSED states
             */
            else if (!afTriggerCancel) {
                // Randomly transition to passive focus
                if (rand() % 3 == 0) {
                    mAfState = ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED;
                }
            }

            break;
        case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
            if (afTriggerStart) {
                // Randomly transition to focused or not focused
                if (rand() % 3) {
                    mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
                } else {
                    mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
                }
            }
            // TODO: initiate passive scan (PASSIVE_SCAN)
            break;
        case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN:
            // Simulate AF sweep completing instantaneously

            // Randomly transition to focused or not focused
            if (rand() % 3) {
                mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
            } else {
                mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
            }
            break;
        case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
            if (afTriggerStart) {
                switch (afMode) {
                    case ANDROID_CONTROL_AF_MODE_AUTO:
                        // fall-through
                    case ANDROID_CONTROL_AF_MODE_MACRO:
                        mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
                        break;
                    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
                        // fall-through
                    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
                        // continuous autofocus => trigger start has no effect
                        break;
                }
            }
            break;
        case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
            if (afTriggerStart) {
                switch (afMode) {
                    case ANDROID_CONTROL_AF_MODE_AUTO:
                        // fall-through
                    case ANDROID_CONTROL_AF_MODE_MACRO:
                        mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
                        break;
                    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
                        // fall-through
                    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
                        // continuous autofocus => trigger start has no effect
                        break;
                }
            }
            break;
        default:
            ALOGE("%s: Bad af state %d", __FUNCTION__, mAfState);
    }

    {
        char afStateString[100] = {0,};
        camera_metadata_enum_snprint(ANDROID_CONTROL_AF_STATE,
                oldAfState,
                afStateString,
                sizeof(afStateString));

        char afNewStateString[100] = {0,};
        camera_metadata_enum_snprint(ANDROID_CONTROL_AF_STATE,
                mAfState,
                afNewStateString,
                sizeof(afNewStateString));
        ALOGVV("%s: AF state transitioned from %s to %s",
                __FUNCTION__, afStateString, afNewStateString);
    }

    return OK;
}

status_t EmulatedFakeCamera3::doFakeAWB(CameraMetadata &settings) {
    camera_metadata_entry e;

    e = settings.find(ANDROID_CONTROL_AWB_MODE);
    if (e.count == 0 && hasCapability(BACKWARD_COMPATIBLE)) {
        ALOGE("%s: No AWB mode entry!", __FUNCTION__);
        return BAD_VALUE;
    }
    uint8_t awbMode = (e.count > 0) ? e.data.u8[0] : (uint8_t)ANDROID_CONTROL_AWB_MODE_AUTO;

    // TODO: Add white balance simulation

    switch (awbMode) {
        case ANDROID_CONTROL_AWB_MODE_OFF:
            mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
            return OK;
        case ANDROID_CONTROL_AWB_MODE_AUTO:
        case ANDROID_CONTROL_AWB_MODE_INCANDESCENT:
        case ANDROID_CONTROL_AWB_MODE_FLUORESCENT:
        case ANDROID_CONTROL_AWB_MODE_DAYLIGHT:
        case ANDROID_CONTROL_AWB_MODE_SHADE:
            // OK
            break;
        default:
            ALOGE("%s: Emulator doesn't support AWB mode %d",
                    __FUNCTION__, awbMode);
            return BAD_VALUE;
    }

    return OK;
}

void EmulatedFakeCamera3::update3A(CameraMetadata &settings) {
    if (mAeMode != ANDROID_CONTROL_AE_MODE_OFF) {
        settings.update(ANDROID_SENSOR_EXPOSURE_TIME,
                &mAeCurrentExposureTime, 1);
        settings.update(ANDROID_SENSOR_SENSITIVITY,
                &mAeCurrentSensitivity, 1);
    }

    settings.update(ANDROID_CONTROL_AE_STATE,
            &mAeState, 1);
    settings.update(ANDROID_CONTROL_AF_STATE,
            &mAfState, 1);
    settings.update(ANDROID_CONTROL_AWB_STATE,
            &mAwbState, 1);

    uint8_t lensState;
    switch (mAfState) {
        case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
        case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN:
            lensState = ANDROID_LENS_STATE_MOVING;
            break;
        case ANDROID_CONTROL_AF_STATE_INACTIVE:
        case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
        case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
        case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
        case ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED:
        default:
            lensState = ANDROID_LENS_STATE_STATIONARY;
            break;
    }
    settings.update(ANDROID_LENS_STATE, &lensState, 1);
}

void EmulatedFakeCamera3::signalReadoutIdle() {
    Mutex::Autolock l(mLock);
    // Need to check isIdle again because waiting on mLock may have allowed
    // something to be placed in the in-flight queue.
    if (mStatus == STATUS_ACTIVE && mReadoutThread->isIdle()) {
        ALOGV("Now idle");
        mStatus = STATUS_READY;
    }
}

void EmulatedFakeCamera3::onSensorEvent(uint32_t frameNumber, Event e,
        nsecs_t timestamp) {
    switch (e) {
        case Sensor::SensorListener::EXPOSURE_START: {
            ALOGVV("%s: Frame %d: Sensor started exposure at %lld",
                    __FUNCTION__, frameNumber, timestamp);
            // Trigger shutter notify to framework
            camera3_notify_msg_t msg;
            msg.type = CAMERA3_MSG_SHUTTER;
            msg.message.shutter.frame_number = frameNumber;
            msg.message.shutter.timestamp = timestamp;
            sendNotify(&msg);
            break;
        }
        default:
            ALOGW("%s: Unexpected sensor event %d at %" PRId64, __FUNCTION__,
                    e, timestamp);
            break;
    }
}

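/**
 * ReadoutThread: simulates the latency between sensor exposure and buffer
 * delivery. Requests queued via queueCaptureRequest() are drained one at a
 * time by threadLoop(), which waits for the matching frame from the Sensor,
 * hands any JPEG (BLOB) output to the JpegCompressor asynchronously, and
 * sends the completed capture result back to the framework.
 */
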
EmulatedFakeCamera3::ReadoutThread::ReadoutThread(EmulatedFakeCamera3 *parent) :
        mParent(parent), mJpegWaiting(false) {
}

EmulatedFakeCamera3::ReadoutThread::~ReadoutThread() {
    for (List<Request>::iterator i = mInFlightQueue.begin();
         i != mInFlightQueue.end(); i++) {
        delete i->buffers;
        delete i->sensorBuffers;
    }
}

void EmulatedFakeCamera3::ReadoutThread::queueCaptureRequest(const Request &r) {
    Mutex::Autolock l(mLock);

    mInFlightQueue.push_back(r);
    mInFlightSignal.signal();
}

bool EmulatedFakeCamera3::ReadoutThread::isIdle() {
    Mutex::Autolock l(mLock);
    return mInFlightQueue.empty() && !mThreadActive;
}

status_t EmulatedFakeCamera3::ReadoutThread::waitForReadout() {
    status_t res;
    Mutex::Autolock l(mLock);
    int loopCount = 0;
    while (mInFlightQueue.size() >= kMaxQueueSize) {
        res = mInFlightSignal.waitRelative(mLock, kWaitPerLoop);
        if (res != OK && res != TIMED_OUT) {
            ALOGE("%s: Error waiting for in-flight queue to shrink",
                    __FUNCTION__);
            return INVALID_OPERATION;
        }
        if (loopCount == kMaxWaitLoops) {
            ALOGE("%s: Timed out waiting for in-flight queue to shrink",
                    __FUNCTION__);
            return TIMED_OUT;
        }
        loopCount++;
    }
    return OK;
}

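// threadLoop() below runs in three stages per iteration: (1) pop the next
// request from the in-flight queue (or time out and try again), (2) block
// until the Sensor reports a new frame, (3) finalize the output buffers,
// kicking off asynchronous JPEG compression where needed, and then assemble
// and send the capture result.
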
bool EmulatedFakeCamera3::ReadoutThread::threadLoop() {
    status_t res;

    ALOGVV("%s: ReadoutThread waiting for request", __FUNCTION__);

    // First wait for a request from the in-flight queue

    if (mCurrentRequest.settings.isEmpty()) {
        Mutex::Autolock l(mLock);
        if (mInFlightQueue.empty()) {
            res = mInFlightSignal.waitRelative(mLock, kWaitPerLoop);
            if (res == TIMED_OUT) {
                ALOGVV("%s: ReadoutThread: Timed out waiting for request",
                        __FUNCTION__);
                return true;
            } else if (res != NO_ERROR) {
                ALOGE("%s: Error waiting for capture requests: %d",
                        __FUNCTION__, res);
                return false;
            }
        }
        mCurrentRequest.frameNumber = mInFlightQueue.begin()->frameNumber;
        mCurrentRequest.settings.acquire(mInFlightQueue.begin()->settings);
        mCurrentRequest.buffers = mInFlightQueue.begin()->buffers;
        mCurrentRequest.sensorBuffers = mInFlightQueue.begin()->sensorBuffers;
        mInFlightQueue.erase(mInFlightQueue.begin());
        mInFlightSignal.signal();
        mThreadActive = true;
        ALOGVV("%s: Beginning readout of frame %d", __FUNCTION__,
                mCurrentRequest.frameNumber);
    }

    // Then wait for it to be delivered from the sensor
    ALOGVV("%s: ReadoutThread: Wait for frame to be delivered from sensor",
            __FUNCTION__);

    nsecs_t captureTime;
    bool gotFrame =
            mParent->mSensor->waitForNewFrame(kWaitPerLoop, &captureTime);
    if (!gotFrame) {
        ALOGVV("%s: ReadoutThread: Timed out waiting for sensor frame",
                __FUNCTION__);
        return true;
    }

    ALOGVV("Sensor done with readout for frame %d, captured at %lld ",
            mCurrentRequest.frameNumber, captureTime);

    // Check if we need to JPEG encode a buffer, and send it for async
    // compression if so. Otherwise prepare the buffer for return.
    bool needJpeg = false;
    HalBufferVector::iterator buf = mCurrentRequest.buffers->begin();
    while (buf != mCurrentRequest.buffers->end()) {
        bool goodBuffer = true;
        if (buf->stream->format == HAL_PIXEL_FORMAT_BLOB &&
                buf->stream->data_space != HAL_DATASPACE_DEPTH) {
            Mutex::Autolock jl(mJpegLock);
            if (mJpegWaiting) {
                // This shouldn't happen, because processCaptureRequest should
                // be stalling until the JPEG compressor is free.
                ALOGE("%s: Already processing a JPEG!", __FUNCTION__);
                goodBuffer = false;
            }
            if (goodBuffer) {
                // Compressor takes ownership of sensorBuffers here
                res = mParent->mJpegCompressor->start(mCurrentRequest.sensorBuffers,
                        this);
                goodBuffer = (res == OK);
            }
            if (goodBuffer) {
                needJpeg = true;

                mJpegHalBuffer = *buf;
                mJpegFrameNumber = mCurrentRequest.frameNumber;
                mJpegWaiting = true;

                mCurrentRequest.sensorBuffers = NULL;
                buf = mCurrentRequest.buffers->erase(buf);

                continue;
            }
            ALOGE("%s: Error compressing output buffer: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            // fallthrough for cleanup
        }
        GraphicBufferMapper::get().unlock(*(buf->buffer));

        buf->status = goodBuffer ? CAMERA3_BUFFER_STATUS_OK :
                CAMERA3_BUFFER_STATUS_ERROR;
        buf->acquire_fence = -1;
        buf->release_fence = -1;

        ++buf;
    } // end while

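    // At this point any JPEG (BLOB) buffer has been handed off: the compressor
    // now owns mCurrentRequest.sensorBuffers, the corresponding HAL buffer was
    // removed from the immediate result above, and it will be returned to the
    // framework later from onJpegDone() in a separate capture result.
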
    // Construct result for all completed buffers and results

    camera3_capture_result result;

    if (mParent->hasCapability(BACKWARD_COMPATIBLE)) {
        static const uint8_t sceneFlicker = ANDROID_STATISTICS_SCENE_FLICKER_NONE;
        mCurrentRequest.settings.update(ANDROID_STATISTICS_SCENE_FLICKER,
                &sceneFlicker, 1);

        static const uint8_t flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
        mCurrentRequest.settings.update(ANDROID_FLASH_STATE,
                &flashState, 1);

        nsecs_t rollingShutterSkew = Sensor::kFrameDurationRange[0];
        mCurrentRequest.settings.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
                &rollingShutterSkew, 1);

        float focusRange[] = { 1.0f/5.0f, 0 }; // 5 m to infinity in focus
        mCurrentRequest.settings.update(ANDROID_LENS_FOCUS_RANGE,
                focusRange, sizeof(focusRange)/sizeof(float));
    }

    if (mParent->hasCapability(DEPTH_OUTPUT)) {
        camera_metadata_entry_t entry;

        find_camera_metadata_entry(mParent->mCameraInfo, ANDROID_LENS_POSE_TRANSLATION, &entry);
        mCurrentRequest.settings.update(ANDROID_LENS_POSE_TRANSLATION,
                entry.data.f, entry.count);

        find_camera_metadata_entry(mParent->mCameraInfo, ANDROID_LENS_POSE_ROTATION, &entry);
        mCurrentRequest.settings.update(ANDROID_LENS_POSE_ROTATION,
                entry.data.f, entry.count);

        find_camera_metadata_entry(mParent->mCameraInfo, ANDROID_LENS_INTRINSIC_CALIBRATION, &entry);
        mCurrentRequest.settings.update(ANDROID_LENS_INTRINSIC_CALIBRATION,
                entry.data.f, entry.count);

        find_camera_metadata_entry(mParent->mCameraInfo, ANDROID_LENS_RADIAL_DISTORTION, &entry);
        mCurrentRequest.settings.update(ANDROID_LENS_RADIAL_DISTORTION,
                entry.data.f, entry.count);
    }

    mCurrentRequest.settings.update(ANDROID_SENSOR_TIMESTAMP,
            &captureTime, 1);

    // JPEGs take a stage longer
    const uint8_t pipelineDepth = needJpeg ? kMaxBufferCount : kMaxBufferCount - 1;
    mCurrentRequest.settings.update(ANDROID_REQUEST_PIPELINE_DEPTH,
            &pipelineDepth, 1);

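    // partial_result below is always 1: the static metadata advertises
    // ANDROID_REQUEST_PARTIAL_RESULT_COUNT = 1, so each capture delivers a
    // single complete metadata result rather than incremental partial results.
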
    result.frame_number = mCurrentRequest.frameNumber;
    result.result = mCurrentRequest.settings.getAndLock();
    result.num_output_buffers = mCurrentRequest.buffers->size();
    result.output_buffers = mCurrentRequest.buffers->array();
    result.input_buffer = nullptr;
    result.partial_result = 1;

    // Go idle if queue is empty, before sending result
    bool signalIdle = false;
    {
        Mutex::Autolock l(mLock);
        if (mInFlightQueue.empty()) {
            mThreadActive = false;
            signalIdle = true;
        }
    }
    if (signalIdle) mParent->signalReadoutIdle();

    // Send it off to the framework
    ALOGVV("%s: ReadoutThread: Send result to framework",
            __FUNCTION__);
    mParent->sendCaptureResult(&result);

    // Clean up
    mCurrentRequest.settings.unlock(result.result);

    delete mCurrentRequest.buffers;
    mCurrentRequest.buffers = NULL;
    if (!needJpeg) {
        delete mCurrentRequest.sensorBuffers;
        mCurrentRequest.sensorBuffers = NULL;
    }
    mCurrentRequest.settings.clear();

    return true;
}

void EmulatedFakeCamera3::ReadoutThread::onJpegDone(
        const StreamBuffer &jpegBuffer, bool success) {
    Mutex::Autolock jl(mJpegLock);

    GraphicBufferMapper::get().unlock(*(jpegBuffer.buffer));

    mJpegHalBuffer.status = success ?
            CAMERA3_BUFFER_STATUS_OK : CAMERA3_BUFFER_STATUS_ERROR;
    mJpegHalBuffer.acquire_fence = -1;
    mJpegHalBuffer.release_fence = -1;
    mJpegWaiting = false;

    camera3_capture_result result;
    result.frame_number = mJpegFrameNumber;
    result.result = NULL;
    result.num_output_buffers = 1;
    result.output_buffers = &mJpegHalBuffer;

    if (!success) {
        ALOGE("%s: Compression failure, returning error state buffer to"
                " framework", __FUNCTION__);
    } else {
        ALOGV("%s: Compression complete, returning buffer to framework",
                __FUNCTION__);
    }

    mParent->sendCaptureResult(&result);
}

void EmulatedFakeCamera3::ReadoutThread::onJpegInputDone(
        const StreamBuffer &inputBuffer) {
    // Should never get here, since the input buffer has to be returned
    // by the end of processCaptureRequest
    ALOGE("%s: Unexpected input buffer from JPEG compressor!", __FUNCTION__);
}

}; // namespace android