1 /* 2 * Copyright (C) 2013 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17 /* 18 * Contains implementation of a class EmulatedFakeCamera3 that encapsulates 19 * functionality of an advanced fake camera. 20 */ 21 22 //#define LOG_NDEBUG 0 23 //#define LOG_NNDEBUG 0 24 #define LOG_TAG "EmulatedCamera_FakeCamera3" 25 #include <utils/Log.h> 26 27 #include "EmulatedFakeCamera3.h" 28 #include "EmulatedCameraFactory.h" 29 #include <ui/Fence.h> 30 #include <ui/Rect.h> 31 #include <ui/GraphicBufferMapper.h> 32 #include "gralloc_cb.h" 33 34 #include "fake-pipeline2/Sensor.h" 35 #include "fake-pipeline2/JpegCompressor.h" 36 #include <cmath> 37 38 #if defined(LOG_NNDEBUG) && LOG_NNDEBUG == 0 39 #define ALOGVV ALOGV 40 #else 41 #define ALOGVV(...) ((void)0) 42 #endif 43 44 namespace android { 45 46 /** 47 * Constants for camera capabilities 48 */ 49 50 const int64_t USEC = 1000LL; 51 const int64_t MSEC = USEC * 1000LL; 52 const int64_t SEC = MSEC * 1000LL; 53 54 const int32_t EmulatedFakeCamera3::kAvailableFormats[5] = { 55 HAL_PIXEL_FORMAT_RAW_SENSOR, 56 HAL_PIXEL_FORMAT_BLOB, 57 HAL_PIXEL_FORMAT_RGBA_8888, 58 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 59 // These are handled by YCbCr_420_888 60 // HAL_PIXEL_FORMAT_YV12, 61 // HAL_PIXEL_FORMAT_YCrCb_420_SP, 62 HAL_PIXEL_FORMAT_YCbCr_420_888 63 }; 64 65 const uint32_t EmulatedFakeCamera3::kAvailableRawSizes[2] = { 66 640, 480 67 // Sensor::kResolution[0], Sensor::kResolution[1] 68 }; 69 70 const uint64_t EmulatedFakeCamera3::kAvailableRawMinDurations[1] = { 71 (const uint64_t)Sensor::kFrameDurationRange[0] 72 }; 73 74 const uint32_t EmulatedFakeCamera3::kAvailableProcessedSizesBack[4] = { 75 640, 480, 320, 240 76 // Sensor::kResolution[0], Sensor::kResolution[1] 77 }; 78 79 const uint32_t EmulatedFakeCamera3::kAvailableProcessedSizesFront[4] = { 80 320, 240, 160, 120 81 // Sensor::kResolution[0], Sensor::kResolution[1] 82 }; 83 84 const uint64_t EmulatedFakeCamera3::kAvailableProcessedMinDurations[1] = { 85 (const uint64_t)Sensor::kFrameDurationRange[0] 86 }; 87 88 const uint32_t EmulatedFakeCamera3::kAvailableJpegSizesBack[2] = { 89 640, 480 90 // Sensor::kResolution[0], Sensor::kResolution[1] 91 }; 92 93 const uint32_t EmulatedFakeCamera3::kAvailableJpegSizesFront[2] = { 94 320, 240 95 // Sensor::kResolution[0], Sensor::kResolution[1] 96 }; 97 98 99 const uint64_t EmulatedFakeCamera3::kAvailableJpegMinDurations[1] = { 100 (const uint64_t)Sensor::kFrameDurationRange[0] 101 }; 102 103 /** 104 * 3A constants 105 */ 106 107 // Default exposure and gain targets for different scenarios 108 const nsecs_t EmulatedFakeCamera3::kNormalExposureTime = 10 * MSEC; 109 const nsecs_t EmulatedFakeCamera3::kFacePriorityExposureTime = 30 * MSEC; 110 const int EmulatedFakeCamera3::kNormalSensitivity = 100; 111 const int EmulatedFakeCamera3::kFacePrioritySensitivity = 400; 112 const float EmulatedFakeCamera3::kExposureTrackRate = 0.1; 113 const int EmulatedFakeCamera3::kPrecaptureMinFrames = 10; 114 const int EmulatedFakeCamera3::kStableAeMaxFrames = 100; 115 const float EmulatedFakeCamera3::kExposureWanderMin = -2; 116 const float EmulatedFakeCamera3::kExposureWanderMax = 1; 117 118 /** 119 * Camera device lifecycle methods 120 */ 121 122 EmulatedFakeCamera3::EmulatedFakeCamera3(int cameraId, bool facingBack, 123 struct hw_module_t* module) : 124 EmulatedCamera3(cameraId, module), 125 mFacingBack(facingBack) { 126 ALOGI("Constructing emulated fake camera 3 facing %s", 127 facingBack ? "back" : "front"); 128 129 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) { 130 mDefaultTemplates[i] = NULL; 131 } 132 133 /** 134 * Front cameras = limited mode 135 * Back cameras = full mode 136 */ 137 mFullMode = facingBack; 138 } 139 140 EmulatedFakeCamera3::~EmulatedFakeCamera3() { 141 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) { 142 if (mDefaultTemplates[i] != NULL) { 143 free_camera_metadata(mDefaultTemplates[i]); 144 } 145 } 146 } 147 148 status_t EmulatedFakeCamera3::Initialize() { 149 ALOGV("%s: E", __FUNCTION__); 150 status_t res; 151 152 if (mStatus != STATUS_ERROR) { 153 ALOGE("%s: Already initialized!", __FUNCTION__); 154 return INVALID_OPERATION; 155 } 156 157 res = constructStaticInfo(); 158 if (res != OK) { 159 ALOGE("%s: Unable to allocate static info: %s (%d)", 160 __FUNCTION__, strerror(-res), res); 161 return res; 162 } 163 164 return EmulatedCamera3::Initialize(); 165 } 166 167 status_t EmulatedFakeCamera3::connectCamera(hw_device_t** device) { 168 ALOGV("%s: E", __FUNCTION__); 169 Mutex::Autolock l(mLock); 170 status_t res; 171 172 if (mStatus != STATUS_CLOSED) { 173 ALOGE("%s: Can't connect in state %d", __FUNCTION__, mStatus); 174 return INVALID_OPERATION; 175 } 176 177 mSensor = new Sensor(); 178 mSensor->setSensorListener(this); 179 180 res = mSensor->startUp(); 181 if (res != NO_ERROR) return res; 182 183 mReadoutThread = new ReadoutThread(this); 184 mJpegCompressor = new JpegCompressor(); 185 186 res = mReadoutThread->run("EmuCam3::readoutThread"); 187 if (res != NO_ERROR) return res; 188 189 // Initialize fake 3A 190 191 mControlMode = ANDROID_CONTROL_MODE_AUTO; 192 mFacePriority = false; 193 mAeMode = ANDROID_CONTROL_AE_MODE_ON; 194 mAfMode = ANDROID_CONTROL_AF_MODE_AUTO; 195 mAwbMode = ANDROID_CONTROL_AWB_MODE_AUTO; 196 mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE; 197 mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE; 198 mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE; 199 mAfTriggerId = 0; 200 mAeTriggerId = 0; 201 mAeCurrentExposureTime = kNormalExposureTime; 202 mAeCurrentSensitivity = kNormalSensitivity; 203 204 return EmulatedCamera3::connectCamera(device); 205 } 206 207 status_t EmulatedFakeCamera3::closeCamera() { 208 ALOGV("%s: E", __FUNCTION__); 209 status_t res; 210 { 211 Mutex::Autolock l(mLock); 212 if (mStatus == STATUS_CLOSED) return OK; 213 214 res = mSensor->shutDown(); 215 if (res != NO_ERROR) { 216 ALOGE("%s: Unable to shut down sensor: %d", __FUNCTION__, res); 217 return res; 218 } 219 mSensor.clear(); 220 221 mReadoutThread->requestExit(); 222 } 223 224 mReadoutThread->join(); 225 226 { 227 Mutex::Autolock l(mLock); 228 // Clear out private stream information 229 for (StreamIterator s = mStreams.begin(); s != mStreams.end(); s++) { 230 PrivateStreamInfo *privStream = 231 static_cast<PrivateStreamInfo*>((*s)->priv); 232 delete privStream; 233 (*s)->priv = NULL; 234 } 235 mStreams.clear(); 236 mReadoutThread.clear(); 237 } 238 239 return EmulatedCamera3::closeCamera(); 240 } 241 242 status_t EmulatedFakeCamera3::getCameraInfo(struct camera_info *info) { 243 info->facing = mFacingBack ? CAMERA_FACING_BACK : CAMERA_FACING_FRONT; 244 info->orientation = gEmulatedCameraFactory.getFakeCameraOrientation(); 245 return EmulatedCamera3::getCameraInfo(info); 246 } 247 248 /** 249 * Camera3 interface methods 250 */ 251 252 status_t EmulatedFakeCamera3::configureStreams( 253 camera3_stream_configuration *streamList) { 254 Mutex::Autolock l(mLock); 255 ALOGV("%s: %d streams", __FUNCTION__, streamList->num_streams); 256 257 if (mStatus != STATUS_OPEN && mStatus != STATUS_READY) { 258 ALOGE("%s: Cannot configure streams in state %d", 259 __FUNCTION__, mStatus); 260 return NO_INIT; 261 } 262 263 /** 264 * Sanity-check input list. 265 */ 266 if (streamList == NULL) { 267 ALOGE("%s: NULL stream configuration", __FUNCTION__); 268 return BAD_VALUE; 269 } 270 271 if (streamList->streams == NULL) { 272 ALOGE("%s: NULL stream list", __FUNCTION__); 273 return BAD_VALUE; 274 } 275 276 if (streamList->num_streams < 1) { 277 ALOGE("%s: Bad number of streams requested: %d", __FUNCTION__, 278 streamList->num_streams); 279 return BAD_VALUE; 280 } 281 282 camera3_stream_t *inputStream = NULL; 283 for (size_t i = 0; i < streamList->num_streams; i++) { 284 camera3_stream_t *newStream = streamList->streams[i]; 285 286 if (newStream == NULL) { 287 ALOGE("%s: Stream index %d was NULL", 288 __FUNCTION__, i); 289 return BAD_VALUE; 290 } 291 292 ALOGV("%s: Stream %p (id %d), type %d, usage 0x%x, format 0x%x", 293 __FUNCTION__, newStream, i, newStream->stream_type, 294 newStream->usage, 295 newStream->format); 296 297 if (newStream->stream_type == CAMERA3_STREAM_INPUT || 298 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) { 299 if (inputStream != NULL) { 300 301 ALOGE("%s: Multiple input streams requested!", __FUNCTION__); 302 return BAD_VALUE; 303 } 304 inputStream = newStream; 305 } 306 307 bool validFormat = false; 308 for (size_t f = 0; 309 f < sizeof(kAvailableFormats)/sizeof(kAvailableFormats[0]); 310 f++) { 311 if (newStream->format == kAvailableFormats[f]) { 312 validFormat = true; 313 break; 314 } 315 } 316 if (!validFormat) { 317 ALOGE("%s: Unsupported stream format 0x%x requested", 318 __FUNCTION__, newStream->format); 319 return BAD_VALUE; 320 } 321 } 322 mInputStream = inputStream; 323 324 /** 325 * Initially mark all existing streams as not alive 326 */ 327 for (StreamIterator s = mStreams.begin(); s != mStreams.end(); ++s) { 328 PrivateStreamInfo *privStream = 329 static_cast<PrivateStreamInfo*>((*s)->priv); 330 privStream->alive = false; 331 } 332 333 /** 334 * Find new streams and mark still-alive ones 335 */ 336 for (size_t i = 0; i < streamList->num_streams; i++) { 337 camera3_stream_t *newStream = streamList->streams[i]; 338 if (newStream->priv == NULL) { 339 // New stream, construct info 340 PrivateStreamInfo *privStream = new PrivateStreamInfo(); 341 privStream->alive = true; 342 privStream->registered = false; 343 344 switch (newStream->stream_type) { 345 case CAMERA3_STREAM_OUTPUT: 346 newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE; 347 break; 348 case CAMERA3_STREAM_INPUT: 349 newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ; 350 break; 351 case CAMERA3_STREAM_BIDIRECTIONAL: 352 newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ | 353 GRALLOC_USAGE_HW_CAMERA_WRITE; 354 break; 355 } 356 newStream->max_buffers = kMaxBufferCount; 357 newStream->priv = privStream; 358 mStreams.push_back(newStream); 359 } else { 360 // Existing stream, mark as still alive. 361 PrivateStreamInfo *privStream = 362 static_cast<PrivateStreamInfo*>(newStream->priv); 363 privStream->alive = true; 364 } 365 } 366 367 /** 368 * Reap the dead streams 369 */ 370 for (StreamIterator s = mStreams.begin(); s != mStreams.end();) { 371 PrivateStreamInfo *privStream = 372 static_cast<PrivateStreamInfo*>((*s)->priv); 373 if (!privStream->alive) { 374 (*s)->priv = NULL; 375 delete privStream; 376 s = mStreams.erase(s); 377 } else { 378 ++s; 379 } 380 } 381 382 /** 383 * Can't reuse settings across configure call 384 */ 385 mPrevSettings.clear(); 386 387 return OK; 388 } 389 390 status_t EmulatedFakeCamera3::registerStreamBuffers( 391 const camera3_stream_buffer_set *bufferSet) { 392 ALOGV("%s: E", __FUNCTION__); 393 Mutex::Autolock l(mLock); 394 395 /** 396 * Sanity checks 397 */ 398 399 // OK: register streams at any time during configure 400 // (but only once per stream) 401 if (mStatus != STATUS_READY && mStatus != STATUS_ACTIVE) { 402 ALOGE("%s: Cannot register buffers in state %d", 403 __FUNCTION__, mStatus); 404 return NO_INIT; 405 } 406 407 if (bufferSet == NULL) { 408 ALOGE("%s: NULL buffer set!", __FUNCTION__); 409 return BAD_VALUE; 410 } 411 412 StreamIterator s = mStreams.begin(); 413 for (; s != mStreams.end(); ++s) { 414 if (bufferSet->stream == *s) break; 415 } 416 if (s == mStreams.end()) { 417 ALOGE("%s: Trying to register buffers for a non-configured stream!", 418 __FUNCTION__); 419 return BAD_VALUE; 420 } 421 422 /** 423 * Register the buffers. This doesn't mean anything to the emulator besides 424 * marking them off as registered. 425 */ 426 427 PrivateStreamInfo *privStream = 428 static_cast<PrivateStreamInfo*>((*s)->priv); 429 430 if (privStream->registered) { 431 ALOGE("%s: Illegal to register buffer more than once", __FUNCTION__); 432 return BAD_VALUE; 433 } 434 435 privStream->registered = true; 436 437 return OK; 438 } 439 440 const camera_metadata_t* EmulatedFakeCamera3::constructDefaultRequestSettings( 441 int type) { 442 ALOGV("%s: E", __FUNCTION__); 443 Mutex::Autolock l(mLock); 444 445 if (type < 0 || type >= CAMERA2_TEMPLATE_COUNT) { 446 ALOGE("%s: Unknown request settings template: %d", 447 __FUNCTION__, type); 448 return NULL; 449 } 450 451 /** 452 * Cache is not just an optimization - pointer returned has to live at 453 * least as long as the camera device instance does. 454 */ 455 if (mDefaultTemplates[type] != NULL) { 456 return mDefaultTemplates[type]; 457 } 458 459 CameraMetadata settings; 460 461 /** android.request */ 462 463 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE; 464 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1); 465 466 static const uint8_t metadataMode = ANDROID_REQUEST_METADATA_MODE_FULL; 467 settings.update(ANDROID_REQUEST_METADATA_MODE, &metadataMode, 1); 468 469 static const int32_t id = 0; 470 settings.update(ANDROID_REQUEST_ID, &id, 1); 471 472 static const int32_t frameCount = 0; 473 settings.update(ANDROID_REQUEST_FRAME_COUNT, &frameCount, 1); 474 475 /** android.lens */ 476 477 static const float focusDistance = 0; 478 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focusDistance, 1); 479 480 static const float aperture = 2.8f; 481 settings.update(ANDROID_LENS_APERTURE, &aperture, 1); 482 483 static const float focalLength = 5.0f; 484 settings.update(ANDROID_LENS_FOCAL_LENGTH, &focalLength, 1); 485 486 static const float filterDensity = 0; 487 settings.update(ANDROID_LENS_FILTER_DENSITY, &filterDensity, 1); 488 489 static const uint8_t opticalStabilizationMode = 490 ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF; 491 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, 492 &opticalStabilizationMode, 1); 493 494 // FOCUS_RANGE set only in frame 495 496 /** android.sensor */ 497 498 static const int64_t exposureTime = 10 * MSEC; 499 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &exposureTime, 1); 500 501 static const int64_t frameDuration = 33333333L; // 1/30 s 502 settings.update(ANDROID_SENSOR_FRAME_DURATION, &frameDuration, 1); 503 504 static const int32_t sensitivity = 100; 505 settings.update(ANDROID_SENSOR_SENSITIVITY, &sensitivity, 1); 506 507 // TIMESTAMP set only in frame 508 509 /** android.flash */ 510 511 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF; 512 settings.update(ANDROID_FLASH_MODE, &flashMode, 1); 513 514 static const uint8_t flashPower = 10; 515 settings.update(ANDROID_FLASH_FIRING_POWER, &flashPower, 1); 516 517 static const int64_t firingTime = 0; 518 settings.update(ANDROID_FLASH_FIRING_TIME, &firingTime, 1); 519 520 /** Processing block modes */ 521 uint8_t hotPixelMode = 0; 522 uint8_t demosaicMode = 0; 523 uint8_t noiseMode = 0; 524 uint8_t shadingMode = 0; 525 uint8_t geometricMode = 0; 526 uint8_t colorMode = 0; 527 uint8_t tonemapMode = 0; 528 uint8_t edgeMode = 0; 529 switch (type) { 530 case CAMERA2_TEMPLATE_STILL_CAPTURE: 531 // fall-through 532 case CAMERA2_TEMPLATE_VIDEO_SNAPSHOT: 533 // fall-through 534 case CAMERA2_TEMPLATE_ZERO_SHUTTER_LAG: 535 hotPixelMode = ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY; 536 demosaicMode = ANDROID_DEMOSAIC_MODE_HIGH_QUALITY; 537 noiseMode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY; 538 shadingMode = ANDROID_SHADING_MODE_HIGH_QUALITY; 539 geometricMode = ANDROID_GEOMETRIC_MODE_HIGH_QUALITY; 540 colorMode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY; 541 tonemapMode = ANDROID_TONEMAP_MODE_HIGH_QUALITY; 542 edgeMode = ANDROID_EDGE_MODE_HIGH_QUALITY; 543 break; 544 case CAMERA2_TEMPLATE_PREVIEW: 545 // fall-through 546 case CAMERA2_TEMPLATE_VIDEO_RECORD: 547 // fall-through 548 default: 549 hotPixelMode = ANDROID_HOT_PIXEL_MODE_FAST; 550 demosaicMode = ANDROID_DEMOSAIC_MODE_FAST; 551 noiseMode = ANDROID_NOISE_REDUCTION_MODE_FAST; 552 shadingMode = ANDROID_SHADING_MODE_FAST; 553 geometricMode = ANDROID_GEOMETRIC_MODE_FAST; 554 colorMode = ANDROID_COLOR_CORRECTION_MODE_FAST; 555 tonemapMode = ANDROID_TONEMAP_MODE_FAST; 556 edgeMode = ANDROID_EDGE_MODE_FAST; 557 break; 558 } 559 settings.update(ANDROID_HOT_PIXEL_MODE, &hotPixelMode, 1); 560 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1); 561 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noiseMode, 1); 562 settings.update(ANDROID_SHADING_MODE, &shadingMode, 1); 563 settings.update(ANDROID_GEOMETRIC_MODE, &geometricMode, 1); 564 settings.update(ANDROID_COLOR_CORRECTION_MODE, &colorMode, 1); 565 settings.update(ANDROID_TONEMAP_MODE, &tonemapMode, 1); 566 settings.update(ANDROID_EDGE_MODE, &edgeMode, 1); 567 568 /** android.noise */ 569 static const uint8_t noiseStrength = 5; 570 settings.update(ANDROID_NOISE_REDUCTION_STRENGTH, &noiseStrength, 1); 571 572 /** android.color */ 573 static const float colorTransform[9] = { 574 1.0f, 0.f, 0.f, 575 0.f, 1.f, 0.f, 576 0.f, 0.f, 1.f 577 }; 578 settings.update(ANDROID_COLOR_CORRECTION_TRANSFORM, colorTransform, 9); 579 580 /** android.tonemap */ 581 static const float tonemapCurve[4] = { 582 0.f, 0.f, 583 1.f, 1.f 584 }; 585 settings.update(ANDROID_TONEMAP_CURVE_RED, tonemapCurve, 4); 586 settings.update(ANDROID_TONEMAP_CURVE_GREEN, tonemapCurve, 4); 587 settings.update(ANDROID_TONEMAP_CURVE_BLUE, tonemapCurve, 4); 588 589 /** android.edge */ 590 static const uint8_t edgeStrength = 5; 591 settings.update(ANDROID_EDGE_STRENGTH, &edgeStrength, 1); 592 593 /** android.scaler */ 594 static const int32_t cropRegion[3] = { 595 0, 0, (int32_t)Sensor::kResolution[0] 596 }; 597 settings.update(ANDROID_SCALER_CROP_REGION, cropRegion, 3); 598 599 /** android.jpeg */ 600 static const uint8_t jpegQuality = 80; 601 settings.update(ANDROID_JPEG_QUALITY, &jpegQuality, 1); 602 603 static const int32_t thumbnailSize[2] = { 604 640, 480 605 }; 606 settings.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSize, 2); 607 608 static const uint8_t thumbnailQuality = 80; 609 settings.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &thumbnailQuality, 1); 610 611 static const double gpsCoordinates[2] = { 612 0, 0 613 }; 614 settings.update(ANDROID_JPEG_GPS_COORDINATES, gpsCoordinates, 2); 615 616 static const uint8_t gpsProcessingMethod[32] = "None"; 617 settings.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, gpsProcessingMethod, 32); 618 619 static const int64_t gpsTimestamp = 0; 620 settings.update(ANDROID_JPEG_GPS_TIMESTAMP, &gpsTimestamp, 1); 621 622 static const int32_t jpegOrientation = 0; 623 settings.update(ANDROID_JPEG_ORIENTATION, &jpegOrientation, 1); 624 625 /** android.stats */ 626 627 static const uint8_t faceDetectMode = 628 ANDROID_STATISTICS_FACE_DETECT_MODE_OFF; 629 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1); 630 631 static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF; 632 settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1); 633 634 static const uint8_t sharpnessMapMode = 635 ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF; 636 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1); 637 638 // faceRectangles, faceScores, faceLandmarks, faceIds, histogram, 639 // sharpnessMap only in frames 640 641 /** android.control */ 642 643 uint8_t controlIntent = 0; 644 switch (type) { 645 case CAMERA2_TEMPLATE_PREVIEW: 646 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW; 647 break; 648 case CAMERA2_TEMPLATE_STILL_CAPTURE: 649 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE; 650 break; 651 case CAMERA2_TEMPLATE_VIDEO_RECORD: 652 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD; 653 break; 654 case CAMERA2_TEMPLATE_VIDEO_SNAPSHOT: 655 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT; 656 break; 657 case CAMERA2_TEMPLATE_ZERO_SHUTTER_LAG: 658 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG; 659 break; 660 default: 661 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM; 662 break; 663 } 664 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1); 665 666 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO; 667 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1); 668 669 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF; 670 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1); 671 672 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; 673 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1); 674 675 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON; 676 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1); 677 678 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF; 679 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1); 680 681 static const int32_t controlRegions[5] = { 682 0, 0, (int32_t)Sensor::kResolution[0], (int32_t)Sensor::kResolution[1], 683 1000 684 }; 685 settings.update(ANDROID_CONTROL_AE_REGIONS, controlRegions, 5); 686 687 static const int32_t aeExpCompensation = 0; 688 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &aeExpCompensation, 1); 689 690 static const int32_t aeTargetFpsRange[2] = { 691 10, 30 692 }; 693 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, aeTargetFpsRange, 2); 694 695 static const uint8_t aeAntibandingMode = 696 ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO; 697 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &aeAntibandingMode, 1); 698 699 static const uint8_t awbMode = 700 ANDROID_CONTROL_AWB_MODE_AUTO; 701 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1); 702 703 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF; 704 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1); 705 706 settings.update(ANDROID_CONTROL_AWB_REGIONS, controlRegions, 5); 707 708 uint8_t afMode = 0; 709 switch (type) { 710 case CAMERA2_TEMPLATE_PREVIEW: 711 afMode = ANDROID_CONTROL_AF_MODE_AUTO; 712 break; 713 case CAMERA2_TEMPLATE_STILL_CAPTURE: 714 afMode = ANDROID_CONTROL_AF_MODE_AUTO; 715 break; 716 case CAMERA2_TEMPLATE_VIDEO_RECORD: 717 afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO; 718 break; 719 case CAMERA2_TEMPLATE_VIDEO_SNAPSHOT: 720 afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO; 721 break; 722 case CAMERA2_TEMPLATE_ZERO_SHUTTER_LAG: 723 afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE; 724 break; 725 default: 726 afMode = ANDROID_CONTROL_AF_MODE_AUTO; 727 break; 728 } 729 settings.update(ANDROID_CONTROL_AF_MODE, &afMode, 1); 730 731 settings.update(ANDROID_CONTROL_AF_REGIONS, controlRegions, 5); 732 733 static const uint8_t vstabMode = 734 ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF; 735 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vstabMode, 1); 736 737 // aeState, awbState, afState only in frame 738 739 mDefaultTemplates[type] = settings.release(); 740 741 return mDefaultTemplates[type]; 742 } 743 744 status_t EmulatedFakeCamera3::processCaptureRequest( 745 camera3_capture_request *request) { 746 747 Mutex::Autolock l(mLock); 748 status_t res; 749 750 /** Validation */ 751 752 if (mStatus < STATUS_READY) { 753 ALOGE("%s: Can't submit capture requests in state %d", __FUNCTION__, 754 mStatus); 755 return INVALID_OPERATION; 756 } 757 758 if (request == NULL) { 759 ALOGE("%s: NULL request!", __FUNCTION__); 760 return BAD_VALUE; 761 } 762 763 uint32_t frameNumber = request->frame_number; 764 765 if (request->settings == NULL && mPrevSettings.isEmpty()) { 766 ALOGE("%s: Request %d: NULL settings for first request after" 767 "configureStreams()", __FUNCTION__, frameNumber); 768 return BAD_VALUE; 769 } 770 771 if (request->input_buffer != NULL && 772 request->input_buffer->stream != mInputStream) { 773 ALOGE("%s: Request %d: Input buffer not from input stream!", 774 __FUNCTION__, frameNumber); 775 ALOGV("%s: Bad stream %p, expected: %p", 776 __FUNCTION__, request->input_buffer->stream, 777 mInputStream); 778 ALOGV("%s: Bad stream type %d, expected stream type %d", 779 __FUNCTION__, request->input_buffer->stream->stream_type, 780 mInputStream ? mInputStream->stream_type : -1); 781 782 return BAD_VALUE; 783 } 784 785 if (request->num_output_buffers < 1 || request->output_buffers == NULL) { 786 ALOGE("%s: Request %d: No output buffers provided!", 787 __FUNCTION__, frameNumber); 788 return BAD_VALUE; 789 } 790 791 // Validate all buffers, starting with input buffer if it's given 792 793 ssize_t idx; 794 const camera3_stream_buffer_t *b; 795 if (request->input_buffer != NULL) { 796 idx = -1; 797 b = request->input_buffer; 798 } else { 799 idx = 0; 800 b = request->output_buffers; 801 } 802 do { 803 PrivateStreamInfo *priv = 804 static_cast<PrivateStreamInfo*>(b->stream->priv); 805 if (priv == NULL) { 806 ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!", 807 __FUNCTION__, frameNumber, idx); 808 return BAD_VALUE; 809 } 810 if (!priv->alive || !priv->registered) { 811 ALOGE("%s: Request %d: Buffer %d: Unregistered or dead stream!", 812 __FUNCTION__, frameNumber, idx); 813 return BAD_VALUE; 814 } 815 if (b->status != CAMERA3_BUFFER_STATUS_OK) { 816 ALOGE("%s: Request %d: Buffer %d: Status not OK!", 817 __FUNCTION__, frameNumber, idx); 818 return BAD_VALUE; 819 } 820 if (b->release_fence != -1) { 821 ALOGE("%s: Request %d: Buffer %d: Has a release fence!", 822 __FUNCTION__, frameNumber, idx); 823 return BAD_VALUE; 824 } 825 if (b->buffer == NULL) { 826 ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!", 827 __FUNCTION__, frameNumber, idx); 828 return BAD_VALUE; 829 } 830 idx++; 831 b = &(request->output_buffers[idx]); 832 } while (idx < (ssize_t)request->num_output_buffers); 833 834 // TODO: Validate settings parameters 835 836 /** 837 * Start processing this request 838 */ 839 840 mStatus = STATUS_ACTIVE; 841 842 CameraMetadata settings; 843 844 if (request->settings == NULL) { 845 settings.acquire(mPrevSettings); 846 } else { 847 settings = request->settings; 848 } 849 850 res = process3A(settings); 851 if (res != OK) { 852 return res; 853 } 854 855 // TODO: Handle reprocessing 856 857 /** 858 * Get ready for sensor config 859 */ 860 861 nsecs_t exposureTime; 862 nsecs_t frameDuration; 863 uint32_t sensitivity; 864 bool needJpeg = false; 865 866 exposureTime = settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0]; 867 frameDuration = settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0]; 868 sensitivity = settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0]; 869 870 Buffers *sensorBuffers = new Buffers(); 871 HalBufferVector *buffers = new HalBufferVector(); 872 873 sensorBuffers->setCapacity(request->num_output_buffers); 874 buffers->setCapacity(request->num_output_buffers); 875 876 // Process all the buffers we got for output, constructing internal buffer 877 // structures for them, and lock them for writing. 878 for (size_t i = 0; i < request->num_output_buffers; i++) { 879 const camera3_stream_buffer &srcBuf = request->output_buffers[i]; 880 const cb_handle_t *privBuffer = 881 static_cast<const cb_handle_t*>(*srcBuf.buffer); 882 StreamBuffer destBuf; 883 destBuf.streamId = kGenericStreamId; 884 destBuf.width = srcBuf.stream->width; 885 destBuf.height = srcBuf.stream->height; 886 destBuf.format = privBuffer->format; // Use real private format 887 destBuf.stride = srcBuf.stream->width; // TODO: query from gralloc 888 destBuf.buffer = srcBuf.buffer; 889 890 if (destBuf.format == HAL_PIXEL_FORMAT_BLOB) { 891 needJpeg = true; 892 } 893 894 // Wait on fence 895 sp<Fence> bufferAcquireFence = new Fence(srcBuf.acquire_fence); 896 res = bufferAcquireFence->wait(kFenceTimeoutMs); 897 if (res == TIMED_OUT) { 898 ALOGE("%s: Request %d: Buffer %d: Fence timed out after %d ms", 899 __FUNCTION__, frameNumber, i, kFenceTimeoutMs); 900 } 901 if (res == OK) { 902 // Lock buffer for writing 903 const Rect rect(destBuf.width, destBuf.height); 904 if (srcBuf.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) { 905 if (privBuffer->format == HAL_PIXEL_FORMAT_YCrCb_420_SP) { 906 android_ycbcr ycbcr = android_ycbcr(); 907 res = GraphicBufferMapper::get().lockYCbCr( 908 *(destBuf.buffer), 909 GRALLOC_USAGE_HW_CAMERA_WRITE, rect, 910 &ycbcr); 911 // This is only valid because we know that emulator's 912 // YCbCr_420_888 is really contiguous NV21 under the hood 913 destBuf.img = static_cast<uint8_t*>(ycbcr.y); 914 } else { 915 ALOGE("Unexpected private format for flexible YUV: 0x%x", 916 privBuffer->format); 917 res = INVALID_OPERATION; 918 } 919 } else { 920 res = GraphicBufferMapper::get().lock(*(destBuf.buffer), 921 GRALLOC_USAGE_HW_CAMERA_WRITE, rect, 922 (void**)&(destBuf.img)); 923 } 924 if (res != OK) { 925 ALOGE("%s: Request %d: Buffer %d: Unable to lock buffer", 926 __FUNCTION__, frameNumber, i); 927 } 928 } 929 930 if (res != OK) { 931 // Either waiting or locking failed. Unlock locked buffers and bail 932 // out. 933 for (size_t j = 0; j < i; j++) { 934 GraphicBufferMapper::get().unlock( 935 *(request->output_buffers[i].buffer)); 936 } 937 return NO_INIT; 938 } 939 940 sensorBuffers->push_back(destBuf); 941 buffers->push_back(srcBuf); 942 } 943 944 /** 945 * Wait for JPEG compressor to not be busy, if needed 946 */ 947 if (needJpeg) { 948 bool ready = mJpegCompressor->waitForDone(kFenceTimeoutMs); 949 if (!ready) { 950 ALOGE("%s: Timeout waiting for JPEG compression to complete!", 951 __FUNCTION__); 952 return NO_INIT; 953 } 954 } 955 956 /** 957 * Wait until the in-flight queue has room 958 */ 959 res = mReadoutThread->waitForReadout(); 960 if (res != OK) { 961 ALOGE("%s: Timeout waiting for previous requests to complete!", 962 __FUNCTION__); 963 return NO_INIT; 964 } 965 966 /** 967 * Wait until sensor's ready. This waits for lengthy amounts of time with 968 * mLock held, but the interface spec is that no other calls may by done to 969 * the HAL by the framework while process_capture_request is happening. 970 */ 971 int syncTimeoutCount = 0; 972 while(!mSensor->waitForVSync(kSyncWaitTimeout)) { 973 if (mStatus == STATUS_ERROR) { 974 return NO_INIT; 975 } 976 if (syncTimeoutCount == kMaxSyncTimeoutCount) { 977 ALOGE("%s: Request %d: Sensor sync timed out after %lld ms", 978 __FUNCTION__, frameNumber, 979 kSyncWaitTimeout * kMaxSyncTimeoutCount / 1000000); 980 return NO_INIT; 981 } 982 syncTimeoutCount++; 983 } 984 985 /** 986 * Configure sensor and queue up the request to the readout thread 987 */ 988 mSensor->setExposureTime(exposureTime); 989 mSensor->setFrameDuration(frameDuration); 990 mSensor->setSensitivity(sensitivity); 991 mSensor->setDestinationBuffers(sensorBuffers); 992 mSensor->setFrameNumber(request->frame_number); 993 994 ReadoutThread::Request r; 995 r.frameNumber = request->frame_number; 996 r.settings = settings; 997 r.sensorBuffers = sensorBuffers; 998 r.buffers = buffers; 999 1000 mReadoutThread->queueCaptureRequest(r); 1001 ALOGVV("%s: Queued frame %d", __FUNCTION__, request->frame_number); 1002 1003 // Cache the settings for next time 1004 mPrevSettings.acquire(settings); 1005 1006 return OK; 1007 } 1008 1009 /** Debug methods */ 1010 1011 void EmulatedFakeCamera3::dump(int fd) { 1012 1013 } 1014 1015 /** Tag query methods */ 1016 const char* EmulatedFakeCamera3::getVendorSectionName(uint32_t tag) { 1017 return NULL; 1018 } 1019 1020 const char* EmulatedFakeCamera3::getVendorTagName(uint32_t tag) { 1021 return NULL; 1022 } 1023 1024 int EmulatedFakeCamera3::getVendorTagType(uint32_t tag) { 1025 return 0; 1026 } 1027 1028 /** 1029 * Private methods 1030 */ 1031 1032 status_t EmulatedFakeCamera3::constructStaticInfo() { 1033 1034 CameraMetadata info; 1035 // android.lens 1036 1037 // 5 cm min focus distance for back camera, infinity (fixed focus) for front 1038 const float minFocusDistance = mFacingBack ? 1.0/0.05 : 0.0; 1039 info.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE, 1040 &minFocusDistance, 1); 1041 1042 // 5 m hyperfocal distance for back camera, infinity (fixed focus) for front 1043 const float hyperFocalDistance = mFacingBack ? 1.0/5.0 : 0.0; 1044 info.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, 1045 &minFocusDistance, 1); 1046 1047 static const float focalLength = 3.30f; // mm 1048 info.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS, 1049 &focalLength, 1); 1050 static const float aperture = 2.8f; 1051 info.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES, 1052 &aperture, 1); 1053 static const float filterDensity = 0; 1054 info.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES, 1055 &filterDensity, 1); 1056 static const uint8_t availableOpticalStabilization = 1057 ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF; 1058 info.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION, 1059 &availableOpticalStabilization, 1); 1060 1061 static const int32_t lensShadingMapSize[] = {1, 1}; 1062 info.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE, lensShadingMapSize, 1063 sizeof(lensShadingMapSize)/sizeof(int32_t)); 1064 1065 static const float lensShadingMap[3 * 1 * 1 ] = 1066 { 1.f, 1.f, 1.f }; 1067 info.update(ANDROID_LENS_INFO_SHADING_MAP, lensShadingMap, 1068 sizeof(lensShadingMap)/sizeof(float)); 1069 1070 // Identity transform 1071 static const int32_t geometricCorrectionMapSize[] = {2, 2}; 1072 info.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE, 1073 geometricCorrectionMapSize, 1074 sizeof(geometricCorrectionMapSize)/sizeof(int32_t)); 1075 1076 static const float geometricCorrectionMap[2 * 3 * 2 * 2] = { 1077 0.f, 0.f, 0.f, 0.f, 0.f, 0.f, 1078 1.f, 0.f, 1.f, 0.f, 1.f, 0.f, 1079 0.f, 1.f, 0.f, 1.f, 0.f, 1.f, 1080 1.f, 1.f, 1.f, 1.f, 1.f, 1.f}; 1081 info.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP, 1082 geometricCorrectionMap, 1083 sizeof(geometricCorrectionMap)/sizeof(float)); 1084 1085 uint8_t lensFacing = mFacingBack ? 1086 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT; 1087 info.update(ANDROID_LENS_FACING, &lensFacing, 1); 1088 1089 float lensPosition[3]; 1090 if (mFacingBack) { 1091 // Back-facing camera is center-top on device 1092 lensPosition[0] = 0; 1093 lensPosition[1] = 20; 1094 lensPosition[2] = -5; 1095 } else { 1096 // Front-facing camera is center-right on device 1097 lensPosition[0] = 20; 1098 lensPosition[1] = 20; 1099 lensPosition[2] = 0; 1100 } 1101 info.update(ANDROID_LENS_POSITION, lensPosition, sizeof(lensPosition)/ 1102 sizeof(float)); 1103 1104 // android.sensor 1105 1106 info.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, 1107 Sensor::kExposureTimeRange, 2); 1108 1109 info.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION, 1110 &Sensor::kFrameDurationRange[1], 1); 1111 1112 info.update(ANDROID_SENSOR_INFO_AVAILABLE_SENSITIVITIES, 1113 (int32_t*)Sensor::kAvailableSensitivities, 1114 sizeof(Sensor::kAvailableSensitivities) 1115 /sizeof(uint32_t)); 1116 1117 info.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT, 1118 &Sensor::kColorFilterArrangement, 1); 1119 1120 static const float sensorPhysicalSize[2] = {3.20f, 2.40f}; // mm 1121 info.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE, 1122 sensorPhysicalSize, 2); 1123 1124 info.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, 1125 (int32_t*)Sensor::kResolution, 2); 1126 1127 info.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, 1128 (int32_t*)Sensor::kResolution, 2); 1129 1130 info.update(ANDROID_SENSOR_INFO_WHITE_LEVEL, 1131 (int32_t*)&Sensor::kMaxRawValue, 1); 1132 1133 static const int32_t blackLevelPattern[4] = { 1134 (int32_t)Sensor::kBlackLevel, (int32_t)Sensor::kBlackLevel, 1135 (int32_t)Sensor::kBlackLevel, (int32_t)Sensor::kBlackLevel 1136 }; 1137 info.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN, 1138 blackLevelPattern, sizeof(blackLevelPattern)/sizeof(int32_t)); 1139 1140 //TODO: sensor color calibration fields 1141 1142 // android.flash 1143 static const uint8_t flashAvailable = 0; 1144 info.update(ANDROID_FLASH_INFO_AVAILABLE, &flashAvailable, 1); 1145 1146 static const int64_t flashChargeDuration = 0; 1147 info.update(ANDROID_FLASH_INFO_CHARGE_DURATION, &flashChargeDuration, 1); 1148 1149 // android.tonemap 1150 1151 static const int32_t tonemapCurvePoints = 128; 1152 info.update(ANDROID_TONEMAP_MAX_CURVE_POINTS, &tonemapCurvePoints, 1); 1153 1154 // android.scaler 1155 1156 info.update(ANDROID_SCALER_AVAILABLE_FORMATS, 1157 kAvailableFormats, 1158 sizeof(kAvailableFormats)/sizeof(int32_t)); 1159 1160 info.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES, 1161 (int32_t*)kAvailableRawSizes, 1162 sizeof(kAvailableRawSizes)/sizeof(uint32_t)); 1163 1164 info.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS, 1165 (int64_t*)kAvailableRawMinDurations, 1166 sizeof(kAvailableRawMinDurations)/sizeof(uint64_t)); 1167 1168 if (mFacingBack) { 1169 info.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES, 1170 (int32_t*)kAvailableProcessedSizesBack, 1171 sizeof(kAvailableProcessedSizesBack)/sizeof(uint32_t)); 1172 } else { 1173 info.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES, 1174 (int32_t*)kAvailableProcessedSizesFront, 1175 sizeof(kAvailableProcessedSizesFront)/sizeof(uint32_t)); 1176 } 1177 1178 info.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS, 1179 (int64_t*)kAvailableProcessedMinDurations, 1180 sizeof(kAvailableProcessedMinDurations)/sizeof(uint64_t)); 1181 1182 if (mFacingBack) { 1183 info.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES, 1184 (int32_t*)kAvailableJpegSizesBack, 1185 sizeof(kAvailableJpegSizesBack)/sizeof(uint32_t)); 1186 } else { 1187 info.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES, 1188 (int32_t*)kAvailableJpegSizesFront, 1189 sizeof(kAvailableJpegSizesFront)/sizeof(uint32_t)); 1190 } 1191 1192 info.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS, 1193 (int64_t*)kAvailableJpegMinDurations, 1194 sizeof(kAvailableJpegMinDurations)/sizeof(uint64_t)); 1195 1196 static const float maxZoom = 10; 1197 info.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, 1198 &maxZoom, 1); 1199 1200 // android.jpeg 1201 1202 static const int32_t jpegThumbnailSizes[] = { 1203 0, 0, 1204 160, 120, 1205 320, 240 1206 }; 1207 info.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, 1208 jpegThumbnailSizes, sizeof(jpegThumbnailSizes)/sizeof(int32_t)); 1209 1210 static const int32_t jpegMaxSize = JpegCompressor::kMaxJpegSize; 1211 info.update(ANDROID_JPEG_MAX_SIZE, &jpegMaxSize, 1); 1212 1213 // android.stats 1214 1215 static const uint8_t availableFaceDetectModes[] = { 1216 ANDROID_STATISTICS_FACE_DETECT_MODE_OFF, 1217 ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, 1218 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL 1219 }; 1220 1221 info.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES, 1222 availableFaceDetectModes, 1223 sizeof(availableFaceDetectModes)); 1224 1225 static const int32_t maxFaceCount = 8; 1226 info.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, 1227 &maxFaceCount, 1); 1228 1229 static const int32_t histogramSize = 64; 1230 info.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT, 1231 &histogramSize, 1); 1232 1233 static const int32_t maxHistogramCount = 1000; 1234 info.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT, 1235 &maxHistogramCount, 1); 1236 1237 static const int32_t sharpnessMapSize[2] = {64, 64}; 1238 info.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, 1239 sharpnessMapSize, sizeof(sharpnessMapSize)/sizeof(int32_t)); 1240 1241 static const int32_t maxSharpnessMapValue = 1000; 1242 info.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE, 1243 &maxSharpnessMapValue, 1); 1244 1245 // android.control 1246 1247 static const uint8_t availableSceneModes[] = { 1248 ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED 1249 }; 1250 info.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES, 1251 availableSceneModes, sizeof(availableSceneModes)); 1252 1253 static const uint8_t availableEffects[] = { 1254 ANDROID_CONTROL_EFFECT_MODE_OFF 1255 }; 1256 info.update(ANDROID_CONTROL_AVAILABLE_EFFECTS, 1257 availableEffects, sizeof(availableEffects)); 1258 1259 int32_t max3aRegions = 0; 1260 info.update(ANDROID_CONTROL_MAX_REGIONS, 1261 &max3aRegions, 1); 1262 1263 static const uint8_t availableAeModes[] = { 1264 ANDROID_CONTROL_AE_MODE_OFF, 1265 ANDROID_CONTROL_AE_MODE_ON 1266 }; 1267 info.update(ANDROID_CONTROL_AE_AVAILABLE_MODES, 1268 availableAeModes, sizeof(availableAeModes)); 1269 1270 static const camera_metadata_rational exposureCompensationStep = { 1271 1, 3 1272 }; 1273 info.update(ANDROID_CONTROL_AE_COMPENSATION_STEP, 1274 &exposureCompensationStep, 1); 1275 1276 int32_t exposureCompensationRange[] = {-9, 9}; 1277 info.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE, 1278 exposureCompensationRange, 1279 sizeof(exposureCompensationRange)/sizeof(int32_t)); 1280 1281 static const int32_t availableTargetFpsRanges[] = { 1282 5, 30, 15, 30 1283 }; 1284 info.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, 1285 availableTargetFpsRanges, 1286 sizeof(availableTargetFpsRanges)/sizeof(int32_t)); 1287 1288 static const uint8_t availableAntibandingModes[] = { 1289 ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF, 1290 ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO 1291 }; 1292 info.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES, 1293 availableAntibandingModes, sizeof(availableAntibandingModes)); 1294 1295 static const uint8_t availableAwbModes[] = { 1296 ANDROID_CONTROL_AWB_MODE_OFF, 1297 ANDROID_CONTROL_AWB_MODE_AUTO, 1298 ANDROID_CONTROL_AWB_MODE_INCANDESCENT, 1299 ANDROID_CONTROL_AWB_MODE_FLUORESCENT, 1300 ANDROID_CONTROL_AWB_MODE_DAYLIGHT, 1301 ANDROID_CONTROL_AWB_MODE_SHADE 1302 }; 1303 info.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES, 1304 availableAwbModes, sizeof(availableAwbModes)); 1305 1306 static const uint8_t availableAfModesBack[] = { 1307 ANDROID_CONTROL_AF_MODE_OFF, 1308 ANDROID_CONTROL_AF_MODE_AUTO, 1309 ANDROID_CONTROL_AF_MODE_MACRO, 1310 ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO, 1311 ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE 1312 }; 1313 1314 static const uint8_t availableAfModesFront[] = { 1315 ANDROID_CONTROL_AF_MODE_OFF 1316 }; 1317 1318 if (mFacingBack) { 1319 info.update(ANDROID_CONTROL_AF_AVAILABLE_MODES, 1320 availableAfModesBack, sizeof(availableAfModesBack)); 1321 } else { 1322 info.update(ANDROID_CONTROL_AF_AVAILABLE_MODES, 1323 availableAfModesFront, sizeof(availableAfModesFront)); 1324 } 1325 1326 static const uint8_t availableVstabModes[] = { 1327 ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF 1328 }; 1329 info.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES, 1330 availableVstabModes, sizeof(availableVstabModes)); 1331 1332 // android.info 1333 const uint8_t supportedHardwareLevel = 1334 mFullMode ? ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL : 1335 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED; 1336 info.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL, 1337 &supportedHardwareLevel, 1338 /*count*/1); 1339 1340 mCameraInfo = info.release(); 1341 1342 return OK; 1343 } 1344 1345 status_t EmulatedFakeCamera3::process3A(CameraMetadata &settings) { 1346 /** 1347 * Extract top-level 3A controls 1348 */ 1349 status_t res; 1350 1351 bool facePriority = false; 1352 1353 camera_metadata_entry e; 1354 1355 e = settings.find(ANDROID_CONTROL_MODE); 1356 if (e.count == 0) { 1357 ALOGE("%s: No control mode entry!", __FUNCTION__); 1358 return BAD_VALUE; 1359 } 1360 uint8_t controlMode = e.data.u8[0]; 1361 1362 e = settings.find(ANDROID_CONTROL_SCENE_MODE); 1363 if (e.count == 0) { 1364 ALOGE("%s: No scene mode entry!", __FUNCTION__); 1365 return BAD_VALUE; 1366 } 1367 uint8_t sceneMode = e.data.u8[0]; 1368 1369 if (controlMode == ANDROID_CONTROL_MODE_OFF) { 1370 mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE; 1371 mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE; 1372 mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE; 1373 update3A(settings); 1374 return OK; 1375 } else if (controlMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) { 1376 switch(sceneMode) { 1377 case ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY: 1378 mFacePriority = true; 1379 break; 1380 default: 1381 ALOGE("%s: Emulator doesn't support scene mode %d", 1382 __FUNCTION__, sceneMode); 1383 return BAD_VALUE; 1384 } 1385 } else { 1386 mFacePriority = false; 1387 } 1388 1389 // controlMode == AUTO or sceneMode = FACE_PRIORITY 1390 // Process individual 3A controls 1391 1392 res = doFakeAE(settings); 1393 if (res != OK) return res; 1394 1395 res = doFakeAF(settings); 1396 if (res != OK) return res; 1397 1398 res = doFakeAWB(settings); 1399 if (res != OK) return res; 1400 1401 update3A(settings); 1402 return OK; 1403 } 1404 1405 status_t EmulatedFakeCamera3::doFakeAE(CameraMetadata &settings) { 1406 camera_metadata_entry e; 1407 1408 e = settings.find(ANDROID_CONTROL_AE_MODE); 1409 if (e.count == 0) { 1410 ALOGE("%s: No AE mode entry!", __FUNCTION__); 1411 return BAD_VALUE; 1412 } 1413 uint8_t aeMode = e.data.u8[0]; 1414 1415 switch (aeMode) { 1416 case ANDROID_CONTROL_AE_MODE_OFF: 1417 // AE is OFF 1418 mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE; 1419 return OK; 1420 case ANDROID_CONTROL_AE_MODE_ON: 1421 // OK for AUTO modes 1422 break; 1423 default: 1424 ALOGE("%s: Emulator doesn't support AE mode %d", 1425 __FUNCTION__, aeMode); 1426 return BAD_VALUE; 1427 } 1428 1429 e = settings.find(ANDROID_CONTROL_AE_LOCK); 1430 if (e.count == 0) { 1431 ALOGE("%s: No AE lock entry!", __FUNCTION__); 1432 return BAD_VALUE; 1433 } 1434 bool aeLocked = (e.data.u8[0] == ANDROID_CONTROL_AE_LOCK_ON); 1435 1436 e = settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER); 1437 bool precaptureTrigger = false; 1438 if (e.count != 0) { 1439 precaptureTrigger = 1440 (e.data.u8[0] == ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_START); 1441 } 1442 1443 if (precaptureTrigger) { 1444 ALOGV("%s: Pre capture trigger = %d", __FUNCTION__, precaptureTrigger); 1445 } else if (e.count > 0) { 1446 ALOGV("%s: Pre capture trigger was present? %d", 1447 __FUNCTION__, 1448 e.count); 1449 } 1450 1451 // If we have an aePrecaptureTrigger, aePrecaptureId should be set too 1452 if (e.count != 0) { 1453 e = settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID); 1454 1455 if (e.count == 0) { 1456 ALOGE("%s: When android.control.aePrecaptureTrigger is set " 1457 " in the request, aePrecaptureId needs to be set as well", 1458 __FUNCTION__); 1459 return BAD_VALUE; 1460 } 1461 1462 mAeTriggerId = e.data.i32[0]; 1463 } 1464 1465 if (precaptureTrigger || mAeState == ANDROID_CONTROL_AE_STATE_PRECAPTURE) { 1466 // Run precapture sequence 1467 if (mAeState != ANDROID_CONTROL_AE_STATE_PRECAPTURE) { 1468 mAeCounter = 0; 1469 } 1470 1471 if (mFacePriority) { 1472 mAeTargetExposureTime = kFacePriorityExposureTime; 1473 } else { 1474 mAeTargetExposureTime = kNormalExposureTime; 1475 } 1476 1477 if (mAeCounter > kPrecaptureMinFrames && 1478 (mAeTargetExposureTime - mAeCurrentExposureTime) < 1479 mAeTargetExposureTime / 10) { 1480 // Done with precapture 1481 mAeCounter = 0; 1482 mAeState = aeLocked ? ANDROID_CONTROL_AE_STATE_LOCKED : 1483 ANDROID_CONTROL_AE_STATE_CONVERGED; 1484 } else { 1485 // Converge some more 1486 mAeCurrentExposureTime += 1487 (mAeTargetExposureTime - mAeCurrentExposureTime) * 1488 kExposureTrackRate; 1489 mAeCounter++; 1490 mAeState = ANDROID_CONTROL_AE_STATE_PRECAPTURE; 1491 } 1492 1493 } else if (!aeLocked) { 1494 // Run standard occasional AE scan 1495 switch (mAeState) { 1496 case ANDROID_CONTROL_AE_STATE_CONVERGED: 1497 case ANDROID_CONTROL_AE_STATE_INACTIVE: 1498 mAeCounter++; 1499 if (mAeCounter > kStableAeMaxFrames) { 1500 mAeTargetExposureTime = 1501 mFacePriority ? kFacePriorityExposureTime : 1502 kNormalExposureTime; 1503 float exposureStep = ((double)rand() / RAND_MAX) * 1504 (kExposureWanderMax - kExposureWanderMin) + 1505 kExposureWanderMin; 1506 mAeTargetExposureTime *= std::pow(2, exposureStep); 1507 mAeState = ANDROID_CONTROL_AE_STATE_SEARCHING; 1508 } 1509 break; 1510 case ANDROID_CONTROL_AE_STATE_SEARCHING: 1511 mAeCurrentExposureTime += 1512 (mAeTargetExposureTime - mAeCurrentExposureTime) * 1513 kExposureTrackRate; 1514 if (abs(mAeTargetExposureTime - mAeCurrentExposureTime) < 1515 mAeTargetExposureTime / 10) { 1516 // Close enough 1517 mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED; 1518 mAeCounter = 0; 1519 } 1520 break; 1521 case ANDROID_CONTROL_AE_STATE_LOCKED: 1522 mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED; 1523 mAeCounter = 0; 1524 break; 1525 default: 1526 ALOGE("%s: Emulator in unexpected AE state %d", 1527 __FUNCTION__, mAeState); 1528 return INVALID_OPERATION; 1529 } 1530 } else { 1531 // AE is locked 1532 mAeState = ANDROID_CONTROL_AE_STATE_LOCKED; 1533 } 1534 1535 return OK; 1536 } 1537 1538 status_t EmulatedFakeCamera3::doFakeAF(CameraMetadata &settings) { 1539 camera_metadata_entry e; 1540 1541 e = settings.find(ANDROID_CONTROL_AF_MODE); 1542 if (e.count == 0) { 1543 ALOGE("%s: No AF mode entry!", __FUNCTION__); 1544 return BAD_VALUE; 1545 } 1546 uint8_t afMode = e.data.u8[0]; 1547 1548 e = settings.find(ANDROID_CONTROL_AF_TRIGGER); 1549 typedef camera_metadata_enum_android_control_af_trigger af_trigger_t; 1550 af_trigger_t afTrigger; 1551 // If we have an afTrigger, afTriggerId should be set too 1552 if (e.count != 0) { 1553 afTrigger = static_cast<af_trigger_t>(e.data.u8[0]); 1554 1555 e = settings.find(ANDROID_CONTROL_AF_TRIGGER_ID); 1556 1557 if (e.count == 0) { 1558 ALOGE("%s: When android.control.afTrigger is set " 1559 " in the request, afTriggerId needs to be set as well", 1560 __FUNCTION__); 1561 return BAD_VALUE; 1562 } 1563 1564 mAfTriggerId = e.data.i32[0]; 1565 1566 ALOGV("%s: AF trigger set to 0x%x", __FUNCTION__, afTrigger); 1567 ALOGV("%s: AF trigger ID set to 0x%x", __FUNCTION__, mAfTriggerId); 1568 ALOGV("%s: AF mode is 0x%x", __FUNCTION__, afMode); 1569 } else { 1570 afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE; 1571 } 1572 1573 switch (afMode) { 1574 case ANDROID_CONTROL_AF_MODE_OFF: 1575 mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE; 1576 return OK; 1577 case ANDROID_CONTROL_AF_MODE_AUTO: 1578 case ANDROID_CONTROL_AF_MODE_MACRO: 1579 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO: 1580 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE: 1581 if (!mFacingBack) { 1582 ALOGE("%s: Front camera doesn't support AF mode %d", 1583 __FUNCTION__, afMode); 1584 return BAD_VALUE; 1585 } 1586 // OK, handle transitions lower on 1587 break; 1588 default: 1589 ALOGE("%s: Emulator doesn't support AF mode %d", 1590 __FUNCTION__, afMode); 1591 return BAD_VALUE; 1592 } 1593 1594 bool afModeChanged = mAfMode != afMode; 1595 mAfMode = afMode; 1596 1597 /** 1598 * Simulate AF triggers. Transition at most 1 state per frame. 1599 * - Focusing always succeeds (goes into locked, or PASSIVE_SCAN). 1600 */ 1601 1602 bool afTriggerStart = false; 1603 bool afTriggerCancel = false; 1604 switch (afTrigger) { 1605 case ANDROID_CONTROL_AF_TRIGGER_IDLE: 1606 break; 1607 case ANDROID_CONTROL_AF_TRIGGER_START: 1608 afTriggerStart = true; 1609 break; 1610 case ANDROID_CONTROL_AF_TRIGGER_CANCEL: 1611 afTriggerCancel = true; 1612 // Cancel trigger always transitions into INACTIVE 1613 mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE; 1614 1615 ALOGV("%s: AF State transition to STATE_INACTIVE", __FUNCTION__); 1616 1617 // Stay in 'inactive' until at least next frame 1618 return OK; 1619 default: 1620 ALOGE("%s: Unknown af trigger value %d", __FUNCTION__, afTrigger); 1621 return BAD_VALUE; 1622 } 1623 1624 // If we get down here, we're either in an autofocus mode 1625 // or in a continuous focus mode (and no other modes) 1626 1627 int oldAfState = mAfState; 1628 switch (mAfState) { 1629 case ANDROID_CONTROL_AF_STATE_INACTIVE: 1630 if (afTriggerStart) { 1631 switch (afMode) { 1632 case ANDROID_CONTROL_AF_MODE_AUTO: 1633 // fall-through 1634 case ANDROID_CONTROL_AF_MODE_MACRO: 1635 mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN; 1636 break; 1637 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO: 1638 // fall-through 1639 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE: 1640 mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED; 1641 break; 1642 } 1643 } else { 1644 // At least one frame stays in INACTIVE 1645 if (!afModeChanged) { 1646 switch (afMode) { 1647 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO: 1648 // fall-through 1649 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE: 1650 mAfState = ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN; 1651 break; 1652 } 1653 } 1654 } 1655 break; 1656 case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN: 1657 /** 1658 * When the AF trigger is activated, the algorithm should finish 1659 * its PASSIVE_SCAN if active, and then transition into AF_FOCUSED 1660 * or AF_NOT_FOCUSED as appropriate 1661 */ 1662 if (afTriggerStart) { 1663 // Randomly transition to focused or not focused 1664 if (rand() % 3) { 1665 mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED; 1666 } else { 1667 mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED; 1668 } 1669 } 1670 /** 1671 * When the AF trigger is not involved, the AF algorithm should 1672 * start in INACTIVE state, and then transition into PASSIVE_SCAN 1673 * and PASSIVE_FOCUSED states 1674 */ 1675 else if (!afTriggerCancel) { 1676 // Randomly transition to passive focus 1677 if (rand() % 3 == 0) { 1678 mAfState = ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED; 1679 } 1680 } 1681 1682 break; 1683 case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED: 1684 if (afTriggerStart) { 1685 // Randomly transition to focused or not focused 1686 if (rand() % 3) { 1687 mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED; 1688 } else { 1689 mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED; 1690 } 1691 } 1692 // TODO: initiate passive scan (PASSIVE_SCAN) 1693 break; 1694 case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN: 1695 // Simulate AF sweep completing instantaneously 1696 1697 // Randomly transition to focused or not focused 1698 if (rand() % 3) { 1699 mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED; 1700 } else { 1701 mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED; 1702 } 1703 break; 1704 case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED: 1705 if (afTriggerStart) { 1706 switch (afMode) { 1707 case ANDROID_CONTROL_AF_MODE_AUTO: 1708 // fall-through 1709 case ANDROID_CONTROL_AF_MODE_MACRO: 1710 mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN; 1711 break; 1712 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO: 1713 // fall-through 1714 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE: 1715 // continuous autofocus => trigger start has no effect 1716 break; 1717 } 1718 } 1719 break; 1720 case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED: 1721 if (afTriggerStart) { 1722 switch (afMode) { 1723 case ANDROID_CONTROL_AF_MODE_AUTO: 1724 // fall-through 1725 case ANDROID_CONTROL_AF_MODE_MACRO: 1726 mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN; 1727 break; 1728 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO: 1729 // fall-through 1730 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE: 1731 // continuous autofocus => trigger start has no effect 1732 break; 1733 } 1734 } 1735 break; 1736 default: 1737 ALOGE("%s: Bad af state %d", __FUNCTION__, mAfState); 1738 } 1739 1740 { 1741 char afStateString[100] = {0,}; 1742 camera_metadata_enum_snprint(ANDROID_CONTROL_AF_STATE, 1743 oldAfState, 1744 afStateString, 1745 sizeof(afStateString)); 1746 1747 char afNewStateString[100] = {0,}; 1748 camera_metadata_enum_snprint(ANDROID_CONTROL_AF_STATE, 1749 mAfState, 1750 afNewStateString, 1751 sizeof(afNewStateString)); 1752 ALOGVV("%s: AF state transitioned from %s to %s", 1753 __FUNCTION__, afStateString, afNewStateString); 1754 } 1755 1756 1757 return OK; 1758 } 1759 1760 status_t EmulatedFakeCamera3::doFakeAWB(CameraMetadata &settings) { 1761 camera_metadata_entry e; 1762 1763 e = settings.find(ANDROID_CONTROL_AWB_MODE); 1764 if (e.count == 0) { 1765 ALOGE("%s: No AWB mode entry!", __FUNCTION__); 1766 return BAD_VALUE; 1767 } 1768 uint8_t awbMode = e.data.u8[0]; 1769 1770 // TODO: Add white balance simulation 1771 1772 switch (awbMode) { 1773 case ANDROID_CONTROL_AWB_MODE_OFF: 1774 mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE; 1775 return OK; 1776 case ANDROID_CONTROL_AWB_MODE_AUTO: 1777 case ANDROID_CONTROL_AWB_MODE_INCANDESCENT: 1778 case ANDROID_CONTROL_AWB_MODE_FLUORESCENT: 1779 case ANDROID_CONTROL_AWB_MODE_DAYLIGHT: 1780 case ANDROID_CONTROL_AWB_MODE_SHADE: 1781 // OK 1782 break; 1783 default: 1784 ALOGE("%s: Emulator doesn't support AWB mode %d", 1785 __FUNCTION__, awbMode); 1786 return BAD_VALUE; 1787 } 1788 1789 return OK; 1790 } 1791 1792 1793 void EmulatedFakeCamera3::update3A(CameraMetadata &settings) { 1794 if (mAeState != ANDROID_CONTROL_AE_STATE_INACTIVE) { 1795 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, 1796 &mAeCurrentExposureTime, 1); 1797 settings.update(ANDROID_SENSOR_SENSITIVITY, 1798 &mAeCurrentSensitivity, 1); 1799 } 1800 1801 settings.update(ANDROID_CONTROL_AE_STATE, 1802 &mAeState, 1); 1803 settings.update(ANDROID_CONTROL_AF_STATE, 1804 &mAfState, 1); 1805 settings.update(ANDROID_CONTROL_AWB_STATE, 1806 &mAwbState, 1); 1807 /** 1808 * TODO: Trigger IDs need a think-through 1809 */ 1810 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_ID, 1811 &mAeTriggerId, 1); 1812 settings.update(ANDROID_CONTROL_AF_TRIGGER_ID, 1813 &mAfTriggerId, 1); 1814 } 1815 1816 void EmulatedFakeCamera3::signalReadoutIdle() { 1817 Mutex::Autolock l(mLock); 1818 // Need to chek isIdle again because waiting on mLock may have allowed 1819 // something to be placed in the in-flight queue. 1820 if (mStatus == STATUS_ACTIVE && mReadoutThread->isIdle()) { 1821 ALOGV("Now idle"); 1822 mStatus = STATUS_READY; 1823 } 1824 } 1825 1826 void EmulatedFakeCamera3::onSensorEvent(uint32_t frameNumber, Event e, 1827 nsecs_t timestamp) { 1828 switch(e) { 1829 case Sensor::SensorListener::EXPOSURE_START: { 1830 ALOGVV("%s: Frame %d: Sensor started exposure at %lld", 1831 __FUNCTION__, frameNumber, timestamp); 1832 // Trigger shutter notify to framework 1833 camera3_notify_msg_t msg; 1834 msg.type = CAMERA3_MSG_SHUTTER; 1835 msg.message.shutter.frame_number = frameNumber; 1836 msg.message.shutter.timestamp = timestamp; 1837 sendNotify(&msg); 1838 break; 1839 } 1840 default: 1841 ALOGW("%s: Unexpected sensor event %d at %lld", __FUNCTION__, 1842 e, timestamp); 1843 break; 1844 } 1845 } 1846 1847 EmulatedFakeCamera3::ReadoutThread::ReadoutThread(EmulatedFakeCamera3 *parent) : 1848 mParent(parent), mJpegWaiting(false) { 1849 } 1850 1851 EmulatedFakeCamera3::ReadoutThread::~ReadoutThread() { 1852 for (List<Request>::iterator i = mInFlightQueue.begin(); 1853 i != mInFlightQueue.end(); i++) { 1854 delete i->buffers; 1855 delete i->sensorBuffers; 1856 } 1857 } 1858 1859 void EmulatedFakeCamera3::ReadoutThread::queueCaptureRequest(const Request &r) { 1860 Mutex::Autolock l(mLock); 1861 1862 mInFlightQueue.push_back(r); 1863 mInFlightSignal.signal(); 1864 } 1865 1866 bool EmulatedFakeCamera3::ReadoutThread::isIdle() { 1867 Mutex::Autolock l(mLock); 1868 return mInFlightQueue.empty() && !mThreadActive; 1869 } 1870 1871 status_t EmulatedFakeCamera3::ReadoutThread::waitForReadout() { 1872 status_t res; 1873 Mutex::Autolock l(mLock); 1874 int loopCount = 0; 1875 while (mInFlightQueue.size() >= kMaxQueueSize) { 1876 res = mInFlightSignal.waitRelative(mLock, kWaitPerLoop); 1877 if (res != OK && res != TIMED_OUT) { 1878 ALOGE("%s: Error waiting for in-flight queue to shrink", 1879 __FUNCTION__); 1880 return INVALID_OPERATION; 1881 } 1882 if (loopCount == kMaxWaitLoops) { 1883 ALOGE("%s: Timed out waiting for in-flight queue to shrink", 1884 __FUNCTION__); 1885 return TIMED_OUT; 1886 } 1887 loopCount++; 1888 } 1889 return OK; 1890 } 1891 1892 bool EmulatedFakeCamera3::ReadoutThread::threadLoop() { 1893 status_t res; 1894 1895 ALOGVV("%s: ReadoutThread waiting for request", __FUNCTION__); 1896 1897 // First wait for a request from the in-flight queue 1898 1899 if (mCurrentRequest.settings.isEmpty()) { 1900 Mutex::Autolock l(mLock); 1901 if (mInFlightQueue.empty()) { 1902 res = mInFlightSignal.waitRelative(mLock, kWaitPerLoop); 1903 if (res == TIMED_OUT) { 1904 ALOGVV("%s: ReadoutThread: Timed out waiting for request", 1905 __FUNCTION__); 1906 return true; 1907 } else if (res != NO_ERROR) { 1908 ALOGE("%s: Error waiting for capture requests: %d", 1909 __FUNCTION__, res); 1910 return false; 1911 } 1912 } 1913 mCurrentRequest.frameNumber = mInFlightQueue.begin()->frameNumber; 1914 mCurrentRequest.settings.acquire(mInFlightQueue.begin()->settings); 1915 mCurrentRequest.buffers = mInFlightQueue.begin()->buffers; 1916 mCurrentRequest.sensorBuffers = mInFlightQueue.begin()->sensorBuffers; 1917 mInFlightQueue.erase(mInFlightQueue.begin()); 1918 mInFlightSignal.signal(); 1919 mThreadActive = true; 1920 ALOGVV("%s: Beginning readout of frame %d", __FUNCTION__, 1921 mCurrentRequest.frameNumber); 1922 } 1923 1924 // Then wait for it to be delivered from the sensor 1925 ALOGVV("%s: ReadoutThread: Wait for frame to be delivered from sensor", 1926 __FUNCTION__); 1927 1928 nsecs_t captureTime; 1929 bool gotFrame = 1930 mParent->mSensor->waitForNewFrame(kWaitPerLoop, &captureTime); 1931 if (!gotFrame) { 1932 ALOGVV("%s: ReadoutThread: Timed out waiting for sensor frame", 1933 __FUNCTION__); 1934 return true; 1935 } 1936 1937 ALOGVV("Sensor done with readout for frame %d, captured at %lld ", 1938 mCurrentRequest.frameNumber, captureTime); 1939 1940 // Check if we need to JPEG encode a buffer, and send it for async 1941 // compression if so. Otherwise prepare the buffer for return. 1942 bool needJpeg = false; 1943 HalBufferVector::iterator buf = mCurrentRequest.buffers->begin(); 1944 while(buf != mCurrentRequest.buffers->end()) { 1945 bool goodBuffer = true; 1946 if ( buf->stream->format == 1947 HAL_PIXEL_FORMAT_BLOB) { 1948 Mutex::Autolock jl(mJpegLock); 1949 if (mJpegWaiting) { 1950 // This shouldn't happen, because processCaptureRequest should 1951 // be stalling until JPEG compressor is free. 1952 ALOGE("%s: Already processing a JPEG!", __FUNCTION__); 1953 goodBuffer = false; 1954 } 1955 if (goodBuffer) { 1956 // Compressor takes ownership of sensorBuffers here 1957 res = mParent->mJpegCompressor->start(mCurrentRequest.sensorBuffers, 1958 this); 1959 goodBuffer = (res == OK); 1960 } 1961 if (goodBuffer) { 1962 needJpeg = true; 1963 1964 mJpegHalBuffer = *buf; 1965 mJpegFrameNumber = mCurrentRequest.frameNumber; 1966 mJpegWaiting = true; 1967 1968 mCurrentRequest.sensorBuffers = NULL; 1969 buf = mCurrentRequest.buffers->erase(buf); 1970 1971 continue; 1972 } 1973 ALOGE("%s: Error compressing output buffer: %s (%d)", 1974 __FUNCTION__, strerror(-res), res); 1975 // fallthrough for cleanup 1976 } 1977 GraphicBufferMapper::get().unlock(*(buf->buffer)); 1978 1979 buf->status = goodBuffer ? CAMERA3_BUFFER_STATUS_OK : 1980 CAMERA3_BUFFER_STATUS_ERROR; 1981 buf->acquire_fence = -1; 1982 buf->release_fence = -1; 1983 1984 ++buf; 1985 } // end while 1986 1987 // Construct result for all completed buffers and results 1988 1989 camera3_capture_result result; 1990 1991 mCurrentRequest.settings.update(ANDROID_SENSOR_TIMESTAMP, 1992 &captureTime, 1); 1993 1994 result.frame_number = mCurrentRequest.frameNumber; 1995 result.result = mCurrentRequest.settings.getAndLock(); 1996 result.num_output_buffers = mCurrentRequest.buffers->size(); 1997 result.output_buffers = mCurrentRequest.buffers->array(); 1998 1999 // Go idle if queue is empty, before sending result 2000 bool signalIdle = false; 2001 { 2002 Mutex::Autolock l(mLock); 2003 if (mInFlightQueue.empty()) { 2004 mThreadActive = false; 2005 signalIdle = true; 2006 } 2007 } 2008 if (signalIdle) mParent->signalReadoutIdle(); 2009 2010 // Send it off to the framework 2011 ALOGVV("%s: ReadoutThread: Send result to framework", 2012 __FUNCTION__); 2013 mParent->sendCaptureResult(&result); 2014 2015 // Clean up 2016 mCurrentRequest.settings.unlock(result.result); 2017 2018 delete mCurrentRequest.buffers; 2019 mCurrentRequest.buffers = NULL; 2020 if (!needJpeg) { 2021 delete mCurrentRequest.sensorBuffers; 2022 mCurrentRequest.sensorBuffers = NULL; 2023 } 2024 mCurrentRequest.settings.clear(); 2025 2026 return true; 2027 } 2028 2029 void EmulatedFakeCamera3::ReadoutThread::onJpegDone( 2030 const StreamBuffer &jpegBuffer, bool success) { 2031 Mutex::Autolock jl(mJpegLock); 2032 2033 GraphicBufferMapper::get().unlock(*(jpegBuffer.buffer)); 2034 2035 mJpegHalBuffer.status = success ? 2036 CAMERA3_BUFFER_STATUS_OK : CAMERA3_BUFFER_STATUS_ERROR; 2037 mJpegHalBuffer.acquire_fence = -1; 2038 mJpegHalBuffer.release_fence = -1; 2039 mJpegWaiting = false; 2040 2041 camera3_capture_result result; 2042 result.frame_number = mJpegFrameNumber; 2043 result.result = NULL; 2044 result.num_output_buffers = 1; 2045 result.output_buffers = &mJpegHalBuffer; 2046 2047 if (!success) { 2048 ALOGE("%s: Compression failure, returning error state buffer to" 2049 " framework", __FUNCTION__); 2050 } else { 2051 ALOGV("%s: Compression complete, returning buffer to framework", 2052 __FUNCTION__); 2053 } 2054 2055 mParent->sendCaptureResult(&result); 2056 } 2057 2058 void EmulatedFakeCamera3::ReadoutThread::onJpegInputDone( 2059 const StreamBuffer &inputBuffer) { 2060 // Should never get here, since the input buffer has to be returned 2061 // by end of processCaptureRequest 2062 ALOGE("%s: Unexpected input buffer from JPEG compressor!", __FUNCTION__); 2063 } 2064 2065 2066 }; // namespace android 2067