/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*
 * Contains implementation of a class EmulatedFakeCamera3 that encapsulates
 * functionality of an advanced fake camera.
 */

// Uncomment to enable verbose (ALOGV) / very-verbose (ALOGVV) logging.
//#define LOG_NDEBUG 0
//#define LOG_NNDEBUG 0
#define LOG_TAG "EmulatedCamera_FakeCamera3"
#include <utils/Log.h>

#include "EmulatedFakeCamera3.h"
#include "EmulatedCameraFactory.h"
#include <ui/Fence.h>
#include <ui/Rect.h>
#include <ui/GraphicBufferMapper.h>
#include "gralloc_cb.h"

#include "fake-pipeline2/Sensor.h"
#include "fake-pipeline2/JpegCompressor.h"
#include <cmath>

// ALOGVV is the "very verbose" variant of ALOGV; it compiles to a no-op
// unless LOG_NNDEBUG is defined to 0 above.
#if defined(LOG_NNDEBUG) && LOG_NNDEBUG == 0
#define ALOGVV ALOGV
#else
#define ALOGVV(...) ((void)0)
#endif

namespace android {

/**
 * Constants for camera capabilities
 */

// Time-unit helpers; all durations in this file are in nanoseconds.
const int64_t USEC = 1000LL;
const int64_t MSEC = USEC * 1000LL;
const int64_t SEC = MSEC * 1000LL;

// Pixel formats this HAL advertises; every configured stream must use one
// of these.
const int32_t EmulatedFakeCamera3::kAvailableFormats[5] = {
        HAL_PIXEL_FORMAT_RAW_SENSOR,
        HAL_PIXEL_FORMAT_BLOB,
        HAL_PIXEL_FORMAT_RGBA_8888,
        HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
        // These are handled by YCbCr_420_888
        //        HAL_PIXEL_FORMAT_YV12,
        //        HAL_PIXEL_FORMAT_YCrCb_420_SP,
        HAL_PIXEL_FORMAT_YCbCr_420_888
};

// Supported output sizes, hard-coded rather than derived from the sensor
// resolution (see commented-out Sensor::kResolution references).
const uint32_t EmulatedFakeCamera3::kAvailableRawSizes[2] = {
    640, 480
    //    Sensor::kResolution[0], Sensor::kResolution[1]
};

const uint64_t EmulatedFakeCamera3::kAvailableRawMinDurations[1] = {
    (const uint64_t)Sensor::kFrameDurationRange[0]
};

const uint32_t EmulatedFakeCamera3::kAvailableProcessedSizesBack[4] = {
    640, 480, 320, 240
    //    Sensor::kResolution[0], Sensor::kResolution[1]
};

const uint32_t EmulatedFakeCamera3::kAvailableProcessedSizesFront[4] = {
    320, 240, 160, 120
    //    Sensor::kResolution[0], Sensor::kResolution[1]
};

const uint64_t EmulatedFakeCamera3::kAvailableProcessedMinDurations[1] = {
    (const uint64_t)Sensor::kFrameDurationRange[0]
};

const uint32_t EmulatedFakeCamera3::kAvailableJpegSizesBack[2] = {
    640, 480
    //    Sensor::kResolution[0], Sensor::kResolution[1]
};

const uint32_t EmulatedFakeCamera3::kAvailableJpegSizesFront[2] = {
    320, 240
    //    Sensor::kResolution[0], Sensor::kResolution[1]
};


const uint64_t EmulatedFakeCamera3::kAvailableJpegMinDurations[1] = {
    (const uint64_t)Sensor::kFrameDurationRange[0]
};

/**
 * 3A constants
 */

// Default exposure and gain targets for different scenarios
const nsecs_t EmulatedFakeCamera3::kNormalExposureTime = 10 * MSEC;
const nsecs_t EmulatedFakeCamera3::kFacePriorityExposureTime = 30 * MSEC;
const int
EmulatedFakeCamera3::kNormalSensitivity = 100;
const int EmulatedFakeCamera3::kFacePrioritySensitivity = 400;
// Fraction of the exposure error corrected per frame by the fake AE loop.
const float EmulatedFakeCamera3::kExposureTrackRate = 0.1;
const int EmulatedFakeCamera3::kPrecaptureMinFrames = 10;
const int EmulatedFakeCamera3::kStableAeMaxFrames = 100;
// Bounds (in stops) for the simulated AE exposure wander.
const float EmulatedFakeCamera3::kExposureWanderMin = -2;
const float EmulatedFakeCamera3::kExposureWanderMax = 1;

/**
 * Camera device lifecycle methods
 */

// Constructor: records facing and clears the default-template cache.
// Templates are lazily built by constructDefaultRequestSettings().
EmulatedFakeCamera3::EmulatedFakeCamera3(int cameraId, bool facingBack,
        struct hw_module_t* module) :
        EmulatedCamera3(cameraId, module),
        mFacingBack(facingBack) {
    ALOGI("Constructing emulated fake camera 3 facing %s",
            facingBack ? "back" : "front");

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) {
        mDefaultTemplates[i] = NULL;
    }

    /**
     * Front cameras = limited mode
     * Back cameras = full mode
     */
    mFullMode = facingBack;
}

// Destructor: releases any cached default request templates. The templates
// are raw camera_metadata_t allocations released from CameraMetadata in
// constructDefaultRequestSettings(), so free_camera_metadata() is correct.
EmulatedFakeCamera3::~EmulatedFakeCamera3() {
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) {
        if (mDefaultTemplates[i] != NULL) {
            free_camera_metadata(mDefaultTemplates[i]);
        }
    }
}

// One-time initialization: builds the static metadata then defers to the
// base class. Only legal from the initial STATUS_ERROR state.
status_t EmulatedFakeCamera3::Initialize() {
    ALOGV("%s: E", __FUNCTION__);
    status_t res;

    if (mStatus != STATUS_ERROR) {
        ALOGE("%s: Already initialized!", __FUNCTION__);
        return INVALID_OPERATION;
    }

    res = constructStaticInfo();
    if (res != OK) {
        ALOGE("%s: Unable to allocate static info: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return res;
    }

    return EmulatedCamera3::Initialize();
}

// Opens the camera: spins up the fake sensor and the readout thread, and
// resets the fake-3A state machine to its defaults.
status_t EmulatedFakeCamera3::connectCamera(hw_device_t** device) {
    ALOGV("%s: E", __FUNCTION__);
    Mutex::Autolock l(mLock);
    status_t res;

    if (mStatus != STATUS_CLOSED) {
        ALOGE("%s: Can't connect in state %d", __FUNCTION__, mStatus);
        return INVALID_OPERATION;
    }

    mSensor = new Sensor();
    mSensor->setSensorListener(this);

    res = mSensor->startUp();
    if (res != NO_ERROR) return res;

    mReadoutThread = new ReadoutThread(this);
    mJpegCompressor = new JpegCompressor();

    res = mReadoutThread->run("EmuCam3::readoutThread");
    if (res != NO_ERROR) return res;

    // Initialize fake 3A

    mControlMode  = ANDROID_CONTROL_MODE_AUTO;
    mFacePriority = false;
    mAeMode       = ANDROID_CONTROL_AE_MODE_ON;
    mAfMode       = ANDROID_CONTROL_AF_MODE_AUTO;
    mAwbMode      = ANDROID_CONTROL_AWB_MODE_AUTO;
    mAeState      = ANDROID_CONTROL_AE_STATE_INACTIVE;
    mAfState      = ANDROID_CONTROL_AF_STATE_INACTIVE;
    mAwbState     = ANDROID_CONTROL_AWB_STATE_INACTIVE;
    mAfTriggerId  = 0;
    mAeTriggerId  = 0;
    mAeCurrentExposureTime = kNormalExposureTime;
    mAeCurrentSensitivity  = kNormalSensitivity;

    return EmulatedCamera3::connectCamera(device);
}

// Closes the camera. mLock is deliberately dropped around the readout
// thread join (the thread takes mLock itself), then re-taken to tear down
// the per-stream private state.
status_t EmulatedFakeCamera3::closeCamera() {
    ALOGV("%s: E", __FUNCTION__);
    status_t res;
    {
        Mutex::Autolock l(mLock);
        if (mStatus == STATUS_CLOSED) return OK;

        res = mSensor->shutDown();
        if (res != NO_ERROR) {
            ALOGE("%s: Unable to shut down sensor: %d", __FUNCTION__, res);
            return res;
        }
        mSensor.clear();

        mReadoutThread->requestExit();
    }

    // Join outside mLock to avoid deadlocking with the readout thread.
    mReadoutThread->join();

    {
        Mutex::Autolock l(mLock);
        // Clear out private stream information
        for (StreamIterator s = mStreams.begin(); s != mStreams.end(); s++) {
            PrivateStreamInfo *privStream =
                    static_cast<PrivateStreamInfo*>((*s)->priv);
            delete privStream;
            (*s)->priv = NULL;
        }
        mStreams.clear();
        mReadoutThread.clear();
    }

    return EmulatedCamera3::closeCamera();
}

// Fills in facing/orientation, then lets the base class complete the info.
status_t EmulatedFakeCamera3::getCameraInfo(struct camera_info *info) {
    info->facing = mFacingBack ?
CAMERA_FACING_BACK : CAMERA_FACING_FRONT; 244 info->orientation = gEmulatedCameraFactory.getFakeCameraOrientation(); 245 return EmulatedCamera3::getCameraInfo(info); 246 } 247 248 /** 249 * Camera3 interface methods 250 */ 251 252 status_t EmulatedFakeCamera3::configureStreams( 253 camera3_stream_configuration *streamList) { 254 Mutex::Autolock l(mLock); 255 ALOGV("%s: %d streams", __FUNCTION__, streamList->num_streams); 256 257 if (mStatus != STATUS_OPEN && mStatus != STATUS_READY) { 258 ALOGE("%s: Cannot configure streams in state %d", 259 __FUNCTION__, mStatus); 260 return NO_INIT; 261 } 262 263 /** 264 * Sanity-check input list. 265 */ 266 if (streamList == NULL) { 267 ALOGE("%s: NULL stream configuration", __FUNCTION__); 268 return BAD_VALUE; 269 } 270 271 if (streamList->streams == NULL) { 272 ALOGE("%s: NULL stream list", __FUNCTION__); 273 return BAD_VALUE; 274 } 275 276 if (streamList->num_streams < 1) { 277 ALOGE("%s: Bad number of streams requested: %d", __FUNCTION__, 278 streamList->num_streams); 279 return BAD_VALUE; 280 } 281 282 camera3_stream_t *inputStream = NULL; 283 for (size_t i = 0; i < streamList->num_streams; i++) { 284 camera3_stream_t *newStream = streamList->streams[i]; 285 286 if (newStream == NULL) { 287 ALOGE("%s: Stream index %d was NULL", 288 __FUNCTION__, i); 289 return BAD_VALUE; 290 } 291 292 ALOGV("%s: Stream %p (id %d), type %d, usage 0x%x, format 0x%x", 293 __FUNCTION__, newStream, i, newStream->stream_type, 294 newStream->usage, 295 newStream->format); 296 297 if (newStream->stream_type == CAMERA3_STREAM_INPUT || 298 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) { 299 if (inputStream != NULL) { 300 301 ALOGE("%s: Multiple input streams requested!", __FUNCTION__); 302 return BAD_VALUE; 303 } 304 inputStream = newStream; 305 } 306 307 bool validFormat = false; 308 for (size_t f = 0; 309 f < sizeof(kAvailableFormats)/sizeof(kAvailableFormats[0]); 310 f++) { 311 if (newStream->format == kAvailableFormats[f]) { 312 
validFormat = true; 313 break; 314 } 315 } 316 if (!validFormat) { 317 ALOGE("%s: Unsupported stream format 0x%x requested", 318 __FUNCTION__, newStream->format); 319 return BAD_VALUE; 320 } 321 } 322 mInputStream = inputStream; 323 324 /** 325 * Initially mark all existing streams as not alive 326 */ 327 for (StreamIterator s = mStreams.begin(); s != mStreams.end(); ++s) { 328 PrivateStreamInfo *privStream = 329 static_cast<PrivateStreamInfo*>((*s)->priv); 330 privStream->alive = false; 331 } 332 333 /** 334 * Find new streams and mark still-alive ones 335 */ 336 for (size_t i = 0; i < streamList->num_streams; i++) { 337 camera3_stream_t *newStream = streamList->streams[i]; 338 if (newStream->priv == NULL) { 339 // New stream, construct info 340 PrivateStreamInfo *privStream = new PrivateStreamInfo(); 341 privStream->alive = true; 342 privStream->registered = false; 343 344 switch (newStream->stream_type) { 345 case CAMERA3_STREAM_OUTPUT: 346 newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE; 347 break; 348 case CAMERA3_STREAM_INPUT: 349 newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ; 350 break; 351 case CAMERA3_STREAM_BIDIRECTIONAL: 352 newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ | 353 GRALLOC_USAGE_HW_CAMERA_WRITE; 354 break; 355 } 356 newStream->max_buffers = kMaxBufferCount; 357 newStream->priv = privStream; 358 mStreams.push_back(newStream); 359 } else { 360 // Existing stream, mark as still alive. 
361 PrivateStreamInfo *privStream = 362 static_cast<PrivateStreamInfo*>(newStream->priv); 363 privStream->alive = true; 364 } 365 } 366 367 /** 368 * Reap the dead streams 369 */ 370 for (StreamIterator s = mStreams.begin(); s != mStreams.end();) { 371 PrivateStreamInfo *privStream = 372 static_cast<PrivateStreamInfo*>((*s)->priv); 373 if (!privStream->alive) { 374 (*s)->priv = NULL; 375 delete privStream; 376 s = mStreams.erase(s); 377 } else { 378 ++s; 379 } 380 } 381 382 /** 383 * Can't reuse settings across configure call 384 */ 385 mPrevSettings.clear(); 386 387 return OK; 388 } 389 390 status_t EmulatedFakeCamera3::registerStreamBuffers( 391 const camera3_stream_buffer_set *bufferSet) { 392 ALOGV("%s: E", __FUNCTION__); 393 Mutex::Autolock l(mLock); 394 395 /** 396 * Sanity checks 397 */ 398 399 // OK: register streams at any time during configure 400 // (but only once per stream) 401 if (mStatus != STATUS_READY && mStatus != STATUS_ACTIVE) { 402 ALOGE("%s: Cannot register buffers in state %d", 403 __FUNCTION__, mStatus); 404 return NO_INIT; 405 } 406 407 if (bufferSet == NULL) { 408 ALOGE("%s: NULL buffer set!", __FUNCTION__); 409 return BAD_VALUE; 410 } 411 412 StreamIterator s = mStreams.begin(); 413 for (; s != mStreams.end(); ++s) { 414 if (bufferSet->stream == *s) break; 415 } 416 if (s == mStreams.end()) { 417 ALOGE("%s: Trying to register buffers for a non-configured stream!", 418 __FUNCTION__); 419 return BAD_VALUE; 420 } 421 422 /** 423 * Register the buffers. This doesn't mean anything to the emulator besides 424 * marking them off as registered. 
425 */ 426 427 PrivateStreamInfo *privStream = 428 static_cast<PrivateStreamInfo*>((*s)->priv); 429 430 if (privStream->registered) { 431 ALOGE("%s: Illegal to register buffer more than once", __FUNCTION__); 432 return BAD_VALUE; 433 } 434 435 privStream->registered = true; 436 437 return OK; 438 } 439 440 const camera_metadata_t* EmulatedFakeCamera3::constructDefaultRequestSettings( 441 int type) { 442 ALOGV("%s: E", __FUNCTION__); 443 Mutex::Autolock l(mLock); 444 445 if (type < 0 || type >= CAMERA2_TEMPLATE_COUNT) { 446 ALOGE("%s: Unknown request settings template: %d", 447 __FUNCTION__, type); 448 return NULL; 449 } 450 451 /** 452 * Cache is not just an optimization - pointer returned has to live at 453 * least as long as the camera device instance does. 454 */ 455 if (mDefaultTemplates[type] != NULL) { 456 return mDefaultTemplates[type]; 457 } 458 459 CameraMetadata settings; 460 461 /** android.request */ 462 463 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE; 464 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1); 465 466 static const uint8_t metadataMode = ANDROID_REQUEST_METADATA_MODE_FULL; 467 settings.update(ANDROID_REQUEST_METADATA_MODE, &metadataMode, 1); 468 469 static const int32_t id = 0; 470 settings.update(ANDROID_REQUEST_ID, &id, 1); 471 472 static const int32_t frameCount = 0; 473 settings.update(ANDROID_REQUEST_FRAME_COUNT, &frameCount, 1); 474 475 /** android.lens */ 476 477 static const float focusDistance = 0; 478 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focusDistance, 1); 479 480 static const float aperture = 2.8f; 481 settings.update(ANDROID_LENS_APERTURE, &aperture, 1); 482 483 static const float focalLength = 5.0f; 484 settings.update(ANDROID_LENS_FOCAL_LENGTH, &focalLength, 1); 485 486 static const float filterDensity = 0; 487 settings.update(ANDROID_LENS_FILTER_DENSITY, &filterDensity, 1); 488 489 static const uint8_t opticalStabilizationMode = 490 ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF; 491 
settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, 492 &opticalStabilizationMode, 1); 493 494 // FOCUS_RANGE set only in frame 495 496 /** android.sensor */ 497 498 static const int64_t exposureTime = 10 * MSEC; 499 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &exposureTime, 1); 500 501 static const int64_t frameDuration = 33333333L; // 1/30 s 502 settings.update(ANDROID_SENSOR_FRAME_DURATION, &frameDuration, 1); 503 504 static const int32_t sensitivity = 100; 505 settings.update(ANDROID_SENSOR_SENSITIVITY, &sensitivity, 1); 506 507 // TIMESTAMP set only in frame 508 509 /** android.flash */ 510 511 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF; 512 settings.update(ANDROID_FLASH_MODE, &flashMode, 1); 513 514 static const uint8_t flashPower = 10; 515 settings.update(ANDROID_FLASH_FIRING_POWER, &flashPower, 1); 516 517 static const int64_t firingTime = 0; 518 settings.update(ANDROID_FLASH_FIRING_TIME, &firingTime, 1); 519 520 /** Processing block modes */ 521 uint8_t hotPixelMode = 0; 522 uint8_t demosaicMode = 0; 523 uint8_t noiseMode = 0; 524 uint8_t shadingMode = 0; 525 uint8_t geometricMode = 0; 526 uint8_t colorMode = 0; 527 uint8_t tonemapMode = 0; 528 uint8_t edgeMode = 0; 529 switch (type) { 530 case CAMERA2_TEMPLATE_STILL_CAPTURE: 531 // fall-through 532 case CAMERA2_TEMPLATE_VIDEO_SNAPSHOT: 533 // fall-through 534 case CAMERA2_TEMPLATE_ZERO_SHUTTER_LAG: 535 hotPixelMode = ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY; 536 demosaicMode = ANDROID_DEMOSAIC_MODE_HIGH_QUALITY; 537 noiseMode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY; 538 shadingMode = ANDROID_SHADING_MODE_HIGH_QUALITY; 539 geometricMode = ANDROID_GEOMETRIC_MODE_HIGH_QUALITY; 540 colorMode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY; 541 tonemapMode = ANDROID_TONEMAP_MODE_HIGH_QUALITY; 542 edgeMode = ANDROID_EDGE_MODE_HIGH_QUALITY; 543 break; 544 case CAMERA2_TEMPLATE_PREVIEW: 545 // fall-through 546 case CAMERA2_TEMPLATE_VIDEO_RECORD: 547 // fall-through 548 default: 549 hotPixelMode = 
ANDROID_HOT_PIXEL_MODE_FAST; 550 demosaicMode = ANDROID_DEMOSAIC_MODE_FAST; 551 noiseMode = ANDROID_NOISE_REDUCTION_MODE_FAST; 552 shadingMode = ANDROID_SHADING_MODE_FAST; 553 geometricMode = ANDROID_GEOMETRIC_MODE_FAST; 554 colorMode = ANDROID_COLOR_CORRECTION_MODE_FAST; 555 tonemapMode = ANDROID_TONEMAP_MODE_FAST; 556 edgeMode = ANDROID_EDGE_MODE_FAST; 557 break; 558 } 559 settings.update(ANDROID_HOT_PIXEL_MODE, &hotPixelMode, 1); 560 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1); 561 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noiseMode, 1); 562 settings.update(ANDROID_SHADING_MODE, &shadingMode, 1); 563 settings.update(ANDROID_GEOMETRIC_MODE, &geometricMode, 1); 564 settings.update(ANDROID_COLOR_CORRECTION_MODE, &colorMode, 1); 565 settings.update(ANDROID_TONEMAP_MODE, &tonemapMode, 1); 566 settings.update(ANDROID_EDGE_MODE, &edgeMode, 1); 567 568 /** android.noise */ 569 static const uint8_t noiseStrength = 5; 570 settings.update(ANDROID_NOISE_REDUCTION_STRENGTH, &noiseStrength, 1); 571 572 /** android.color */ 573 static const float colorTransform[9] = { 574 1.0f, 0.f, 0.f, 575 0.f, 1.f, 0.f, 576 0.f, 0.f, 1.f 577 }; 578 settings.update(ANDROID_COLOR_CORRECTION_TRANSFORM, colorTransform, 9); 579 580 /** android.tonemap */ 581 static const float tonemapCurve[4] = { 582 0.f, 0.f, 583 1.f, 1.f 584 }; 585 settings.update(ANDROID_TONEMAP_CURVE_RED, tonemapCurve, 4); 586 settings.update(ANDROID_TONEMAP_CURVE_GREEN, tonemapCurve, 4); 587 settings.update(ANDROID_TONEMAP_CURVE_BLUE, tonemapCurve, 4); 588 589 /** android.edge */ 590 static const uint8_t edgeStrength = 5; 591 settings.update(ANDROID_EDGE_STRENGTH, &edgeStrength, 1); 592 593 /** android.scaler */ 594 static const int32_t cropRegion[3] = { 595 0, 0, (int32_t)Sensor::kResolution[0] 596 }; 597 settings.update(ANDROID_SCALER_CROP_REGION, cropRegion, 3); 598 599 /** android.jpeg */ 600 static const uint8_t jpegQuality = 80; 601 settings.update(ANDROID_JPEG_QUALITY, &jpegQuality, 1); 602 603 
static const int32_t thumbnailSize[2] = { 604 640, 480 605 }; 606 settings.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSize, 2); 607 608 static const uint8_t thumbnailQuality = 80; 609 settings.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &thumbnailQuality, 1); 610 611 static const double gpsCoordinates[2] = { 612 0, 0 613 }; 614 settings.update(ANDROID_JPEG_GPS_COORDINATES, gpsCoordinates, 2); 615 616 static const uint8_t gpsProcessingMethod[32] = "None"; 617 settings.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, gpsProcessingMethod, 32); 618 619 static const int64_t gpsTimestamp = 0; 620 settings.update(ANDROID_JPEG_GPS_TIMESTAMP, &gpsTimestamp, 1); 621 622 static const int32_t jpegOrientation = 0; 623 settings.update(ANDROID_JPEG_ORIENTATION, &jpegOrientation, 1); 624 625 /** android.stats */ 626 627 static const uint8_t faceDetectMode = 628 ANDROID_STATISTICS_FACE_DETECT_MODE_OFF; 629 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1); 630 631 static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF; 632 settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1); 633 634 static const uint8_t sharpnessMapMode = 635 ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF; 636 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1); 637 638 // faceRectangles, faceScores, faceLandmarks, faceIds, histogram, 639 // sharpnessMap only in frames 640 641 /** android.control */ 642 643 uint8_t controlIntent = 0; 644 switch (type) { 645 case CAMERA2_TEMPLATE_PREVIEW: 646 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW; 647 break; 648 case CAMERA2_TEMPLATE_STILL_CAPTURE: 649 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE; 650 break; 651 case CAMERA2_TEMPLATE_VIDEO_RECORD: 652 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD; 653 break; 654 case CAMERA2_TEMPLATE_VIDEO_SNAPSHOT: 655 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT; 656 break; 657 case 
CAMERA2_TEMPLATE_ZERO_SHUTTER_LAG: 658 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG; 659 break; 660 default: 661 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM; 662 break; 663 } 664 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1); 665 666 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO; 667 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1); 668 669 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF; 670 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1); 671 672 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; 673 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1); 674 675 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON; 676 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1); 677 678 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF; 679 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1); 680 681 static const int32_t controlRegions[5] = { 682 0, 0, (int32_t)Sensor::kResolution[0], (int32_t)Sensor::kResolution[1], 683 1000 684 }; 685 settings.update(ANDROID_CONTROL_AE_REGIONS, controlRegions, 5); 686 687 static const int32_t aeExpCompensation = 0; 688 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &aeExpCompensation, 1); 689 690 static const int32_t aeTargetFpsRange[2] = { 691 10, 30 692 }; 693 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, aeTargetFpsRange, 2); 694 695 static const uint8_t aeAntibandingMode = 696 ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO; 697 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &aeAntibandingMode, 1); 698 699 static const uint8_t awbMode = 700 ANDROID_CONTROL_AWB_MODE_AUTO; 701 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1); 702 703 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF; 704 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1); 705 706 settings.update(ANDROID_CONTROL_AWB_REGIONS, controlRegions, 5); 707 708 uint8_t afMode = 0; 709 switch (type) 
{ 710 case CAMERA2_TEMPLATE_PREVIEW: 711 afMode = ANDROID_CONTROL_AF_MODE_AUTO; 712 break; 713 case CAMERA2_TEMPLATE_STILL_CAPTURE: 714 afMode = ANDROID_CONTROL_AF_MODE_AUTO; 715 break; 716 case CAMERA2_TEMPLATE_VIDEO_RECORD: 717 afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO; 718 break; 719 case CAMERA2_TEMPLATE_VIDEO_SNAPSHOT: 720 afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO; 721 break; 722 case CAMERA2_TEMPLATE_ZERO_SHUTTER_LAG: 723 afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE; 724 break; 725 default: 726 afMode = ANDROID_CONTROL_AF_MODE_AUTO; 727 break; 728 } 729 settings.update(ANDROID_CONTROL_AF_MODE, &afMode, 1); 730 731 settings.update(ANDROID_CONTROL_AF_REGIONS, controlRegions, 5); 732 733 static const uint8_t vstabMode = 734 ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF; 735 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vstabMode, 1); 736 737 // aeState, awbState, afState only in frame 738 739 mDefaultTemplates[type] = settings.release(); 740 741 return mDefaultTemplates[type]; 742 } 743 744 status_t EmulatedFakeCamera3::processCaptureRequest( 745 camera3_capture_request *request) { 746 747 Mutex::Autolock l(mLock); 748 status_t res; 749 750 /** Validation */ 751 752 if (mStatus < STATUS_READY) { 753 ALOGE("%s: Can't submit capture requests in state %d", __FUNCTION__, 754 mStatus); 755 return INVALID_OPERATION; 756 } 757 758 if (request == NULL) { 759 ALOGE("%s: NULL request!", __FUNCTION__); 760 return BAD_VALUE; 761 } 762 763 uint32_t frameNumber = request->frame_number; 764 765 if (request->settings == NULL && mPrevSettings.isEmpty()) { 766 ALOGE("%s: Request %d: NULL settings for first request after" 767 "configureStreams()", __FUNCTION__, frameNumber); 768 return BAD_VALUE; 769 } 770 771 if (request->input_buffer != NULL && 772 request->input_buffer->stream != mInputStream) { 773 ALOGE("%s: Request %d: Input buffer not from input stream!", 774 __FUNCTION__, frameNumber); 775 ALOGV("%s: Bad stream %p, expected: %p", 776 
__FUNCTION__, request->input_buffer->stream, 777 mInputStream); 778 ALOGV("%s: Bad stream type %d, expected stream type %d", 779 __FUNCTION__, request->input_buffer->stream->stream_type, 780 mInputStream ? mInputStream->stream_type : -1); 781 782 return BAD_VALUE; 783 } 784 785 if (request->num_output_buffers < 1 || request->output_buffers == NULL) { 786 ALOGE("%s: Request %d: No output buffers provided!", 787 __FUNCTION__, frameNumber); 788 return BAD_VALUE; 789 } 790 791 // Validate all buffers, starting with input buffer if it's given 792 793 ssize_t idx; 794 const camera3_stream_buffer_t *b; 795 if (request->input_buffer != NULL) { 796 idx = -1; 797 b = request->input_buffer; 798 } else { 799 idx = 0; 800 b = request->output_buffers; 801 } 802 do { 803 PrivateStreamInfo *priv = 804 static_cast<PrivateStreamInfo*>(b->stream->priv); 805 if (priv == NULL) { 806 ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!", 807 __FUNCTION__, frameNumber, idx); 808 return BAD_VALUE; 809 } 810 if (!priv->alive || !priv->registered) { 811 ALOGE("%s: Request %d: Buffer %d: Unregistered or dead stream!", 812 __FUNCTION__, frameNumber, idx); 813 return BAD_VALUE; 814 } 815 if (b->status != CAMERA3_BUFFER_STATUS_OK) { 816 ALOGE("%s: Request %d: Buffer %d: Status not OK!", 817 __FUNCTION__, frameNumber, idx); 818 return BAD_VALUE; 819 } 820 if (b->release_fence != -1) { 821 ALOGE("%s: Request %d: Buffer %d: Has a release fence!", 822 __FUNCTION__, frameNumber, idx); 823 return BAD_VALUE; 824 } 825 if (b->buffer == NULL) { 826 ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!", 827 __FUNCTION__, frameNumber, idx); 828 return BAD_VALUE; 829 } 830 idx++; 831 b = &(request->output_buffers[idx]); 832 } while (idx < (ssize_t)request->num_output_buffers); 833 834 // TODO: Validate settings parameters 835 836 /** 837 * Start processing this request 838 */ 839 840 mStatus = STATUS_ACTIVE; 841 842 CameraMetadata settings; 843 844 if (request->settings == NULL) { 845 
settings.acquire(mPrevSettings); 846 } else { 847 settings = request->settings; 848 } 849 850 res = process3A(settings); 851 if (res != OK) { 852 return res; 853 } 854 855 // TODO: Handle reprocessing 856 857 /** 858 * Get ready for sensor config 859 */ 860 861 nsecs_t exposureTime; 862 nsecs_t frameDuration; 863 uint32_t sensitivity; 864 bool needJpeg = false; 865 866 exposureTime = settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0]; 867 frameDuration = settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0]; 868 sensitivity = settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0]; 869 870 Buffers *sensorBuffers = new Buffers(); 871 HalBufferVector *buffers = new HalBufferVector(); 872 873 sensorBuffers->setCapacity(request->num_output_buffers); 874 buffers->setCapacity(request->num_output_buffers); 875 876 // Process all the buffers we got for output, constructing internal buffer 877 // structures for them, and lock them for writing. 878 for (size_t i = 0; i < request->num_output_buffers; i++) { 879 const camera3_stream_buffer &srcBuf = request->output_buffers[i]; 880 const cb_handle_t *privBuffer = 881 static_cast<const cb_handle_t*>(*srcBuf.buffer); 882 StreamBuffer destBuf; 883 destBuf.streamId = kGenericStreamId; 884 destBuf.width = srcBuf.stream->width; 885 destBuf.height = srcBuf.stream->height; 886 destBuf.format = privBuffer->format; // Use real private format 887 destBuf.stride = srcBuf.stream->width; // TODO: query from gralloc 888 destBuf.buffer = srcBuf.buffer; 889 890 if (destBuf.format == HAL_PIXEL_FORMAT_BLOB) { 891 needJpeg = true; 892 } 893 894 // Wait on fence 895 sp<Fence> bufferAcquireFence = new Fence(srcBuf.acquire_fence); 896 res = bufferAcquireFence->wait(kFenceTimeoutMs); 897 if (res == TIMED_OUT) { 898 ALOGE("%s: Request %d: Buffer %d: Fence timed out after %d ms", 899 __FUNCTION__, frameNumber, i, kFenceTimeoutMs); 900 } 901 if (res == OK) { 902 // Lock buffer for writing 903 const Rect rect(destBuf.width, destBuf.height); 904 if 
(srcBuf.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) { 905 if (privBuffer->format == HAL_PIXEL_FORMAT_YCrCb_420_SP) { 906 android_ycbcr ycbcr = android_ycbcr(); 907 res = GraphicBufferMapper::get().lockYCbCr( 908 *(destBuf.buffer), 909 GRALLOC_USAGE_HW_CAMERA_WRITE, rect, 910 &ycbcr); 911 // This is only valid because we know that emulator's 912 // YCbCr_420_888 is really contiguous NV21 under the hood 913 destBuf.img = static_cast<uint8_t*>(ycbcr.y); 914 } else { 915 ALOGE("Unexpected private format for flexible YUV: 0x%x", 916 privBuffer->format); 917 res = INVALID_OPERATION; 918 } 919 } else { 920 res = GraphicBufferMapper::get().lock(*(destBuf.buffer), 921 GRALLOC_USAGE_HW_CAMERA_WRITE, rect, 922 (void**)&(destBuf.img)); 923 } 924 if (res != OK) { 925 ALOGE("%s: Request %d: Buffer %d: Unable to lock buffer", 926 __FUNCTION__, frameNumber, i); 927 } 928 } 929 930 if (res != OK) { 931 // Either waiting or locking failed. Unlock locked buffers and bail 932 // out. 933 for (size_t j = 0; j < i; j++) { 934 GraphicBufferMapper::get().unlock( 935 *(request->output_buffers[i].buffer)); 936 } 937 return NO_INIT; 938 } 939 940 sensorBuffers->push_back(destBuf); 941 buffers->push_back(srcBuf); 942 } 943 944 /** 945 * Wait for JPEG compressor to not be busy, if needed 946 */ 947 if (needJpeg) { 948 bool ready = mJpegCompressor->waitForDone(kFenceTimeoutMs); 949 if (!ready) { 950 ALOGE("%s: Timeout waiting for JPEG compression to complete!", 951 __FUNCTION__); 952 return NO_INIT; 953 } 954 } 955 956 /** 957 * Wait until the in-flight queue has room 958 */ 959 res = mReadoutThread->waitForReadout(); 960 if (res != OK) { 961 ALOGE("%s: Timeout waiting for previous requests to complete!", 962 __FUNCTION__); 963 return NO_INIT; 964 } 965 966 /** 967 * Wait until sensor's ready. This waits for lengthy amounts of time with 968 * mLock held, but the interface spec is that no other calls may by done to 969 * the HAL by the framework while process_capture_request is happening. 
970 */ 971 int syncTimeoutCount = 0; 972 while(!mSensor->waitForVSync(kSyncWaitTimeout)) { 973 if (mStatus == STATUS_ERROR) { 974 return NO_INIT; 975 } 976 if (syncTimeoutCount == kMaxSyncTimeoutCount) { 977 ALOGE("%s: Request %d: Sensor sync timed out after %lld ms", 978 __FUNCTION__, frameNumber, 979 kSyncWaitTimeout * kMaxSyncTimeoutCount / 1000000); 980 return NO_INIT; 981 } 982 syncTimeoutCount++; 983 } 984 985 /** 986 * Configure sensor and queue up the request to the readout thread 987 */ 988 mSensor->setExposureTime(exposureTime); 989 mSensor->setFrameDuration(frameDuration); 990 mSensor->setSensitivity(sensitivity); 991 mSensor->setDestinationBuffers(sensorBuffers); 992 mSensor->setFrameNumber(request->frame_number); 993 994 ReadoutThread::Request r; 995 r.frameNumber = request->frame_number; 996 r.settings = settings; 997 r.sensorBuffers = sensorBuffers; 998 r.buffers = buffers; 999 1000 mReadoutThread->queueCaptureRequest(r); 1001 ALOGVV("%s: Queued frame %d", __FUNCTION__, request->frame_number); 1002 1003 // Cache the settings for next time 1004 mPrevSettings.acquire(settings); 1005 1006 return OK; 1007 } 1008 1009 /** Debug methods */ 1010 1011 void EmulatedFakeCamera3::dump(int fd) { 1012 1013 } 1014 1015 /** Tag query methods */ 1016 const char* EmulatedFakeCamera3::getVendorSectionName(uint32_t tag) { 1017 return NULL; 1018 } 1019 1020 const char* EmulatedFakeCamera3::getVendorTagName(uint32_t tag) { 1021 return NULL; 1022 } 1023 1024 int EmulatedFakeCamera3::getVendorTagType(uint32_t tag) { 1025 return 0; 1026 } 1027 1028 /** 1029 * Private methods 1030 */ 1031 1032 status_t EmulatedFakeCamera3::constructStaticInfo() { 1033 1034 CameraMetadata info; 1035 // android.lens 1036 1037 // 5 cm min focus distance for back camera, infinity (fixed focus) for front 1038 const float minFocusDistance = mFacingBack ? 
1.0/0.05 : 0.0; 1039 info.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE, 1040 &minFocusDistance, 1); 1041 1042 // 5 m hyperfocal distance for back camera, infinity (fixed focus) for front 1043 const float hyperFocalDistance = mFacingBack ? 1.0/5.0 : 0.0; 1044 info.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, 1045 &minFocusDistance, 1); 1046 1047 static const float focalLength = 3.30f; // mm 1048 info.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS, 1049 &focalLength, 1); 1050 static const float aperture = 2.8f; 1051 info.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES, 1052 &aperture, 1); 1053 static const float filterDensity = 0; 1054 info.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES, 1055 &filterDensity, 1); 1056 static const uint8_t availableOpticalStabilization = 1057 ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF; 1058 info.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION, 1059 &availableOpticalStabilization, 1); 1060 1061 static const int32_t lensShadingMapSize[] = {1, 1}; 1062 info.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE, lensShadingMapSize, 1063 sizeof(lensShadingMapSize)/sizeof(int32_t)); 1064 1065 // Identity transform 1066 static const int32_t geometricCorrectionMapSize[] = {2, 2}; 1067 info.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE, 1068 geometricCorrectionMapSize, 1069 sizeof(geometricCorrectionMapSize)/sizeof(int32_t)); 1070 1071 static const float geometricCorrectionMap[2 * 3 * 2 * 2] = { 1072 0.f, 0.f, 0.f, 0.f, 0.f, 0.f, 1073 1.f, 0.f, 1.f, 0.f, 1.f, 0.f, 1074 0.f, 1.f, 0.f, 1.f, 0.f, 1.f, 1075 1.f, 1.f, 1.f, 1.f, 1.f, 1.f}; 1076 info.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP, 1077 geometricCorrectionMap, 1078 sizeof(geometricCorrectionMap)/sizeof(float)); 1079 1080 uint8_t lensFacing = mFacingBack ? 
1081 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT; 1082 info.update(ANDROID_LENS_FACING, &lensFacing, 1); 1083 1084 float lensPosition[3]; 1085 if (mFacingBack) { 1086 // Back-facing camera is center-top on device 1087 lensPosition[0] = 0; 1088 lensPosition[1] = 20; 1089 lensPosition[2] = -5; 1090 } else { 1091 // Front-facing camera is center-right on device 1092 lensPosition[0] = 20; 1093 lensPosition[1] = 20; 1094 lensPosition[2] = 0; 1095 } 1096 info.update(ANDROID_LENS_POSITION, lensPosition, sizeof(lensPosition)/ 1097 sizeof(float)); 1098 1099 // android.sensor 1100 1101 info.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, 1102 Sensor::kExposureTimeRange, 2); 1103 1104 info.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION, 1105 &Sensor::kFrameDurationRange[1], 1); 1106 1107 info.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, 1108 Sensor::kSensitivityRange, 1109 sizeof(Sensor::kSensitivityRange) 1110 /sizeof(int32_t)); 1111 1112 info.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT, 1113 &Sensor::kColorFilterArrangement, 1); 1114 1115 static const float sensorPhysicalSize[2] = {3.20f, 2.40f}; // mm 1116 info.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE, 1117 sensorPhysicalSize, 2); 1118 1119 info.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, 1120 (int32_t*)Sensor::kResolution, 2); 1121 1122 info.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, 1123 (int32_t*)Sensor::kResolution, 2); 1124 1125 info.update(ANDROID_SENSOR_INFO_WHITE_LEVEL, 1126 (int32_t*)&Sensor::kMaxRawValue, 1); 1127 1128 static const int32_t blackLevelPattern[4] = { 1129 (int32_t)Sensor::kBlackLevel, (int32_t)Sensor::kBlackLevel, 1130 (int32_t)Sensor::kBlackLevel, (int32_t)Sensor::kBlackLevel 1131 }; 1132 info.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN, 1133 blackLevelPattern, sizeof(blackLevelPattern)/sizeof(int32_t)); 1134 1135 //TODO: sensor color calibration fields 1136 1137 // android.flash 1138 static const uint8_t flashAvailable = 0; 1139 info.update(ANDROID_FLASH_INFO_AVAILABLE, 
&flashAvailable, 1); 1140 1141 static const int64_t flashChargeDuration = 0; 1142 info.update(ANDROID_FLASH_INFO_CHARGE_DURATION, &flashChargeDuration, 1); 1143 1144 // android.tonemap 1145 1146 static const int32_t tonemapCurvePoints = 128; 1147 info.update(ANDROID_TONEMAP_MAX_CURVE_POINTS, &tonemapCurvePoints, 1); 1148 1149 // android.scaler 1150 1151 info.update(ANDROID_SCALER_AVAILABLE_FORMATS, 1152 kAvailableFormats, 1153 sizeof(kAvailableFormats)/sizeof(int32_t)); 1154 1155 info.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES, 1156 (int32_t*)kAvailableRawSizes, 1157 sizeof(kAvailableRawSizes)/sizeof(uint32_t)); 1158 1159 info.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS, 1160 (int64_t*)kAvailableRawMinDurations, 1161 sizeof(kAvailableRawMinDurations)/sizeof(uint64_t)); 1162 1163 if (mFacingBack) { 1164 info.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES, 1165 (int32_t*)kAvailableProcessedSizesBack, 1166 sizeof(kAvailableProcessedSizesBack)/sizeof(uint32_t)); 1167 } else { 1168 info.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES, 1169 (int32_t*)kAvailableProcessedSizesFront, 1170 sizeof(kAvailableProcessedSizesFront)/sizeof(uint32_t)); 1171 } 1172 1173 info.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS, 1174 (int64_t*)kAvailableProcessedMinDurations, 1175 sizeof(kAvailableProcessedMinDurations)/sizeof(uint64_t)); 1176 1177 if (mFacingBack) { 1178 info.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES, 1179 (int32_t*)kAvailableJpegSizesBack, 1180 sizeof(kAvailableJpegSizesBack)/sizeof(uint32_t)); 1181 } else { 1182 info.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES, 1183 (int32_t*)kAvailableJpegSizesFront, 1184 sizeof(kAvailableJpegSizesFront)/sizeof(uint32_t)); 1185 } 1186 1187 info.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS, 1188 (int64_t*)kAvailableJpegMinDurations, 1189 sizeof(kAvailableJpegMinDurations)/sizeof(uint64_t)); 1190 1191 static const float maxZoom = 10; 1192 info.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, 1193 &maxZoom, 1); 1194 
1195 // android.jpeg 1196 1197 static const int32_t jpegThumbnailSizes[] = { 1198 0, 0, 1199 160, 120, 1200 320, 240 1201 }; 1202 info.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, 1203 jpegThumbnailSizes, sizeof(jpegThumbnailSizes)/sizeof(int32_t)); 1204 1205 static const int32_t jpegMaxSize = JpegCompressor::kMaxJpegSize; 1206 info.update(ANDROID_JPEG_MAX_SIZE, &jpegMaxSize, 1); 1207 1208 // android.stats 1209 1210 static const uint8_t availableFaceDetectModes[] = { 1211 ANDROID_STATISTICS_FACE_DETECT_MODE_OFF, 1212 ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, 1213 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL 1214 }; 1215 1216 info.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES, 1217 availableFaceDetectModes, 1218 sizeof(availableFaceDetectModes)); 1219 1220 static const int32_t maxFaceCount = 8; 1221 info.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, 1222 &maxFaceCount, 1); 1223 1224 static const int32_t histogramSize = 64; 1225 info.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT, 1226 &histogramSize, 1); 1227 1228 static const int32_t maxHistogramCount = 1000; 1229 info.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT, 1230 &maxHistogramCount, 1); 1231 1232 static const int32_t sharpnessMapSize[2] = {64, 64}; 1233 info.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, 1234 sharpnessMapSize, sizeof(sharpnessMapSize)/sizeof(int32_t)); 1235 1236 static const int32_t maxSharpnessMapValue = 1000; 1237 info.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE, 1238 &maxSharpnessMapValue, 1); 1239 1240 // android.control 1241 1242 static const uint8_t availableSceneModes[] = { 1243 ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED 1244 }; 1245 info.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES, 1246 availableSceneModes, sizeof(availableSceneModes)); 1247 1248 static const uint8_t availableEffects[] = { 1249 ANDROID_CONTROL_EFFECT_MODE_OFF 1250 }; 1251 info.update(ANDROID_CONTROL_AVAILABLE_EFFECTS, 1252 availableEffects, sizeof(availableEffects)); 1253 1254 
int32_t max3aRegions = 0; 1255 info.update(ANDROID_CONTROL_MAX_REGIONS, 1256 &max3aRegions, 1); 1257 1258 static const uint8_t availableAeModes[] = { 1259 ANDROID_CONTROL_AE_MODE_OFF, 1260 ANDROID_CONTROL_AE_MODE_ON 1261 }; 1262 info.update(ANDROID_CONTROL_AE_AVAILABLE_MODES, 1263 availableAeModes, sizeof(availableAeModes)); 1264 1265 static const camera_metadata_rational exposureCompensationStep = { 1266 1, 3 1267 }; 1268 info.update(ANDROID_CONTROL_AE_COMPENSATION_STEP, 1269 &exposureCompensationStep, 1); 1270 1271 int32_t exposureCompensationRange[] = {-9, 9}; 1272 info.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE, 1273 exposureCompensationRange, 1274 sizeof(exposureCompensationRange)/sizeof(int32_t)); 1275 1276 static const int32_t availableTargetFpsRanges[] = { 1277 5, 30, 15, 30 1278 }; 1279 info.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, 1280 availableTargetFpsRanges, 1281 sizeof(availableTargetFpsRanges)/sizeof(int32_t)); 1282 1283 static const uint8_t availableAntibandingModes[] = { 1284 ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF, 1285 ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO 1286 }; 1287 info.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES, 1288 availableAntibandingModes, sizeof(availableAntibandingModes)); 1289 1290 static const uint8_t availableAwbModes[] = { 1291 ANDROID_CONTROL_AWB_MODE_OFF, 1292 ANDROID_CONTROL_AWB_MODE_AUTO, 1293 ANDROID_CONTROL_AWB_MODE_INCANDESCENT, 1294 ANDROID_CONTROL_AWB_MODE_FLUORESCENT, 1295 ANDROID_CONTROL_AWB_MODE_DAYLIGHT, 1296 ANDROID_CONTROL_AWB_MODE_SHADE 1297 }; 1298 info.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES, 1299 availableAwbModes, sizeof(availableAwbModes)); 1300 1301 static const uint8_t availableAfModesBack[] = { 1302 ANDROID_CONTROL_AF_MODE_OFF, 1303 ANDROID_CONTROL_AF_MODE_AUTO, 1304 ANDROID_CONTROL_AF_MODE_MACRO, 1305 ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO, 1306 ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE 1307 }; 1308 1309 static const uint8_t availableAfModesFront[] = { 1310 
ANDROID_CONTROL_AF_MODE_OFF 1311 }; 1312 1313 if (mFacingBack) { 1314 info.update(ANDROID_CONTROL_AF_AVAILABLE_MODES, 1315 availableAfModesBack, sizeof(availableAfModesBack)); 1316 } else { 1317 info.update(ANDROID_CONTROL_AF_AVAILABLE_MODES, 1318 availableAfModesFront, sizeof(availableAfModesFront)); 1319 } 1320 1321 static const uint8_t availableVstabModes[] = { 1322 ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF 1323 }; 1324 info.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES, 1325 availableVstabModes, sizeof(availableVstabModes)); 1326 1327 // android.info 1328 const uint8_t supportedHardwareLevel = 1329 mFullMode ? ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL : 1330 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED; 1331 info.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL, 1332 &supportedHardwareLevel, 1333 /*count*/1); 1334 1335 mCameraInfo = info.release(); 1336 1337 return OK; 1338 } 1339 1340 status_t EmulatedFakeCamera3::process3A(CameraMetadata &settings) { 1341 /** 1342 * Extract top-level 3A controls 1343 */ 1344 status_t res; 1345 1346 bool facePriority = false; 1347 1348 camera_metadata_entry e; 1349 1350 e = settings.find(ANDROID_CONTROL_MODE); 1351 if (e.count == 0) { 1352 ALOGE("%s: No control mode entry!", __FUNCTION__); 1353 return BAD_VALUE; 1354 } 1355 uint8_t controlMode = e.data.u8[0]; 1356 1357 e = settings.find(ANDROID_CONTROL_SCENE_MODE); 1358 if (e.count == 0) { 1359 ALOGE("%s: No scene mode entry!", __FUNCTION__); 1360 return BAD_VALUE; 1361 } 1362 uint8_t sceneMode = e.data.u8[0]; 1363 1364 if (controlMode == ANDROID_CONTROL_MODE_OFF) { 1365 mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE; 1366 mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE; 1367 mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE; 1368 update3A(settings); 1369 return OK; 1370 } else if (controlMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) { 1371 switch(sceneMode) { 1372 case ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY: 1373 mFacePriority = true; 1374 break; 1375 default: 1376 
ALOGE("%s: Emulator doesn't support scene mode %d", 1377 __FUNCTION__, sceneMode); 1378 return BAD_VALUE; 1379 } 1380 } else { 1381 mFacePriority = false; 1382 } 1383 1384 // controlMode == AUTO or sceneMode = FACE_PRIORITY 1385 // Process individual 3A controls 1386 1387 res = doFakeAE(settings); 1388 if (res != OK) return res; 1389 1390 res = doFakeAF(settings); 1391 if (res != OK) return res; 1392 1393 res = doFakeAWB(settings); 1394 if (res != OK) return res; 1395 1396 update3A(settings); 1397 return OK; 1398 } 1399 1400 status_t EmulatedFakeCamera3::doFakeAE(CameraMetadata &settings) { 1401 camera_metadata_entry e; 1402 1403 e = settings.find(ANDROID_CONTROL_AE_MODE); 1404 if (e.count == 0) { 1405 ALOGE("%s: No AE mode entry!", __FUNCTION__); 1406 return BAD_VALUE; 1407 } 1408 uint8_t aeMode = e.data.u8[0]; 1409 1410 switch (aeMode) { 1411 case ANDROID_CONTROL_AE_MODE_OFF: 1412 // AE is OFF 1413 mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE; 1414 return OK; 1415 case ANDROID_CONTROL_AE_MODE_ON: 1416 // OK for AUTO modes 1417 break; 1418 default: 1419 ALOGE("%s: Emulator doesn't support AE mode %d", 1420 __FUNCTION__, aeMode); 1421 return BAD_VALUE; 1422 } 1423 1424 e = settings.find(ANDROID_CONTROL_AE_LOCK); 1425 if (e.count == 0) { 1426 ALOGE("%s: No AE lock entry!", __FUNCTION__); 1427 return BAD_VALUE; 1428 } 1429 bool aeLocked = (e.data.u8[0] == ANDROID_CONTROL_AE_LOCK_ON); 1430 1431 e = settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER); 1432 bool precaptureTrigger = false; 1433 if (e.count != 0) { 1434 precaptureTrigger = 1435 (e.data.u8[0] == ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_START); 1436 } 1437 1438 if (precaptureTrigger) { 1439 ALOGV("%s: Pre capture trigger = %d", __FUNCTION__, precaptureTrigger); 1440 } else if (e.count > 0) { 1441 ALOGV("%s: Pre capture trigger was present? 
%d", 1442 __FUNCTION__, 1443 e.count); 1444 } 1445 1446 // If we have an aePrecaptureTrigger, aePrecaptureId should be set too 1447 if (e.count != 0) { 1448 e = settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID); 1449 1450 if (e.count == 0) { 1451 ALOGE("%s: When android.control.aePrecaptureTrigger is set " 1452 " in the request, aePrecaptureId needs to be set as well", 1453 __FUNCTION__); 1454 return BAD_VALUE; 1455 } 1456 1457 mAeTriggerId = e.data.i32[0]; 1458 } 1459 1460 if (precaptureTrigger || mAeState == ANDROID_CONTROL_AE_STATE_PRECAPTURE) { 1461 // Run precapture sequence 1462 if (mAeState != ANDROID_CONTROL_AE_STATE_PRECAPTURE) { 1463 mAeCounter = 0; 1464 } 1465 1466 if (mFacePriority) { 1467 mAeTargetExposureTime = kFacePriorityExposureTime; 1468 } else { 1469 mAeTargetExposureTime = kNormalExposureTime; 1470 } 1471 1472 if (mAeCounter > kPrecaptureMinFrames && 1473 (mAeTargetExposureTime - mAeCurrentExposureTime) < 1474 mAeTargetExposureTime / 10) { 1475 // Done with precapture 1476 mAeCounter = 0; 1477 mAeState = aeLocked ? ANDROID_CONTROL_AE_STATE_LOCKED : 1478 ANDROID_CONTROL_AE_STATE_CONVERGED; 1479 } else { 1480 // Converge some more 1481 mAeCurrentExposureTime += 1482 (mAeTargetExposureTime - mAeCurrentExposureTime) * 1483 kExposureTrackRate; 1484 mAeCounter++; 1485 mAeState = ANDROID_CONTROL_AE_STATE_PRECAPTURE; 1486 } 1487 1488 } else if (!aeLocked) { 1489 // Run standard occasional AE scan 1490 switch (mAeState) { 1491 case ANDROID_CONTROL_AE_STATE_CONVERGED: 1492 case ANDROID_CONTROL_AE_STATE_INACTIVE: 1493 mAeCounter++; 1494 if (mAeCounter > kStableAeMaxFrames) { 1495 mAeTargetExposureTime = 1496 mFacePriority ? 
kFacePriorityExposureTime : 1497 kNormalExposureTime; 1498 float exposureStep = ((double)rand() / RAND_MAX) * 1499 (kExposureWanderMax - kExposureWanderMin) + 1500 kExposureWanderMin; 1501 mAeTargetExposureTime *= std::pow(2, exposureStep); 1502 mAeState = ANDROID_CONTROL_AE_STATE_SEARCHING; 1503 } 1504 break; 1505 case ANDROID_CONTROL_AE_STATE_SEARCHING: 1506 mAeCurrentExposureTime += 1507 (mAeTargetExposureTime - mAeCurrentExposureTime) * 1508 kExposureTrackRate; 1509 if (abs(mAeTargetExposureTime - mAeCurrentExposureTime) < 1510 mAeTargetExposureTime / 10) { 1511 // Close enough 1512 mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED; 1513 mAeCounter = 0; 1514 } 1515 break; 1516 case ANDROID_CONTROL_AE_STATE_LOCKED: 1517 mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED; 1518 mAeCounter = 0; 1519 break; 1520 default: 1521 ALOGE("%s: Emulator in unexpected AE state %d", 1522 __FUNCTION__, mAeState); 1523 return INVALID_OPERATION; 1524 } 1525 } else { 1526 // AE is locked 1527 mAeState = ANDROID_CONTROL_AE_STATE_LOCKED; 1528 } 1529 1530 return OK; 1531 } 1532 1533 status_t EmulatedFakeCamera3::doFakeAF(CameraMetadata &settings) { 1534 camera_metadata_entry e; 1535 1536 e = settings.find(ANDROID_CONTROL_AF_MODE); 1537 if (e.count == 0) { 1538 ALOGE("%s: No AF mode entry!", __FUNCTION__); 1539 return BAD_VALUE; 1540 } 1541 uint8_t afMode = e.data.u8[0]; 1542 1543 e = settings.find(ANDROID_CONTROL_AF_TRIGGER); 1544 typedef camera_metadata_enum_android_control_af_trigger af_trigger_t; 1545 af_trigger_t afTrigger; 1546 // If we have an afTrigger, afTriggerId should be set too 1547 if (e.count != 0) { 1548 afTrigger = static_cast<af_trigger_t>(e.data.u8[0]); 1549 1550 e = settings.find(ANDROID_CONTROL_AF_TRIGGER_ID); 1551 1552 if (e.count == 0) { 1553 ALOGE("%s: When android.control.afTrigger is set " 1554 " in the request, afTriggerId needs to be set as well", 1555 __FUNCTION__); 1556 return BAD_VALUE; 1557 } 1558 1559 mAfTriggerId = e.data.i32[0]; 1560 1561 ALOGV("%s: AF 
trigger set to 0x%x", __FUNCTION__, afTrigger); 1562 ALOGV("%s: AF trigger ID set to 0x%x", __FUNCTION__, mAfTriggerId); 1563 ALOGV("%s: AF mode is 0x%x", __FUNCTION__, afMode); 1564 } else { 1565 afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE; 1566 } 1567 1568 switch (afMode) { 1569 case ANDROID_CONTROL_AF_MODE_OFF: 1570 mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE; 1571 return OK; 1572 case ANDROID_CONTROL_AF_MODE_AUTO: 1573 case ANDROID_CONTROL_AF_MODE_MACRO: 1574 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO: 1575 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE: 1576 if (!mFacingBack) { 1577 ALOGE("%s: Front camera doesn't support AF mode %d", 1578 __FUNCTION__, afMode); 1579 return BAD_VALUE; 1580 } 1581 // OK, handle transitions lower on 1582 break; 1583 default: 1584 ALOGE("%s: Emulator doesn't support AF mode %d", 1585 __FUNCTION__, afMode); 1586 return BAD_VALUE; 1587 } 1588 1589 bool afModeChanged = mAfMode != afMode; 1590 mAfMode = afMode; 1591 1592 /** 1593 * Simulate AF triggers. Transition at most 1 state per frame. 1594 * - Focusing always succeeds (goes into locked, or PASSIVE_SCAN). 
1595 */ 1596 1597 bool afTriggerStart = false; 1598 bool afTriggerCancel = false; 1599 switch (afTrigger) { 1600 case ANDROID_CONTROL_AF_TRIGGER_IDLE: 1601 break; 1602 case ANDROID_CONTROL_AF_TRIGGER_START: 1603 afTriggerStart = true; 1604 break; 1605 case ANDROID_CONTROL_AF_TRIGGER_CANCEL: 1606 afTriggerCancel = true; 1607 // Cancel trigger always transitions into INACTIVE 1608 mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE; 1609 1610 ALOGV("%s: AF State transition to STATE_INACTIVE", __FUNCTION__); 1611 1612 // Stay in 'inactive' until at least next frame 1613 return OK; 1614 default: 1615 ALOGE("%s: Unknown af trigger value %d", __FUNCTION__, afTrigger); 1616 return BAD_VALUE; 1617 } 1618 1619 // If we get down here, we're either in an autofocus mode 1620 // or in a continuous focus mode (and no other modes) 1621 1622 int oldAfState = mAfState; 1623 switch (mAfState) { 1624 case ANDROID_CONTROL_AF_STATE_INACTIVE: 1625 if (afTriggerStart) { 1626 switch (afMode) { 1627 case ANDROID_CONTROL_AF_MODE_AUTO: 1628 // fall-through 1629 case ANDROID_CONTROL_AF_MODE_MACRO: 1630 mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN; 1631 break; 1632 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO: 1633 // fall-through 1634 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE: 1635 mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED; 1636 break; 1637 } 1638 } else { 1639 // At least one frame stays in INACTIVE 1640 if (!afModeChanged) { 1641 switch (afMode) { 1642 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO: 1643 // fall-through 1644 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE: 1645 mAfState = ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN; 1646 break; 1647 } 1648 } 1649 } 1650 break; 1651 case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN: 1652 /** 1653 * When the AF trigger is activated, the algorithm should finish 1654 * its PASSIVE_SCAN if active, and then transition into AF_FOCUSED 1655 * or AF_NOT_FOCUSED as appropriate 1656 */ 1657 if (afTriggerStart) { 1658 // Randomly transition to 
focused or not focused 1659 if (rand() % 3) { 1660 mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED; 1661 } else { 1662 mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED; 1663 } 1664 } 1665 /** 1666 * When the AF trigger is not involved, the AF algorithm should 1667 * start in INACTIVE state, and then transition into PASSIVE_SCAN 1668 * and PASSIVE_FOCUSED states 1669 */ 1670 else if (!afTriggerCancel) { 1671 // Randomly transition to passive focus 1672 if (rand() % 3 == 0) { 1673 mAfState = ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED; 1674 } 1675 } 1676 1677 break; 1678 case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED: 1679 if (afTriggerStart) { 1680 // Randomly transition to focused or not focused 1681 if (rand() % 3) { 1682 mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED; 1683 } else { 1684 mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED; 1685 } 1686 } 1687 // TODO: initiate passive scan (PASSIVE_SCAN) 1688 break; 1689 case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN: 1690 // Simulate AF sweep completing instantaneously 1691 1692 // Randomly transition to focused or not focused 1693 if (rand() % 3) { 1694 mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED; 1695 } else { 1696 mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED; 1697 } 1698 break; 1699 case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED: 1700 if (afTriggerStart) { 1701 switch (afMode) { 1702 case ANDROID_CONTROL_AF_MODE_AUTO: 1703 // fall-through 1704 case ANDROID_CONTROL_AF_MODE_MACRO: 1705 mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN; 1706 break; 1707 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO: 1708 // fall-through 1709 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE: 1710 // continuous autofocus => trigger start has no effect 1711 break; 1712 } 1713 } 1714 break; 1715 case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED: 1716 if (afTriggerStart) { 1717 switch (afMode) { 1718 case ANDROID_CONTROL_AF_MODE_AUTO: 1719 // fall-through 1720 case ANDROID_CONTROL_AF_MODE_MACRO: 1721 mAfState = 
ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN; 1722 break; 1723 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO: 1724 // fall-through 1725 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE: 1726 // continuous autofocus => trigger start has no effect 1727 break; 1728 } 1729 } 1730 break; 1731 default: 1732 ALOGE("%s: Bad af state %d", __FUNCTION__, mAfState); 1733 } 1734 1735 { 1736 char afStateString[100] = {0,}; 1737 camera_metadata_enum_snprint(ANDROID_CONTROL_AF_STATE, 1738 oldAfState, 1739 afStateString, 1740 sizeof(afStateString)); 1741 1742 char afNewStateString[100] = {0,}; 1743 camera_metadata_enum_snprint(ANDROID_CONTROL_AF_STATE, 1744 mAfState, 1745 afNewStateString, 1746 sizeof(afNewStateString)); 1747 ALOGVV("%s: AF state transitioned from %s to %s", 1748 __FUNCTION__, afStateString, afNewStateString); 1749 } 1750 1751 1752 return OK; 1753 } 1754 1755 status_t EmulatedFakeCamera3::doFakeAWB(CameraMetadata &settings) { 1756 camera_metadata_entry e; 1757 1758 e = settings.find(ANDROID_CONTROL_AWB_MODE); 1759 if (e.count == 0) { 1760 ALOGE("%s: No AWB mode entry!", __FUNCTION__); 1761 return BAD_VALUE; 1762 } 1763 uint8_t awbMode = e.data.u8[0]; 1764 1765 // TODO: Add white balance simulation 1766 1767 switch (awbMode) { 1768 case ANDROID_CONTROL_AWB_MODE_OFF: 1769 mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE; 1770 return OK; 1771 case ANDROID_CONTROL_AWB_MODE_AUTO: 1772 case ANDROID_CONTROL_AWB_MODE_INCANDESCENT: 1773 case ANDROID_CONTROL_AWB_MODE_FLUORESCENT: 1774 case ANDROID_CONTROL_AWB_MODE_DAYLIGHT: 1775 case ANDROID_CONTROL_AWB_MODE_SHADE: 1776 // OK 1777 break; 1778 default: 1779 ALOGE("%s: Emulator doesn't support AWB mode %d", 1780 __FUNCTION__, awbMode); 1781 return BAD_VALUE; 1782 } 1783 1784 return OK; 1785 } 1786 1787 1788 void EmulatedFakeCamera3::update3A(CameraMetadata &settings) { 1789 if (mAeState != ANDROID_CONTROL_AE_STATE_INACTIVE) { 1790 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, 1791 &mAeCurrentExposureTime, 1); 1792 
settings.update(ANDROID_SENSOR_SENSITIVITY, 1793 &mAeCurrentSensitivity, 1); 1794 } 1795 1796 settings.update(ANDROID_CONTROL_AE_STATE, 1797 &mAeState, 1); 1798 settings.update(ANDROID_CONTROL_AF_STATE, 1799 &mAfState, 1); 1800 settings.update(ANDROID_CONTROL_AWB_STATE, 1801 &mAwbState, 1); 1802 /** 1803 * TODO: Trigger IDs need a think-through 1804 */ 1805 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_ID, 1806 &mAeTriggerId, 1); 1807 settings.update(ANDROID_CONTROL_AF_TRIGGER_ID, 1808 &mAfTriggerId, 1); 1809 } 1810 1811 void EmulatedFakeCamera3::signalReadoutIdle() { 1812 Mutex::Autolock l(mLock); 1813 // Need to chek isIdle again because waiting on mLock may have allowed 1814 // something to be placed in the in-flight queue. 1815 if (mStatus == STATUS_ACTIVE && mReadoutThread->isIdle()) { 1816 ALOGV("Now idle"); 1817 mStatus = STATUS_READY; 1818 } 1819 } 1820 1821 void EmulatedFakeCamera3::onSensorEvent(uint32_t frameNumber, Event e, 1822 nsecs_t timestamp) { 1823 switch(e) { 1824 case Sensor::SensorListener::EXPOSURE_START: { 1825 ALOGVV("%s: Frame %d: Sensor started exposure at %lld", 1826 __FUNCTION__, frameNumber, timestamp); 1827 // Trigger shutter notify to framework 1828 camera3_notify_msg_t msg; 1829 msg.type = CAMERA3_MSG_SHUTTER; 1830 msg.message.shutter.frame_number = frameNumber; 1831 msg.message.shutter.timestamp = timestamp; 1832 sendNotify(&msg); 1833 break; 1834 } 1835 default: 1836 ALOGW("%s: Unexpected sensor event %d at %lld", __FUNCTION__, 1837 e, timestamp); 1838 break; 1839 } 1840 } 1841 1842 EmulatedFakeCamera3::ReadoutThread::ReadoutThread(EmulatedFakeCamera3 *parent) : 1843 mParent(parent), mJpegWaiting(false) { 1844 } 1845 1846 EmulatedFakeCamera3::ReadoutThread::~ReadoutThread() { 1847 for (List<Request>::iterator i = mInFlightQueue.begin(); 1848 i != mInFlightQueue.end(); i++) { 1849 delete i->buffers; 1850 delete i->sensorBuffers; 1851 } 1852 } 1853 1854 void EmulatedFakeCamera3::ReadoutThread::queueCaptureRequest(const Request &r) { 
1855 Mutex::Autolock l(mLock); 1856 1857 mInFlightQueue.push_back(r); 1858 mInFlightSignal.signal(); 1859 } 1860 1861 bool EmulatedFakeCamera3::ReadoutThread::isIdle() { 1862 Mutex::Autolock l(mLock); 1863 return mInFlightQueue.empty() && !mThreadActive; 1864 } 1865 1866 status_t EmulatedFakeCamera3::ReadoutThread::waitForReadout() { 1867 status_t res; 1868 Mutex::Autolock l(mLock); 1869 int loopCount = 0; 1870 while (mInFlightQueue.size() >= kMaxQueueSize) { 1871 res = mInFlightSignal.waitRelative(mLock, kWaitPerLoop); 1872 if (res != OK && res != TIMED_OUT) { 1873 ALOGE("%s: Error waiting for in-flight queue to shrink", 1874 __FUNCTION__); 1875 return INVALID_OPERATION; 1876 } 1877 if (loopCount == kMaxWaitLoops) { 1878 ALOGE("%s: Timed out waiting for in-flight queue to shrink", 1879 __FUNCTION__); 1880 return TIMED_OUT; 1881 } 1882 loopCount++; 1883 } 1884 return OK; 1885 } 1886 1887 bool EmulatedFakeCamera3::ReadoutThread::threadLoop() { 1888 status_t res; 1889 1890 ALOGVV("%s: ReadoutThread waiting for request", __FUNCTION__); 1891 1892 // First wait for a request from the in-flight queue 1893 1894 if (mCurrentRequest.settings.isEmpty()) { 1895 Mutex::Autolock l(mLock); 1896 if (mInFlightQueue.empty()) { 1897 res = mInFlightSignal.waitRelative(mLock, kWaitPerLoop); 1898 if (res == TIMED_OUT) { 1899 ALOGVV("%s: ReadoutThread: Timed out waiting for request", 1900 __FUNCTION__); 1901 return true; 1902 } else if (res != NO_ERROR) { 1903 ALOGE("%s: Error waiting for capture requests: %d", 1904 __FUNCTION__, res); 1905 return false; 1906 } 1907 } 1908 mCurrentRequest.frameNumber = mInFlightQueue.begin()->frameNumber; 1909 mCurrentRequest.settings.acquire(mInFlightQueue.begin()->settings); 1910 mCurrentRequest.buffers = mInFlightQueue.begin()->buffers; 1911 mCurrentRequest.sensorBuffers = mInFlightQueue.begin()->sensorBuffers; 1912 mInFlightQueue.erase(mInFlightQueue.begin()); 1913 mInFlightSignal.signal(); 1914 mThreadActive = true; 1915 ALOGVV("%s: Beginning 
readout of frame %d", __FUNCTION__, 1916 mCurrentRequest.frameNumber); 1917 } 1918 1919 // Then wait for it to be delivered from the sensor 1920 ALOGVV("%s: ReadoutThread: Wait for frame to be delivered from sensor", 1921 __FUNCTION__); 1922 1923 nsecs_t captureTime; 1924 bool gotFrame = 1925 mParent->mSensor->waitForNewFrame(kWaitPerLoop, &captureTime); 1926 if (!gotFrame) { 1927 ALOGVV("%s: ReadoutThread: Timed out waiting for sensor frame", 1928 __FUNCTION__); 1929 return true; 1930 } 1931 1932 ALOGVV("Sensor done with readout for frame %d, captured at %lld ", 1933 mCurrentRequest.frameNumber, captureTime); 1934 1935 // Check if we need to JPEG encode a buffer, and send it for async 1936 // compression if so. Otherwise prepare the buffer for return. 1937 bool needJpeg = false; 1938 HalBufferVector::iterator buf = mCurrentRequest.buffers->begin(); 1939 while(buf != mCurrentRequest.buffers->end()) { 1940 bool goodBuffer = true; 1941 if ( buf->stream->format == 1942 HAL_PIXEL_FORMAT_BLOB) { 1943 Mutex::Autolock jl(mJpegLock); 1944 if (mJpegWaiting) { 1945 // This shouldn't happen, because processCaptureRequest should 1946 // be stalling until JPEG compressor is free. 1947 ALOGE("%s: Already processing a JPEG!", __FUNCTION__); 1948 goodBuffer = false; 1949 } 1950 if (goodBuffer) { 1951 // Compressor takes ownership of sensorBuffers here 1952 res = mParent->mJpegCompressor->start(mCurrentRequest.sensorBuffers, 1953 this); 1954 goodBuffer = (res == OK); 1955 } 1956 if (goodBuffer) { 1957 needJpeg = true; 1958 1959 mJpegHalBuffer = *buf; 1960 mJpegFrameNumber = mCurrentRequest.frameNumber; 1961 mJpegWaiting = true; 1962 1963 mCurrentRequest.sensorBuffers = NULL; 1964 buf = mCurrentRequest.buffers->erase(buf); 1965 1966 continue; 1967 } 1968 ALOGE("%s: Error compressing output buffer: %s (%d)", 1969 __FUNCTION__, strerror(-res), res); 1970 // fallthrough for cleanup 1971 } 1972 GraphicBufferMapper::get().unlock(*(buf->buffer)); 1973 1974 buf->status = goodBuffer ? 
CAMERA3_BUFFER_STATUS_OK : 1975 CAMERA3_BUFFER_STATUS_ERROR; 1976 buf->acquire_fence = -1; 1977 buf->release_fence = -1; 1978 1979 ++buf; 1980 } // end while 1981 1982 // Construct result for all completed buffers and results 1983 1984 camera3_capture_result result; 1985 1986 mCurrentRequest.settings.update(ANDROID_SENSOR_TIMESTAMP, 1987 &captureTime, 1); 1988 1989 result.frame_number = mCurrentRequest.frameNumber; 1990 result.result = mCurrentRequest.settings.getAndLock(); 1991 result.num_output_buffers = mCurrentRequest.buffers->size(); 1992 result.output_buffers = mCurrentRequest.buffers->array(); 1993 1994 // Go idle if queue is empty, before sending result 1995 bool signalIdle = false; 1996 { 1997 Mutex::Autolock l(mLock); 1998 if (mInFlightQueue.empty()) { 1999 mThreadActive = false; 2000 signalIdle = true; 2001 } 2002 } 2003 if (signalIdle) mParent->signalReadoutIdle(); 2004 2005 // Send it off to the framework 2006 ALOGVV("%s: ReadoutThread: Send result to framework", 2007 __FUNCTION__); 2008 mParent->sendCaptureResult(&result); 2009 2010 // Clean up 2011 mCurrentRequest.settings.unlock(result.result); 2012 2013 delete mCurrentRequest.buffers; 2014 mCurrentRequest.buffers = NULL; 2015 if (!needJpeg) { 2016 delete mCurrentRequest.sensorBuffers; 2017 mCurrentRequest.sensorBuffers = NULL; 2018 } 2019 mCurrentRequest.settings.clear(); 2020 2021 return true; 2022 } 2023 2024 void EmulatedFakeCamera3::ReadoutThread::onJpegDone( 2025 const StreamBuffer &jpegBuffer, bool success) { 2026 Mutex::Autolock jl(mJpegLock); 2027 2028 GraphicBufferMapper::get().unlock(*(jpegBuffer.buffer)); 2029 2030 mJpegHalBuffer.status = success ? 
2031 CAMERA3_BUFFER_STATUS_OK : CAMERA3_BUFFER_STATUS_ERROR; 2032 mJpegHalBuffer.acquire_fence = -1; 2033 mJpegHalBuffer.release_fence = -1; 2034 mJpegWaiting = false; 2035 2036 camera3_capture_result result; 2037 result.frame_number = mJpegFrameNumber; 2038 result.result = NULL; 2039 result.num_output_buffers = 1; 2040 result.output_buffers = &mJpegHalBuffer; 2041 2042 if (!success) { 2043 ALOGE("%s: Compression failure, returning error state buffer to" 2044 " framework", __FUNCTION__); 2045 } else { 2046 ALOGV("%s: Compression complete, returning buffer to framework", 2047 __FUNCTION__); 2048 } 2049 2050 mParent->sendCaptureResult(&result); 2051 } 2052 2053 void EmulatedFakeCamera3::ReadoutThread::onJpegInputDone( 2054 const StreamBuffer &inputBuffer) { 2055 // Should never get here, since the input buffer has to be returned 2056 // by end of processCaptureRequest 2057 ALOGE("%s: Unexpected input buffer from JPEG compressor!", __FUNCTION__); 2058 } 2059 2060 2061 }; // namespace android 2062