      1 /*
      2  * Copyright (C) 2013 The Android Open Source Project
      3  *
      4  * Licensed under the Apache License, Version 2.0 (the "License");
      5  * you may not use this file except in compliance with the License.
      6  * You may obtain a copy of the License at
      7  *
      8  *      http://www.apache.org/licenses/LICENSE-2.0
      9  *
     10  * Unless required by applicable law or agreed to in writing, software
     11  * distributed under the License is distributed on an "AS IS" BASIS,
     12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     13  * See the License for the specific language governing permissions and
     14  * limitations under the License.
     15  */
     16 
     17 /*
     18  * Contains the implementation of the EmulatedFakeCamera3 class, which
     19  * encapsulates the functionality of an advanced fake camera.
     20  */
     21 
     22 #include <inttypes.h>
     23 
     24 //#define LOG_NDEBUG 0
     25 //#define LOG_NNDEBUG 0
     26 #define LOG_TAG "EmulatedCamera_FakeCamera3"
     27 #include <cutils/properties.h>
     28 #include <utils/Log.h>
     29 
     30 #include "EmulatedFakeCamera3.h"
     31 #include "EmulatedCameraFactory.h"
     32 #include <ui/Fence.h>
     33 #include "GrallocModule.h"
     34 
     35 #include "fake-pipeline2/Sensor.h"
     36 #include "fake-pipeline2/JpegCompressor.h"
     37 #include <cmath>
     38 
     39 #include <vector>
     40 #include <algorithm>
     41 
     42 #if defined(LOG_NNDEBUG) && LOG_NNDEBUG == 0
     43 #define ALOGVV ALOGV
     44 #else
     45 #define ALOGVV(...) ((void)0)
     46 #endif
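        // ALOGVV provides "very verbose" per-frame logging: it forwards to ALOGV only
        // when LOG_NNDEBUG is defined as 0 above, and otherwise compiles to a no-op.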
     47 
     48 namespace android {
     49 
     50 /**
     51  * Constants for camera capabilities
     52  */
     53 
     54 const int64_t USEC = 1000LL;
     55 const int64_t MSEC = USEC * 1000LL;
     56 const int64_t SEC = MSEC * 1000LL;
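        // These constants express durations in nanoseconds (the base unit of nsecs_t):
        // USEC is one microsecond, MSEC one millisecond, and SEC one second.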
     57 
     58 const int32_t EmulatedFakeCamera3::kAvailableFormats[] = {
     59         HAL_PIXEL_FORMAT_RAW16,
     60         HAL_PIXEL_FORMAT_BLOB,
     61         HAL_PIXEL_FORMAT_RGBA_8888,
     62         HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
     63         // These are handled by YCbCr_420_888
     64         //        HAL_PIXEL_FORMAT_YV12,
     65         //        HAL_PIXEL_FORMAT_YCrCb_420_SP,
     66         HAL_PIXEL_FORMAT_YCbCr_420_888,
     67         HAL_PIXEL_FORMAT_Y16
     68 };
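        // Note: HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED is remapped to RGBA_8888,
        // YCbCr_420_888, or RGB_888 at stream-configuration time based on the gralloc
        // usage flags (see configureStreams() below).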
     69 
     70 const uint32_t EmulatedFakeCamera3::kAvailableRawSizes[4] = {
     71     640, 480,
     72     1280, 720
     73     //    mSensorWidth, mSensorHeight
     74 };
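        // Entries are (width, height) pairs; constructStaticInfo() scans them and uses
        // the largest listed pair as the fake sensor resolution.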
     75 
     76 
     77 /**
     78  * 3A constants
     79  */
     80 
     81 // Default exposure and gain targets for different scenarios
     82 const nsecs_t EmulatedFakeCamera3::kNormalExposureTime       = 10 * MSEC;
     83 const nsecs_t EmulatedFakeCamera3::kFacePriorityExposureTime = 30 * MSEC;
     84 const int     EmulatedFakeCamera3::kNormalSensitivity        = 100;
     85 const int     EmulatedFakeCamera3::kFacePrioritySensitivity  = 400;
     86 // CTS requires AE to stabilize within an 8-frame timeout in waitForAeStable
     87 const float   EmulatedFakeCamera3::kExposureTrackRate        = 0.2;
     88 const int     EmulatedFakeCamera3::kPrecaptureMinFrames      = 10;
     89 const int     EmulatedFakeCamera3::kStableAeMaxFrames        = 100;
     90 const float   EmulatedFakeCamera3::kExposureWanderMin        = -2;
     91 const float   EmulatedFakeCamera3::kExposureWanderMax        = 1;
     92 
     93 /**
     94  * Camera device lifecycle methods
     95  */
     96 
     97 EmulatedFakeCamera3::EmulatedFakeCamera3(int cameraId, bool facingBack,
     98         struct hw_module_t* module) :
     99         EmulatedCamera3(cameraId, module),
    100         mFacingBack(facingBack) {
    101     ALOGI("Constructing emulated fake camera 3: ID %d, facing %s",
    102             mCameraID, facingBack ? "back" : "front");
    103 
    104     for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) {
    105         mDefaultTemplates[i] = NULL;
    106     }
    107 }
    108 
    109 EmulatedFakeCamera3::~EmulatedFakeCamera3() {
    110     for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) {
    111         if (mDefaultTemplates[i] != NULL) {
    112             free_camera_metadata(mDefaultTemplates[i]);
    113         }
    114     }
    115 }
    116 
    117 status_t EmulatedFakeCamera3::Initialize() {
    118     ALOGV("%s: E", __FUNCTION__);
    119     status_t res;
    120 
    121     if (mStatus != STATUS_ERROR) {
    122         ALOGE("%s: Already initialized!", __FUNCTION__);
    123         return INVALID_OPERATION;
    124     }
    125 
    126     res = getCameraCapabilities();
    127     if (res != OK) {
    128         ALOGE("%s: Unable to get camera capabilities: %s (%d)",
    129                 __FUNCTION__, strerror(-res), res);
    130         return res;
    131     }
    132 
    133     res = constructStaticInfo();
    134     if (res != OK) {
    135         ALOGE("%s: Unable to allocate static info: %s (%d)",
    136                 __FUNCTION__, strerror(-res), res);
    137         return res;
    138     }
    139 
    140     return EmulatedCamera3::Initialize();
    141 }
    142 
    143 status_t EmulatedFakeCamera3::connectCamera(hw_device_t** device) {
    144     ALOGV("%s: E", __FUNCTION__);
    145     Mutex::Autolock l(mLock);
    146     status_t res;
    147 
    148     if (mStatus != STATUS_CLOSED) {
    149         ALOGE("%s: Can't connect in state %d", __FUNCTION__, mStatus);
    150         return INVALID_OPERATION;
    151     }
    152 
    153     mSensor = new Sensor(mSensorWidth, mSensorHeight);
    154     mSensor->setSensorListener(this);
    155 
    156     res = mSensor->startUp();
    157     if (res != NO_ERROR) return res;
    158 
    159     mReadoutThread = new ReadoutThread(this);
    160     mJpegCompressor = new JpegCompressor();
    161 
    162     res = mReadoutThread->run("EmuCam3::readoutThread");
    163     if (res != NO_ERROR) return res;
    164 
    165     // Initialize fake 3A
    166 
    167     mControlMode  = ANDROID_CONTROL_MODE_AUTO;
    168     mFacePriority = false;
    169     mAeMode       = ANDROID_CONTROL_AE_MODE_ON;
    170     mAfMode       = ANDROID_CONTROL_AF_MODE_AUTO;
    171     mAwbMode      = ANDROID_CONTROL_AWB_MODE_AUTO;
    172     mAeState      = ANDROID_CONTROL_AE_STATE_INACTIVE;
    173     mAfState      = ANDROID_CONTROL_AF_STATE_INACTIVE;
    174     mAwbState     = ANDROID_CONTROL_AWB_STATE_INACTIVE;
    175     mAeCounter    = 0;
    176     mAeTargetExposureTime = kNormalExposureTime;
    177     mAeCurrentExposureTime = kNormalExposureTime;
    178     mAeCurrentSensitivity  = kNormalSensitivity;
    179 
    180     return EmulatedCamera3::connectCamera(device);
    181 }
    182 
    183 status_t EmulatedFakeCamera3::closeCamera() {
    184     ALOGV("%s: E", __FUNCTION__);
    185     status_t res;
    186     {
    187         Mutex::Autolock l(mLock);
    188         if (mStatus == STATUS_CLOSED) return OK;
    189 
    190         res = mSensor->shutDown();
    191         if (res != NO_ERROR) {
    192             ALOGE("%s: Unable to shut down sensor: %d", __FUNCTION__, res);
    193             return res;
    194         }
    195         mSensor.clear();
    196 
    197         mReadoutThread->requestExit();
    198     }
    199 
    200     mReadoutThread->join();
    201 
    202     {
    203         Mutex::Autolock l(mLock);
    204         // Clear out private stream information
    205         for (StreamIterator s = mStreams.begin(); s != mStreams.end(); s++) {
    206             PrivateStreamInfo *privStream =
    207                     static_cast<PrivateStreamInfo*>((*s)->priv);
    208             delete privStream;
    209             (*s)->priv = NULL;
    210         }
    211         mStreams.clear();
    212         mReadoutThread.clear();
    213     }
    214 
    215     return EmulatedCamera3::closeCamera();
    216 }
    217 
    218 status_t EmulatedFakeCamera3::getCameraInfo(struct camera_info *info) {
    219     info->facing = mFacingBack ? CAMERA_FACING_BACK : CAMERA_FACING_FRONT;
    220     info->orientation = gEmulatedCameraFactory.getFakeCameraOrientation();
    221     return EmulatedCamera3::getCameraInfo(info);
    222 }
    223 
    224 /**
    225  * Camera3 interface methods
    226  */
    227 
    228 status_t EmulatedFakeCamera3::configureStreams(
    229         camera3_stream_configuration *streamList) {
    230     Mutex::Autolock l(mLock);
    231     if (mStatus != STATUS_OPEN && mStatus != STATUS_READY) {
    232         ALOGE("%s: Cannot configure streams in state %d",
    233                 __FUNCTION__, mStatus);
    234         return NO_INIT;
    235     }
    236 
    237     /**
    238      * Sanity-check the input list before dereferencing it (including in the
    239      * logging below).
    240      */
    241     if (streamList == NULL) {
    242         ALOGE("%s: NULL stream configuration", __FUNCTION__);
    243         return BAD_VALUE;
    244     }
    245     ALOGV("%s: %d streams", __FUNCTION__, streamList->num_streams);
    246 
    247     if (streamList->streams == NULL) {
    248         ALOGE("%s: NULL stream list", __FUNCTION__);
    249         return BAD_VALUE;
    250     }
    251 
    252     if (streamList->num_streams < 1) {
    253         ALOGE("%s: Bad number of streams requested: %d", __FUNCTION__,
    254                 streamList->num_streams);
    255         return BAD_VALUE;
    256     }
    257 
    258     camera3_stream_t *inputStream = NULL;
    259     for (size_t i = 0; i < streamList->num_streams; i++) {
    260         camera3_stream_t *newStream = streamList->streams[i];
    261 
    262         if (newStream == NULL) {
    263             ALOGE("%s: Stream index %zu was NULL",
    264                   __FUNCTION__, i);
    265             return BAD_VALUE;
    266         }
    267 
    268         ALOGV("%s: Stream %p (id %zu), type %d, usage 0x%x, format 0x%x "
    269               "width %d, height %d",
    270                 __FUNCTION__, newStream, i, newStream->stream_type,
    271                 newStream->usage,
    272                 newStream->format,
    273                 newStream->width,
    274                 newStream->height);
    275 
    276         if (newStream->stream_type == CAMERA3_STREAM_INPUT ||
    277             newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
    278             if (inputStream != NULL) {
    279 
    280                 ALOGE("%s: Multiple input streams requested!", __FUNCTION__);
    281                 return BAD_VALUE;
    282             }
    283             inputStream = newStream;
    284         }
    285 
    286         if (newStream->stream_type != CAMERA3_STREAM_INPUT) {
    287             if (newStream->rotation < CAMERA3_STREAM_ROTATION_0 ||
    288                 newStream->rotation > CAMERA3_STREAM_ROTATION_270) {
    289                 ALOGE("%s: Unsupported stream rotation 0x%x requested",
    290                       __FUNCTION__, newStream->rotation);
    291                 return BAD_VALUE;
    292             }
    293         }
    294 
    295         if (newStream->width <= 0 || newStream->width > mSensorWidth ||
    296             newStream->height <= 0 || newStream->height > mSensorHeight) {
    297             ALOGE("%s: Unsupported stream width 0x%x height 0x%x",
    298                   __FUNCTION__, newStream->width, newStream->height);
    299             return BAD_VALUE;
    300         }
    301 
    302         bool validFormat = false;
    303         for (size_t f = 0;
    304              f < sizeof(kAvailableFormats)/sizeof(kAvailableFormats[0]);
    305              f++) {
    306             if (newStream->format == kAvailableFormats[f]) {
    307                 validFormat = true;
    308                 break;
    309             }
    310         }
    311         if (!validFormat) {
    312             ALOGE("%s: Unsupported stream format 0x%x requested",
    313                     __FUNCTION__, newStream->format);
    314             return BAD_VALUE;
    315         }
    316     }
    317     mInputStream = inputStream;
    318 
    319     /**
    320      * Initially mark all existing streams as not alive
    321      */
    322     for (StreamIterator s = mStreams.begin(); s != mStreams.end(); ++s) {
    323         PrivateStreamInfo *privStream =
    324                 static_cast<PrivateStreamInfo*>((*s)->priv);
    325         privStream->alive = false;
    326     }
    327 
    328     /**
    329      * Find new streams and mark still-alive ones
    330      */
    331     for (size_t i = 0; i < streamList->num_streams; i++) {
    332         camera3_stream_t *newStream = streamList->streams[i];
    333         if (newStream->priv == NULL) {
    334             // New stream, construct info
    335             PrivateStreamInfo *privStream = new PrivateStreamInfo();
    336             privStream->alive = true;
    337 
    338             newStream->max_buffers = kMaxBufferCount;
    339             newStream->priv = privStream;
    340             mStreams.push_back(newStream);
    341         } else {
    342             // Existing stream, mark as still alive.
    343             PrivateStreamInfo *privStream =
    344                     static_cast<PrivateStreamInfo*>(newStream->priv);
    345             privStream->alive = true;
    346         }
    347         // Always update usage and max buffers
    348         newStream->max_buffers = kMaxBufferCount;
    349         switch (newStream->stream_type) {
    350             case CAMERA3_STREAM_OUTPUT:
    351                 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
    352                 break;
    353             case CAMERA3_STREAM_INPUT:
    354                 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
    355                 break;
    356             case CAMERA3_STREAM_BIDIRECTIONAL:
    357                 newStream->usage |= (GRALLOC_USAGE_HW_CAMERA_READ |
    358                         GRALLOC_USAGE_HW_CAMERA_WRITE);
    359                 break;
    360         }
    361         // Set the buffer format, in line with the gralloc implementation
    362         if (newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
    363             if (newStream->usage & GRALLOC_USAGE_HW_CAMERA_WRITE) {
    364                 if (newStream->usage & GRALLOC_USAGE_HW_TEXTURE) {
    365                     newStream->format = HAL_PIXEL_FORMAT_RGBA_8888;
    366                 }
    367                 else if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER) {
    368                     newStream->format = HAL_PIXEL_FORMAT_YCbCr_420_888;
    369                 }
    370                 else {
    371                     newStream->format = HAL_PIXEL_FORMAT_RGB_888;
    372                 }
    373             }
    374         }
    375     }
    376 
    377     /**
    378      * Reap the dead streams
    379      */
    380     for (StreamIterator s = mStreams.begin(); s != mStreams.end();) {
    381         PrivateStreamInfo *privStream =
    382                 static_cast<PrivateStreamInfo*>((*s)->priv);
    383         if (!privStream->alive) {
    384             (*s)->priv = NULL;
    385             delete privStream;
    386             s = mStreams.erase(s);
    387         } else {
    388             ++s;
    389         }
    390     }
    391 
    392     /**
    393      * Can't reuse settings across configure call
    394      */
    395     mPrevSettings.clear();
    396 
    397     return OK;
    398 }
    399 
    400 status_t EmulatedFakeCamera3::registerStreamBuffers(
    401         const camera3_stream_buffer_set *bufferSet) {
    402     ALOGV("%s: E", __FUNCTION__);
    403     Mutex::Autolock l(mLock);
    404 
    405     // Should not be called in HAL versions >= 3.2
    406 
    407     ALOGE("%s: Should not be invoked on new HALs!",
    408             __FUNCTION__);
    409     return NO_INIT;
    410 }
    411 
    412 const camera_metadata_t* EmulatedFakeCamera3::constructDefaultRequestSettings(
    413         int type) {
    414     ALOGV("%s: E", __FUNCTION__);
    415     Mutex::Autolock l(mLock);
    416 
    417     if (type < 0 || type >= CAMERA3_TEMPLATE_COUNT) {
    418         ALOGE("%s: Unknown request settings template: %d",
    419                 __FUNCTION__, type);
    420         return NULL;
    421     }
    422 
    423     if (!hasCapability(BACKWARD_COMPATIBLE) && type != CAMERA3_TEMPLATE_PREVIEW) {
    424         ALOGE("%s: Template %d not supported w/o BACKWARD_COMPATIBLE capability",
    425                 __FUNCTION__, type);
    426         return NULL;
    427     }
    428 
    429     /**
    430      * Cache is not just an optimization - pointer returned has to live at
    431      * least as long as the camera device instance does.
    432      */
    433     if (mDefaultTemplates[type] != NULL) {
    434         return mDefaultTemplates[type];
    435     }
    436 
    437     CameraMetadata settings;
    438 
    439     /** android.request */
    440 
    441     static const uint8_t metadataMode = ANDROID_REQUEST_METADATA_MODE_FULL;
    442     settings.update(ANDROID_REQUEST_METADATA_MODE, &metadataMode, 1);
    443 
    444     static const int32_t id = 0;
    445     settings.update(ANDROID_REQUEST_ID, &id, 1);
    446 
    447     static const int32_t frameCount = 0;
    448     settings.update(ANDROID_REQUEST_FRAME_COUNT, &frameCount, 1);
    449 
    450     /** android.lens */
    451 
    452     static const float focalLength = 5.0f;
    453     settings.update(ANDROID_LENS_FOCAL_LENGTH, &focalLength, 1);
    454 
    455     if (hasCapability(BACKWARD_COMPATIBLE)) {
    456         static const float focusDistance = 0;
    457         settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focusDistance, 1);
    458 
    459         static const float aperture = 2.8f;
    460         settings.update(ANDROID_LENS_APERTURE, &aperture, 1);
    461 
    462         static const float filterDensity = 0;
    463         settings.update(ANDROID_LENS_FILTER_DENSITY, &filterDensity, 1);
    464 
    465         static const uint8_t opticalStabilizationMode =
    466                 ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
    467         settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
    468                 &opticalStabilizationMode, 1);
    469 
    470         // FOCUS_RANGE set only in frame
    471     }
    472 
    473     /** android.sensor */
    474 
    475     if (hasCapability(MANUAL_SENSOR)) {
    476         static const int64_t exposureTime = 10 * MSEC;
    477         settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &exposureTime, 1);
    478 
    479         static const int64_t frameDuration = 33333333L; // 1/30 s
    480         settings.update(ANDROID_SENSOR_FRAME_DURATION, &frameDuration, 1);
    481 
    482         static const int32_t sensitivity = 100;
    483         settings.update(ANDROID_SENSOR_SENSITIVITY, &sensitivity, 1);
    484     }
    485 
    486     // TIMESTAMP set only in frame
    487 
    488     /** android.flash */
    489 
    490     if (hasCapability(BACKWARD_COMPATIBLE)) {
    491         static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
    492         settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
    493 
    494         static const uint8_t flashPower = 10;
    495         settings.update(ANDROID_FLASH_FIRING_POWER, &flashPower, 1);
    496 
    497         static const int64_t firingTime = 0;
    498         settings.update(ANDROID_FLASH_FIRING_TIME, &firingTime, 1);
    499     }
    500 
    501     /** Processing block modes */
    502     if (hasCapability(MANUAL_POST_PROCESSING)) {
    503         uint8_t hotPixelMode = 0;
    504         uint8_t demosaicMode = 0;
    505         uint8_t noiseMode = 0;
    506         uint8_t shadingMode = 0;
    507         uint8_t colorMode = 0;
    508         uint8_t tonemapMode = 0;
    509         uint8_t edgeMode = 0;
    510         switch (type) {
    511             case CAMERA3_TEMPLATE_STILL_CAPTURE:
    512                 // fall-through
    513             case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
    514                 // fall-through
    515             case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
    516                 hotPixelMode = ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY;
    517                 demosaicMode = ANDROID_DEMOSAIC_MODE_HIGH_QUALITY;
    518                 noiseMode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
    519                 shadingMode = ANDROID_SHADING_MODE_HIGH_QUALITY;
    520                 colorMode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY;
    521                 tonemapMode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
    522                 edgeMode = ANDROID_EDGE_MODE_HIGH_QUALITY;
    523                 break;
    524             case CAMERA3_TEMPLATE_PREVIEW:
    525                 // fall-through
    526             case CAMERA3_TEMPLATE_VIDEO_RECORD:
    527                 // fall-through
    528             default:
    529                 hotPixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
    530                 demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
    531                 noiseMode = ANDROID_NOISE_REDUCTION_MODE_FAST;
    532                 shadingMode = ANDROID_SHADING_MODE_FAST;
    533                 colorMode = ANDROID_COLOR_CORRECTION_MODE_FAST;
    534                 tonemapMode = ANDROID_TONEMAP_MODE_FAST;
    535                 edgeMode = ANDROID_EDGE_MODE_FAST;
    536                 break;
    537         }
    538         settings.update(ANDROID_HOT_PIXEL_MODE, &hotPixelMode, 1);
    539         settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
    540         settings.update(ANDROID_NOISE_REDUCTION_MODE, &noiseMode, 1);
    541         settings.update(ANDROID_SHADING_MODE, &shadingMode, 1);
    542         settings.update(ANDROID_COLOR_CORRECTION_MODE, &colorMode, 1);
    543         settings.update(ANDROID_TONEMAP_MODE, &tonemapMode, 1);
    544         settings.update(ANDROID_EDGE_MODE, &edgeMode, 1);
    545     }
    546 
    547     /** android.colorCorrection */
    548 
    549     if (hasCapability(MANUAL_POST_PROCESSING)) {
    550         static const camera_metadata_rational colorTransform[9] = {
    551             {1,1}, {0,1}, {0,1},
    552             {0,1}, {1,1}, {0,1},
    553             {0,1}, {0,1}, {1,1}
    554         };
    555         settings.update(ANDROID_COLOR_CORRECTION_TRANSFORM, colorTransform, 9);
    556 
    557         static const float colorGains[4] = {
    558             1.0f, 1.0f, 1.0f, 1.0f
    559         };
    560         settings.update(ANDROID_COLOR_CORRECTION_GAINS, colorGains, 4);
    561     }
    562 
    563     /** android.tonemap */
    564 
    565     if (hasCapability(MANUAL_POST_PROCESSING)) {
    566         static const float tonemapCurve[4] = {
    567             0.f, 0.f,
    568             1.f, 1.f
    569         };
    570         settings.update(ANDROID_TONEMAP_CURVE_RED, tonemapCurve, 4);
    571         settings.update(ANDROID_TONEMAP_CURVE_GREEN, tonemapCurve, 4);
    572         settings.update(ANDROID_TONEMAP_CURVE_BLUE, tonemapCurve, 4);
    573     }
    574 
    575     /** android.scaler */
    576     if (hasCapability(BACKWARD_COMPATIBLE)) {
    577         static const int32_t cropRegion[4] = {
    578             0, 0, mSensorWidth, mSensorHeight
    579         };
    580         settings.update(ANDROID_SCALER_CROP_REGION, cropRegion, 4);
    581     }
    582 
    583     /** android.jpeg */
    584     if (hasCapability(BACKWARD_COMPATIBLE)) {
    585         static const uint8_t jpegQuality = 80;
    586         settings.update(ANDROID_JPEG_QUALITY, &jpegQuality, 1);
    587 
    588         static const int32_t thumbnailSize[2] = {
    589             320, 240
    590         };
    591         settings.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSize, 2);
    592 
    593         static const uint8_t thumbnailQuality = 80;
    594         settings.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &thumbnailQuality, 1);
    595 
    596         static const double gpsCoordinates[3] = {
    597             0, 0, 0
    598         };
    599         settings.update(ANDROID_JPEG_GPS_COORDINATES, gpsCoordinates, 3);
    600 
    601         static const uint8_t gpsProcessingMethod[32] = "None";
    602         settings.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, gpsProcessingMethod, 32);
    603 
    604         static const int64_t gpsTimestamp = 0;
    605         settings.update(ANDROID_JPEG_GPS_TIMESTAMP, &gpsTimestamp, 1);
    606 
    607         static const int32_t jpegOrientation = 0;
    608         settings.update(ANDROID_JPEG_ORIENTATION, &jpegOrientation, 1);
    609     }
    610 
    611     /** android.stats */
    612 
    613     if (hasCapability(BACKWARD_COMPATIBLE)) {
    614         static const uint8_t faceDetectMode =
    615                 ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
    616         settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
    617 
    618         static const uint8_t hotPixelMapMode =
    619                 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
    620         settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
    621     }
    622 
    623     // faceRectangles, faceScores, faceLandmarks, faceIds, histogram,
    624     // sharpnessMap only in frames
    625 
    626     /** android.control */
    627 
    628     uint8_t controlIntent = 0;
    629     switch (type) {
    630       case CAMERA3_TEMPLATE_PREVIEW:
    631         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
    632         break;
    633       case CAMERA3_TEMPLATE_STILL_CAPTURE:
    634         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
    635         break;
    636       case CAMERA3_TEMPLATE_VIDEO_RECORD:
    637         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
    638         break;
    639       case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
    640         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
    641         break;
    642       case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
    643         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
    644         break;
    645       case CAMERA3_TEMPLATE_MANUAL:
    646         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
    647         break;
    648       default:
    649         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
    650         break;
    651     }
    652     settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
    653 
    654     const uint8_t controlMode = (type == CAMERA3_TEMPLATE_MANUAL) ?
    655             ANDROID_CONTROL_MODE_OFF :
    656             ANDROID_CONTROL_MODE_AUTO;
    657     settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
    658 
    659     int32_t aeTargetFpsRange[2] = {
    660         15, 30
    661     };
    662     if (type == CAMERA3_TEMPLATE_VIDEO_RECORD || type == CAMERA3_TEMPLATE_VIDEO_SNAPSHOT) {
    663         aeTargetFpsRange[0] = 30;
    664     }
    665     settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, aeTargetFpsRange, 2);
    666 
    667     if (hasCapability(BACKWARD_COMPATIBLE)) {
    668 
    669         static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
    670         settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
    671 
    672         static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
    673         settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
    674 
    675         const uint8_t aeMode = (type == CAMERA3_TEMPLATE_MANUAL) ?
    676                 ANDROID_CONTROL_AE_MODE_OFF :
    677                 ANDROID_CONTROL_AE_MODE_ON;
    678         settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
    679 
    680         static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
    681         settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
    682 
    683         static const int32_t controlRegions[5] = {
    684             0, 0, 0, 0, 0
    685         };
    686         settings.update(ANDROID_CONTROL_AE_REGIONS, controlRegions, 5);
    687 
    688         static const int32_t aeExpCompensation = 0;
    689         settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &aeExpCompensation, 1);
    690 
    691 
    692         static const uint8_t aeAntibandingMode =
    693                 ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
    694         settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &aeAntibandingMode, 1);
    695 
    696         static const uint8_t aePrecaptureTrigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
    697         settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &aePrecaptureTrigger, 1);
    698 
    699         const uint8_t awbMode = (type == CAMERA3_TEMPLATE_MANUAL) ?
    700                 ANDROID_CONTROL_AWB_MODE_OFF :
    701                 ANDROID_CONTROL_AWB_MODE_AUTO;
    702         settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
    703 
    704         static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
    705         settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
    706 
    707         uint8_t afMode = 0;
    708 
    709         if (mFacingBack) {
    710             switch (type) {
    711                 case CAMERA3_TEMPLATE_PREVIEW:
    712                     afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
    713                     break;
    714                 case CAMERA3_TEMPLATE_STILL_CAPTURE:
    715                     afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
    716                     break;
    717                 case CAMERA3_TEMPLATE_VIDEO_RECORD:
    718                     afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
    719                     break;
    720                 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
    721                     afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
    722                     break;
    723                 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
    724                     afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
    725                     break;
    726                 case CAMERA3_TEMPLATE_MANUAL:
    727                     afMode = ANDROID_CONTROL_AF_MODE_OFF;
    728                     break;
    729                 default:
    730                     afMode = ANDROID_CONTROL_AF_MODE_AUTO;
    731                     break;
    732             }
    733         } else {
    734             afMode = ANDROID_CONTROL_AF_MODE_OFF;
    735         }
    736         settings.update(ANDROID_CONTROL_AF_MODE, &afMode, 1);
    737 
    738         settings.update(ANDROID_CONTROL_AF_REGIONS, controlRegions, 5);
    739 
    740         static const uint8_t afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
    741         settings.update(ANDROID_CONTROL_AF_TRIGGER, &afTrigger, 1);
    742 
    743         static const uint8_t vstabMode =
    744                 ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
    745         settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vstabMode, 1);
    746 
    747         static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
    748         settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
    749 
    750         static const uint8_t lensShadingMapMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
    751         settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMapMode, 1);
    752 
    753         uint8_t aberrationMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
    754         if (type == CAMERA3_TEMPLATE_STILL_CAPTURE) {
    755             aberrationMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
    756         }
    757         settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &aberrationMode, 1);
    758 
    759         static const int32_t testPatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
    760         settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testPatternMode, 1);
    761     }
    762 
    763     mDefaultTemplates[type] = settings.release();
    764 
    765     return mDefaultTemplates[type];
    766 }
    767 
    768 status_t EmulatedFakeCamera3::processCaptureRequest(
    769         camera3_capture_request *request) {
    770 
    771     Mutex::Autolock l(mLock);
    772     status_t res;
    773 
    774     /** Validation */
    775 
    776     if (mStatus < STATUS_READY) {
    777         ALOGE("%s: Can't submit capture requests in state %d", __FUNCTION__,
    778                 mStatus);
    779         return INVALID_OPERATION;
    780     }
    781 
    782     if (request == NULL) {
    783         ALOGE("%s: NULL request!", __FUNCTION__);
    784         return BAD_VALUE;
    785     }
    786 
    787     uint32_t frameNumber = request->frame_number;
    788 
    789     if (request->settings == NULL && mPrevSettings.isEmpty()) {
    790         ALOGE("%s: Request %d: NULL settings for first request after "
    791                 "configureStreams()", __FUNCTION__, frameNumber);
    792         return BAD_VALUE;
    793     }
    794 
    795     if (request->input_buffer != NULL &&
    796             request->input_buffer->stream != mInputStream) {
    797         ALOGE("%s: Request %d: Input buffer not from input stream!",
    798                 __FUNCTION__, frameNumber);
    799         ALOGV("%s: Bad stream %p, expected: %p",
    800               __FUNCTION__, request->input_buffer->stream,
    801               mInputStream);
    802         ALOGV("%s: Bad stream type %d, expected stream type %d",
    803               __FUNCTION__, request->input_buffer->stream->stream_type,
    804               mInputStream ? mInputStream->stream_type : -1);
    805 
    806         return BAD_VALUE;
    807     }
    808 
    809     if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
    810         ALOGE("%s: Request %d: No output buffers provided!",
    811                 __FUNCTION__, frameNumber);
    812         return BAD_VALUE;
    813     }
    814 
    815     // Validate all buffers, starting with input buffer if it's given
    816 
    817     ssize_t idx;
    818     const camera3_stream_buffer_t *b;
    819     if (request->input_buffer != NULL) {
    820         idx = -1;
    821         b = request->input_buffer;
    822     } else {
    823         idx = 0;
    824         b = request->output_buffers;
    825     }
    826     do {
    827         PrivateStreamInfo *priv =
    828                 static_cast<PrivateStreamInfo*>(b->stream->priv);
    829         if (priv == NULL) {
    830             ALOGE("%s: Request %d: Buffer %zd: Unconfigured stream!",
    831                     __FUNCTION__, frameNumber, idx);
    832             return BAD_VALUE;
    833         }
    834         if (!priv->alive) {
    835             ALOGE("%s: Request %d: Buffer %zd: Dead stream!",
    836                     __FUNCTION__, frameNumber, idx);
    837             return BAD_VALUE;
    838         }
    839         if (b->status != CAMERA3_BUFFER_STATUS_OK) {
    840             ALOGE("%s: Request %d: Buffer %zd: Status not OK!",
    841                     __FUNCTION__, frameNumber, idx);
    842             return BAD_VALUE;
    843         }
    844         if (b->release_fence != -1) {
    845             ALOGE("%s: Request %d: Buffer %zd: Has a release fence!",
    846                     __FUNCTION__, frameNumber, idx);
    847             return BAD_VALUE;
    848         }
    849         if (b->buffer == NULL) {
    850             ALOGE("%s: Request %d: Buffer %zd: NULL buffer handle!",
    851                     __FUNCTION__, frameNumber, idx);
    852             return BAD_VALUE;
    853         }
    854         idx++;
    855         b = &(request->output_buffers[idx]);
    856     } while (idx < (ssize_t)request->num_output_buffers);
    857 
    858     // TODO: Validate settings parameters
    859 
    860     /**
    861      * Start processing this request
    862      */
    863 
    864     mStatus = STATUS_ACTIVE;
    865 
    866     CameraMetadata settings;
    867 
    868     if (request->settings == NULL) {
    869         settings.acquire(mPrevSettings);
    870     } else {
    871         settings = request->settings;
    872     }
    873 
    874     res = process3A(settings);
    875     if (res != OK) {
    876         return res;
    877     }
    878 
    879     // TODO: Handle reprocessing
    880 
    881     /**
    882      * Get ready for sensor config
    883      */
    884 
    885     nsecs_t  exposureTime;
    886     nsecs_t  frameDuration;
    887     uint32_t sensitivity;
    888     bool     needJpeg = false;
    889     camera_metadata_entry_t entry;
    890     entry = settings.find(ANDROID_SENSOR_EXPOSURE_TIME);
    891     exposureTime = (entry.count > 0) ? entry.data.i64[0] : Sensor::kExposureTimeRange[0];
    892     entry = settings.find(ANDROID_SENSOR_FRAME_DURATION);
    893     frameDuration = (entry.count > 0)? entry.data.i64[0] : Sensor::kFrameDurationRange[0];
    894     entry = settings.find(ANDROID_SENSOR_SENSITIVITY);
    895     sensitivity = (entry.count > 0) ? entry.data.i32[0] : Sensor::kSensitivityRange[0];
    896 
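            // The sensor cannot expose a frame for longer than the frame lasts, so if
            // the requested exposure exceeds the frame duration, stretch the frame to
            // cover the exposure plus the sensor's minimum vertical blanking interval.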
    897     if (exposureTime > frameDuration) {
    898         frameDuration = exposureTime + Sensor::kMinVerticalBlank;
    899         settings.update(ANDROID_SENSOR_FRAME_DURATION, &frameDuration, 1);
    900     }
    901 
    902     Buffers *sensorBuffers = new Buffers();
    903     HalBufferVector *buffers = new HalBufferVector();
    904 
    905     sensorBuffers->setCapacity(request->num_output_buffers);
    906     buffers->setCapacity(request->num_output_buffers);
    907 
    908     // Process all the buffers we got for output, constructing internal buffer
    909     // structures for them, and locking them for writing.
    910     for (size_t i = 0; i < request->num_output_buffers; i++) {
    911         const camera3_stream_buffer &srcBuf = request->output_buffers[i];
    912         StreamBuffer destBuf;
    913         destBuf.streamId = kGenericStreamId;
    914         destBuf.width    = srcBuf.stream->width;
    915         destBuf.height   = srcBuf.stream->height;
    916         // in line with the goldfish gralloc implementation
    917         if (srcBuf.stream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
    918             if (srcBuf.stream->usage & GRALLOC_USAGE_HW_CAMERA_WRITE) {
    919                 if (srcBuf.stream->usage & GRALLOC_USAGE_HW_TEXTURE) {
    920                     destBuf.format = HAL_PIXEL_FORMAT_RGBA_8888;
    921                 }
    922                 else if (srcBuf.stream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER) {
    923                     destBuf.format = HAL_PIXEL_FORMAT_YCbCr_420_888;
    924                 }
    925                 else if ((srcBuf.stream->usage & GRALLOC_USAGE_HW_CAMERA_MASK)
    926                          == GRALLOC_USAGE_HW_CAMERA_ZSL) {
    927                     destBuf.format = HAL_PIXEL_FORMAT_RGB_888;
    928                 }
    929             }
    930         }
    931         else {
    932             destBuf.format = srcBuf.stream->format;
    933         }
    934         destBuf.stride   = srcBuf.stream->width;
    935         destBuf.dataSpace = srcBuf.stream->data_space;
    936         destBuf.buffer   = srcBuf.buffer;
    937 
    938         if (destBuf.format == HAL_PIXEL_FORMAT_BLOB) {
    939             needJpeg = true;
    940         }
    941 
    942         // Wait on fence
    943         sp<Fence> bufferAcquireFence = new Fence(srcBuf.acquire_fence);
    944         res = bufferAcquireFence->wait(kFenceTimeoutMs);
    945         if (res == TIMED_OUT) {
    946             ALOGE("%s: Request %d: Buffer %zu: Fence timed out after %d ms",
    947                     __FUNCTION__, frameNumber, i, kFenceTimeoutMs);
    948         }
    949         if (res == OK) {
    950             // Lock buffer for writing
    951             if (srcBuf.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
    952                 if (destBuf.format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
    953                     android_ycbcr ycbcr = android_ycbcr();
    954                     res = GrallocModule::getInstance().lock_ycbcr(
    955                         *(destBuf.buffer),
    956                         GRALLOC_USAGE_HW_CAMERA_WRITE,
    957                         0, 0, destBuf.width, destBuf.height,
    958                         &ycbcr);
    959                     // This is only valid because we know that emulator's
    960                     // YCbCr_420_888 is really contiguous NV21 under the hood
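                            // (i.e. the Y plane is immediately followed by the
                            // interleaved V/U plane, so the Y-plane pointer is also
                            // the base address of the whole image)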
    961                     destBuf.img = static_cast<uint8_t*>(ycbcr.y);
    962                 } else {
    963                     ALOGE("Unexpected private format for flexible YUV: 0x%x",
    964                             destBuf.format);
    965                     res = INVALID_OPERATION;
    966                 }
    967             } else {
    968                 res = GrallocModule::getInstance().lock(
    969                     *(destBuf.buffer),
    970                     GRALLOC_USAGE_HW_CAMERA_WRITE,
    971                     0, 0, destBuf.width, destBuf.height,
    972                     (void**)&(destBuf.img));
    973 
    974             }
    975             if (res != OK) {
    976                 ALOGE("%s: Request %d: Buffer %zu: Unable to lock buffer",
    977                         __FUNCTION__, frameNumber, i);
    978             } else {
    979                 ALOGV("%s, stream format 0x%x width %d height %d buffer 0x%p img 0x%p",
    980                   __FUNCTION__, destBuf.format, destBuf.width, destBuf.height,
    981                   destBuf.buffer, destBuf.img);
    982             }
    983         }
    984 
    985         if (res != OK) {
    986             // Either waiting or locking failed. Unlock locked buffers and bail
    987             // out.
    988             for (size_t j = 0; j < i; j++) {
    989                 GrallocModule::getInstance().unlock(
    990                         *(request->output_buffers[j].buffer));
    991             }
    992             delete sensorBuffers;
    993             delete buffers;
    994             return NO_INIT;
    995         }
    996 
    997         sensorBuffers->push_back(destBuf);
    998         buffers->push_back(srcBuf);
    999     }
   1000 
   1001     /**
   1002      * Wait for JPEG compressor to not be busy, if needed
   1003      */
   1004     if (needJpeg) {
   1005         bool ready = mJpegCompressor->waitForDone(kJpegTimeoutNs);
   1006         if (!ready) {
   1007             ALOGE("%s: Timeout waiting for JPEG compression to complete!",
   1008                     __FUNCTION__);
   1009             return NO_INIT;
   1010         }
   1011         res = mJpegCompressor->reserve();
   1012         if (res != OK) {
   1013             ALOGE("%s: Error managing JPEG compressor resources, can't reserve it!", __FUNCTION__);
   1014             return NO_INIT;
   1015         }
   1016     }
   1017 
   1018     /**
   1019      * Wait until the in-flight queue has room
   1020      */
   1021     res = mReadoutThread->waitForReadout();
   1022     if (res != OK) {
   1023         ALOGE("%s: Timeout waiting for previous requests to complete!",
   1024                 __FUNCTION__);
   1025         return NO_INIT;
   1026     }
   1027 
   1028     /**
   1029      * Wait until sensor's ready. This waits for lengthy amounts of time with
   1030      * mLock held, but the interface spec is that no other calls may be made to
   1031      * the HAL by the framework while process_capture_request is happening.
   1032      */
   1033     int syncTimeoutCount = 0;
   1034     while(!mSensor->waitForVSync(kSyncWaitTimeout)) {
   1035         if (mStatus == STATUS_ERROR) {
   1036             return NO_INIT;
   1037         }
   1038         if (syncTimeoutCount == kMaxSyncTimeoutCount) {
   1039             ALOGE("%s: Request %d: Sensor sync timed out after %" PRId64 " ms",
   1040                     __FUNCTION__, frameNumber,
   1041                     kSyncWaitTimeout * kMaxSyncTimeoutCount / 1000000);
   1042             return NO_INIT;
   1043         }
   1044         syncTimeoutCount++;
   1045     }
   1046 
   1047     /**
   1048      * Configure sensor and queue up the request to the readout thread
   1049      */
   1050     mSensor->setExposureTime(exposureTime);
   1051     mSensor->setFrameDuration(frameDuration);
   1052     mSensor->setSensitivity(sensitivity);
   1053     mSensor->setDestinationBuffers(sensorBuffers);
   1054     mSensor->setFrameNumber(request->frame_number);
   1055 
   1056     ReadoutThread::Request r;
   1057     r.frameNumber = request->frame_number;
   1058     r.settings = settings;
   1059     r.sensorBuffers = sensorBuffers;
   1060     r.buffers = buffers;
   1061 
   1062     mReadoutThread->queueCaptureRequest(r);
   1063     ALOGVV("%s: Queued frame %d", __FUNCTION__, request->frame_number);
   1064 
   1065     // Cache the settings for next time
   1066     mPrevSettings.acquire(settings);
   1067 
   1068     return OK;
   1069 }
   1070 
   1071 status_t EmulatedFakeCamera3::flush() {
   1072     ALOGW("%s: Not implemented; ignored", __FUNCTION__);
   1073     return OK;
   1074 }
   1075 
   1076 /** Debug methods */
   1077 
   1078 void EmulatedFakeCamera3::dump(int fd) {
   1079 
   1080 }
   1081 
   1082 /**
   1083  * Private methods
   1084  */
   1085 
   1086 status_t EmulatedFakeCamera3::getCameraCapabilities() {
   1087 
   1088     const char *key = mFacingBack ? "qemu.sf.back_camera_caps" : "qemu.sf.front_camera_caps";
   1089 
   1090     /* Defined by 'qemu.sf.*_camera_caps' boot property: if the
   1091      * property doesn't exist, it is assumed to list FULL. */
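            // For example (hypothetical property value), "FULL_LEVEL MANUAL_SENSOR"
            // would be split on spaces/commas below and matched case-insensitively
            // against sAvailableCapabilitiesStrings, adding two capabilities.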
   1092     char prop[PROPERTY_VALUE_MAX];
   1093     if (property_get(key, prop, NULL) > 0) {
   1094         char *saveptr = nullptr;
   1095         char *cap = strtok_r(prop, " ,", &saveptr);
   1096         while (cap != NULL) {
   1097             for (int i = 0; i < NUM_CAPABILITIES; i++) {
   1098                 if (!strcasecmp(cap, sAvailableCapabilitiesStrings[i])) {
   1099                     mCapabilities.add(static_cast<AvailableCapabilities>(i));
   1100                     break;
   1101                 }
   1102             }
   1103             cap = strtok_r(NULL, " ,", &saveptr);
   1104         }
   1105         if (mCapabilities.size() == 0) {
   1106             ALOGE("%s had no valid capabilities: %s", key, prop);
   1107         }
   1108     }
   1109     // Default to FULL_LEVEL plus RAW if nothing is defined
   1110     if (mCapabilities.size() == 0) {
   1111         mCapabilities.add(FULL_LEVEL);
   1112         // "RAW" causes several CTS failures: b/68723953, disable it so far.
   1113         // TODO: add "RAW" back when all failures are resolved.
   1114         //mCapabilities.add(RAW);
   1115     }
   1116 
   1117     // Add level-based caps
   1118     if (hasCapability(FULL_LEVEL)) {
   1119         mCapabilities.add(BURST_CAPTURE);
   1120         mCapabilities.add(READ_SENSOR_SETTINGS);
   1121         mCapabilities.add(MANUAL_SENSOR);
   1122         mCapabilities.add(MANUAL_POST_PROCESSING);
   1123     }
   1124 
   1125     // Backwards-compatible is required for most other caps
   1126     // Not required for DEPTH_OUTPUT, though.
   1127     if (hasCapability(BURST_CAPTURE) ||
   1128             hasCapability(READ_SENSOR_SETTINGS) ||
   1129             hasCapability(RAW) ||
   1130             hasCapability(MANUAL_SENSOR) ||
   1131             hasCapability(MANUAL_POST_PROCESSING) ||
   1132             hasCapability(PRIVATE_REPROCESSING) ||
   1133             hasCapability(YUV_REPROCESSING) ||
   1134             hasCapability(CONSTRAINED_HIGH_SPEED_VIDEO)) {
   1135         mCapabilities.add(BACKWARD_COMPATIBLE);
   1136     }
   1137 
   1138     ALOGI("Camera %d capabilities:", mCameraID);
   1139     for (size_t i = 0; i < mCapabilities.size(); i++) {
   1140         ALOGI("  %s", sAvailableCapabilitiesStrings[mCapabilities[i]]);
   1141     }
   1142 
   1143     return OK;
   1144 }
   1145 
   1146 bool EmulatedFakeCamera3::hasCapability(AvailableCapabilities cap) {
   1147     ssize_t idx = mCapabilities.indexOf(cap);
   1148     return idx >= 0;
   1149 }
   1150 
   1151 status_t EmulatedFakeCamera3::constructStaticInfo() {
   1152 
   1153     CameraMetadata info;
   1154     Vector<int32_t> availableCharacteristicsKeys;
   1155     status_t res;
   1156 
   1157     // Find max width/height
   1158     int32_t width = 0, height = 0;
   1159     size_t rawSizeCount = sizeof(kAvailableRawSizes)/sizeof(kAvailableRawSizes[0]);
   1160     for (size_t index = 0; index + 1 < rawSizeCount; index += 2) {
   1161         if (width <= kAvailableRawSizes[index] &&
   1162             height <= kAvailableRawSizes[index+1]) {
   1163             width = kAvailableRawSizes[index];
   1164             height = kAvailableRawSizes[index+1];
   1165         }
   1166     }
   1167 
   1168     if (width < 640 || height < 480) {
   1169         width = 640;
   1170         height = 480;
   1171     }
   1172     mSensorWidth = width;
   1173     mSensorHeight = height;
   1174 
   1175 #define ADD_STATIC_ENTRY(name, varptr, count) \
   1176         availableCharacteristicsKeys.add(name);   \
   1177         res = info.update(name, varptr, count); \
   1178         if (res != OK) return res
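        // Note: besides updating 'info', this macro records each key in
        // availableCharacteristicsKeys (presumably so the set of available
        // characteristics keys can be published later) and returns early from
        // constructStaticInfo() on error. It expands to multiple statements, so it
        // must only be used as a full statement, as it is throughout this function.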
   1179 
   1180     // android.sensor
   1181 
   1182     if (hasCapability(MANUAL_SENSOR)) {
   1183 
   1184         ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
   1185                 Sensor::kExposureTimeRange, 2);
   1186 
   1187         ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
   1188                 &Sensor::kFrameDurationRange[1], 1);
   1189 
   1190         ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
   1191                 Sensor::kSensitivityRange,
   1192                 sizeof(Sensor::kSensitivityRange)
   1193                 /sizeof(int32_t));
   1194 
   1195         ADD_STATIC_ENTRY(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
   1196                 &Sensor::kSensitivityRange[1], 1);
   1197     }
   1198 
   1199     static const uint8_t sensorColorFilterArrangement =
   1200         ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB;
   1201     ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
   1202             &sensorColorFilterArrangement, 1);
   1203 
   1204     static const float sensorPhysicalSize[2] = {3.20f, 2.40f}; // mm
   1205     ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
   1206             sensorPhysicalSize, 2);
   1207 
   1208     const int32_t pixelArray[] = {mSensorWidth, mSensorHeight};
   1209     ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
   1210             pixelArray, 2);
   1211     const int32_t activeArray[] = {0, 0, mSensorWidth, mSensorHeight};
   1212     ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
   1213             activeArray, 4);
   1214 
   1215     static const int32_t orientation = 90; // Aligned with 'long edge'
   1216     ADD_STATIC_ENTRY(ANDROID_SENSOR_ORIENTATION, &orientation, 1);
   1217 
   1218     static const uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME;
   1219     ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE, &timestampSource, 1);
   1220 
   1221     if (hasCapability(RAW) || hasCapability(MANUAL_SENSOR)) {
   1222         ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_WHITE_LEVEL,
   1223                 (int32_t*)&Sensor::kMaxRawValue, 1);
   1224 
   1225         static const int32_t blackLevelPattern[4] = {
   1226             (int32_t)Sensor::kBlackLevel, (int32_t)Sensor::kBlackLevel,
   1227             (int32_t)Sensor::kBlackLevel, (int32_t)Sensor::kBlackLevel
   1228         };
   1229         ADD_STATIC_ENTRY(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
   1230                 blackLevelPattern, sizeof(blackLevelPattern)/sizeof(int32_t));
   1231     }
   1232 
   1233     if (hasCapability(RAW)) {
   1234         ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
   1235                 &Sensor::kColorFilterArrangement, 1);
   1236     }
   1237 
   1238     if (hasCapability(BACKWARD_COMPATIBLE)) {
   1239         static const int32_t availableTestPatternModes[] = {
   1240             ANDROID_SENSOR_TEST_PATTERN_MODE_OFF
   1241         };
   1242         ADD_STATIC_ENTRY(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
   1243                 availableTestPatternModes, sizeof(availableTestPatternModes)/sizeof(int32_t));
   1244     }
   1245 
   1246     // android.lens
   1247 
   1248     static const float focalLength = 5.0f; // mm
   1249     ADD_STATIC_ENTRY(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
   1250             &focalLength, 1);
   1251 
   1252     if (hasCapability(BACKWARD_COMPATIBLE)) {
   1253         // 5 cm min focus distance for back camera, infinity (fixed focus) for front
   1254         const float minFocusDistance = mFacingBack ? 1.0/0.05 : 0.0;
   1255         ADD_STATIC_ENTRY(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
   1256                 &minFocusDistance, 1);
   1257 
   1258         // 5 m hyperfocal distance for back camera, infinity (fixed focus) for front
   1259         const float hyperFocalDistance = mFacingBack ? 1.0/5.0 : 0.0;
   1260         ADD_STATIC_ENTRY(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
   1261                 &hyperFocalDistance, 1);
   1262 
   1263         static const float aperture = 2.8f;
   1264         ADD_STATIC_ENTRY(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
   1265                 &aperture, 1);
   1266         static const float filterDensity = 0;
   1267         ADD_STATIC_ENTRY(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
   1268                 &filterDensity, 1);
   1269         static const uint8_t availableOpticalStabilization =
   1270                 ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
   1271         ADD_STATIC_ENTRY(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
   1272                 &availableOpticalStabilization, 1);
   1273 
   1274         static const int32_t lensShadingMapSize[] = {1, 1};
   1275         ADD_STATIC_ENTRY(ANDROID_LENS_INFO_SHADING_MAP_SIZE, lensShadingMapSize,
   1276                 sizeof(lensShadingMapSize)/sizeof(int32_t));
   1277 
   1278         static const uint8_t lensFocusCalibration =
   1279                 ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE;
   1280         ADD_STATIC_ENTRY(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION, &lensFocusCalibration, 1);
   1281     }
   1282 
   1283     if (hasCapability(DEPTH_OUTPUT)) {
   1284         // These could be included without the DEPTH capability as well, but they
   1285         // are kept conditional on it here to vary the test coverage
   1286 
   1287         // 90 degree rotation to align with long edge of a phone device that's by default portrait
   1288         static const float qO[] = { 0.707107f, 0.f, 0.f, 0.707107f};
   1289 
   1290         // Either a 180-degree rotation for back-facing, or no rotation for front-facing
   1291         const float qF[] = {0, (mFacingBack ? 1.f : 0.f), 0, (mFacingBack ? 0.f : 1.f)};
   1292 
   1293         // Quaternion product, orientation change then facing
   1294         const float lensPoseRotation[] = {qO[0]*qF[0] - qO[1]*qF[1] - qO[2]*qF[2] - qO[3]*qF[3],
   1295                                           qO[0]*qF[1] + qO[1]*qF[0] + qO[2]*qF[3] - qO[3]*qF[2],
   1296                                           qO[0]*qF[2] + qO[2]*qF[0] + qO[1]*qF[3] - qO[3]*qF[1],
   1297                                           qO[0]*qF[3] + qO[3]*qF[0] + qO[1]*qF[2] - qO[2]*qF[1]};
   1298 
   1299         ADD_STATIC_ENTRY(ANDROID_LENS_POSE_ROTATION, lensPoseRotation,
   1300                 sizeof(lensPoseRotation)/sizeof(float));
   1301 
   1302         // Only one camera facing each way, so 0 translation needed to the center of the 'main'
   1303         // camera
   1304         static const float lensPoseTranslation[] = {0.f, 0.f, 0.f};
   1305 
   1306         ADD_STATIC_ENTRY(ANDROID_LENS_POSE_TRANSLATION, lensPoseTranslation,
   1307                 sizeof(lensPoseTranslation)/sizeof(float));
   1308 
    1309         // Intrinsics are 'ideal': (f_x, f_y, c_x, c_y, s) match the focal length and active array size
   1310         float f_x = focalLength * mSensorWidth / sensorPhysicalSize[0];
   1311         float f_y = focalLength * mSensorHeight / sensorPhysicalSize[1];
   1312         float c_x = mSensorWidth / 2.f;
   1313         float c_y = mSensorHeight / 2.f;
   1314         float s = 0.f;
   1315         const float lensIntrinsics[] = { f_x, f_y, c_x, c_y, s };
   1316 
   1317         ADD_STATIC_ENTRY(ANDROID_LENS_INTRINSIC_CALIBRATION, lensIntrinsics,
   1318                 sizeof(lensIntrinsics)/sizeof(float));
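                 // Editorial note (not part of the original HAL code): these five values
                 // are the standard pinhole-camera intrinsics, i.e. the calibration matrix
                 //
                 //         | f_x   s   c_x |
                 //     K = |  0   f_y  c_y |
                 //         |  0    0    1  |
                 //
                 // with the focal lengths converted from millimeters to pixels by scaling
                 // with (active array size / physical sensor size), as the code above does.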
   1319 
   1320         // No radial or tangential distortion
   1321 
   1322         float lensRadialDistortion[] = {1.0f, 0.f, 0.f, 0.f, 0.f, 0.f};
   1323 
   1324         ADD_STATIC_ENTRY(ANDROID_LENS_RADIAL_DISTORTION, lensRadialDistortion,
   1325                 sizeof(lensRadialDistortion)/sizeof(float));
   1326 
   1327     }
   1328 
   1329 
    1330     const uint8_t lensFacing = mFacingBack ?
   1331             ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
   1332     ADD_STATIC_ENTRY(ANDROID_LENS_FACING, &lensFacing, 1);
   1333 
   1334     // android.flash
   1335 
   1336     static const uint8_t flashAvailable = 0;
   1337     ADD_STATIC_ENTRY(ANDROID_FLASH_INFO_AVAILABLE, &flashAvailable, 1);
   1338 
   1339     // android.hotPixel
   1340 
   1341     if (hasCapability(MANUAL_POST_PROCESSING)) {
   1342         static const uint8_t availableHotPixelModes[] = {
   1343             ANDROID_HOT_PIXEL_MODE_FAST, ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY
   1344         };
   1345         ADD_STATIC_ENTRY(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
   1346                 availableHotPixelModes, sizeof(availableHotPixelModes));
   1347     }
   1348 
   1349     // android.tonemap
   1350 
   1351     if (hasCapability(MANUAL_POST_PROCESSING)) {
   1352         static const int32_t tonemapCurvePoints = 128;
   1353         ADD_STATIC_ENTRY(ANDROID_TONEMAP_MAX_CURVE_POINTS, &tonemapCurvePoints, 1);
   1354 
   1355         static const uint8_t availableToneMapModes[] = {
   1356             ANDROID_TONEMAP_MODE_CONTRAST_CURVE,  ANDROID_TONEMAP_MODE_FAST,
   1357             ANDROID_TONEMAP_MODE_HIGH_QUALITY
   1358         };
   1359         ADD_STATIC_ENTRY(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES, availableToneMapModes,
   1360                 sizeof(availableToneMapModes));
   1361     }
   1362 
   1363     // android.scaler
   1364 
   1365     const std::vector<int32_t> availableStreamConfigurationsBasic = {
   1366         HAL_PIXEL_FORMAT_BLOB, width, height, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
   1367         HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 320, 240, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
   1368         HAL_PIXEL_FORMAT_YCbCr_420_888, 320, 240, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
   1369         HAL_PIXEL_FORMAT_BLOB, 320, 240, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
   1370         HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 176, 144, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
   1371         HAL_PIXEL_FORMAT_YCbCr_420_888, 176, 144, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
   1372         HAL_PIXEL_FORMAT_BLOB, 176, 144, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
   1373         HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 1280, 720, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
   1374         HAL_PIXEL_FORMAT_YCbCr_420_888, 1280, 720, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
   1375         HAL_PIXEL_FORMAT_BLOB, 1280, 720, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
   1376     };
   1377 
   1378     // Always need to include 640x480 in basic formats
   1379     const std::vector<int32_t> availableStreamConfigurationsBasic640 = {
   1380         HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 640, 480, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
   1381         HAL_PIXEL_FORMAT_YCbCr_420_888, 640, 480, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
   1382         HAL_PIXEL_FORMAT_BLOB, 640, 480, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT
   1383     };
   1384 
   1385     const std::vector<int32_t> availableStreamConfigurationsRaw = {
   1386         HAL_PIXEL_FORMAT_RAW16, width, height, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
   1387     };
   1388 
   1389     const std::vector<int32_t> availableStreamConfigurationsBurst = {
   1390         HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, width, height, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
   1391         HAL_PIXEL_FORMAT_YCbCr_420_888, width, height, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
   1392         HAL_PIXEL_FORMAT_RGBA_8888, width, height, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
   1393     };
   1394 
   1395     std::vector<int32_t> availableStreamConfigurations;
   1396 
   1397     if (hasCapability(BACKWARD_COMPATIBLE)) {
   1398         availableStreamConfigurations.insert(availableStreamConfigurations.end(),
   1399                 availableStreamConfigurationsBasic.begin(),
   1400                 availableStreamConfigurationsBasic.end());
   1401         if (width > 640) {
   1402             availableStreamConfigurations.insert(availableStreamConfigurations.end(),
   1403                     availableStreamConfigurationsBasic640.begin(),
   1404                     availableStreamConfigurationsBasic640.end());
   1405         }
   1406     }
   1407     if (hasCapability(RAW)) {
   1408         availableStreamConfigurations.insert(availableStreamConfigurations.end(),
   1409                 availableStreamConfigurationsRaw.begin(),
   1410                 availableStreamConfigurationsRaw.end());
   1411     }
   1412     if (hasCapability(BURST_CAPTURE)) {
   1413         availableStreamConfigurations.insert(availableStreamConfigurations.end(),
   1414                 availableStreamConfigurationsBurst.begin(),
   1415                 availableStreamConfigurationsBurst.end());
   1416     }
   1417 
   1418     if (availableStreamConfigurations.size() > 0) {
   1419         ADD_STATIC_ENTRY(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
   1420                 &availableStreamConfigurations[0],
   1421                 availableStreamConfigurations.size());
   1422     }
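             // Editorial note (not part of the original HAL code): this tag is a flat
             // int32 array of (format, width, height, input/output direction) quadruples,
             // which is why the vectors above are built four values at a time. A consumer
             // would walk the list in strides of four, e.g.:
             //
             //     for (size_t i = 0; i + 3 < availableStreamConfigurations.size(); i += 4) {
             //         int32_t format    = availableStreamConfigurations[i + 0];
             //         int32_t width     = availableStreamConfigurations[i + 1];
             //         int32_t height    = availableStreamConfigurations[i + 2];
             //         int32_t direction = availableStreamConfigurations[i + 3];
             //     }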
   1423 
   1424     const std::vector<int64_t> availableMinFrameDurationsBasic = {
   1425         HAL_PIXEL_FORMAT_BLOB, width, height, Sensor::kFrameDurationRange[0],
   1426         HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 320, 240, Sensor::kFrameDurationRange[0],
   1427         HAL_PIXEL_FORMAT_YCbCr_420_888, 320, 240, Sensor::kFrameDurationRange[0],
   1428         HAL_PIXEL_FORMAT_BLOB, 320, 240, Sensor::kFrameDurationRange[0],
   1429         HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 176, 144, Sensor::kFrameDurationRange[0],
   1430         HAL_PIXEL_FORMAT_YCbCr_420_888, 176, 144, Sensor::kFrameDurationRange[0],
   1431         HAL_PIXEL_FORMAT_BLOB, 176, 144, Sensor::kFrameDurationRange[0],
   1432         HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 1280, 720, Sensor::kFrameDurationRange[0],
   1433         HAL_PIXEL_FORMAT_YCbCr_420_888, 1280, 720, Sensor::kFrameDurationRange[0],
   1434         HAL_PIXEL_FORMAT_BLOB, 1280, 720, Sensor::kFrameDurationRange[0],
   1435     };
   1436 
   1437     // Always need to include 640x480 in basic formats
   1438     const std::vector<int64_t> availableMinFrameDurationsBasic640 = {
   1439         HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 640, 480, Sensor::kFrameDurationRange[0],
   1440         HAL_PIXEL_FORMAT_YCbCr_420_888, 640, 480, Sensor::kFrameDurationRange[0],
   1441         HAL_PIXEL_FORMAT_BLOB, 640, 480, Sensor::kFrameDurationRange[0]
   1442     };
   1443 
   1444     const std::vector<int64_t> availableMinFrameDurationsRaw = {
   1445         HAL_PIXEL_FORMAT_RAW16, width, height, Sensor::kFrameDurationRange[0],
   1446     };
   1447 
   1448     const std::vector<int64_t> availableMinFrameDurationsBurst = {
   1449         HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, width, height, Sensor::kFrameDurationRange[0],
   1450         HAL_PIXEL_FORMAT_YCbCr_420_888, width, height, Sensor::kFrameDurationRange[0],
   1451         HAL_PIXEL_FORMAT_RGBA_8888, width, height, Sensor::kFrameDurationRange[0],
   1452     };
   1453 
   1454     std::vector<int64_t> availableMinFrameDurations;
   1455 
   1456     if (hasCapability(BACKWARD_COMPATIBLE)) {
   1457         availableMinFrameDurations.insert(availableMinFrameDurations.end(),
   1458                 availableMinFrameDurationsBasic.begin(),
   1459                 availableMinFrameDurationsBasic.end());
   1460         if (width > 640) {
   1461             availableMinFrameDurations.insert(availableMinFrameDurations.end(),
   1462                     availableMinFrameDurationsBasic640.begin(),
   1463                     availableMinFrameDurationsBasic640.end());
   1464         }
   1465     }
   1466     if (hasCapability(RAW)) {
   1467         availableMinFrameDurations.insert(availableMinFrameDurations.end(),
   1468                 availableMinFrameDurationsRaw.begin(),
   1469                 availableMinFrameDurationsRaw.end());
   1470     }
   1471     if (hasCapability(BURST_CAPTURE)) {
   1472         availableMinFrameDurations.insert(availableMinFrameDurations.end(),
   1473                 availableMinFrameDurationsBurst.begin(),
   1474                 availableMinFrameDurationsBurst.end());
   1475     }
   1476 
   1477     if (availableMinFrameDurations.size() > 0) {
   1478         ADD_STATIC_ENTRY(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
   1479                 &availableMinFrameDurations[0],
   1480                 availableMinFrameDurations.size());
   1481     }
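             // Editorial note (not part of the original HAL code): these entries use the
             // same (format, width, height, value) layout, where the value is the minimum
             // frame duration in nanoseconds. Purely for illustration (the real value is
             // Sensor::kFrameDurationRange[0]): a minimum duration of 33,333,333 ns would
             // correspond to a maximum rate of 1e9 / 33,333,333 ≈ 30 fps for that stream.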
   1482 
   1483     const std::vector<int64_t> availableStallDurationsBasic = {
   1484         HAL_PIXEL_FORMAT_BLOB, width, height, Sensor::kFrameDurationRange[0],
   1485         HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 320, 240, 0,
   1486         HAL_PIXEL_FORMAT_YCbCr_420_888, 320, 240, 0,
   1487         HAL_PIXEL_FORMAT_RGBA_8888, 320, 240, 0,
   1488         HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 176, 144, 0,
   1489         HAL_PIXEL_FORMAT_YCbCr_420_888, 176, 144, 0,
   1490         HAL_PIXEL_FORMAT_RGBA_8888, 176, 144, 0,
   1491         HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 1280, 720, 0,
   1492         HAL_PIXEL_FORMAT_YCbCr_420_888, 1280, 720, 0,
   1493         HAL_PIXEL_FORMAT_RGBA_8888, 1280, 720, 0,
   1494     };
   1495 
   1496     // Always need to include 640x480 in basic formats
   1497     const std::vector<int64_t> availableStallDurationsBasic640 = {
   1498         HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 640, 480, 0,
   1499         HAL_PIXEL_FORMAT_YCbCr_420_888, 640, 480, 0,
   1500         HAL_PIXEL_FORMAT_BLOB, 640, 480, Sensor::kFrameDurationRange[0]
   1501     };
   1502 
   1503     const std::vector<int64_t> availableStallDurationsRaw = {
   1504         HAL_PIXEL_FORMAT_RAW16, 640, 480, Sensor::kFrameDurationRange[0]
   1505     };
   1506     const std::vector<int64_t> availableStallDurationsBurst = {
   1507         HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 640, 480, 0,
   1508         HAL_PIXEL_FORMAT_YCbCr_420_888, 640, 480, 0,
   1509         HAL_PIXEL_FORMAT_RGBA_8888, 640, 480, 0
   1510     };
   1511 
   1512     std::vector<int64_t> availableStallDurations;
   1513 
   1514     if (hasCapability(BACKWARD_COMPATIBLE)) {
   1515         availableStallDurations.insert(availableStallDurations.end(),
   1516                 availableStallDurationsBasic.begin(),
   1517                 availableStallDurationsBasic.end());
   1518         if (width > 640) {
   1519             availableStallDurations.insert(availableStallDurations.end(),
   1520                     availableStallDurationsBasic640.begin(),
   1521                     availableStallDurationsBasic640.end());
   1522         }
   1523     }
   1524     if (hasCapability(RAW)) {
   1525         availableStallDurations.insert(availableStallDurations.end(),
   1526                 availableStallDurationsRaw.begin(),
   1527                 availableStallDurationsRaw.end());
   1528     }
   1529     if (hasCapability(BURST_CAPTURE)) {
   1530         availableStallDurations.insert(availableStallDurations.end(),
   1531                 availableStallDurationsBurst.begin(),
   1532                 availableStallDurationsBurst.end());
   1533     }
   1534 
   1535     if (availableStallDurations.size() > 0) {
   1536         ADD_STATIC_ENTRY(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
   1537                 &availableStallDurations[0],
   1538                 availableStallDurations.size());
   1539     }
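             // Editorial note (not part of the original HAL code): a stall duration is the
             // extra per-request delay a stream may add to the pipeline; only the BLOB
             // (JPEG) and RAW16 entries above report a non-zero stall, while the processed
             // outputs report 0.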
   1540 
   1541     if (hasCapability(BACKWARD_COMPATIBLE)) {
   1542         static const uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_FREEFORM;
   1543         ADD_STATIC_ENTRY(ANDROID_SCALER_CROPPING_TYPE,
   1544                 &croppingType, 1);
   1545 
   1546         static const float maxZoom = 10;
   1547         ADD_STATIC_ENTRY(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
   1548                 &maxZoom, 1);
   1549     }
   1550 
   1551     // android.jpeg
   1552 
   1553     if (hasCapability(BACKWARD_COMPATIBLE)) {
   1554         static const int32_t jpegThumbnailSizes[] = {
   1555             0, 0,
   1556             160, 120,
   1557             320, 180,
   1558             320, 240
   1559         };
   1560         ADD_STATIC_ENTRY(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
   1561                 jpegThumbnailSizes, sizeof(jpegThumbnailSizes)/sizeof(int32_t));
   1562 
   1563         static const int32_t jpegMaxSize = JpegCompressor::kMaxJpegSize;
   1564         ADD_STATIC_ENTRY(ANDROID_JPEG_MAX_SIZE, &jpegMaxSize, 1);
   1565     }
   1566 
   1567     // android.stats
   1568 
   1569     if (hasCapability(BACKWARD_COMPATIBLE)) {
   1570         static const uint8_t availableFaceDetectModes[] = {
   1571             ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
   1572             ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE,
   1573             ANDROID_STATISTICS_FACE_DETECT_MODE_FULL
   1574         };
   1575         ADD_STATIC_ENTRY(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
   1576                 availableFaceDetectModes,
   1577                 sizeof(availableFaceDetectModes));
   1578 
   1579         static const int32_t maxFaceCount = 8;
   1580         ADD_STATIC_ENTRY(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
   1581                 &maxFaceCount, 1);
   1582 
   1583 
   1584         static const uint8_t availableShadingMapModes[] = {
   1585             ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF
   1586         };
   1587         ADD_STATIC_ENTRY(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
   1588                 availableShadingMapModes, sizeof(availableShadingMapModes));
   1589     }
   1590 
   1591     // android.sync
   1592 
    1593     const int32_t maxLatency =
   1594             hasCapability(FULL_LEVEL) ? ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL : 3;
   1595     ADD_STATIC_ENTRY(ANDROID_SYNC_MAX_LATENCY, &maxLatency, 1);
   1596 
   1597     // android.control
   1598 
   1599     if (hasCapability(BACKWARD_COMPATIBLE)) {
   1600         static const uint8_t availableControlModes[] = {
   1601             ANDROID_CONTROL_MODE_OFF, ANDROID_CONTROL_MODE_AUTO, ANDROID_CONTROL_MODE_USE_SCENE_MODE
   1602         };
   1603         ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_MODES,
   1604                 availableControlModes, sizeof(availableControlModes));
   1605     } else {
   1606         static const uint8_t availableControlModes[] = {
   1607             ANDROID_CONTROL_MODE_AUTO
   1608         };
   1609         ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_MODES,
   1610                 availableControlModes, sizeof(availableControlModes));
   1611     }
   1612 
    1613     const uint8_t availableSceneModes[] = {
   1614         hasCapability(BACKWARD_COMPATIBLE) ?
   1615             ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY :
   1616             ANDROID_CONTROL_SCENE_MODE_DISABLED
   1617     };
   1618     ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
   1619             availableSceneModes, sizeof(availableSceneModes));
   1620 
   1621     if (hasCapability(BACKWARD_COMPATIBLE)) {
   1622         static const uint8_t availableEffects[] = {
   1623             ANDROID_CONTROL_EFFECT_MODE_OFF
   1624         };
   1625         ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_EFFECTS,
   1626                 availableEffects, sizeof(availableEffects));
   1627     }
   1628 
   1629     if (hasCapability(BACKWARD_COMPATIBLE)) {
   1630         static const int32_t max3aRegions[] = {/*AE*/ 1,/*AWB*/ 0,/*AF*/ 1};
   1631         ADD_STATIC_ENTRY(ANDROID_CONTROL_MAX_REGIONS,
   1632                 max3aRegions, sizeof(max3aRegions)/sizeof(max3aRegions[0]));
   1633 
   1634         static const uint8_t availableAeModes[] = {
   1635             ANDROID_CONTROL_AE_MODE_OFF,
   1636             ANDROID_CONTROL_AE_MODE_ON
   1637         };
   1638         ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_AVAILABLE_MODES,
   1639                 availableAeModes, sizeof(availableAeModes));
   1640 
   1641         static const camera_metadata_rational exposureCompensationStep = {
   1642             0, 3
   1643         };
   1644         ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_COMPENSATION_STEP,
   1645                 &exposureCompensationStep, 1);
   1646 
   1647         int32_t exposureCompensationRange[] = {0, 0};
   1648         ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
   1649                 exposureCompensationRange,
   1650                 sizeof(exposureCompensationRange)/sizeof(int32_t));
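                 // Editorial note (not part of the original HAL code): with a compensation
                 // range of [0, 0], applications cannot request any exposure compensation,
                 // regardless of the step value advertised above.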
   1651     }
   1652 
   1653     static const int32_t availableTargetFpsRanges[] = {
   1654         15, 30, 30, 30
   1655     };
   1656     ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
   1657             availableTargetFpsRanges,
   1658             sizeof(availableTargetFpsRanges)/sizeof(int32_t));
   1659 
   1660     if (hasCapability(BACKWARD_COMPATIBLE)) {
   1661         static const uint8_t availableAntibandingModes[] = {
   1662             ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,
   1663             ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO
   1664         };
   1665         ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
   1666                 availableAntibandingModes, sizeof(availableAntibandingModes));
   1667     }
   1668 
    1669     const uint8_t aeLockAvailable = hasCapability(BACKWARD_COMPATIBLE) ?
   1670             ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
   1671 
   1672     ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
   1673             &aeLockAvailable, 1);
   1674 
   1675     if (hasCapability(BACKWARD_COMPATIBLE)) {
   1676         static const uint8_t availableAwbModes[] = {
   1677             ANDROID_CONTROL_AWB_MODE_OFF,
   1678             ANDROID_CONTROL_AWB_MODE_AUTO,
   1679             ANDROID_CONTROL_AWB_MODE_INCANDESCENT,
   1680             ANDROID_CONTROL_AWB_MODE_FLUORESCENT,
   1681             ANDROID_CONTROL_AWB_MODE_DAYLIGHT,
   1682             ANDROID_CONTROL_AWB_MODE_SHADE
   1683         };
   1684         ADD_STATIC_ENTRY(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
   1685                 availableAwbModes, sizeof(availableAwbModes));
   1686     }
   1687 
    1688     const uint8_t awbLockAvailable = hasCapability(BACKWARD_COMPATIBLE) ?
   1689             ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
   1690 
   1691     ADD_STATIC_ENTRY(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
   1692             &awbLockAvailable, 1);
   1693 
   1694     static const uint8_t availableAfModesBack[] = {
   1695             ANDROID_CONTROL_AF_MODE_OFF,
   1696             ANDROID_CONTROL_AF_MODE_AUTO,
   1697             ANDROID_CONTROL_AF_MODE_MACRO,
   1698             ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,
   1699             ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE
   1700     };
   1701 
   1702     static const uint8_t availableAfModesFront[] = {
   1703             ANDROID_CONTROL_AF_MODE_OFF
   1704     };
   1705 
   1706     if (mFacingBack && hasCapability(BACKWARD_COMPATIBLE)) {
   1707         ADD_STATIC_ENTRY(ANDROID_CONTROL_AF_AVAILABLE_MODES,
   1708                 availableAfModesBack, sizeof(availableAfModesBack));
   1709     } else {
   1710         ADD_STATIC_ENTRY(ANDROID_CONTROL_AF_AVAILABLE_MODES,
   1711                 availableAfModesFront, sizeof(availableAfModesFront));
   1712     }
   1713 
   1714     static const uint8_t availableVstabModes[] = {
   1715         ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF
   1716     };
   1717     ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
   1718             availableVstabModes, sizeof(availableVstabModes));
   1719 
   1720     // android.colorCorrection
   1721 
   1722     if (hasCapability(BACKWARD_COMPATIBLE)) {
   1723         static const uint8_t availableAberrationModes[] = {
   1724             ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
   1725             ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
   1726             ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY
   1727         };
   1728         ADD_STATIC_ENTRY(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
   1729                 availableAberrationModes, sizeof(availableAberrationModes));
   1730     } else {
   1731         static const uint8_t availableAberrationModes[] = {
   1732             ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
   1733         };
   1734         ADD_STATIC_ENTRY(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
   1735                 availableAberrationModes, sizeof(availableAberrationModes));
   1736     }
   1737     // android.edge
   1738 
   1739     if (hasCapability(BACKWARD_COMPATIBLE)) {
   1740         static const uint8_t availableEdgeModes[] = {
   1741             ANDROID_EDGE_MODE_OFF, ANDROID_EDGE_MODE_FAST, ANDROID_EDGE_MODE_HIGH_QUALITY
   1742         };
   1743         ADD_STATIC_ENTRY(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
   1744                 availableEdgeModes, sizeof(availableEdgeModes));
   1745     } else {
   1746         static const uint8_t availableEdgeModes[] = {
   1747             ANDROID_EDGE_MODE_OFF
   1748         };
   1749         ADD_STATIC_ENTRY(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
   1750                 availableEdgeModes, sizeof(availableEdgeModes));
   1751     }
   1752 
   1753     // android.info
   1754 
    1755     const uint8_t supportedHardwareLevel =
   1756             hasCapability(FULL_LEVEL) ? ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL :
   1757                     ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED;
   1758     ADD_STATIC_ENTRY(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
   1759                 &supportedHardwareLevel,
   1760                 /*count*/1);
   1761 
   1762     // android.noiseReduction
   1763 
   1764     if (hasCapability(BACKWARD_COMPATIBLE)) {
   1765         static const uint8_t availableNoiseReductionModes[] = {
   1766             ANDROID_NOISE_REDUCTION_MODE_OFF,
   1767             ANDROID_NOISE_REDUCTION_MODE_FAST,
   1768             ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY
   1769         };
   1770         ADD_STATIC_ENTRY(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
   1771                 availableNoiseReductionModes, sizeof(availableNoiseReductionModes));
   1772     } else {
   1773         static const uint8_t availableNoiseReductionModes[] = {
   1774             ANDROID_NOISE_REDUCTION_MODE_OFF,
   1775         };
   1776         ADD_STATIC_ENTRY(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
   1777                 availableNoiseReductionModes, sizeof(availableNoiseReductionModes));
   1778     }
   1779 
   1780     // android.depth
   1781 
   1782     if (hasCapability(DEPTH_OUTPUT)) {
   1783 
   1784         static const int32_t maxDepthSamples = 100;
   1785         ADD_STATIC_ENTRY(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
   1786                 &maxDepthSamples, 1);
   1787 
   1788         static const int32_t availableDepthStreamConfigurations[] = {
   1789             HAL_PIXEL_FORMAT_Y16, 160, 120, ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_OUTPUT,
   1790             HAL_PIXEL_FORMAT_BLOB, maxDepthSamples,1, ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_OUTPUT
   1791         };
   1792         ADD_STATIC_ENTRY(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
   1793                 availableDepthStreamConfigurations,
   1794                 sizeof(availableDepthStreamConfigurations)/sizeof(int32_t));
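                 // Editorial note (not part of the original HAL code): the BLOB entry
                 // describes the depth point-cloud output, whose 'width' is the maximum
                 // number of point samples (maxDepthSamples) and whose 'height' is 1.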
   1795 
   1796         static const int64_t availableDepthMinFrameDurations[] = {
   1797             HAL_PIXEL_FORMAT_Y16, 160, 120, Sensor::kFrameDurationRange[0],
   1798             HAL_PIXEL_FORMAT_BLOB, maxDepthSamples,1, Sensor::kFrameDurationRange[0]
   1799         };
   1800         ADD_STATIC_ENTRY(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
   1801                 availableDepthMinFrameDurations,
   1802                 sizeof(availableDepthMinFrameDurations)/sizeof(int64_t));
   1803 
   1804         static const int64_t availableDepthStallDurations[] = {
   1805             HAL_PIXEL_FORMAT_Y16, 160, 120, Sensor::kFrameDurationRange[0],
   1806             HAL_PIXEL_FORMAT_BLOB, maxDepthSamples,1, Sensor::kFrameDurationRange[0]
   1807         };
   1808         ADD_STATIC_ENTRY(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
   1809                 availableDepthStallDurations,
   1810                 sizeof(availableDepthStallDurations)/sizeof(int64_t));
   1811 
   1812         uint8_t depthIsExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
   1813         ADD_STATIC_ENTRY(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE,
   1814                 &depthIsExclusive, 1);
   1815     }
   1816 
   1817     // android.shading
   1818 
   1819     if (hasCapability(BACKWARD_COMPATIBLE)) {
   1820         static const uint8_t availableShadingModes[] = {
   1821             ANDROID_SHADING_MODE_OFF, ANDROID_SHADING_MODE_FAST, ANDROID_SHADING_MODE_HIGH_QUALITY
   1822         };
   1823         ADD_STATIC_ENTRY(ANDROID_SHADING_AVAILABLE_MODES, availableShadingModes,
   1824                 sizeof(availableShadingModes));
   1825     } else {
   1826         static const uint8_t availableShadingModes[] = {
   1827             ANDROID_SHADING_MODE_OFF
   1828         };
   1829         ADD_STATIC_ENTRY(ANDROID_SHADING_AVAILABLE_MODES, availableShadingModes,
   1830                 sizeof(availableShadingModes));
   1831     }
   1832 
   1833     // android.request
   1834 
   1835     static const int32_t maxNumOutputStreams[] = {
   1836             kMaxRawStreamCount, kMaxProcessedStreamCount, kMaxJpegStreamCount
   1837     };
   1838     ADD_STATIC_ENTRY(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, maxNumOutputStreams, 3);
   1839 
   1840     static const uint8_t maxPipelineDepth = kMaxBufferCount;
   1841     ADD_STATIC_ENTRY(ANDROID_REQUEST_PIPELINE_MAX_DEPTH, &maxPipelineDepth, 1);
   1842 
   1843     static const int32_t partialResultCount = 1;
   1844     ADD_STATIC_ENTRY(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
   1845             &partialResultCount, /*count*/1);
   1846 
   1847     SortedVector<uint8_t> caps;
   1848     for (size_t i = 0; i < mCapabilities.size(); i++) {
   1849         switch(mCapabilities[i]) {
   1850             case BACKWARD_COMPATIBLE:
   1851                 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
   1852                 break;
   1853             case MANUAL_SENSOR:
   1854                 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
   1855                 break;
   1856             case MANUAL_POST_PROCESSING:
   1857                 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
   1858                 break;
   1859             case RAW:
   1860                 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
   1861                 break;
   1862             case PRIVATE_REPROCESSING:
   1863                 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
   1864                 break;
   1865             case READ_SENSOR_SETTINGS:
   1866                 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
   1867                 break;
   1868             case BURST_CAPTURE:
   1869                 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
   1870                 break;
   1871             case YUV_REPROCESSING:
   1872                 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
   1873                 break;
   1874             case DEPTH_OUTPUT:
   1875                 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT);
   1876                 break;
   1877             case CONSTRAINED_HIGH_SPEED_VIDEO:
   1878                 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
   1879                 break;
   1880             default:
   1881                 // Ignore LEVELs
   1882                 break;
   1883         }
   1884     }
   1885     ADD_STATIC_ENTRY(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, caps.array(), caps.size());
   1886 
   1887     // Scan a default request template for included request keys
   1888     Vector<int32_t> availableRequestKeys;
   1889     const camera_metadata_t *previewRequest =
   1890         constructDefaultRequestSettings(CAMERA3_TEMPLATE_PREVIEW);
   1891     for (size_t i = 0; i < get_camera_metadata_entry_count(previewRequest); i++) {
   1892         camera_metadata_ro_entry_t entry;
   1893         get_camera_metadata_ro_entry(previewRequest, i, &entry);
   1894         availableRequestKeys.add(entry.tag);
   1895     }
   1896     ADD_STATIC_ENTRY(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, availableRequestKeys.array(),
   1897             availableRequestKeys.size());
   1898 
   1899     // Add a few more result keys. Must be kept up to date with the various places that add these
   1900 
   1901     Vector<int32_t> availableResultKeys(availableRequestKeys);
   1902     if (hasCapability(BACKWARD_COMPATIBLE)) {
   1903         availableResultKeys.add(ANDROID_CONTROL_AE_STATE);
   1904         availableResultKeys.add(ANDROID_CONTROL_AF_STATE);
   1905         availableResultKeys.add(ANDROID_CONTROL_AWB_STATE);
   1906         availableResultKeys.add(ANDROID_FLASH_STATE);
   1907         availableResultKeys.add(ANDROID_LENS_STATE);
   1908         availableResultKeys.add(ANDROID_LENS_FOCUS_RANGE);
   1909         availableResultKeys.add(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW);
   1910         availableResultKeys.add(ANDROID_STATISTICS_SCENE_FLICKER);
   1911     }
   1912 
   1913     if (hasCapability(DEPTH_OUTPUT)) {
   1914         availableResultKeys.add(ANDROID_LENS_POSE_ROTATION);
   1915         availableResultKeys.add(ANDROID_LENS_POSE_TRANSLATION);
   1916         availableResultKeys.add(ANDROID_LENS_INTRINSIC_CALIBRATION);
   1917         availableResultKeys.add(ANDROID_LENS_RADIAL_DISTORTION);
   1918     }
   1919 
   1920     availableResultKeys.add(ANDROID_REQUEST_PIPELINE_DEPTH);
   1921     availableResultKeys.add(ANDROID_SENSOR_TIMESTAMP);
   1922 
   1923     ADD_STATIC_ENTRY(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS, availableResultKeys.array(),
   1924             availableResultKeys.size());
   1925 
   1926     // Needs to be last, to collect all the keys set
   1927 
   1928     availableCharacteristicsKeys.add(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS);
   1929     info.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
   1930             availableCharacteristicsKeys);
   1931 
   1932     mCameraInfo = info.release();
   1933 
   1934 #undef ADD_STATIC_ENTRY
   1935     return OK;
   1936 }
   1937 
   1938 status_t EmulatedFakeCamera3::process3A(CameraMetadata &settings) {
   1939     /**
   1940      * Extract top-level 3A controls
   1941      */
   1942     status_t res;
   1943 
   1944     bool facePriority = false;
   1945 
   1946     camera_metadata_entry e;
   1947 
   1948     e = settings.find(ANDROID_CONTROL_MODE);
   1949     if (e.count == 0) {
   1950         ALOGE("%s: No control mode entry!", __FUNCTION__);
   1951         return BAD_VALUE;
   1952     }
   1953     uint8_t controlMode = e.data.u8[0];
   1954 
   1955     if (controlMode == ANDROID_CONTROL_MODE_OFF) {
   1956         mAeMode   = ANDROID_CONTROL_AE_MODE_OFF;
   1957         mAfMode   = ANDROID_CONTROL_AF_MODE_OFF;
   1958         mAwbMode  = ANDROID_CONTROL_AWB_MODE_OFF;
   1959         mAeState  = ANDROID_CONTROL_AE_STATE_INACTIVE;
   1960         mAfState  = ANDROID_CONTROL_AF_STATE_INACTIVE;
   1961         mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
   1962         update3A(settings);
   1963         return OK;
   1964     } else if (controlMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
   1965         if (!hasCapability(BACKWARD_COMPATIBLE)) {
   1966             ALOGE("%s: Can't use scene mode when BACKWARD_COMPATIBLE not supported!",
   1967                   __FUNCTION__);
   1968             return BAD_VALUE;
   1969         }
   1970 
   1971         e = settings.find(ANDROID_CONTROL_SCENE_MODE);
   1972         if (e.count == 0) {
   1973             ALOGE("%s: No scene mode entry!", __FUNCTION__);
   1974             return BAD_VALUE;
   1975         }
   1976         uint8_t sceneMode = e.data.u8[0];
   1977 
   1978         switch(sceneMode) {
   1979             case ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY:
   1980                 mFacePriority = true;
   1981                 break;
   1982             default:
   1983                 ALOGE("%s: Emulator doesn't support scene mode %d",
   1984                         __FUNCTION__, sceneMode);
   1985                 return BAD_VALUE;
   1986         }
   1987     } else {
   1988         mFacePriority = false;
   1989     }
   1990 
    1991     // controlMode == AUTO or sceneMode == FACE_PRIORITY
   1992     // Process individual 3A controls
   1993 
   1994     res = doFakeAE(settings);
   1995     if (res != OK) return res;
   1996 
   1997     res = doFakeAF(settings);
   1998     if (res != OK) return res;
   1999 
   2000     res = doFakeAWB(settings);
   2001     if (res != OK) return res;
   2002 
   2003     update3A(settings);
   2004     return OK;
   2005 }
   2006 
   2007 status_t EmulatedFakeCamera3::doFakeAE(CameraMetadata &settings) {
   2008     camera_metadata_entry e;
   2009 
   2010     e = settings.find(ANDROID_CONTROL_AE_MODE);
   2011     if (e.count == 0 && hasCapability(BACKWARD_COMPATIBLE)) {
   2012         ALOGE("%s: No AE mode entry!", __FUNCTION__);
   2013         return BAD_VALUE;
   2014     }
   2015     uint8_t aeMode = (e.count > 0) ? e.data.u8[0] : (uint8_t)ANDROID_CONTROL_AE_MODE_ON;
   2016     mAeMode = aeMode;
   2017 
   2018     switch (aeMode) {
   2019         case ANDROID_CONTROL_AE_MODE_OFF:
   2020             // AE is OFF
   2021             mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
   2022             return OK;
   2023         case ANDROID_CONTROL_AE_MODE_ON:
   2024             // OK for AUTO modes
   2025             break;
   2026         default:
   2027             // Mostly silently ignore unsupported modes
   2028             ALOGV("%s: Emulator doesn't support AE mode %d, assuming ON",
   2029                     __FUNCTION__, aeMode);
   2030             break;
   2031     }
   2032 
   2033     e = settings.find(ANDROID_CONTROL_AE_LOCK);
   2034     bool aeLocked = (e.count > 0) ? (e.data.u8[0] == ANDROID_CONTROL_AE_LOCK_ON) : false;
   2035 
   2036     e = settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER);
   2037     bool precaptureTrigger = false;
   2038     if (e.count != 0) {
   2039         precaptureTrigger =
   2040                 (e.data.u8[0] == ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_START);
   2041     }
   2042 
   2043     if (precaptureTrigger) {
   2044         ALOGV("%s: Pre capture trigger = %d", __FUNCTION__, precaptureTrigger);
   2045     } else if (e.count > 0) {
   2046         ALOGV("%s: Pre capture trigger was present? %zu",
   2047               __FUNCTION__,
   2048               e.count);
   2049     }
   2050 
   2051     if (precaptureTrigger || mAeState == ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
   2052         // Run precapture sequence
   2053         if (mAeState != ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
   2054             mAeCounter = 0;
   2055         }
   2056 
   2057         if (mFacePriority) {
   2058             mAeTargetExposureTime = kFacePriorityExposureTime;
   2059         } else {
   2060             mAeTargetExposureTime = kNormalExposureTime;
   2061         }
   2062 
   2063         if (mAeCounter > kPrecaptureMinFrames &&
   2064                 (mAeTargetExposureTime - mAeCurrentExposureTime) <
   2065                 mAeTargetExposureTime / 10) {
   2066             // Done with precapture
   2067             mAeCounter = 0;
   2068             mAeState = aeLocked ? ANDROID_CONTROL_AE_STATE_LOCKED :
   2069                     ANDROID_CONTROL_AE_STATE_CONVERGED;
   2070         } else {
   2071             // Converge some more
   2072             mAeCurrentExposureTime +=
   2073                     (mAeTargetExposureTime - mAeCurrentExposureTime) *
   2074                     kExposureTrackRate;
   2075             mAeCounter++;
   2076             mAeState = ANDROID_CONTROL_AE_STATE_PRECAPTURE;
   2077         }
   2078 
   2079     } else if (!aeLocked) {
   2080         // Run standard occasional AE scan
   2081         switch (mAeState) {
   2082             case ANDROID_CONTROL_AE_STATE_INACTIVE:
   2083                 mAeState = ANDROID_CONTROL_AE_STATE_SEARCHING;
   2084                 break;
   2085             case ANDROID_CONTROL_AE_STATE_CONVERGED:
   2086                 mAeCounter++;
   2087                 if (mAeCounter > kStableAeMaxFrames) {
   2088                     mAeTargetExposureTime =
   2089                             mFacePriority ? kFacePriorityExposureTime :
   2090                             kNormalExposureTime;
   2091                     float exposureStep = ((double)rand() / RAND_MAX) *
   2092                             (kExposureWanderMax - kExposureWanderMin) +
   2093                             kExposureWanderMin;
   2094                     mAeTargetExposureTime *= std::pow(2, exposureStep);
   2095                     mAeState = ANDROID_CONTROL_AE_STATE_SEARCHING;
   2096                 }
   2097                 break;
   2098             case ANDROID_CONTROL_AE_STATE_SEARCHING:
   2099                 mAeCurrentExposureTime +=
   2100                         (mAeTargetExposureTime - mAeCurrentExposureTime) *
   2101                         kExposureTrackRate;
    2102                 if (std::abs(mAeTargetExposureTime - mAeCurrentExposureTime) <
   2103                         mAeTargetExposureTime / 10) {
   2104                     // Close enough
   2105                     mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED;
   2106                     mAeCounter = 0;
   2107                 }
   2108                 break;
   2109             case ANDROID_CONTROL_AE_STATE_LOCKED:
   2110                 mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED;
   2111                 mAeCounter = 0;
   2112                 break;
   2113             default:
   2114                 ALOGE("%s: Emulator in unexpected AE state %d",
   2115                         __FUNCTION__, mAeState);
   2116                 return INVALID_OPERATION;
   2117         }
   2118     } else {
   2119         // AE is locked
   2120         mAeState = ANDROID_CONTROL_AE_STATE_LOCKED;
   2121     }
   2122 
   2123     return OK;
   2124 }
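         // Editorial note (not part of the original HAL code): the fake AE above is a
         // simple exponential tracker; each frame the remaining error shrinks by a factor
         // of (1 - kExposureTrackRate), i.e.
         //
         //     error_n = error_0 * (1 - kExposureTrackRate)^n
         //
         // so the number of frames needed to get within 10% of the target (the
         // "target / 10" checks above) is roughly ln(0.1) / ln(1 - kExposureTrackRate).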
   2125 
   2126 status_t EmulatedFakeCamera3::doFakeAF(CameraMetadata &settings) {
   2127     camera_metadata_entry e;
   2128 
   2129     e = settings.find(ANDROID_CONTROL_AF_MODE);
   2130     if (e.count == 0 && hasCapability(BACKWARD_COMPATIBLE)) {
   2131         ALOGE("%s: No AF mode entry!", __FUNCTION__);
   2132         return BAD_VALUE;
   2133     }
   2134     uint8_t afMode = (e.count > 0) ? e.data.u8[0] : (uint8_t)ANDROID_CONTROL_AF_MODE_OFF;
   2135 
   2136     e = settings.find(ANDROID_CONTROL_AF_TRIGGER);
   2137     typedef camera_metadata_enum_android_control_af_trigger af_trigger_t;
   2138     af_trigger_t afTrigger;
   2139     if (e.count != 0) {
   2140         afTrigger = static_cast<af_trigger_t>(e.data.u8[0]);
   2141 
   2142         ALOGV("%s: AF trigger set to 0x%x", __FUNCTION__, afTrigger);
   2143         ALOGV("%s: AF mode is 0x%x", __FUNCTION__, afMode);
   2144     } else {
   2145         afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
   2146     }
   2147 
   2148     switch (afMode) {
   2149         case ANDROID_CONTROL_AF_MODE_OFF:
   2150             mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
   2151             return OK;
   2152         case ANDROID_CONTROL_AF_MODE_AUTO:
   2153         case ANDROID_CONTROL_AF_MODE_MACRO:
   2154         case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
   2155         case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
   2156             if (!mFacingBack) {
   2157                 ALOGE("%s: Front camera doesn't support AF mode %d",
   2158                         __FUNCTION__, afMode);
   2159                 return BAD_VALUE;
   2160             }
   2161             // OK, handle transitions lower on
   2162             break;
   2163         default:
   2164             ALOGE("%s: Emulator doesn't support AF mode %d",
   2165                     __FUNCTION__, afMode);
   2166             return BAD_VALUE;
   2167     }
   2168 
   2169     bool afModeChanged = mAfMode != afMode;
   2170     mAfMode = afMode;
   2171 
   2172     /**
   2173      * Simulate AF triggers. Transition at most 1 state per frame.
   2174      * - Focusing always succeeds (goes into locked, or PASSIVE_SCAN).
   2175      */
   2176 
   2177     bool afTriggerStart = false;
   2178     bool afTriggerCancel = false;
   2179     switch (afTrigger) {
   2180         case ANDROID_CONTROL_AF_TRIGGER_IDLE:
   2181             break;
   2182         case ANDROID_CONTROL_AF_TRIGGER_START:
   2183             afTriggerStart = true;
   2184             break;
   2185         case ANDROID_CONTROL_AF_TRIGGER_CANCEL:
   2186             afTriggerCancel = true;
   2187             // Cancel trigger always transitions into INACTIVE
   2188             mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
   2189 
   2190             ALOGV("%s: AF State transition to STATE_INACTIVE", __FUNCTION__);
   2191 
   2192             // Stay in 'inactive' until at least next frame
   2193             return OK;
   2194         default:
   2195             ALOGE("%s: Unknown af trigger value %d", __FUNCTION__, afTrigger);
   2196             return BAD_VALUE;
   2197     }
   2198 
    2199     // If we get this far, we're in either an auto-focus mode
    2200     // or a continuous-focus mode; no other modes reach this point
   2201 
   2202     int oldAfState = mAfState;
   2203     switch (mAfState) {
   2204         case ANDROID_CONTROL_AF_STATE_INACTIVE:
   2205             if (afTriggerStart) {
   2206                 switch (afMode) {
   2207                     case ANDROID_CONTROL_AF_MODE_AUTO:
   2208                         // fall-through
   2209                     case ANDROID_CONTROL_AF_MODE_MACRO:
   2210                         mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
   2211                         break;
   2212                     case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
   2213                         // fall-through
   2214                     case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
   2215                         mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
   2216                         break;
   2217                 }
   2218             } else {
   2219                 // At least one frame stays in INACTIVE
   2220                 if (!afModeChanged) {
   2221                     switch (afMode) {
   2222                         case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
   2223                             // fall-through
   2224                         case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
   2225                             mAfState = ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN;
   2226                             break;
   2227                     }
   2228                 }
   2229             }
   2230             break;
   2231         case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
   2232             /**
   2233              * When the AF trigger is activated, the algorithm should finish
   2234              * its PASSIVE_SCAN if active, and then transition into AF_FOCUSED
   2235              * or AF_NOT_FOCUSED as appropriate
   2236              */
   2237             if (afTriggerStart) {
   2238                 // Randomly transition to focused or not focused
   2239                 if (rand() % 3) {
   2240                     mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
   2241                 } else {
   2242                     mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
   2243                 }
   2244             }
   2245             /**
   2246              * When the AF trigger is not involved, the AF algorithm should
   2247              * start in INACTIVE state, and then transition into PASSIVE_SCAN
   2248              * and PASSIVE_FOCUSED states
   2249              */
   2250             else if (!afTriggerCancel) {
    2251                 // Randomly transition to passive focus
   2252                 if (rand() % 3 == 0) {
   2253                     mAfState = ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED;
   2254                 }
   2255             }
   2256 
   2257             break;
   2258         case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
   2259             if (afTriggerStart) {
   2260                 // Randomly transition to focused or not focused
   2261                 if (rand() % 3) {
   2262                     mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
   2263                 } else {
   2264                     mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
   2265                 }
   2266             }
   2267             // TODO: initiate passive scan (PASSIVE_SCAN)
   2268             break;
   2269         case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN:
   2270             // Simulate AF sweep completing instantaneously
   2271 
   2272             // Randomly transition to focused or not focused
   2273             if (rand() % 3) {
   2274                 mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
   2275             } else {
   2276                 mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
   2277             }
   2278             break;
   2279         case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
   2280             if (afTriggerStart) {
   2281                 switch (afMode) {
   2282                     case ANDROID_CONTROL_AF_MODE_AUTO:
   2283                         // fall-through
   2284                     case ANDROID_CONTROL_AF_MODE_MACRO:
   2285                         mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
   2286                         break;
   2287                     case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
   2288                         // fall-through
   2289                     case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
   2290                         // continuous autofocus => trigger start has no effect
   2291                         break;
   2292                 }
   2293             }
   2294             break;
   2295         case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
   2296             if (afTriggerStart) {
   2297                 switch (afMode) {
   2298                     case ANDROID_CONTROL_AF_MODE_AUTO:
   2299                         // fall-through
   2300                     case ANDROID_CONTROL_AF_MODE_MACRO:
   2301                         mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
   2302                         break;
   2303                     case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
   2304                         // fall-through
   2305                     case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
   2306                         // continuous autofocus => trigger start has no effect
   2307                         break;
   2308                 }
   2309             }
   2310             break;
   2311         default:
   2312             ALOGE("%s: Bad af state %d", __FUNCTION__, mAfState);
   2313     }
   2314 
   2315     {
   2316         char afStateString[100] = {0,};
   2317         camera_metadata_enum_snprint(ANDROID_CONTROL_AF_STATE,
   2318                 oldAfState,
   2319                 afStateString,
   2320                 sizeof(afStateString));
   2321 
   2322         char afNewStateString[100] = {0,};
   2323         camera_metadata_enum_snprint(ANDROID_CONTROL_AF_STATE,
   2324                 mAfState,
   2325                 afNewStateString,
   2326                 sizeof(afNewStateString));
   2327         ALOGVV("%s: AF state transitioned from %s to %s",
   2328               __FUNCTION__, afStateString, afNewStateString);
   2329     }
   2330 
   2331 
   2332     return OK;
   2333 }
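         // Editorial summary (not part of the original HAL code) of the simulated AF state
         // machine above, for a back-facing camera with BACKWARD_COMPATIBLE:
         //
         //     INACTIVE --(TRIGGER_START, AUTO/MACRO)--------------> ACTIVE_SCAN
         //     INACTIVE --(TRIGGER_START, CONTINUOUS_*)------------> NOT_FOCUSED_LOCKED
         //     INACTIVE --(no trigger, CONTINUOUS_*)---------------> PASSIVE_SCAN
         //     PASSIVE_SCAN --(1/3 chance per frame)---------------> PASSIVE_FOCUSED
         //     PASSIVE_SCAN / PASSIVE_FOCUSED --(TRIGGER_START)----> FOCUSED_LOCKED or
         //                                                           NOT_FOCUSED_LOCKED
         //     ACTIVE_SCAN --(completes instantly, random result)--> FOCUSED_LOCKED or
         //                                                           NOT_FOCUSED_LOCKED
         //     *_LOCKED --(TRIGGER_START, AUTO/MACRO)--------------> ACTIVE_SCAN
         //     any state --(TRIGGER_CANCEL)------------------------> INACTIVE
         //
         // Where a locked result is chosen, rand() % 3 gives a 2/3 chance of FOCUSED_LOCKED.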
   2334 
   2335 status_t EmulatedFakeCamera3::doFakeAWB(CameraMetadata &settings) {
   2336     camera_metadata_entry e;
   2337 
   2338     e = settings.find(ANDROID_CONTROL_AWB_MODE);
   2339     if (e.count == 0 && hasCapability(BACKWARD_COMPATIBLE)) {
   2340         ALOGE("%s: No AWB mode entry!", __FUNCTION__);
   2341         return BAD_VALUE;
   2342     }
   2343     uint8_t awbMode = (e.count > 0) ? e.data.u8[0] : (uint8_t)ANDROID_CONTROL_AWB_MODE_AUTO;
   2344 
   2345     // TODO: Add white balance simulation
   2346 
   2347     e = settings.find(ANDROID_CONTROL_AWB_LOCK);
   2348     bool awbLocked = (e.count > 0) ? (e.data.u8[0] == ANDROID_CONTROL_AWB_LOCK_ON) : false;
   2349 
   2350     switch (awbMode) {
   2351         case ANDROID_CONTROL_AWB_MODE_OFF:
   2352             mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
   2353             break;
   2354         case ANDROID_CONTROL_AWB_MODE_AUTO:
   2355         case ANDROID_CONTROL_AWB_MODE_INCANDESCENT:
   2356         case ANDROID_CONTROL_AWB_MODE_FLUORESCENT:
   2357         case ANDROID_CONTROL_AWB_MODE_DAYLIGHT:
   2358         case ANDROID_CONTROL_AWB_MODE_SHADE:
   2359             // Always magically right, or locked
   2360             mAwbState = awbLocked ? ANDROID_CONTROL_AWB_STATE_LOCKED :
   2361                     ANDROID_CONTROL_AWB_STATE_CONVERGED;
   2362             break;
   2363         default:
   2364             ALOGE("%s: Emulator doesn't support AWB mode %d",
   2365                     __FUNCTION__, awbMode);
   2366             return BAD_VALUE;
   2367     }
   2368 
   2369     return OK;
   2370 }
   2371 
   2372 // Update the 3A Region by calculating the intersection of AE/AF/AWB and CROP
   2373 // regions
   2374 static void update3ARegion(uint32_t tag, CameraMetadata &settings) {
   2375     if (tag != ANDROID_CONTROL_AE_REGIONS &&
   2376         tag != ANDROID_CONTROL_AF_REGIONS &&
   2377         tag != ANDROID_CONTROL_AWB_REGIONS) {
   2378         return;
   2379     }
   2380     camera_metadata_entry_t entry;
   2381     entry = settings.find(ANDROID_SCALER_CROP_REGION);
   2382     if (entry.count > 0) {
   2383         int32_t cropRegion[4];
   2384         cropRegion[0] =  entry.data.i32[0];
   2385         cropRegion[1] =  entry.data.i32[1];
   2386         cropRegion[2] =  entry.data.i32[2] + cropRegion[0];
   2387         cropRegion[3] =  entry.data.i32[3] + cropRegion[1];
   2388         entry = settings.find(tag);
   2389         if (entry.count > 0) {
   2390             int32_t* ARegion = entry.data.i32;
   2391             // calculate the intersection of AE/AF/AWB and CROP regions
   2392             if (ARegion[0] < cropRegion[2] && cropRegion[0] < ARegion[2] &&
   2393                 ARegion[1] < cropRegion[3] && cropRegion[1] < ARegion[3]) {
   2394                 int32_t interSect[5];
   2395                 interSect[0] = std::max(ARegion[0], cropRegion[0]);
   2396                 interSect[1] = std::max(ARegion[1], cropRegion[1]);
   2397                 interSect[2] = std::min(ARegion[2], cropRegion[2]);
   2398                 interSect[3] = std::min(ARegion[3], cropRegion[3]);
   2399                 interSect[4] = ARegion[4];
   2400                 settings.update(tag, &interSect[0], 5);
   2401             }
   2402         }
   2403     }
   2404 }
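         // Editorial example (not part of the original HAL code; values are hypothetical):
         // with a crop region of {0, 0, 1280, 720} (left, top, width, height) and an AE
         // region of {1000, 600, 1600, 900, weight} (left, top, right, bottom, weight),
         // the rectangles overlap, so the entry is rewritten as the intersection
         // {1000, 600, 1280, 720, weight}. A region that lies entirely outside the crop
         // rectangle is left unchanged.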
   2405 
   2406 void EmulatedFakeCamera3::update3A(CameraMetadata &settings) {
   2407     if (mAeMode != ANDROID_CONTROL_AE_MODE_OFF) {
   2408         settings.update(ANDROID_SENSOR_EXPOSURE_TIME,
   2409                 &mAeCurrentExposureTime, 1);
   2410         settings.update(ANDROID_SENSOR_SENSITIVITY,
   2411                 &mAeCurrentSensitivity, 1);
   2412     }
   2413 
   2414     settings.update(ANDROID_CONTROL_AE_STATE,
   2415             &mAeState, 1);
   2416     settings.update(ANDROID_CONTROL_AF_STATE,
   2417             &mAfState, 1);
   2418     settings.update(ANDROID_CONTROL_AWB_STATE,
   2419             &mAwbState, 1);
   2420 
   2421     uint8_t lensState;
   2422     switch (mAfState) {
   2423         case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
   2424         case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN:
   2425             lensState = ANDROID_LENS_STATE_MOVING;
   2426             break;
   2427         case ANDROID_CONTROL_AF_STATE_INACTIVE:
   2428         case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
   2429         case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
   2430         case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
   2431         case ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED:
   2432         default:
   2433             lensState = ANDROID_LENS_STATE_STATIONARY;
   2434             break;
   2435     }
   2436     settings.update(ANDROID_LENS_STATE, &lensState, 1);
   2437     update3ARegion(ANDROID_CONTROL_AE_REGIONS, settings);
   2438     update3ARegion(ANDROID_CONTROL_AF_REGIONS, settings);
   2439     update3ARegion(ANDROID_CONTROL_AWB_REGIONS, settings);
   2440 }
   2441 
   2442 void EmulatedFakeCamera3::signalReadoutIdle() {
   2443     Mutex::Autolock l(mLock);
    2444     // Need to check isIdle again because waiting on mLock may have allowed
   2445     // something to be placed in the in-flight queue.
   2446     if (mStatus == STATUS_ACTIVE && mReadoutThread->isIdle()) {
   2447         ALOGV("Now idle");
   2448         mStatus = STATUS_READY;
   2449     }
   2450 }
   2451 
   2452 void EmulatedFakeCamera3::onSensorEvent(uint32_t frameNumber, Event e,
   2453         nsecs_t timestamp) {
   2454     switch(e) {
   2455         case Sensor::SensorListener::EXPOSURE_START: {
    2456             ALOGVV("%s: Frame %d: Sensor started exposure at %" PRId64,
   2457                     __FUNCTION__, frameNumber, timestamp);
   2458             // Trigger shutter notify to framework
   2459             camera3_notify_msg_t msg;
   2460             msg.type = CAMERA3_MSG_SHUTTER;
   2461             msg.message.shutter.frame_number = frameNumber;
   2462             msg.message.shutter.timestamp = timestamp;
   2463             sendNotify(&msg);
   2464             break;
   2465         }
   2466         default:
   2467             ALOGW("%s: Unexpected sensor event %d at %" PRId64, __FUNCTION__,
   2468                     e, timestamp);
   2469             break;
   2470     }
   2471 }
   2472 
   2473 EmulatedFakeCamera3::ReadoutThread::ReadoutThread(EmulatedFakeCamera3 *parent) :
   2474         mParent(parent), mJpegWaiting(false) {
   2475 }
   2476 
   2477 EmulatedFakeCamera3::ReadoutThread::~ReadoutThread() {
   2478     for (List<Request>::iterator i = mInFlightQueue.begin();
   2479          i != mInFlightQueue.end(); i++) {
   2480         delete i->buffers;
   2481         delete i->sensorBuffers;
   2482     }
   2483 }
   2484 
   2485 void EmulatedFakeCamera3::ReadoutThread::queueCaptureRequest(const Request &r) {
   2486     Mutex::Autolock l(mLock);
   2487 
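             // Queue the request and wake threadLoop(), which waits on
             // mInFlightSignal while the queue is empty.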
   2488     mInFlightQueue.push_back(r);
   2489     mInFlightSignal.signal();
   2490 }
   2491 
   2492 bool EmulatedFakeCamera3::ReadoutThread::isIdle() {
   2493     Mutex::Autolock l(mLock);
   2494     return mInFlightQueue.empty() && !mThreadActive;
   2495 }
   2496 
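         // Block the caller while the in-flight queue is full (kMaxQueueSize
         // entries), polling in kWaitPerLoop slices for up to kMaxWaitLoops
         // iterations before giving up with TIMED_OUT.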
   2497 status_t EmulatedFakeCamera3::ReadoutThread::waitForReadout() {
   2498     status_t res;
   2499     Mutex::Autolock l(mLock);
   2500     int loopCount = 0;
   2501     while (mInFlightQueue.size() >= kMaxQueueSize) {
   2502         res = mInFlightSignal.waitRelative(mLock, kWaitPerLoop);
   2503         if (res != OK && res != TIMED_OUT) {
   2504             ALOGE("%s: Error waiting for in-flight queue to shrink",
   2505                     __FUNCTION__);
   2506             return INVALID_OPERATION;
   2507         }
   2508         if (loopCount == kMaxWaitLoops) {
   2509             ALOGE("%s: Timed out waiting for in-flight queue to shrink",
   2510                     __FUNCTION__);
   2511             return TIMED_OUT;
   2512         }
   2513         loopCount++;
   2514     }
   2515     return OK;
   2516 }
   2517 
   2518 bool EmulatedFakeCamera3::ReadoutThread::threadLoop() {
   2519     status_t res;
   2520 
   2521     ALOGVV("%s: ReadoutThread waiting for request", __FUNCTION__);
   2522 
   2523     // First wait for a request from the in-flight queue
   2524 
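             // Empty settings mark that no request is currently being read out;
             // if the previous iteration timed out waiting for the sensor,
             // mCurrentRequest is still populated and the dequeue step is skipped.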
   2525     if (mCurrentRequest.settings.isEmpty()) {
   2526         Mutex::Autolock l(mLock);
   2527         if (mInFlightQueue.empty()) {
   2528             res = mInFlightSignal.waitRelative(mLock, kWaitPerLoop);
   2529             if (res == TIMED_OUT) {
   2530                 ALOGVV("%s: ReadoutThread: Timed out waiting for request",
   2531                         __FUNCTION__);
   2532                 return true;
   2533             } else if (res != NO_ERROR) {
   2534                 ALOGE("%s: Error waiting for capture requests: %d",
   2535                         __FUNCTION__, res);
   2536                 return false;
   2537             }
   2538         }
   2539         mCurrentRequest.frameNumber = mInFlightQueue.begin()->frameNumber;
   2540         mCurrentRequest.settings.acquire(mInFlightQueue.begin()->settings);
   2541         mCurrentRequest.buffers = mInFlightQueue.begin()->buffers;
   2542         mCurrentRequest.sensorBuffers = mInFlightQueue.begin()->sensorBuffers;
   2543         mInFlightQueue.erase(mInFlightQueue.begin());
   2544         mInFlightSignal.signal();
   2545         mThreadActive = true;
   2546         ALOGVV("%s: Beginning readout of frame %d", __FUNCTION__,
   2547                 mCurrentRequest.frameNumber);
   2548     }
   2549 
   2550     // Then wait for it to be delivered from the sensor
   2551     ALOGVV("%s: ReadoutThread: Wait for frame to be delivered from sensor",
   2552             __FUNCTION__);
   2553 
   2554     nsecs_t captureTime;
   2555     bool gotFrame =
   2556             mParent->mSensor->waitForNewFrame(kWaitPerLoop, &captureTime);
   2557     if (!gotFrame) {
   2558         ALOGVV("%s: ReadoutThread: Timed out waiting for sensor frame",
   2559                 __FUNCTION__);
   2560         return true;
   2561     }
   2562 
    2563     ALOGVV("Sensor done with readout for frame %d, captured at %" PRId64,
    2564             mCurrentRequest.frameNumber, captureTime);
   2565 
   2566     // Check if we need to JPEG encode a buffer, and send it for async
   2567     // compression if so. Otherwise prepare the buffer for return.
   2568     bool needJpeg = false;
   2569     HalBufferVector::iterator buf = mCurrentRequest.buffers->begin();
    2570     while (buf != mCurrentRequest.buffers->end()) {
    2571         bool goodBuffer = true;
    2572         if (buf->stream->format == HAL_PIXEL_FORMAT_BLOB &&
    2573                 buf->stream->data_space != HAL_DATASPACE_DEPTH) {
   2574             Mutex::Autolock jl(mJpegLock);
   2575             if (mJpegWaiting) {
   2576                 // This shouldn't happen, because processCaptureRequest should
   2577                 // be stalling until JPEG compressor is free.
   2578                 ALOGE("%s: Already processing a JPEG!", __FUNCTION__);
   2579                 goodBuffer = false;
   2580             }
   2581             if (goodBuffer) {
   2582                 // Compressor takes ownership of sensorBuffers here
   2583                 res = mParent->mJpegCompressor->start(mCurrentRequest.sensorBuffers,
   2584                         this, &(mCurrentRequest.settings));
   2585                 goodBuffer = (res == OK);
   2586             }
   2587             if (goodBuffer) {
   2588                 needJpeg = true;
   2589 
   2590                 mJpegHalBuffer = *buf;
   2591                 mJpegFrameNumber = mCurrentRequest.frameNumber;
   2592                 mJpegWaiting = true;
   2593 
   2594                 mCurrentRequest.sensorBuffers = NULL;
   2595                 buf = mCurrentRequest.buffers->erase(buf);
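                     // This buffer leaves the normal return path; it is handed
                     // back to the framework later from onJpegDone().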
   2596 
   2597                 continue;
   2598             }
   2599             ALOGE("%s: Error compressing output buffer: %s (%d)",
   2600                         __FUNCTION__, strerror(-res), res);
   2601             // fallthrough for cleanup
   2602         }
   2603         GrallocModule::getInstance().unlock(*(buf->buffer));
   2604 
   2605         buf->status = goodBuffer ? CAMERA3_BUFFER_STATUS_OK :
   2606                 CAMERA3_BUFFER_STATUS_ERROR;
   2607         buf->acquire_fence = -1;
   2608         buf->release_fence = -1;
   2609 
   2610         ++buf;
   2611     } // end while
   2612 
   2613     // Construct result for all completed buffers and results
   2614 
   2615     camera3_capture_result result;
   2616 
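         // For backward-compatible devices, report fixed values for scene
         // flicker, flash state, rolling shutter skew, and focus range.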
   2617     if (mParent->hasCapability(BACKWARD_COMPATIBLE)) {
   2618         static const uint8_t sceneFlicker = ANDROID_STATISTICS_SCENE_FLICKER_NONE;
   2619         mCurrentRequest.settings.update(ANDROID_STATISTICS_SCENE_FLICKER,
   2620                 &sceneFlicker, 1);
   2621 
   2622         static const uint8_t flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
   2623         mCurrentRequest.settings.update(ANDROID_FLASH_STATE,
   2624                 &flashState, 1);
   2625 
   2626         nsecs_t rollingShutterSkew = Sensor::kFrameDurationRange[0];
   2627         mCurrentRequest.settings.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
   2628                 &rollingShutterSkew, 1);
   2629 
   2630         float focusRange[] = { 1.0f/5.0f, 0 }; // 5 m to infinity in focus
   2631         mCurrentRequest.settings.update(ANDROID_LENS_FOCUS_RANGE,
   2632                 focusRange, sizeof(focusRange)/sizeof(float));
   2633     }
   2634 
   2635     if (mParent->hasCapability(DEPTH_OUTPUT)) {
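             // Echo the static lens pose and calibration entries from
             // mCameraInfo into the per-frame result. The lookups assume the
             // static metadata contains these keys; the return value of
             // find_camera_metadata_entry() is not checked here.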
   2636         camera_metadata_entry_t entry;
   2637 
   2638         find_camera_metadata_entry(mParent->mCameraInfo, ANDROID_LENS_POSE_TRANSLATION, &entry);
   2639         mCurrentRequest.settings.update(ANDROID_LENS_POSE_TRANSLATION,
   2640                 entry.data.f, entry.count);
   2641 
   2642         find_camera_metadata_entry(mParent->mCameraInfo, ANDROID_LENS_POSE_ROTATION, &entry);
   2643         mCurrentRequest.settings.update(ANDROID_LENS_POSE_ROTATION,
   2644                 entry.data.f, entry.count);
   2645 
   2646         find_camera_metadata_entry(mParent->mCameraInfo, ANDROID_LENS_INTRINSIC_CALIBRATION, &entry);
   2647         mCurrentRequest.settings.update(ANDROID_LENS_INTRINSIC_CALIBRATION,
   2648                 entry.data.f, entry.count);
   2649 
   2650         find_camera_metadata_entry(mParent->mCameraInfo, ANDROID_LENS_RADIAL_DISTORTION, &entry);
   2651         mCurrentRequest.settings.update(ANDROID_LENS_RADIAL_DISTORTION,
   2652                 entry.data.f, entry.count);
   2653     }
   2654 
   2655     mCurrentRequest.settings.update(ANDROID_SENSOR_TIMESTAMP,
   2656             &captureTime, 1);
   2657 
   2658 
   2659     // JPEGs take a stage longer
   2660     const uint8_t pipelineDepth = needJpeg ? kMaxBufferCount : kMaxBufferCount - 1;
   2661     mCurrentRequest.settings.update(ANDROID_REQUEST_PIPELINE_DEPTH,
   2662             &pipelineDepth, 1);
   2663 
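         // Fill out the capture result; the metadata is delivered in a single
         // chunk, so partial_result is set to 1.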
   2664     result.frame_number = mCurrentRequest.frameNumber;
   2665     result.result = mCurrentRequest.settings.getAndLock();
   2666     result.num_output_buffers = mCurrentRequest.buffers->size();
   2667     result.output_buffers = mCurrentRequest.buffers->array();
   2668     result.input_buffer = nullptr;
   2669     result.partial_result = 1;
   2670 
   2671     // Go idle if queue is empty, before sending result
   2672     bool signalIdle = false;
   2673     {
   2674         Mutex::Autolock l(mLock);
   2675         if (mInFlightQueue.empty()) {
   2676             mThreadActive = false;
   2677             signalIdle = true;
   2678         }
   2679     }
   2680     if (signalIdle) mParent->signalReadoutIdle();
   2681 
   2682     // Send it off to the framework
   2683     ALOGVV("%s: ReadoutThread: Send result to framework",
   2684             __FUNCTION__);
   2685     mParent->sendCaptureResult(&result);
   2686 
   2687     // Clean up
   2688     mCurrentRequest.settings.unlock(result.result);
   2689 
   2690     delete mCurrentRequest.buffers;
   2691     mCurrentRequest.buffers = NULL;
   2692     if (!needJpeg) {
   2693         delete mCurrentRequest.sensorBuffers;
   2694         mCurrentRequest.sensorBuffers = NULL;
   2695     }
   2696     mCurrentRequest.settings.clear();
   2697 
   2698     return true;
   2699 }
   2700 
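         // Callback from the JPEG compressor: returns the compressed blob
         // buffer to the framework in a buffers-only capture result (no
         // metadata, partial_result of 0).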
   2701 void EmulatedFakeCamera3::ReadoutThread::onJpegDone(
   2702         const StreamBuffer &jpegBuffer, bool success) {
   2703     Mutex::Autolock jl(mJpegLock);
   2704 
   2705     GrallocModule::getInstance().unlock(*(jpegBuffer.buffer));
   2706 
   2707     mJpegHalBuffer.status = success ?
   2708             CAMERA3_BUFFER_STATUS_OK : CAMERA3_BUFFER_STATUS_ERROR;
   2709     mJpegHalBuffer.acquire_fence = -1;
   2710     mJpegHalBuffer.release_fence = -1;
   2711     mJpegWaiting = false;
   2712 
   2713     camera3_capture_result result;
   2714 
   2715     result.frame_number = mJpegFrameNumber;
   2716     result.result = NULL;
   2717     result.num_output_buffers = 1;
   2718     result.output_buffers = &mJpegHalBuffer;
   2719     result.input_buffer = nullptr;
   2720     result.partial_result = 0;
   2721 
   2722     if (!success) {
   2723         ALOGE("%s: Compression failure, returning error state buffer to"
   2724                 " framework", __FUNCTION__);
   2725     } else {
   2726         ALOGV("%s: Compression complete, returning buffer to framework",
   2727                 __FUNCTION__);
   2728     }
   2729 
   2730     mParent->sendCaptureResult(&result);
   2731 }
   2732 
   2733 void EmulatedFakeCamera3::ReadoutThread::onJpegInputDone(
   2734         const StreamBuffer &inputBuffer) {
   2735     // Should never get here, since the input buffer has to be returned
   2736     // by end of processCaptureRequest
   2737     ALOGE("%s: Unexpected input buffer from JPEG compressor!", __FUNCTION__);
   2738 }
   2739 
   2740 
    2741 } // namespace android
   2742