      1 /*
      2  * Copyright (C) 2013 The Android Open Source Project
      3  *
      4  * Licensed under the Apache License, Version 2.0 (the "License");
      5  * you may not use this file except in compliance with the License.
      6  * You may obtain a copy of the License at
      7  *
      8  *      http://www.apache.org/licenses/LICENSE-2.0
      9  *
     10  * Unless required by applicable law or agreed to in writing, software
     11  * distributed under the License is distributed on an "AS IS" BASIS,
     12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     13  * See the License for the specific language governing permissions and
     14  * limitations under the License.
     15  */
     16 
     17 /*
 * Contains the implementation of the EmulatedFakeCamera3 class, which
 * encapsulates the functionality of an advanced fake camera.
     20  */
     21 
     22 #include <inttypes.h>
     23 
     24 //#define LOG_NDEBUG 0
     25 //#define LOG_NNDEBUG 0
     26 #define LOG_TAG "EmulatedCamera_FakeCamera3"
     27 #include <cutils/properties.h>
     28 #include <utils/Log.h>
     29 
     30 #include "EmulatedFakeCamera3.h"
     31 #include "EmulatedCameraFactory.h"
     32 #include <ui/Fence.h>
     33 #include "GrallocModule.h"
     34 
     35 #include "fake-pipeline2/Sensor.h"
     36 #include "fake-pipeline2/JpegCompressor.h"
     37 #include <cmath>
     38 
     39 #include <vector>
     40 
     41 #if defined(LOG_NNDEBUG) && LOG_NNDEBUG == 0
     42 #define ALOGVV ALOGV
     43 #else
     44 #define ALOGVV(...) ((void)0)
     45 #endif
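// ALOGVV provides very verbose per-frame logging; it is compiled in only when
// LOG_NNDEBUG is defined as 0 (see the commented-out define above).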
     46 
     47 namespace android {
     48 
     49 /**
     50  * Constants for camera capabilities
     51  */
     52 
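// Time units expressed in nanoseconds (the unit of nsecs_t): USEC, MSEC, and
// SEC are the number of nanoseconds in a microsecond, millisecond, and second.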
     53 const int64_t USEC = 1000LL;
     54 const int64_t MSEC = USEC * 1000LL;
     55 const int64_t SEC = MSEC * 1000LL;
     56 
     57 const int32_t EmulatedFakeCamera3::kAvailableFormats[] = {
     58         HAL_PIXEL_FORMAT_RAW16,
     59         HAL_PIXEL_FORMAT_BLOB,
     60         HAL_PIXEL_FORMAT_RGBA_8888,
     61         HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
     62         // These are handled by YCbCr_420_888
     63         //        HAL_PIXEL_FORMAT_YV12,
     64         //        HAL_PIXEL_FORMAT_YCrCb_420_SP,
     65         HAL_PIXEL_FORMAT_YCbCr_420_888,
     66         HAL_PIXEL_FORMAT_Y16
     67 };
     68 
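// (width, height) pairs; constructStaticInfo() picks the largest pair as the
// sensor size, subject to a 640x480 minimum.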
     69 const uint32_t EmulatedFakeCamera3::kAvailableRawSizes[2] = {
     70     640, 480
     71     //    mSensorWidth, mSensorHeight
     72 };
     73 
     74 
     75 /**
     76  * 3A constants
     77  */
     78 
     79 // Default exposure and gain targets for different scenarios
     80 const nsecs_t EmulatedFakeCamera3::kNormalExposureTime       = 10 * MSEC;
     81 const nsecs_t EmulatedFakeCamera3::kFacePriorityExposureTime = 30 * MSEC;
     82 const int     EmulatedFakeCamera3::kNormalSensitivity        = 100;
     83 const int     EmulatedFakeCamera3::kFacePrioritySensitivity  = 400;
// CTS requires an 8-frame timeout in waitForAeStable
     85 const float   EmulatedFakeCamera3::kExposureTrackRate        = 0.2;
     86 const int     EmulatedFakeCamera3::kPrecaptureMinFrames      = 10;
     87 const int     EmulatedFakeCamera3::kStableAeMaxFrames        = 100;
     88 const float   EmulatedFakeCamera3::kExposureWanderMin        = -2;
     89 const float   EmulatedFakeCamera3::kExposureWanderMax        = 1;
     90 
     91 /**
     92  * Camera device lifecycle methods
     93  */
     94 
     95 EmulatedFakeCamera3::EmulatedFakeCamera3(int cameraId, bool facingBack,
     96         struct hw_module_t* module) :
     97         EmulatedCamera3(cameraId, module),
     98         mFacingBack(facingBack) {
     99     ALOGI("Constructing emulated fake camera 3: ID %d, facing %s",
    100             mCameraID, facingBack ? "back" : "front");
    101 
    102     for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) {
    103         mDefaultTemplates[i] = NULL;
    104     }
    105 }
    106 
    107 EmulatedFakeCamera3::~EmulatedFakeCamera3() {
    108     for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) {
    109         if (mDefaultTemplates[i] != NULL) {
    110             free_camera_metadata(mDefaultTemplates[i]);
    111         }
    112     }
    113 }
    114 
    115 status_t EmulatedFakeCamera3::Initialize() {
    116     ALOGV("%s: E", __FUNCTION__);
    117     status_t res;
    118 
    119     if (mStatus != STATUS_ERROR) {
    120         ALOGE("%s: Already initialized!", __FUNCTION__);
    121         return INVALID_OPERATION;
    122     }
    123 
    124     res = getCameraCapabilities();
    125     if (res != OK) {
    126         ALOGE("%s: Unable to get camera capabilities: %s (%d)",
    127                 __FUNCTION__, strerror(-res), res);
    128         return res;
    129     }
    130 
    131     res = constructStaticInfo();
    132     if (res != OK) {
    133         ALOGE("%s: Unable to allocate static info: %s (%d)",
    134                 __FUNCTION__, strerror(-res), res);
    135         return res;
    136     }
    137 
    138     return EmulatedCamera3::Initialize();
    139 }
    140 
    141 status_t EmulatedFakeCamera3::connectCamera(hw_device_t** device) {
    142     ALOGV("%s: E", __FUNCTION__);
    143     Mutex::Autolock l(mLock);
    144     status_t res;
    145 
    146     if (mStatus != STATUS_CLOSED) {
    147         ALOGE("%s: Can't connect in state %d", __FUNCTION__, mStatus);
    148         return INVALID_OPERATION;
    149     }
    150 
    151     mSensor = new Sensor(mSensorWidth, mSensorHeight);
    152     mSensor->setSensorListener(this);
    153 
    154     res = mSensor->startUp();
    155     if (res != NO_ERROR) return res;
    156 
    157     mReadoutThread = new ReadoutThread(this);
    158     mJpegCompressor = new JpegCompressor();
    159 
    160     res = mReadoutThread->run("EmuCam3::readoutThread");
    161     if (res != NO_ERROR) return res;
    162 
    163     // Initialize fake 3A
    164 
    165     mControlMode  = ANDROID_CONTROL_MODE_AUTO;
    166     mFacePriority = false;
    167     mAeMode       = ANDROID_CONTROL_AE_MODE_ON;
    168     mAfMode       = ANDROID_CONTROL_AF_MODE_AUTO;
    169     mAwbMode      = ANDROID_CONTROL_AWB_MODE_AUTO;
    170     mAeState      = ANDROID_CONTROL_AE_STATE_INACTIVE;
    171     mAfState      = ANDROID_CONTROL_AF_STATE_INACTIVE;
    172     mAwbState     = ANDROID_CONTROL_AWB_STATE_INACTIVE;
    173     mAeCounter    = 0;
    174     mAeTargetExposureTime = kNormalExposureTime;
    175     mAeCurrentExposureTime = kNormalExposureTime;
    176     mAeCurrentSensitivity  = kNormalSensitivity;
    177 
    178     return EmulatedCamera3::connectCamera(device);
    179 }
    180 
    181 status_t EmulatedFakeCamera3::closeCamera() {
    182     ALOGV("%s: E", __FUNCTION__);
    183     status_t res;
    184     {
    185         Mutex::Autolock l(mLock);
    186         if (mStatus == STATUS_CLOSED) return OK;
    187 
    188         res = mSensor->shutDown();
    189         if (res != NO_ERROR) {
    190             ALOGE("%s: Unable to shut down sensor: %d", __FUNCTION__, res);
    191             return res;
    192         }
    193         mSensor.clear();
    194 
    195         mReadoutThread->requestExit();
    196     }
    197 
    198     mReadoutThread->join();
    199 
    200     {
    201         Mutex::Autolock l(mLock);
    202         // Clear out private stream information
    203         for (StreamIterator s = mStreams.begin(); s != mStreams.end(); s++) {
    204             PrivateStreamInfo *privStream =
    205                     static_cast<PrivateStreamInfo*>((*s)->priv);
    206             delete privStream;
    207             (*s)->priv = NULL;
    208         }
    209         mStreams.clear();
    210         mReadoutThread.clear();
    211     }
    212 
    213     return EmulatedCamera3::closeCamera();
    214 }
    215 
    216 status_t EmulatedFakeCamera3::getCameraInfo(struct camera_info *info) {
    217     info->facing = mFacingBack ? CAMERA_FACING_BACK : CAMERA_FACING_FRONT;
    218     info->orientation = gEmulatedCameraFactory.getFakeCameraOrientation();
    219     return EmulatedCamera3::getCameraInfo(info);
    220 }
    221 
    222 /**
    223  * Camera3 interface methods
    224  */
    225 
    226 status_t EmulatedFakeCamera3::configureStreams(
    227         camera3_stream_configuration *streamList) {
    228     Mutex::Autolock l(mLock);
    if (mStatus != STATUS_OPEN && mStatus != STATUS_READY) {
        ALOGE("%s: Cannot configure streams in state %d",
                __FUNCTION__, mStatus);
        return NO_INIT;
    }

    /**
     * Sanity-check input list.
     */
    if (streamList == NULL) {
        ALOGE("%s: NULL stream configuration", __FUNCTION__);
        return BAD_VALUE;
    }

    ALOGV("%s: %d streams", __FUNCTION__, streamList->num_streams);
    244 
    245     if (streamList->streams == NULL) {
    246         ALOGE("%s: NULL stream list", __FUNCTION__);
    247         return BAD_VALUE;
    248     }
    249 
    250     if (streamList->num_streams < 1) {
    251         ALOGE("%s: Bad number of streams requested: %d", __FUNCTION__,
    252                 streamList->num_streams);
    253         return BAD_VALUE;
    254     }
    255 
    256     camera3_stream_t *inputStream = NULL;
    257     for (size_t i = 0; i < streamList->num_streams; i++) {
    258         camera3_stream_t *newStream = streamList->streams[i];
    259 
    260         if (newStream == NULL) {
    261             ALOGE("%s: Stream index %zu was NULL",
    262                   __FUNCTION__, i);
    263             return BAD_VALUE;
    264         }
    265 
    266         ALOGV("%s: Stream %p (id %zu), type %d, usage 0x%x, format 0x%x",
    267                 __FUNCTION__, newStream, i, newStream->stream_type,
    268                 newStream->usage,
    269                 newStream->format);
    270 
    271         if (newStream->stream_type == CAMERA3_STREAM_INPUT ||
    272             newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
    273             if (inputStream != NULL) {
    274 
    275                 ALOGE("%s: Multiple input streams requested!", __FUNCTION__);
    276                 return BAD_VALUE;
    277             }
    278             inputStream = newStream;
    279         }
    280 
    281         bool validFormat = false;
    282         for (size_t f = 0;
    283              f < sizeof(kAvailableFormats)/sizeof(kAvailableFormats[0]);
    284              f++) {
    285             if (newStream->format == kAvailableFormats[f]) {
    286                 validFormat = true;
    287                 break;
    288             }
    289         }
    290         if (!validFormat) {
    291             ALOGE("%s: Unsupported stream format 0x%x requested",
    292                     __FUNCTION__, newStream->format);
    293             return BAD_VALUE;
    294         }
    295     }
    296     mInputStream = inputStream;
    297 
    298     /**
    299      * Initially mark all existing streams as not alive
    300      */
    301     for (StreamIterator s = mStreams.begin(); s != mStreams.end(); ++s) {
    302         PrivateStreamInfo *privStream =
    303                 static_cast<PrivateStreamInfo*>((*s)->priv);
    304         privStream->alive = false;
    305     }
    306 
    307     /**
    308      * Find new streams and mark still-alive ones
    309      */
    310     for (size_t i = 0; i < streamList->num_streams; i++) {
    311         camera3_stream_t *newStream = streamList->streams[i];
    312         if (newStream->priv == NULL) {
    313             // New stream, construct info
    314             PrivateStreamInfo *privStream = new PrivateStreamInfo();
    315             privStream->alive = true;
    318             newStream->priv = privStream;
    319             mStreams.push_back(newStream);
    320         } else {
    321             // Existing stream, mark as still alive.
    322             PrivateStreamInfo *privStream =
    323                     static_cast<PrivateStreamInfo*>(newStream->priv);
    324             privStream->alive = true;
    325         }
    326         // Always update usage and max buffers
    327         newStream->max_buffers = kMaxBufferCount;
    328         switch (newStream->stream_type) {
    329             case CAMERA3_STREAM_OUTPUT:
    330                 newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
    331                 break;
    332             case CAMERA3_STREAM_INPUT:
    333                 newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
    334                 break;
    335             case CAMERA3_STREAM_BIDIRECTIONAL:
    336                 newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
    337                         GRALLOC_USAGE_HW_CAMERA_WRITE;
    338                 break;
    339         }
    340     }
    341 
    342     /**
    343      * Reap the dead streams
    344      */
    345     for (StreamIterator s = mStreams.begin(); s != mStreams.end();) {
    346         PrivateStreamInfo *privStream =
    347                 static_cast<PrivateStreamInfo*>((*s)->priv);
    348         if (!privStream->alive) {
    349             (*s)->priv = NULL;
    350             delete privStream;
    351             s = mStreams.erase(s);
    352         } else {
    353             ++s;
    354         }
    355     }
    356 
    357     /**
    358      * Can't reuse settings across configure call
    359      */
    360     mPrevSettings.clear();
    361 
    362     return OK;
    363 }
    364 
    365 status_t EmulatedFakeCamera3::registerStreamBuffers(
    366         const camera3_stream_buffer_set *bufferSet) {
    367     ALOGV("%s: E", __FUNCTION__);
    368     Mutex::Autolock l(mLock);
    369 
    370     // Should not be called in HAL versions >= 3.2
    371 
    372     ALOGE("%s: Should not be invoked on new HALs!",
    373             __FUNCTION__);
    374     return NO_INIT;
    375 }
    376 
    377 const camera_metadata_t* EmulatedFakeCamera3::constructDefaultRequestSettings(
    378         int type) {
    379     ALOGV("%s: E", __FUNCTION__);
    380     Mutex::Autolock l(mLock);
    381 
    382     if (type < 0 || type >= CAMERA3_TEMPLATE_COUNT) {
    383         ALOGE("%s: Unknown request settings template: %d",
    384                 __FUNCTION__, type);
    385         return NULL;
    386     }
    387 
    388     if (!hasCapability(BACKWARD_COMPATIBLE) && type != CAMERA3_TEMPLATE_PREVIEW) {
    389         ALOGE("%s: Template %d not supported w/o BACKWARD_COMPATIBLE capability",
    390                 __FUNCTION__, type);
    391         return NULL;
    392     }
    393 
    394     /**
    395      * Cache is not just an optimization - pointer returned has to live at
    396      * least as long as the camera device instance does.
    397      */
    398     if (mDefaultTemplates[type] != NULL) {
    399         return mDefaultTemplates[type];
    400     }
    401 
    402     CameraMetadata settings;
    403 
    404     /** android.request */
    405 
    406     static const uint8_t metadataMode = ANDROID_REQUEST_METADATA_MODE_FULL;
    407     settings.update(ANDROID_REQUEST_METADATA_MODE, &metadataMode, 1);
    408 
    409     static const int32_t id = 0;
    410     settings.update(ANDROID_REQUEST_ID, &id, 1);
    411 
    412     static const int32_t frameCount = 0;
    413     settings.update(ANDROID_REQUEST_FRAME_COUNT, &frameCount, 1);
    414 
    415     /** android.lens */
    416 
    417     static const float focalLength = 5.0f;
    418     settings.update(ANDROID_LENS_FOCAL_LENGTH, &focalLength, 1);
    419 
    420     if (hasCapability(BACKWARD_COMPATIBLE)) {
    421         static const float focusDistance = 0;
    422         settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focusDistance, 1);
    423 
    424         static const float aperture = 2.8f;
    425         settings.update(ANDROID_LENS_APERTURE, &aperture, 1);
    426 
    427         static const float filterDensity = 0;
    428         settings.update(ANDROID_LENS_FILTER_DENSITY, &filterDensity, 1);
    429 
    430         static const uint8_t opticalStabilizationMode =
    431                 ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
    432         settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
    433                 &opticalStabilizationMode, 1);
    434 
    435         // FOCUS_RANGE set only in frame
    436     }
    437 
    438     /** android.sensor */
    439 
    440     if (hasCapability(MANUAL_SENSOR)) {
    441         static const int64_t exposureTime = 10 * MSEC;
    442         settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &exposureTime, 1);
    443 
    444         static const int64_t frameDuration = 33333333L; // 1/30 s
    445         settings.update(ANDROID_SENSOR_FRAME_DURATION, &frameDuration, 1);
    446 
    447         static const int32_t sensitivity = 100;
    448         settings.update(ANDROID_SENSOR_SENSITIVITY, &sensitivity, 1);
    449     }
    450 
    451     // TIMESTAMP set only in frame
    452 
    453     /** android.flash */
    454 
    455     if (hasCapability(BACKWARD_COMPATIBLE)) {
    456         static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
    457         settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
    458 
    459         static const uint8_t flashPower = 10;
    460         settings.update(ANDROID_FLASH_FIRING_POWER, &flashPower, 1);
    461 
    462         static const int64_t firingTime = 0;
    463         settings.update(ANDROID_FLASH_FIRING_TIME, &firingTime, 1);
    464     }
    465 
    466     /** Processing block modes */
    467     if (hasCapability(MANUAL_POST_PROCESSING)) {
    468         uint8_t hotPixelMode = 0;
    469         uint8_t demosaicMode = 0;
    470         uint8_t noiseMode = 0;
    471         uint8_t shadingMode = 0;
    472         uint8_t colorMode = 0;
    473         uint8_t tonemapMode = 0;
    474         uint8_t edgeMode = 0;
    475         switch (type) {
    476             case CAMERA3_TEMPLATE_STILL_CAPTURE:
    477                 // fall-through
    478             case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
    479                 // fall-through
    480             case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
    481                 hotPixelMode = ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY;
    482                 demosaicMode = ANDROID_DEMOSAIC_MODE_HIGH_QUALITY;
    483                 noiseMode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
    484                 shadingMode = ANDROID_SHADING_MODE_HIGH_QUALITY;
    485                 colorMode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY;
    486                 tonemapMode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
    487                 edgeMode = ANDROID_EDGE_MODE_HIGH_QUALITY;
    488                 break;
    489             case CAMERA3_TEMPLATE_PREVIEW:
    490                 // fall-through
    491             case CAMERA3_TEMPLATE_VIDEO_RECORD:
    492                 // fall-through
    493             default:
    494                 hotPixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
    495                 demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
    496                 noiseMode = ANDROID_NOISE_REDUCTION_MODE_FAST;
    497                 shadingMode = ANDROID_SHADING_MODE_FAST;
    498                 colorMode = ANDROID_COLOR_CORRECTION_MODE_FAST;
    499                 tonemapMode = ANDROID_TONEMAP_MODE_FAST;
    500                 edgeMode = ANDROID_EDGE_MODE_FAST;
    501                 break;
    502         }
    503         settings.update(ANDROID_HOT_PIXEL_MODE, &hotPixelMode, 1);
    504         settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
    505         settings.update(ANDROID_NOISE_REDUCTION_MODE, &noiseMode, 1);
    506         settings.update(ANDROID_SHADING_MODE, &shadingMode, 1);
    507         settings.update(ANDROID_COLOR_CORRECTION_MODE, &colorMode, 1);
    508         settings.update(ANDROID_TONEMAP_MODE, &tonemapMode, 1);
    509         settings.update(ANDROID_EDGE_MODE, &edgeMode, 1);
    510     }
    511 
    512     /** android.colorCorrection */
    513 
    514     if (hasCapability(MANUAL_POST_PROCESSING)) {
    515         static const camera_metadata_rational colorTransform[9] = {
    516             {1,1}, {0,1}, {0,1},
    517             {0,1}, {1,1}, {0,1},
    518             {0,1}, {0,1}, {1,1}
    519         };
    520         settings.update(ANDROID_COLOR_CORRECTION_TRANSFORM, colorTransform, 9);
    521 
    522         static const float colorGains[4] = {
    523             1.0f, 1.0f, 1.0f, 1.0f
    524         };
    525         settings.update(ANDROID_COLOR_CORRECTION_GAINS, colorGains, 4);
    526     }
    527 
    528     /** android.tonemap */
    529 
    530     if (hasCapability(MANUAL_POST_PROCESSING)) {
    531         static const float tonemapCurve[4] = {
    532             0.f, 0.f,
    533             1.f, 1.f
    534         };
    535         settings.update(ANDROID_TONEMAP_CURVE_RED, tonemapCurve, 4);
    536         settings.update(ANDROID_TONEMAP_CURVE_GREEN, tonemapCurve, 4);
    537         settings.update(ANDROID_TONEMAP_CURVE_BLUE, tonemapCurve, 4);
    538     }
    539 
    540     /** android.scaler */
    541     if (hasCapability(BACKWARD_COMPATIBLE)) {
        const int32_t cropRegion[4] = {
            0, 0, mSensorWidth, mSensorHeight
        };
    545         settings.update(ANDROID_SCALER_CROP_REGION, cropRegion, 4);
    546     }
    547 
    548     /** android.jpeg */
    549     if (hasCapability(BACKWARD_COMPATIBLE)) {
    550         static const uint8_t jpegQuality = 80;
    551         settings.update(ANDROID_JPEG_QUALITY, &jpegQuality, 1);
    552 
    553         static const int32_t thumbnailSize[2] = {
    554             640, 480
    555         };
    556         settings.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSize, 2);
    557 
    558         static const uint8_t thumbnailQuality = 80;
    559         settings.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &thumbnailQuality, 1);
    560 
    561         static const double gpsCoordinates[2] = {
    562             0, 0
    563         };
    564         settings.update(ANDROID_JPEG_GPS_COORDINATES, gpsCoordinates, 2);
    565 
    566         static const uint8_t gpsProcessingMethod[32] = "None";
    567         settings.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, gpsProcessingMethod, 32);
    568 
    569         static const int64_t gpsTimestamp = 0;
    570         settings.update(ANDROID_JPEG_GPS_TIMESTAMP, &gpsTimestamp, 1);
    571 
    572         static const int32_t jpegOrientation = 0;
    573         settings.update(ANDROID_JPEG_ORIENTATION, &jpegOrientation, 1);
    574     }
    575 
    576     /** android.stats */
    577 
    578     if (hasCapability(BACKWARD_COMPATIBLE)) {
    579         static const uint8_t faceDetectMode =
    580                 ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
    581         settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
    582 
    583         static const uint8_t hotPixelMapMode =
    584                 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
    585         settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
    586     }
    587 
    588     // faceRectangles, faceScores, faceLandmarks, faceIds, histogram,
    589     // sharpnessMap only in frames
    590 
    591     /** android.control */
    592 
    593     uint8_t controlIntent = 0;
    594     switch (type) {
    595       case CAMERA3_TEMPLATE_PREVIEW:
    596         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
    597         break;
    598       case CAMERA3_TEMPLATE_STILL_CAPTURE:
    599         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
    600         break;
    601       case CAMERA3_TEMPLATE_VIDEO_RECORD:
    602         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
    603         break;
    604       case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
    605         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
    606         break;
    607       case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
    608         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
    609         break;
    610       case CAMERA3_TEMPLATE_MANUAL:
    611         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
    612         break;
    613       default:
    614         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
    615         break;
    616     }
    617     settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
    618 
    619     const uint8_t controlMode = (type == CAMERA3_TEMPLATE_MANUAL) ?
    620             ANDROID_CONTROL_MODE_OFF :
    621             ANDROID_CONTROL_MODE_AUTO;
    622     settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
    623 
    624     int32_t aeTargetFpsRange[2] = {
    625         5, 30
    626     };
    627     if (type == CAMERA3_TEMPLATE_VIDEO_RECORD || type == CAMERA3_TEMPLATE_VIDEO_SNAPSHOT) {
    628         aeTargetFpsRange[0] = 30;
    629     }
    630     settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, aeTargetFpsRange, 2);
    631 
    632     if (hasCapability(BACKWARD_COMPATIBLE)) {
    633 
    634         static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
    635         settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
    636 
    637         static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
    638         settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
    639 
    640         const uint8_t aeMode = (type == CAMERA3_TEMPLATE_MANUAL) ?
    641                 ANDROID_CONTROL_AE_MODE_OFF :
    642                 ANDROID_CONTROL_AE_MODE_ON;
    643         settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
    644 
    645         static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
    646         settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
    647 
    648         static const int32_t controlRegions[5] = {
    649             0, 0, 0, 0, 0
    650         };
    651         settings.update(ANDROID_CONTROL_AE_REGIONS, controlRegions, 5);
    652 
    653         static const int32_t aeExpCompensation = 0;
    654         settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &aeExpCompensation, 1);
    655 
    656 
    657         static const uint8_t aeAntibandingMode =
    658                 ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
    659         settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &aeAntibandingMode, 1);
    660 
    661         static const uint8_t aePrecaptureTrigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
    662         settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &aePrecaptureTrigger, 1);
    663 
    664         const uint8_t awbMode = (type == CAMERA3_TEMPLATE_MANUAL) ?
    665                 ANDROID_CONTROL_AWB_MODE_OFF :
    666                 ANDROID_CONTROL_AWB_MODE_AUTO;
    667         settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
    668 
    669         static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
    670         settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
    671 
    672         uint8_t afMode = 0;
    673 
    674         if (mFacingBack) {
    675             switch (type) {
    676                 case CAMERA3_TEMPLATE_PREVIEW:
    677                     afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
    678                     break;
    679                 case CAMERA3_TEMPLATE_STILL_CAPTURE:
    680                     afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
    681                     break;
    682                 case CAMERA3_TEMPLATE_VIDEO_RECORD:
    683                     afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
    684                     break;
    685                 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
    686                     afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
    687                     break;
    688                 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
    689                     afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
    690                     break;
    691                 case CAMERA3_TEMPLATE_MANUAL:
    692                     afMode = ANDROID_CONTROL_AF_MODE_OFF;
    693                     break;
    694                 default:
    695                     afMode = ANDROID_CONTROL_AF_MODE_AUTO;
    696                     break;
    697             }
    698         } else {
    699             afMode = ANDROID_CONTROL_AF_MODE_OFF;
    700         }
    701         settings.update(ANDROID_CONTROL_AF_MODE, &afMode, 1);
    702 
    703         settings.update(ANDROID_CONTROL_AF_REGIONS, controlRegions, 5);
    704 
    705         static const uint8_t afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
    706         settings.update(ANDROID_CONTROL_AF_TRIGGER, &afTrigger, 1);
    707 
    708         static const uint8_t vstabMode =
    709                 ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
    710         settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vstabMode, 1);
    711 
    712         static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
    713         settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
    714 
    715         static const uint8_t lensShadingMapMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
    716         settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMapMode, 1);
    717 
    718         static const uint8_t aberrationMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
    719         settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &aberrationMode, 1);
    720 
    721         static const int32_t testPatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
    722         settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testPatternMode, 1);
    723     }
    724 
    725     mDefaultTemplates[type] = settings.release();
    726 
    727     return mDefaultTemplates[type];
    728 }
    729 
    730 status_t EmulatedFakeCamera3::processCaptureRequest(
    731         camera3_capture_request *request) {
    732 
    733     Mutex::Autolock l(mLock);
    734     status_t res;
    735 
    736     /** Validation */
    737 
    738     if (mStatus < STATUS_READY) {
    739         ALOGE("%s: Can't submit capture requests in state %d", __FUNCTION__,
    740                 mStatus);
    741         return INVALID_OPERATION;
    742     }
    743 
    744     if (request == NULL) {
    745         ALOGE("%s: NULL request!", __FUNCTION__);
    746         return BAD_VALUE;
    747     }
    748 
    749     uint32_t frameNumber = request->frame_number;
    750 
    751     if (request->settings == NULL && mPrevSettings.isEmpty()) {
        ALOGE("%s: Request %d: NULL settings for first request after "
                "configureStreams()", __FUNCTION__, frameNumber);
    754         return BAD_VALUE;
    755     }
    756 
    757     if (request->input_buffer != NULL &&
    758             request->input_buffer->stream != mInputStream) {
    759         ALOGE("%s: Request %d: Input buffer not from input stream!",
    760                 __FUNCTION__, frameNumber);
    761         ALOGV("%s: Bad stream %p, expected: %p",
    762               __FUNCTION__, request->input_buffer->stream,
    763               mInputStream);
    764         ALOGV("%s: Bad stream type %d, expected stream type %d",
    765               __FUNCTION__, request->input_buffer->stream->stream_type,
    766               mInputStream ? mInputStream->stream_type : -1);
    767 
    768         return BAD_VALUE;
    769     }
    770 
    771     if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
    772         ALOGE("%s: Request %d: No output buffers provided!",
    773                 __FUNCTION__, frameNumber);
    774         return BAD_VALUE;
    775     }
    776 
    777     // Validate all buffers, starting with input buffer if it's given
    778 
    779     ssize_t idx;
    780     const camera3_stream_buffer_t *b;
    781     if (request->input_buffer != NULL) {
    782         idx = -1;
    783         b = request->input_buffer;
    784     } else {
    785         idx = 0;
    786         b = request->output_buffers;
    787     }
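    // idx == -1 denotes the optional input buffer; indices 0 and up then walk
    // request->output_buffers in order.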
    788     do {
    789         PrivateStreamInfo *priv =
    790                 static_cast<PrivateStreamInfo*>(b->stream->priv);
    791         if (priv == NULL) {
            ALOGE("%s: Request %d: Buffer %zd: Unconfigured stream!",
                    __FUNCTION__, frameNumber, idx);
    794             return BAD_VALUE;
    795         }
    796         if (!priv->alive) {
            ALOGE("%s: Request %d: Buffer %zd: Dead stream!",
                    __FUNCTION__, frameNumber, idx);
    799             return BAD_VALUE;
    800         }
    801         if (b->status != CAMERA3_BUFFER_STATUS_OK) {
            ALOGE("%s: Request %d: Buffer %zd: Status not OK!",
                    __FUNCTION__, frameNumber, idx);
    804             return BAD_VALUE;
    805         }
    806         if (b->release_fence != -1) {
            ALOGE("%s: Request %d: Buffer %zd: Has a release fence!",
                    __FUNCTION__, frameNumber, idx);
    809             return BAD_VALUE;
    810         }
    811         if (b->buffer == NULL) {
            ALOGE("%s: Request %d: Buffer %zd: NULL buffer handle!",
                    __FUNCTION__, frameNumber, idx);
    814             return BAD_VALUE;
    815         }
    816         idx++;
    817         b = &(request->output_buffers[idx]);
    818     } while (idx < (ssize_t)request->num_output_buffers);
    819 
    820     // TODO: Validate settings parameters
    821 
    822     /**
    823      * Start processing this request
    824      */
    825 
    826     mStatus = STATUS_ACTIVE;
    827 
    828     CameraMetadata settings;
    829 
    830     if (request->settings == NULL) {
    831         settings.acquire(mPrevSettings);
    832     } else {
    833         settings = request->settings;
    834     }
    835 
    836     res = process3A(settings);
    837     if (res != OK) {
    838         return res;
    839     }
    840 
    841     // TODO: Handle reprocessing
    842 
    843     /**
    844      * Get ready for sensor config
    845      */
    846 
    847     nsecs_t  exposureTime;
    848     nsecs_t  frameDuration;
    849     uint32_t sensitivity;
    850     bool     needJpeg = false;
    851     camera_metadata_entry_t entry;
    852 
    853     entry = settings.find(ANDROID_SENSOR_EXPOSURE_TIME);
    854     exposureTime = (entry.count > 0) ? entry.data.i64[0] : Sensor::kExposureTimeRange[0];
    855     entry = settings.find(ANDROID_SENSOR_FRAME_DURATION);
    frameDuration = (entry.count > 0) ? entry.data.i64[0] : Sensor::kFrameDurationRange[0];
    857     entry = settings.find(ANDROID_SENSOR_SENSITIVITY);
    858     sensitivity = (entry.count > 0) ? entry.data.i32[0] : Sensor::kSensitivityRange[0];
    859 
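    // The frame must be at least as long as the exposure plus the sensor's
    // minimum vertical blanking interval.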
    860     if (exposureTime > frameDuration) {
    861         frameDuration = exposureTime + Sensor::kMinVerticalBlank;
    862         settings.update(ANDROID_SENSOR_FRAME_DURATION, &frameDuration, 1);
    863     }
    864 
    865     Buffers *sensorBuffers = new Buffers();
    866     HalBufferVector *buffers = new HalBufferVector();
    867 
    868     sensorBuffers->setCapacity(request->num_output_buffers);
    869     buffers->setCapacity(request->num_output_buffers);
    870 
    871     // Process all the buffers we got for output, constructing internal buffer
    872     // structures for them, and lock them for writing.
    873     for (size_t i = 0; i < request->num_output_buffers; i++) {
    874         const camera3_stream_buffer &srcBuf = request->output_buffers[i];
    875         StreamBuffer destBuf;
    876         destBuf.streamId = kGenericStreamId;
    877         destBuf.width    = srcBuf.stream->width;
    878         destBuf.height   = srcBuf.stream->height;
    879         // For goldfish, IMPLEMENTATION_DEFINED is always RGBx_8888
    880         destBuf.format = (srcBuf.stream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) ?
    881                 HAL_PIXEL_FORMAT_RGBA_8888 :
    882                 srcBuf.stream->format;
    883         destBuf.stride   = srcBuf.stream->width;
    884         destBuf.dataSpace = srcBuf.stream->data_space;
    885         destBuf.buffer   = srcBuf.buffer;
    886 
    887         if (destBuf.format == HAL_PIXEL_FORMAT_BLOB) {
    888             needJpeg = true;
    889         }
    890 
    891         // Wait on fence
    892         sp<Fence> bufferAcquireFence = new Fence(srcBuf.acquire_fence);
    893         res = bufferAcquireFence->wait(kFenceTimeoutMs);
    894         if (res == TIMED_OUT) {
    895             ALOGE("%s: Request %d: Buffer %zu: Fence timed out after %d ms",
    896                     __FUNCTION__, frameNumber, i, kFenceTimeoutMs);
    897         }
    898         if (res == OK) {
    899             // Lock buffer for writing
    900             if (srcBuf.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
    901                 if (destBuf.format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
    902                     android_ycbcr ycbcr = android_ycbcr();
    903                     res = GrallocModule::getInstance().lock_ycbcr(
    904                         *(destBuf.buffer),
    905                         GRALLOC_USAGE_HW_CAMERA_WRITE,
    906                         0, 0, destBuf.width, destBuf.height,
    907                         &ycbcr);
    908                     // This is only valid because we know that emulator's
    909                     // YCbCr_420_888 is really contiguous NV21 under the hood
    910                     destBuf.img = static_cast<uint8_t*>(ycbcr.y);
    911                 } else {
    912                     ALOGE("Unexpected private format for flexible YUV: 0x%x",
    913                             destBuf.format);
    914                     res = INVALID_OPERATION;
    915                 }
    916             } else {
    917                 res = GrallocModule::getInstance().lock(
    918                     *(destBuf.buffer),
    919                     GRALLOC_USAGE_HW_CAMERA_WRITE,
    920                     0, 0, destBuf.width, destBuf.height,
    921                     (void**)&(destBuf.img));
    922 
    923             }
    924             if (res != OK) {
    925                 ALOGE("%s: Request %d: Buffer %zu: Unable to lock buffer",
    926                         __FUNCTION__, frameNumber, i);
    927             }
    928         }
    929 
    930         if (res != OK) {
    931             // Either waiting or locking failed. Unlock locked buffers and bail
    932             // out.
    933             for (size_t j = 0; j < i; j++) {
    934                 GrallocModule::getInstance().unlock(
                        *(request->output_buffers[j].buffer));
    936             }
    937             delete sensorBuffers;
    938             delete buffers;
    939             return NO_INIT;
    940         }
    941 
    942         sensorBuffers->push_back(destBuf);
    943         buffers->push_back(srcBuf);
    944     }
    945 
    946     /**
    947      * Wait for JPEG compressor to not be busy, if needed
    948      */
    949     if (needJpeg) {
    950         bool ready = mJpegCompressor->waitForDone(kJpegTimeoutNs);
    951         if (!ready) {
    952             ALOGE("%s: Timeout waiting for JPEG compression to complete!",
    953                     __FUNCTION__);
    954             return NO_INIT;
    955         }
    956         res = mJpegCompressor->reserve();
    957         if (res != OK) {
    958             ALOGE("%s: Error managing JPEG compressor resources, can't reserve it!", __FUNCTION__);
    959             return NO_INIT;
    960         }
    961     }
    962 
    963     /**
    964      * Wait until the in-flight queue has room
    965      */
    966     res = mReadoutThread->waitForReadout();
    967     if (res != OK) {
    968         ALOGE("%s: Timeout waiting for previous requests to complete!",
    969                 __FUNCTION__);
    970         return NO_INIT;
    971     }
    972 
    973     /**
     * Wait until the sensor is ready. This can block for lengthy amounts of
     * time with mLock held, but the interface spec is that no other calls may
     * be made to the HAL by the framework while process_capture_request is in
     * progress.
    977      */
    978     int syncTimeoutCount = 0;
    while (!mSensor->waitForVSync(kSyncWaitTimeout)) {
    980         if (mStatus == STATUS_ERROR) {
    981             return NO_INIT;
    982         }
    983         if (syncTimeoutCount == kMaxSyncTimeoutCount) {
    984             ALOGE("%s: Request %d: Sensor sync timed out after %" PRId64 " ms",
    985                     __FUNCTION__, frameNumber,
    986                     kSyncWaitTimeout * kMaxSyncTimeoutCount / 1000000);
    987             return NO_INIT;
    988         }
    989         syncTimeoutCount++;
    990     }
    991 
    992     /**
    993      * Configure sensor and queue up the request to the readout thread
    994      */
    995     mSensor->setExposureTime(exposureTime);
    996     mSensor->setFrameDuration(frameDuration);
    997     mSensor->setSensitivity(sensitivity);
    998     mSensor->setDestinationBuffers(sensorBuffers);
    999     mSensor->setFrameNumber(request->frame_number);
   1000 
   1001     ReadoutThread::Request r;
   1002     r.frameNumber = request->frame_number;
   1003     r.settings = settings;
   1004     r.sensorBuffers = sensorBuffers;
   1005     r.buffers = buffers;
   1006 
   1007     mReadoutThread->queueCaptureRequest(r);
   1008     ALOGVV("%s: Queued frame %d", __FUNCTION__, request->frame_number);
   1009 
   1010     // Cache the settings for next time
   1011     mPrevSettings.acquire(settings);
   1012 
   1013     return OK;
   1014 }
   1015 
   1016 status_t EmulatedFakeCamera3::flush() {
   1017     ALOGW("%s: Not implemented; ignored", __FUNCTION__);
   1018     return OK;
   1019 }
   1020 
   1021 /** Debug methods */
   1022 
   1023 void EmulatedFakeCamera3::dump(int fd) {
   1024 
   1025 }
   1026 
   1027 /**
   1028  * Private methods
   1029  */
   1030 
   1031 status_t EmulatedFakeCamera3::getCameraCapabilities() {
   1032 
   1033     const char *key = mFacingBack ? "qemu.sf.back_camera_caps" : "qemu.sf.front_camera_caps";
   1034 
    /* The available capabilities are defined by the 'qemu.sf.*_camera_caps'
     * boot property, a comma- or space-separated list of capability names.
     * If the property doesn't exist, FULL_LEVEL plus RAW is assumed. */
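    // Example property value (assuming the capability names listed in
    // sAvailableCapabilitiesStrings), e.g. "FULL_LEVEL,RAW".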
   1037     char prop[PROPERTY_VALUE_MAX];
   1038     if (property_get(key, prop, NULL) > 0) {
   1039         char *saveptr = nullptr;
   1040         char *cap = strtok_r(prop, " ,", &saveptr);
   1041         while (cap != NULL) {
   1042             for (int i = 0; i < NUM_CAPABILITIES; i++) {
   1043                 if (!strcasecmp(cap, sAvailableCapabilitiesStrings[i])) {
   1044                     mCapabilities.add(static_cast<AvailableCapabilities>(i));
   1045                     break;
   1046                 }
   1047             }
   1048             cap = strtok_r(NULL, " ,", &saveptr);
   1049         }
   1050         if (mCapabilities.size() == 0) {
            ALOGE("%s had no valid capabilities: %s", key, prop);
   1052         }
   1053     }
   1054     // Default to FULL_LEVEL plus RAW if nothing is defined
   1055     if (mCapabilities.size() == 0) {
   1056         mCapabilities.add(FULL_LEVEL);
   1057         mCapabilities.add(RAW);
   1058     }
   1059 
   1060     // Add level-based caps
   1061     if (hasCapability(FULL_LEVEL)) {
   1062         mCapabilities.add(BURST_CAPTURE);
   1063         mCapabilities.add(READ_SENSOR_SETTINGS);
   1064         mCapabilities.add(MANUAL_SENSOR);
   1065         mCapabilities.add(MANUAL_POST_PROCESSING);
    }
   1067 
   1068     // Backwards-compatible is required for most other caps
   1069     // Not required for DEPTH_OUTPUT, though.
   1070     if (hasCapability(BURST_CAPTURE) ||
   1071             hasCapability(READ_SENSOR_SETTINGS) ||
   1072             hasCapability(RAW) ||
   1073             hasCapability(MANUAL_SENSOR) ||
   1074             hasCapability(MANUAL_POST_PROCESSING) ||
   1075             hasCapability(PRIVATE_REPROCESSING) ||
   1076             hasCapability(YUV_REPROCESSING) ||
   1077             hasCapability(CONSTRAINED_HIGH_SPEED_VIDEO)) {
   1078         mCapabilities.add(BACKWARD_COMPATIBLE);
   1079     }
   1080 
   1081     ALOGI("Camera %d capabilities:", mCameraID);
   1082     for (size_t i = 0; i < mCapabilities.size(); i++) {
   1083         ALOGI("  %s", sAvailableCapabilitiesStrings[mCapabilities[i]]);
   1084     }
   1085 
   1086     return OK;
   1087 }
   1088 
   1089 bool EmulatedFakeCamera3::hasCapability(AvailableCapabilities cap) {
   1090     ssize_t idx = mCapabilities.indexOf(cap);
   1091     return idx >= 0;
   1092 }
   1093 
   1094 status_t EmulatedFakeCamera3::constructStaticInfo() {
   1095 
   1096     CameraMetadata info;
   1097     Vector<int32_t> availableCharacteristicsKeys;
   1098     status_t res;
   1099 
   1100     // Find max width/height
   1101     int32_t width = 0, height = 0;
   1102     size_t rawSizeCount = sizeof(kAvailableRawSizes)/sizeof(kAvailableRawSizes[0]);
   1103     for (size_t index = 0; index + 1 < rawSizeCount; index += 2) {
   1104         if (width <= kAvailableRawSizes[index] &&
   1105             height <= kAvailableRawSizes[index+1]) {
   1106             width = kAvailableRawSizes[index];
   1107             height = kAvailableRawSizes[index+1];
   1108         }
   1109     }
   1110 
   1111     if (width < 640 || height < 480) {
   1112         width = 640;
   1113         height = 480;
   1114     }
   1115     mSensorWidth = width;
   1116     mSensorHeight = height;
   1117 
// Helper: records 'name' as an available characteristics key and adds the value,
// returning from constructStaticInfo() on failure; do/while makes it one statement.
#define ADD_STATIC_ENTRY(name, varptr, count) \
        do { \
            availableCharacteristicsKeys.add(name);   \
            res = info.update(name, varptr, count); \
            if (res != OK) return res; \
        } while (false)
   1122 
   1123     // android.sensor
   1124 
   1125     if (hasCapability(MANUAL_SENSOR)) {
   1126 
   1127         ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
   1128                 Sensor::kExposureTimeRange, 2);
   1129 
   1130         ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
   1131                 &Sensor::kFrameDurationRange[1], 1);
   1132 
   1133         ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
   1134                 Sensor::kSensitivityRange,
   1135                 sizeof(Sensor::kSensitivityRange)
   1136                 /sizeof(int32_t));
   1137 
   1138         ADD_STATIC_ENTRY(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
   1139                 &Sensor::kSensitivityRange[1], 1);
   1140     }
   1141 
   1142     static const float sensorPhysicalSize[2] = {3.20f, 2.40f}; // mm
   1143     ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
   1144             sensorPhysicalSize, 2);
   1145 
   1146     const int32_t pixelArray[] = {mSensorWidth, mSensorHeight};
   1147     ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
   1148             pixelArray, 2);
   1149     const int32_t activeArray[] = {0, 0, mSensorWidth, mSensorHeight};
   1150     ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
   1151             activeArray, 4);
   1152 
   1153     static const int32_t orientation = 90; // Aligned with 'long edge'
   1154     ADD_STATIC_ENTRY(ANDROID_SENSOR_ORIENTATION, &orientation, 1);
   1155 
   1156     static const uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME;
   1157     ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE, &timestampSource, 1);
   1158 
   1159     if (hasCapability(RAW)) {
   1160         ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
   1161                 &Sensor::kColorFilterArrangement, 1);
   1162 
   1163         ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_WHITE_LEVEL,
   1164                 (int32_t*)&Sensor::kMaxRawValue, 1);
   1165 
   1166         static const int32_t blackLevelPattern[4] = {
   1167             (int32_t)Sensor::kBlackLevel, (int32_t)Sensor::kBlackLevel,
   1168             (int32_t)Sensor::kBlackLevel, (int32_t)Sensor::kBlackLevel
   1169         };
   1170         ADD_STATIC_ENTRY(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
   1171                 blackLevelPattern, sizeof(blackLevelPattern)/sizeof(int32_t));
   1172     }
   1173 
   1174     if (hasCapability(BACKWARD_COMPATIBLE)) {
   1175         static const int32_t availableTestPatternModes[] = {
   1176             ANDROID_SENSOR_TEST_PATTERN_MODE_OFF
   1177         };
   1178         ADD_STATIC_ENTRY(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
   1179                 availableTestPatternModes, sizeof(availableTestPatternModes)/sizeof(int32_t));
   1180     }
   1181 
   1182     // android.lens
   1183 
   1184     static const float focalLength = 3.30f; // mm
   1185     ADD_STATIC_ENTRY(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
   1186             &focalLength, 1);
   1187 
   1188     if (hasCapability(BACKWARD_COMPATIBLE)) {
   1189         // 5 cm min focus distance for back camera, infinity (fixed focus) for front
   1190         const float minFocusDistance = mFacingBack ? 1.0/0.05 : 0.0;
   1191         ADD_STATIC_ENTRY(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
   1192                 &minFocusDistance, 1);
   1193 
   1194         // 5 m hyperfocal distance for back camera, infinity (fixed focus) for front
   1195         const float hyperFocalDistance = mFacingBack ? 1.0/5.0 : 0.0;
   1196         ADD_STATIC_ENTRY(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
                &hyperFocalDistance, 1);
   1198 
   1199         static const float aperture = 2.8f;
   1200         ADD_STATIC_ENTRY(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
   1201                 &aperture, 1);
   1202         static const float filterDensity = 0;
   1203         ADD_STATIC_ENTRY(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
   1204                 &filterDensity, 1);
   1205         static const uint8_t availableOpticalStabilization =
   1206                 ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
   1207         ADD_STATIC_ENTRY(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
   1208                 &availableOpticalStabilization, 1);
   1209 
   1210         static const int32_t lensShadingMapSize[] = {1, 1};
   1211         ADD_STATIC_ENTRY(ANDROID_LENS_INFO_SHADING_MAP_SIZE, lensShadingMapSize,
   1212                 sizeof(lensShadingMapSize)/sizeof(int32_t));
   1213 
   1214         static const uint8_t lensFocusCalibration =
   1215                 ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE;
   1216         ADD_STATIC_ENTRY(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION, &lensFocusCalibration, 1);
   1217     }
   1218 
   1219     if (hasCapability(DEPTH_OUTPUT)) {
        // These could be included for non-DEPTH capabilities as well, but they
        // are kept conditional here to broaden testing coverage
   1222 
   1223         // 90 degree rotation to align with long edge of a phone device that's by default portrait
   1224         static const float qO[] = { 0.707107f, 0.f, 0.f, 0.707107f};
   1225 
   1226         // Either a 180-degree rotation for back-facing, or no rotation for front-facing
   1227         const float qF[] = {0, (mFacingBack ? 1.f : 0.f), 0, (mFacingBack ? 0.f : 1.f)};
   1228 
        // Quaternion product, orientation change then facing
   1230         const float lensPoseRotation[] = {qO[0]*qF[0] - qO[1]*qF[1] - qO[2]*qF[2] - qO[3]*qF[3],
   1231                                           qO[0]*qF[1] + qO[1]*qF[0] + qO[2]*qF[3] - qO[3]*qF[2],
   1232                                           qO[0]*qF[2] + qO[2]*qF[0] + qO[1]*qF[3] - qO[3]*qF[1],
   1233                                           qO[0]*qF[3] + qO[3]*qF[0] + qO[1]*qF[2] - qO[2]*qF[1]};
   1234 
   1235         ADD_STATIC_ENTRY(ANDROID_LENS_POSE_ROTATION, lensPoseRotation,
   1236                 sizeof(lensPoseRotation)/sizeof(float));
   1237 
   1238         // Only one camera facing each way, so 0 translation needed to the center of the 'main'
   1239         // camera
   1240         static const float lensPoseTranslation[] = {0.f, 0.f, 0.f};
   1241 
   1242         ADD_STATIC_ENTRY(ANDROID_LENS_POSE_TRANSLATION, lensPoseTranslation,
   1243                 sizeof(lensPoseTranslation)/sizeof(float));
   1244 
        // Intrinsics are 'ideal': (f_x, f_y, c_x, c_y, s) derived from the
        // focal length and active array size
   1246         float f_x = focalLength * mSensorWidth / sensorPhysicalSize[0];
   1247         float f_y = focalLength * mSensorHeight / sensorPhysicalSize[1];
   1248         float c_x = mSensorWidth / 2.f;
   1249         float c_y = mSensorHeight / 2.f;
   1250         float s = 0.f;
   1251         const float lensIntrinsics[] = { f_x, f_y, c_x, c_y, s };
   1252 
   1253         ADD_STATIC_ENTRY(ANDROID_LENS_INTRINSIC_CALIBRATION, lensIntrinsics,
   1254                 sizeof(lensIntrinsics)/sizeof(float));
   1255 
   1256         // No radial or tangential distortion
   1257 
   1258         float lensRadialDistortion[] = {1.0f, 0.f, 0.f, 0.f, 0.f, 0.f};
   1259 
   1260         ADD_STATIC_ENTRY(ANDROID_LENS_RADIAL_DISTORTION, lensRadialDistortion,
   1261                 sizeof(lensRadialDistortion)/sizeof(float));
   1262 
   1263     }
   1264 
   1265 
    const uint8_t lensFacing = mFacingBack ?
            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
   1268     ADD_STATIC_ENTRY(ANDROID_LENS_FACING, &lensFacing, 1);
   1269 
   1270     // android.flash
   1271 
   1272     static const uint8_t flashAvailable = 0;
   1273     ADD_STATIC_ENTRY(ANDROID_FLASH_INFO_AVAILABLE, &flashAvailable, 1);
   1274 
   1275     // android.tonemap
   1276 
   1277     if (hasCapability(MANUAL_POST_PROCESSING)) {
   1278         static const int32_t tonemapCurvePoints = 128;
   1279         ADD_STATIC_ENTRY(ANDROID_TONEMAP_MAX_CURVE_POINTS, &tonemapCurvePoints, 1);
   1280 
   1281         static const uint8_t availableToneMapModes[] = {
   1282             ANDROID_TONEMAP_MODE_CONTRAST_CURVE,  ANDROID_TONEMAP_MODE_FAST,
   1283             ANDROID_TONEMAP_MODE_HIGH_QUALITY
   1284         };
   1285         ADD_STATIC_ENTRY(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES, availableToneMapModes,
   1286                 sizeof(availableToneMapModes));
   1287     }
   1288 
   1289     // android.scaler
   1290 
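    // Each available stream configuration entry below is a (format, width,
    // height, input/output direction) quadruple.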
   1291     const std::vector<int32_t> availableStreamConfigurationsBasic = {
   1292         HAL_PIXEL_FORMAT_BLOB, width, height, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
   1293         HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 320, 240, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
   1294         HAL_PIXEL_FORMAT_YCbCr_420_888, 320, 240, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
   1295         HAL_PIXEL_FORMAT_BLOB, 320, 240, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
   1296     };
   1297 
   1298     // Always need to include 640x480 in basic formats
   1299     const std::vector<int32_t> availableStreamConfigurationsBasic640 = {
   1300         HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 640, 480, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
   1301         HAL_PIXEL_FORMAT_YCbCr_420_888, 640, 480, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
   1302         HAL_PIXEL_FORMAT_BLOB, 640, 480, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT
   1303     };
   1304 
   1305     const std::vector<int32_t> availableStreamConfigurationsRaw = {
   1306         HAL_PIXEL_FORMAT_RAW16, width, height, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
   1307     };
   1308 
   1309     const std::vector<int32_t> availableStreamConfigurationsBurst = {
   1310         HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, width, height, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
   1311         HAL_PIXEL_FORMAT_YCbCr_420_888, width, height, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
   1312         HAL_PIXEL_FORMAT_RGBA_8888, width, height, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
   1313     };
   1314 
   1315     std::vector<int32_t> availableStreamConfigurations;
   1316 
   1317     if (hasCapability(BACKWARD_COMPATIBLE)) {
   1318         availableStreamConfigurations.insert(availableStreamConfigurations.end(),
   1319                 availableStreamConfigurationsBasic.begin(),
   1320                 availableStreamConfigurationsBasic.end());
   1321         if (width > 640) {
   1322             availableStreamConfigurations.insert(availableStreamConfigurations.end(),
   1323                     availableStreamConfigurationsBasic640.begin(),
   1324                     availableStreamConfigurationsBasic640.end());
   1325         }
   1326     }
   1327     if (hasCapability(RAW)) {
   1328         availableStreamConfigurations.insert(availableStreamConfigurations.end(),
   1329                 availableStreamConfigurationsRaw.begin(),
   1330                 availableStreamConfigurationsRaw.end());
   1331     }
   1332     if (hasCapability(BURST_CAPTURE)) {
   1333         availableStreamConfigurations.insert(availableStreamConfigurations.end(),
   1334                 availableStreamConfigurationsBurst.begin(),
   1335                 availableStreamConfigurationsBurst.end());
   1336     }
   1337 
   1338     if (availableStreamConfigurations.size() > 0) {
   1339         ADD_STATIC_ENTRY(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
   1340                 &availableStreamConfigurations[0],
   1341                 availableStreamConfigurations.size());
   1342     }
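
    // Each entry pushed above is a packed (format, width, height, direction)
    // quadruple; the min-frame-duration and stall-duration tables below reuse
    // the same 4-element layout, with a nanosecond duration in place of the
    // direction flag. An illustrative walker for this layout follows the end
    // of this function.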
   1343 
   1344     const std::vector<int64_t> availableMinFrameDurationsBasic = {
   1345         HAL_PIXEL_FORMAT_BLOB, width, height, Sensor::kFrameDurationRange[0],
   1346         HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 320, 240, Sensor::kFrameDurationRange[0],
   1347         HAL_PIXEL_FORMAT_YCbCr_420_888, 320, 240, Sensor::kFrameDurationRange[0],
   1348         HAL_PIXEL_FORMAT_BLOB, 320, 240, Sensor::kFrameDurationRange[0],
   1349     };
   1350 
   1351     // Always need to include 640x480 in basic formats
   1352     const std::vector<int64_t> availableMinFrameDurationsBasic640 = {
   1353         HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 640, 480, Sensor::kFrameDurationRange[0],
   1354         HAL_PIXEL_FORMAT_YCbCr_420_888, 640, 480, Sensor::kFrameDurationRange[0],
   1355         HAL_PIXEL_FORMAT_BLOB, 640, 480, Sensor::kFrameDurationRange[0]
   1356     };
   1357 
   1358     const std::vector<int64_t> availableMinFrameDurationsRaw = {
   1359         HAL_PIXEL_FORMAT_RAW16, width, height, Sensor::kFrameDurationRange[0],
   1360     };
   1361 
   1362     const std::vector<int64_t> availableMinFrameDurationsBurst = {
   1363         HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, width, height, Sensor::kFrameDurationRange[0],
   1364         HAL_PIXEL_FORMAT_YCbCr_420_888, width, height, Sensor::kFrameDurationRange[0],
   1365         HAL_PIXEL_FORMAT_RGBA_8888, width, height, Sensor::kFrameDurationRange[0],
   1366     };
   1367 
   1368     std::vector<int64_t> availableMinFrameDurations;
   1369 
   1370     if (hasCapability(BACKWARD_COMPATIBLE)) {
   1371         availableMinFrameDurations.insert(availableMinFrameDurations.end(),
   1372                 availableMinFrameDurationsBasic.begin(),
   1373                 availableMinFrameDurationsBasic.end());
   1374         if (width > 640) {
   1375             availableMinFrameDurations.insert(availableMinFrameDurations.end(),
   1376                     availableMinFrameDurationsBasic640.begin(),
   1377                     availableMinFrameDurationsBasic640.end());
   1378         }
   1379     }
   1380     if (hasCapability(RAW)) {
   1381         availableMinFrameDurations.insert(availableMinFrameDurations.end(),
   1382                 availableMinFrameDurationsRaw.begin(),
   1383                 availableMinFrameDurationsRaw.end());
   1384     }
   1385     if (hasCapability(BURST_CAPTURE)) {
   1386         availableMinFrameDurations.insert(availableMinFrameDurations.end(),
   1387                 availableMinFrameDurationsBurst.begin(),
   1388                 availableMinFrameDurationsBurst.end());
   1389     }
   1390 
   1391     if (availableMinFrameDurations.size() > 0) {
   1392         ADD_STATIC_ENTRY(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
   1393                 &availableMinFrameDurations[0],
   1394                 availableMinFrameDurations.size());
   1395     }
   1396 
   1397     const std::vector<int64_t> availableStallDurationsBasic = {
   1398         HAL_PIXEL_FORMAT_BLOB, width, height, Sensor::kFrameDurationRange[0],
   1399         HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 320, 240, 0,
   1400         HAL_PIXEL_FORMAT_YCbCr_420_888, 320, 240, 0,
   1401         HAL_PIXEL_FORMAT_RGBA_8888, 320, 240, 0,
   1402     };
   1403 
   1404     // Always need to include 640x480 in basic formats
   1405     const std::vector<int64_t> availableStallDurationsBasic640 = {
   1406         HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 640, 480, 0,
   1407         HAL_PIXEL_FORMAT_YCbCr_420_888, 640, 480, 0,
   1408         HAL_PIXEL_FORMAT_BLOB, 640, 480, Sensor::kFrameDurationRange[0]
   1409     };
   1410 
   1411     const std::vector<int64_t> availableStallDurationsRaw = {
   1412         HAL_PIXEL_FORMAT_RAW16, 640, 480, Sensor::kFrameDurationRange[0]
   1413     };
   1414     const std::vector<int64_t> availableStallDurationsBurst = {
   1415         HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 640, 480, 0,
   1416         HAL_PIXEL_FORMAT_YCbCr_420_888, 640, 480, 0,
   1417         HAL_PIXEL_FORMAT_RGBA_8888, 640, 480, 0
   1418     };
   1419 
   1420     std::vector<int64_t> availableStallDurations;
   1421 
   1422     if (hasCapability(BACKWARD_COMPATIBLE)) {
   1423         availableStallDurations.insert(availableStallDurations.end(),
   1424                 availableStallDurationsBasic.begin(),
   1425                 availableStallDurationsBasic.end());
   1426         if (width > 640) {
   1427             availableStallDurations.insert(availableStallDurations.end(),
   1428                     availableStallDurationsBasic640.begin(),
   1429                     availableStallDurationsBasic640.end());
   1430         }
   1431     }
   1432     if (hasCapability(RAW)) {
   1433         availableStallDurations.insert(availableStallDurations.end(),
   1434                 availableStallDurationsRaw.begin(),
   1435                 availableStallDurationsRaw.end());
   1436     }
   1437     if (hasCapability(BURST_CAPTURE)) {
   1438         availableStallDurations.insert(availableStallDurations.end(),
   1439                 availableStallDurationsBurst.begin(),
   1440                 availableStallDurationsBurst.end());
   1441     }
   1442 
   1443     if (availableStallDurations.size() > 0) {
   1444         ADD_STATIC_ENTRY(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
   1445                 &availableStallDurations[0],
   1446                 availableStallDurations.size());
   1447     }
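
    // In the stall table above only the BLOB (JPEG) and RAW16 outputs advertise
    // a non-zero stall, equal to one minimum frame duration; the processed
    // YUV/RGBA/implementation-defined outputs report 0 and never stall the
    // pipeline.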
   1448 
   1449     if (hasCapability(BACKWARD_COMPATIBLE)) {
   1450         static const uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_FREEFORM;
   1451         ADD_STATIC_ENTRY(ANDROID_SCALER_CROPPING_TYPE,
   1452                 &croppingType, 1);
   1453 
   1454         static const float maxZoom = 10;
   1455         ADD_STATIC_ENTRY(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
   1456                 &maxZoom, 1);
   1457     }
   1458 
   1459     // android.jpeg
   1460 
   1461     if (hasCapability(BACKWARD_COMPATIBLE)) {
   1462         static const int32_t jpegThumbnailSizes[] = {
   1463             0, 0,
   1464             160, 120,
   1465             320, 240
   1466         };
   1467         ADD_STATIC_ENTRY(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
   1468                 jpegThumbnailSizes, sizeof(jpegThumbnailSizes)/sizeof(int32_t));
   1469 
   1470         static const int32_t jpegMaxSize = JpegCompressor::kMaxJpegSize;
   1471         ADD_STATIC_ENTRY(ANDROID_JPEG_MAX_SIZE, &jpegMaxSize, 1);
   1472     }
   1473 
   1474     // android.stats
   1475 
   1476     if (hasCapability(BACKWARD_COMPATIBLE)) {
   1477         static const uint8_t availableFaceDetectModes[] = {
   1478             ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
   1479             ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE,
   1480             ANDROID_STATISTICS_FACE_DETECT_MODE_FULL
   1481         };
   1482         ADD_STATIC_ENTRY(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
   1483                 availableFaceDetectModes,
   1484                 sizeof(availableFaceDetectModes));
   1485 
   1486         static const int32_t maxFaceCount = 8;
   1487         ADD_STATIC_ENTRY(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
   1488                 &maxFaceCount, 1);
   1489 
   1490 
   1491         static const uint8_t availableShadingMapModes[] = {
   1492             ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF
   1493         };
   1494         ADD_STATIC_ENTRY(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
   1495                 availableShadingMapModes, sizeof(availableShadingMapModes));
   1496     }
   1497 
   1498     // android.sync
   1499 
   1500     static const int32_t maxLatency =
   1501             hasCapability(FULL_LEVEL) ? ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL : 3;
   1502     ADD_STATIC_ENTRY(ANDROID_SYNC_MAX_LATENCY, &maxLatency, 1);
   1503 
   1504     // android.control
   1505 
   1506     if (hasCapability(BACKWARD_COMPATIBLE)) {
   1507         static const uint8_t availableControlModes[] = {
   1508             ANDROID_CONTROL_MODE_OFF, ANDROID_CONTROL_MODE_AUTO, ANDROID_CONTROL_MODE_USE_SCENE_MODE
   1509         };
   1510         ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_MODES,
   1511                 availableControlModes, sizeof(availableControlModes));
   1512     } else {
   1513         static const uint8_t availableControlModes[] = {
   1514             ANDROID_CONTROL_MODE_AUTO
   1515         };
   1516         ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_MODES,
   1517                 availableControlModes, sizeof(availableControlModes));
   1518     }
   1519 
   1520     static const uint8_t availableSceneModes[] = {
   1521         hasCapability(BACKWARD_COMPATIBLE) ?
   1522             ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY :
   1523             ANDROID_CONTROL_SCENE_MODE_DISABLED
   1524     };
   1525     ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
   1526             availableSceneModes, sizeof(availableSceneModes));
   1527 
   1528     if (hasCapability(BACKWARD_COMPATIBLE)) {
   1529         static const uint8_t availableEffects[] = {
   1530             ANDROID_CONTROL_EFFECT_MODE_OFF
   1531         };
   1532         ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_EFFECTS,
   1533                 availableEffects, sizeof(availableEffects));
   1534     }
   1535 
   1536     if (hasCapability(BACKWARD_COMPATIBLE)) {
   1537         static const int32_t max3aRegions[] = {/*AE*/ 1,/*AWB*/ 0,/*AF*/ 1};
   1538         ADD_STATIC_ENTRY(ANDROID_CONTROL_MAX_REGIONS,
   1539                 max3aRegions, sizeof(max3aRegions)/sizeof(max3aRegions[0]));
   1540 
   1541         static const uint8_t availableAeModes[] = {
   1542             ANDROID_CONTROL_AE_MODE_OFF,
   1543             ANDROID_CONTROL_AE_MODE_ON
   1544         };
   1545         ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_AVAILABLE_MODES,
   1546                 availableAeModes, sizeof(availableAeModes));
   1547 
   1548         static const camera_metadata_rational exposureCompensationStep = {
   1549             1, 3
   1550         };
   1551         ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_COMPENSATION_STEP,
   1552                 &exposureCompensationStep, 1);
   1553 
   1554         int32_t exposureCompensationRange[] = {-9, 9};
   1555         ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
   1556                 exposureCompensationRange,
   1557                 sizeof(exposureCompensationRange)/sizeof(int32_t));
   1558     }
   1559 
   1560     static const int32_t availableTargetFpsRanges[] = {
   1561             5, 30, 15, 30, 15, 15, 30, 30
   1562     };
   1563     ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
   1564             availableTargetFpsRanges,
   1565             sizeof(availableTargetFpsRanges)/sizeof(int32_t));
   1566 
   1567     if (hasCapability(BACKWARD_COMPATIBLE)) {
   1568         static const uint8_t availableAntibandingModes[] = {
   1569             ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,
   1570             ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO
   1571         };
   1572         ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
   1573                 availableAntibandingModes, sizeof(availableAntibandingModes));
   1574     }
   1575 
   1576     static const uint8_t aeLockAvailable = hasCapability(BACKWARD_COMPATIBLE) ?
   1577             ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
   1578 
   1579     ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
   1580             &aeLockAvailable, 1);
   1581 
   1582     if (hasCapability(BACKWARD_COMPATIBLE)) {
   1583         static const uint8_t availableAwbModes[] = {
   1584             ANDROID_CONTROL_AWB_MODE_OFF,
   1585             ANDROID_CONTROL_AWB_MODE_AUTO,
   1586             ANDROID_CONTROL_AWB_MODE_INCANDESCENT,
   1587             ANDROID_CONTROL_AWB_MODE_FLUORESCENT,
   1588             ANDROID_CONTROL_AWB_MODE_DAYLIGHT,
   1589             ANDROID_CONTROL_AWB_MODE_SHADE
   1590         };
   1591         ADD_STATIC_ENTRY(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
   1592                 availableAwbModes, sizeof(availableAwbModes));
   1593     }
   1594 
   1595     static const uint8_t awbLockAvailable = hasCapability(BACKWARD_COMPATIBLE) ?
   1596             ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
   1597 
   1598     ADD_STATIC_ENTRY(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
   1599             &awbLockAvailable, 1);
   1600 
   1601     static const uint8_t availableAfModesBack[] = {
   1602             ANDROID_CONTROL_AF_MODE_OFF,
   1603             ANDROID_CONTROL_AF_MODE_AUTO,
   1604             ANDROID_CONTROL_AF_MODE_MACRO,
   1605             ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,
   1606             ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE
   1607     };
   1608 
   1609     static const uint8_t availableAfModesFront[] = {
   1610             ANDROID_CONTROL_AF_MODE_OFF
   1611     };
   1612 
   1613     if (mFacingBack && hasCapability(BACKWARD_COMPATIBLE)) {
   1614         ADD_STATIC_ENTRY(ANDROID_CONTROL_AF_AVAILABLE_MODES,
   1615                 availableAfModesBack, sizeof(availableAfModesBack));
   1616     } else {
   1617         ADD_STATIC_ENTRY(ANDROID_CONTROL_AF_AVAILABLE_MODES,
   1618                 availableAfModesFront, sizeof(availableAfModesFront));
   1619     }
   1620 
   1621     static const uint8_t availableVstabModes[] = {
   1622         ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF
   1623     };
   1624     ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
   1625             availableVstabModes, sizeof(availableVstabModes));
   1626 
   1627     // android.colorCorrection
   1628 
   1629     if (hasCapability(BACKWARD_COMPATIBLE)) {
   1630         static const uint8_t availableAberrationModes[] = {
   1631             ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
   1632             ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
   1633             ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY
   1634         };
   1635         ADD_STATIC_ENTRY(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
   1636                 availableAberrationModes, sizeof(availableAberrationModes));
   1637     } else {
   1638         static const uint8_t availableAberrationModes[] = {
   1639             ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
   1640         };
   1641         ADD_STATIC_ENTRY(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
   1642                 availableAberrationModes, sizeof(availableAberrationModes));
   1643     }
   1644     // android.edge
   1645 
   1646     if (hasCapability(BACKWARD_COMPATIBLE)) {
   1647         static const uint8_t availableEdgeModes[] = {
   1648             ANDROID_EDGE_MODE_OFF, ANDROID_EDGE_MODE_FAST, ANDROID_EDGE_MODE_HIGH_QUALITY
   1649         };
   1650         ADD_STATIC_ENTRY(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
   1651                 availableEdgeModes, sizeof(availableEdgeModes));
   1652     } else {
   1653         static const uint8_t availableEdgeModes[] = {
   1654             ANDROID_EDGE_MODE_OFF
   1655         };
   1656         ADD_STATIC_ENTRY(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
   1657                 availableEdgeModes, sizeof(availableEdgeModes));
   1658     }
   1659 
   1660     // android.info
   1661 
   1662     static const uint8_t supportedHardwareLevel =
   1663             hasCapability(FULL_LEVEL) ? ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL :
   1664                     ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED;
   1665     ADD_STATIC_ENTRY(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
   1666                 &supportedHardwareLevel,
   1667                 /*count*/1);
   1668 
   1669     // android.noiseReduction
   1670 
   1671     if (hasCapability(BACKWARD_COMPATIBLE)) {
   1672         static const uint8_t availableNoiseReductionModes[] = {
   1673             ANDROID_NOISE_REDUCTION_MODE_OFF,
   1674             ANDROID_NOISE_REDUCTION_MODE_FAST,
   1675             ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY
   1676         };
   1677         ADD_STATIC_ENTRY(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
   1678                 availableNoiseReductionModes, sizeof(availableNoiseReductionModes));
   1679     } else {
   1680         static const uint8_t availableNoiseReductionModes[] = {
   1681             ANDROID_NOISE_REDUCTION_MODE_OFF,
   1682         };
   1683         ADD_STATIC_ENTRY(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
   1684                 availableNoiseReductionModes, sizeof(availableNoiseReductionModes));
   1685     }
   1686 
   1687     // android.depth
   1688 
   1689     if (hasCapability(DEPTH_OUTPUT)) {
   1690 
   1691         static const int32_t maxDepthSamples = 100;
   1692         ADD_STATIC_ENTRY(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
   1693                 &maxDepthSamples, 1);
   1694 
   1695         static const int32_t availableDepthStreamConfigurations[] = {
   1696             HAL_PIXEL_FORMAT_Y16, 160, 120, ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_OUTPUT,
    1697             HAL_PIXEL_FORMAT_BLOB, maxDepthSamples, 1, ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_OUTPUT
   1698         };
   1699         ADD_STATIC_ENTRY(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
   1700                 availableDepthStreamConfigurations,
   1701                 sizeof(availableDepthStreamConfigurations)/sizeof(int32_t));
   1702 
   1703         static const int64_t availableDepthMinFrameDurations[] = {
   1704             HAL_PIXEL_FORMAT_Y16, 160, 120, Sensor::kFrameDurationRange[0],
    1705             HAL_PIXEL_FORMAT_BLOB, maxDepthSamples, 1, Sensor::kFrameDurationRange[0]
   1706         };
   1707         ADD_STATIC_ENTRY(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
   1708                 availableDepthMinFrameDurations,
   1709                 sizeof(availableDepthMinFrameDurations)/sizeof(int64_t));
   1710 
   1711         static const int64_t availableDepthStallDurations[] = {
   1712             HAL_PIXEL_FORMAT_Y16, 160, 120, Sensor::kFrameDurationRange[0],
    1713             HAL_PIXEL_FORMAT_BLOB, maxDepthSamples, 1, Sensor::kFrameDurationRange[0]
   1714         };
   1715         ADD_STATIC_ENTRY(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
   1716                 availableDepthStallDurations,
   1717                 sizeof(availableDepthStallDurations)/sizeof(int64_t));
   1718 
   1719         uint8_t depthIsExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
   1720         ADD_STATIC_ENTRY(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE,
   1721                 &depthIsExclusive, 1);
   1722     }
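
    // The depth configuration above advertises two outputs: a 160x120 Y16
    // depth map and a point cloud delivered as a BLOB of at most
    // maxDepthSamples points (maxDepthSamples x 1), both usable alongside
    // regular color streams since depth is not exclusive.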
   1723 
   1724     // android.shading
   1725 
   1726     if (hasCapability(BACKWARD_COMPATIBLE)) {
   1727         static const uint8_t availableShadingModes[] = {
   1728             ANDROID_SHADING_MODE_OFF, ANDROID_SHADING_MODE_FAST, ANDROID_SHADING_MODE_HIGH_QUALITY
   1729         };
   1730         ADD_STATIC_ENTRY(ANDROID_SHADING_AVAILABLE_MODES, availableShadingModes,
   1731                 sizeof(availableShadingModes));
   1732     } else {
   1733         static const uint8_t availableShadingModes[] = {
   1734             ANDROID_SHADING_MODE_OFF
   1735         };
   1736         ADD_STATIC_ENTRY(ANDROID_SHADING_AVAILABLE_MODES, availableShadingModes,
   1737                 sizeof(availableShadingModes));
   1738     }
   1739 
   1740     // android.request
   1741 
   1742     static const int32_t maxNumOutputStreams[] = {
   1743             kMaxRawStreamCount, kMaxProcessedStreamCount, kMaxJpegStreamCount
   1744     };
   1745     ADD_STATIC_ENTRY(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, maxNumOutputStreams, 3);
   1746 
   1747     static const uint8_t maxPipelineDepth = kMaxBufferCount;
   1748     ADD_STATIC_ENTRY(ANDROID_REQUEST_PIPELINE_MAX_DEPTH, &maxPipelineDepth, 1);
   1749 
   1750     static const int32_t partialResultCount = 1;
   1751     ADD_STATIC_ENTRY(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
   1752             &partialResultCount, /*count*/1);
   1753 
   1754     SortedVector<uint8_t> caps;
   1755     for (size_t i = 0; i < mCapabilities.size(); i++) {
   1756         switch(mCapabilities[i]) {
   1757             case BACKWARD_COMPATIBLE:
   1758                 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
   1759                 break;
   1760             case MANUAL_SENSOR:
   1761                 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
   1762                 break;
   1763             case MANUAL_POST_PROCESSING:
   1764                 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
   1765                 break;
   1766             case RAW:
   1767                 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
   1768                 break;
   1769             case PRIVATE_REPROCESSING:
   1770                 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
   1771                 break;
   1772             case READ_SENSOR_SETTINGS:
   1773                 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
   1774                 break;
   1775             case BURST_CAPTURE:
   1776                 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
   1777                 break;
   1778             case YUV_REPROCESSING:
   1779                 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
   1780                 break;
   1781             case DEPTH_OUTPUT:
   1782                 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT);
   1783                 break;
   1784             case CONSTRAINED_HIGH_SPEED_VIDEO:
   1785                 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
   1786                 break;
   1787             default:
   1788                 // Ignore LEVELs
   1789                 break;
   1790         }
   1791     }
   1792     ADD_STATIC_ENTRY(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, caps.array(), caps.size());
   1793 
   1794     // Scan a default request template for included request keys
   1795     Vector<int32_t> availableRequestKeys;
   1796     const camera_metadata_t *previewRequest =
   1797         constructDefaultRequestSettings(CAMERA3_TEMPLATE_PREVIEW);
   1798     for (size_t i = 0; i < get_camera_metadata_entry_count(previewRequest); i++) {
   1799         camera_metadata_ro_entry_t entry;
   1800         get_camera_metadata_ro_entry(previewRequest, i, &entry);
   1801         availableRequestKeys.add(entry.tag);
   1802     }
   1803     ADD_STATIC_ENTRY(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, availableRequestKeys.array(),
   1804             availableRequestKeys.size());
   1805 
   1806     // Add a few more result keys. Must be kept up to date with the various places that add these
   1807 
   1808     Vector<int32_t> availableResultKeys(availableRequestKeys);
   1809     if (hasCapability(BACKWARD_COMPATIBLE)) {
   1810         availableResultKeys.add(ANDROID_CONTROL_AE_STATE);
   1811         availableResultKeys.add(ANDROID_CONTROL_AF_STATE);
   1812         availableResultKeys.add(ANDROID_CONTROL_AWB_STATE);
   1813         availableResultKeys.add(ANDROID_FLASH_STATE);
   1814         availableResultKeys.add(ANDROID_LENS_STATE);
   1815         availableResultKeys.add(ANDROID_LENS_FOCUS_RANGE);
   1816         availableResultKeys.add(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW);
   1817         availableResultKeys.add(ANDROID_STATISTICS_SCENE_FLICKER);
   1818     }
   1819 
   1820     if (hasCapability(DEPTH_OUTPUT)) {
   1821         availableResultKeys.add(ANDROID_LENS_POSE_ROTATION);
   1822         availableResultKeys.add(ANDROID_LENS_POSE_TRANSLATION);
   1823         availableResultKeys.add(ANDROID_LENS_INTRINSIC_CALIBRATION);
   1824         availableResultKeys.add(ANDROID_LENS_RADIAL_DISTORTION);
   1825     }
   1826 
   1827     availableResultKeys.add(ANDROID_REQUEST_PIPELINE_DEPTH);
   1828     availableResultKeys.add(ANDROID_SENSOR_TIMESTAMP);
   1829 
   1830     ADD_STATIC_ENTRY(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS, availableResultKeys.array(),
   1831             availableResultKeys.size());
   1832 
   1833     // Needs to be last, to collect all the keys set
   1834 
   1835     availableCharacteristicsKeys.add(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS);
   1836     info.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
   1837             availableCharacteristicsKeys);
   1838 
   1839     mCameraInfo = info.release();
   1840 
   1841 #undef ADD_STATIC_ENTRY
   1842     return OK;
   1843 }
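
// Illustrative sketch, not part of the HAL: one way a consumer of the static
// metadata assembled above could walk the stream-configuration payload. The
// helper name and its ALOGV output are assumptions made purely for this
// example; the (format, width, height, direction) entry layout matches the
// tables constructed in this file.
static __attribute__((unused)) void dumpStreamConfigurationsSketch(
        const std::vector<int32_t> &configs) {
    // Entries are packed as consecutive 4-element groups, so step through the
    // flat vector four values at a time.
    for (size_t i = 0; i + 3 < configs.size(); i += 4) {
        ALOGV("format 0x%x: %dx%d, direction %d",
                static_cast<uint32_t>(configs[i]),
                configs[i + 1], configs[i + 2], configs[i + 3]);
    }
}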
   1844 
   1845 status_t EmulatedFakeCamera3::process3A(CameraMetadata &settings) {
   1846     /**
   1847      * Extract top-level 3A controls
   1848      */
   1849     status_t res;
   1850 
   1851     bool facePriority = false;
   1852 
   1853     camera_metadata_entry e;
   1854 
   1855     e = settings.find(ANDROID_CONTROL_MODE);
   1856     if (e.count == 0) {
   1857         ALOGE("%s: No control mode entry!", __FUNCTION__);
   1858         return BAD_VALUE;
   1859     }
   1860     uint8_t controlMode = e.data.u8[0];
   1861 
   1862     if (controlMode == ANDROID_CONTROL_MODE_OFF) {
   1863         mAeMode   = ANDROID_CONTROL_AE_MODE_OFF;
   1864         mAfMode   = ANDROID_CONTROL_AF_MODE_OFF;
   1865         mAwbMode  = ANDROID_CONTROL_AWB_MODE_OFF;
   1866         mAeState  = ANDROID_CONTROL_AE_STATE_INACTIVE;
   1867         mAfState  = ANDROID_CONTROL_AF_STATE_INACTIVE;
   1868         mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
   1869         update3A(settings);
   1870         return OK;
   1871     } else if (controlMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
   1872         if (!hasCapability(BACKWARD_COMPATIBLE)) {
   1873             ALOGE("%s: Can't use scene mode when BACKWARD_COMPATIBLE not supported!",
   1874                   __FUNCTION__);
   1875             return BAD_VALUE;
   1876         }
   1877 
   1878         e = settings.find(ANDROID_CONTROL_SCENE_MODE);
   1879         if (e.count == 0) {
   1880             ALOGE("%s: No scene mode entry!", __FUNCTION__);
   1881             return BAD_VALUE;
   1882         }
   1883         uint8_t sceneMode = e.data.u8[0];
   1884 
   1885         switch(sceneMode) {
   1886             case ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY:
   1887                 mFacePriority = true;
   1888                 break;
   1889             default:
   1890                 ALOGE("%s: Emulator doesn't support scene mode %d",
   1891                         __FUNCTION__, sceneMode);
   1892                 return BAD_VALUE;
   1893         }
   1894     } else {
   1895         mFacePriority = false;
   1896     }
   1897 
    1898     // controlMode == AUTO or sceneMode == FACE_PRIORITY
   1899     // Process individual 3A controls
   1900 
   1901     res = doFakeAE(settings);
   1902     if (res != OK) return res;
   1903 
   1904     res = doFakeAF(settings);
   1905     if (res != OK) return res;
   1906 
   1907     res = doFakeAWB(settings);
   1908     if (res != OK) return res;
   1909 
   1910     update3A(settings);
   1911     return OK;
   1912 }
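
// Summary of the dispatch above: ANDROID_CONTROL_MODE_OFF records all three
// blocks as off/inactive and returns early, USE_SCENE_MODE requires the
// BACKWARD_COMPATIBLE capability and only accepts FACE_PRIORITY, and in every
// other case the fake AE/AF/AWB routines below run in sequence before
// update3A() writes the resulting 3A state back into the request settings.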
   1913 
   1914 status_t EmulatedFakeCamera3::doFakeAE(CameraMetadata &settings) {
   1915     camera_metadata_entry e;
   1916 
   1917     e = settings.find(ANDROID_CONTROL_AE_MODE);
   1918     if (e.count == 0 && hasCapability(BACKWARD_COMPATIBLE)) {
   1919         ALOGE("%s: No AE mode entry!", __FUNCTION__);
   1920         return BAD_VALUE;
   1921     }
   1922     uint8_t aeMode = (e.count > 0) ? e.data.u8[0] : (uint8_t)ANDROID_CONTROL_AE_MODE_ON;
   1923     mAeMode = aeMode;
   1924 
   1925     switch (aeMode) {
   1926         case ANDROID_CONTROL_AE_MODE_OFF:
   1927             // AE is OFF
   1928             mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
   1929             return OK;
   1930         case ANDROID_CONTROL_AE_MODE_ON:
   1931             // OK for AUTO modes
   1932             break;
   1933         default:
   1934             // Mostly silently ignore unsupported modes
   1935             ALOGV("%s: Emulator doesn't support AE mode %d, assuming ON",
   1936                     __FUNCTION__, aeMode);
   1937             break;
   1938     }
   1939 
   1940     e = settings.find(ANDROID_CONTROL_AE_LOCK);
   1941     bool aeLocked = (e.count > 0) ? (e.data.u8[0] == ANDROID_CONTROL_AE_LOCK_ON) : false;
   1942 
   1943     e = settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER);
   1944     bool precaptureTrigger = false;
   1945     if (e.count != 0) {
   1946         precaptureTrigger =
   1947                 (e.data.u8[0] == ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_START);
   1948     }
   1949 
   1950     if (precaptureTrigger) {
   1951         ALOGV("%s: Pre capture trigger = %d", __FUNCTION__, precaptureTrigger);
   1952     } else if (e.count > 0) {
    1953         ALOGV("%s: Precapture trigger present but not START (count %zu)",
   1954               __FUNCTION__,
   1955               e.count);
   1956     }
   1957 
   1958     if (precaptureTrigger || mAeState == ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
   1959         // Run precapture sequence
   1960         if (mAeState != ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
   1961             mAeCounter = 0;
   1962         }
   1963 
   1964         if (mFacePriority) {
   1965             mAeTargetExposureTime = kFacePriorityExposureTime;
   1966         } else {
   1967             mAeTargetExposureTime = kNormalExposureTime;
   1968         }
   1969 
   1970         if (mAeCounter > kPrecaptureMinFrames &&
   1971                 (mAeTargetExposureTime - mAeCurrentExposureTime) <
   1972                 mAeTargetExposureTime / 10) {
   1973             // Done with precapture
   1974             mAeCounter = 0;
   1975             mAeState = aeLocked ? ANDROID_CONTROL_AE_STATE_LOCKED :
   1976                     ANDROID_CONTROL_AE_STATE_CONVERGED;
   1977         } else {
   1978             // Converge some more
   1979             mAeCurrentExposureTime +=
   1980                     (mAeTargetExposureTime - mAeCurrentExposureTime) *
   1981                     kExposureTrackRate;
   1982             mAeCounter++;
   1983             mAeState = ANDROID_CONTROL_AE_STATE_PRECAPTURE;
   1984         }
   1985 
   1986     } else if (!aeLocked) {
   1987         // Run standard occasional AE scan
   1988         switch (mAeState) {
   1989             case ANDROID_CONTROL_AE_STATE_INACTIVE:
   1990                 mAeState = ANDROID_CONTROL_AE_STATE_SEARCHING;
   1991                 break;
   1992             case ANDROID_CONTROL_AE_STATE_CONVERGED:
   1993                 mAeCounter++;
   1994                 if (mAeCounter > kStableAeMaxFrames) {
   1995                     mAeTargetExposureTime =
   1996                             mFacePriority ? kFacePriorityExposureTime :
   1997                             kNormalExposureTime;
   1998                     float exposureStep = ((double)rand() / RAND_MAX) *
   1999                             (kExposureWanderMax - kExposureWanderMin) +
   2000                             kExposureWanderMin;
   2001                     mAeTargetExposureTime *= std::pow(2, exposureStep);
   2002                     mAeState = ANDROID_CONTROL_AE_STATE_SEARCHING;
   2003                 }
   2004                 break;
   2005             case ANDROID_CONTROL_AE_STATE_SEARCHING:
   2006                 mAeCurrentExposureTime +=
   2007                         (mAeTargetExposureTime - mAeCurrentExposureTime) *
   2008                         kExposureTrackRate;
   2009                 if (abs(mAeTargetExposureTime - mAeCurrentExposureTime) <
   2010                         mAeTargetExposureTime / 10) {
   2011                     // Close enough
   2012                     mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED;
   2013                     mAeCounter = 0;
   2014                 }
   2015                 break;
   2016             case ANDROID_CONTROL_AE_STATE_LOCKED:
   2017                 mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED;
   2018                 mAeCounter = 0;
   2019                 break;
   2020             default:
   2021                 ALOGE("%s: Emulator in unexpected AE state %d",
   2022                         __FUNCTION__, mAeState);
   2023                 return INVALID_OPERATION;
   2024         }
   2025     } else {
   2026         // AE is locked
   2027         mAeState = ANDROID_CONTROL_AE_STATE_LOCKED;
   2028     }
   2029 
   2030     return OK;
   2031 }
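
// Illustrative sketch, not emulator code: the AE logic above converges on its
// target geometrically, closing kExposureTrackRate (20%) of the remaining gap
// each frame and declaring convergence once the gap drops below one tenth of
// the target. When the gap starts near the full target value, that takes about
// ceil(log(0.1) / log(1 - 0.2)) = 11 frames. The helper below (a hypothetical
// name) just models that recurrence with plain doubles, assuming target > 0
// and 0 < trackRate < 1.
static __attribute__((unused)) int framesToConvergeSketch(
        double current, double target, double trackRate) {
    int frames = 0;
    // Repeat the per-frame update used by the AE routine until the remaining
    // gap is within 10% of the target.
    while (std::abs(target - current) >= target / 10) {
        current += (target - current) * trackRate;
        frames++;
    }
    return frames;
}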
   2032 
   2033 status_t EmulatedFakeCamera3::doFakeAF(CameraMetadata &settings) {
   2034     camera_metadata_entry e;
   2035 
   2036     e = settings.find(ANDROID_CONTROL_AF_MODE);
   2037     if (e.count == 0 && hasCapability(BACKWARD_COMPATIBLE)) {
   2038         ALOGE("%s: No AF mode entry!", __FUNCTION__);
   2039         return BAD_VALUE;
   2040     }
   2041     uint8_t afMode = (e.count > 0) ? e.data.u8[0] : (uint8_t)ANDROID_CONTROL_AF_MODE_OFF;
   2042 
   2043     e = settings.find(ANDROID_CONTROL_AF_TRIGGER);
   2044     typedef camera_metadata_enum_android_control_af_trigger af_trigger_t;
   2045     af_trigger_t afTrigger;
   2046     if (e.count != 0) {
   2047         afTrigger = static_cast<af_trigger_t>(e.data.u8[0]);
   2048 
   2049         ALOGV("%s: AF trigger set to 0x%x", __FUNCTION__, afTrigger);
   2050         ALOGV("%s: AF mode is 0x%x", __FUNCTION__, afMode);
   2051     } else {
   2052         afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
   2053     }
   2054 
   2055     switch (afMode) {
   2056         case ANDROID_CONTROL_AF_MODE_OFF:
   2057             mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
   2058             return OK;
   2059         case ANDROID_CONTROL_AF_MODE_AUTO:
   2060         case ANDROID_CONTROL_AF_MODE_MACRO:
   2061         case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
   2062         case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
   2063             if (!mFacingBack) {
   2064                 ALOGE("%s: Front camera doesn't support AF mode %d",
   2065                         __FUNCTION__, afMode);
   2066                 return BAD_VALUE;
   2067             }
    2068             // OK, handle transitions below
   2069             break;
   2070         default:
   2071             ALOGE("%s: Emulator doesn't support AF mode %d",
   2072                     __FUNCTION__, afMode);
   2073             return BAD_VALUE;
   2074     }
   2075 
   2076     bool afModeChanged = mAfMode != afMode;
   2077     mAfMode = afMode;
   2078 
   2079     /**
   2080      * Simulate AF triggers. Transition at most 1 state per frame.
   2081      * - Focusing always succeeds (goes into locked, or PASSIVE_SCAN).
   2082      */
   2083 
   2084     bool afTriggerStart = false;
   2085     bool afTriggerCancel = false;
   2086     switch (afTrigger) {
   2087         case ANDROID_CONTROL_AF_TRIGGER_IDLE:
   2088             break;
   2089         case ANDROID_CONTROL_AF_TRIGGER_START:
   2090             afTriggerStart = true;
   2091             break;
   2092         case ANDROID_CONTROL_AF_TRIGGER_CANCEL:
   2093             afTriggerCancel = true;
   2094             // Cancel trigger always transitions into INACTIVE
   2095             mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
   2096 
   2097             ALOGV("%s: AF State transition to STATE_INACTIVE", __FUNCTION__);
   2098 
   2099             // Stay in 'inactive' until at least next frame
   2100             return OK;
   2101         default:
   2102             ALOGE("%s: Unknown af trigger value %d", __FUNCTION__, afTrigger);
   2103             return BAD_VALUE;
   2104     }
   2105 
   2106     // If we get down here, we're either in an autofocus mode
   2107     //  or in a continuous focus mode (and no other modes)
   2108 
   2109     int oldAfState = mAfState;
   2110     switch (mAfState) {
   2111         case ANDROID_CONTROL_AF_STATE_INACTIVE:
   2112             if (afTriggerStart) {
   2113                 switch (afMode) {
   2114                     case ANDROID_CONTROL_AF_MODE_AUTO:
   2115                         // fall-through
   2116                     case ANDROID_CONTROL_AF_MODE_MACRO:
   2117                         mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
   2118                         break;
   2119                     case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
   2120                         // fall-through
   2121                     case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
   2122                         mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
   2123                         break;
   2124                 }
   2125             } else {
   2126                 // At least one frame stays in INACTIVE
   2127                 if (!afModeChanged) {
   2128                     switch (afMode) {
   2129                         case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
   2130                             // fall-through
   2131                         case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
   2132                             mAfState = ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN;
   2133                             break;
   2134                     }
   2135                 }
   2136             }
   2137             break;
   2138         case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
   2139             /**
   2140              * When the AF trigger is activated, the algorithm should finish
   2141              * its PASSIVE_SCAN if active, and then transition into AF_FOCUSED
   2142              * or AF_NOT_FOCUSED as appropriate
   2143              */
   2144             if (afTriggerStart) {
   2145                 // Randomly transition to focused or not focused
   2146                 if (rand() % 3) {
   2147                     mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
   2148                 } else {
   2149                     mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
   2150                 }
   2151             }
   2152             /**
   2153              * When the AF trigger is not involved, the AF algorithm should
   2154              * start in INACTIVE state, and then transition into PASSIVE_SCAN
   2155              * and PASSIVE_FOCUSED states
   2156              */
   2157             else if (!afTriggerCancel) {
    2158                 // Randomly transition to passive focus
   2159                 if (rand() % 3 == 0) {
   2160                     mAfState = ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED;
   2161                 }
   2162             }
   2163 
   2164             break;
   2165         case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
   2166             if (afTriggerStart) {
   2167                 // Randomly transition to focused or not focused
   2168                 if (rand() % 3) {
   2169                     mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
   2170                 } else {
   2171                     mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
   2172                 }
   2173             }
   2174             // TODO: initiate passive scan (PASSIVE_SCAN)
   2175             break;
   2176         case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN:
   2177             // Simulate AF sweep completing instantaneously
   2178 
   2179             // Randomly transition to focused or not focused
   2180             if (rand() % 3) {
   2181                 mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
   2182             } else {
   2183                 mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
   2184             }
   2185             break;
   2186         case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
   2187             if (afTriggerStart) {
   2188                 switch (afMode) {
   2189                     case ANDROID_CONTROL_AF_MODE_AUTO:
   2190                         // fall-through
   2191                     case ANDROID_CONTROL_AF_MODE_MACRO:
   2192                         mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
   2193                         break;
   2194                     case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
   2195                         // fall-through
   2196                     case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
   2197                         // continuous autofocus => trigger start has no effect
   2198                         break;
   2199                 }
   2200             }
   2201             break;
   2202         case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
   2203             if (afTriggerStart) {
   2204                 switch (afMode) {
   2205                     case ANDROID_CONTROL_AF_MODE_AUTO:
   2206                         // fall-through
   2207                     case ANDROID_CONTROL_AF_MODE_MACRO:
   2208                         mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
   2209                         break;
   2210                     case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
   2211                         // fall-through
   2212                     case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
   2213                         // continuous autofocus => trigger start has no effect
   2214                         break;
   2215                 }
   2216             }
   2217             break;
   2218         default:
   2219             ALOGE("%s: Bad af state %d", __FUNCTION__, mAfState);
   2220     }
   2221 
   2222     {
   2223         char afStateString[100] = {0,};
   2224         camera_metadata_enum_snprint(ANDROID_CONTROL_AF_STATE,
   2225                 oldAfState,
   2226                 afStateString,
   2227                 sizeof(afStateString));
   2228 
   2229         char afNewStateString[100] = {0,};
   2230         camera_metadata_enum_snprint(ANDROID_CONTROL_AF_STATE,
   2231                 mAfState,
   2232                 afNewStateString,
   2233                 sizeof(afNewStateString));
   2234         ALOGVV("%s: AF state transitioned from %s to %s",
   2235               __FUNCTION__, afStateString, afNewStateString);
   2236     }
   2237 
   2238 
   2239     return OK;
   2240 }
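
// Illustrative sketch, not emulator code: for the AUTO and MACRO cases handled
// above, the transitions collapse to "a START trigger from any non-scanning
// state kicks off an ACTIVE_SCAN, and a scan resolves immediately to focused
// or not focused". The hypothetical helper below condenses that; it ignores
// the continuous modes, CANCEL handling, and the randomness used above.
static __attribute__((unused)) uint8_t nextAutoAfStateSketch(
        uint8_t state, bool triggerStart, bool focusSucceeds) {
    if (state == ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN) {
        // A scan always finishes within one frame in this emulator.
        return focusSucceeds ? ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED
                             : ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
    }
    if (triggerStart) {
        return ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
    }
    return state;
}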
   2241 
   2242 status_t EmulatedFakeCamera3::doFakeAWB(CameraMetadata &settings) {
   2243     camera_metadata_entry e;
   2244 
   2245     e = settings.find(ANDROID_CONTROL_AWB_MODE);
   2246     if (e.count == 0 && hasCapability(BACKWARD_COMPATIBLE)) {
   2247         ALOGE("%s: No AWB mode entry!", __FUNCTION__);
   2248         return BAD_VALUE;
   2249     }
   2250     uint8_t awbMode = (e.count > 0) ? e.data.u8[0] : (uint8_t)ANDROID_CONTROL_AWB_MODE_AUTO;
   2251 
   2252     // TODO: Add white balance simulation
   2253 
   2254     e = settings.find(ANDROID_CONTROL_AWB_LOCK);
   2255     bool awbLocked = (e.count > 0) ? (e.data.u8[0] == ANDROID_CONTROL_AWB_LOCK_ON) : false;
   2256 
   2257     switch (awbMode) {
   2258         case ANDROID_CONTROL_AWB_MODE_OFF:
   2259             mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
   2260             break;
   2261         case ANDROID_CONTROL_AWB_MODE_AUTO:
   2262         case ANDROID_CONTROL_AWB_MODE_INCANDESCENT:
   2263         case ANDROID_CONTROL_AWB_MODE_FLUORESCENT:
   2264         case ANDROID_CONTROL_AWB_MODE_DAYLIGHT:
   2265         case ANDROID_CONTROL_AWB_MODE_SHADE:
   2266             // Always magically right, or locked
   2267             mAwbState = awbLocked ? ANDROID_CONTROL_AWB_STATE_LOCKED :
   2268                     ANDROID_CONTROL_AWB_STATE_CONVERGED;
   2269             break;
   2270         default:
   2271             ALOGE("%s: Emulator doesn't support AWB mode %d",
   2272                     __FUNCTION__, awbMode);
   2273             return BAD_VALUE;
   2274     }
   2275 
   2276     return OK;
   2277 }
   2278 
   2279 
   2280 void EmulatedFakeCamera3::update3A(CameraMetadata &settings) {
   2281     if (mAeMode != ANDROID_CONTROL_AE_MODE_OFF) {
   2282         settings.update(ANDROID_SENSOR_EXPOSURE_TIME,
   2283                 &mAeCurrentExposureTime, 1);
   2284         settings.update(ANDROID_SENSOR_SENSITIVITY,
   2285                 &mAeCurrentSensitivity, 1);
   2286     }
   2287 
   2288     settings.update(ANDROID_CONTROL_AE_STATE,
   2289             &mAeState, 1);
   2290     settings.update(ANDROID_CONTROL_AF_STATE,
   2291             &mAfState, 1);
   2292     settings.update(ANDROID_CONTROL_AWB_STATE,
   2293             &mAwbState, 1);
   2294 
   2295     uint8_t lensState;
   2296     switch (mAfState) {
   2297         case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
   2298         case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN:
   2299             lensState = ANDROID_LENS_STATE_MOVING;
   2300             break;
   2301         case ANDROID_CONTROL_AF_STATE_INACTIVE:
   2302         case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
   2303         case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
   2304         case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
   2305         case ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED:
   2306         default:
   2307             lensState = ANDROID_LENS_STATE_STATIONARY;
   2308             break;
   2309     }
   2310     settings.update(ANDROID_LENS_STATE, &lensState, 1);
   2311 
   2312 }
   2313 
   2314 void EmulatedFakeCamera3::signalReadoutIdle() {
   2315     Mutex::Autolock l(mLock);
    2316     // Need to check isIdle again because waiting on mLock may have allowed
   2317     // something to be placed in the in-flight queue.
   2318     if (mStatus == STATUS_ACTIVE && mReadoutThread->isIdle()) {
   2319         ALOGV("Now idle");
   2320         mStatus = STATUS_READY;
   2321     }
   2322 }
   2323 
   2324 void EmulatedFakeCamera3::onSensorEvent(uint32_t frameNumber, Event e,
   2325         nsecs_t timestamp) {
   2326     switch(e) {
   2327         case Sensor::SensorListener::EXPOSURE_START: {
    2328             ALOGVV("%s: Frame %d: Sensor started exposure at %" PRId64,
    2329                     __FUNCTION__, frameNumber, timestamp);
   2330             // Trigger shutter notify to framework
   2331             camera3_notify_msg_t msg;
   2332             msg.type = CAMERA3_MSG_SHUTTER;
   2333             msg.message.shutter.frame_number = frameNumber;
   2334             msg.message.shutter.timestamp = timestamp;
   2335             sendNotify(&msg);
   2336             break;
   2337         }
   2338         default:
   2339             ALOGW("%s: Unexpected sensor event %d at %" PRId64, __FUNCTION__,
   2340                     e, timestamp);
   2341             break;
   2342     }
   2343 }
   2344 
   2345 EmulatedFakeCamera3::ReadoutThread::ReadoutThread(EmulatedFakeCamera3 *parent) :
   2346         mParent(parent), mJpegWaiting(false) {
   2347 }
   2348 
   2349 EmulatedFakeCamera3::ReadoutThread::~ReadoutThread() {
   2350     for (List<Request>::iterator i = mInFlightQueue.begin();
   2351          i != mInFlightQueue.end(); i++) {
   2352         delete i->buffers;
   2353         delete i->sensorBuffers;
   2354     }
   2355 }
   2356 
   2357 void EmulatedFakeCamera3::ReadoutThread::queueCaptureRequest(const Request &r) {
   2358     Mutex::Autolock l(mLock);
   2359 
   2360     mInFlightQueue.push_back(r);
   2361     mInFlightSignal.signal();
   2362 }
   2363 
   2364 bool EmulatedFakeCamera3::ReadoutThread::isIdle() {
   2365     Mutex::Autolock l(mLock);
   2366     return mInFlightQueue.empty() && !mThreadActive;
   2367 }
   2368 
   2369 status_t EmulatedFakeCamera3::ReadoutThread::waitForReadout() {
   2370     status_t res;
   2371     Mutex::Autolock l(mLock);
   2372     int loopCount = 0;
   2373     while (mInFlightQueue.size() >= kMaxQueueSize) {
   2374         res = mInFlightSignal.waitRelative(mLock, kWaitPerLoop);
   2375         if (res != OK && res != TIMED_OUT) {
   2376             ALOGE("%s: Error waiting for in-flight queue to shrink",
   2377                     __FUNCTION__);
   2378             return INVALID_OPERATION;
   2379         }
   2380         if (loopCount == kMaxWaitLoops) {
   2381             ALOGE("%s: Timed out waiting for in-flight queue to shrink",
   2382                     __FUNCTION__);
   2383             return TIMED_OUT;
   2384         }
   2385         loopCount++;
   2386     }
   2387     return OK;
   2388 }
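
// Note: waitForReadout() above bounds the total stall at roughly
// kMaxWaitLoops * kWaitPerLoop (class constants declared elsewhere); every
// iteration waits at most one kWaitPerLoop, and the call gives up with
// TIMED_OUT once kMaxWaitLoops iterations pass while the in-flight queue
// stays at kMaxQueueSize or more.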
   2389 
   2390 bool EmulatedFakeCamera3::ReadoutThread::threadLoop() {
   2391     status_t res;
   2392 
   2393     ALOGVV("%s: ReadoutThread waiting for request", __FUNCTION__);
   2394 
   2395     // First wait for a request from the in-flight queue
   2396 
   2397     if (mCurrentRequest.settings.isEmpty()) {
   2398         Mutex::Autolock l(mLock);
   2399         if (mInFlightQueue.empty()) {
   2400             res = mInFlightSignal.waitRelative(mLock, kWaitPerLoop);
   2401             if (res == TIMED_OUT) {
   2402                 ALOGVV("%s: ReadoutThread: Timed out waiting for request",
   2403                         __FUNCTION__);
   2404                 return true;
   2405             } else if (res != NO_ERROR) {
   2406                 ALOGE("%s: Error waiting for capture requests: %d",
   2407                         __FUNCTION__, res);
   2408                 return false;
   2409             }
   2410         }
   2411         mCurrentRequest.frameNumber = mInFlightQueue.begin()->frameNumber;
   2412         mCurrentRequest.settings.acquire(mInFlightQueue.begin()->settings);
   2413         mCurrentRequest.buffers = mInFlightQueue.begin()->buffers;
   2414         mCurrentRequest.sensorBuffers = mInFlightQueue.begin()->sensorBuffers;
   2415         mInFlightQueue.erase(mInFlightQueue.begin());
   2416         mInFlightSignal.signal();
   2417         mThreadActive = true;
   2418         ALOGVV("%s: Beginning readout of frame %d", __FUNCTION__,
   2419                 mCurrentRequest.frameNumber);
   2420     }
   2421 
   2422     // Then wait for it to be delivered from the sensor
   2423     ALOGVV("%s: ReadoutThread: Wait for frame to be delivered from sensor",
   2424             __FUNCTION__);
   2425 
   2426     nsecs_t captureTime;
   2427     bool gotFrame =
   2428             mParent->mSensor->waitForNewFrame(kWaitPerLoop, &captureTime);
   2429     if (!gotFrame) {
   2430         ALOGVV("%s: ReadoutThread: Timed out waiting for sensor frame",
   2431                 __FUNCTION__);
   2432         return true;
   2433     }
   2434 
    2435     ALOGVV("Sensor done with readout for frame %d, captured at %" PRId64,
    2436             mCurrentRequest.frameNumber, captureTime);
   2437 
   2438     // Check if we need to JPEG encode a buffer, and send it for async
   2439     // compression if so. Otherwise prepare the buffer for return.
   2440     bool needJpeg = false;
   2441     HalBufferVector::iterator buf = mCurrentRequest.buffers->begin();
   2442     while(buf != mCurrentRequest.buffers->end()) {
   2443         bool goodBuffer = true;
    2444         if (buf->stream->format == HAL_PIXEL_FORMAT_BLOB &&
    2445                 buf->stream->data_space != HAL_DATASPACE_DEPTH) {
   2446             Mutex::Autolock jl(mJpegLock);
   2447             if (mJpegWaiting) {
   2448                 // This shouldn't happen, because processCaptureRequest should
   2449                 // be stalling until JPEG compressor is free.
   2450                 ALOGE("%s: Already processing a JPEG!", __FUNCTION__);
   2451                 goodBuffer = false;
   2452             }
   2453             if (goodBuffer) {
   2454                 // Compressor takes ownership of sensorBuffers here
   2455                 res = mParent->mJpegCompressor->start(mCurrentRequest.sensorBuffers,
   2456                         this);
   2457                 goodBuffer = (res == OK);
   2458             }
   2459             if (goodBuffer) {
   2460                 needJpeg = true;
   2461 
   2462                 mJpegHalBuffer = *buf;
   2463                 mJpegFrameNumber = mCurrentRequest.frameNumber;
   2464                 mJpegWaiting = true;
   2465 
   2466                 mCurrentRequest.sensorBuffers = NULL;
   2467                 buf = mCurrentRequest.buffers->erase(buf);
   2468 
   2469                 continue;
   2470             }
   2471             ALOGE("%s: Error compressing output buffer: %s (%d)",
   2472                         __FUNCTION__, strerror(-res), res);
   2473             // fallthrough for cleanup
   2474         }
   2475         GrallocModule::getInstance().unlock(*(buf->buffer));
   2476 
   2477         buf->status = goodBuffer ? CAMERA3_BUFFER_STATUS_OK :
   2478                 CAMERA3_BUFFER_STATUS_ERROR;
   2479         buf->acquire_fence = -1;
   2480         buf->release_fence = -1;
   2481 
   2482         ++buf;
   2483     } // end while
   2484 
   2485     // Construct result for all completed buffers and results
   2486 
   2487     camera3_capture_result result;
   2488 
   2489     if (mParent->hasCapability(BACKWARD_COMPATIBLE)) {
   2490         static const uint8_t sceneFlicker = ANDROID_STATISTICS_SCENE_FLICKER_NONE;
   2491         mCurrentRequest.settings.update(ANDROID_STATISTICS_SCENE_FLICKER,
   2492                 &sceneFlicker, 1);
   2493 
   2494         static const uint8_t flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
   2495         mCurrentRequest.settings.update(ANDROID_FLASH_STATE,
   2496                 &flashState, 1);
   2497 
   2498         nsecs_t rollingShutterSkew = Sensor::kFrameDurationRange[0];
   2499         mCurrentRequest.settings.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
   2500                 &rollingShutterSkew, 1);
   2501 
   2502         float focusRange[] = { 1.0f/5.0f, 0 }; // 5 m to infinity in focus
   2503         mCurrentRequest.settings.update(ANDROID_LENS_FOCUS_RANGE,
   2504                 focusRange, sizeof(focusRange)/sizeof(float));
   2505     }
   2506 
    if (mParent->hasCapability(DEPTH_OUTPUT)) {
        camera_metadata_entry_t entry;

        find_camera_metadata_entry(mParent->mCameraInfo, ANDROID_LENS_POSE_TRANSLATION, &entry);
        mCurrentRequest.settings.update(ANDROID_LENS_POSE_TRANSLATION,
                entry.data.f, entry.count);

        find_camera_metadata_entry(mParent->mCameraInfo, ANDROID_LENS_POSE_ROTATION, &entry);
        mCurrentRequest.settings.update(ANDROID_LENS_POSE_ROTATION,
                entry.data.f, entry.count);

        find_camera_metadata_entry(mParent->mCameraInfo, ANDROID_LENS_INTRINSIC_CALIBRATION, &entry);
        mCurrentRequest.settings.update(ANDROID_LENS_INTRINSIC_CALIBRATION,
                entry.data.f, entry.count);

        find_camera_metadata_entry(mParent->mCameraInfo, ANDROID_LENS_RADIAL_DISTORTION, &entry);
        mCurrentRequest.settings.update(ANDROID_LENS_RADIAL_DISTORTION,
                entry.data.f, entry.count);
    }

    mCurrentRequest.settings.update(ANDROID_SENSOR_TIMESTAMP,
            &captureTime, 1);

    // JPEGs take a stage longer
    const uint8_t pipelineDepth = needJpeg ? kMaxBufferCount : kMaxBufferCount - 1;
    mCurrentRequest.settings.update(ANDROID_REQUEST_PIPELINE_DEPTH,
            &pipelineDepth, 1);

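    // One monolithic result: partial_result = 1 marks the first (and, for
    // this HAL, seemingly only) metadata partial, with every output buffer
    // completed above attached to it.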
    result.frame_number = mCurrentRequest.frameNumber;
    result.result = mCurrentRequest.settings.getAndLock();
    result.num_output_buffers = mCurrentRequest.buffers->size();
    result.output_buffers = mCurrentRequest.buffers->array();
    result.input_buffer = nullptr;
    result.partial_result = 1;

    // Go idle if queue is empty, before sending result
    bool signalIdle = false;
    {
        Mutex::Autolock l(mLock);
        if (mInFlightQueue.empty()) {
            mThreadActive = false;
            signalIdle = true;
        }
    }
    if (signalIdle) mParent->signalReadoutIdle();

    // Send it off to the framework
    ALOGVV("%s: ReadoutThread: Send result to framework",
            __FUNCTION__);
    mParent->sendCaptureResult(&result);

    // Clean up
    mCurrentRequest.settings.unlock(result.result);

    delete mCurrentRequest.buffers;
    mCurrentRequest.buffers = NULL;
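    // When a JPEG is pending, sensorBuffers was handed off to the compressor
    // above, so it isn't freed here.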
    if (!needJpeg) {
        delete mCurrentRequest.sensorBuffers;
        mCurrentRequest.sensorBuffers = NULL;
    }
    mCurrentRequest.settings.clear();

    return true;
}

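// JpegCompressor callback: hands the JPEG buffer that was held back by the
// readout loop above back to the framework as its own buffer-only result.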
void EmulatedFakeCamera3::ReadoutThread::onJpegDone(
        const StreamBuffer &jpegBuffer, bool success) {
    Mutex::Autolock jl(mJpegLock);

    GrallocModule::getInstance().unlock(*(jpegBuffer.buffer));

    mJpegHalBuffer.status = success ?
            CAMERA3_BUFFER_STATUS_OK : CAMERA3_BUFFER_STATUS_ERROR;
    mJpegHalBuffer.acquire_fence = -1;
    mJpegHalBuffer.release_fence = -1;
    mJpegWaiting = false;

    camera3_capture_result result;

    result.frame_number = mJpegFrameNumber;
    result.result = NULL;
    result.num_output_buffers = 1;
    result.output_buffers = &mJpegHalBuffer;
    result.input_buffer = nullptr;
    result.partial_result = 0;
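    // No metadata accompanies this buffer-only result (result is NULL and
    // partial_result is 0); the frame's metadata was already delivered with
    // its main capture result.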

    if (!success) {
        ALOGE("%s: Compression failure, returning error state buffer to"
                " framework", __FUNCTION__);
    } else {
        ALOGV("%s: Compression complete, returning buffer to framework",
                __FUNCTION__);
    }

    mParent->sendCaptureResult(&result);
}

void EmulatedFakeCamera3::ReadoutThread::onJpegInputDone(
        const StreamBuffer &inputBuffer) {
    // Should never get here, since the input buffer has to be returned
    // by the end of processCaptureRequest
    ALOGE("%s: Unexpected input buffer from JPEG compressor!", __FUNCTION__);
}

} // namespace android