Home | History | Annotate | Download | only in camera
      1 /*
      2  * Copyright (C) 2017 The Android Open Source Project
      3  *
      4  * Licensed under the Apache License, Version 2.0 (the "License");
      5  * you may not use this file except in compliance with the License.
      6  * You may obtain a copy of the License at
      7  *
      8  *      http://www.apache.org/licenses/LICENSE-2.0
      9  *
     10  * Unless required by applicable law or agreed to in writing, software
     11  * distributed under the License is distributed on an "AS IS" BASIS,
     12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     13  * See the License for the specific language governing permissions and
     14  * limitations under the License.
     15  */
     16 
     17 /*
     18  * Contains implementation of a class EmulatedQemuCamera3 that encapsulates
     19  * functionality of an advanced fake camera.
     20  */
     21 
     22 // Uncomment LOG_NDEBUG to enable verbose logging, and uncomment both LOG_NDEBUG
     23 // *and* LOG_NNDEBUG to enable very verbose logging.
     24 
     25 //#define LOG_NDEBUG 0
     26 //#define LOG_NNDEBUG 0
     27 
     28 #define LOG_TAG "EmulatedCamera_QemuCamera3"
     29 
     30 #if defined(LOG_NNDEBUG) && LOG_NNDEBUG == 0
     31 #define ALOGVV ALOGV
     32 #else
     33 #define ALOGVV(...) ((void)0)
     34 #endif
     35 
     36 #include "EmulatedCameraFactory.h"
     37 #include "GrallocModule.h"
     38 #include "EmulatedQemuCamera3.h"
     39 
     40 #include <cmath>
     41 #include <cutils/properties.h>
     42 #include <inttypes.h>
     43 #include <sstream>
     44 #include <ui/Fence.h>
     45 #include <log/log.h>
     46 #include <vector>
     47 
     48 namespace android {
     49 
     50 /*
     51  * Constants for Camera Capabilities
     52  */
     53 
// Time conversion factors in nanoseconds (the unit of nsecs_t):
// USEC is ns-per-microsecond, MSEC is ns-per-millisecond.
const int64_t USEC = 1000LL;
const int64_t MSEC = USEC * 1000LL;

// Pixel formats accepted by configureStreams(); any other requested stream
// format is rejected with BAD_VALUE.
const int32_t EmulatedQemuCamera3::kAvailableFormats[] = {
    HAL_PIXEL_FORMAT_BLOB,
    HAL_PIXEL_FORMAT_RGBA_8888,
    HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
    // These are handled by YCbCr_420_888
    //        HAL_PIXEL_FORMAT_YV12,
    //        HAL_PIXEL_FORMAT_YCrCb_420_SP,
    HAL_PIXEL_FORMAT_YCbCr_420_888
};

/**
 * 3A constants
 */

// Default exposure and gain targets for different scenarios
const nsecs_t EmulatedQemuCamera3::kNormalExposureTime       = 10 * MSEC;
const nsecs_t EmulatedQemuCamera3::kFacePriorityExposureTime = 30 * MSEC;
const int     EmulatedQemuCamera3::kNormalSensitivity        = 100;
const int     EmulatedQemuCamera3::kFacePrioritySensitivity  = 400;
//CTS requires 8 frames timeout in waitForAeStable
const float   EmulatedQemuCamera3::kExposureTrackRate        = 0.2;
const int     EmulatedQemuCamera3::kPrecaptureMinFrames      = 10;
const int     EmulatedQemuCamera3::kStableAeMaxFrames        = 100;
// Bounds (in EV-like units) for the simulated exposure wander of the fake AE.
const float   EmulatedQemuCamera3::kExposureWanderMin        = -2;
const float   EmulatedQemuCamera3::kExposureWanderMax        = 1;
     82 
     83 /*****************************************************************************
     84  * Constructor/Destructor
     85  ****************************************************************************/
     86 
     87 EmulatedQemuCamera3::EmulatedQemuCamera3(int cameraId, struct hw_module_t* module) :
     88         EmulatedCamera3(cameraId, module) {
     89     ALOGI("Constructing emulated qemu camera 3: ID %d", mCameraID);
     90 
     91     for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; ++i) {
     92         mDefaultTemplates[i] = nullptr;
     93     }
     94 }
     95 
     96 EmulatedQemuCamera3::~EmulatedQemuCamera3() {
     97     for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; ++i) {
     98         if (mDefaultTemplates[i] != nullptr) {
     99             free_camera_metadata(mDefaultTemplates[i]);
    100         }
    101     }
    102     delete[] mDeviceName;
    103 }
    104 
    105 /*****************************************************************************
    106  * Public Methods
    107  ****************************************************************************/
    108 
    109 /*
    110  * Camera Device Lifecycle Methods
    111  */
    112 
    113 void EmulatedQemuCamera3::parseResolutions(const char *frameDims) {
    114     const size_t kMaxFrameDimsLength = 512;
    115     size_t frameDimsLength = strnlen(frameDims, kMaxFrameDimsLength);
    116     if (frameDimsLength == kMaxFrameDimsLength) {
    117         ALOGE("%s: Frame dimensions string was too long (>= %d)",
    118                 __FUNCTION__, frameDimsLength);
    119         return;
    120     } else if (frameDimsLength == 0) {
    121         ALOGE("%s: Frame dimensions string was NULL or zero-length",
    122                 __FUNCTION__);
    123         return;
    124     }
    125     std::stringstream ss(frameDims);
    126     std::string input;
    127     while (std::getline(ss, input, ',')) {
    128         int width = 0;
    129         int height = 0;
    130         char none = 0;
    131         /*
    132          * Expect only two results because that means there was nothing after
    133          * the height, we don't want any trailing characters. Otherwise, we just
    134          * ignore this entry.
    135          */
    136         if (sscanf(input.c_str(), "%dx%d%c", &width, &height, &none) == 2) {
    137             mResolutions.push_back(std::pair<int32_t,int32_t>(width, height));
    138             ALOGI("%s: %dx%d", __FUNCTION__, width, height);
    139         }
    140         else {
    141             ALOGE("wrong resolution input %s", input.c_str());
    142         }
    143     }
    144 
    145     /*
    146      * We assume the sensor size of the webcam is the resolution with the
    147      * largest area. Any resolution with a dimension that exceeds the sensor
    148      * size will be rejected, so Camera API calls will start failing. To work
    149      * around this, we remove any resolutions with at least one dimension
    150      * exceeding that of the max area resolution.
    151      */
    152 
    153     // Find the resolution with the maximum area and use that as the sensor
    154     // size.
    155     int maxArea = 0;
    156     for (const auto &res : mResolutions) {
    157         int area = res.first * res.second;
    158         if (area > maxArea) {
    159             maxArea = area;
    160             mSensorWidth = res.first;
    161             mSensorHeight = res.second;
    162         }
    163     }
    164 
    165     // Remove any resolution with a dimension exceeding the sensor size.
    166     for (auto res = mResolutions.begin(); res != mResolutions.end(); ) {
    167         if (res->first > (int32_t)mSensorWidth ||
    168             res->second > (int32_t)mSensorHeight) {
    169             // Width and/or height larger than sensor. Remove it.
    170             res = mResolutions.erase(res);
    171         } else {
    172             ++res;
    173         }
    174     }
    175 
    176     if (mResolutions.empty()) {
    177         ALOGE("%s: Qemu camera has no valid resolutions", __FUNCTION__);
    178     }
    179 }
    180 
status_t EmulatedQemuCamera3::Initialize(const char *deviceName,
                                         const char *frameDims,
                                         const char *facingDir) {
    // One-time setup: records the device name, parses supported resolutions,
    // determines facing, then builds capability and static metadata.
    //
    // Ownership: this method takes ownership of all three heap-allocated
    // strings. deviceName is retained in mDeviceName (released via delete[]
    // in the destructor); frameDims and facingDir are released here.
    if (mStatus != STATUS_ERROR) {
        // The device starts out in STATUS_ERROR until initialized; any other
        // state means Initialize() already ran.
        ALOGE("%s: Already initialized!", __FUNCTION__);
        return INVALID_OPERATION;
    }

    /*
     * Save parameters for later.
     */
    mDeviceName = deviceName;
    parseResolutions(frameDims);
    if (strcmp("back", facingDir) == 0) {
        mFacingBack = true;
    } else {
        mFacingBack = false;
    }
    // We no longer need these two strings.
    delete[] frameDims;
    delete[] facingDir;

    status_t res = getCameraCapabilities();
    if (res != OK) {
        ALOGE("%s: Unable to get camera capabilities: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return res;
    }

    res = constructStaticInfo();
    if (res != OK) {
        ALOGE("%s: Unable to allocate static info: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return res;
    }

    // Finish common (base-class) initialization.
    return EmulatedCamera3::Initialize();
}
    219 
status_t EmulatedQemuCamera3::connectCamera(hw_device_t** device) {
    // Opens the camera: starts the QemuSensor and the readout thread, creates
    // the JPEG compressor, and seeds the fake 3A state machine with defaults.
    // Only valid from the STATUS_CLOSED state.
    Mutex::Autolock l(mLock);
    status_t res;

    if (mStatus != STATUS_CLOSED) {
        ALOGE("%s: Can't connect in state %d", __FUNCTION__, mStatus);
        return INVALID_OPERATION;
    }

    /*
     * Initialize sensor.
     */
    mSensor = new QemuSensor(mDeviceName, mSensorWidth, mSensorHeight);
    mSensor->setQemuSensorListener(this);
    res = mSensor->startUp();
    if (res != NO_ERROR) {
        // Sensor failed to start; leave state unchanged and propagate.
        return res;
    }

    mReadoutThread = new ReadoutThread(this);
    mJpegCompressor = new JpegCompressor();

    res = mReadoutThread->run("EmuCam3::readoutThread");
    if (res != NO_ERROR) return res;

    // Initialize fake 3A

    mFacePriority = false;
    mAeMode       = ANDROID_CONTROL_AE_MODE_ON;
    mAfMode       = ANDROID_CONTROL_AF_MODE_AUTO;
    mAwbMode      = ANDROID_CONTROL_AWB_MODE_AUTO;
    mAeState      = ANDROID_CONTROL_AE_STATE_INACTIVE;
    mAfState      = ANDROID_CONTROL_AF_STATE_INACTIVE;
    mAwbState     = ANDROID_CONTROL_AWB_STATE_INACTIVE;
    mAeCounter    = 0;
    mAeTargetExposureTime = kNormalExposureTime;
    mAeCurrentExposureTime = kNormalExposureTime;
    mAeCurrentSensitivity  = kNormalSensitivity;

    // Base class completes the HAL device handle setup.
    return EmulatedCamera3::connectCamera(device);
}
    261 
status_t EmulatedQemuCamera3::closeCamera() {
    // Shuts the camera down: stops the sensor, asks the readout thread to
    // exit, joins it, then tears down per-stream private state.
    status_t res;
    {
        Mutex::Autolock l(mLock);
        if (mStatus == STATUS_CLOSED) return OK;

        res = mSensor->shutDown();
        if (res != NO_ERROR) {
            ALOGE("%s: Unable to shut down sensor: %d", __FUNCTION__, res);
            return res;
        }
        mSensor.clear();

        mReadoutThread->requestExit();
    }

    // NOTE: join() is deliberately performed with mLock released —
    // presumably because the readout thread may need the lock to finish
    // exiting, so joining while holding it could deadlock.
    mReadoutThread->join();

    {
        Mutex::Autolock l(mLock);
        // Clear out private stream information.
        for (StreamIterator s = mStreams.begin(); s != mStreams.end(); s++) {
            PrivateStreamInfo *privStream =
                    static_cast<PrivateStreamInfo*>((*s)->priv);
            delete privStream;
            (*s)->priv = nullptr;
        }
        mStreams.clear();
        mReadoutThread.clear();
    }

    return EmulatedCamera3::closeCamera();
}
    295 
    296 status_t EmulatedQemuCamera3::getCameraInfo(struct camera_info *info) {
    297     info->facing = mFacingBack ? CAMERA_FACING_BACK : CAMERA_FACING_FRONT;
    298     info->orientation = gEmulatedCameraFactory.getFakeCameraOrientation();
    299     return EmulatedCamera3::getCameraInfo(info);
    300 }
    301 
    302 /*
    303  * Camera3 Interface Methods
    304  */
    305 
    306 status_t EmulatedQemuCamera3::configureStreams(
    307         camera3_stream_configuration *streamList) {
    308     Mutex::Autolock l(mLock);
    309     ALOGV("%s: %d streams", __FUNCTION__, streamList->num_streams);
    310 
    311     if (mStatus != STATUS_OPEN && mStatus != STATUS_READY) {
    312         ALOGE("%s: Cannot configure streams in state %d",
    313                 __FUNCTION__, mStatus);
    314         return NO_INIT;
    315     }
    316 
    317     /*
    318      * Sanity-check input list.
    319      */
    320     if (streamList == nullptr) {
    321         ALOGE("%s: NULL stream configuration", __FUNCTION__);
    322         return BAD_VALUE;
    323     }
    324     if (streamList->streams == nullptr) {
    325         ALOGE("%s: NULL stream list", __FUNCTION__);
    326         return BAD_VALUE;
    327     }
    328     if (streamList->num_streams < 1) {
    329         ALOGE("%s: Bad number of streams requested: %d", __FUNCTION__,
    330                 streamList->num_streams);
    331         return BAD_VALUE;
    332     }
    333 
    334     camera3_stream_t *inputStream = nullptr;
    335     for (size_t i = 0; i < streamList->num_streams; ++i) {
    336         camera3_stream_t *newStream = streamList->streams[i];
    337 
    338         if (newStream == nullptr) {
    339             ALOGE("%s: Stream index %zu was NULL", __FUNCTION__, i);
    340             return BAD_VALUE;
    341         }
    342 
    343         ALOGV("%s: Stream %p (id %zu), type %d, usage 0x%x, format 0x%x",
    344                 __FUNCTION__, newStream, i, newStream->stream_type,
    345                 newStream->usage, newStream->format);
    346 
    347         if (newStream->stream_type == CAMERA3_STREAM_INPUT ||
    348             newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
    349             if (inputStream != nullptr) {
    350                 ALOGE("%s: Multiple input streams requested!", __FUNCTION__);
    351                 return BAD_VALUE;
    352             }
    353             inputStream = newStream;
    354         }
    355 
    356         bool validFormat = false;
    357         size_t numFormats = sizeof(kAvailableFormats) /
    358                 sizeof(kAvailableFormats[0]);
    359         for (size_t f = 0; f < numFormats; ++f) {
    360             if (newStream->format == kAvailableFormats[f]) {
    361                 validFormat = true;
    362                 break;
    363             }
    364         }
    365         if (!validFormat) {
    366             ALOGE("%s: Unsupported stream format 0x%x requested",
    367                     __FUNCTION__, newStream->format);
    368             return BAD_VALUE;
    369         }
    370     }
    371     mInputStream = inputStream;
    372 
    373     /*
    374      * Initially mark all existing streams as not alive.
    375      */
    376     for (StreamIterator s = mStreams.begin(); s != mStreams.end(); ++s) {
    377         PrivateStreamInfo *privStream =
    378                 static_cast<PrivateStreamInfo*>((*s)->priv);
    379         privStream->alive = false;
    380     }
    381 
    382     /*
    383      * Find new streams and mark still-alive ones.
    384      */
    385     for (size_t i = 0; i < streamList->num_streams; ++i) {
    386         camera3_stream_t *newStream = streamList->streams[i];
    387         if (newStream->priv == nullptr) {
    388             // New stream. Construct info.
    389             PrivateStreamInfo *privStream = new PrivateStreamInfo();
    390             privStream->alive = true;
    391 
    392             newStream->max_buffers = kMaxBufferCount;
    393             newStream->priv = privStream;
    394             mStreams.push_back(newStream);
    395         } else {
    396             // Existing stream, mark as still alive.
    397             PrivateStreamInfo *privStream =
    398                     static_cast<PrivateStreamInfo*>(newStream->priv);
    399             privStream->alive = true;
    400         }
    401         // Always update usage and max buffers.
    402         newStream->max_buffers = kMaxBufferCount;
    403         switch (newStream->stream_type) {
    404             case CAMERA3_STREAM_OUTPUT:
    405                 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
    406                 break;
    407             case CAMERA3_STREAM_INPUT:
    408                 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
    409                 break;
    410             case CAMERA3_STREAM_BIDIRECTIONAL:
    411                 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
    412                         GRALLOC_USAGE_HW_CAMERA_WRITE;
    413                 break;
    414         }
    415         // Set the buffer format, inline with gralloc implementation
    416         if (newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
    417             if (newStream->usage & GRALLOC_USAGE_HW_CAMERA_WRITE) {
    418                 if (newStream->usage & GRALLOC_USAGE_HW_TEXTURE) {
    419                     newStream->format = HAL_PIXEL_FORMAT_RGBA_8888;
    420                 }
    421                 else if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER) {
    422                     newStream->format = HAL_PIXEL_FORMAT_YCbCr_420_888;
    423                 }
    424                 else {
    425                     newStream->format = HAL_PIXEL_FORMAT_RGB_888;
    426                 }
    427             }
    428         }
    429     }
    430 
    431     /*
    432      * Reap the dead streams.
    433      */
    434     for (StreamIterator s = mStreams.begin(); s != mStreams.end();) {
    435         PrivateStreamInfo *privStream =
    436                 static_cast<PrivateStreamInfo*>((*s)->priv);
    437         if (!privStream->alive) {
    438             (*s)->priv = nullptr;
    439             delete privStream;
    440             s = mStreams.erase(s);
    441         } else {
    442             ++s;
    443         }
    444     }
    445 
    446     /*
    447      * Can't reuse settings across configure call.
    448      */
    449     mPrevSettings.clear();
    450 
    451     return OK;
    452 }
    453 
    454 status_t EmulatedQemuCamera3::registerStreamBuffers(
    455         const camera3_stream_buffer_set *bufferSet) {
    456     Mutex::Autolock l(mLock);
    457     ALOGE("%s: Should not be invoked on HAL versions >= 3.2!", __FUNCTION__);
    458     return NO_INIT;
    459 }
    460 
const camera_metadata_t* EmulatedQemuCamera3::constructDefaultRequestSettings(
        int type) {
    // Builds (or returns a cached copy of) the default capture-request
    // settings for the given CAMERA3_TEMPLATE_* type. Returns nullptr for an
    // out-of-range type, or for non-preview templates when the device lacks
    // the BACKWARD_COMPATIBLE capability.
    Mutex::Autolock l(mLock);

    if (type < 0 || type >= CAMERA3_TEMPLATE_COUNT) {
        ALOGE("%s: Unknown request settings template: %d",
                __FUNCTION__, type);
        return nullptr;
    }

    if (!hasCapability(BACKWARD_COMPATIBLE) && type != CAMERA3_TEMPLATE_PREVIEW) {
        ALOGE("%s: Template %d not supported w/o BACKWARD_COMPATIBLE capability",
                __FUNCTION__, type);
        return nullptr;
    }

    /*
     * Cache is not just an optimization - pointer returned has to live at least
     * as long as the camera device instance does.
     */
    if (mDefaultTemplates[type] != nullptr) {
        return mDefaultTemplates[type];
    }

    CameraMetadata settings;

    /* android.request */

    static const uint8_t metadataMode = ANDROID_REQUEST_METADATA_MODE_FULL;
    settings.update(ANDROID_REQUEST_METADATA_MODE, &metadataMode, 1);

    static const int32_t id = 0;
    settings.update(ANDROID_REQUEST_ID, &id, 1);

    static const int32_t frameCount = 0;
    settings.update(ANDROID_REQUEST_FRAME_COUNT, &frameCount, 1);

    /* android.lens */

    static const float focalLength = 5.0f;
    settings.update(ANDROID_LENS_FOCAL_LENGTH, &focalLength, 1);

    if (hasCapability(BACKWARD_COMPATIBLE)) {
        static const float focusDistance = 0;
        settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focusDistance, 1);

        static const float aperture = 2.8f;
        settings.update(ANDROID_LENS_APERTURE, &aperture, 1);

        static const float filterDensity = 0;
        settings.update(ANDROID_LENS_FILTER_DENSITY, &filterDensity, 1);

        static const uint8_t opticalStabilizationMode =
                ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
        settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
                &opticalStabilizationMode, 1);

        // FOCUS_RANGE set only in frame
    }

    /* android.flash */

    if (hasCapability(BACKWARD_COMPATIBLE)) {
        static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
        settings.update(ANDROID_FLASH_MODE, &flashMode, 1);

        static const uint8_t flashPower = 10;
        settings.update(ANDROID_FLASH_FIRING_POWER, &flashPower, 1);

        static const int64_t firingTime = 0;
        settings.update(ANDROID_FLASH_FIRING_TIME, &firingTime, 1);
    }

    /* android.scaler */
    if (hasCapability(BACKWARD_COMPATIBLE)) {
        // Default crop is the full active sensor array (no zoom).
        const int32_t cropRegion[4] = {
            0, 0, mSensorWidth, mSensorHeight
        };
        settings.update(ANDROID_SCALER_CROP_REGION, cropRegion, 4);
    }

    /* android.jpeg */
    if (hasCapability(BACKWARD_COMPATIBLE)) {
        static const uint8_t jpegQuality = 80;
        settings.update(ANDROID_JPEG_QUALITY, &jpegQuality, 1);

        static const int32_t thumbnailSize[2] = {
            320, 240
        };
        settings.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSize, 2);

        static const uint8_t thumbnailQuality = 80;
        settings.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &thumbnailQuality, 1);

        static const double gpsCoordinates[3] = {
            0, 0, 0
        };
        settings.update(ANDROID_JPEG_GPS_COORDINATES, gpsCoordinates, 3);

        static const uint8_t gpsProcessingMethod[32] = "None";
        settings.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, gpsProcessingMethod, 32);

        static const int64_t gpsTimestamp = 0;
        settings.update(ANDROID_JPEG_GPS_TIMESTAMP, &gpsTimestamp, 1);

        static const int32_t jpegOrientation = 0;
        settings.update(ANDROID_JPEG_ORIENTATION, &jpegOrientation, 1);
    }

    /* android.stats */
    if (hasCapability(BACKWARD_COMPATIBLE)) {
        static const uint8_t faceDetectMode =
                ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
        settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);

        static const uint8_t hotPixelMapMode =
                ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
        settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
    }

    /* android.control */

    // Map the template type to its corresponding capture intent.
    uint8_t controlIntent = 0;
    switch (type) {
      case CAMERA3_TEMPLATE_PREVIEW:
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
        break;
      case CAMERA3_TEMPLATE_STILL_CAPTURE:
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
        break;
      case CAMERA3_TEMPLATE_VIDEO_RECORD:
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
        break;
      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
        break;
      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
        break;
      case CAMERA3_TEMPLATE_MANUAL:
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
        break;
      default:
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
        break;
    }
    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);

    // MANUAL template disables the auto-control algorithms entirely.
    const uint8_t controlMode = (type == CAMERA3_TEMPLATE_MANUAL) ?
            ANDROID_CONTROL_MODE_OFF :
            ANDROID_CONTROL_MODE_AUTO;
    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);

    // Video templates pin the minimum frame rate to 30fps; others allow 5-30.
    int32_t aeTargetFpsRange[2] = {
        5, 30
    };
    if (type == CAMERA3_TEMPLATE_VIDEO_RECORD ||
            type == CAMERA3_TEMPLATE_VIDEO_SNAPSHOT) {
        aeTargetFpsRange[0] = 30;
    }
    settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, aeTargetFpsRange, 2);

    if (hasCapability(BACKWARD_COMPATIBLE)) {
        static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
        settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);

        const uint8_t sceneMode =
                ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
        settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);

        const uint8_t aeMode = (type == CAMERA3_TEMPLATE_MANUAL) ?
                ANDROID_CONTROL_AE_MODE_OFF : ANDROID_CONTROL_AE_MODE_ON;
        settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);

        static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
        settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);

        // All-zero metering region means "no region" — reused below for AF.
        static const int32_t controlRegions[5] = {
            0, 0, 0, 0, 0
        };
        settings.update(ANDROID_CONTROL_AE_REGIONS, controlRegions, 5);

        static const int32_t aeExpCompensation = 0;
        settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &aeExpCompensation, 1);


        static const uint8_t aeAntibandingMode =
                ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
        settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &aeAntibandingMode, 1);

        static const uint8_t aePrecaptureTrigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
        settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &aePrecaptureTrigger, 1);

        const uint8_t awbMode = (type == CAMERA3_TEMPLATE_MANUAL) ?
                ANDROID_CONTROL_AWB_MODE_OFF :
                ANDROID_CONTROL_AWB_MODE_AUTO;
        settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);

        static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
        settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);

        // AF mode depends on facing: only the back camera simulates focus;
        // the front camera is treated as fixed-focus (AF off).
        uint8_t afMode = 0;

        if (mFacingBack) {
            switch (type) {
                case CAMERA3_TEMPLATE_PREVIEW:
                    afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
                    break;
                case CAMERA3_TEMPLATE_STILL_CAPTURE:
                    afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
                    break;
                case CAMERA3_TEMPLATE_VIDEO_RECORD:
                    afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
                    break;
                case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
                    afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
                    break;
                case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
                    afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
                    break;
                case CAMERA3_TEMPLATE_MANUAL:
                    afMode = ANDROID_CONTROL_AF_MODE_OFF;
                    break;
                default:
                    afMode = ANDROID_CONTROL_AF_MODE_AUTO;
                    break;
            }
        } else {
            afMode = ANDROID_CONTROL_AF_MODE_OFF;
        }
        settings.update(ANDROID_CONTROL_AF_MODE, &afMode, 1);
        settings.update(ANDROID_CONTROL_AF_REGIONS, controlRegions, 5);

        static const uint8_t afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
        settings.update(ANDROID_CONTROL_AF_TRIGGER, &afTrigger, 1);

        static const uint8_t vstabMode =
                ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
        settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
                        &vstabMode, 1);

        static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
        settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);

        static const uint8_t lensShadingMapMode =
                ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
        settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
                        &lensShadingMapMode, 1);

        static const uint8_t aberrationMode =
                ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
        settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
                        &aberrationMode, 1);

        static const int32_t testPatternMode =
                ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
        settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testPatternMode, 1);
    }

    // Cache the raw metadata; ownership stays with this device and is
    // released in the destructor.
    mDefaultTemplates[type] = settings.release();

    return mDefaultTemplates[type];
}
    724 
    725 status_t EmulatedQemuCamera3::processCaptureRequest(
    726         camera3_capture_request *request) {
    727     Mutex::Autolock l(mLock);
    728     status_t res;
    729 
    730     /* Validation */
    731 
    732     if (mStatus < STATUS_READY) {
    733         ALOGE("%s: Can't submit capture requests in state %d", __FUNCTION__,
    734                 mStatus);
    735         return INVALID_OPERATION;
    736     }
    737 
    738     if (request == nullptr) {
    739         ALOGE("%s: NULL request!", __FUNCTION__);
    740         return BAD_VALUE;
    741     }
    742 
    743     uint32_t frameNumber = request->frame_number;
    744 
    745     if (request->settings == nullptr && mPrevSettings.isEmpty()) {
    746         ALOGE("%s: Request %d: NULL settings for first request after"
    747                 "configureStreams()", __FUNCTION__, frameNumber);
    748         return BAD_VALUE;
    749     }
    750 
    751     if (request->input_buffer != nullptr &&
    752             request->input_buffer->stream != mInputStream) {
    753         ALOGE("%s: Request %d: Input buffer not from input stream!",
    754                 __FUNCTION__, frameNumber);
    755         ALOGV("%s: Bad stream %p, expected: %p", __FUNCTION__,
    756                 request->input_buffer->stream, mInputStream);
    757         ALOGV("%s: Bad stream type %d, expected stream type %d", __FUNCTION__,
    758                 request->input_buffer->stream->stream_type,
    759                 mInputStream ? mInputStream->stream_type : -1);
    760 
    761         return BAD_VALUE;
    762     }
    763 
    764     if (request->num_output_buffers < 1 || request->output_buffers == nullptr) {
    765         ALOGE("%s: Request %d: No output buffers provided!",
    766                 __FUNCTION__, frameNumber);
    767         return BAD_VALUE;
    768     }
    769 
    770     /*
    771      * Validate all buffers, starting with input buffer if it's given.
    772      */
    773 
    774     ssize_t idx;
    775     const camera3_stream_buffer_t *b;
    776     if (request->input_buffer != nullptr) {
    777         idx = -1;
    778         b = request->input_buffer;
    779     } else {
    780         idx = 0;
    781         b = request->output_buffers;
    782     }
    783     do {
    784         PrivateStreamInfo *priv =
    785                 static_cast<PrivateStreamInfo*>(b->stream->priv);
    786         if (priv == nullptr) {
    787             ALOGE("%s: Request %d: Buffer %zu: Unconfigured stream!",
    788                     __FUNCTION__, frameNumber, idx);
    789             return BAD_VALUE;
    790         }
    791         if (!priv->alive) {
    792             ALOGE("%s: Request %d: Buffer %zu: Dead stream!",
    793                     __FUNCTION__, frameNumber, idx);
    794             return BAD_VALUE;
    795         }
    796         if (b->status != CAMERA3_BUFFER_STATUS_OK) {
    797             ALOGE("%s: Request %d: Buffer %zu: Status not OK!",
    798                     __FUNCTION__, frameNumber, idx);
    799             return BAD_VALUE;
    800         }
    801         if (b->release_fence != -1) {
    802             ALOGE("%s: Request %d: Buffer %zu: Has a release fence!",
    803                     __FUNCTION__, frameNumber, idx);
    804             return BAD_VALUE;
    805         }
    806         if (b->buffer == nullptr) {
    807             ALOGE("%s: Request %d: Buffer %zu: NULL buffer handle!",
    808                     __FUNCTION__, frameNumber, idx);
    809             return BAD_VALUE;
    810         }
    811         idx++;
    812         b = &(request->output_buffers[idx]);
    813     } while (idx < (ssize_t)request->num_output_buffers);
    814 
    815     // TODO: Validate settings parameters.
    816 
    817     /*
    818      * Start processing this request.
    819      */
    820 
    821     mStatus = STATUS_ACTIVE;
    822 
    823     CameraMetadata settings;
    824 
    825     if (request->settings == nullptr) {
    826         settings.acquire(mPrevSettings);
    827     } else {
    828         settings = request->settings;
    829     }
    830 
    831     res = process3A(settings);
    832     if (res != OK) {
    833         return res;
    834     }
    835 
    836     /*
    837      * Get ready for sensor config.
    838      */
    839     // TODO: We shouldn't need exposureTime or frameDuration for webcams.
    840     nsecs_t exposureTime;
    841     nsecs_t frameDuration;
    842     bool needJpeg = false;
    843     camera_metadata_entry_t entry;
    844 
    845     entry = settings.find(ANDROID_SENSOR_EXPOSURE_TIME);
    846     exposureTime = (entry.count > 0) ?
    847             entry.data.i64[0] :
    848             QemuSensor::kExposureTimeRange[0];
    849 
    850     // Note: Camera consumers may rely on there being an exposure
    851     //       time set in the camera metadata.
    852     settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &exposureTime, 1);
    853 
    854     entry = settings.find(ANDROID_SENSOR_FRAME_DURATION);
    855     frameDuration = (entry.count > 0) ?
    856             entry.data.i64[0] :
    857             QemuSensor::kFrameDurationRange[0];
    858 
    859     if (exposureTime > frameDuration) {
    860         frameDuration = exposureTime + QemuSensor::kMinVerticalBlank;
    861         settings.update(ANDROID_SENSOR_FRAME_DURATION, &frameDuration, 1);
    862     }
    863 
    864     static const int32_t sensitivity = QemuSensor::kSensitivityRange[0];
    865     settings.update(ANDROID_SENSOR_SENSITIVITY, &sensitivity, 1);
    866 
    867     static const uint8_t colorMode  = ANDROID_COLOR_CORRECTION_MODE_FAST;
    868     settings.update(ANDROID_COLOR_CORRECTION_MODE, &colorMode, 1);
    869 
    870     static const float colorGains[4] = {
    871         1.0f, 1.0f, 1.0f, 1.0f
    872     };
    873     settings.update(ANDROID_COLOR_CORRECTION_GAINS, colorGains, 4);
    874 
    875     static const camera_metadata_rational colorTransform[9] = {
    876         {1,1}, {0,1}, {0,1},
    877         {0,1}, {1,1}, {0,1},
    878         {0,1}, {0,1}, {1,1}
    879     };
    880     settings.update(ANDROID_COLOR_CORRECTION_TRANSFORM, colorTransform, 9);
    881 
    882     static const camera_metadata_rational neutralColorPoint[3] = {
    883         {1,1}, {1,1}, {1,1},
    884     };
    885     settings.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT, neutralColorPoint, 3);
    886 
    887     Buffers *sensorBuffers = new Buffers();
    888     HalBufferVector *buffers = new HalBufferVector();
    889 
    890     sensorBuffers->setCapacity(request->num_output_buffers);
    891     buffers->setCapacity(request->num_output_buffers);
    892 
    893     /*
    894      * Process all the buffers we got for output, constructing internal buffer
    895      * structures for them, and lock them for writing.
    896      */
    897     for (size_t i = 0; i < request->num_output_buffers; ++i) {
    898         const camera3_stream_buffer &srcBuf = request->output_buffers[i];
    899         StreamBuffer destBuf;
    900         destBuf.streamId = kGenericStreamId;
    901         destBuf.width = srcBuf.stream->width;
    902         destBuf.height = srcBuf.stream->height;
    903         // inline with goldfish gralloc
    904         if (srcBuf.stream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
    905             if (srcBuf.stream->usage & GRALLOC_USAGE_HW_CAMERA_WRITE) {
    906                 if (srcBuf.stream->usage & GRALLOC_USAGE_HW_TEXTURE) {
    907                     destBuf.format = HAL_PIXEL_FORMAT_RGBA_8888;
    908                 }
    909                 else if (srcBuf.stream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER) {
    910                     destBuf.format = HAL_PIXEL_FORMAT_YCbCr_420_888;
    911                 }
    912                 else if ((srcBuf.stream->usage & GRALLOC_USAGE_HW_CAMERA_MASK)
    913                          == GRALLOC_USAGE_HW_CAMERA_ZSL) {
    914                     destBuf.format = HAL_PIXEL_FORMAT_RGB_888;
    915                 }
    916             }
    917         }
    918         else {
    919             destBuf.format = srcBuf.stream->format;
    920         }
    921 
    922         destBuf.stride = srcBuf.stream->width;
    923         destBuf.dataSpace = srcBuf.stream->data_space;
    924         destBuf.buffer = srcBuf.buffer;
    925 
    926         if (destBuf.format == HAL_PIXEL_FORMAT_BLOB) {
    927             needJpeg = true;
    928         }
    929 
    930         // Wait on fence.
    931         sp<Fence> bufferAcquireFence = new Fence(srcBuf.acquire_fence);
    932         res = bufferAcquireFence->wait(kFenceTimeoutMs);
    933         if (res == TIMED_OUT) {
    934             ALOGE("%s: Request %d: Buffer %zu: Fence timed out after %d ms",
    935                     __FUNCTION__, frameNumber, i, kFenceTimeoutMs);
    936         }
    937         if (res == OK) {
    938             // Lock buffer for writing.
    939             if (srcBuf.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
    940                 if (destBuf.format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
    941                     android_ycbcr ycbcr = android_ycbcr();
    942                     res = GrallocModule::getInstance().lock_ycbcr(
    943                             *(destBuf.buffer),
    944                             GRALLOC_USAGE_HW_CAMERA_WRITE,
    945                             0, 0, destBuf.width, destBuf.height,
    946                             &ycbcr);
    947                     /*
    948                      * This is only valid because we know that emulator's
    949                      * YCbCr_420_888 is really contiguous NV21 under the hood.
    950                      */
    951                     destBuf.img = static_cast<uint8_t*>(ycbcr.y);
    952                 } else {
    953                     ALOGE("Unexpected private format for flexible YUV: 0x%x",
    954                             destBuf.format);
    955                     res = INVALID_OPERATION;
    956                 }
    957             } else {
    958                 res = GrallocModule::getInstance().lock(
    959                     *(destBuf.buffer),
    960                     GRALLOC_USAGE_HW_CAMERA_WRITE,
    961                     0, 0, destBuf.width, destBuf.height,
    962                     (void**)&(destBuf.img));
    963 
    964             }
    965             if (res != OK) {
    966                 ALOGE("%s: Request %d: Buffer %zu: Unable to lock buffer",
    967                         __FUNCTION__, frameNumber, i);
    968             }
    969         }
    970 
    971         if (res != OK) {
    972             /*
    973              * Either waiting or locking failed. Unlock locked buffers and bail
    974              * out.
    975              */
    976             for (size_t j = 0; j < i; j++) {
    977                 GrallocModule::getInstance().unlock(
    978                         *(request->output_buffers[i].buffer));
    979             }
    980             delete sensorBuffers;
    981             delete buffers;
    982             return NO_INIT;
    983         }
    984 
    985         sensorBuffers->push_back(destBuf);
    986         buffers->push_back(srcBuf);
    987     }
    988 
    989     /*
    990      * Wait for JPEG compressor to not be busy, if needed.
    991      */
    992     if (needJpeg) {
    993         bool ready = mJpegCompressor->waitForDone(kJpegTimeoutNs);
    994         if (!ready) {
    995             ALOGE("%s: Timeout waiting for JPEG compression to complete!",
    996                     __FUNCTION__);
    997             return NO_INIT;
    998         }
    999         res = mJpegCompressor->reserve();
   1000         if (res != OK) {
   1001             ALOGE("%s: Error managing JPEG compressor resources, can't "
   1002                     "reserve it!", __FUNCTION__);
   1003             return NO_INIT;
   1004         }
   1005     }
   1006 
   1007     /*
   1008      * TODO: We shouldn't need to wait for sensor readout with a webcam, because
   1009      * we might be wasting time.
   1010      */
   1011 
   1012     /*
   1013      * Wait until the in-flight queue has room.
   1014      */
   1015     res = mReadoutThread->waitForReadout();
   1016     if (res != OK) {
   1017         ALOGE("%s: Timeout waiting for previous requests to complete!",
   1018                 __FUNCTION__);
   1019         return NO_INIT;
   1020     }
   1021 
   1022     /*
   1023      * Wait until sensor's ready. This waits for lengthy amounts of time with
   1024      * mLock held, but the interface spec is that no other calls may by done to
   1025      * the HAL by the framework while process_capture_request is happening.
   1026      */
   1027     int syncTimeoutCount = 0;
   1028     while(!mSensor->waitForVSync(kSyncWaitTimeout)) {
   1029         if (mStatus == STATUS_ERROR) {
   1030             return NO_INIT;
   1031         }
   1032         if (syncTimeoutCount == kMaxSyncTimeoutCount) {
   1033             ALOGE("%s: Request %d: Sensor sync timed out after %" PRId64 " ms",
   1034                     __FUNCTION__, frameNumber,
   1035                     kSyncWaitTimeout * kMaxSyncTimeoutCount / 1000000);
   1036             return NO_INIT;
   1037         }
   1038         syncTimeoutCount++;
   1039     }
   1040 
   1041     /*
   1042      * Configure sensor and queue up the request to the readout thread.
   1043      */
   1044     mSensor->setFrameDuration(frameDuration);
   1045     mSensor->setDestinationBuffers(sensorBuffers);
   1046     mSensor->setFrameNumber(request->frame_number);
   1047 
   1048     ReadoutThread::Request r;
   1049     r.frameNumber = request->frame_number;
   1050     r.settings = settings;
   1051     r.sensorBuffers = sensorBuffers;
   1052     r.buffers = buffers;
   1053 
   1054     mReadoutThread->queueCaptureRequest(r);
   1055     ALOGVV("%s: Queued frame %d", __FUNCTION__, request->frame_number);
   1056 
   1057     // Cache the settings for next time.
   1058     mPrevSettings.acquire(settings);
   1059 
   1060     return OK;
   1061 }
   1062 
// camera3 flush() entry point. Intentionally a no-op for the emulated camera:
// in-flight requests are simply allowed to drain normally. Always returns OK
// so the framework does not treat the device as failed.
status_t EmulatedQemuCamera3::flush() {
    ALOGW("%s: Not implemented; ignored", __FUNCTION__);
    return OK;
}
   1067 
   1068 /*****************************************************************************
   1069  * Private Methods
   1070  ****************************************************************************/
   1071 
   1072 status_t EmulatedQemuCamera3::getCameraCapabilities() {
   1073     const char *key = mFacingBack ? "qemu.sf.back_camera_caps" :
   1074             "qemu.sf.front_camera_caps";
   1075 
   1076     /*
   1077      * Defined by 'qemu.sf.*_camera_caps' boot property: if the property doesn't
   1078      * exist, it is assumed to list FULL.
   1079      */
   1080     char prop[PROPERTY_VALUE_MAX];
   1081     if (property_get(key, prop, nullptr) > 0) {
   1082         char *saveptr = nullptr;
   1083         char *cap = strtok_r(prop, " ,", &saveptr);
   1084         while (cap != nullptr) {
   1085             for (int i = 0; i < NUM_CAPABILITIES; ++i) {
   1086                 if (!strcasecmp(cap, sAvailableCapabilitiesStrings[i])) {
   1087                     mCapabilities.add(static_cast<AvailableCapabilities>(i));
   1088                     break;
   1089                 }
   1090             }
   1091             cap = strtok_r(nullptr, " ,", &saveptr);
   1092         }
   1093         if (mCapabilities.size() == 0) {
   1094             ALOGE("qemu.sf.back_camera_caps had no valid capabilities: %s", prop);
   1095         }
   1096     }
   1097 
   1098     mCapabilities.add(BACKWARD_COMPATIBLE);
   1099 
   1100     ALOGI("Camera %d capabilities:", mCameraID);
   1101     for (size_t i = 0; i < mCapabilities.size(); ++i) {
   1102         ALOGI("  %s", sAvailableCapabilitiesStrings[mCapabilities[i]]);
   1103     }
   1104 
   1105     return OK;
   1106 }
   1107 
   1108 bool EmulatedQemuCamera3::hasCapability(AvailableCapabilities cap) {
   1109     ssize_t idx = mCapabilities.indexOf(cap);
   1110     return idx >= 0;
   1111 }
   1112 
   1113 status_t EmulatedQemuCamera3::constructStaticInfo() {
   1114     CameraMetadata info;
   1115     Vector<int32_t> availableCharacteristicsKeys;
   1116     status_t res;
   1117 
   1118 #define ADD_STATIC_ENTRY(name, varptr, count) \
   1119         availableCharacteristicsKeys.add(name);   \
   1120         res = info.update(name, varptr, count); \
   1121         if (res != OK) return res
   1122 
   1123     static const float sensorPhysicalSize[2] = {3.20f, 2.40f};  // mm
   1124     ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
   1125             sensorPhysicalSize, 2);
   1126 
   1127     const int32_t pixelArray[] = {mSensorWidth, mSensorHeight};
   1128     ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
   1129             pixelArray, 2);
   1130     const int32_t activeArray[] = {0, 0, mSensorWidth, mSensorHeight};
   1131     ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
   1132             activeArray, 4);
   1133 
   1134     static const int32_t orientation = 90;  // Aligned with 'long edge'.
   1135     ADD_STATIC_ENTRY(ANDROID_SENSOR_ORIENTATION, &orientation, 1);
   1136 
   1137     static const uint8_t timestampSource =
   1138             ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN;
   1139     ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE, &timestampSource, 1);
   1140 
   1141     if (hasCapability(BACKWARD_COMPATIBLE)) {
   1142         static const int32_t availableTestPatternModes[] = {
   1143             ANDROID_SENSOR_TEST_PATTERN_MODE_OFF
   1144         };
   1145         ADD_STATIC_ENTRY(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
   1146                 availableTestPatternModes,
   1147                 sizeof(availableTestPatternModes) / sizeof(int32_t));
   1148     }
   1149 
   1150     /* android.lens */
   1151 
   1152     static const float focalLengths = 5.0f; // mm
   1153     ADD_STATIC_ENTRY(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
   1154             &focalLengths, 1);
   1155 
   1156     if (hasCapability(BACKWARD_COMPATIBLE)) {
   1157         // infinity (fixed focus)
   1158         static const float minFocusDistance = 0.0;
   1159         ADD_STATIC_ENTRY(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
   1160                 &minFocusDistance, 1);
   1161 
   1162         // (fixed focus)
   1163         static const float hyperFocalDistance = 0.0;
   1164         ADD_STATIC_ENTRY(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
   1165                 &hyperFocalDistance, 1);
   1166 
   1167         static const float apertures = 2.8f;
   1168         ADD_STATIC_ENTRY(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
   1169                 &apertures, 1);
   1170         static const float filterDensities = 0;
   1171         ADD_STATIC_ENTRY(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
   1172                 &filterDensities, 1);
   1173         static const uint8_t availableOpticalStabilization =
   1174                 ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
   1175         ADD_STATIC_ENTRY(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
   1176                 &availableOpticalStabilization, 1);
   1177 
   1178         static const int32_t lensShadingMapSize[] = {1, 1};
   1179         ADD_STATIC_ENTRY(ANDROID_LENS_INFO_SHADING_MAP_SIZE, lensShadingMapSize,
   1180                 sizeof(lensShadingMapSize) / sizeof(int32_t));
   1181 
   1182         static const uint8_t lensFocusCalibration =
   1183                 ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE;
   1184         ADD_STATIC_ENTRY(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
   1185                 &lensFocusCalibration, 1);
   1186     }
   1187 
   1188     const uint8_t lensFacing = mFacingBack ?
   1189             ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
   1190     ADD_STATIC_ENTRY(ANDROID_LENS_FACING, &lensFacing, 1);
   1191 
   1192     /* android.flash */
   1193 
   1194     static const uint8_t flashAvailable = 0;
   1195     ADD_STATIC_ENTRY(ANDROID_FLASH_INFO_AVAILABLE, &flashAvailable, 1);
   1196 
   1197     /* android.scaler */
   1198 
   1199     std::vector<int32_t> availableStreamConfigurations;
   1200     std::vector<int64_t> availableMinFrameDurations;
   1201     std::vector<int64_t> availableStallDurations;
   1202 
   1203     /*
   1204      * Build stream configurations, min frame durations, and stall durations for
   1205      * all resolutions reported by camera device.
   1206      */
   1207     for (const auto &res : mResolutions) {
   1208         int32_t width = res.first, height = res.second;
   1209         std::vector<int32_t> currentResStreamConfigurations = {
   1210             HAL_PIXEL_FORMAT_BLOB, width, height,
   1211             ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
   1212 
   1213             HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, width, height,
   1214             ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
   1215 
   1216             HAL_PIXEL_FORMAT_YCbCr_420_888, width, height,
   1217             ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
   1218 
   1219             HAL_PIXEL_FORMAT_RGBA_8888, width, height,
   1220             ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT
   1221         };
   1222         std::vector<int32_t> currentResMinFrameDurations = {
   1223             HAL_PIXEL_FORMAT_BLOB, width, height,
   1224             QemuSensor::kFrameDurationRange[0],
   1225 
   1226             HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, width, height,
   1227             QemuSensor::kFrameDurationRange[0],
   1228 
   1229             HAL_PIXEL_FORMAT_YCbCr_420_888, width, height,
   1230             QemuSensor::kFrameDurationRange[0],
   1231 
   1232             HAL_PIXEL_FORMAT_RGBA_8888, width, height,
   1233             QemuSensor::kFrameDurationRange[0]
   1234         };
   1235         std::vector<int32_t> currentResStallDurations = {
   1236             // We should only introduce stall times with JPEG-compressed frames.
   1237             HAL_PIXEL_FORMAT_BLOB, width, height,
   1238             QemuSensor::kFrameDurationRange[0],
   1239 
   1240             HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, width, height, 0,
   1241 
   1242             HAL_PIXEL_FORMAT_YCbCr_420_888, width, height, 0,
   1243 
   1244             HAL_PIXEL_FORMAT_RGBA_8888, width, height, 0
   1245         };
   1246         availableStreamConfigurations.insert(
   1247                 availableStreamConfigurations.end(),
   1248                 currentResStreamConfigurations.begin(),
   1249                 currentResStreamConfigurations.end());
   1250         availableMinFrameDurations.insert(
   1251                 availableMinFrameDurations.end(),
   1252                 currentResMinFrameDurations.begin(),
   1253                 currentResMinFrameDurations.end());
   1254         availableStallDurations.insert(
   1255                 availableStallDurations.end(),
   1256                 currentResStallDurations.begin(),
   1257                 currentResStallDurations.end());
   1258     }
   1259 
   1260     /*
   1261      * Now, if nonempty, add them to the camera's available characteristics.
   1262      */
   1263     if (availableStreamConfigurations.size() > 0) {
   1264         ADD_STATIC_ENTRY(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
   1265                 availableStreamConfigurations.data(),
   1266                 availableStreamConfigurations.size());
   1267     }
   1268     if (availableMinFrameDurations.size() > 0) {
   1269         ADD_STATIC_ENTRY(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
   1270                 &availableMinFrameDurations[0],
   1271                 availableMinFrameDurations.size());
   1272     }
   1273     if (availableStallDurations.size() > 0) {
   1274         ADD_STATIC_ENTRY(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
   1275                 &availableStallDurations[0],
   1276                 availableStallDurations.size());
   1277     }
   1278 
   1279     if (hasCapability(BACKWARD_COMPATIBLE)) {
   1280         static const uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_FREEFORM;
   1281         ADD_STATIC_ENTRY(ANDROID_SCALER_CROPPING_TYPE,
   1282                 &croppingType, 1);
   1283 
   1284         static const float maxZoom = 10;
   1285         ADD_STATIC_ENTRY(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
   1286                 &maxZoom, 1);
   1287     }
   1288 
   1289     /* android.jpeg */
   1290 
   1291     if (hasCapability(BACKWARD_COMPATIBLE)) {
   1292         static const int32_t jpegThumbnailSizes[] = {
   1293             0, 0,
   1294             160, 120,
   1295             320, 240
   1296         };
   1297         ADD_STATIC_ENTRY(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
   1298                 jpegThumbnailSizes,
   1299                 sizeof(jpegThumbnailSizes) / sizeof(int32_t));
   1300 
   1301         static const int32_t jpegMaxSize = JpegCompressor::kMaxJpegSize;
   1302         ADD_STATIC_ENTRY(ANDROID_JPEG_MAX_SIZE, &jpegMaxSize, 1);
   1303     }
   1304 
   1305     /* android.stats */
   1306 
   1307     if (hasCapability(BACKWARD_COMPATIBLE)) {
   1308         static const uint8_t availableFaceDetectModes[] = {
   1309             ANDROID_STATISTICS_FACE_DETECT_MODE_OFF
   1310         };
   1311         ADD_STATIC_ENTRY(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
   1312                 availableFaceDetectModes,
   1313                 sizeof(availableFaceDetectModes));
   1314 
   1315         static const int32_t maxFaceCount = 0;
   1316         ADD_STATIC_ENTRY(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
   1317                 &maxFaceCount, 1);
   1318 
   1319         static const uint8_t availableShadingMapModes[] = {
   1320             ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF
   1321         };
   1322         ADD_STATIC_ENTRY(
   1323                 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
   1324                 availableShadingMapModes, sizeof(availableShadingMapModes));
   1325     }
   1326 
   1327     /* android.sync */
   1328 
   1329     const int32_t maxLatency =
   1330             hasCapability(FULL_LEVEL) ?
   1331             ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL : 3;
   1332     ADD_STATIC_ENTRY(ANDROID_SYNC_MAX_LATENCY, &maxLatency, 1);
   1333 
   1334     /* android.control */
   1335 
   1336     if (hasCapability(BACKWARD_COMPATIBLE)) {
   1337         const uint8_t availableControlModes[] = {
   1338             ANDROID_CONTROL_MODE_OFF,
   1339             ANDROID_CONTROL_MODE_AUTO,
   1340             ANDROID_CONTROL_MODE_USE_SCENE_MODE
   1341         };
   1342         ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_MODES,
   1343                 availableControlModes, sizeof(availableControlModes));
   1344     } else {
   1345         const uint8_t availableControlModes[] = {
   1346             ANDROID_CONTROL_MODE_AUTO
   1347         };
   1348         ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_MODES,
   1349                 availableControlModes, sizeof(availableControlModes));
   1350     }
   1351 
   1352     const uint8_t availableSceneModes[] = {
   1353         hasCapability(BACKWARD_COMPATIBLE) ?
   1354             ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY :
   1355             ANDROID_CONTROL_SCENE_MODE_DISABLED
   1356     };
   1357     ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
   1358             availableSceneModes, sizeof(availableSceneModes));
   1359 
   1360     if (hasCapability(BACKWARD_COMPATIBLE)) {
   1361         static const uint8_t availableEffects[] = {
   1362             ANDROID_CONTROL_EFFECT_MODE_OFF
   1363         };
   1364         ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_EFFECTS,
   1365                 availableEffects, sizeof(availableEffects));
   1366     }
   1367 
   1368     if (hasCapability(BACKWARD_COMPATIBLE)) {
   1369         static const int32_t max3aRegions[] = {
   1370             /* AE */ 1,
   1371             /* AWB */ 0,
   1372             /* AF */ 1
   1373         };
   1374         ADD_STATIC_ENTRY(ANDROID_CONTROL_MAX_REGIONS,
   1375                 max3aRegions,
   1376                 sizeof(max3aRegions) / sizeof(max3aRegions[0]));
   1377 
   1378         static const uint8_t availableAeModes[] = {
   1379             ANDROID_CONTROL_AE_MODE_OFF,
   1380             ANDROID_CONTROL_AE_MODE_ON
   1381         };
   1382         ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_AVAILABLE_MODES,
   1383                 availableAeModes, sizeof(availableAeModes));
   1384 
   1385         static const camera_metadata_rational exposureCompensationStep = {1, 3};
   1386         ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_COMPENSATION_STEP,
   1387                 &exposureCompensationStep, 1);
   1388 
   1389         static int32_t exposureCompensationRange[] = {-9, 9};
   1390         ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
   1391                 exposureCompensationRange,
   1392                 sizeof(exposureCompensationRange) / sizeof(int32_t));
   1393     }
   1394 
   1395     static const int32_t availableTargetFpsRanges[] = {
   1396         5, 30, 15, 30, 15, 15, 30, 30
   1397     };
   1398     ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
   1399             availableTargetFpsRanges,
   1400             sizeof(availableTargetFpsRanges) / sizeof(int32_t));
   1401 
   1402     if (hasCapability(BACKWARD_COMPATIBLE)) {
   1403         static const uint8_t availableAntibandingModes[] = {
   1404             ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,
   1405             ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO
   1406         };
   1407         ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
   1408                 availableAntibandingModes, sizeof(availableAntibandingModes));
   1409     }
   1410 
   1411     static const uint8_t aeLockAvailable = ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
   1412 
   1413     ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
   1414             &aeLockAvailable, 1);
   1415 
   1416     if (hasCapability(BACKWARD_COMPATIBLE)) {
   1417         static const uint8_t availableAwbModes[] = {
   1418             ANDROID_CONTROL_AWB_MODE_OFF,
   1419             ANDROID_CONTROL_AWB_MODE_AUTO,
   1420             ANDROID_CONTROL_AWB_MODE_INCANDESCENT,
   1421             ANDROID_CONTROL_AWB_MODE_FLUORESCENT,
   1422             ANDROID_CONTROL_AWB_MODE_DAYLIGHT,
   1423             ANDROID_CONTROL_AWB_MODE_SHADE,
   1424         };
   1425         ADD_STATIC_ENTRY(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
   1426                 availableAwbModes, sizeof(availableAwbModes));
   1427     }
   1428 
   1429     static const uint8_t awbLockAvailable = ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
   1430 
   1431     ADD_STATIC_ENTRY(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
   1432             &awbLockAvailable, 1);
   1433 
   1434     static const uint8_t availableAfModesBack[] = {
   1435         ANDROID_CONTROL_AF_MODE_OFF
   1436     };
   1437 
   1438     static const uint8_t availableAfModesFront[] = {
   1439         ANDROID_CONTROL_AF_MODE_OFF
   1440     };
   1441 
   1442     if (mFacingBack && hasCapability(BACKWARD_COMPATIBLE)) {
   1443         ADD_STATIC_ENTRY(ANDROID_CONTROL_AF_AVAILABLE_MODES,
   1444                 availableAfModesBack, sizeof(availableAfModesBack));
   1445     } else {
   1446         ADD_STATIC_ENTRY(ANDROID_CONTROL_AF_AVAILABLE_MODES,
   1447                 availableAfModesFront, sizeof(availableAfModesFront));
   1448     }
   1449 
   1450     static const uint8_t availableVstabModes[] = {
   1451         ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF,
   1452     };
   1453     ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
   1454             availableVstabModes, sizeof(availableVstabModes));
   1455 
   1456     /* android.colorCorrection */
   1457 
   1458     if (hasCapability(BACKWARD_COMPATIBLE)) {
   1459         const uint8_t availableAberrationModes[] = {
   1460             ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
   1461             ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
   1462             ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY
   1463         };
   1464         ADD_STATIC_ENTRY(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
   1465                 availableAberrationModes, sizeof(availableAberrationModes));
   1466     } else {
   1467         const uint8_t availableAberrationModes[] = {
   1468             ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
   1469         };
   1470         ADD_STATIC_ENTRY(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
   1471                 availableAberrationModes, sizeof(availableAberrationModes));
   1472     }
   1473 
   1474     /* android.edge */
   1475 
   1476     if (hasCapability(BACKWARD_COMPATIBLE)) {
   1477         const uint8_t availableEdgeModes[] = {
   1478             ANDROID_EDGE_MODE_OFF,
   1479             ANDROID_EDGE_MODE_FAST,
   1480             ANDROID_EDGE_MODE_HIGH_QUALITY,
   1481         };
   1482         ADD_STATIC_ENTRY(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
   1483                 availableEdgeModes, sizeof(availableEdgeModes));
   1484     } else {
   1485         const uint8_t availableEdgeModes[] = {
   1486             ANDROID_EDGE_MODE_OFF
   1487         };
   1488         ADD_STATIC_ENTRY(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
   1489                 availableEdgeModes, sizeof(availableEdgeModes));
   1490     }
   1491 
   1492     /* android.info */
   1493 
   1494     static const uint8_t supportedHardwareLevel =
   1495             ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED;
   1496     ADD_STATIC_ENTRY(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
   1497             &supportedHardwareLevel, /* count */ 1);
   1498 
   1499     /* android.noiseReduction */
   1500 
   1501     if (hasCapability(BACKWARD_COMPATIBLE)) {
   1502         const uint8_t availableNoiseReductionModes[] = {
   1503             ANDROID_NOISE_REDUCTION_MODE_OFF,
   1504             ANDROID_NOISE_REDUCTION_MODE_FAST,
   1505             ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY
   1506         };
   1507         ADD_STATIC_ENTRY(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
   1508                 availableNoiseReductionModes,
   1509                 sizeof(availableNoiseReductionModes));
   1510     } else {
   1511         const uint8_t availableNoiseReductionModes[] = {
   1512             ANDROID_NOISE_REDUCTION_MODE_OFF
   1513         };
   1514         ADD_STATIC_ENTRY(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
   1515                 availableNoiseReductionModes,
   1516                 sizeof(availableNoiseReductionModes));
   1517     }
   1518 
   1519     /* android.shading */
   1520 
   1521     if (hasCapability(BACKWARD_COMPATIBLE)) {
   1522         const uint8_t availableShadingModes[] = {
   1523             ANDROID_SHADING_MODE_OFF,
   1524             ANDROID_SHADING_MODE_FAST,
   1525             ANDROID_SHADING_MODE_HIGH_QUALITY
   1526         };
   1527         ADD_STATIC_ENTRY(ANDROID_SHADING_AVAILABLE_MODES, availableShadingModes,
   1528                 sizeof(availableShadingModes));
   1529     } else {
   1530         const uint8_t availableShadingModes[] = {
   1531             ANDROID_SHADING_MODE_OFF
   1532         };
   1533         ADD_STATIC_ENTRY(ANDROID_SHADING_AVAILABLE_MODES, availableShadingModes,
   1534                 sizeof(availableShadingModes));
   1535     }
   1536 
   1537     /* android.request */
   1538 
   1539     static const int32_t maxNumOutputStreams[] = {
   1540         kMaxRawStreamCount, kMaxProcessedStreamCount, kMaxJpegStreamCount
   1541     };
   1542     ADD_STATIC_ENTRY(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
   1543             maxNumOutputStreams, 3);
   1544 
   1545     static const uint8_t maxPipelineDepth = kMaxBufferCount;
   1546     ADD_STATIC_ENTRY(ANDROID_REQUEST_PIPELINE_MAX_DEPTH, &maxPipelineDepth, 1);
   1547 
   1548     static const int32_t partialResultCount = 1;
   1549     ADD_STATIC_ENTRY(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
   1550             &partialResultCount, /* count */ 1);
   1551 
   1552     SortedVector<uint8_t> caps;
   1553     for (size_t i = 0; i < mCapabilities.size(); ++i) {
   1554         switch (mCapabilities[i]) {
   1555             case BACKWARD_COMPATIBLE:
   1556                 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
   1557                 break;
   1558             case PRIVATE_REPROCESSING:
   1559                 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
   1560                 break;
   1561             case READ_SENSOR_SETTINGS:
   1562                 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
   1563                 break;
   1564             case BURST_CAPTURE:
   1565                 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
   1566                 break;
   1567             case YUV_REPROCESSING:
   1568                 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
   1569                 break;
   1570             case CONSTRAINED_HIGH_SPEED_VIDEO:
   1571                 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
   1572                 break;
   1573             default:
   1574                 // Ignore LEVELs.
   1575                 break;
   1576         }
   1577     }
   1578     ADD_STATIC_ENTRY(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, caps.array(), caps.size());
   1579 
   1580     // Scan a default request template for included request keys.
   1581     Vector<int32_t> availableRequestKeys;
   1582     const camera_metadata_t *previewRequest =
   1583         constructDefaultRequestSettings(CAMERA3_TEMPLATE_PREVIEW);
   1584     for (size_t i = 0; i < get_camera_metadata_entry_count(previewRequest); ++i) {
   1585         camera_metadata_ro_entry_t entry;
   1586         get_camera_metadata_ro_entry(previewRequest, i, &entry);
   1587         availableRequestKeys.add(entry.tag);
   1588     }
   1589     ADD_STATIC_ENTRY(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, availableRequestKeys.array(),
   1590             availableRequestKeys.size());
   1591 
   1592     /*
   1593      * Add a few more result keys. Must be kept up to date with the various
   1594      * places that add these.
   1595      */
   1596 
   1597     Vector<int32_t> availableResultKeys(availableRequestKeys);
   1598     if (hasCapability(BACKWARD_COMPATIBLE)) {
   1599         availableResultKeys.add(ANDROID_CONTROL_AE_STATE);
   1600         availableResultKeys.add(ANDROID_CONTROL_AF_STATE);
   1601         availableResultKeys.add(ANDROID_CONTROL_AWB_STATE);
   1602         availableResultKeys.add(ANDROID_FLASH_STATE);
   1603         availableResultKeys.add(ANDROID_LENS_STATE);
   1604         availableResultKeys.add(ANDROID_LENS_FOCUS_RANGE);
   1605         availableResultKeys.add(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW);
   1606         availableResultKeys.add(ANDROID_STATISTICS_SCENE_FLICKER);
   1607     }
   1608 
   1609     availableResultKeys.add(ANDROID_REQUEST_PIPELINE_DEPTH);
   1610     availableResultKeys.add(ANDROID_SENSOR_TIMESTAMP);
   1611 
   1612     ADD_STATIC_ENTRY(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS, availableResultKeys.array(),
   1613             availableResultKeys.size());
   1614 
   1615     // Needs to be last, to collect all the keys set.
   1616 
   1617     availableCharacteristicsKeys.add(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS);
   1618     info.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
   1619             availableCharacteristicsKeys);
   1620 
   1621     mCameraInfo = info.release();
   1622 
   1623 #undef ADD_STATIC_ENTRY
   1624     return OK;
   1625 }
   1626 
/*
 * Parses the top-level 3A (AE/AF/AWB) controls out of |settings|, runs the
 * fake per-component 3A routines, and writes the resulting 3A state back
 * into |settings| via update3A().
 *
 * Returns OK on success, or BAD_VALUE when a required control entry is
 * missing or an unsupported mode is requested.
 */
status_t EmulatedQemuCamera3::process3A(CameraMetadata &settings) {
    /**
     * Extract top-level 3A controls
     */
    status_t res;

    camera_metadata_entry e;

    e = settings.find(ANDROID_CONTROL_MODE);
    if (e.count == 0) {
        ALOGE("%s: No control mode entry!", __FUNCTION__);
        return BAD_VALUE;
    }
    uint8_t controlMode = e.data.u8[0];

    if (controlMode == ANDROID_CONTROL_MODE_OFF) {
        // Full-manual: force every 3A component off/inactive and publish
        // that state immediately; the per-component routines are skipped.
        mAeMode   = ANDROID_CONTROL_AE_MODE_OFF;
        mAfMode   = ANDROID_CONTROL_AF_MODE_OFF;
        mAwbMode  = ANDROID_CONTROL_AWB_MODE_OFF;
        mAeState  = ANDROID_CONTROL_AE_STATE_INACTIVE;
        mAfState  = ANDROID_CONTROL_AF_STATE_INACTIVE;
        mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
        update3A(settings);
        return OK;
    } else if (controlMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
        // Scene modes are a BACKWARD_COMPATIBLE feature; only FACE_PRIORITY
        // is emulated here.
        if (!hasCapability(BACKWARD_COMPATIBLE)) {
            ALOGE("%s: Can't use scene mode when BACKWARD_COMPATIBLE not supported!",
                  __FUNCTION__);
            return BAD_VALUE;
        }

        e = settings.find(ANDROID_CONTROL_SCENE_MODE);
        if (e.count == 0) {
            ALOGE("%s: No scene mode entry!", __FUNCTION__);
            return BAD_VALUE;
        }
        uint8_t sceneMode = e.data.u8[0];

        switch(sceneMode) {
            case ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY:
                mFacePriority = true;
                break;
            default:
                ALOGE("%s: Emulator doesn't support scene mode %d",
                        __FUNCTION__, sceneMode);
                return BAD_VALUE;
        }
    } else {
        mFacePriority = false;
    }

    // controlMode == AUTO or sceneMode = FACE_PRIORITY
    // Process individual 3A controls
    // (each returns BAD_VALUE on an unsupported/missing mode)

    res = doFakeAE(settings);
    if (res != OK) return res;

    res = doFakeAF(settings);
    if (res != OK) return res;

    res = doFakeAWB(settings);
    if (res != OK) return res;

    update3A(settings);
    return OK;
}
   1693 
/*
 * Fake auto-exposure: parses the AE controls from |settings| and advances the
 * AE state machine (mAeState / mAeCurrentExposureTime / mAeCounter).
 *
 * Returns OK, or BAD_VALUE when the AE mode entry is missing on a
 * BACKWARD_COMPATIBLE device.
 */
status_t EmulatedQemuCamera3::doFakeAE(CameraMetadata &settings) {
    camera_metadata_entry e;

    e = settings.find(ANDROID_CONTROL_AE_MODE);
    if (e.count == 0 && hasCapability(BACKWARD_COMPATIBLE)) {
        ALOGE("%s: No AE mode entry!", __FUNCTION__);
        return BAD_VALUE;
    }
    // Non-BACKWARD_COMPATIBLE devices may omit the entry; default to ON.
    uint8_t aeMode = (e.count > 0) ? e.data.u8[0] : (uint8_t)ANDROID_CONTROL_AE_MODE_ON;
    mAeMode = aeMode;

    switch (aeMode) {
        case ANDROID_CONTROL_AE_MODE_OFF:
            // AE is OFF
            mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
            return OK;
        case ANDROID_CONTROL_AE_MODE_ON:
            // OK for AUTO modes
            break;
        default:
            // Mostly silently ignore unsupported modes
            ALOGV("%s: Emulator doesn't support AE mode %d, assuming ON",
                    __FUNCTION__, aeMode);
            break;
    }

    e = settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER);
    bool precaptureTrigger = false;
    if (e.count != 0) {
        precaptureTrigger =
                (e.data.u8[0] == ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_START);
    }

    if (precaptureTrigger) {
        ALOGV("%s: Pre capture trigger = %d", __FUNCTION__, precaptureTrigger);
    } else if (e.count > 0) {
        ALOGV("%s: Pre capture trigger was present? %zu",
              __FUNCTION__, e.count);
    }

    if (precaptureTrigger || mAeState == ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
        // Run precapture sequence
        if (mAeState != ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
            // A fresh trigger restarts the convergence counter.
            mAeCounter = 0;
        }

        // Face priority uses a shorter target exposure than normal capture.
        if (mFacePriority) {
            mAeTargetExposureTime = kFacePriorityExposureTime;
        } else {
            mAeTargetExposureTime = kNormalExposureTime;
        }

        // Converged once a minimum number of frames has elapsed AND the
        // current exposure is within 10% of the target.
        if (mAeCounter > kPrecaptureMinFrames &&
                (mAeTargetExposureTime - mAeCurrentExposureTime) <
                mAeTargetExposureTime / 10) {
            // Done with precapture
            mAeCounter = 0;
            mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED;
        } else {
            // Converge some more: move a kExposureTrackRate fraction of the
            // remaining distance toward the target each frame.
            mAeCurrentExposureTime +=
                    (mAeTargetExposureTime - mAeCurrentExposureTime) *
                    kExposureTrackRate;
            mAeCounter++;
            mAeState = ANDROID_CONTROL_AE_STATE_PRECAPTURE;
        }
    }
    else {
        // No precapture in progress: the fake AE is always converged.
        mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED;
    }

    return OK;
}
   1767 
   1768 status_t EmulatedQemuCamera3::doFakeAF(CameraMetadata &settings) {
   1769     camera_metadata_entry e;
   1770 
   1771     e = settings.find(ANDROID_CONTROL_AF_MODE);
   1772     if (e.count == 0 && hasCapability(BACKWARD_COMPATIBLE)) {
   1773         ALOGE("%s: No AF mode entry!", __FUNCTION__);
   1774         return BAD_VALUE;
   1775     }
   1776     uint8_t afMode = (e.count > 0) ? e.data.u8[0] : (uint8_t)ANDROID_CONTROL_AF_MODE_OFF;
   1777 
   1778     switch (afMode) {
   1779         case ANDROID_CONTROL_AF_MODE_OFF:
   1780         case ANDROID_CONTROL_AF_MODE_AUTO:
   1781         case ANDROID_CONTROL_AF_MODE_MACRO:
   1782         case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
   1783         case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
   1784             // Always report INACTIVE for Qemu Camera
   1785             mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
   1786             break;
   1787          default:
   1788             ALOGE("%s: Emulator doesn't support AF mode %d",
   1789                     __FUNCTION__, afMode);
   1790             return BAD_VALUE;
   1791     }
   1792 
   1793     return OK;
   1794 }
   1795 
   1796 status_t EmulatedQemuCamera3::doFakeAWB(CameraMetadata &settings) {
   1797     camera_metadata_entry e;
   1798 
   1799     e = settings.find(ANDROID_CONTROL_AWB_MODE);
   1800     if (e.count == 0 && hasCapability(BACKWARD_COMPATIBLE)) {
   1801         ALOGE("%s: No AWB mode entry!", __FUNCTION__);
   1802         return BAD_VALUE;
   1803     }
   1804     uint8_t awbMode = (e.count > 0) ? e.data.u8[0] : (uint8_t)ANDROID_CONTROL_AWB_MODE_AUTO;
   1805 
   1806     // TODO: Add white balance simulation
   1807 
   1808     switch (awbMode) {
   1809         case ANDROID_CONTROL_AWB_MODE_OFF:
   1810         case ANDROID_CONTROL_AWB_MODE_AUTO:
   1811         case ANDROID_CONTROL_AWB_MODE_INCANDESCENT:
   1812         case ANDROID_CONTROL_AWB_MODE_FLUORESCENT:
   1813         case ANDROID_CONTROL_AWB_MODE_DAYLIGHT:
   1814         case ANDROID_CONTROL_AWB_MODE_SHADE:
   1815             // Always magically right for Qemu Camera
   1816             mAwbState =  ANDROID_CONTROL_AWB_STATE_CONVERGED;
   1817             break;
   1818         default:
   1819             ALOGE("%s: Emulator doesn't support AWB mode %d",
   1820                     __FUNCTION__, awbMode);
   1821             return BAD_VALUE;
   1822     }
   1823 
   1824     return OK;
   1825 }
   1826 
   1827 void EmulatedQemuCamera3::update3A(CameraMetadata &settings) {
   1828     if (mAeMode != ANDROID_CONTROL_AE_MODE_OFF) {
   1829         settings.update(ANDROID_SENSOR_EXPOSURE_TIME,
   1830                 &mAeCurrentExposureTime, 1);
   1831         settings.update(ANDROID_SENSOR_SENSITIVITY,
   1832                 &mAeCurrentSensitivity, 1);
   1833     }
   1834 
   1835     settings.update(ANDROID_CONTROL_AE_STATE,
   1836             &mAeState, 1);
   1837     settings.update(ANDROID_CONTROL_AF_STATE,
   1838             &mAfState, 1);
   1839     settings.update(ANDROID_CONTROL_AWB_STATE,
   1840             &mAwbState, 1);
   1841 
   1842     uint8_t lensState;
   1843     switch (mAfState) {
   1844         case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
   1845         case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN:
   1846             lensState = ANDROID_LENS_STATE_MOVING;
   1847             break;
   1848         case ANDROID_CONTROL_AF_STATE_INACTIVE:
   1849         case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
   1850         case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
   1851         case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
   1852         case ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED:
   1853         default:
   1854             lensState = ANDROID_LENS_STATE_STATIONARY;
   1855             break;
   1856     }
   1857     settings.update(ANDROID_LENS_STATE, &lensState, 1);
   1858 }
   1859 
   1860 void EmulatedQemuCamera3::signalReadoutIdle() {
   1861     Mutex::Autolock l(mLock);
   1862     /*
   1863      * Need to check isIdle again because waiting on mLock may have allowed
   1864      * something to be placed in the in-flight queue.
   1865      */
   1866     if (mStatus == STATUS_ACTIVE && mReadoutThread->isIdle()) {
   1867         ALOGV("Now idle");
   1868         mStatus = STATUS_READY;
   1869     }
   1870 }
   1871 
   1872 void EmulatedQemuCamera3::onQemuSensorEvent(uint32_t frameNumber, Event e,
   1873                                             nsecs_t timestamp) {
   1874     switch (e) {
   1875         case QemuSensor::QemuSensorListener::EXPOSURE_START:
   1876             ALOGVV("%s: Frame %d: Sensor started exposure at %lld",
   1877                     __FUNCTION__, frameNumber, timestamp);
   1878             // Trigger shutter notify to framework.
   1879             camera3_notify_msg_t msg;
   1880             msg.type = CAMERA3_MSG_SHUTTER;
   1881             msg.message.shutter.frame_number = frameNumber;
   1882             msg.message.shutter.timestamp = timestamp;
   1883             sendNotify(&msg);
   1884             break;
   1885         default:
   1886             ALOGW("%s: Unexpected sensor event %d at %" PRId64, __FUNCTION__,
   1887                     e, timestamp);
   1888             break;
   1889     }
   1890 }
   1891 
// Constructs the readout thread for |parent|; starts with no JPEG
// compression in flight. The thread itself is started by the parent.
EmulatedQemuCamera3::ReadoutThread::ReadoutThread(EmulatedQemuCamera3 *parent) :
        mParent(parent), mJpegWaiting(false) {
    ALOGV("%s: Creating readout thread", __FUNCTION__);
}
   1896 
   1897 EmulatedQemuCamera3::ReadoutThread::~ReadoutThread() {
   1898     for (List<Request>::iterator i = mInFlightQueue.begin();
   1899          i != mInFlightQueue.end(); ++i) {
   1900         delete i->buffers;
   1901         delete i->sensorBuffers;
   1902     }
   1903 }
   1904 
// Hands a capture request to the readout thread and wakes it. Ownership of
// the Request's heap members (buffers/sensorBuffers) passes to the readout
// path, which deletes them in threadLoop() or ~ReadoutThread().
void EmulatedQemuCamera3::ReadoutThread::queueCaptureRequest(const Request &r) {
    Mutex::Autolock l(mLock);

    mInFlightQueue.push_back(r);
    // Wake threadLoop(), which may be blocked waiting for work.
    mInFlightSignal.signal();
}
   1911 
   1912 bool EmulatedQemuCamera3::ReadoutThread::isIdle() {
   1913     Mutex::Autolock l(mLock);
   1914     return mInFlightQueue.empty() && !mThreadActive;
   1915 }
   1916 
   1917 status_t EmulatedQemuCamera3::ReadoutThread::waitForReadout() {
   1918     status_t res;
   1919     Mutex::Autolock l(mLock);
   1920     int loopCount = 0;
   1921     while (mInFlightQueue.size() >= kMaxQueueSize) {
   1922         res = mInFlightSignal.waitRelative(mLock, kWaitPerLoop);
   1923         if (res != OK && res != TIMED_OUT) {
   1924             ALOGE("%s: Error waiting for in-flight queue to shrink",
   1925                     __FUNCTION__);
   1926             return INVALID_OPERATION;
   1927         }
   1928         if (loopCount == kMaxWaitLoops) {
   1929             ALOGE("%s: Timed out waiting for in-flight queue to shrink",
   1930                     __FUNCTION__);
   1931             return TIMED_OUT;
   1932         }
   1933         loopCount++;
   1934     }
   1935     return OK;
   1936 }
   1937 
   1938 bool EmulatedQemuCamera3::ReadoutThread::threadLoop() {
   1939     status_t res;
   1940 
   1941     ALOGVV("%s: ReadoutThread waiting for request", __FUNCTION__);
   1942 
   1943     // First wait for a request from the in-flight queue.
   1944 
   1945     if (mCurrentRequest.settings.isEmpty()) {
   1946         Mutex::Autolock l(mLock);
   1947         if (mInFlightQueue.empty()) {
   1948             res = mInFlightSignal.waitRelative(mLock, kWaitPerLoop);
   1949             if (res == TIMED_OUT) {
   1950                 ALOGVV("%s: ReadoutThread: Timed out waiting for request",
   1951                         __FUNCTION__);
   1952                 return true;
   1953             } else if (res != NO_ERROR) {
   1954                 ALOGE("%s: Error waiting for capture requests: %d",
   1955                         __FUNCTION__, res);
   1956                 return false;
   1957             }
   1958         }
   1959         mCurrentRequest.frameNumber = mInFlightQueue.begin()->frameNumber;
   1960         mCurrentRequest.settings.acquire(mInFlightQueue.begin()->settings);
   1961         mCurrentRequest.buffers = mInFlightQueue.begin()->buffers;
   1962         mCurrentRequest.sensorBuffers = mInFlightQueue.begin()->sensorBuffers;
   1963         mInFlightQueue.erase(mInFlightQueue.begin());
   1964         mInFlightSignal.signal();
   1965         mThreadActive = true;
   1966         ALOGVV("%s: Beginning readout of frame %d", __FUNCTION__,
   1967                 mCurrentRequest.frameNumber);
   1968     }
   1969 
   1970     // Then wait for it to be delivered from the sensor.
   1971     ALOGVV("%s: ReadoutThread: Wait for frame to be delivered from sensor",
   1972             __FUNCTION__);
   1973 
   1974     nsecs_t captureTime;
   1975     bool gotFrame =
   1976             mParent->mSensor->waitForNewFrame(kWaitPerLoop, &captureTime);
   1977     if (!gotFrame) {
   1978         ALOGVV("%s: ReadoutThread: Timed out waiting for sensor frame",
   1979                 __FUNCTION__);
   1980         return true;
   1981     }
   1982 
   1983     ALOGVV("Sensor done with readout for frame %d, captured at %lld ",
   1984             mCurrentRequest.frameNumber, captureTime);
   1985 
   1986     /*
   1987      * Check if we need to JPEG encode a buffer, and send it for async
   1988      * compression if so. Otherwise prepare the buffer for return.
   1989      */
   1990     bool needJpeg = false;
   1991     HalBufferVector::iterator buf = mCurrentRequest.buffers->begin();
   1992     while (buf != mCurrentRequest.buffers->end()) {
   1993         bool goodBuffer = true;
   1994         if (buf->stream->format == HAL_PIXEL_FORMAT_BLOB &&
   1995                 buf->stream->data_space != HAL_DATASPACE_DEPTH) {
   1996             Mutex::Autolock jl(mJpegLock);
   1997             if (mJpegWaiting) {
   1998                 /*
   1999                  * This shouldn't happen, because processCaptureRequest should
   2000                  * be stalling until JPEG compressor is free.
   2001                  */
   2002                 ALOGE("%s: Already processing a JPEG!", __FUNCTION__);
   2003                 goodBuffer = false;
   2004             }
   2005             if (goodBuffer) {
   2006                 // Compressor takes ownership of sensorBuffers here.
   2007                 res = mParent->mJpegCompressor->start(mCurrentRequest.sensorBuffers,
   2008                         this, &(mCurrentRequest.settings));
   2009                 goodBuffer = (res == OK);
   2010             }
   2011             if (goodBuffer) {
   2012                 needJpeg = true;
   2013 
   2014                 mJpegHalBuffer = *buf;
   2015                 mJpegFrameNumber = mCurrentRequest.frameNumber;
   2016                 mJpegWaiting = true;
   2017 
   2018                 mCurrentRequest.sensorBuffers = nullptr;
   2019                 buf = mCurrentRequest.buffers->erase(buf);
   2020 
   2021                 continue;
   2022             }
   2023             ALOGE("%s: Error compressing output buffer: %s (%d)",
   2024                     __FUNCTION__, strerror(-res), res);
   2025             // Fallthrough for cleanup.
   2026         }
   2027         GrallocModule::getInstance().unlock(*(buf->buffer));
   2028 
   2029         buf->status = goodBuffer ? CAMERA3_BUFFER_STATUS_OK :
   2030                 CAMERA3_BUFFER_STATUS_ERROR;
   2031         buf->acquire_fence = -1;
   2032         buf->release_fence = -1;
   2033 
   2034         ++buf;
   2035     }
   2036 
   2037     // Construct result for all completed buffers and results.
   2038 
   2039     camera3_capture_result result;
   2040 
   2041     if (mParent->hasCapability(BACKWARD_COMPATIBLE)) {
   2042         static const uint8_t sceneFlicker =
   2043                 ANDROID_STATISTICS_SCENE_FLICKER_NONE;
   2044         mCurrentRequest.settings.update(ANDROID_STATISTICS_SCENE_FLICKER,
   2045                 &sceneFlicker, 1);
   2046 
   2047         static const uint8_t flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
   2048         mCurrentRequest.settings.update(ANDROID_FLASH_STATE,
   2049                 &flashState, 1);
   2050 
   2051         nsecs_t rollingShutterSkew = 0;
   2052         mCurrentRequest.settings.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
   2053                 &rollingShutterSkew, 1);
   2054 
   2055         float focusRange[] = { 1.0f / 5.0f, 0 };  // 5 m to infinity in focus
   2056         mCurrentRequest.settings.update(ANDROID_LENS_FOCUS_RANGE, focusRange,
   2057                 sizeof(focusRange) / sizeof(float));
   2058     }
   2059 
   2060     mCurrentRequest.settings.update(ANDROID_SENSOR_TIMESTAMP,
   2061             &captureTime, 1);
   2062 
   2063 
   2064     // JPEGs take a stage longer.
   2065     const uint8_t pipelineDepth = needJpeg ? kMaxBufferCount : kMaxBufferCount - 1;
   2066     mCurrentRequest.settings.update(ANDROID_REQUEST_PIPELINE_DEPTH,
   2067             &pipelineDepth, 1);
   2068 
   2069     result.frame_number = mCurrentRequest.frameNumber;
   2070     result.result = mCurrentRequest.settings.getAndLock();
   2071     result.num_output_buffers = mCurrentRequest.buffers->size();
   2072     result.output_buffers = mCurrentRequest.buffers->array();
   2073     result.input_buffer = nullptr;
   2074     result.partial_result = 1;
   2075 
   2076     // Go idle if queue is empty, before sending result.
   2077     bool signalIdle = false;
   2078     {
   2079         Mutex::Autolock l(mLock);
   2080         if (mInFlightQueue.empty()) {
   2081             mThreadActive = false;
   2082             signalIdle = true;
   2083         }
   2084     }
   2085     if (signalIdle) mParent->signalReadoutIdle();
   2086 
   2087     // Send it off to the framework.
   2088     ALOGVV("%s: ReadoutThread: Send result to framework",
   2089             __FUNCTION__);
   2090     mParent->sendCaptureResult(&result);
   2091 
   2092     // Clean up.
   2093     mCurrentRequest.settings.unlock(result.result);
   2094 
   2095     delete mCurrentRequest.buffers;
   2096     mCurrentRequest.buffers = nullptr;
   2097     if (!needJpeg) {
   2098         delete mCurrentRequest.sensorBuffers;
   2099         mCurrentRequest.sensorBuffers = nullptr;
   2100     }
   2101     mCurrentRequest.settings.clear();
   2102 
   2103     return true;
   2104 }
   2105 
/*
 * JPEG compressor completion callback: returns the stashed BLOB buffer
 * (saved by threadLoop() when compression started) to the framework as a
 * separate, buffer-only capture result.
 */
void EmulatedQemuCamera3::ReadoutThread::onJpegDone(
        const StreamBuffer &jpegBuffer, bool success) {
    Mutex::Autolock jl(mJpegLock);

    GrallocModule::getInstance().unlock(*(jpegBuffer.buffer));

    mJpegHalBuffer.status = success ?
            CAMERA3_BUFFER_STATUS_OK : CAMERA3_BUFFER_STATUS_ERROR;
    mJpegHalBuffer.acquire_fence = -1;
    mJpegHalBuffer.release_fence = -1;
    // Allow processCaptureRequest to start the next JPEG.
    mJpegWaiting = false;

    camera3_capture_result result;

    result.frame_number = mJpegFrameNumber;
    // Metadata was already delivered with the main result; this result
    // carries only the JPEG output buffer (partial_result = 0).
    result.result = nullptr;
    result.num_output_buffers = 1;
    result.output_buffers = &mJpegHalBuffer;
    result.input_buffer = nullptr;
    result.partial_result = 0;

    if (!success) {
        ALOGE("%s: Compression failure, returning error state buffer to"
                " framework", __FUNCTION__);
    } else {
        ALOGV("%s: Compression complete, returning buffer to framework",
                __FUNCTION__);
    }

    mParent->sendCaptureResult(&result);
}
   2137 
// Reprocessing-input callback; unused by this camera, so reaching it
// indicates a logic error elsewhere.
void EmulatedQemuCamera3::ReadoutThread::onJpegInputDone(
        const StreamBuffer &inputBuffer) {
    /*
     * Should never get here, since the input buffer has to be returned by end
     * of processCaptureRequest.
     */
    ALOGE("%s: Unexpected input buffer from JPEG compressor!", __FUNCTION__);
}
   2146 
   2147 }; // end of namespace android
   2148