Home | History | Annotate | Download | only in camera
      1 /*
      2  * Copyright (C) 2012 The Android Open Source Project
      3  *
      4  * Licensed under the Apache License, Version 2.0 (the "License");
      5  * you may not use this file except in compliance with the License.
      6  * You may obtain a copy of the License at
      7  *
      8  *      http://www.apache.org/licenses/LICENSE-2.0
      9  *
     10  * Unless required by applicable law or agreed to in writing, software
     11  * distributed under the License is distributed on an "AS IS" BASIS,
     12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     13  * See the License for the specific language governing permissions and
     14  * limitations under the License.
     15  */
     16 
     17 /*
     18  * Contains implementation of a class EmulatedFakeCamera2 that encapsulates
     19  * functionality of an advanced fake camera.
     20  */
     21 
     22 #include <algorithm>
     23 #include <cstdint>
     24 #include <iterator>
     25 
     26 #define LOG_NDEBUG 0
     27 #define LOG_TAG "EmulatedCamera_FakeCamera2"
     28 #include <utils/Log.h>
     29 
     30 #include "EmulatedCameraFactory.h"
     31 #include "EmulatedFakeCamera2.h"
     32 #include "GrallocModule.h"
     33 #include "common/libs/auto_resources/auto_resources.h"
     34 #include "guest/libs/platform_support/api_level_fixes.h"
     35 
     36 #define ERROR_CAMERA_NOT_PRESENT -EPIPE
     37 
     38 #define CAMERA2_EXT_TRIGGER_TESTING_DISCONNECT 0xFFFFFFFF
     39 
     40 namespace android {
     41 
// Time-unit multipliers, expressed in nanoseconds (1 us = 1000 ns).
const int64_t USEC = 1000LL;
const int64_t MSEC = USEC * 1000LL;
const int64_t SEC = MSEC * 1000LL;

// Pixel formats this fake HAL advertises. RAW16 is only available on
// platforms newer than KitKat.
const uint32_t EmulatedFakeCamera2::kAvailableFormats[] = {
#if VSOC_PLATFORM_SDK_AFTER(K)
    HAL_PIXEL_FORMAT_RAW16,
#endif
    HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_RGBA_8888,
    //        HAL_PIXEL_FORMAT_YV12,
    HAL_PIXEL_FORMAT_YCrCb_420_SP};

// All size tables below are flattened (width, height) pairs.
const uint32_t EmulatedFakeCamera2::kAvailableRawSizes[2] = {
    640, 480
    //    mSensorWidth, mSensorHeight
};

const uint64_t EmulatedFakeCamera2::kAvailableRawMinDurations[1] = {
    static_cast<uint64_t>(Sensor::kFrameDurationRange[0])};

const uint32_t EmulatedFakeCamera2::kAvailableProcessedSizesBack[4] = {
    640, 480, 320, 240
    //    mSensorWidth, mSensorHeight
};

const uint32_t EmulatedFakeCamera2::kAvailableProcessedSizesFront[4] = {
    320, 240, 160, 120
    //    mSensorWidth, mSensorHeight
};

const uint64_t EmulatedFakeCamera2::kAvailableProcessedMinDurations[1] = {
    static_cast<uint64_t>(Sensor::kFrameDurationRange[0])};

const uint32_t EmulatedFakeCamera2::kAvailableJpegSizesBack[2] = {
    640, 480
    //    mSensorWidth, mSensorHeight
};

const uint32_t EmulatedFakeCamera2::kAvailableJpegSizesFront[2] = {
    320, 240
    //    mSensorWidth, mSensorHeight
};

// Minimum frame durations (ns) come straight from the fake sensor's range.
const uint64_t EmulatedFakeCamera2::kAvailableJpegMinDurations[1] = {
    static_cast<uint64_t>(Sensor::kFrameDurationRange[0])};
     87 
     88 EmulatedFakeCamera2::EmulatedFakeCamera2(int cameraId, bool facingBack,
     89                                          struct hw_module_t *module)
     90     : EmulatedCamera2(cameraId, module),
     91       mFacingBack(facingBack),
     92       mIsConnected(false) {
     93   ALOGD("Constructing emulated fake camera 2 facing %s",
     94         facingBack ? "back" : "front");
     95 }
     96 
     97 EmulatedFakeCamera2::~EmulatedFakeCamera2() {
     98   if (mCameraInfo != NULL) {
     99     free_camera_metadata(mCameraInfo);
    100   }
    101 }
    102 
    103 /****************************************************************************
    104  * Public API overrides
    105  ***************************************************************************/
    106 
    107 status_t EmulatedFakeCamera2::Initialize(const cvd::CameraDefinition &params) {
    108   status_t res;
    109 
    110   for (size_t index = 0; index < params.resolutions.size(); ++index) {
    111     mAvailableRawSizes.push_back(params.resolutions[index].width);
    112     mAvailableRawSizes.push_back(params.resolutions[index].height);
    113     mAvailableProcessedSizes.push_back(params.resolutions[index].width);
    114     mAvailableProcessedSizes.push_back(params.resolutions[index].height);
    115     mAvailableJpegSizes.push_back(params.resolutions[index].width);
    116     mAvailableJpegSizes.push_back(params.resolutions[index].height);
    117   }
    118 
    119   // Find max width/height
    120   int32_t width = 0, height = 0;
    121   for (size_t index = 0; index < params.resolutions.size(); ++index) {
    122     if (width <= params.resolutions[index].width &&
    123         height <= params.resolutions[index].height) {
    124       width = params.resolutions[index].width;
    125       height = params.resolutions[index].height;
    126     }
    127   }
    128   if (width < 640 || height < 480) {
    129     width = 640;
    130     height = 480;
    131   }
    132   mSensorWidth = width;
    133   mSensorHeight = height;
    134 
    135   /* TODO(ender): probably should drop this. */
    136   std::copy(kAvailableRawSizes,
    137             kAvailableRawSizes + arraysize(kAvailableRawSizes),
    138             std::back_inserter(mAvailableRawSizes));
    139 
    140   if (params.orientation == cvd::CameraDefinition::kFront) {
    141     std::copy(kAvailableProcessedSizesFront,
    142               kAvailableProcessedSizesFront +
    143                   arraysize(kAvailableProcessedSizesFront),
    144               std::back_inserter(mAvailableProcessedSizes));
    145     std::copy(kAvailableJpegSizesFront,
    146               kAvailableJpegSizesFront + arraysize(kAvailableJpegSizesFront),
    147               std::back_inserter(mAvailableJpegSizes));
    148   } else {
    149     std::copy(
    150         kAvailableProcessedSizesBack,
    151         kAvailableProcessedSizesBack + arraysize(kAvailableProcessedSizesBack),
    152         mAvailableProcessedSizes.begin());
    153     std::copy(kAvailableJpegSizesBack,
    154               kAvailableJpegSizesBack + arraysize(kAvailableJpegSizesBack),
    155               mAvailableJpegSizes.begin());
    156   }
    157 
    158   res = constructStaticInfo(&mCameraInfo, true);
    159   if (res != OK) {
    160     ALOGE("%s: Unable to allocate static info: %s (%d)", __FUNCTION__,
    161           strerror(-res), res);
    162     return res;
    163   }
    164   res = constructStaticInfo(&mCameraInfo, false);
    165   if (res != OK) {
    166     ALOGE("%s: Unable to fill in static info: %s (%d)", __FUNCTION__,
    167           strerror(-res), res);
    168     return res;
    169   }
    170   if (res != OK) return res;
    171 
    172   mNextStreamId = 1;
    173   mNextReprocessStreamId = 1;
    174   mRawStreamCount = 0;
    175   mProcessedStreamCount = 0;
    176   mJpegStreamCount = 0;
    177   mReprocessStreamCount = 0;
    178 
    179   return NO_ERROR;
    180 }
    181 
    182 /****************************************************************************
    183  * Camera module API overrides
    184  ***************************************************************************/
    185 
    186 status_t EmulatedFakeCamera2::connectCamera(hw_device_t **device) {
    187   status_t res;
    188   ALOGV("%s", __FUNCTION__);
    189 
    190   {
    191     Mutex::Autolock l(mMutex);
    192     if (!mStatusPresent) {
    193       ALOGE("%s: Camera ID %d is unplugged", __FUNCTION__, mCameraID);
    194       return -ENODEV;
    195     }
    196   }
    197 
    198   mConfigureThread = new ConfigureThread(this);
    199   mReadoutThread = new ReadoutThread(this);
    200   mControlThread = new ControlThread(this);
    201   mSensor = new Sensor(mSensorWidth, mSensorHeight);
    202   mJpegCompressor = new JpegCompressor();
    203 
    204   mNextStreamId = 1;
    205   mNextReprocessStreamId = 1;
    206 
    207   res = mSensor->startUp();
    208   if (res != NO_ERROR) return res;
    209 
    210   res = mConfigureThread->run("EmulatedFakeCamera2::configureThread");
    211   if (res != NO_ERROR) return res;
    212 
    213   res = mReadoutThread->run("EmulatedFakeCamera2::readoutThread");
    214   if (res != NO_ERROR) return res;
    215 
    216   res = mControlThread->run("EmulatedFakeCamera2::controlThread");
    217   if (res != NO_ERROR) return res;
    218 
    219   status_t ret = EmulatedCamera2::connectCamera(device);
    220 
    221   if (ret >= 0) {
    222     mIsConnected = true;
    223   }
    224 
    225   return ret;
    226 }
    227 
    228 status_t EmulatedFakeCamera2::plugCamera() {
    229   {
    230     Mutex::Autolock l(mMutex);
    231 
    232     if (!mStatusPresent) {
    233       ALOGI("%s: Plugged back in", __FUNCTION__);
    234       mStatusPresent = true;
    235     }
    236   }
    237 
    238   return NO_ERROR;
    239 }
    240 
    241 status_t EmulatedFakeCamera2::unplugCamera() {
    242   {
    243     Mutex::Autolock l(mMutex);
    244 
    245     if (mStatusPresent) {
    246       ALOGI("%s: Unplugged camera", __FUNCTION__);
    247       mStatusPresent = false;
    248     }
    249   }
    250 
    251   return closeCamera();
    252 }
    253 
    254 camera_device_status_t EmulatedFakeCamera2::getHotplugStatus() {
    255   Mutex::Autolock l(mMutex);
    256   return mStatusPresent ? CAMERA_DEVICE_STATUS_PRESENT
    257                         : CAMERA_DEVICE_STATUS_NOT_PRESENT;
    258 }
    259 
// Shuts down the pipeline: stops the sensor, asks all worker threads to
// exit, then joins them. Safe to call when not connected (no-op).
status_t EmulatedFakeCamera2::closeCamera() {
  {
    Mutex::Autolock l(mMutex);

    status_t res;
    ALOGV("%s", __FUNCTION__);

    // Nothing to tear down if connectCamera() never completed.
    if (!mIsConnected) {
      return NO_ERROR;
    }

    res = mSensor->shutDown();
    if (res != NO_ERROR) {
      ALOGE("%s: Unable to shut down sensor: %d", __FUNCTION__, res);
      return res;
    }

    // Request (but don't wait for) thread exit while still holding the lock;
    // cancel any in-flight JPEG compression so threads can't block on it.
    mConfigureThread->requestExit();
    mReadoutThread->requestExit();
    mControlThread->requestExit();
    mJpegCompressor->cancel();
  }

  // give up the lock since we will now block and the threads
  // can call back into this object
  mConfigureThread->join();
  mReadoutThread->join();
  mControlThread->join();

  ALOGV("%s exit", __FUNCTION__);

  {
    // Re-acquire only to update the connection flag.
    Mutex::Autolock l(mMutex);
    mIsConnected = false;
  }

  return NO_ERROR;
}
    298 
    299 status_t EmulatedFakeCamera2::getCameraInfo(struct camera_info *info) {
    300   info->facing = mFacingBack ? CAMERA_FACING_BACK : CAMERA_FACING_FRONT;
    301   info->orientation =
    302       EmulatedCameraFactory::Instance().getFakeCameraOrientation();
    303   return EmulatedCamera2::getCameraInfo(info);
    304 }
    305 
    306 /****************************************************************************
    307  * Camera device API overrides
    308  ***************************************************************************/
    309 
    310 /** Request input queue */
    311 
    312 int EmulatedFakeCamera2::requestQueueNotify() {
    313   ALOGV("Request queue notification received");
    314 
    315   ALOG_ASSERT(mRequestQueueSrc != NULL,
    316               "%s: Request queue src not set, but received queue notification!",
    317               __FUNCTION__);
    318   ALOG_ASSERT(mFrameQueueDst != NULL,
    319               "%s: Request queue src not set, but received queue notification!",
    320               __FUNCTION__);
    321   ALOG_ASSERT(mStreams.size() != 0,
    322               "%s: No streams allocated, but received queue notification!",
    323               __FUNCTION__);
    324   return mConfigureThread->newRequestAvailable();
    325 }
    326 
    327 int EmulatedFakeCamera2::getInProgressCount() {
    328   Mutex::Autolock l(mMutex);
    329 
    330   if (!mStatusPresent) {
    331     ALOGW("%s: Camera was physically disconnected", __FUNCTION__);
    332     return ERROR_CAMERA_NOT_PRESENT;
    333   }
    334 
    335   int requestCount = 0;
    336   requestCount += mConfigureThread->getInProgressCount();
    337   requestCount += mReadoutThread->getInProgressCount();
    338   requestCount += mJpegCompressor->isBusy() ? 1 : 0;
    339 
    340   return requestCount;
    341 }
    342 
    343 int EmulatedFakeCamera2::constructDefaultRequest(int request_template,
    344                                                  camera_metadata_t **request) {
    345   if (request == NULL) return BAD_VALUE;
    346   if (request_template < 0 || request_template >= CAMERA2_TEMPLATE_COUNT) {
    347     return BAD_VALUE;
    348   }
    349 
    350   {
    351     Mutex::Autolock l(mMutex);
    352     if (!mStatusPresent) {
    353       ALOGW("%s: Camera was physically disconnected", __FUNCTION__);
    354       return ERROR_CAMERA_NOT_PRESENT;
    355     }
    356   }
    357 
    358   status_t res;
    359   // Pass 1, calculate size and allocate
    360   res = constructDefaultRequest(request_template, request, true);
    361   if (res != OK) {
    362     return res;
    363   }
    364   // Pass 2, build request
    365   res = constructDefaultRequest(request_template, request, false);
    366   if (res != OK) {
    367     ALOGE("Unable to populate new request for template %d", request_template);
    368   }
    369 
    370   return res;
    371 }
    372 
    373 int EmulatedFakeCamera2::allocateStream(
    374     uint32_t width, uint32_t height, int format,
    375     const camera2_stream_ops_t *stream_ops, uint32_t *stream_id,
    376     uint32_t *format_actual, uint32_t *usage, uint32_t *max_buffers) {
    377   Mutex::Autolock l(mMutex);
    378 
    379   if (!mStatusPresent) {
    380     ALOGW("%s: Camera was physically disconnected", __FUNCTION__);
    381     return ERROR_CAMERA_NOT_PRESENT;
    382   }
    383 
    384   // Temporary shim until FORMAT_ZSL is removed
    385   if (format == CAMERA2_HAL_PIXEL_FORMAT_ZSL) {
    386     format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
    387   }
    388 
    389   if (format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
    390     unsigned int numFormats = sizeof(kAvailableFormats) / sizeof(uint32_t);
    391     unsigned int formatIdx = 0;
    392     for (; formatIdx < numFormats; formatIdx++) {
    393       if (format == (int)kAvailableFormats[formatIdx]) break;
    394     }
    395     if (formatIdx == numFormats) {
    396       ALOGE("%s: Format 0x%x is not supported", __FUNCTION__, format);
    397       return BAD_VALUE;
    398     }
    399   }
    400 
    401   const uint32_t *availableSizes;
    402   size_t availableSizeCount;
    403   switch (format) {
    404 #if VSOC_PLATFORM_SDK_AFTER(K)
    405     case HAL_PIXEL_FORMAT_RAW16:
    406       availableSizes = &mAvailableRawSizes.front();
    407       availableSizeCount = mAvailableRawSizes.size();
    408       break;
    409 #endif
    410     case HAL_PIXEL_FORMAT_BLOB:
    411       availableSizes = &mAvailableJpegSizes.front();
    412       availableSizeCount = mAvailableJpegSizes.size();
    413       break;
    414     case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
    415     case HAL_PIXEL_FORMAT_RGBA_8888:
    416     case HAL_PIXEL_FORMAT_YV12:
    417     case HAL_PIXEL_FORMAT_YCrCb_420_SP:
    418       availableSizes = &mAvailableProcessedSizes.front();
    419       availableSizeCount = mAvailableProcessedSizes.size();
    420       break;
    421     default:
    422       ALOGE("%s: Unknown format 0x%x", __FUNCTION__, format);
    423       return BAD_VALUE;
    424   }
    425 
    426   unsigned int resIdx = 0;
    427   for (; resIdx < availableSizeCount; resIdx++) {
    428     if (availableSizes[resIdx * 2] == width &&
    429         availableSizes[resIdx * 2 + 1] == height)
    430       break;
    431   }
    432   if (resIdx == availableSizeCount) {
    433     ALOGE("%s: Format 0x%x does not support resolution %d, %d", __FUNCTION__,
    434           format, width, height);
    435     return BAD_VALUE;
    436   }
    437 
    438   switch (format) {
    439 #if VSOC_PLATFORM_SDK_AFTER(K)
    440     case HAL_PIXEL_FORMAT_RAW16:
    441       if (mRawStreamCount >= kMaxRawStreamCount) {
    442         ALOGE("%s: Cannot allocate another raw stream (%d already allocated)",
    443               __FUNCTION__, mRawStreamCount);
    444         return INVALID_OPERATION;
    445       }
    446       mRawStreamCount++;
    447       break;
    448 #endif
    449     case HAL_PIXEL_FORMAT_BLOB:
    450       if (mJpegStreamCount >= kMaxJpegStreamCount) {
    451         ALOGE("%s: Cannot allocate another JPEG stream (%d already allocated)",
    452               __FUNCTION__, mJpegStreamCount);
    453         return INVALID_OPERATION;
    454       }
    455       mJpegStreamCount++;
    456       break;
    457     default:
    458       if (mProcessedStreamCount >= kMaxProcessedStreamCount) {
    459         ALOGE(
    460             "%s: Cannot allocate another processed stream (%d already "
    461             "allocated)",
    462             __FUNCTION__, mProcessedStreamCount);
    463         return INVALID_OPERATION;
    464       }
    465       mProcessedStreamCount++;
    466   }
    467 
    468   Stream newStream;
    469   newStream.ops = stream_ops;
    470   newStream.width = width;
    471   newStream.height = height;
    472   newStream.format = format;
    473   // TODO: Query stride from gralloc
    474   newStream.stride = width;
    475 
    476   mStreams.add(mNextStreamId, newStream);
    477 
    478   *stream_id = mNextStreamId;
    479   if (format_actual) *format_actual = format;
    480   *usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
    481   *max_buffers = kMaxBufferCount;
    482 
    483   ALOGV("Stream allocated: %d, %d x %d, 0x%x. U: %x, B: %d", *stream_id, width,
    484         height, format, *usage, *max_buffers);
    485 
    486   mNextStreamId++;
    487   return NO_ERROR;
    488 }
    489 
    490 int EmulatedFakeCamera2::registerStreamBuffers(uint32_t stream_id,
    491                                                int num_buffers,
    492                                                buffer_handle_t * /*buffers*/) {
    493   Mutex::Autolock l(mMutex);
    494 
    495   if (!mStatusPresent) {
    496     ALOGW("%s: Camera was physically disconnected", __FUNCTION__);
    497     return ERROR_CAMERA_NOT_PRESENT;
    498   }
    499 
    500   ALOGV("%s: Stream %d registering %d buffers", __FUNCTION__, stream_id,
    501         num_buffers);
    502   // Need to find out what the final concrete pixel format for our stream is
    503   // Assumes that all buffers have the same format.
    504   if (num_buffers < 1) {
    505     ALOGE("%s: Stream %d only has %d buffers!", __FUNCTION__, stream_id,
    506           num_buffers);
    507     return BAD_VALUE;
    508   }
    509 
    510   ssize_t streamIndex = mStreams.indexOfKey(stream_id);
    511   if (streamIndex < 0) {
    512     ALOGE("%s: Unknown stream id %d!", __FUNCTION__, stream_id);
    513     return BAD_VALUE;
    514   }
    515 
    516   Stream &stream = mStreams.editValueAt(streamIndex);
    517 
    518   int finalFormat = stream.format;
    519 
    520   if (finalFormat == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
    521     finalFormat = HAL_PIXEL_FORMAT_RGBA_8888;
    522   }
    523 
    524   ALOGV("%s: Stream %d format set to %x, previously %x", __FUNCTION__,
    525         stream_id, finalFormat, stream.format);
    526 
    527   stream.format = finalFormat;
    528 
    529   return NO_ERROR;
    530 }
    531 
    532 int EmulatedFakeCamera2::releaseStream(uint32_t stream_id) {
    533   Mutex::Autolock l(mMutex);
    534 
    535   ssize_t streamIndex = mStreams.indexOfKey(stream_id);
    536   if (streamIndex < 0) {
    537     ALOGE("%s: Unknown stream id %d!", __FUNCTION__, stream_id);
    538     return BAD_VALUE;
    539   }
    540 
    541   if (isStreamInUse(stream_id)) {
    542     ALOGE("%s: Cannot release stream %d; in use!", __FUNCTION__, stream_id);
    543     return BAD_VALUE;
    544   }
    545 
    546   switch (mStreams.valueAt(streamIndex).format) {
    547 #if VSOC_PLATFORM_SDK_AFTER(K)
    548     case HAL_PIXEL_FORMAT_RAW16:
    549       mRawStreamCount--;
    550       break;
    551 #endif
    552     case HAL_PIXEL_FORMAT_BLOB:
    553       mJpegStreamCount--;
    554       break;
    555     default:
    556       mProcessedStreamCount--;
    557       break;
    558   }
    559 
    560   mStreams.removeItemsAt(streamIndex);
    561 
    562   return NO_ERROR;
    563 }
    564 
    565 int EmulatedFakeCamera2::allocateReprocessStreamFromStream(
    566     uint32_t output_stream_id, const camera2_stream_in_ops_t *stream_ops,
    567     uint32_t *stream_id) {
    568   Mutex::Autolock l(mMutex);
    569 
    570   if (!mStatusPresent) {
    571     ALOGW("%s: Camera was physically disconnected", __FUNCTION__);
    572     return ERROR_CAMERA_NOT_PRESENT;
    573   }
    574 
    575   ssize_t baseStreamIndex = mStreams.indexOfKey(output_stream_id);
    576   if (baseStreamIndex < 0) {
    577     ALOGE("%s: Unknown output stream id %d!", __FUNCTION__, output_stream_id);
    578     return BAD_VALUE;
    579   }
    580 
    581   const Stream &baseStream = mStreams[baseStreamIndex];
    582 
    583   // We'll reprocess anything we produced
    584 
    585   if (mReprocessStreamCount >= kMaxReprocessStreamCount) {
    586     ALOGE("%s: Cannot allocate another reprocess stream (%d already allocated)",
    587           __FUNCTION__, mReprocessStreamCount);
    588     return INVALID_OPERATION;
    589   }
    590   mReprocessStreamCount++;
    591 
    592   ReprocessStream newStream;
    593   newStream.ops = stream_ops;
    594   newStream.width = baseStream.width;
    595   newStream.height = baseStream.height;
    596   newStream.format = baseStream.format;
    597   newStream.stride = baseStream.stride;
    598   newStream.sourceStreamId = output_stream_id;
    599 
    600   *stream_id = mNextReprocessStreamId;
    601   mReprocessStreams.add(mNextReprocessStreamId, newStream);
    602 
    603   ALOGV("Reprocess stream allocated: %d: %d, %d, 0x%x. Parent stream: %d",
    604         *stream_id, newStream.width, newStream.height, newStream.format,
    605         output_stream_id);
    606 
    607   mNextReprocessStreamId++;
    608   return NO_ERROR;
    609 }
    610 
    611 int EmulatedFakeCamera2::releaseReprocessStream(uint32_t stream_id) {
    612   Mutex::Autolock l(mMutex);
    613 
    614   ssize_t streamIndex = mReprocessStreams.indexOfKey(stream_id);
    615   if (streamIndex < 0) {
    616     ALOGE("%s: Unknown reprocess stream id %d!", __FUNCTION__, stream_id);
    617     return BAD_VALUE;
    618   }
    619 
    620   if (isReprocessStreamInUse(stream_id)) {
    621     ALOGE("%s: Cannot release reprocessing stream %d; in use!", __FUNCTION__,
    622           stream_id);
    623     return BAD_VALUE;
    624   }
    625 
    626   mReprocessStreamCount--;
    627   mReprocessStreams.removeItemsAt(streamIndex);
    628 
    629   return NO_ERROR;
    630 }
    631 
    632 int EmulatedFakeCamera2::triggerAction(uint32_t trigger_id, int32_t ext1,
    633                                        int32_t ext2) {
    634   Mutex::Autolock l(mMutex);
    635 
    636   if (trigger_id == CAMERA2_EXT_TRIGGER_TESTING_DISCONNECT) {
    637     ALOGI("%s: Disconnect trigger - camera must be closed", __FUNCTION__);
    638     mStatusPresent = false;
    639 
    640     EmulatedCameraFactory::Instance().onStatusChanged(
    641         mCameraID, CAMERA_DEVICE_STATUS_NOT_PRESENT);
    642   }
    643 
    644   if (!mStatusPresent) {
    645     ALOGW("%s: Camera was physically disconnected", __FUNCTION__);
    646     return ERROR_CAMERA_NOT_PRESENT;
    647   }
    648 
    649   return mControlThread->triggerAction(trigger_id, ext1, ext2);
    650 }
    651 
    652 /** Shutdown and debug methods */
    653 
    654 int EmulatedFakeCamera2::dump(int fd) {
    655   String8 result;
    656 
    657   result.appendFormat("    Camera HAL device: EmulatedFakeCamera2\n");
    658   result.appendFormat("      Streams:\n");
    659   for (size_t i = 0; i < mStreams.size(); i++) {
    660     int id = mStreams.keyAt(i);
    661     const Stream &s = mStreams.valueAt(i);
    662     result.appendFormat("         Stream %d: %d x %d, format 0x%x, stride %d\n",
    663                         id, s.width, s.height, s.format, s.stride);
    664   }
    665 
    666   write(fd, result.string(), result.size());
    667 
    668   return NO_ERROR;
    669 }
    670 
// Called by worker threads when they hit a fatal error. Currently only
// logs; see TODO for propagating the failure to the parent for a clean
// shutdown.
void EmulatedFakeCamera2::signalError() {
  // TODO: Let parent know so we can shut down cleanly
  ALOGE("Worker thread is signaling a serious error");
}
    675 
    676 /** Pipeline control worker thread methods */
    677 
// Worker thread that dequeues requests and configures the capture
// pipeline. Thread(false): not started in canCallJava mode.
EmulatedFakeCamera2::ConfigureThread::ConfigureThread(
    EmulatedFakeCamera2 *parent)
    : Thread(false), mParent(parent), mRequestCount(0), mNextBuffers(NULL) {
  // mRunning is flipped to true by readyToRun() once the thread starts.
  mRunning = false;
}

EmulatedFakeCamera2::ConfigureThread::~ConfigureThread() {}
    685 
    686 status_t EmulatedFakeCamera2::ConfigureThread::readyToRun() {
    687   Mutex::Autolock lock(mInputMutex);
    688 
    689   ALOGV("Starting up ConfigureThread");
    690   mRequest = NULL;
    691   mActive = false;
    692   mRunning = true;
    693 
    694   mInputSignal.signal();
    695   return NO_ERROR;
    696 }
    697 
    698 status_t EmulatedFakeCamera2::ConfigureThread::waitUntilRunning() {
    699   Mutex::Autolock lock(mInputMutex);
    700   if (!mRunning) {
    701     ALOGV("Waiting for configure thread to start");
    702     mInputSignal.wait(mInputMutex);
    703   }
    704   return OK;
    705 }
    706 
    707 status_t EmulatedFakeCamera2::ConfigureThread::newRequestAvailable() {
    708   waitUntilRunning();
    709 
    710   Mutex::Autolock lock(mInputMutex);
    711 
    712   mActive = true;
    713   mInputSignal.signal();
    714 
    715   return OK;
    716 }
    717 
    718 bool EmulatedFakeCamera2::ConfigureThread::isStreamInUse(uint32_t id) {
    719   Mutex::Autolock lock(mInternalsMutex);
    720 
    721   if (mNextBuffers == NULL) return false;
    722   for (size_t i = 0; i < mNextBuffers->size(); i++) {
    723     if ((*mNextBuffers)[i].streamId == (int)id) return true;
    724   }
    725   return false;
    726 }
    727 
    728 int EmulatedFakeCamera2::ConfigureThread::getInProgressCount() {
    729   Mutex::Autolock lock(mInputMutex);
    730   return mRequestCount;
    731 }
    732 
// Main loop of the configure thread. One pass does at most one of:
// wait for activity, dequeue + classify a request, wait for downstream
// (readout / JPEG) readiness, or configure the next capture/reprocess.
// Returns true to be called again, false to exit the thread.
// Locking: mInputMutex guards mActive/mRequestCount; mInternalsMutex
// guards the request/buffer state. Order of acquisition here matters.
bool EmulatedFakeCamera2::ConfigureThread::threadLoop() {
  status_t res;

  // Check if we're currently processing or just waiting
  {
    Mutex::Autolock lock(mInputMutex);
    if (!mActive) {
      // Inactive, keep waiting until we've been signaled
      status_t res;
      res = mInputSignal.waitRelative(mInputMutex, kWaitPerLoop);
      if (res != NO_ERROR && res != TIMED_OUT) {
        ALOGE("%s: Error waiting for input requests: %d", __FUNCTION__, res);
        return false;
      }
      // Timed out without being activated: loop again.
      if (!mActive) return true;
      ALOGV("New request available");
    }
    // Active
  }

  if (mRequest == NULL) {
    Mutex::Autolock il(mInternalsMutex);

    ALOGV("Configure: Getting next request");
    res = mParent->mRequestQueueSrc->dequeue_request(mParent->mRequestQueueSrc,
                                                     &mRequest);
    if (res != NO_ERROR) {
      ALOGE("%s: Error dequeuing next request: %d", __FUNCTION__, res);
      mParent->signalError();
      return false;
    }
    if (mRequest == NULL) {
      ALOGV("Configure: Request queue empty, going inactive");
      // No requests available, go into inactive mode
      Mutex::Autolock lock(mInputMutex);
      mActive = false;
      return true;
    } else {
      // Count the dequeued request as in-progress.
      Mutex::Autolock lock(mInputMutex);
      mRequestCount++;
    }

    // Classify the request and stage its buffers via the matching setup
    // helper; setup failure is fatal to the thread.
    camera_metadata_entry_t type;
    res = find_camera_metadata_entry(mRequest, ANDROID_REQUEST_TYPE, &type);
    if (res != NO_ERROR) {
      ALOGE("%s: error reading request type", __FUNCTION__);
      mParent->signalError();
      return false;
    }
    bool success = false;
    ;
    switch (type.data.u8[0]) {
      case ANDROID_REQUEST_TYPE_CAPTURE:
        success = setupCapture();
        break;
      case ANDROID_REQUEST_TYPE_REPROCESS:
        success = setupReprocess();
        break;
      default:
        ALOGE("%s: Unexpected request type %d", __FUNCTION__, type.data.u8[0]);
        mParent->signalError();
        break;
    }
    if (!success) return false;
  }

  // Poll (non-blocking beyond kWaitPerLoop) for the readout thread; return
  // true to retry on the next loop pass rather than blocking here.
  if (mWaitingForReadout) {
    bool readoutDone;
    readoutDone = mParent->mReadoutThread->waitForReady(kWaitPerLoop);
    if (!readoutDone) return true;

    if (mNextNeedsJpeg) {
      ALOGV("Configure: Waiting for JPEG compressor");
    } else {
      ALOGV("Configure: Waiting for sensor");
    }
    mWaitingForReadout = false;
  }

  // Same polling pattern for the JPEG compressor when the next capture
  // includes a BLOB (JPEG) output.
  if (mNextNeedsJpeg) {
    bool jpegDone;
    jpegDone = mParent->mJpegCompressor->waitForDone(kWaitPerLoop);
    if (!jpegDone) return true;

    ALOGV("Configure: Waiting for sensor");
    mNextNeedsJpeg = false;
  }

  // All prerequisites satisfied: hand off to the capture- or
  // reprocess-specific configuration step.
  if (mNextIsCapture) {
    return configureNextCapture();
  } else {
    return configureNextReprocess();
  }
}
    827 
/*
 * Sets up the next regular (sensor) capture described by mRequest.
 * Runs the control thread's 3A overrides on the request, collects the
 * requested output streams into mNextBuffers, and caches frame number,
 * exposure time, frame duration and sensitivity for configureNextCapture().
 * On missing metadata or an unconfigured stream format it signals a fatal
 * error to the parent and returns false.
 */
bool EmulatedFakeCamera2::ConfigureThread::setupCapture() {
  status_t res;

  mNextIsCapture = true;
  // Get necessary parameters for sensor config
  // (processRequest overrides android.control.* fields with 3A state).
  mParent->mControlThread->processRequest(mRequest);

  camera_metadata_entry_t streams;
  res = find_camera_metadata_entry(mRequest, ANDROID_REQUEST_OUTPUT_STREAMS,
                                   &streams);
  if (res != NO_ERROR) {
    ALOGE("%s: error reading output stream tag", __FUNCTION__);
    mParent->signalError();
    return false;
  }

  // Ownership of mNextBuffers is handed off in configureNextCapture()
  // via setNextOperation()/setDestinationBuffers().
  mNextBuffers = new Buffers;
  mNextNeedsJpeg = false;
  ALOGV("Configure: Setting up buffers for capture");
  for (size_t i = 0; i < streams.count; i++) {
    int streamId = streams.data.i32[i];
    const Stream &s = mParent->getStreamInfo(streamId);
    if (s.format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
      ALOGE(
          "%s: Stream %d does not have a concrete pixel format, but "
          "is included in a request!",
          __FUNCTION__, streamId);
      mParent->signalError();
      return false;
    }
    StreamBuffer b;
    b.streamId = streamId;  // streams.data.u8[i];
    b.width = s.width;
    b.height = s.height;
    b.format = s.format;
    b.stride = s.stride;
    mNextBuffers->push_back(b);
    ALOGV(
        "Configure:    Buffer %zu: Stream %d, %d x %d, format 0x%x, "
        "stride %d",
        i, b.streamId, b.width, b.height, b.format, b.stride);
    // Any BLOB output means this frame must pass through the JPEG compressor.
    if (b.format == HAL_PIXEL_FORMAT_BLOB) {
      mNextNeedsJpeg = true;
    }
  }

  camera_metadata_entry_t e;
  res = find_camera_metadata_entry(mRequest, ANDROID_REQUEST_FRAME_COUNT, &e);
  if (res != NO_ERROR) {
    ALOGE("%s: error reading frame count tag: %s (%d)", __FUNCTION__,
          strerror(-res), res);
    mParent->signalError();
    return false;
  }
  mNextFrameNumber = *e.data.i32;

  res = find_camera_metadata_entry(mRequest, ANDROID_SENSOR_EXPOSURE_TIME, &e);
  if (res != NO_ERROR) {
    ALOGE("%s: error reading exposure time tag: %s (%d)", __FUNCTION__,
          strerror(-res), res);
    mParent->signalError();
    return false;
  }
  mNextExposureTime = *e.data.i64;

  res = find_camera_metadata_entry(mRequest, ANDROID_SENSOR_FRAME_DURATION, &e);
  if (res != NO_ERROR) {
    ALOGE("%s: error reading frame duration tag", __FUNCTION__);
    mParent->signalError();
    return false;
  }
  mNextFrameDuration = *e.data.i64;

  // Clamp: a frame can never be shorter than exposure plus the sensor's
  // minimum vertical blanking interval.
  if (mNextFrameDuration < mNextExposureTime + Sensor::kMinVerticalBlank) {
    mNextFrameDuration = mNextExposureTime + Sensor::kMinVerticalBlank;
  }
  res = find_camera_metadata_entry(mRequest, ANDROID_SENSOR_SENSITIVITY, &e);
  if (res != NO_ERROR) {
    ALOGE("%s: error reading sensitivity tag", __FUNCTION__);
    mParent->signalError();
    return false;
  }
  mNextSensitivity = *e.data.i32;

  // Start waiting on readout thread
  mWaitingForReadout = true;
  ALOGV("Configure: Waiting for readout thread");

  return true;
}
    918 
/*
 * Second half of capture setup: waits for sensor vsync, programs the
 * sensor with the values cached by setupCapture(), dequeues and locks the
 * destination buffers, and queues the readout operation. Returns true
 * while still waiting for vsync so threadLoop() re-invokes it next cycle.
 */
bool EmulatedFakeCamera2::ConfigureThread::configureNextCapture() {
  bool vsync = mParent->mSensor->waitForVSync(kWaitPerLoop);
  if (!vsync) return true;  // Not ready yet; retry on the next loop.

  Mutex::Autolock il(mInternalsMutex);
  ALOGV("Configure: Configuring sensor for capture %d", mNextFrameNumber);
  mParent->mSensor->setExposureTime(mNextExposureTime);
  mParent->mSensor->setFrameDuration(mNextFrameDuration);
  mParent->mSensor->setSensitivity(mNextSensitivity);

  getBuffers();

  ALOGV("Configure: Done configure for capture %d", mNextFrameNumber);
  // Hand off mRequest and mNextBuffers: the readout thread will free the
  // request; the JPEG path (or readout) disposes of the buffer list.
  mParent->mReadoutThread->setNextOperation(true, mRequest, mNextBuffers);
  mParent->mSensor->setDestinationBuffers(mNextBuffers);

  mRequest = NULL;
  mNextBuffers = NULL;

  Mutex::Autolock lock(mInputMutex);
  mRequestCount--;

  return true;
}
    943 
    944 bool EmulatedFakeCamera2::ConfigureThread::setupReprocess() {
    945   status_t res;
    946 
    947   mNextNeedsJpeg = true;
    948   mNextIsCapture = false;
    949 
    950   camera_metadata_entry_t reprocessStreams;
    951   res = find_camera_metadata_entry(mRequest, ANDROID_REQUEST_INPUT_STREAMS,
    952                                    &reprocessStreams);
    953   if (res != NO_ERROR) {
    954     ALOGE("%s: error reading output stream tag", __FUNCTION__);
    955     mParent->signalError();
    956     return false;
    957   }
    958 
    959   mNextBuffers = new Buffers;
    960 
    961   ALOGV("Configure: Setting up input buffers for reprocess");
    962   for (size_t i = 0; i < reprocessStreams.count; i++) {
    963     int streamId = reprocessStreams.data.i32[i];
    964     const ReprocessStream &s = mParent->getReprocessStreamInfo(streamId);
    965     if (s.format != HAL_PIXEL_FORMAT_RGB_888) {
    966       ALOGE("%s: Only ZSL reprocessing supported!", __FUNCTION__);
    967       mParent->signalError();
    968       return false;
    969     }
    970     StreamBuffer b;
    971     b.streamId = -streamId;
    972     b.width = s.width;
    973     b.height = s.height;
    974     b.format = s.format;
    975     b.stride = s.stride;
    976     mNextBuffers->push_back(b);
    977   }
    978 
    979   camera_metadata_entry_t streams;
    980   res = find_camera_metadata_entry(mRequest, ANDROID_REQUEST_OUTPUT_STREAMS,
    981                                    &streams);
    982   if (res != NO_ERROR) {
    983     ALOGE("%s: error reading output stream tag", __FUNCTION__);
    984     mParent->signalError();
    985     return false;
    986   }
    987 
    988   ALOGV("Configure: Setting up output buffers for reprocess");
    989   for (size_t i = 0; i < streams.count; i++) {
    990     int streamId = streams.data.i32[i];
    991     const Stream &s = mParent->getStreamInfo(streamId);
    992     if (s.format != HAL_PIXEL_FORMAT_BLOB) {
    993       // TODO: Support reprocess to YUV
    994       ALOGE("%s: Non-JPEG output stream %d for reprocess not supported",
    995             __FUNCTION__, streamId);
    996       mParent->signalError();
    997       return false;
    998     }
    999     StreamBuffer b;
   1000     b.streamId = streams.data.u8[i];
   1001     b.width = s.width;
   1002     b.height = s.height;
   1003     b.format = s.format;
   1004     b.stride = s.stride;
   1005     mNextBuffers->push_back(b);
   1006     ALOGV(
   1007         "Configure:    Buffer %zu: Stream %d, %d x %d, format 0x%x, "
   1008         "stride %d",
   1009         i, b.streamId, b.width, b.height, b.format, b.stride);
   1010   }
   1011 
   1012   camera_metadata_entry_t e;
   1013   res = find_camera_metadata_entry(mRequest, ANDROID_REQUEST_FRAME_COUNT, &e);
   1014   if (res != NO_ERROR) {
   1015     ALOGE("%s: error reading frame count tag: %s (%d)", __FUNCTION__,
   1016           strerror(-res), res);
   1017     mParent->signalError();
   1018     return false;
   1019   }
   1020   mNextFrameNumber = *e.data.i32;
   1021 
   1022   return true;
   1023 }
   1024 
   1025 bool EmulatedFakeCamera2::ConfigureThread::configureNextReprocess() {
   1026   Mutex::Autolock il(mInternalsMutex);
   1027 
   1028   getBuffers();
   1029 
   1030   ALOGV("Configure: Done configure for reprocess %d", mNextFrameNumber);
   1031   mParent->mReadoutThread->setNextOperation(false, mRequest, mNextBuffers);
   1032 
   1033   mRequest = NULL;
   1034   mNextBuffers = NULL;
   1035 
   1036   Mutex::Autolock lock(mInputMutex);
   1037   mRequestCount--;
   1038 
   1039   return true;
   1040 }
   1041 
/*
 * Obtains and gralloc-locks every buffer in mNextBuffers for the upcoming
 * operation. Positive stream IDs are dequeued from output streams and
 * locked for writing; negative IDs (reprocess inputs) are acquired from
 * the reprocess stream and locked for reading. On any failure the buffer
 * is returned to its stream, a fatal error is signaled, and false is
 * returned. Fills in b.buffer and b.img for each entry.
 */
bool EmulatedFakeCamera2::ConfigureThread::getBuffers() {
  status_t res;
  /** Get buffers to fill for this frame */
  for (size_t i = 0; i < mNextBuffers->size(); i++) {
    StreamBuffer &b = mNextBuffers->editItemAt(i);

    if (b.streamId > 0) {
      // Output stream: dequeue an empty buffer to render into.
      ALOGV("Configure: Dequeing buffer from stream %d", b.streamId);
      Stream s = mParent->getStreamInfo(b.streamId);
      res = s.ops->dequeue_buffer(s.ops, &(b.buffer));
      if (res != NO_ERROR || b.buffer == NULL) {
        ALOGE("%s: Unable to dequeue buffer from stream %d: %s (%d)",
              __FUNCTION__, b.streamId, strerror(-res), res);
        mParent->signalError();
        return false;
      }

      /* Lock the buffer from the perspective of the graphics mapper */
      res = GrallocModule::getInstance().lock(
          *(b.buffer), GRALLOC_USAGE_HW_CAMERA_WRITE, 0, 0, s.width, s.height,
          (void **)&(b.img));

      if (res != NO_ERROR) {
        ALOGE("%s: grbuffer_mapper.lock failure: %s (%d)", __FUNCTION__,
              strerror(-res), res);
        // Return the un-lockable buffer to the stream before bailing.
        s.ops->cancel_buffer(s.ops, b.buffer);
        mParent->signalError();
        return false;
      }
    } else {
      // Reprocess input stream (ID was negated by setupReprocess()).
      ALOGV("Configure: Acquiring buffer from reprocess stream %d",
            -b.streamId);
      ReprocessStream s = mParent->getReprocessStreamInfo(-b.streamId);
      res = s.ops->acquire_buffer(s.ops, &(b.buffer));
      if (res != NO_ERROR || b.buffer == NULL) {
        ALOGE(
            "%s: Unable to acquire buffer from reprocess stream %d: "
            "%s (%d)",
            __FUNCTION__, -b.streamId, strerror(-res), res);
        mParent->signalError();
        return false;
      }

      /* Lock the buffer from the perspective of the graphics mapper */
      res = GrallocModule::getInstance().lock(
          *(b.buffer), GRALLOC_USAGE_HW_CAMERA_READ, 0, 0, s.width, s.height,
          (void **)&(b.img));
      if (res != NO_ERROR) {
        ALOGE("%s: grbuffer_mapper.lock failure: %s (%d)", __FUNCTION__,
              strerror(-res), res);
        s.ops->release_buffer(s.ops, b.buffer);
        mParent->signalError();
        return false;
      }
    }
  }
  return true;
}
   1100 
// Readout thread: consumes completed sensor frames / reprocess operations
// queued by the configure thread and emits metadata frames and filled
// stream buffers. The in-flight queue is a fixed-size circular buffer
// (head == tail means empty; see readyForNextCapture for the full test).
EmulatedFakeCamera2::ReadoutThread::ReadoutThread(EmulatedFakeCamera2 *parent)
    : Thread(false),
      mParent(parent),
      mRunning(false),
      mActive(false),
      mRequestCount(0),
      mRequest(NULL),
      mBuffers(NULL) {
  mInFlightQueue = new InFlightQueue[kInFlightQueueSize];
  mInFlightHead = 0;
  mInFlightTail = 0;
}
   1113 
// Releases the circular in-flight queue allocated in the constructor.
EmulatedFakeCamera2::ReadoutThread::~ReadoutThread() {
  delete[] mInFlightQueue;
}
   1117 
   1118 status_t EmulatedFakeCamera2::ReadoutThread::readyToRun() {
   1119   Mutex::Autolock lock(mInputMutex);
   1120   ALOGV("Starting up ReadoutThread");
   1121   mRunning = true;
   1122   mInputSignal.signal();
   1123   return NO_ERROR;
   1124 }
   1125 
   1126 status_t EmulatedFakeCamera2::ReadoutThread::waitUntilRunning() {
   1127   Mutex::Autolock lock(mInputMutex);
   1128   if (!mRunning) {
   1129     ALOGV("Waiting for readout thread to start");
   1130     mInputSignal.wait(mInputMutex);
   1131   }
   1132   return OK;
   1133 }
   1134 
/*
 * Blocks until the in-flight queue has room for another operation.
 * Returns false on timeout or wait error, true once there is a free slot.
 * Note: `timeout` bounds each individual wait, not the total time spent,
 * since the loop restarts the relative wait after every wakeup.
 */
bool EmulatedFakeCamera2::ReadoutThread::waitForReady(nsecs_t timeout) {
  status_t res;
  Mutex::Autolock lock(mInputMutex);
  while (!readyForNextCapture()) {
    res = mReadySignal.waitRelative(mInputMutex, timeout);
    if (res == TIMED_OUT) return false;
    if (res != OK) {
      ALOGE("%s: Error waiting for ready: %s (%d)", __FUNCTION__,
            strerror(-res), res);
      return false;
    }
  }
  return true;
}
   1149 
   1150 bool EmulatedFakeCamera2::ReadoutThread::readyForNextCapture() {
   1151   return (mInFlightTail + 1) % kInFlightQueueSize != mInFlightHead;
   1152 }
   1153 
/*
 * Queues one capture (isCapture=true) or reprocess operation for readout,
 * taking ownership of `request` and `buffers`. Called by the configure
 * thread. If the queue is unexpectedly full (callers are supposed to gate
 * on waitForReady()), the operation is dropped and a fatal error raised.
 * Wakes the readout thread if it had gone idle.
 */
void EmulatedFakeCamera2::ReadoutThread::setNextOperation(
    bool isCapture, camera_metadata_t *request, Buffers *buffers) {
  Mutex::Autolock lock(mInputMutex);
  if (!readyForNextCapture()) {
    ALOGE("In flight queue full, dropping captures");
    mParent->signalError();
    return;
  }
  mInFlightQueue[mInFlightTail].isCapture = isCapture;
  mInFlightQueue[mInFlightTail].request = request;
  mInFlightQueue[mInFlightTail].buffers = buffers;
  mInFlightTail = (mInFlightTail + 1) % kInFlightQueueSize;
  mRequestCount++;

  // Wake threadLoop() if it was parked waiting for work.
  if (!mActive) {
    mActive = true;
    mInputSignal.signal();
  }
}
   1173 
   1174 bool EmulatedFakeCamera2::ReadoutThread::isStreamInUse(uint32_t id) {
   1175   // acquire in same order as threadLoop
   1176   Mutex::Autolock iLock(mInternalsMutex);
   1177   Mutex::Autolock lock(mInputMutex);
   1178 
   1179   size_t i = mInFlightHead;
   1180   while (i != mInFlightTail) {
   1181     for (size_t j = 0; j < mInFlightQueue[i].buffers->size(); j++) {
   1182       if ((*(mInFlightQueue[i].buffers))[j].streamId == (int)id) return true;
   1183     }
   1184     i = (i + 1) % kInFlightQueueSize;
   1185   }
   1186 
   1187   if (mBuffers != NULL) {
   1188     for (i = 0; i < mBuffers->size(); i++) {
   1189       if ((*mBuffers)[i].streamId == (int)id) return true;
   1190     }
   1191   }
   1192 
   1193   return false;
   1194 }
   1195 
   1196 int EmulatedFakeCamera2::ReadoutThread::getInProgressCount() {
   1197   Mutex::Autolock lock(mInputMutex);
   1198 
   1199   return mRequestCount;
   1200 }
   1201 
/*
 * One iteration of the readout state machine:
 *  1. If idle, wait (bounded) for work; pop the next operation off the
 *     in-flight queue when available.
 *  2. For captures, wait for the sensor to deliver the frame (reprocess
 *     operations take their timestamp from the request instead).
 *  3. Build the result metadata frame (if metadata mode is FULL), free the
 *     request, then hand each output buffer to its stream — except a BLOB
 *     buffer, which is sent to the JPEG compressor (which takes ownership
 *     of the whole buffer list and calls back via onJpegDone).
 * Returns false only on fatal errors (which also signal the parent).
 */
bool EmulatedFakeCamera2::ReadoutThread::threadLoop() {
  static const nsecs_t kWaitPerLoop = 10000000L;  // 10 ms
  status_t res;
  int32_t frameNumber;

  // Check if we're currently processing or just waiting
  {
    Mutex::Autolock lock(mInputMutex);
    if (!mActive) {
      // Inactive, keep waiting until we've been signaled
      res = mInputSignal.waitRelative(mInputMutex, kWaitPerLoop);
      if (res != NO_ERROR && res != TIMED_OUT) {
        ALOGE("%s: Error waiting for capture requests: %d", __FUNCTION__, res);
        mParent->signalError();
        return false;
      }
      if (!mActive) return true;
    }
    // Active, see if we need a new request
    if (mRequest == NULL) {
      if (mInFlightHead == mInFlightTail) {
        // Go inactive
        ALOGV("Waiting for sensor data");
        mActive = false;
        return true;
      } else {
        Mutex::Autolock iLock(mInternalsMutex);
        // A slot is about to free up; unblock waitForReady() callers.
        mReadySignal.signal();
        mIsCapture = mInFlightQueue[mInFlightHead].isCapture;
        mRequest = mInFlightQueue[mInFlightHead].request;
        mBuffers = mInFlightQueue[mInFlightHead].buffers;
        mInFlightQueue[mInFlightHead].request = NULL;
        mInFlightQueue[mInFlightHead].buffers = NULL;
        mInFlightHead = (mInFlightHead + 1) % kInFlightQueueSize;
        ALOGV("Ready to read out request %p, %zu buffers", mRequest,
              mBuffers->size());
      }
    }
  }

  // Active with request, wait on sensor to complete

  nsecs_t captureTime;

  if (mIsCapture) {
    bool gotFrame;
    gotFrame = mParent->mSensor->waitForNewFrame(kWaitPerLoop, &captureTime);

    // Frame not ready yet; keep the current request and retry next loop.
    if (!gotFrame) return true;
  }

  Mutex::Autolock iLock(mInternalsMutex);

  camera_metadata_entry_t entry;
  if (!mIsCapture) {
    // Reprocess: the timestamp comes from the original capture's request.
    res =
        find_camera_metadata_entry(mRequest, ANDROID_SENSOR_TIMESTAMP, &entry);
    if (res != NO_ERROR) {
      ALOGE("%s: error reading reprocessing timestamp: %s (%d)", __FUNCTION__,
            strerror(-res), res);
      mParent->signalError();
      return false;
    }
    captureTime = entry.data.i64[0];
  }

  res =
      find_camera_metadata_entry(mRequest, ANDROID_REQUEST_FRAME_COUNT, &entry);
  if (res != NO_ERROR) {
    ALOGE("%s: error reading frame count tag: %s (%d)", __FUNCTION__,
          strerror(-res), res);
    mParent->signalError();
    return false;
  }
  frameNumber = *entry.data.i32;

  res = find_camera_metadata_entry(mRequest, ANDROID_REQUEST_METADATA_MODE,
                                   &entry);
  if (res != NO_ERROR) {
    ALOGE("%s: error reading metadata mode tag: %s (%d)", __FUNCTION__,
          strerror(-res), res);
    mParent->signalError();
    return false;
  }

  // Got sensor data and request, construct frame and send it out
  ALOGV("Readout: Constructing metadata and frames for request %d",
        frameNumber);

  if (*entry.data.u8 == ANDROID_REQUEST_METADATA_MODE_FULL) {
    ALOGV("Readout: Metadata requested, constructing");

    camera_metadata_t *frame = NULL;

    size_t frame_entries = get_camera_metadata_entry_count(mRequest);
    size_t frame_data = get_camera_metadata_data_count(mRequest);

    // TODO: Dynamically calculate based on enabled statistics, etc
    frame_entries += 10;
    frame_data += 100;

    res = mParent->mFrameQueueDst->dequeue_frame(
        mParent->mFrameQueueDst, frame_entries, frame_data, &frame);

    if (res != NO_ERROR || frame == NULL) {
      ALOGE("%s: Unable to dequeue frame metadata buffer", __FUNCTION__);
      mParent->signalError();
      return false;
    }

    res = append_camera_metadata(frame, mRequest);
    if (res != NO_ERROR) {
      ALOGE("Unable to append request metadata");
    }

    if (mIsCapture) {
      add_camera_metadata_entry(frame, ANDROID_SENSOR_TIMESTAMP, &captureTime,
                                1);

      collectStatisticsMetadata(frame);
      // TODO: Collect all final values used from sensor in addition to
      // timestamp
    }

    ALOGV("Readout: Enqueue frame %d", frameNumber);
    mParent->mFrameQueueDst->enqueue_frame(mParent->mFrameQueueDst, frame);
  }
  ALOGV("Readout: Free request");
  res = mParent->mRequestQueueSrc->free_request(mParent->mRequestQueueSrc,
                                                mRequest);
  if (res != NO_ERROR) {
    ALOGE("%s: Unable to return request buffer to queue: %d", __FUNCTION__,
          res);
    mParent->signalError();
    return false;
  }
  mRequest = NULL;

  int compressedBufferIndex = -1;
  ALOGV("Readout: Processing %zu buffers", mBuffers->size());
  for (size_t i = 0; i < mBuffers->size(); i++) {
    const StreamBuffer &b = (*mBuffers)[i];
    ALOGV("Readout:    Buffer %zu: Stream %d, %d x %d, format 0x%x, stride %d",
          i, b.streamId, b.width, b.height, b.format, b.stride);
    // Negative stream IDs are reprocess inputs; they are released by the
    // JPEG compressor via onJpegInputDone, not enqueued here.
    if (b.streamId > 0) {
      if (b.format == HAL_PIXEL_FORMAT_BLOB) {
        // Assumes only one BLOB buffer type per capture
        compressedBufferIndex = i;
      } else {
        ALOGV("Readout:    Sending image buffer %zu (%p) to output stream %d",
              i, (void *)*(b.buffer), b.streamId);
        GrallocModule::getInstance().unlock(*(b.buffer));
        const Stream &s = mParent->getStreamInfo(b.streamId);
        res = s.ops->enqueue_buffer(s.ops, captureTime, b.buffer);
        if (res != OK) {
          ALOGE("Error enqueuing image buffer %p: %s (%d)", b.buffer,
                strerror(-res), res);
          mParent->signalError();
        }
      }
    }
  }

  if (compressedBufferIndex == -1) {
    delete mBuffers;
  } else {
    ALOGV("Readout:  Starting JPEG compression for buffer %d, stream %d",
          compressedBufferIndex, (*mBuffers)[compressedBufferIndex].streamId);
    mJpegTimestamp = captureTime;
    // Takes ownership of mBuffers
    mParent->mJpegCompressor->start(mBuffers, this);
  }
  mBuffers = NULL;

  Mutex::Autolock l(mInputMutex);
  mRequestCount--;
  ALOGV("Readout: Done with request %d", frameNumber);
  return true;
}
   1381 
   1382 void EmulatedFakeCamera2::ReadoutThread::onJpegDone(
   1383     const StreamBuffer &jpegBuffer, bool success) {
   1384   if (!success) {
   1385     ALOGE("%s: Error queueing compressed image buffer %p", __FUNCTION__,
   1386           jpegBuffer.buffer);
   1387     mParent->signalError();
   1388     return;
   1389   }
   1390 
   1391   // Write to JPEG output stream
   1392   ALOGV("%s: Compression complete, pushing to stream %d", __FUNCTION__,
   1393         jpegBuffer.streamId);
   1394 
   1395   GrallocModule::getInstance().unlock(*(jpegBuffer.buffer));
   1396   const Stream &s = mParent->getStreamInfo(jpegBuffer.streamId);
   1397   s.ops->enqueue_buffer(s.ops, mJpegTimestamp, jpegBuffer.buffer);
   1398 }
   1399 
   1400 void EmulatedFakeCamera2::ReadoutThread::onJpegInputDone(
   1401     const StreamBuffer &inputBuffer) {
   1402   status_t res;
   1403   GrallocModule::getInstance().unlock(*(inputBuffer.buffer));
   1404   const ReprocessStream &s =
   1405       mParent->getReprocessStreamInfo(-inputBuffer.streamId);
   1406   res = s.ops->release_buffer(s.ops, inputBuffer.buffer);
   1407   if (res != OK) {
   1408     ALOGE("Error releasing reprocess buffer %p: %s (%d)", inputBuffer.buffer,
   1409           strerror(-res), res);
   1410     mParent->signalError();
   1411   }
   1412 }
   1413 
   1414 status_t EmulatedFakeCamera2::ReadoutThread::collectStatisticsMetadata(
   1415     camera_metadata_t *frame) {
   1416   // Completely fake face rectangles, don't correspond to real faces in scene
   1417   ALOGV("Readout:    Collecting statistics metadata");
   1418 
   1419   status_t res;
   1420   camera_metadata_entry_t entry;
   1421   res = find_camera_metadata_entry(frame, ANDROID_STATISTICS_FACE_DETECT_MODE,
   1422                                    &entry);
   1423   if (res != OK) {
   1424     ALOGE("%s: Unable to find face detect mode!", __FUNCTION__);
   1425     return BAD_VALUE;
   1426   }
   1427 
   1428   if (entry.data.u8[0] == ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) return OK;
   1429 
   1430   // The coordinate system for the face regions is the raw sensor pixel
   1431   // coordinates. Here, we map from the scene coordinates (0-19 in both axis)
   1432   // to raw pixels, for the scene defined in fake-pipeline2/Scene.cpp. We
   1433   // approximately place two faces on top of the windows of the house. No
   1434   // actual faces exist there, but might one day. Note that this doesn't
   1435   // account for the offsets used to account for aspect ratio differences, so
   1436   // the rectangles don't line up quite right.
   1437   const size_t numFaces = 2;
   1438   int32_t rects[numFaces * 4] = {
   1439       static_cast<int32_t>(mParent->mSensorWidth * 10 / 20),
   1440       static_cast<int32_t>(mParent->mSensorHeight * 15 / 20),
   1441       static_cast<int32_t>(mParent->mSensorWidth * 12 / 20),
   1442       static_cast<int32_t>(mParent->mSensorHeight * 17 / 20),
   1443 
   1444       static_cast<int32_t>(mParent->mSensorWidth * 16 / 20),
   1445       static_cast<int32_t>(mParent->mSensorHeight * 15 / 20),
   1446       static_cast<int32_t>(mParent->mSensorWidth * 18 / 20),
   1447       static_cast<int32_t>(mParent->mSensorHeight * 17 / 20)};
   1448   // To simulate some kind of real detection going on, we jitter the rectangles
   1449   // on each frame by a few pixels in each dimension.
   1450   for (size_t i = 0; i < numFaces * 4; i++) {
   1451     rects[i] += (int32_t)(((float)rand() / RAND_MAX) * 6 - 3);
   1452   }
   1453   // The confidence scores (0-100) are similarly jittered.
   1454   uint8_t scores[numFaces] = {85, 95};
   1455   for (size_t i = 0; i < numFaces; i++) {
   1456     scores[i] += (int32_t)(((float)rand() / RAND_MAX) * 10 - 5);
   1457   }
   1458 
   1459   res = add_camera_metadata_entry(frame, ANDROID_STATISTICS_FACE_RECTANGLES,
   1460                                   rects, numFaces * 4);
   1461   if (res != OK) {
   1462     ALOGE("%s: Unable to add face rectangles!", __FUNCTION__);
   1463     return BAD_VALUE;
   1464   }
   1465 
   1466   res = add_camera_metadata_entry(frame, ANDROID_STATISTICS_FACE_SCORES, scores,
   1467                                   numFaces);
   1468   if (res != OK) {
   1469     ALOGE("%s: Unable to add face scores!", __FUNCTION__);
   1470     return BAD_VALUE;
   1471   }
   1472 
   1473   if (entry.data.u8[0] == ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE) return OK;
   1474 
   1475   // Advanced face detection options - add eye/mouth coordinates.  The
   1476   // coordinates in order are (leftEyeX, leftEyeY, rightEyeX, rightEyeY,
   1477   // mouthX, mouthY). The mapping is the same as the face rectangles.
   1478   int32_t features[numFaces * 6] = {
   1479       static_cast<int32_t>(mParent->mSensorWidth * 10.5 / 20),
   1480       static_cast<int32_t>(mParent->mSensorHeight * 16 / 20),
   1481       static_cast<int32_t>(mParent->mSensorWidth * 11.5 / 20),
   1482       static_cast<int32_t>(mParent->mSensorHeight * 16 / 20),
   1483       static_cast<int32_t>(mParent->mSensorWidth * 11 / 20),
   1484       static_cast<int32_t>(mParent->mSensorHeight * 16.5 / 20),
   1485 
   1486       static_cast<int32_t>(mParent->mSensorWidth * 16.5 / 20),
   1487       static_cast<int32_t>(mParent->mSensorHeight * 16 / 20),
   1488       static_cast<int32_t>(mParent->mSensorWidth * 17.5 / 20),
   1489       static_cast<int32_t>(mParent->mSensorHeight * 16 / 20),
   1490       static_cast<int32_t>(mParent->mSensorWidth * 17 / 20),
   1491       static_cast<int32_t>(mParent->mSensorHeight * 16.5 / 20),
   1492   };
   1493   // Jitter these a bit less than the rects
   1494   for (size_t i = 0; i < numFaces * 6; i++) {
   1495     features[i] += (int32_t)(((float)rand() / RAND_MAX) * 4 - 2);
   1496   }
   1497   // These are unique IDs that are used to identify each face while it's
   1498   // visible to the detector (if a face went away and came back, it'd get a
   1499   // new ID).
   1500   int32_t ids[numFaces] = {100, 200};
   1501 
   1502   res = add_camera_metadata_entry(frame, ANDROID_STATISTICS_FACE_LANDMARKS,
   1503                                   features, numFaces * 6);
   1504   if (res != OK) {
   1505     ALOGE("%s: Unable to add face landmarks!", __FUNCTION__);
   1506     return BAD_VALUE;
   1507   }
   1508 
   1509   res = add_camera_metadata_entry(frame, ANDROID_STATISTICS_FACE_IDS, ids,
   1510                                   numFaces);
   1511   if (res != OK) {
   1512     ALOGE("%s: Unable to add face scores!", __FUNCTION__);
   1513     return BAD_VALUE;
   1514   }
   1515 
   1516   return OK;
   1517 }
   1518 
// Control thread: simulates the 3A (AF/AE/AWB) state machines. All 3A
// state is initialized in readyToRun(); the constructor only records the
// parent and marks the thread as not yet running.
EmulatedFakeCamera2::ControlThread::ControlThread(EmulatedFakeCamera2 *parent)
    : Thread(false), mParent(parent) {
  mRunning = false;
}
   1523 
   1524 EmulatedFakeCamera2::ControlThread::~ControlThread() {}
   1525 
/*
 * Thread hook invoked once before the first threadLoop() iteration.
 * Initializes all simulated 3A state to its idle defaults (AUTO control
 * mode, AF auto, AE/AWB on, all states INACTIVE, no pending triggers) and
 * wakes anyone blocked in waitUntilRunning().
 */
status_t EmulatedFakeCamera2::ControlThread::readyToRun() {
  Mutex::Autolock lock(mInputMutex);

  ALOGV("Starting up ControlThread");
  mRunning = true;
  // No AF/precapture triggers pending at startup.
  mStartAf = false;
  mCancelAf = false;
  mStartPrecapture = false;

  mControlMode = ANDROID_CONTROL_MODE_AUTO;

  mEffectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
  mSceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;

  mAfMode = ANDROID_CONTROL_AF_MODE_AUTO;
  mAfModeChange = false;

  mAeMode = ANDROID_CONTROL_AE_MODE_ON;
  mAwbMode = ANDROID_CONTROL_AWB_MODE_AUTO;

  mAfTriggerId = 0;
  mPrecaptureTriggerId = 0;

  mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
  mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
  mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;

  mExposureTime = kNormalExposureTime;

  mInputSignal.signal();
  return NO_ERROR;
}
   1558 
   1559 status_t EmulatedFakeCamera2::ControlThread::waitUntilRunning() {
   1560   Mutex::Autolock lock(mInputMutex);
   1561   if (!mRunning) {
   1562     ALOGV("Waiting for control thread to start");
   1563     mInputSignal.wait(mInputMutex);
   1564   }
   1565   return OK;
   1566 }
   1567 
   1568 // Override android.control.* fields with 3A values before sending request to
   1569 // sensor
   1570 status_t EmulatedFakeCamera2::ControlThread::processRequest(
   1571     camera_metadata_t *request) {
   1572   Mutex::Autolock lock(mInputMutex);
   1573   // TODO: Add handling for all android.control.* fields here
   1574   camera_metadata_entry_t mode;
   1575   status_t res;
   1576 
   1577 #define READ_IF_OK(res, what, def) (((res) == OK) ? (what) : (uint8_t)(def))
   1578 
   1579   res = find_camera_metadata_entry(request, ANDROID_CONTROL_MODE, &mode);
   1580   mControlMode = READ_IF_OK(res, mode.data.u8[0], ANDROID_CONTROL_MODE_OFF);
   1581 
   1582   // disable all 3A
   1583   if (mControlMode == ANDROID_CONTROL_MODE_OFF) {
   1584     mEffectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
   1585 #if VSOC_PLATFORM_SDK_AFTER(K)
   1586     mSceneMode = ANDROID_CONTROL_SCENE_MODE_DISABLED;
   1587 #else
   1588     mSceneMode = ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED;
   1589 #endif
   1590     mAfMode = ANDROID_CONTROL_AF_MODE_OFF;
   1591     mAeLock = ANDROID_CONTROL_AE_LOCK_ON;
   1592     mAeMode = ANDROID_CONTROL_AE_MODE_OFF;
   1593     mAfModeChange = true;
   1594     mStartAf = false;
   1595     mCancelAf = true;
   1596     mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
   1597     mAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
   1598     return res;
   1599   }
   1600 
   1601   res = find_camera_metadata_entry(request, ANDROID_CONTROL_EFFECT_MODE, &mode);
   1602   mEffectMode =
   1603       READ_IF_OK(res, mode.data.u8[0], ANDROID_CONTROL_EFFECT_MODE_OFF);
   1604 
   1605   res = find_camera_metadata_entry(request, ANDROID_CONTROL_SCENE_MODE, &mode);
   1606 #if VSOC_PLATFORM_SDK_AFTER(K)
   1607   mSceneMode =
   1608       READ_IF_OK(res, mode.data.u8[0], ANDROID_CONTROL_SCENE_MODE_DISABLED);
   1609 #else
   1610   mSceneMode =
   1611       READ_IF_OK(res, mode.data.u8[0], ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED);
   1612 #endif
   1613 
   1614   res = find_camera_metadata_entry(request, ANDROID_CONTROL_AF_MODE, &mode);
   1615   if (mAfMode != mode.data.u8[0]) {
   1616     ALOGV("AF new mode: %d, old mode %d", mode.data.u8[0], mAfMode);
   1617     mAfMode = mode.data.u8[0];
   1618     mAfModeChange = true;
   1619     mStartAf = false;
   1620     mCancelAf = false;
   1621   }
   1622 
   1623   res = find_camera_metadata_entry(request, ANDROID_CONTROL_AE_MODE, &mode);
   1624   mAeMode = READ_IF_OK(res, mode.data.u8[0], ANDROID_CONTROL_AE_MODE_OFF);
   1625 
   1626   res = find_camera_metadata_entry(request, ANDROID_CONTROL_AE_LOCK, &mode);
   1627   uint8_t aeLockVal =
   1628       READ_IF_OK(res, mode.data.u8[0], ANDROID_CONTROL_AE_LOCK_ON);
   1629   bool aeLock = (aeLockVal == ANDROID_CONTROL_AE_LOCK_ON);
   1630   if (mAeLock && !aeLock) {
   1631     mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
   1632   }
   1633   mAeLock = aeLock;
   1634 
   1635   res = find_camera_metadata_entry(request, ANDROID_CONTROL_AWB_MODE, &mode);
   1636   mAwbMode = READ_IF_OK(res, mode.data.u8[0], ANDROID_CONTROL_AWB_MODE_OFF);
   1637 
   1638   // TODO: Override more control fields
   1639 
   1640   if (mAeMode != ANDROID_CONTROL_AE_MODE_OFF) {
   1641     camera_metadata_entry_t exposureTime;
   1642     res = find_camera_metadata_entry(request, ANDROID_SENSOR_EXPOSURE_TIME,
   1643                                      &exposureTime);
   1644     if (res == OK) {
   1645       exposureTime.data.i64[0] = mExposureTime;
   1646     }
   1647   }
   1648 
   1649 #undef READ_IF_OK
   1650 
   1651   return OK;
   1652 }
   1653 
   1654 status_t EmulatedFakeCamera2::ControlThread::triggerAction(uint32_t msgType,
   1655                                                            int32_t ext1,
   1656                                                            int32_t ext2) {
   1657   ALOGV("%s: Triggering %d (%d, %d)", __FUNCTION__, msgType, ext1, ext2);
   1658   Mutex::Autolock lock(mInputMutex);
   1659   switch (msgType) {
   1660     case CAMERA2_TRIGGER_AUTOFOCUS:
   1661       mAfTriggerId = ext1;
   1662       mStartAf = true;
   1663       mCancelAf = false;
   1664       break;
   1665     case CAMERA2_TRIGGER_CANCEL_AUTOFOCUS:
   1666       mAfTriggerId = ext1;
   1667       mStartAf = false;
   1668       mCancelAf = true;
   1669       break;
   1670     case CAMERA2_TRIGGER_PRECAPTURE_METERING:
   1671       mPrecaptureTriggerId = ext1;
   1672       mStartPrecapture = true;
   1673       break;
   1674     default:
   1675       ALOGE("%s: Unknown action triggered: %d (arguments %d %d)", __FUNCTION__,
   1676             msgType, ext1, ext2);
   1677       return BAD_VALUE;
   1678   }
   1679   return OK;
   1680 }
   1681 
   1682 const nsecs_t EmulatedFakeCamera2::ControlThread::kControlCycleDelay =
   1683     100 * MSEC;
   1684 const nsecs_t EmulatedFakeCamera2::ControlThread::kMinAfDuration = 500 * MSEC;
   1685 const nsecs_t EmulatedFakeCamera2::ControlThread::kMaxAfDuration = 900 * MSEC;
   1686 const float EmulatedFakeCamera2::ControlThread::kAfSuccessRate = 0.9;
   1687 // Once every 5 seconds
   1688 const float EmulatedFakeCamera2::ControlThread::kContinuousAfStartRate =
   1689     kControlCycleDelay / 5.0 * SEC;
   1690 const nsecs_t EmulatedFakeCamera2::ControlThread::kMinAeDuration = 500 * MSEC;
   1691 const nsecs_t EmulatedFakeCamera2::ControlThread::kMaxAeDuration = 2 * SEC;
   1692 const nsecs_t EmulatedFakeCamera2::ControlThread::kMinPrecaptureAeDuration =
   1693     100 * MSEC;
   1694 const nsecs_t EmulatedFakeCamera2::ControlThread::kMaxPrecaptureAeDuration =
   1695     400 * MSEC;
   1696 // Once every 3 seconds
   1697 const float EmulatedFakeCamera2::ControlThread::kAeScanStartRate =
   1698     kControlCycleDelay / 3000000000.0;
   1699 
   1700 const nsecs_t EmulatedFakeCamera2::ControlThread::kNormalExposureTime =
   1701     10 * MSEC;
   1702 const nsecs_t EmulatedFakeCamera2::ControlThread::kExposureJump = 2 * MSEC;
   1703 const nsecs_t EmulatedFakeCamera2::ControlThread::kMinExposureTime = 1 * MSEC;
   1704 
// One iteration of the fake 3A control loop: snapshot trigger flags and 3A
// state under mInputMutex, run the AF and AE state machines, then sleep
// until the next cycle (or sooner, if a scan finishes first). Always
// returns true so the thread keeps running.
bool EmulatedFakeCamera2::ControlThread::threadLoop() {
  bool afModeChange = false;
  bool afTriggered = false;
  bool afCancelled = false;
  uint8_t afState;
  uint8_t afMode;
  int32_t afTriggerId;
  bool precaptureTriggered = false;
  uint8_t aeState;
  uint8_t aeMode;
  bool aeLock;
  int32_t precaptureTriggerId;
  nsecs_t nextSleep = kControlCycleDelay;

  {
    // Snapshot all shared inputs under the lock; the state machines below
    // then operate on the local copies without holding mInputMutex.
    Mutex::Autolock lock(mInputMutex);
    if (mStartAf) {
      ALOGD("Starting AF trigger processing");
      afTriggered = true;
      mStartAf = false;  // consume the one-shot trigger
    } else if (mCancelAf) {
      ALOGD("Starting cancel AF trigger processing");
      afCancelled = true;
      mCancelAf = false;  // consume the one-shot trigger
    }
    afState = mAfState;
    afMode = mAfMode;
    afModeChange = mAfModeChange;
    mAfModeChange = false;

    afTriggerId = mAfTriggerId;

    if (mStartPrecapture) {
      ALOGD("Starting precapture trigger processing");
      precaptureTriggered = true;
      mStartPrecapture = false;  // consume the one-shot trigger
    }
    aeState = mAeState;
    aeMode = mAeMode;
    aeLock = mAeLock;
    precaptureTriggerId = mPrecaptureTriggerId;
  }

  // AF pipeline: reset on cancel/mode change, then trigger, then advance.
  if (afCancelled || afModeChange) {
    ALOGV("Resetting AF state due to cancel/mode change");
    afState = ANDROID_CONTROL_AF_STATE_INACTIVE;
    updateAfState(afState, afTriggerId);
    mAfScanDuration = 0;
    mLockAfterPassiveScan = false;
  }

  if (afTriggered) {
    afState = processAfTrigger(afMode, afState);
  }

  afState = maybeStartAfScan(afMode, afState);
  afState = updateAfScan(afMode, afState, &nextSleep);
  updateAfState(afState, afTriggerId);

  // AE pipeline mirrors the AF one: precapture trigger, then scan updates.
  if (precaptureTriggered) {
    aeState = processPrecaptureTrigger(aeMode, aeState);
  }

  aeState = maybeStartAeScan(aeMode, aeLock, aeState);
  aeState = updateAeScan(aeMode, aeLock, aeState, &nextSleep);
  updateAeState(aeState, precaptureTriggerId);

  // Sleep for the (possibly scan-shortened) cycle delay; nanosleep writes
  // the remaining time back into t, so the loop resumes after signals.
  // nextSleep never exceeds kControlCycleDelay (100ms), so tv_nsec < 1e9.
  int ret;
  timespec t;
  t.tv_sec = 0;
  t.tv_nsec = nextSleep;
  do {
    ret = nanosleep(&t, &t);
  } while (ret != 0);

  // Charge the elapsed cycle time against any in-progress scans.
  if (mAfScanDuration > 0) {
    mAfScanDuration -= nextSleep;
  }
  if (mAeScanDuration > 0) {
    mAeScanDuration -= nextSleep;
  }

  return true;
}
   1789 
   1790 int EmulatedFakeCamera2::ControlThread::processAfTrigger(uint8_t afMode,
   1791                                                          uint8_t afState) {
   1792   switch (afMode) {
   1793     case ANDROID_CONTROL_AF_MODE_OFF:
   1794     case ANDROID_CONTROL_AF_MODE_EDOF:
   1795       // Do nothing
   1796       break;
   1797     case ANDROID_CONTROL_AF_MODE_MACRO:
   1798     case ANDROID_CONTROL_AF_MODE_AUTO:
   1799       switch (afState) {
   1800         case ANDROID_CONTROL_AF_STATE_INACTIVE:
   1801         case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
   1802         case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
   1803           // Start new focusing cycle
   1804           mAfScanDuration =
   1805               ((double)rand() / RAND_MAX) * (kMaxAfDuration - kMinAfDuration) +
   1806               kMinAfDuration;
   1807           afState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
   1808           ALOGV("%s: AF scan start, duration %" PRId64 " ms", __FUNCTION__,
   1809                 mAfScanDuration / 1000000);
   1810           break;
   1811         case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN:
   1812           // Ignore new request, already scanning
   1813           break;
   1814         default:
   1815           ALOGE("Unexpected AF state in AUTO/MACRO AF mode: %d", afState);
   1816       }
   1817       break;
   1818     case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
   1819       switch (afState) {
   1820         // Picture mode waits for passive scan to complete
   1821         case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
   1822           mLockAfterPassiveScan = true;
   1823           break;
   1824         case ANDROID_CONTROL_AF_STATE_INACTIVE:
   1825           afState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
   1826           break;
   1827         case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
   1828           afState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
   1829           break;
   1830         case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
   1831         case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
   1832           // Must cancel to get out of these states
   1833           break;
   1834         default:
   1835           ALOGE("Unexpected AF state in CONTINUOUS_PICTURE AF mode: %d",
   1836                 afState);
   1837       }
   1838       break;
   1839     case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
   1840       switch (afState) {
   1841         // Video mode does not wait for passive scan to complete
   1842         case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
   1843         case ANDROID_CONTROL_AF_STATE_INACTIVE:
   1844           afState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
   1845           break;
   1846         case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
   1847           afState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
   1848           break;
   1849         case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
   1850         case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
   1851           // Must cancel to get out of these states
   1852           break;
   1853         default:
   1854           ALOGE("Unexpected AF state in CONTINUOUS_VIDEO AF mode: %d", afState);
   1855       }
   1856       break;
   1857     default:
   1858       break;
   1859   }
   1860   return afState;
   1861 }
   1862 
   1863 int EmulatedFakeCamera2::ControlThread::maybeStartAfScan(uint8_t afMode,
   1864                                                          uint8_t afState) {
   1865   if ((afMode == ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO ||
   1866        afMode == ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE) &&
   1867       (afState == ANDROID_CONTROL_AF_STATE_INACTIVE ||
   1868        afState == ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED)) {
   1869     bool startScan = ((double)rand() / RAND_MAX) < kContinuousAfStartRate;
   1870     if (startScan) {
   1871       // Start new passive focusing cycle
   1872       mAfScanDuration =
   1873           ((double)rand() / RAND_MAX) * (kMaxAfDuration - kMinAfDuration) +
   1874           kMinAfDuration;
   1875       afState = ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN;
   1876       ALOGV("%s: AF passive scan start, duration %" PRId64 " ms", __FUNCTION__,
   1877             mAfScanDuration / 1000000);
   1878     }
   1879   }
   1880   return afState;
   1881 }
   1882 
   1883 int EmulatedFakeCamera2::ControlThread::updateAfScan(uint8_t afMode,
   1884                                                      uint8_t afState,
   1885                                                      nsecs_t *maxSleep) {
   1886   if (!(afState == ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN ||
   1887         afState == ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN)) {
   1888     return afState;
   1889   }
   1890 
   1891   if (mAfScanDuration <= 0) {
   1892     ALOGV("%s: AF scan done", __FUNCTION__);
   1893     switch (afMode) {
   1894       case ANDROID_CONTROL_AF_MODE_MACRO:
   1895       case ANDROID_CONTROL_AF_MODE_AUTO: {
   1896         bool success = ((double)rand() / RAND_MAX) < kAfSuccessRate;
   1897         if (success) {
   1898           afState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
   1899         } else {
   1900           afState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
   1901         }
   1902         break;
   1903       }
   1904       case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
   1905         if (mLockAfterPassiveScan) {
   1906           afState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
   1907           mLockAfterPassiveScan = false;
   1908         } else {
   1909           afState = ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED;
   1910         }
   1911         break;
   1912       case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
   1913         afState = ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED;
   1914         break;
   1915       default:
   1916         ALOGE("Unexpected AF mode in scan state");
   1917     }
   1918   } else {
   1919     if (mAfScanDuration <= *maxSleep) {
   1920       *maxSleep = mAfScanDuration;
   1921     }
   1922   }
   1923   return afState;
   1924 }
   1925 
   1926 void EmulatedFakeCamera2::ControlThread::updateAfState(uint8_t newState,
   1927                                                        int32_t triggerId) {
   1928   Mutex::Autolock lock(mInputMutex);
   1929   if (mAfState != newState) {
   1930     ALOGV("%s: Autofocus state now %d, id %d", __FUNCTION__, newState,
   1931           triggerId);
   1932     mAfState = newState;
   1933     mParent->sendNotification(CAMERA2_MSG_AUTOFOCUS, newState, triggerId, 0);
   1934   }
   1935 }
   1936 
   1937 int EmulatedFakeCamera2::ControlThread::processPrecaptureTrigger(
   1938     uint8_t aeMode, uint8_t aeState) {
   1939   switch (aeMode) {
   1940     case ANDROID_CONTROL_AE_MODE_OFF:
   1941       // Don't do anything for these
   1942       return aeState;
   1943     case ANDROID_CONTROL_AE_MODE_ON:
   1944     case ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:
   1945     case ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH:
   1946     case ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE:
   1947       // Trigger a precapture cycle
   1948       aeState = ANDROID_CONTROL_AE_STATE_PRECAPTURE;
   1949       mAeScanDuration =
   1950           ((double)rand() / RAND_MAX) *
   1951               (kMaxPrecaptureAeDuration - kMinPrecaptureAeDuration) +
   1952           kMinPrecaptureAeDuration;
   1953       ALOGD("%s: AE precapture scan start, duration %" PRId64 " ms",
   1954             __FUNCTION__, mAeScanDuration / 1000000);
   1955   }
   1956   return aeState;
   1957 }
   1958 
   1959 int EmulatedFakeCamera2::ControlThread::maybeStartAeScan(uint8_t aeMode,
   1960                                                          bool aeLocked,
   1961                                                          uint8_t aeState) {
   1962   if (aeLocked) return aeState;
   1963   switch (aeMode) {
   1964     case ANDROID_CONTROL_AE_MODE_OFF:
   1965       break;
   1966     case ANDROID_CONTROL_AE_MODE_ON:
   1967     case ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:
   1968     case ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH:
   1969     case ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE: {
   1970       if (aeState != ANDROID_CONTROL_AE_STATE_INACTIVE &&
   1971           aeState != ANDROID_CONTROL_AE_STATE_CONVERGED)
   1972         break;
   1973 
   1974       bool startScan = ((double)rand() / RAND_MAX) < kAeScanStartRate;
   1975       if (startScan) {
   1976         mAeScanDuration =
   1977             ((double)rand() / RAND_MAX) * (kMaxAeDuration - kMinAeDuration) +
   1978             kMinAeDuration;
   1979         aeState = ANDROID_CONTROL_AE_STATE_SEARCHING;
   1980         ALOGV("%s: AE scan start, duration %" PRId64 " ms", __FUNCTION__,
   1981               mAeScanDuration / 1000000);
   1982       }
   1983     }
   1984   }
   1985 
   1986   return aeState;
   1987 }
   1988 
   1989 int EmulatedFakeCamera2::ControlThread::updateAeScan(uint8_t /*aeMode*/,
   1990                                                      bool aeLock,
   1991                                                      uint8_t aeState,
   1992                                                      nsecs_t *maxSleep) {
   1993   if (aeLock && aeState != ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
   1994     mAeScanDuration = 0;
   1995     aeState = ANDROID_CONTROL_AE_STATE_LOCKED;
   1996   } else if ((aeState == ANDROID_CONTROL_AE_STATE_SEARCHING) ||
   1997              (aeState == ANDROID_CONTROL_AE_STATE_PRECAPTURE)) {
   1998     if (mAeScanDuration <= 0) {
   1999       ALOGV("%s: AE scan done", __FUNCTION__);
   2000       aeState = aeLock ? ANDROID_CONTROL_AE_STATE_LOCKED
   2001                        : ANDROID_CONTROL_AE_STATE_CONVERGED;
   2002 
   2003       Mutex::Autolock lock(mInputMutex);
   2004       mExposureTime = kNormalExposureTime;
   2005     } else {
   2006       if (mAeScanDuration <= *maxSleep) {
   2007         *maxSleep = mAeScanDuration;
   2008       }
   2009 
   2010       int64_t exposureDelta =
   2011           ((double)rand() / RAND_MAX) * 2 * kExposureJump - kExposureJump;
   2012       Mutex::Autolock lock(mInputMutex);
   2013       mExposureTime = mExposureTime + exposureDelta;
   2014       if (mExposureTime < kMinExposureTime) mExposureTime = kMinExposureTime;
   2015     }
   2016   }
   2017 
   2018   return aeState;
   2019 }
   2020 
   2021 void EmulatedFakeCamera2::ControlThread::updateAeState(uint8_t newState,
   2022                                                        int32_t triggerId) {
   2023   Mutex::Autolock lock(mInputMutex);
   2024   if (mAeState != newState) {
   2025     ALOGV("%s: Autoexposure state now %d, id %d", __FUNCTION__, newState,
   2026           triggerId);
   2027     mAeState = newState;
   2028     mParent->sendNotification(CAMERA2_MSG_AUTOEXPOSURE, newState, triggerId, 0);
   2029   }
   2030 }
   2031 
   2032 /** Private methods */
   2033 
// Builds (or sizes) the static camera characteristics for this fake camera.
// Each ADD_OR_SIZE call delegates to addOrSize(): when sizeRequest is true,
// entryCount/dataCount are tallied and the metadata buffer is allocated at
// the end of this function; otherwise entries are presumably appended to
// the already-allocated *info (addOrSize's body is not visible here —
// TODO confirm). Returns OK, NO_MEMORY on allocation failure, or the first
// error from addOrSize.
status_t EmulatedFakeCamera2::constructStaticInfo(camera_metadata_t **info,
                                                  bool sizeRequest) const {
  size_t entryCount = 0;
  size_t dataCount = 0;
  status_t ret;

// Bail out with addOrSize's status on the first failure.
#define ADD_OR_SIZE(tag, data, count)                                          \
  if ((ret = addOrSize(*info, sizeRequest, &entryCount, &dataCount, tag, data, \
                       count)) != OK)                                          \
  return ret

  // android.lens

  // 5 cm min focus distance for back camera, infinity (fixed focus) for front
  const float minFocusDistance = mFacingBack ? 1.0 / 0.05 : 0.0;
  ADD_OR_SIZE(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE, &minFocusDistance, 1);
  // 5 m hyperfocal distance for back camera, infinity (fixed focus) for front
  // const float hyperFocalDistance = mFacingBack ? 1.0 / 5.0 : 0.0;
  // NOTE(review): hyperfocal distance reuses minFocusDistance; the dedicated
  // hyperFocalDistance constant above is commented out — presumably
  // intentional for the fake camera, verify.
  ADD_OR_SIZE(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, &minFocusDistance, 1);

  static const float focalLength = 3.30f;  // mm
  ADD_OR_SIZE(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS, &focalLength, 1);
  static const float aperture = 2.8f;
  ADD_OR_SIZE(ANDROID_LENS_INFO_AVAILABLE_APERTURES, &aperture, 1);
  static const float filterDensity = 0;
  ADD_OR_SIZE(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES, &filterDensity, 1);
  static const uint8_t availableOpticalStabilization =
      ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
  ADD_OR_SIZE(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
              &availableOpticalStabilization, 1);

  // Minimal 1x1 shading map: no lens shading correction data.
  static const int32_t lensShadingMapSize[] = {1, 1};
  ADD_OR_SIZE(ANDROID_LENS_INFO_SHADING_MAP_SIZE, lensShadingMapSize,
              sizeof(lensShadingMapSize) / sizeof(int32_t));

  int32_t lensFacing =
      mFacingBack ? ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
  ADD_OR_SIZE(ANDROID_LENS_FACING, &lensFacing, 1);

  // android.sensor
  // Ranges and constants come from the shared fake Sensor model.

  ADD_OR_SIZE(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
              Sensor::kExposureTimeRange, 2);

  ADD_OR_SIZE(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
              &Sensor::kFrameDurationRange[1], 1);

  ADD_OR_SIZE(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, Sensor::kSensitivityRange,
              sizeof(Sensor::kSensitivityRange) / sizeof(int32_t));

  ADD_OR_SIZE(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
              &Sensor::kColorFilterArrangement, 1);

  static const float sensorPhysicalSize[2] = {3.20f, 2.40f};  // mm
  ADD_OR_SIZE(ANDROID_SENSOR_INFO_PHYSICAL_SIZE, sensorPhysicalSize, 2);

  // Active array covers the full pixel array for this fake sensor.
  const int32_t pixelArray[] = {mSensorWidth, mSensorHeight};
  ADD_OR_SIZE(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, pixelArray, 2);

  ADD_OR_SIZE(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, pixelArray, 2);

  ADD_OR_SIZE(ANDROID_SENSOR_INFO_WHITE_LEVEL, &Sensor::kMaxRawValue, 1);

  // Same black level for all four Bayer channels.
  static const int32_t blackLevelPattern[4] = {
      static_cast<int32_t>(Sensor::kBlackLevel),
      static_cast<int32_t>(Sensor::kBlackLevel),
      static_cast<int32_t>(Sensor::kBlackLevel),
      static_cast<int32_t>(Sensor::kBlackLevel)};
  ADD_OR_SIZE(ANDROID_SENSOR_BLACK_LEVEL_PATTERN, blackLevelPattern,
              sizeof(blackLevelPattern) / sizeof(int32_t));

  // TODO: sensor color calibration fields

  // android.flash
  // The fake camera advertises no flash unit.
  static const uint8_t flashAvailable = 0;
  ADD_OR_SIZE(ANDROID_FLASH_INFO_AVAILABLE, &flashAvailable, 1);

  static const int64_t flashChargeDuration = 0;
  ADD_OR_SIZE(ANDROID_FLASH_INFO_CHARGE_DURATION, &flashChargeDuration, 1);

  // android.tonemap

  static const int32_t tonemapCurvePoints = 128;
  ADD_OR_SIZE(ANDROID_TONEMAP_MAX_CURVE_POINTS, &tonemapCurvePoints, 1);

  // android.scaler
  // Available sizes are built elsewhere from the sensor configuration.

  ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_FORMATS, kAvailableFormats,
              sizeof(kAvailableFormats) / sizeof(uint32_t));

  ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_RAW_SIZES, &mAvailableRawSizes.front(),
              mAvailableRawSizes.size());

  ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
              kAvailableRawMinDurations,
              sizeof(kAvailableRawMinDurations) / sizeof(uint64_t));

  ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
              &mAvailableProcessedSizes.front(),
              mAvailableProcessedSizes.size());

  ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
              kAvailableProcessedMinDurations,
              sizeof(kAvailableProcessedMinDurations) / sizeof(uint64_t));

  ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_JPEG_SIZES, &mAvailableJpegSizes.front(),
              mAvailableJpegSizes.size());

  ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
              kAvailableJpegMinDurations,
              sizeof(kAvailableJpegMinDurations) / sizeof(uint64_t));

  static const float maxZoom = 10;
  ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, &maxZoom, 1);

  // android.jpeg

  // Thumbnail sizes as (width, height) pairs; (0, 0) means "no thumbnail".
  static const int32_t jpegThumbnailSizes[] = {0, 0, 160, 120, 320, 240};
  ADD_OR_SIZE(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, jpegThumbnailSizes,
              sizeof(jpegThumbnailSizes) / sizeof(int32_t));

  static const int32_t jpegMaxSize = JpegCompressor::kMaxJpegSize;
  ADD_OR_SIZE(ANDROID_JPEG_MAX_SIZE, &jpegMaxSize, 1);

  // android.stats

  static const uint8_t availableFaceDetectModes[] = {
      ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
      ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE,
      ANDROID_STATISTICS_FACE_DETECT_MODE_FULL};

  ADD_OR_SIZE(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
              availableFaceDetectModes, sizeof(availableFaceDetectModes));

  static const int32_t maxFaceCount = 8;
  ADD_OR_SIZE(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, &maxFaceCount, 1);

  static const int32_t histogramSize = 64;
  ADD_OR_SIZE(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT, &histogramSize,
              1);

  static const int32_t maxHistogramCount = 1000;
  ADD_OR_SIZE(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT, &maxHistogramCount,
              1);

  static const int32_t sharpnessMapSize[2] = {64, 64};
  ADD_OR_SIZE(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, sharpnessMapSize,
              sizeof(sharpnessMapSize) / sizeof(int32_t));

  static const int32_t maxSharpnessMapValue = 1000;
  ADD_OR_SIZE(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
              &maxSharpnessMapValue, 1);

  // android.control

  // Scene-mode "disabled/unsupported" tag was renamed after SDK level K.
  static const uint8_t availableSceneModes[] = {
#if VSOC_PLATFORM_SDK_AFTER(K)
    ANDROID_CONTROL_SCENE_MODE_DISABLED
#else
    ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED
#endif
  };
  ADD_OR_SIZE(ANDROID_CONTROL_AVAILABLE_SCENE_MODES, availableSceneModes,
              sizeof(availableSceneModes));

  static const uint8_t availableEffects[] = {ANDROID_CONTROL_EFFECT_MODE_OFF};
  ADD_OR_SIZE(ANDROID_CONTROL_AVAILABLE_EFFECTS, availableEffects,
              sizeof(availableEffects));

  // No metering/focus region support.
  static const int32_t max3aRegions[] = {/*AE*/ 0, /*AWB*/ 0, /*AF*/ 0};
  ADD_OR_SIZE(ANDROID_CONTROL_MAX_REGIONS, max3aRegions,
              sizeof(max3aRegions) / sizeof(max3aRegions[0]));

  static const uint8_t availableAeModes[] = {ANDROID_CONTROL_AE_MODE_OFF,
                                             ANDROID_CONTROL_AE_MODE_ON};
  ADD_OR_SIZE(ANDROID_CONTROL_AE_AVAILABLE_MODES, availableAeModes,
              sizeof(availableAeModes));

  // Exposure compensation: 1/3 EV steps over [-9, 9] (+/- 3 EV).
  static const camera_metadata_rational exposureCompensationStep = {1, 3};
  ADD_OR_SIZE(ANDROID_CONTROL_AE_COMPENSATION_STEP, &exposureCompensationStep,
              1);

  int32_t exposureCompensationRange[] = {-9, 9};
  ADD_OR_SIZE(ANDROID_CONTROL_AE_COMPENSATION_RANGE, exposureCompensationRange,
              sizeof(exposureCompensationRange) / sizeof(int32_t));

  // FPS ranges as (min, max) pairs: [5, 30] and [15, 30].
  static const int32_t availableTargetFpsRanges[] = {5, 30, 15, 30};
  ADD_OR_SIZE(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
              availableTargetFpsRanges,
              sizeof(availableTargetFpsRanges) / sizeof(int32_t));

  static const uint8_t availableAntibandingModes[] = {
      ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,
      ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO};
  ADD_OR_SIZE(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
              availableAntibandingModes, sizeof(availableAntibandingModes));

  static const uint8_t availableAwbModes[] = {
      ANDROID_CONTROL_AWB_MODE_OFF,
      ANDROID_CONTROL_AWB_MODE_AUTO,
      ANDROID_CONTROL_AWB_MODE_INCANDESCENT,
      ANDROID_CONTROL_AWB_MODE_FLUORESCENT,
      ANDROID_CONTROL_AWB_MODE_DAYLIGHT,
      ANDROID_CONTROL_AWB_MODE_SHADE};
  ADD_OR_SIZE(ANDROID_CONTROL_AWB_AVAILABLE_MODES, availableAwbModes,
              sizeof(availableAwbModes));

  // Back camera has real AF modes; front camera is fixed-focus (AF off only).
  static const uint8_t availableAfModesBack[] = {
      ANDROID_CONTROL_AF_MODE_OFF, ANDROID_CONTROL_AF_MODE_AUTO,
      ANDROID_CONTROL_AF_MODE_MACRO, ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,
      ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE};

  static const uint8_t availableAfModesFront[] = {ANDROID_CONTROL_AF_MODE_OFF};

  if (mFacingBack) {
    ADD_OR_SIZE(ANDROID_CONTROL_AF_AVAILABLE_MODES, availableAfModesBack,
                sizeof(availableAfModesBack));
  } else {
    ADD_OR_SIZE(ANDROID_CONTROL_AF_AVAILABLE_MODES, availableAfModesFront,
                sizeof(availableAfModesFront));
  }

  static const uint8_t availableVstabModes[] = {
      ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
  ADD_OR_SIZE(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
              availableVstabModes, sizeof(availableVstabModes));

#undef ADD_OR_SIZE
  /** Allocate metadata if sizing */
  if (sizeRequest) {
    ALOGV(
        "Allocating %zu entries, %zu extra bytes for "
        "static camera info",
        entryCount, dataCount);
    *info = allocate_camera_metadata(entryCount, dataCount);
    if (*info == NULL) {
      ALOGE(
          "Unable to allocate camera static info"
          "(%zu entries, %zu bytes extra data)",
          entryCount, dataCount);
      return NO_MEMORY;
    }
  }
  return OK;
}
   2279 
   2280 status_t EmulatedFakeCamera2::constructDefaultRequest(
   2281     int request_template, camera_metadata_t **request, bool sizeRequest) const {
   2282   size_t entryCount = 0;
   2283   size_t dataCount = 0;
   2284   status_t ret;
   2285 
   2286 #define ADD_OR_SIZE(tag, data, count)                                       \
   2287   if ((ret = addOrSize(*request, sizeRequest, &entryCount, &dataCount, tag, \
   2288                        data, count)) != OK)                                 \
   2289   return ret
   2290 
   2291   /** android.request */
   2292 
   2293   static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
   2294   ADD_OR_SIZE(ANDROID_REQUEST_TYPE, &requestType, 1);
   2295 
   2296   static const uint8_t metadataMode = ANDROID_REQUEST_METADATA_MODE_FULL;
   2297   ADD_OR_SIZE(ANDROID_REQUEST_METADATA_MODE, &metadataMode, 1);
   2298 
   2299   static const int32_t id = 0;
   2300   ADD_OR_SIZE(ANDROID_REQUEST_ID, &id, 1);
   2301 
   2302   static const int32_t frameCount = 0;
   2303   ADD_OR_SIZE(ANDROID_REQUEST_FRAME_COUNT, &frameCount, 1);
   2304 
   2305   // OUTPUT_STREAMS set by user
   2306   entryCount += 1;
   2307   dataCount += 5;  // TODO: Should be maximum stream number
   2308 
   2309   /** android.lens */
   2310 
   2311   static const float focusDistance = 0;
   2312   ADD_OR_SIZE(ANDROID_LENS_FOCUS_DISTANCE, &focusDistance, 1);
   2313 
   2314   static const float aperture = 2.8f;
   2315   ADD_OR_SIZE(ANDROID_LENS_APERTURE, &aperture, 1);
   2316 
   2317   static const float focalLength = 5.0f;
   2318   ADD_OR_SIZE(ANDROID_LENS_FOCAL_LENGTH, &focalLength, 1);
   2319 
   2320   static const float filterDensity = 0;
   2321   ADD_OR_SIZE(ANDROID_LENS_FILTER_DENSITY, &filterDensity, 1);
   2322 
   2323   static const uint8_t opticalStabilizationMode =
   2324       ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
   2325   ADD_OR_SIZE(ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
   2326               &opticalStabilizationMode, 1);
   2327 
   2328   // FOCUS_RANGE set only in frame
   2329 
   2330   /** android.sensor */
   2331 
   2332   static const int64_t exposureTime = 10 * MSEC;
   2333   ADD_OR_SIZE(ANDROID_SENSOR_EXPOSURE_TIME, &exposureTime, 1);
   2334 
   2335   static const int64_t frameDuration = 33333333L;  // 1/30 s
   2336   ADD_OR_SIZE(ANDROID_SENSOR_FRAME_DURATION, &frameDuration, 1);
   2337 
   2338   static const int32_t sensitivity = 100;
   2339   ADD_OR_SIZE(ANDROID_SENSOR_SENSITIVITY, &sensitivity, 1);
   2340 
   2341   // TIMESTAMP set only in frame
   2342 
   2343   /** android.flash */
   2344 
   2345   static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
   2346   ADD_OR_SIZE(ANDROID_FLASH_MODE, &flashMode, 1);
   2347 
   2348   static const uint8_t flashPower = 10;
   2349   ADD_OR_SIZE(ANDROID_FLASH_FIRING_POWER, &flashPower, 1);
   2350 
   2351   static const int64_t firingTime = 0;
   2352   ADD_OR_SIZE(ANDROID_FLASH_FIRING_TIME, &firingTime, 1);
   2353 
   2354   /** Processing block modes */
   2355   uint8_t hotPixelMode = 0;
   2356   uint8_t demosaicMode = 0;
   2357   uint8_t noiseMode = 0;
   2358   uint8_t shadingMode = 0;
   2359   uint8_t colorMode = 0;
   2360   uint8_t tonemapMode = 0;
   2361   uint8_t edgeMode = 0;
   2362   switch (request_template) {
   2363     case CAMERA2_TEMPLATE_STILL_CAPTURE:
   2364       // fall-through
   2365     case CAMERA2_TEMPLATE_VIDEO_SNAPSHOT:
   2366       // fall-through
   2367     case CAMERA2_TEMPLATE_ZERO_SHUTTER_LAG:
   2368       hotPixelMode = ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY;
   2369       demosaicMode = ANDROID_DEMOSAIC_MODE_HIGH_QUALITY;
   2370       noiseMode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
   2371       shadingMode = ANDROID_SHADING_MODE_HIGH_QUALITY;
   2372       colorMode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY;
   2373       tonemapMode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
   2374       edgeMode = ANDROID_EDGE_MODE_HIGH_QUALITY;
   2375       break;
   2376     case CAMERA2_TEMPLATE_PREVIEW:
   2377       // fall-through
   2378     case CAMERA2_TEMPLATE_VIDEO_RECORD:
   2379       // fall-through
   2380     default:
   2381       hotPixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
   2382       demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
   2383       noiseMode = ANDROID_NOISE_REDUCTION_MODE_FAST;
   2384       shadingMode = ANDROID_SHADING_MODE_FAST;
   2385       colorMode = ANDROID_COLOR_CORRECTION_MODE_FAST;
   2386       tonemapMode = ANDROID_TONEMAP_MODE_FAST;
   2387       edgeMode = ANDROID_EDGE_MODE_FAST;
   2388       break;
   2389   }
   2390   ADD_OR_SIZE(ANDROID_HOT_PIXEL_MODE, &hotPixelMode, 1);
   2391   ADD_OR_SIZE(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
   2392   ADD_OR_SIZE(ANDROID_NOISE_REDUCTION_MODE, &noiseMode, 1);
   2393   ADD_OR_SIZE(ANDROID_SHADING_MODE, &shadingMode, 1);
   2394   ADD_OR_SIZE(ANDROID_COLOR_CORRECTION_MODE, &colorMode, 1);
   2395   ADD_OR_SIZE(ANDROID_TONEMAP_MODE, &tonemapMode, 1);
   2396   ADD_OR_SIZE(ANDROID_EDGE_MODE, &edgeMode, 1);
   2397 
   2398   /** android.noise */
   2399   static const uint8_t noiseStrength = 5;
   2400   ADD_OR_SIZE(ANDROID_NOISE_REDUCTION_STRENGTH, &noiseStrength, 1);
   2401 
   2402   /** android.color */
   2403   static const float colorTransform[9] = {1.0f, 0.f, 0.f, 0.f, 1.f,
   2404                                           0.f,  0.f, 0.f, 1.f};
   2405   ADD_OR_SIZE(ANDROID_COLOR_CORRECTION_TRANSFORM, colorTransform, 9);
   2406 
   2407   /** android.tonemap */
   2408   static const float tonemapCurve[4] = {0.f, 0.f, 1.f, 1.f};
   2409   ADD_OR_SIZE(ANDROID_TONEMAP_CURVE_RED, tonemapCurve, 4);
   2410   ADD_OR_SIZE(ANDROID_TONEMAP_CURVE_GREEN, tonemapCurve, 4);
   2411   ADD_OR_SIZE(ANDROID_TONEMAP_CURVE_BLUE, tonemapCurve, 4);
   2412 
   2413   /** android.edge */
   2414   static const uint8_t edgeStrength = 5;
   2415   ADD_OR_SIZE(ANDROID_EDGE_STRENGTH, &edgeStrength, 1);
   2416 
   2417   /** android.scaler */
   2418   static const int32_t cropRegion[3] = {0, 0,
   2419                                         static_cast<int32_t>(mSensorWidth)};
   2420   ADD_OR_SIZE(ANDROID_SCALER_CROP_REGION, cropRegion, 3);
   2421 
   2422   /** android.jpeg */
   2423   static const int32_t jpegQuality = 80;
   2424   ADD_OR_SIZE(ANDROID_JPEG_QUALITY, &jpegQuality, 1);
   2425 
   2426   static const int32_t thumbnailSize[2] = {640, 480};
   2427   ADD_OR_SIZE(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSize, 2);
   2428 
   2429   static const int32_t thumbnailQuality = 80;
   2430   ADD_OR_SIZE(ANDROID_JPEG_THUMBNAIL_QUALITY, &thumbnailQuality, 1);
   2431 
   2432   static const double gpsCoordinates[2] = {0, 0};
   2433   ADD_OR_SIZE(ANDROID_JPEG_GPS_COORDINATES, gpsCoordinates, 2);
   2434 
   2435   static const uint8_t gpsProcessingMethod[32] = "None";
   2436   ADD_OR_SIZE(ANDROID_JPEG_GPS_PROCESSING_METHOD, gpsProcessingMethod, 32);
   2437 
   2438   static const int64_t gpsTimestamp = 0;
   2439   ADD_OR_SIZE(ANDROID_JPEG_GPS_TIMESTAMP, &gpsTimestamp, 1);
   2440 
   2441   static const int32_t jpegOrientation = 0;
   2442   ADD_OR_SIZE(ANDROID_JPEG_ORIENTATION, &jpegOrientation, 1);
   2443 
   2444   /** android.stats */
   2445 
   2446   static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
   2447   ADD_OR_SIZE(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
   2448 
   2449   static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
   2450   ADD_OR_SIZE(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
   2451 
   2452   static const uint8_t sharpnessMapMode =
   2453       ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
   2454   ADD_OR_SIZE(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
   2455 
   2456   // faceRectangles, faceScores, faceLandmarks, faceIds, histogram,
   2457   // sharpnessMap only in frames
   2458 
   2459   /** android.control */
   2460 
   2461   uint8_t controlIntent = 0;
   2462   switch (request_template) {
   2463     case CAMERA2_TEMPLATE_PREVIEW:
   2464       controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
   2465       break;
   2466     case CAMERA2_TEMPLATE_STILL_CAPTURE:
   2467       controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
   2468       break;
   2469     case CAMERA2_TEMPLATE_VIDEO_RECORD:
   2470       controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
   2471       break;
   2472     case CAMERA2_TEMPLATE_VIDEO_SNAPSHOT:
   2473       controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
   2474       break;
   2475     case CAMERA2_TEMPLATE_ZERO_SHUTTER_LAG:
   2476       controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
   2477       break;
   2478     default:
   2479       controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
   2480       break;
   2481   }
   2482   ADD_OR_SIZE(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
   2483 
   2484   static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
   2485   ADD_OR_SIZE(ANDROID_CONTROL_MODE, &controlMode, 1);
   2486 
   2487   static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
   2488   ADD_OR_SIZE(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
   2489 
   2490   static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
   2491   ADD_OR_SIZE(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
   2492 
   2493   static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
   2494   ADD_OR_SIZE(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
   2495 
   2496   static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
   2497   ADD_OR_SIZE(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
   2498 
   2499   static const int32_t controlRegions[5] = {
   2500       0, 0, static_cast<int32_t>(mSensorWidth),
   2501       static_cast<int32_t>(mSensorHeight), 1000};
   2502   ADD_OR_SIZE(ANDROID_CONTROL_AE_REGIONS, controlRegions, 5);
   2503 
   2504   static const int32_t aeExpCompensation = 0;
   2505   ADD_OR_SIZE(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &aeExpCompensation, 1);
   2506 
   2507   static const int32_t aeTargetFpsRange[2] = {10, 30};
   2508   ADD_OR_SIZE(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, aeTargetFpsRange, 2);
   2509 
   2510   static const uint8_t aeAntibandingMode =
   2511       ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
   2512   ADD_OR_SIZE(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &aeAntibandingMode, 1);
   2513 
   2514   static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
   2515   ADD_OR_SIZE(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
   2516 
   2517   static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
   2518   ADD_OR_SIZE(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
   2519 
   2520   ADD_OR_SIZE(ANDROID_CONTROL_AWB_REGIONS, controlRegions, 5);
   2521 
   2522   uint8_t afMode = 0;
   2523   switch (request_template) {
   2524     case CAMERA2_TEMPLATE_PREVIEW:
   2525       afMode = ANDROID_CONTROL_AF_MODE_AUTO;
   2526       break;
   2527     case CAMERA2_TEMPLATE_STILL_CAPTURE:
   2528       afMode = ANDROID_CONTROL_AF_MODE_AUTO;
   2529       break;
   2530     case CAMERA2_TEMPLATE_VIDEO_RECORD:
   2531       afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
   2532       break;
   2533     case CAMERA2_TEMPLATE_VIDEO_SNAPSHOT:
   2534       afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
   2535       break;
   2536     case CAMERA2_TEMPLATE_ZERO_SHUTTER_LAG:
   2537       afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
   2538       break;
   2539     default:
   2540       afMode = ANDROID_CONTROL_AF_MODE_AUTO;
   2541       break;
   2542   }
   2543   ADD_OR_SIZE(ANDROID_CONTROL_AF_MODE, &afMode, 1);
   2544 
   2545   ADD_OR_SIZE(ANDROID_CONTROL_AF_REGIONS, controlRegions, 5);
   2546 
   2547   static const uint8_t vstabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
   2548   ADD_OR_SIZE(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vstabMode, 1);
   2549 
   2550   // aeState, awbState, afState only in frame
   2551 
   2552   /** Allocate metadata if sizing */
   2553   if (sizeRequest) {
   2554     ALOGV(
   2555         "Allocating %zu entries, %zu extra bytes for "
   2556         "request template type %d",
   2557         entryCount, dataCount, request_template);
   2558     *request = allocate_camera_metadata(entryCount, dataCount);
   2559     if (*request == NULL) {
   2560       ALOGE(
   2561           "Unable to allocate new request template type %d "
   2562           "(%zu entries, %zu bytes extra data)",
   2563           request_template, entryCount, dataCount);
   2564       return NO_MEMORY;
   2565     }
   2566   }
   2567   return OK;
   2568 #undef ADD_OR_SIZE
   2569 }
   2570 
   2571 status_t EmulatedFakeCamera2::addOrSize(camera_metadata_t *request,
   2572                                         bool sizeRequest, size_t *entryCount,
   2573                                         size_t *dataCount, uint32_t tag,
   2574                                         const void *entryData,
   2575                                         size_t entryDataCount) {
   2576   if (!sizeRequest) {
   2577     return add_camera_metadata_entry(request, tag, entryData, entryDataCount);
   2578   } else {
   2579     int type = get_camera_metadata_tag_type(tag);
   2580     if (type < 0) return BAD_VALUE;
   2581     (*entryCount)++;
   2582     (*dataCount) +=
   2583         calculate_camera_metadata_entry_data_size(type, entryDataCount);
   2584     return OK;
   2585   }
   2586 }
   2587 
   2588 bool EmulatedFakeCamera2::isStreamInUse(uint32_t id) {
   2589   // Assumes mMutex is locked; otherwise new requests could enter
   2590   // configureThread while readoutThread is being checked
   2591 
   2592   // Order of isStreamInUse calls matters
   2593   if (mConfigureThread->isStreamInUse(id) ||
   2594       mReadoutThread->isStreamInUse(id) || mJpegCompressor->isStreamInUse(id)) {
   2595     ALOGE("%s: Stream %d is in use in active requests!", __FUNCTION__, id);
   2596     return true;
   2597   }
   2598   return false;
   2599 }
   2600 
// Reprocess-stream usage tracking is not implemented; always reports the
// stream as free, so callers may tear reprocess streams down at any time.
bool EmulatedFakeCamera2::isReprocessStreamInUse(uint32_t /*id*/) {
  // TODO: implement
  return false;
}
   2605 
// Returns the stored configuration for output stream |streamId|.
// NOTE(review): the returned reference points into mStreams, but the lock is
// released on return — it is only safe while the stream is not concurrently
// removed; confirm callers respect this. Also assumes |streamId| is a
// registered stream (KeyedVector::valueFor on a missing key is not a safe
// lookup) — TODO confirm callers validate the ID first.
const Stream &EmulatedFakeCamera2::getStreamInfo(uint32_t streamId) {
  Mutex::Autolock lock(mMutex);  // guards mStreams against concurrent mutation

  return mStreams.valueFor(streamId);
}
   2611 
// Returns the stored configuration for reprocess stream |streamId|.
// NOTE(review): same lifetime caveat as getStreamInfo() — the reference into
// mReprocessStreams outlives the lock; confirm callers do not race with
// stream removal, and that |streamId| is always a registered reprocess
// stream before lookup.
const ReprocessStream &EmulatedFakeCamera2::getReprocessStreamInfo(
    uint32_t streamId) {
  Mutex::Autolock lock(mMutex);  // guards mReprocessStreams

  return mReprocessStreams.valueFor(streamId);
}
   2618 
   2619 }; /* namespace android */
   2620