// (Removed stray code-browser navigation residue: "Home | History | Annotate | Download | only in camera")
      1 /*
      2  * Copyright (C) 2012 The Android Open Source Project
      3  *
      4  * Licensed under the Apache License, Version 2.0 (the "License");
      5  * you may not use this file except in compliance with the License.
      6  * You may obtain a copy of the License at
      7  *
      8  *      http://www.apache.org/licenses/LICENSE-2.0
      9  *
     10  * Unless required by applicable law or agreed to in writing, software
     11  * distributed under the License is distributed on an "AS IS" BASIS,
     12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     13  * See the License for the specific language governing permissions and
     14  * limitations under the License.
     15  */
     16 
     17 /*
     18  * Contains implementation of a class EmulatedFakeCamera2 that encapsulates
     19  * functionality of an advanced fake camera.
     20  */
     21 
     22 #include <inttypes.h>
     23 
     24 //#define LOG_NDEBUG 0
     25 #define LOG_TAG "EmulatedCamera_FakeCamera2"
     26 #include <utils/Log.h>
     27 
     28 #include "EmulatedFakeCamera2.h"
     29 #include "EmulatedCameraFactory.h"
     30 #include "GrallocModule.h"
     31 
     32 #define ERROR_CAMERA_NOT_PRESENT (-EPIPE)
     33 
     34 #define CAMERA2_EXT_TRIGGER_TESTING_DISCONNECT 0xFFFFFFFF
     35 
     36 namespace android {
     37 
     38 const int64_t USEC = 1000LL;
     39 const int64_t MSEC = USEC * 1000LL;
     40 const int64_t SEC = MSEC * 1000LL;
     41 
// Pixel formats this fake camera advertises to clients.
const uint32_t EmulatedFakeCamera2::kAvailableFormats[4] = {
        HAL_PIXEL_FORMAT_RAW16,
        HAL_PIXEL_FORMAT_BLOB,
        HAL_PIXEL_FORMAT_RGBA_8888,
        //        HAL_PIXEL_FORMAT_YV12,
        HAL_PIXEL_FORMAT_YCrCb_420_SP
};

// Supported RAW output sizes, stored as flat (width, height) pairs.
const uint32_t EmulatedFakeCamera2::kAvailableRawSizes[2] = {
    640, 480
    //    mSensorWidth, mSensorHeight
};

// Minimum frame duration for RAW output, in ns (sensor's fastest frame).
const uint64_t EmulatedFakeCamera2::kAvailableRawMinDurations[1] = {
    static_cast<uint64_t>(Sensor::kFrameDurationRange[0])
};

// Processed (YUV/RGB) output sizes for the back-facing camera,
// as flat (width, height) pairs.
const uint32_t EmulatedFakeCamera2::kAvailableProcessedSizesBack[4] = {
    640, 480, 320, 240
    //    mSensorWidth, mSensorHeight
};

// Processed (YUV/RGB) output sizes for the front-facing camera,
// as flat (width, height) pairs.
const uint32_t EmulatedFakeCamera2::kAvailableProcessedSizesFront[4] = {
    320, 240, 160, 120
    //    mSensorWidth, mSensorHeight
};

// Minimum frame duration for processed output, in ns.
const uint64_t EmulatedFakeCamera2::kAvailableProcessedMinDurations[1] = {
    static_cast<uint64_t>(Sensor::kFrameDurationRange[0])
};

// JPEG (BLOB) output sizes for the back-facing camera, as one (width, height) pair.
const uint32_t EmulatedFakeCamera2::kAvailableJpegSizesBack[2] = {
    640, 480
    //    mSensorWidth, mSensorHeight
};

// JPEG (BLOB) output sizes for the front-facing camera, as one (width, height) pair.
const uint32_t EmulatedFakeCamera2::kAvailableJpegSizesFront[2] = {
    320, 240
    //    mSensorWidth, mSensorHeight
};


// Minimum frame duration for JPEG output, in ns.
const uint64_t EmulatedFakeCamera2::kAvailableJpegMinDurations[1] = {
    static_cast<uint64_t>(Sensor::kFrameDurationRange[0])
};
     88 
     89 EmulatedFakeCamera2::EmulatedFakeCamera2(int cameraId,
     90         bool facingBack,
     91         struct hw_module_t* module)
     92         : EmulatedCamera2(cameraId,module),
     93           mFacingBack(facingBack),
     94           mIsConnected(false)
     95 {
     96     ALOGD("Constructing emulated fake camera 2 facing %s",
     97             facingBack ? "back" : "front");
     98 }
     99 
    100 EmulatedFakeCamera2::~EmulatedFakeCamera2() {
    101     if (mCameraInfo != NULL) {
    102         free_camera_metadata(mCameraInfo);
    103     }
    104 }
    105 
    106 /****************************************************************************
    107  * Public API overrides
    108  ***************************************************************************/
    109 
    110 status_t EmulatedFakeCamera2::Initialize() {
    111     status_t res;
    112 
    113     // Find max width/height
    114     int32_t width = 0, height = 0;
    115     size_t rawSizeCount = sizeof(kAvailableRawSizes)/sizeof(kAvailableRawSizes[0]);
    116     for (size_t index = 0; index + 1 < rawSizeCount; index += 2) {
    117         if (width <= kAvailableRawSizes[index] &&
    118             height <= kAvailableRawSizes[index+1]) {
    119             width = kAvailableRawSizes[index];
    120             height = kAvailableRawSizes[index+1];
    121         }
    122     }
    123 
    124     if (width < 640 || height < 480) {
    125         width = 640;
    126         height = 480;
    127     }
    128     mSensorWidth = width;
    129     mSensorHeight = height;
    130 
    131     res = constructStaticInfo(&mCameraInfo, true);
    132     if (res != OK) {
    133         ALOGE("%s: Unable to allocate static info: %s (%d)",
    134                 __FUNCTION__, strerror(-res), res);
    135         return res;
    136     }
    137     res = constructStaticInfo(&mCameraInfo, false);
    138     if (res != OK) {
    139         ALOGE("%s: Unable to fill in static info: %s (%d)",
    140                 __FUNCTION__, strerror(-res), res);
    141         return res;
    142     }
    143     if (res != OK) return res;
    144 
    145     mNextStreamId = 1;
    146     mNextReprocessStreamId = 1;
    147     mRawStreamCount = 0;
    148     mProcessedStreamCount = 0;
    149     mJpegStreamCount = 0;
    150     mReprocessStreamCount = 0;
    151 
    152     return NO_ERROR;
    153 }
    154 
    155 /****************************************************************************
    156  * Camera module API overrides
    157  ***************************************************************************/
    158 
/*
 * Opens the camera device: spins up the sensor and the three pipeline
 * threads (configure, readout, control), then delegates to the base class
 * to expose the hw_device_t. Fails with -ENODEV if the camera is currently
 * "unplugged" (hotplug simulation).
 */
status_t EmulatedFakeCamera2::connectCamera(hw_device_t** device) {
    status_t res;
    ALOGV("%s", __FUNCTION__);

    {
        // Only hold the lock for the presence check; the rest of setup does
        // not touch state guarded by mMutex.
        Mutex::Autolock l(mMutex);
        if (!mStatusPresent) {
            ALOGE("%s: Camera ID %d is unplugged", __FUNCTION__,
                  mCameraID);
            return -ENODEV;
        }
    }

    // Fresh pipeline objects for each connection; the old ones (if any)
    // are reference-counted and released when these assignments drop them.
    mConfigureThread = new ConfigureThread(this);
    mReadoutThread = new ReadoutThread(this);
    mControlThread = new ControlThread(this);
    mSensor = new Sensor(mSensorWidth, mSensorHeight);
    mJpegCompressor = new JpegCompressor();

    // Stream ids restart at 1 on every connect.
    mNextStreamId = 1;
    mNextReprocessStreamId = 1;

    // The sensor must be running before the threads that consume its frames.
    res = mSensor->startUp();
    if (res != NO_ERROR) return res;

    res = mConfigureThread->run("EmulatedFakeCamera2::configureThread");
    if (res != NO_ERROR) return res;

    res = mReadoutThread->run("EmulatedFakeCamera2::readoutThread");
    if (res != NO_ERROR) return res;

    res = mControlThread->run("EmulatedFakeCamera2::controlThread");
    if (res != NO_ERROR) return res;

    status_t ret = EmulatedCamera2::connectCamera(device);

    // Mark connected only once the base class has handed out the device.
    if (ret >= 0) {
        mIsConnected = true;
    }

    return ret;
}
    201 
    202 status_t EmulatedFakeCamera2::plugCamera() {
    203     {
    204         Mutex::Autolock l(mMutex);
    205 
    206         if (!mStatusPresent) {
    207             ALOGI("%s: Plugged back in", __FUNCTION__);
    208             mStatusPresent = true;
    209         }
    210     }
    211 
    212     return NO_ERROR;
    213 }
    214 
    215 status_t EmulatedFakeCamera2::unplugCamera() {
    216     {
    217         Mutex::Autolock l(mMutex);
    218 
    219         if (mStatusPresent) {
    220             ALOGI("%s: Unplugged camera", __FUNCTION__);
    221             mStatusPresent = false;
    222         }
    223     }
    224 
    225     return closeCamera();
    226 }
    227 
    228 camera_device_status_t EmulatedFakeCamera2::getHotplugStatus() {
    229     Mutex::Autolock l(mMutex);
    230     return mStatusPresent ?
    231         CAMERA_DEVICE_STATUS_PRESENT :
    232         CAMERA_DEVICE_STATUS_NOT_PRESENT;
    233 }
    234 
    235 
    236 
    237 status_t EmulatedFakeCamera2::closeCamera() {
    238     {
    239         Mutex::Autolock l(mMutex);
    240 
    241         status_t res;
    242         ALOGV("%s", __FUNCTION__);
    243 
    244         if (!mIsConnected) {
    245             return NO_ERROR;
    246         }
    247 
    248         res = mSensor->shutDown();
    249         if (res != NO_ERROR) {
    250             ALOGE("%s: Unable to shut down sensor: %d", __FUNCTION__, res);
    251             return res;
    252         }
    253 
    254         mConfigureThread->requestExit();
    255         mReadoutThread->requestExit();
    256         mControlThread->requestExit();
    257         mJpegCompressor->cancel();
    258     }
    259 
    260     // give up the lock since we will now block and the threads
    261     // can call back into this object
    262     mConfigureThread->join();
    263     mReadoutThread->join();
    264     mControlThread->join();
    265 
    266     ALOGV("%s exit", __FUNCTION__);
    267 
    268     {
    269         Mutex::Autolock l(mMutex);
    270         mIsConnected = false;
    271     }
    272 
    273     return NO_ERROR;
    274 }
    275 
    276 status_t EmulatedFakeCamera2::getCameraInfo(struct camera_info *info) {
    277     info->facing = mFacingBack ? CAMERA_FACING_BACK : CAMERA_FACING_FRONT;
    278     info->orientation = gEmulatedCameraFactory.getFakeCameraOrientation();
    279     return EmulatedCamera2::getCameraInfo(info);
    280 }
    281 
    282 /****************************************************************************
    283  * Camera device API overrides
    284  ***************************************************************************/
    285 
    286 /** Request input queue */
    287 
    288 int EmulatedFakeCamera2::requestQueueNotify() {
    289     ALOGV("Request queue notification received");
    290 
    291     ALOG_ASSERT(mRequestQueueSrc != NULL,
    292             "%s: Request queue src not set, but received queue notification!",
    293             __FUNCTION__);
    294     ALOG_ASSERT(mFrameQueueDst != NULL,
    295             "%s: Request queue src not set, but received queue notification!",
    296             __FUNCTION__);
    297     ALOG_ASSERT(mStreams.size() != 0,
    298             "%s: No streams allocated, but received queue notification!",
    299             __FUNCTION__);
    300     return mConfigureThread->newRequestAvailable();
    301 }
    302 
    303 int EmulatedFakeCamera2::getInProgressCount() {
    304     Mutex::Autolock l(mMutex);
    305 
    306     if (!mStatusPresent) {
    307         ALOGW("%s: Camera was physically disconnected", __FUNCTION__);
    308         return ERROR_CAMERA_NOT_PRESENT;
    309     }
    310 
    311     int requestCount = 0;
    312     requestCount += mConfigureThread->getInProgressCount();
    313     requestCount += mReadoutThread->getInProgressCount();
    314     requestCount += mJpegCompressor->isBusy() ? 1 : 0;
    315 
    316     return requestCount;
    317 }
    318 
    319 int EmulatedFakeCamera2::constructDefaultRequest(
    320         int request_template,
    321         camera_metadata_t **request) {
    322 
    323     if (request == NULL) return BAD_VALUE;
    324     if (request_template < 0 || request_template >= CAMERA2_TEMPLATE_COUNT) {
    325         return BAD_VALUE;
    326     }
    327 
    328     {
    329         Mutex::Autolock l(mMutex);
    330         if (!mStatusPresent) {
    331             ALOGW("%s: Camera was physically disconnected", __FUNCTION__);
    332             return ERROR_CAMERA_NOT_PRESENT;
    333         }
    334     }
    335 
    336     status_t res;
    337     // Pass 1, calculate size and allocate
    338     res = constructDefaultRequest(request_template,
    339             request,
    340             true);
    341     if (res != OK) {
    342         return res;
    343     }
    344     // Pass 2, build request
    345     res = constructDefaultRequest(request_template,
    346             request,
    347             false);
    348     if (res != OK) {
    349         ALOGE("Unable to populate new request for template %d",
    350                 request_template);
    351     }
    352 
    353     return res;
    354 }
    355 
    356 int EmulatedFakeCamera2::allocateStream(
    357         uint32_t width,
    358         uint32_t height,
    359         int format,
    360         const camera2_stream_ops_t *stream_ops,
    361         uint32_t *stream_id,
    362         uint32_t *format_actual,
    363         uint32_t *usage,
    364         uint32_t *max_buffers) {
    365     Mutex::Autolock l(mMutex);
    366 
    367     if (!mStatusPresent) {
    368         ALOGW("%s: Camera was physically disconnected", __FUNCTION__);
    369         return ERROR_CAMERA_NOT_PRESENT;
    370     }
    371 
    372     // Temporary shim until FORMAT_ZSL is removed
    373     if (format == CAMERA2_HAL_PIXEL_FORMAT_ZSL) {
    374         format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
    375     }
    376 
    377     if (format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
    378         unsigned int numFormats = sizeof(kAvailableFormats) / sizeof(uint32_t);
    379         unsigned int formatIdx = 0;
    380         unsigned int sizeOffsetIdx = 0;
    381         for (; formatIdx < numFormats; formatIdx++) {
    382             if (format == (int)kAvailableFormats[formatIdx]) break;
    383         }
    384         if (formatIdx == numFormats) {
    385             ALOGE("%s: Format 0x%x is not supported", __FUNCTION__, format);
    386             return BAD_VALUE;
    387         }
    388     }
    389 
    390     const uint32_t *availableSizes;
    391     size_t availableSizeCount;
    392     switch (format) {
    393         case HAL_PIXEL_FORMAT_RAW16:
    394             availableSizes = kAvailableRawSizes;
    395             availableSizeCount = sizeof(kAvailableRawSizes)/sizeof(uint32_t);
    396             break;
    397         case HAL_PIXEL_FORMAT_BLOB:
    398             availableSizes = mFacingBack ?
    399                     kAvailableJpegSizesBack : kAvailableJpegSizesFront;
    400             availableSizeCount = mFacingBack ?
    401                     sizeof(kAvailableJpegSizesBack)/sizeof(uint32_t) :
    402                     sizeof(kAvailableJpegSizesFront)/sizeof(uint32_t);
    403             break;
    404         case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
    405         case HAL_PIXEL_FORMAT_RGBA_8888:
    406         case HAL_PIXEL_FORMAT_YV12:
    407         case HAL_PIXEL_FORMAT_YCrCb_420_SP:
    408             availableSizes = mFacingBack ?
    409                     kAvailableProcessedSizesBack : kAvailableProcessedSizesFront;
    410             availableSizeCount = mFacingBack ?
    411                     sizeof(kAvailableProcessedSizesBack)/sizeof(uint32_t) :
    412                     sizeof(kAvailableProcessedSizesFront)/sizeof(uint32_t);
    413             break;
    414         default:
    415             ALOGE("%s: Unknown format 0x%x", __FUNCTION__, format);
    416             return BAD_VALUE;
    417     }
    418 
    419     unsigned int resIdx = 0;
    420     for (; resIdx < availableSizeCount; resIdx++) {
    421         if (availableSizes[resIdx * 2] == width &&
    422                 availableSizes[resIdx * 2 + 1] == height) break;
    423     }
    424     if (resIdx == availableSizeCount) {
    425         ALOGE("%s: Format 0x%x does not support resolution %d, %d", __FUNCTION__,
    426                 format, width, height);
    427         return BAD_VALUE;
    428     }
    429 
    430     switch (format) {
    431         case HAL_PIXEL_FORMAT_RAW16:
    432             if (mRawStreamCount >= kMaxRawStreamCount) {
    433                 ALOGE("%s: Cannot allocate another raw stream (%d already allocated)",
    434                         __FUNCTION__, mRawStreamCount);
    435                 return INVALID_OPERATION;
    436             }
    437             mRawStreamCount++;
    438             break;
    439         case HAL_PIXEL_FORMAT_BLOB:
    440             if (mJpegStreamCount >= kMaxJpegStreamCount) {
    441                 ALOGE("%s: Cannot allocate another JPEG stream (%d already allocated)",
    442                         __FUNCTION__, mJpegStreamCount);
    443                 return INVALID_OPERATION;
    444             }
    445             mJpegStreamCount++;
    446             break;
    447         default:
    448             if (mProcessedStreamCount >= kMaxProcessedStreamCount) {
    449                 ALOGE("%s: Cannot allocate another processed stream (%d already allocated)",
    450                         __FUNCTION__, mProcessedStreamCount);
    451                 return INVALID_OPERATION;
    452             }
    453             mProcessedStreamCount++;
    454     }
    455 
    456     Stream newStream;
    457     newStream.ops = stream_ops;
    458     newStream.width = width;
    459     newStream.height = height;
    460     newStream.format = format;
    461     // TODO: Query stride from gralloc
    462     newStream.stride = width;
    463 
    464     mStreams.add(mNextStreamId, newStream);
    465 
    466     *stream_id = mNextStreamId;
    467     if (format_actual) *format_actual = format;
    468     *usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
    469     *max_buffers = kMaxBufferCount;
    470 
    471     ALOGV("Stream allocated: %d, %d x %d, 0x%x. U: %x, B: %d",
    472             *stream_id, width, height, format, *usage, *max_buffers);
    473 
    474     mNextStreamId++;
    475     return NO_ERROR;
    476 }
    477 
    478 int EmulatedFakeCamera2::registerStreamBuffers(
    479             uint32_t stream_id,
    480             int num_buffers,
    481             buffer_handle_t *buffers) {
    482     Mutex::Autolock l(mMutex);
    483 
    484     if (!mStatusPresent) {
    485         ALOGW("%s: Camera was physically disconnected", __FUNCTION__);
    486         return ERROR_CAMERA_NOT_PRESENT;
    487     }
    488 
    489     ALOGV("%s: Stream %d registering %d buffers", __FUNCTION__,
    490             stream_id, num_buffers);
    491     // Need to find out what the final concrete pixel format for our stream is
    492     // Assumes that all buffers have the same format.
    493     if (num_buffers < 1) {
    494         ALOGE("%s: Stream %d only has %d buffers!",
    495                 __FUNCTION__, stream_id, num_buffers);
    496         return BAD_VALUE;
    497     }
    498 
    499     ssize_t streamIndex = mStreams.indexOfKey(stream_id);
    500     if (streamIndex < 0) {
    501         ALOGE("%s: Unknown stream id %d!", __FUNCTION__, stream_id);
    502         return BAD_VALUE;
    503     }
    504 
    505     Stream &stream = mStreams.editValueAt(streamIndex);
    506 
    507     int finalFormat = stream.format;
    508 
    509     if (finalFormat == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
    510         finalFormat = HAL_PIXEL_FORMAT_RGBA_8888;
    511     }
    512 
    513     ALOGV("%s: Stream %d format set to %x, previously %x",
    514             __FUNCTION__, stream_id, finalFormat, stream.format);
    515 
    516     stream.format = finalFormat;
    517 
    518     return NO_ERROR;
    519 }
    520 
    521 int EmulatedFakeCamera2::releaseStream(uint32_t stream_id) {
    522     Mutex::Autolock l(mMutex);
    523 
    524     ssize_t streamIndex = mStreams.indexOfKey(stream_id);
    525     if (streamIndex < 0) {
    526         ALOGE("%s: Unknown stream id %d!", __FUNCTION__, stream_id);
    527         return BAD_VALUE;
    528     }
    529 
    530     if (isStreamInUse(stream_id)) {
    531         ALOGE("%s: Cannot release stream %d; in use!", __FUNCTION__,
    532                 stream_id);
    533         return BAD_VALUE;
    534     }
    535 
    536     switch(mStreams.valueAt(streamIndex).format) {
    537         case HAL_PIXEL_FORMAT_RAW16:
    538             mRawStreamCount--;
    539             break;
    540         case HAL_PIXEL_FORMAT_BLOB:
    541             mJpegStreamCount--;
    542             break;
    543         default:
    544             mProcessedStreamCount--;
    545             break;
    546     }
    547 
    548     mStreams.removeItemsAt(streamIndex);
    549 
    550     return NO_ERROR;
    551 }
    552 
    553 int EmulatedFakeCamera2::allocateReprocessStreamFromStream(
    554         uint32_t output_stream_id,
    555         const camera2_stream_in_ops_t *stream_ops,
    556         uint32_t *stream_id) {
    557     Mutex::Autolock l(mMutex);
    558 
    559     if (!mStatusPresent) {
    560         ALOGW("%s: Camera was physically disconnected", __FUNCTION__);
    561         return ERROR_CAMERA_NOT_PRESENT;
    562     }
    563 
    564     ssize_t baseStreamIndex = mStreams.indexOfKey(output_stream_id);
    565     if (baseStreamIndex < 0) {
    566         ALOGE("%s: Unknown output stream id %d!", __FUNCTION__, output_stream_id);
    567         return BAD_VALUE;
    568     }
    569 
    570     const Stream &baseStream = mStreams[baseStreamIndex];
    571 
    572     // We'll reprocess anything we produced
    573 
    574     if (mReprocessStreamCount >= kMaxReprocessStreamCount) {
    575         ALOGE("%s: Cannot allocate another reprocess stream (%d already allocated)",
    576                 __FUNCTION__, mReprocessStreamCount);
    577         return INVALID_OPERATION;
    578     }
    579     mReprocessStreamCount++;
    580 
    581     ReprocessStream newStream;
    582     newStream.ops = stream_ops;
    583     newStream.width = baseStream.width;
    584     newStream.height = baseStream.height;
    585     newStream.format = baseStream.format;
    586     newStream.stride = baseStream.stride;
    587     newStream.sourceStreamId = output_stream_id;
    588 
    589     *stream_id = mNextReprocessStreamId;
    590     mReprocessStreams.add(mNextReprocessStreamId, newStream);
    591 
    592     ALOGV("Reprocess stream allocated: %d: %d, %d, 0x%x. Parent stream: %d",
    593             *stream_id, newStream.width, newStream.height, newStream.format,
    594             output_stream_id);
    595 
    596     mNextReprocessStreamId++;
    597     return NO_ERROR;
    598 }
    599 
    600 int EmulatedFakeCamera2::releaseReprocessStream(uint32_t stream_id) {
    601     Mutex::Autolock l(mMutex);
    602 
    603     ssize_t streamIndex = mReprocessStreams.indexOfKey(stream_id);
    604     if (streamIndex < 0) {
    605         ALOGE("%s: Unknown reprocess stream id %d!", __FUNCTION__, stream_id);
    606         return BAD_VALUE;
    607     }
    608 
    609     if (isReprocessStreamInUse(stream_id)) {
    610         ALOGE("%s: Cannot release reprocessing stream %d; in use!", __FUNCTION__,
    611                 stream_id);
    612         return BAD_VALUE;
    613     }
    614 
    615     mReprocessStreamCount--;
    616     mReprocessStreams.removeItemsAt(streamIndex);
    617 
    618     return NO_ERROR;
    619 }
    620 
    621 int EmulatedFakeCamera2::triggerAction(uint32_t trigger_id,
    622         int32_t ext1,
    623         int32_t ext2) {
    624     Mutex::Autolock l(mMutex);
    625 
    626     if (trigger_id == CAMERA2_EXT_TRIGGER_TESTING_DISCONNECT) {
    627         ALOGI("%s: Disconnect trigger - camera must be closed", __FUNCTION__);
    628         mStatusPresent = false;
    629 
    630         gEmulatedCameraFactory.onStatusChanged(
    631                 mCameraID,
    632                 CAMERA_DEVICE_STATUS_NOT_PRESENT);
    633     }
    634 
    635     if (!mStatusPresent) {
    636         ALOGW("%s: Camera was physically disconnected", __FUNCTION__);
    637         return ERROR_CAMERA_NOT_PRESENT;
    638     }
    639 
    640     return mControlThread->triggerAction(trigger_id,
    641             ext1, ext2);
    642 }
    643 
    644 /** Shutdown and debug methods */
    645 
    646 int EmulatedFakeCamera2::dump(int fd) {
    647     String8 result;
    648 
    649     result.appendFormat("    Camera HAL device: EmulatedFakeCamera2\n");
    650     result.appendFormat("      Streams:\n");
    651     for (size_t i = 0; i < mStreams.size(); i++) {
    652         int id = mStreams.keyAt(i);
    653         const Stream& s = mStreams.valueAt(i);
    654         result.appendFormat(
    655             "         Stream %d: %d x %d, format 0x%x, stride %d\n",
    656             id, s.width, s.height, s.format, s.stride);
    657     }
    658 
    659     write(fd, result.string(), result.size());
    660 
    661     return NO_ERROR;
    662 }
    663 
// Called by the worker threads when they hit an unrecoverable error.
// Currently only logs; shutdown/propagation is still unimplemented.
void EmulatedFakeCamera2::signalError() {
    // TODO: Let parent know so we can shut down cleanly
    ALOGE("Worker thread is signaling a serious error");
}
    668 
    669 /** Pipeline control worker thread methods */
    670 
// Configure thread: dequeues requests and stages buffers for capture.
// Thread(false): not started in canCallJava mode; run() is called later by
// connectCamera(). mRunning stays false until readyToRun() executes.
EmulatedFakeCamera2::ConfigureThread::ConfigureThread(EmulatedFakeCamera2 *parent):
        Thread(false),
        mParent(parent),
        mRequestCount(0),
        mNextBuffers(NULL) {
    mRunning = false;
}
    678 
// No resources owned directly; the thread is stopped via requestExit()/join()
// in closeCamera() before destruction.
EmulatedFakeCamera2::ConfigureThread::~ConfigureThread() {
}
    681 
// Runs once on the new thread before threadLoop(). Initializes state under
// the input lock and signals waitUntilRunning() waiters that the thread is up.
status_t EmulatedFakeCamera2::ConfigureThread::readyToRun() {
    Mutex::Autolock lock(mInputMutex);

    ALOGV("Starting up ConfigureThread");
    mRequest = NULL;
    mActive  = false;
    mRunning = true;

    // Wake anyone blocked in waitUntilRunning().
    mInputSignal.signal();
    return NO_ERROR;
}
    693 
    694 status_t EmulatedFakeCamera2::ConfigureThread::waitUntilRunning() {
    695     Mutex::Autolock lock(mInputMutex);
    696     if (!mRunning) {
    697         ALOGV("Waiting for configure thread to start");
    698         mInputSignal.wait(mInputMutex);
    699     }
    700     return OK;
    701 }
    702 
// Notifies the configure thread that the request queue has work. Waits for
// the thread to be up first, then flips it to active and wakes it.
status_t EmulatedFakeCamera2::ConfigureThread::newRequestAvailable() {
    waitUntilRunning();

    Mutex::Autolock lock(mInputMutex);

    mActive = true;
    mInputSignal.signal();

    return OK;
}
    713 
    714 bool EmulatedFakeCamera2::ConfigureThread::isStreamInUse(uint32_t id) {
    715     Mutex::Autolock lock(mInternalsMutex);
    716 
    717     if (mNextBuffers == NULL) return false;
    718     for (size_t i=0; i < mNextBuffers->size(); i++) {
    719         if ((*mNextBuffers)[i].streamId == (int)id) return true;
    720     }
    721     return false;
    722 }
    723 
// Number of requests this thread has dequeued and not yet handed off.
int EmulatedFakeCamera2::ConfigureThread::getInProgressCount() {
    Mutex::Autolock lock(mInputMutex);
    return mRequestCount;
}
    728 
/*
 * Main loop of the configure thread. Each iteration:
 *   1. Sleeps (bounded) until newRequestAvailable() marks the thread active.
 *   2. If no request is in flight, dequeues the next one and dispatches to
 *      setupCapture()/setupReprocess() based on its type.
 *   3. Waits (non-blocking, one poll per loop pass) for the readout thread
 *      and JPEG compressor to be ready, then configures the next capture
 *      or reprocess.
 * Returning true keeps the loop running; false terminates the thread.
 */
bool EmulatedFakeCamera2::ConfigureThread::threadLoop() {
    status_t res;

    // Check if we're currently processing or just waiting
    {
        Mutex::Autolock lock(mInputMutex);
        if (!mActive) {
            // Inactive, keep waiting until we've been signaled
            status_t res;  // NOTE: shadows the outer res; only used for the wait.
            res = mInputSignal.waitRelative(mInputMutex, kWaitPerLoop);
            if (res != NO_ERROR && res != TIMED_OUT) {
                ALOGE("%s: Error waiting for input requests: %d",
                        __FUNCTION__, res);
                return false;
            }
            // Timed out without becoming active: just loop again.
            if (!mActive) return true;
            ALOGV("New request available");
        }
        // Active
    }

    if (mRequest == NULL) {
        // mInternalsMutex is taken before mInputMutex below; this ordering
        // is consistent across the thread.
        Mutex::Autolock il(mInternalsMutex);

        ALOGV("Configure: Getting next request");
        res = mParent->mRequestQueueSrc->dequeue_request(
            mParent->mRequestQueueSrc,
            &mRequest);
        if (res != NO_ERROR) {
            ALOGE("%s: Error dequeuing next request: %d", __FUNCTION__, res);
            mParent->signalError();
            return false;
        }
        if (mRequest == NULL) {
            ALOGV("Configure: Request queue empty, going inactive");
            // No requests available, go into inactive mode
            Mutex::Autolock lock(mInputMutex);
            mActive = false;
            return true;
        } else {
            // Count the dequeued request as in-progress.
            Mutex::Autolock lock(mInputMutex);
            mRequestCount++;
        }

        // Dispatch on the request type (capture vs. reprocess).
        camera_metadata_entry_t type;
        res = find_camera_metadata_entry(mRequest,
                ANDROID_REQUEST_TYPE,
                &type);
        if (res != NO_ERROR) {
            ALOGE("%s: error reading request type", __FUNCTION__);
            mParent->signalError();
            return false;
        }
        bool success = false;;
        switch (type.data.u8[0]) {
            case ANDROID_REQUEST_TYPE_CAPTURE:
                success = setupCapture();
                break;
            case ANDROID_REQUEST_TYPE_REPROCESS:
                success = setupReprocess();
                break;
            default:
                ALOGE("%s: Unexpected request type %d",
                        __FUNCTION__, type.data.u8[0]);
                mParent->signalError();
                break;
        }
        if (!success) return false;

    }

    // Poll the readout thread once per loop pass; returning true re-enters
    // threadLoop rather than blocking here.
    if (mWaitingForReadout) {
        bool readoutDone;
        readoutDone = mParent->mReadoutThread->waitForReady(kWaitPerLoop);
        if (!readoutDone) return true;

        if (mNextNeedsJpeg) {
            ALOGV("Configure: Waiting for JPEG compressor");
        } else {
            ALOGV("Configure: Waiting for sensor");
        }
        mWaitingForReadout = false;
    }

    // Same polling pattern for the JPEG compressor when the next capture
    // includes a BLOB stream.
    if (mNextNeedsJpeg) {
        bool jpegDone;
        jpegDone = mParent->mJpegCompressor->waitForDone(kWaitPerLoop);
        if (!jpegDone) return true;

        ALOGV("Configure: Waiting for sensor");
        mNextNeedsJpeg = false;
    }

    if (mNextIsCapture) {
        return configureNextCapture();
    } else {
        return configureNextReprocess();
    }
}
    828 
    829 bool EmulatedFakeCamera2::ConfigureThread::setupCapture() {
    830     status_t res;
    831 
    832     mNextIsCapture = true;
    833     // Get necessary parameters for sensor config
    834     mParent->mControlThread->processRequest(mRequest);
    835 
    836     camera_metadata_entry_t streams;
    837     res = find_camera_metadata_entry(mRequest,
    838             ANDROID_REQUEST_OUTPUT_STREAMS,
    839             &streams);
    840     if (res != NO_ERROR) {
    841         ALOGE("%s: error reading output stream tag", __FUNCTION__);
    842         mParent->signalError();
    843         return false;
    844     }
    845 
    846     mNextBuffers = new Buffers;
    847     mNextNeedsJpeg = false;
    848     ALOGV("Configure: Setting up buffers for capture");
    849     for (size_t i = 0; i < streams.count; i++) {
    850         int streamId = streams.data.i32[i];
    851         const Stream &s = mParent->getStreamInfo(streamId);
    852         if (s.format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
    853             ALOGE("%s: Stream %d does not have a concrete pixel format, but "
    854                     "is included in a request!", __FUNCTION__, streamId);
    855             mParent->signalError();
    856             return false;
    857         }
    858         StreamBuffer b;
    859         b.streamId = streams.data.u8[i];
    860         b.width  = s.width;
    861         b.height = s.height;
    862         b.format = s.format;
    863         b.stride = s.stride;
    864         mNextBuffers->push_back(b);
    865         ALOGV("Configure:    Buffer %zu: Stream %d, %d x %d, format 0x%x, "
    866                 "stride %d",
    867                 i, b.streamId, b.width, b.height, b.format, b.stride);
    868         if (b.format == HAL_PIXEL_FORMAT_BLOB) {
    869             mNextNeedsJpeg = true;
    870         }
    871     }
    872 
    873     camera_metadata_entry_t e;
    874     res = find_camera_metadata_entry(mRequest,
    875             ANDROID_REQUEST_FRAME_COUNT,
    876             &e);
    877     if (res != NO_ERROR) {
    878         ALOGE("%s: error reading frame count tag: %s (%d)",
    879                 __FUNCTION__, strerror(-res), res);
    880         mParent->signalError();
    881         return false;
    882     }
    883     mNextFrameNumber = *e.data.i32;
    884 
    885     res = find_camera_metadata_entry(mRequest,
    886             ANDROID_SENSOR_EXPOSURE_TIME,
    887             &e);
    888     if (res != NO_ERROR) {
    889         ALOGE("%s: error reading exposure time tag: %s (%d)",
    890                 __FUNCTION__, strerror(-res), res);
    891         mParent->signalError();
    892         return false;
    893     }
    894     mNextExposureTime = *e.data.i64;
    895 
    896     res = find_camera_metadata_entry(mRequest,
    897             ANDROID_SENSOR_FRAME_DURATION,
    898             &e);
    899     if (res != NO_ERROR) {
    900         ALOGE("%s: error reading frame duration tag", __FUNCTION__);
    901         mParent->signalError();
    902         return false;
    903     }
    904     mNextFrameDuration = *e.data.i64;
    905 
    906     if (mNextFrameDuration <
    907             mNextExposureTime + Sensor::kMinVerticalBlank) {
    908         mNextFrameDuration = mNextExposureTime + Sensor::kMinVerticalBlank;
    909     }
    910     res = find_camera_metadata_entry(mRequest,
    911             ANDROID_SENSOR_SENSITIVITY,
    912             &e);
    913     if (res != NO_ERROR) {
    914         ALOGE("%s: error reading sensitivity tag", __FUNCTION__);
    915         mParent->signalError();
    916         return false;
    917     }
    918     mNextSensitivity = *e.data.i32;
    919 
    920     // Start waiting on readout thread
    921     mWaitingForReadout = true;
    922     ALOGV("Configure: Waiting for readout thread");
    923 
    924     return true;
    925 }
    926 
    927 bool EmulatedFakeCamera2::ConfigureThread::configureNextCapture() {
    928     bool vsync = mParent->mSensor->waitForVSync(kWaitPerLoop);
    929     if (!vsync) return true;
    930 
    931     Mutex::Autolock il(mInternalsMutex);
    932     ALOGV("Configure: Configuring sensor for capture %d", mNextFrameNumber);
    933     mParent->mSensor->setExposureTime(mNextExposureTime);
    934     mParent->mSensor->setFrameDuration(mNextFrameDuration);
    935     mParent->mSensor->setSensitivity(mNextSensitivity);
    936 
    937     getBuffers();
    938 
    939     ALOGV("Configure: Done configure for capture %d", mNextFrameNumber);
    940     mParent->mReadoutThread->setNextOperation(true, mRequest, mNextBuffers);
    941     mParent->mSensor->setDestinationBuffers(mNextBuffers);
    942 
    943     mRequest = NULL;
    944     mNextBuffers = NULL;
    945 
    946     Mutex::Autolock lock(mInputMutex);
    947     mRequestCount--;
    948 
    949     return true;
    950 }
    951 
    952 bool EmulatedFakeCamera2::ConfigureThread::setupReprocess() {
    953     status_t res;
    954 
    955     mNextNeedsJpeg = true;
    956     mNextIsCapture = false;
    957 
    958     camera_metadata_entry_t reprocessStreams;
    959     res = find_camera_metadata_entry(mRequest,
    960             ANDROID_REQUEST_INPUT_STREAMS,
    961             &reprocessStreams);
    962     if (res != NO_ERROR) {
    963         ALOGE("%s: error reading output stream tag", __FUNCTION__);
    964         mParent->signalError();
    965         return false;
    966     }
    967 
    968     mNextBuffers = new Buffers;
    969 
    970     ALOGV("Configure: Setting up input buffers for reprocess");
    971     for (size_t i = 0; i < reprocessStreams.count; i++) {
    972         int streamId = reprocessStreams.data.i32[i];
    973         const ReprocessStream &s = mParent->getReprocessStreamInfo(streamId);
    974         if (s.format != HAL_PIXEL_FORMAT_RGB_888) {
    975             ALOGE("%s: Only ZSL reprocessing supported!",
    976                     __FUNCTION__);
    977             mParent->signalError();
    978             return false;
    979         }
    980         StreamBuffer b;
    981         b.streamId = -streamId;
    982         b.width = s.width;
    983         b.height = s.height;
    984         b.format = s.format;
    985         b.stride = s.stride;
    986         mNextBuffers->push_back(b);
    987     }
    988 
    989     camera_metadata_entry_t streams;
    990     res = find_camera_metadata_entry(mRequest,
    991             ANDROID_REQUEST_OUTPUT_STREAMS,
    992             &streams);
    993     if (res != NO_ERROR) {
    994         ALOGE("%s: error reading output stream tag", __FUNCTION__);
    995         mParent->signalError();
    996         return false;
    997     }
    998 
    999     ALOGV("Configure: Setting up output buffers for reprocess");
   1000     for (size_t i = 0; i < streams.count; i++) {
   1001         int streamId = streams.data.i32[i];
   1002         const Stream &s = mParent->getStreamInfo(streamId);
   1003         if (s.format != HAL_PIXEL_FORMAT_BLOB) {
   1004             // TODO: Support reprocess to YUV
   1005             ALOGE("%s: Non-JPEG output stream %d for reprocess not supported",
   1006                     __FUNCTION__, streamId);
   1007             mParent->signalError();
   1008             return false;
   1009         }
   1010         StreamBuffer b;
   1011         b.streamId = streams.data.u8[i];
   1012         b.width  = s.width;
   1013         b.height = s.height;
   1014         b.format = s.format;
   1015         b.stride = s.stride;
   1016         mNextBuffers->push_back(b);
   1017         ALOGV("Configure:    Buffer %zu: Stream %d, %d x %d, format 0x%x, "
   1018                 "stride %d",
   1019                 i, b.streamId, b.width, b.height, b.format, b.stride);
   1020     }
   1021 
   1022     camera_metadata_entry_t e;
   1023     res = find_camera_metadata_entry(mRequest,
   1024             ANDROID_REQUEST_FRAME_COUNT,
   1025             &e);
   1026     if (res != NO_ERROR) {
   1027         ALOGE("%s: error reading frame count tag: %s (%d)",
   1028                 __FUNCTION__, strerror(-res), res);
   1029         mParent->signalError();
   1030         return false;
   1031     }
   1032     mNextFrameNumber = *e.data.i32;
   1033 
   1034     return true;
   1035 }
   1036 
   1037 bool EmulatedFakeCamera2::ConfigureThread::configureNextReprocess() {
   1038     Mutex::Autolock il(mInternalsMutex);
   1039 
   1040     getBuffers();
   1041 
   1042     ALOGV("Configure: Done configure for reprocess %d", mNextFrameNumber);
   1043     mParent->mReadoutThread->setNextOperation(false, mRequest, mNextBuffers);
   1044 
   1045     mRequest = NULL;
   1046     mNextBuffers = NULL;
   1047 
   1048     Mutex::Autolock lock(mInputMutex);
   1049     mRequestCount--;
   1050 
   1051     return true;
   1052 }
   1053 
   1054 bool EmulatedFakeCamera2::ConfigureThread::getBuffers() {
   1055     status_t res;
   1056     /** Get buffers to fill for this frame */
   1057     for (size_t i = 0; i < mNextBuffers->size(); i++) {
   1058         StreamBuffer &b = mNextBuffers->editItemAt(i);
   1059 
   1060         if (b.streamId > 0) {
   1061             Stream s = mParent->getStreamInfo(b.streamId);
   1062             ALOGV("Configure: Dequeing buffer from stream %d", b.streamId);
   1063             res = s.ops->dequeue_buffer(s.ops, &(b.buffer) );
   1064             if (res != NO_ERROR || b.buffer == NULL) {
   1065                 ALOGE("%s: Unable to dequeue buffer from stream %d: %s (%d)",
   1066                         __FUNCTION__, b.streamId, strerror(-res), res);
   1067                 mParent->signalError();
   1068                 return false;
   1069             }
   1070 
   1071             /* Lock the buffer from the perspective of the graphics mapper */
   1072             res = GrallocModule::getInstance().lock(*(b.buffer),
   1073                     GRALLOC_USAGE_HW_CAMERA_WRITE,
   1074                     0, 0, s.width, s.height,
   1075                     (void**)&(b.img));
   1076 
   1077 
   1078             if (res != NO_ERROR) {
   1079                 ALOGE("%s: grbuffer_mapper.lock failure: %s (%d)",
   1080                         __FUNCTION__, strerror(-res), res);
   1081                 s.ops->cancel_buffer(s.ops,
   1082                         b.buffer);
   1083                 mParent->signalError();
   1084                 return false;
   1085             }
   1086         } else {
   1087             ReprocessStream s = mParent->getReprocessStreamInfo(-b.streamId);
   1088             ALOGV("Configure: Acquiring buffer from reprocess stream %d",
   1089                     -b.streamId);
   1090             res = s.ops->acquire_buffer(s.ops, &(b.buffer) );
   1091             if (res != NO_ERROR || b.buffer == NULL) {
   1092                 ALOGE("%s: Unable to acquire buffer from reprocess stream %d: "
   1093                         "%s (%d)", __FUNCTION__, -b.streamId,
   1094                         strerror(-res), res);
   1095                 mParent->signalError();
   1096                 return false;
   1097             }
   1098 
   1099             /* Lock the buffer from the perspective of the graphics mapper */
   1100             res = GrallocModule::getInstance().lock(*(b.buffer),
   1101                     GRALLOC_USAGE_HW_CAMERA_READ,
   1102                     0, 0, s.width, s.height,
   1103                     (void**)&(b.img) );
   1104             if (res != NO_ERROR) {
   1105                 ALOGE("%s: grbuffer_mapper.lock failure: %s (%d)",
   1106                         __FUNCTION__, strerror(-res), res);
   1107                 s.ops->release_buffer(s.ops,
   1108                         b.buffer);
   1109                 mParent->signalError();
   1110                 return false;
   1111             }
   1112         }
   1113     }
   1114     return true;
   1115 }
   1116 
   1117 EmulatedFakeCamera2::ReadoutThread::ReadoutThread(EmulatedFakeCamera2 *parent):
   1118         Thread(false),
   1119         mParent(parent),
   1120         mRunning(false),
   1121         mActive(false),
   1122         mRequestCount(0),
   1123         mRequest(NULL),
   1124         mBuffers(NULL) {
   1125     mInFlightQueue = new InFlightQueue[kInFlightQueueSize];
   1126     mInFlightHead = 0;
   1127     mInFlightTail = 0;
   1128 }
   1129 
EmulatedFakeCamera2::ReadoutThread::~ReadoutThread() {
    // Free the circular in-flight queue allocated in the constructor.
    delete[] mInFlightQueue;
}
   1133 
   1134 status_t EmulatedFakeCamera2::ReadoutThread::readyToRun() {
   1135     Mutex::Autolock lock(mInputMutex);
   1136     ALOGV("Starting up ReadoutThread");
   1137     mRunning = true;
   1138     mInputSignal.signal();
   1139     return NO_ERROR;
   1140 }
   1141 
   1142 status_t EmulatedFakeCamera2::ReadoutThread::waitUntilRunning() {
   1143     Mutex::Autolock lock(mInputMutex);
   1144     if (!mRunning) {
   1145         ALOGV("Waiting for readout thread to start");
   1146         mInputSignal.wait(mInputMutex);
   1147     }
   1148     return OK;
   1149 }
   1150 
   1151 bool EmulatedFakeCamera2::ReadoutThread::waitForReady(nsecs_t timeout) {
   1152     status_t res;
   1153     Mutex::Autolock lock(mInputMutex);
   1154     while (!readyForNextCapture()) {
   1155         res = mReadySignal.waitRelative(mInputMutex, timeout);
   1156         if (res == TIMED_OUT) return false;
   1157         if (res != OK) {
   1158             ALOGE("%s: Error waiting for ready: %s (%d)", __FUNCTION__,
   1159                     strerror(-res), res);
   1160             return false;
   1161         }
   1162     }
   1163     return true;
   1164 }
   1165 
   1166 bool EmulatedFakeCamera2::ReadoutThread::readyForNextCapture() {
   1167     return (mInFlightTail + 1) % kInFlightQueueSize != mInFlightHead;
   1168 }
   1169 
   1170 void EmulatedFakeCamera2::ReadoutThread::setNextOperation(
   1171         bool isCapture,
   1172         camera_metadata_t *request,
   1173         Buffers *buffers) {
   1174     Mutex::Autolock lock(mInputMutex);
   1175     if ( !readyForNextCapture() ) {
   1176         ALOGE("In flight queue full, dropping captures");
   1177         mParent->signalError();
   1178         return;
   1179     }
   1180     mInFlightQueue[mInFlightTail].isCapture = isCapture;
   1181     mInFlightQueue[mInFlightTail].request = request;
   1182     mInFlightQueue[mInFlightTail].buffers = buffers;
   1183     mInFlightTail = (mInFlightTail + 1) % kInFlightQueueSize;
   1184     mRequestCount++;
   1185 
   1186     if (!mActive) {
   1187         mActive = true;
   1188         mInputSignal.signal();
   1189     }
   1190 }
   1191 
   1192 bool EmulatedFakeCamera2::ReadoutThread::isStreamInUse(uint32_t id) {
   1193     // acquire in same order as threadLoop
   1194     Mutex::Autolock iLock(mInternalsMutex);
   1195     Mutex::Autolock lock(mInputMutex);
   1196 
   1197     size_t i = mInFlightHead;
   1198     while (i != mInFlightTail) {
   1199         for (size_t j = 0; j < mInFlightQueue[i].buffers->size(); j++) {
   1200             if ( (*(mInFlightQueue[i].buffers))[j].streamId == (int)id )
   1201                 return true;
   1202         }
   1203         i = (i + 1) % kInFlightQueueSize;
   1204     }
   1205 
   1206 
   1207     if (mBuffers != NULL) {
   1208         for (i = 0; i < mBuffers->size(); i++) {
   1209             if ( (*mBuffers)[i].streamId == (int)id) return true;
   1210         }
   1211     }
   1212 
   1213     return false;
   1214 }
   1215 
   1216 int EmulatedFakeCamera2::ReadoutThread::getInProgressCount() {
   1217     Mutex::Autolock lock(mInputMutex);
   1218 
   1219     return mRequestCount;
   1220 }
   1221 
// Main loop of the readout thread. Each iteration: (1) pop the next
// operation from the in-flight queue (or idle), (2) wait for sensor data
// (captures only), (3) build and emit the result metadata frame, (4) push
// each output buffer to its stream, deferring any JPEG buffer to the
// asynchronous compressor. Returns false only on fatal errors.
bool EmulatedFakeCamera2::ReadoutThread::threadLoop() {
    static const nsecs_t kWaitPerLoop = 10000000L; // 10 ms
    status_t res;
    int32_t frameNumber;

    // Check if we're currently processing or just waiting
    {
        Mutex::Autolock lock(mInputMutex);
        if (!mActive) {
            // Inactive, keep waiting until we've been signaled
            res = mInputSignal.waitRelative(mInputMutex, kWaitPerLoop);
            if (res != NO_ERROR && res != TIMED_OUT) {
                ALOGE("%s: Error waiting for capture requests: %d",
                        __FUNCTION__, res);
                mParent->signalError();
                return false;
            }
            if (!mActive) return true;
        }
        // Active, see if we need a new request
        if (mRequest == NULL) {
            if (mInFlightHead == mInFlightTail) {
                // Go inactive
                ALOGV("Waiting for sensor data");
                mActive = false;
                return true;
            } else {
                // Pop the head of the circular queue and signal the
                // ConfigureThread that a slot has opened up.
                Mutex::Autolock iLock(mInternalsMutex);
                mReadySignal.signal();
                mIsCapture = mInFlightQueue[mInFlightHead].isCapture;
                mRequest = mInFlightQueue[mInFlightHead].request;
                mBuffers  = mInFlightQueue[mInFlightHead].buffers;
                mInFlightQueue[mInFlightHead].request = NULL;
                mInFlightQueue[mInFlightHead].buffers = NULL;
                mInFlightHead = (mInFlightHead + 1) % kInFlightQueueSize;
                ALOGV("Ready to read out request %p, %zu buffers",
                        mRequest, mBuffers->size());
            }
        }
    }

    // Active with request, wait on sensor to complete

    nsecs_t captureTime;

    if (mIsCapture) {
        bool gotFrame;
        gotFrame = mParent->mSensor->waitForNewFrame(kWaitPerLoop,
                &captureTime);

        // Sensor not done yet; poll again on the next loop iteration.
        if (!gotFrame) return true;
    }

    Mutex::Autolock iLock(mInternalsMutex);

    camera_metadata_entry_t entry;
    if (!mIsCapture) {
        // Reprocess operations don't touch the sensor; the capture timestamp
        // comes from the request metadata instead.
        res = find_camera_metadata_entry(mRequest,
                ANDROID_SENSOR_TIMESTAMP,
            &entry);
        if (res != NO_ERROR) {
            ALOGE("%s: error reading reprocessing timestamp: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            mParent->signalError();
            return false;
        }
        captureTime = entry.data.i64[0];
    }

    res = find_camera_metadata_entry(mRequest,
            ANDROID_REQUEST_FRAME_COUNT,
            &entry);
    if (res != NO_ERROR) {
        ALOGE("%s: error reading frame count tag: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        mParent->signalError();
        return false;
    }
    frameNumber = *entry.data.i32;

    res = find_camera_metadata_entry(mRequest,
            ANDROID_REQUEST_METADATA_MODE,
            &entry);
    if (res != NO_ERROR) {
        ALOGE("%s: error reading metadata mode tag: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        mParent->signalError();
        return false;
    }

    // Got sensor data and request, construct frame and send it out
    ALOGV("Readout: Constructing metadata and frames for request %d",
            frameNumber);

    // Only emit a result metadata frame if the request asked for one.
    if (*entry.data.u8 == ANDROID_REQUEST_METADATA_MODE_FULL) {
        ALOGV("Readout: Metadata requested, constructing");

        camera_metadata_t *frame = NULL;

        // Size the output frame from the request, plus headroom for the
        // entries added below (timestamp, statistics).
        size_t frame_entries = get_camera_metadata_entry_count(mRequest);
        size_t frame_data    = get_camera_metadata_data_count(mRequest);

        // TODO: Dynamically calculate based on enabled statistics, etc
        frame_entries += 10;
        frame_data += 100;

        res = mParent->mFrameQueueDst->dequeue_frame(mParent->mFrameQueueDst,
                frame_entries, frame_data, &frame);

        if (res != NO_ERROR || frame == NULL) {
            ALOGE("%s: Unable to dequeue frame metadata buffer", __FUNCTION__);
            mParent->signalError();
            return false;
        }

        // The result frame starts as a copy of the request settings.
        res = append_camera_metadata(frame, mRequest);
        if (res != NO_ERROR) {
            ALOGE("Unable to append request metadata");
        }

        if (mIsCapture) {
            add_camera_metadata_entry(frame,
                    ANDROID_SENSOR_TIMESTAMP,
                    &captureTime,
                    1);

            collectStatisticsMetadata(frame);
            // TODO: Collect all final values used from sensor in addition to timestamp
        }

        ALOGV("Readout: Enqueue frame %d", frameNumber);
        mParent->mFrameQueueDst->enqueue_frame(mParent->mFrameQueueDst,
                frame);
    }
    ALOGV("Readout: Free request");
    res = mParent->mRequestQueueSrc->free_request(mParent->mRequestQueueSrc, mRequest);
    if (res != NO_ERROR) {
        ALOGE("%s: Unable to return request buffer to queue: %d",
                __FUNCTION__, res);
        mParent->signalError();
        return false;
    }
    mRequest = NULL;

    int compressedBufferIndex = -1;
    ALOGV("Readout: Processing %zu buffers", mBuffers->size());
    for (size_t i = 0; i < mBuffers->size(); i++) {
        const StreamBuffer &b = (*mBuffers)[i];
        ALOGV("Readout:    Buffer %zu: Stream %d, %d x %d, format 0x%x, stride %d",
                i, b.streamId, b.width, b.height, b.format, b.stride);
        // Only output buffers (positive stream id) are pushed here; any
        // reprocess input buffers are released by the JPEG compressor.
        if (b.streamId > 0) {
            if (b.format == HAL_PIXEL_FORMAT_BLOB) {
                // Assumes only one BLOB buffer type per capture
                compressedBufferIndex = i;
            } else {
                // Non-JPEG output: unlock and enqueue directly to the stream.
                ALOGV("Readout:    Sending image buffer %zu (%p) to output stream %d",
                        i, (void*)*(b.buffer), b.streamId);
                GrallocModule::getInstance().unlock(*(b.buffer));
                const Stream &s = mParent->getStreamInfo(b.streamId);
                res = s.ops->enqueue_buffer(s.ops, captureTime, b.buffer);
                if (res != OK) {
                    ALOGE("Error enqueuing image buffer %p: %s (%d)", b.buffer,
                            strerror(-res), res);
                    mParent->signalError();
                }
            }
        }
    }

    if (compressedBufferIndex == -1) {
        delete mBuffers;
    } else {
        // The JPEG result is delivered asynchronously via onJpegDone().
        ALOGV("Readout:  Starting JPEG compression for buffer %d, stream %d",
                compressedBufferIndex,
                (*mBuffers)[compressedBufferIndex].streamId);
        mJpegTimestamp = captureTime;
        // Takes ownership of mBuffers
        mParent->mJpegCompressor->start(mBuffers, this);
    }
    mBuffers = NULL;

    Mutex::Autolock l(mInputMutex);
    mRequestCount--;
    ALOGV("Readout: Done with request %d", frameNumber);
    return true;
}
   1408 
   1409 void EmulatedFakeCamera2::ReadoutThread::onJpegDone(
   1410         const StreamBuffer &jpegBuffer, bool success) {
   1411     status_t res;
   1412     if (!success) {
   1413         ALOGE("%s: Error queueing compressed image buffer %p",
   1414                 __FUNCTION__, jpegBuffer.buffer);
   1415         mParent->signalError();
   1416         return;
   1417     }
   1418 
   1419     // Write to JPEG output stream
   1420     ALOGV("%s: Compression complete, pushing to stream %d", __FUNCTION__,
   1421             jpegBuffer.streamId);
   1422 
   1423     GrallocModule::getInstance().unlock(*(jpegBuffer.buffer));
   1424     const Stream &s = mParent->getStreamInfo(jpegBuffer.streamId);
   1425     res = s.ops->enqueue_buffer(s.ops, mJpegTimestamp, jpegBuffer.buffer);
   1426 }
   1427 
   1428 void EmulatedFakeCamera2::ReadoutThread::onJpegInputDone(
   1429         const StreamBuffer &inputBuffer) {
   1430     status_t res;
   1431     GrallocModule::getInstance().unlock(*(inputBuffer.buffer));
   1432     const ReprocessStream &s =
   1433             mParent->getReprocessStreamInfo(-inputBuffer.streamId);
   1434     res = s.ops->release_buffer(s.ops, inputBuffer.buffer);
   1435     if (res != OK) {
   1436         ALOGE("Error releasing reprocess buffer %p: %s (%d)",
   1437                 inputBuffer.buffer, strerror(-res), res);
   1438         mParent->signalError();
   1439     }
   1440 }
   1441 
   1442 status_t EmulatedFakeCamera2::ReadoutThread::collectStatisticsMetadata(
   1443         camera_metadata_t *frame) {
   1444     // Completely fake face rectangles, don't correspond to real faces in scene
   1445     ALOGV("Readout:    Collecting statistics metadata");
   1446 
   1447     status_t res;
   1448     camera_metadata_entry_t entry;
   1449     res = find_camera_metadata_entry(frame,
   1450                 ANDROID_STATISTICS_FACE_DETECT_MODE,
   1451                 &entry);
   1452     if (res != OK) {
   1453         ALOGE("%s: Unable to find face detect mode!", __FUNCTION__);
   1454         return BAD_VALUE;
   1455     }
   1456 
   1457     if (entry.data.u8[0] == ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) return OK;
   1458 
   1459     // The coordinate system for the face regions is the raw sensor pixel
   1460     // coordinates. Here, we map from the scene coordinates (0-19 in both axis)
   1461     // to raw pixels, for the scene defined in fake-pipeline2/Scene.cpp. We
   1462     // approximately place two faces on top of the windows of the house. No
   1463     // actual faces exist there, but might one day. Note that this doesn't
   1464     // account for the offsets used to account for aspect ratio differences, so
   1465     // the rectangles don't line up quite right.
   1466     const size_t numFaces = 2;
   1467     int32_t rects[numFaces * 4] = {
   1468         static_cast<int32_t>(mParent->mSensorWidth * 10 / 20),
   1469         static_cast<int32_t>(mParent->mSensorHeight * 15 / 20),
   1470         static_cast<int32_t>(mParent->mSensorWidth * 12 / 20),
   1471         static_cast<int32_t>(mParent->mSensorHeight * 17 / 20),
   1472 
   1473         static_cast<int32_t>(mParent->mSensorWidth * 16 / 20),
   1474         static_cast<int32_t>(mParent->mSensorHeight * 15 / 20),
   1475         static_cast<int32_t>(mParent->mSensorWidth * 18 / 20),
   1476         static_cast<int32_t>(mParent->mSensorHeight * 17 / 20)
   1477     };
   1478     // To simulate some kind of real detection going on, we jitter the rectangles on
   1479     // each frame by a few pixels in each dimension.
   1480     for (size_t i = 0; i < numFaces * 4; i++) {
   1481         rects[i] += (int32_t)(((float)rand() / RAND_MAX) * 6 - 3);
   1482     }
   1483     // The confidence scores (0-100) are similarly jittered.
   1484     uint8_t scores[numFaces] = { 85, 95 };
   1485     for (size_t i = 0; i < numFaces; i++) {
   1486         scores[i] += (int32_t)(((float)rand() / RAND_MAX) * 10 - 5);
   1487     }
   1488 
   1489     res = add_camera_metadata_entry(frame, ANDROID_STATISTICS_FACE_RECTANGLES,
   1490             rects, numFaces * 4);
   1491     if (res != OK) {
   1492         ALOGE("%s: Unable to add face rectangles!", __FUNCTION__);
   1493         return BAD_VALUE;
   1494     }
   1495 
   1496     res = add_camera_metadata_entry(frame, ANDROID_STATISTICS_FACE_SCORES,
   1497             scores, numFaces);
   1498     if (res != OK) {
   1499         ALOGE("%s: Unable to add face scores!", __FUNCTION__);
   1500         return BAD_VALUE;
   1501     }
   1502 
   1503     if (entry.data.u8[0] == ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE) return OK;
   1504 
   1505     // Advanced face detection options - add eye/mouth coordinates.  The
   1506     // coordinates in order are (leftEyeX, leftEyeY, rightEyeX, rightEyeY,
   1507     // mouthX, mouthY). The mapping is the same as the face rectangles.
   1508     int32_t features[numFaces * 6] = {
   1509         static_cast<int32_t>(mParent->mSensorWidth * 10.5 / 20),
   1510         static_cast<int32_t>(mParent->mSensorHeight * 16 / 20),
   1511         static_cast<int32_t>(mParent->mSensorWidth * 11.5 / 20),
   1512         static_cast<int32_t>(mParent->mSensorHeight * 16 / 20),
   1513         static_cast<int32_t>(mParent->mSensorWidth * 11 / 20),
   1514         static_cast<int32_t>(mParent->mSensorHeight * 16.5 / 20),
   1515 
   1516         static_cast<int32_t>(mParent->mSensorWidth * 16.5 / 20),
   1517         static_cast<int32_t>(mParent->mSensorHeight * 16 / 20),
   1518         static_cast<int32_t>(mParent->mSensorWidth * 17.5 / 20),
   1519         static_cast<int32_t>(mParent->mSensorHeight * 16 / 20),
   1520         static_cast<int32_t>(mParent->mSensorWidth * 17 / 20),
   1521         static_cast<int32_t>(mParent->mSensorHeight * 16.5 / 20),
   1522     };
   1523     // Jitter these a bit less than the rects
   1524     for (size_t i = 0; i < numFaces * 6; i++) {
   1525         features[i] += (int32_t)(((float)rand() / RAND_MAX) * 4 - 2);
   1526     }
   1527     // These are unique IDs that are used to identify each face while it's
   1528     // visible to the detector (if a face went away and came back, it'd get a
   1529     // new ID).
   1530     int32_t ids[numFaces] = {
   1531         100, 200
   1532     };
   1533 
   1534     res = add_camera_metadata_entry(frame, ANDROID_STATISTICS_FACE_LANDMARKS,
   1535             features, numFaces * 6);
   1536     if (res != OK) {
   1537         ALOGE("%s: Unable to add face landmarks!", __FUNCTION__);
   1538         return BAD_VALUE;
   1539     }
   1540 
   1541     res = add_camera_metadata_entry(frame, ANDROID_STATISTICS_FACE_IDS,
   1542             ids, numFaces);
   1543     if (res != OK) {
   1544         ALOGE("%s: Unable to add face scores!", __FUNCTION__);
   1545         return BAD_VALUE;
   1546     }
   1547 
   1548     return OK;
   1549 }
   1550 
EmulatedFakeCamera2::ControlThread::ControlThread(EmulatedFakeCamera2 *parent):
        Thread(false),
        mParent(parent) {
    // Remaining 3A state is initialized in readyToRun(), which runs on the
    // control thread itself.
    mRunning = false;
}
   1556 
EmulatedFakeCamera2::ControlThread::~ControlThread() {
    // No owned resources; thread shutdown is managed by the parent camera.
}
   1559 
   1560 status_t EmulatedFakeCamera2::ControlThread::readyToRun() {
   1561     Mutex::Autolock lock(mInputMutex);
   1562 
   1563     ALOGV("Starting up ControlThread");
   1564     mRunning = true;
   1565     mStartAf = false;
   1566     mCancelAf = false;
   1567     mStartPrecapture = false;
   1568 
   1569     mControlMode = ANDROID_CONTROL_MODE_AUTO;
   1570 
   1571     mEffectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
   1572     mSceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
   1573 
   1574     mAfMode = ANDROID_CONTROL_AF_MODE_AUTO;
   1575     mAfModeChange = false;
   1576 
   1577     mAeMode = ANDROID_CONTROL_AE_MODE_ON;
   1578     mAwbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
   1579 
   1580     mAfTriggerId = 0;
   1581     mPrecaptureTriggerId = 0;
   1582 
   1583     mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
   1584     mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
   1585     mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
   1586 
   1587     mExposureTime = kNormalExposureTime;
   1588 
   1589     mInputSignal.signal();
   1590     return NO_ERROR;
   1591 }
   1592 
   1593 status_t EmulatedFakeCamera2::ControlThread::waitUntilRunning() {
   1594     Mutex::Autolock lock(mInputMutex);
   1595     if (!mRunning) {
   1596         ALOGV("Waiting for control thread to start");
   1597         mInputSignal.wait(mInputMutex);
   1598     }
   1599     return OK;
   1600 }
   1601 
   1602 // Override android.control.* fields with 3A values before sending request to sensor
   1603 status_t EmulatedFakeCamera2::ControlThread::processRequest(camera_metadata_t *request) {
   1604     Mutex::Autolock lock(mInputMutex);
   1605     // TODO: Add handling for all android.control.* fields here
   1606     camera_metadata_entry_t mode;
   1607     status_t res;
   1608 
   1609 #define READ_IF_OK(res, what, def)                                             \
   1610     (((res) == OK) ? (what) : (uint8_t)(def))
   1611 
   1612     res = find_camera_metadata_entry(request,
   1613             ANDROID_CONTROL_MODE,
   1614             &mode);
   1615     mControlMode = READ_IF_OK(res, mode.data.u8[0], ANDROID_CONTROL_MODE_OFF);
   1616 
   1617     // disable all 3A
   1618     if (mControlMode == ANDROID_CONTROL_MODE_OFF) {
   1619         mEffectMode =   ANDROID_CONTROL_EFFECT_MODE_OFF;
   1620         mSceneMode =    ANDROID_CONTROL_SCENE_MODE_DISABLED;
   1621         mAfMode =       ANDROID_CONTROL_AF_MODE_OFF;
   1622         mAeLock =       ANDROID_CONTROL_AE_LOCK_ON;
   1623         mAeMode =       ANDROID_CONTROL_AE_MODE_OFF;
   1624         mAfModeChange = true;
   1625         mStartAf =      false;
   1626         mCancelAf =     true;
   1627         mAeState =      ANDROID_CONTROL_AE_STATE_INACTIVE;
   1628         mAwbMode =      ANDROID_CONTROL_AWB_MODE_OFF;
   1629         return res;
   1630     }
   1631 
   1632     res = find_camera_metadata_entry(request,
   1633             ANDROID_CONTROL_EFFECT_MODE,
   1634             &mode);
   1635     mEffectMode = READ_IF_OK(res, mode.data.u8[0],
   1636                              ANDROID_CONTROL_EFFECT_MODE_OFF);
   1637 
   1638     res = find_camera_metadata_entry(request,
   1639             ANDROID_CONTROL_SCENE_MODE,
   1640             &mode);
   1641     mSceneMode = READ_IF_OK(res, mode.data.u8[0],
   1642                              ANDROID_CONTROL_SCENE_MODE_DISABLED);
   1643 
   1644     res = find_camera_metadata_entry(request,
   1645             ANDROID_CONTROL_AF_MODE,
   1646             &mode);
   1647     if (mAfMode != mode.data.u8[0]) {
   1648         ALOGV("AF new mode: %d, old mode %d", mode.data.u8[0], mAfMode);
   1649         mAfMode = mode.data.u8[0];
   1650         mAfModeChange = true;
   1651         mStartAf = false;
   1652         mCancelAf = false;
   1653     }
   1654 
   1655     res = find_camera_metadata_entry(request,
   1656             ANDROID_CONTROL_AE_MODE,
   1657             &mode);
   1658     mAeMode = READ_IF_OK(res, mode.data.u8[0],
   1659                              ANDROID_CONTROL_AE_MODE_OFF);
   1660 
   1661     res = find_camera_metadata_entry(request,
   1662             ANDROID_CONTROL_AE_LOCK,
   1663             &mode);
   1664     uint8_t aeLockVal = READ_IF_OK(res, mode.data.u8[0],
   1665                                    ANDROID_CONTROL_AE_LOCK_ON);
   1666     bool aeLock = (aeLockVal == ANDROID_CONTROL_AE_LOCK_ON);
   1667     if (mAeLock && !aeLock) {
   1668         mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
   1669     }
   1670     mAeLock = aeLock;
   1671 
   1672     res = find_camera_metadata_entry(request,
   1673             ANDROID_CONTROL_AWB_MODE,
   1674             &mode);
   1675     mAwbMode = READ_IF_OK(res, mode.data.u8[0],
   1676                           ANDROID_CONTROL_AWB_MODE_OFF);
   1677 
   1678     // TODO: Override more control fields
   1679 
   1680     if (mAeMode != ANDROID_CONTROL_AE_MODE_OFF) {
   1681         camera_metadata_entry_t exposureTime;
   1682         res = find_camera_metadata_entry(request,
   1683                 ANDROID_SENSOR_EXPOSURE_TIME,
   1684                 &exposureTime);
   1685         if (res == OK) {
   1686             exposureTime.data.i64[0] = mExposureTime;
   1687         }
   1688     }
   1689 
   1690 #undef READ_IF_OK
   1691 
   1692     return OK;
   1693 }
   1694 
   1695 status_t EmulatedFakeCamera2::ControlThread::triggerAction(uint32_t msgType,
   1696         int32_t ext1, int32_t ext2) {
   1697     ALOGV("%s: Triggering %d (%d, %d)", __FUNCTION__, msgType, ext1, ext2);
   1698     Mutex::Autolock lock(mInputMutex);
   1699     switch (msgType) {
   1700         case CAMERA2_TRIGGER_AUTOFOCUS:
   1701             mAfTriggerId = ext1;
   1702             mStartAf = true;
   1703             mCancelAf = false;
   1704             break;
   1705         case CAMERA2_TRIGGER_CANCEL_AUTOFOCUS:
   1706             mAfTriggerId = ext1;
   1707             mStartAf = false;
   1708             mCancelAf = true;
   1709             break;
   1710         case CAMERA2_TRIGGER_PRECAPTURE_METERING:
   1711             mPrecaptureTriggerId = ext1;
   1712             mStartPrecapture = true;
   1713             break;
   1714         default:
   1715             ALOGE("%s: Unknown action triggered: %d (arguments %d %d)",
   1716                     __FUNCTION__, msgType, ext1, ext2);
   1717             return BAD_VALUE;
   1718     }
   1719     return OK;
   1720 }
   1721 
   1722 const nsecs_t EmulatedFakeCamera2::ControlThread::kControlCycleDelay = 100 * MSEC;
   1723 const nsecs_t EmulatedFakeCamera2::ControlThread::kMinAfDuration = 500 * MSEC;
   1724 const nsecs_t EmulatedFakeCamera2::ControlThread::kMaxAfDuration = 900 * MSEC;
   1725 const float EmulatedFakeCamera2::ControlThread::kAfSuccessRate = 0.9;
   1726  // Once every 5 seconds
   1727 const float EmulatedFakeCamera2::ControlThread::kContinuousAfStartRate =
   1728         kControlCycleDelay / 5.0 * SEC;
   1729 const nsecs_t EmulatedFakeCamera2::ControlThread::kMinAeDuration = 500 * MSEC;
   1730 const nsecs_t EmulatedFakeCamera2::ControlThread::kMaxAeDuration = 2 * SEC;
   1731 const nsecs_t EmulatedFakeCamera2::ControlThread::kMinPrecaptureAeDuration = 100 * MSEC;
   1732 const nsecs_t EmulatedFakeCamera2::ControlThread::kMaxPrecaptureAeDuration = 400 * MSEC;
   1733  // Once every 3 seconds
   1734 const float EmulatedFakeCamera2::ControlThread::kAeScanStartRate =
   1735     kControlCycleDelay / 3000000000.0;
   1736 
   1737 const nsecs_t EmulatedFakeCamera2::ControlThread::kNormalExposureTime = 10 * MSEC;
   1738 const nsecs_t EmulatedFakeCamera2::ControlThread::kExposureJump = 2 * MSEC;
   1739 const nsecs_t EmulatedFakeCamera2::ControlThread::kMinExposureTime = 1 * MSEC;
   1740 
// One iteration of the 3A control loop: snapshot pending triggers and 3A
// modes under the input lock, step the AF then AE state machines, then sleep
// until the next cycle (or until the nearest scan deadline, whichever is
// sooner). Returns true so the Thread framework keeps looping.
bool EmulatedFakeCamera2::ControlThread::threadLoop() {
    bool afModeChange = false;
    bool afTriggered = false;
    bool afCancelled = false;
    uint8_t afState;
    uint8_t afMode;
    int32_t afTriggerId;
    bool precaptureTriggered = false;
    uint8_t aeState;
    uint8_t aeMode;
    bool    aeLock;
    int32_t precaptureTriggerId;
    // Default cycle length; may be shortened below by a scan that ends sooner.
    nsecs_t nextSleep = kControlCycleDelay;

    {
        // Consume trigger flags and copy the shared 3A state to locals so the
        // state machines run without holding the lock.
        Mutex::Autolock lock(mInputMutex);
        if (mStartAf) {
            ALOGD("Starting AF trigger processing");
            afTriggered = true;
            mStartAf = false;
        } else if (mCancelAf) {
            ALOGD("Starting cancel AF trigger processing");
            afCancelled = true;
            mCancelAf = false;
        }
        afState = mAfState;
        afMode = mAfMode;
        afModeChange = mAfModeChange;
        mAfModeChange = false;

        afTriggerId = mAfTriggerId;

        if(mStartPrecapture) {
            ALOGD("Starting precapture trigger processing");
            precaptureTriggered = true;
            mStartPrecapture = false;
        }
        aeState = mAeState;
        aeMode = mAeMode;
        aeLock = mAeLock;
        precaptureTriggerId = mPrecaptureTriggerId;
    }

    // A cancel or mode switch resets AF entirely before any trigger handling.
    if (afCancelled || afModeChange) {
        ALOGV("Resetting AF state due to cancel/mode change");
        afState = ANDROID_CONTROL_AF_STATE_INACTIVE;
        updateAfState(afState, afTriggerId);
        mAfScanDuration = 0;
        mLockAfterPassiveScan = false;
    }

    // NOTE(review): oldAfState is never read afterwards — appears vestigial.
    uint8_t oldAfState = afState;

    if (afTriggered) {
        afState = processAfTrigger(afMode, afState);
    }

    // Advance the AF state machine; updateAfScan may shrink nextSleep so we
    // wake up in time for a scan's end.
    afState = maybeStartAfScan(afMode, afState);
    afState = updateAfScan(afMode, afState, &nextSleep);
    updateAfState(afState, afTriggerId);

    if (precaptureTriggered) {
        aeState = processPrecaptureTrigger(aeMode, aeState);
    }

    // Same pattern for AE.
    aeState = maybeStartAeScan(aeMode, aeLock, aeState);
    aeState = updateAeScan(aeMode, aeLock, aeState, &nextSleep);
    updateAeState(aeState, precaptureTriggerId);

    // Sleep out the rest of the cycle, restarting nanosleep on interruption
    // (the remaining time is written back into t by nanosleep).
    int ret;
    timespec t;
    t.tv_sec = 0;
    t.tv_nsec = nextSleep;
    do {
        ret = nanosleep(&t, &t);
    } while (ret != 0);

    // Charge the elapsed cycle time against any running scans.
    if (mAfScanDuration > 0) {
        mAfScanDuration -= nextSleep;
    }
    if (mAeScanDuration > 0) {
        mAeScanDuration -= nextSleep;
    }

    return true;
}
   1827 
   1828 int EmulatedFakeCamera2::ControlThread::processAfTrigger(uint8_t afMode,
   1829         uint8_t afState) {
   1830     switch (afMode) {
   1831         case ANDROID_CONTROL_AF_MODE_OFF:
   1832         case ANDROID_CONTROL_AF_MODE_EDOF:
   1833             // Do nothing
   1834             break;
   1835         case ANDROID_CONTROL_AF_MODE_MACRO:
   1836         case ANDROID_CONTROL_AF_MODE_AUTO:
   1837             switch (afState) {
   1838                 case ANDROID_CONTROL_AF_STATE_INACTIVE:
   1839                 case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
   1840                 case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
   1841                     // Start new focusing cycle
   1842                     mAfScanDuration =  ((double)rand() / RAND_MAX) *
   1843                         (kMaxAfDuration - kMinAfDuration) + kMinAfDuration;
   1844                     afState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
   1845                     ALOGV("%s: AF scan start, duration %" PRId64 " ms",
   1846                           __FUNCTION__, mAfScanDuration / 1000000);
   1847                     break;
   1848                 case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN:
   1849                     // Ignore new request, already scanning
   1850                     break;
   1851                 default:
   1852                     ALOGE("Unexpected AF state in AUTO/MACRO AF mode: %d",
   1853                           afState);
   1854             }
   1855             break;
   1856         case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
   1857             switch (afState) {
   1858                 // Picture mode waits for passive scan to complete
   1859                 case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
   1860                     mLockAfterPassiveScan = true;
   1861                     break;
   1862                 case ANDROID_CONTROL_AF_STATE_INACTIVE:
   1863                     afState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
   1864                     break;
   1865                 case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
   1866                     afState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
   1867                     break;
   1868                 case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
   1869                 case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
   1870                     // Must cancel to get out of these states
   1871                     break;
   1872                 default:
   1873                     ALOGE("Unexpected AF state in CONTINUOUS_PICTURE AF mode: %d",
   1874                           afState);
   1875             }
   1876             break;
   1877         case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
   1878             switch (afState) {
   1879                 // Video mode does not wait for passive scan to complete
   1880                 case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
   1881                 case ANDROID_CONTROL_AF_STATE_INACTIVE:
   1882                     afState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
   1883                     break;
   1884                 case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
   1885                     afState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
   1886                     break;
   1887                 case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
   1888                 case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
   1889                     // Must cancel to get out of these states
   1890                     break;
   1891                 default:
   1892                     ALOGE("Unexpected AF state in CONTINUOUS_VIDEO AF mode: %d",
   1893                           afState);
   1894             }
   1895             break;
   1896         default:
   1897             break;
   1898     }
   1899     return afState;
   1900 }
   1901 
   1902 int EmulatedFakeCamera2::ControlThread::maybeStartAfScan(uint8_t afMode,
   1903         uint8_t afState) {
   1904     if ((afMode == ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO ||
   1905             afMode == ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE) &&
   1906         (afState == ANDROID_CONTROL_AF_STATE_INACTIVE ||
   1907             afState == ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED)) {
   1908 
   1909         bool startScan = ((double)rand() / RAND_MAX) < kContinuousAfStartRate;
   1910         if (startScan) {
   1911             // Start new passive focusing cycle
   1912             mAfScanDuration =  ((double)rand() / RAND_MAX) *
   1913                 (kMaxAfDuration - kMinAfDuration) + kMinAfDuration;
   1914             afState = ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN;
   1915             ALOGV("%s: AF passive scan start, duration %" PRId64 " ms",
   1916                 __FUNCTION__, mAfScanDuration / 1000000);
   1917         }
   1918     }
   1919     return afState;
   1920 }
   1921 
   1922 int EmulatedFakeCamera2::ControlThread::updateAfScan(uint8_t afMode,
   1923         uint8_t afState, nsecs_t *maxSleep) {
   1924     if (! (afState == ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN ||
   1925             afState == ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN ) ) {
   1926         return afState;
   1927     }
   1928 
   1929     if (mAfScanDuration <= 0) {
   1930         ALOGV("%s: AF scan done", __FUNCTION__);
   1931         switch (afMode) {
   1932             case ANDROID_CONTROL_AF_MODE_MACRO:
   1933             case ANDROID_CONTROL_AF_MODE_AUTO: {
   1934                 bool success = ((double)rand() / RAND_MAX) < kAfSuccessRate;
   1935                 if (success) {
   1936                     afState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
   1937                 } else {
   1938                     afState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
   1939                 }
   1940                 break;
   1941             }
   1942             case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
   1943                 if (mLockAfterPassiveScan) {
   1944                     afState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
   1945                     mLockAfterPassiveScan = false;
   1946                 } else {
   1947                     afState = ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED;
   1948                 }
   1949                 break;
   1950             case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
   1951                 afState = ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED;
   1952                 break;
   1953             default:
   1954                 ALOGE("Unexpected AF mode in scan state");
   1955         }
   1956     } else {
   1957         if (mAfScanDuration <= *maxSleep) {
   1958             *maxSleep = mAfScanDuration;
   1959         }
   1960     }
   1961     return afState;
   1962 }
   1963 
   1964 void EmulatedFakeCamera2::ControlThread::updateAfState(uint8_t newState,
   1965         int32_t triggerId) {
   1966     Mutex::Autolock lock(mInputMutex);
   1967     if (mAfState != newState) {
   1968         ALOGV("%s: Autofocus state now %d, id %d", __FUNCTION__,
   1969                 newState, triggerId);
   1970         mAfState = newState;
   1971         mParent->sendNotification(CAMERA2_MSG_AUTOFOCUS,
   1972                 newState, triggerId, 0);
   1973     }
   1974 }
   1975 
   1976 int EmulatedFakeCamera2::ControlThread::processPrecaptureTrigger(uint8_t aeMode,
   1977         uint8_t aeState) {
   1978     switch (aeMode) {
   1979         case ANDROID_CONTROL_AE_MODE_OFF:
   1980             // Don't do anything for these
   1981             return aeState;
   1982         case ANDROID_CONTROL_AE_MODE_ON:
   1983         case ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:
   1984         case ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH:
   1985         case ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE:
   1986             // Trigger a precapture cycle
   1987             aeState = ANDROID_CONTROL_AE_STATE_PRECAPTURE;
   1988             mAeScanDuration = ((double)rand() / RAND_MAX) *
   1989                     (kMaxPrecaptureAeDuration - kMinPrecaptureAeDuration) +
   1990                     kMinPrecaptureAeDuration;
   1991             ALOGD("%s: AE precapture scan start, duration %" PRId64 " ms",
   1992                     __FUNCTION__, mAeScanDuration / 1000000);
   1993 
   1994     }
   1995     return aeState;
   1996 }
   1997 
   1998 int EmulatedFakeCamera2::ControlThread::maybeStartAeScan(uint8_t aeMode,
   1999         bool aeLocked,
   2000         uint8_t aeState) {
   2001     if (aeLocked) return aeState;
   2002     switch (aeMode) {
   2003         case ANDROID_CONTROL_AE_MODE_OFF:
   2004             break;
   2005         case ANDROID_CONTROL_AE_MODE_ON:
   2006         case ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:
   2007         case ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH:
   2008         case ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE: {
   2009             if (aeState != ANDROID_CONTROL_AE_STATE_INACTIVE &&
   2010                     aeState != ANDROID_CONTROL_AE_STATE_CONVERGED) break;
   2011 
   2012             bool startScan = ((double)rand() / RAND_MAX) < kAeScanStartRate;
   2013             if (startScan) {
   2014                 mAeScanDuration = ((double)rand() / RAND_MAX) *
   2015                 (kMaxAeDuration - kMinAeDuration) + kMinAeDuration;
   2016                 aeState = ANDROID_CONTROL_AE_STATE_SEARCHING;
   2017                 ALOGV("%s: AE scan start, duration %" PRId64 " ms",
   2018                         __FUNCTION__, mAeScanDuration / 1000000);
   2019             }
   2020         }
   2021     }
   2022 
   2023     return aeState;
   2024 }
   2025 
// Advance an in-progress AE scan. Locking AE (outside precapture) aborts the
// scan immediately; otherwise, while SEARCHING or in PRECAPTURE, either finish
// the scan (resetting exposure to normal) or jitter the simulated exposure and
// shorten *maxSleep to the scan's remaining time. Returns the new AE state.
// NOTE(review): aeMode is currently unused here — behavior depends only on
// aeLock and aeState.
int EmulatedFakeCamera2::ControlThread::updateAeScan(uint8_t aeMode,
        bool aeLock, uint8_t aeState, nsecs_t *maxSleep) {
    if (aeLock && aeState != ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
        // Lock requested mid-search: stop scanning and report LOCKED.
        mAeScanDuration = 0;
        aeState = ANDROID_CONTROL_AE_STATE_LOCKED;
    } else if ((aeState == ANDROID_CONTROL_AE_STATE_SEARCHING) ||
            (aeState == ANDROID_CONTROL_AE_STATE_PRECAPTURE ) ) {
        if (mAeScanDuration <= 0) {
            ALOGV("%s: AE scan done", __FUNCTION__);
            aeState = aeLock ?
                    ANDROID_CONTROL_AE_STATE_LOCKED :ANDROID_CONTROL_AE_STATE_CONVERGED;

            // mExposureTime is shared with processRequest(), so take the
            // input lock before resetting it.
            Mutex::Autolock lock(mInputMutex);
            mExposureTime = kNormalExposureTime;
        } else {
            // Scan still running: wake up no later than its deadline.
            if (mAeScanDuration <= *maxSleep) {
                *maxSleep = mAeScanDuration;
            }

            // Random walk of the exposure in [-kExposureJump, +kExposureJump],
            // clamped below at kMinExposureTime.
            int64_t exposureDelta =
                    ((double)rand() / RAND_MAX) * 2 * kExposureJump -
                    kExposureJump;
            Mutex::Autolock lock(mInputMutex);
            mExposureTime = mExposureTime + exposureDelta;
            if (mExposureTime < kMinExposureTime) mExposureTime = kMinExposureTime;
        }
    }

    return aeState;
}
   2056 
   2057 
   2058 void EmulatedFakeCamera2::ControlThread::updateAeState(uint8_t newState,
   2059         int32_t triggerId) {
   2060     Mutex::Autolock lock(mInputMutex);
   2061     if (mAeState != newState) {
   2062         ALOGV("%s: Autoexposure state now %d, id %d", __FUNCTION__,
   2063                 newState, triggerId);
   2064         mAeState = newState;
   2065         mParent->sendNotification(CAMERA2_MSG_AUTOEXPOSURE,
   2066                 newState, triggerId, 0);
   2067     }
   2068 }
   2069 
   2070 /** Private methods */
   2071 
   2072 status_t EmulatedFakeCamera2::constructStaticInfo(
   2073         camera_metadata_t **info,
   2074         bool sizeRequest) const {
   2075 
   2076     size_t entryCount = 0;
   2077     size_t dataCount = 0;
   2078     status_t ret;
   2079 
   2080 #define ADD_OR_SIZE( tag, data, count ) \
   2081     if ( ( ret = addOrSize(*info, sizeRequest, &entryCount, &dataCount, \
   2082             tag, data, count) ) != OK ) return ret
   2083 
   2084     // android.lens
   2085 
   2086     // 5 cm min focus distance for back camera, infinity (fixed focus) for front
   2087     const float minFocusDistance = mFacingBack ? 1.0/0.05 : 0.0;
   2088     ADD_OR_SIZE(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
   2089             &minFocusDistance, 1);
   2090     // 5 m hyperfocal distance for back camera, infinity (fixed focus) for front
   2091     const float hyperFocalDistance = mFacingBack ? 1.0/5.0 : 0.0;
   2092     ADD_OR_SIZE(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
   2093             &minFocusDistance, 1);
   2094 
   2095     static const float focalLength = 3.30f; // mm
   2096     ADD_OR_SIZE(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
   2097             &focalLength, 1);
   2098     static const float aperture = 2.8f;
   2099     ADD_OR_SIZE(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
   2100             &aperture, 1);
   2101     static const float filterDensity = 0;
   2102     ADD_OR_SIZE(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
   2103             &filterDensity, 1);
   2104     static const uint8_t availableOpticalStabilization =
   2105             ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
   2106     ADD_OR_SIZE(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
   2107             &availableOpticalStabilization, 1);
   2108 
   2109     static const int32_t lensShadingMapSize[] = {1, 1};
   2110     ADD_OR_SIZE(ANDROID_LENS_INFO_SHADING_MAP_SIZE, lensShadingMapSize,
   2111             sizeof(lensShadingMapSize)/sizeof(int32_t));
   2112 
   2113     int32_t lensFacing = mFacingBack ?
   2114             ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
   2115     ADD_OR_SIZE(ANDROID_LENS_FACING, &lensFacing, 1);
   2116 
   2117     // android.sensor
   2118 
   2119     ADD_OR_SIZE(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
   2120             Sensor::kExposureTimeRange, 2);
   2121 
   2122     ADD_OR_SIZE(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
   2123             &Sensor::kFrameDurationRange[1], 1);
   2124 
   2125     ADD_OR_SIZE(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
   2126             Sensor::kSensitivityRange,
   2127             sizeof(Sensor::kSensitivityRange)
   2128             /sizeof(int32_t));
   2129 
   2130     ADD_OR_SIZE(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
   2131             &Sensor::kColorFilterArrangement, 1);
   2132 
   2133     static const float sensorPhysicalSize[2] = {3.20f, 2.40f}; // mm
   2134     ADD_OR_SIZE(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
   2135             sensorPhysicalSize, 2);
   2136 
   2137     const int32_t pixelArray[] = {mSensorWidth, mSensorHeight};
   2138     ADD_OR_SIZE(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
   2139             pixelArray, 2);
   2140 
   2141     ADD_OR_SIZE(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
   2142             pixelArray, 2);
   2143 
   2144     ADD_OR_SIZE(ANDROID_SENSOR_INFO_WHITE_LEVEL,
   2145             &Sensor::kMaxRawValue, 1);
   2146 
   2147     static const int32_t blackLevelPattern[4] = {
   2148         static_cast<int32_t>(Sensor::kBlackLevel),
   2149         static_cast<int32_t>(Sensor::kBlackLevel),
   2150         static_cast<int32_t>(Sensor::kBlackLevel),
   2151         static_cast<int32_t>(Sensor::kBlackLevel)
   2152     };
   2153     ADD_OR_SIZE(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
   2154             blackLevelPattern, sizeof(blackLevelPattern)/sizeof(int32_t));
   2155 
   2156     //TODO: sensor color calibration fields
   2157 
   2158     // android.flash
   2159     static const uint8_t flashAvailable = 0;
   2160     ADD_OR_SIZE(ANDROID_FLASH_INFO_AVAILABLE, &flashAvailable, 1);
   2161 
   2162     static const int64_t flashChargeDuration = 0;
   2163     ADD_OR_SIZE(ANDROID_FLASH_INFO_CHARGE_DURATION, &flashChargeDuration, 1);
   2164 
   2165     // android.tonemap
   2166 
   2167     static const int32_t tonemapCurvePoints = 128;
   2168     ADD_OR_SIZE(ANDROID_TONEMAP_MAX_CURVE_POINTS, &tonemapCurvePoints, 1);
   2169 
   2170     // android.scaler
   2171 
   2172     ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_FORMATS,
   2173             kAvailableFormats,
   2174             sizeof(kAvailableFormats)/sizeof(uint32_t));
   2175 
   2176     ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
   2177             kAvailableRawSizes,
   2178             sizeof(kAvailableRawSizes)/sizeof(uint32_t));
   2179 
   2180     ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
   2181             kAvailableRawMinDurations,
   2182             sizeof(kAvailableRawMinDurations)/sizeof(uint64_t));
   2183 
   2184     if (mFacingBack) {
   2185         ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
   2186                 kAvailableProcessedSizesBack,
   2187                 sizeof(kAvailableProcessedSizesBack)/sizeof(uint32_t));
   2188     } else {
   2189         ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
   2190                 kAvailableProcessedSizesFront,
   2191                 sizeof(kAvailableProcessedSizesFront)/sizeof(uint32_t));
   2192     }
   2193 
   2194     ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
   2195             kAvailableProcessedMinDurations,
   2196             sizeof(kAvailableProcessedMinDurations)/sizeof(uint64_t));
   2197 
   2198     if (mFacingBack) {
   2199         ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
   2200                 kAvailableJpegSizesBack,
   2201                 sizeof(kAvailableJpegSizesBack)/sizeof(uint32_t));
   2202     } else {
   2203         ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
   2204                 kAvailableJpegSizesFront,
   2205                 sizeof(kAvailableJpegSizesFront)/sizeof(uint32_t));
   2206     }
   2207 
   2208     ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
   2209             kAvailableJpegMinDurations,
   2210             sizeof(kAvailableJpegMinDurations)/sizeof(uint64_t));
   2211 
   2212     static const float maxZoom = 10;
   2213     ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
   2214             &maxZoom, 1);
   2215 
   2216     // android.jpeg
   2217 
   2218     static const int32_t jpegThumbnailSizes[] = {
   2219             0, 0,
   2220             160, 120,
   2221             320, 240
   2222      };
   2223     ADD_OR_SIZE(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
   2224             jpegThumbnailSizes, sizeof(jpegThumbnailSizes)/sizeof(int32_t));
   2225 
   2226     static const int32_t jpegMaxSize = JpegCompressor::kMaxJpegSize;
   2227     ADD_OR_SIZE(ANDROID_JPEG_MAX_SIZE, &jpegMaxSize, 1);
   2228 
   2229     // android.stats
   2230 
   2231     static const uint8_t availableFaceDetectModes[] = {
   2232         ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
   2233         ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE,
   2234         ANDROID_STATISTICS_FACE_DETECT_MODE_FULL
   2235     };
   2236 
   2237     ADD_OR_SIZE(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
   2238             availableFaceDetectModes,
   2239             sizeof(availableFaceDetectModes));
   2240 
   2241     static const int32_t maxFaceCount = 8;
   2242     ADD_OR_SIZE(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
   2243             &maxFaceCount, 1);
   2244 
   2245     static const int32_t histogramSize = 64;
   2246     ADD_OR_SIZE(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
   2247             &histogramSize, 1);
   2248 
   2249     static const int32_t maxHistogramCount = 1000;
   2250     ADD_OR_SIZE(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
   2251             &maxHistogramCount, 1);
   2252 
   2253     static const int32_t sharpnessMapSize[2] = {64, 64};
   2254     ADD_OR_SIZE(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
   2255             sharpnessMapSize, sizeof(sharpnessMapSize)/sizeof(int32_t));
   2256 
   2257     static const int32_t maxSharpnessMapValue = 1000;
   2258     ADD_OR_SIZE(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
   2259             &maxSharpnessMapValue, 1);
   2260 
   2261     // android.control
   2262 
   2263     static const uint8_t availableSceneModes[] = {
   2264             ANDROID_CONTROL_SCENE_MODE_DISABLED
   2265     };
   2266     ADD_OR_SIZE(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
   2267             availableSceneModes, sizeof(availableSceneModes));
   2268 
   2269     static const uint8_t availableEffects[] = {
   2270             ANDROID_CONTROL_EFFECT_MODE_OFF
   2271     };
   2272     ADD_OR_SIZE(ANDROID_CONTROL_AVAILABLE_EFFECTS,
   2273             availableEffects, sizeof(availableEffects));
   2274 
   2275     static const int32_t max3aRegions[] = {/*AE*/ 0,/*AWB*/ 0,/*AF*/ 0};
   2276     ADD_OR_SIZE(ANDROID_CONTROL_MAX_REGIONS,
   2277             max3aRegions, sizeof(max3aRegions)/sizeof(max3aRegions[0]));
   2278 
   2279     static const uint8_t availableAeModes[] = {
   2280             ANDROID_CONTROL_AE_MODE_OFF,
   2281             ANDROID_CONTROL_AE_MODE_ON
   2282     };
   2283     ADD_OR_SIZE(ANDROID_CONTROL_AE_AVAILABLE_MODES,
   2284             availableAeModes, sizeof(availableAeModes));
   2285 
   2286     static const camera_metadata_rational exposureCompensationStep = {
   2287             1, 3
   2288     };
   2289     ADD_OR_SIZE(ANDROID_CONTROL_AE_COMPENSATION_STEP,
   2290             &exposureCompensationStep, 1);
   2291 
   2292     int32_t exposureCompensationRange[] = {-9, 9};
   2293     ADD_OR_SIZE(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
   2294             exposureCompensationRange,
   2295             sizeof(exposureCompensationRange)/sizeof(int32_t));
   2296 
   2297     static const int32_t availableTargetFpsRanges[] = {
   2298             5, 30, 15, 30
   2299     };
   2300     ADD_OR_SIZE(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
   2301             availableTargetFpsRanges,
   2302             sizeof(availableTargetFpsRanges)/sizeof(int32_t));
   2303 
   2304     static const uint8_t availableAntibandingModes[] = {
   2305             ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,
   2306             ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO
   2307     };
   2308     ADD_OR_SIZE(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
   2309             availableAntibandingModes, sizeof(availableAntibandingModes));
   2310 
   2311     static const uint8_t availableAwbModes[] = {
   2312             ANDROID_CONTROL_AWB_MODE_OFF,
   2313             ANDROID_CONTROL_AWB_MODE_AUTO,
   2314             ANDROID_CONTROL_AWB_MODE_INCANDESCENT,
   2315             ANDROID_CONTROL_AWB_MODE_FLUORESCENT,
   2316             ANDROID_CONTROL_AWB_MODE_DAYLIGHT,
   2317             ANDROID_CONTROL_AWB_MODE_SHADE
   2318     };
   2319     ADD_OR_SIZE(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
   2320             availableAwbModes, sizeof(availableAwbModes));
   2321 
   2322     static const uint8_t availableAfModesBack[] = {
   2323             ANDROID_CONTROL_AF_MODE_OFF,
   2324             ANDROID_CONTROL_AF_MODE_AUTO,
   2325             ANDROID_CONTROL_AF_MODE_MACRO,
   2326             ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,
   2327             ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE
   2328     };
   2329 
   2330     static const uint8_t availableAfModesFront[] = {
   2331             ANDROID_CONTROL_AF_MODE_OFF
   2332     };
   2333 
   2334     if (mFacingBack) {
   2335         ADD_OR_SIZE(ANDROID_CONTROL_AF_AVAILABLE_MODES,
   2336                     availableAfModesBack, sizeof(availableAfModesBack));
   2337     } else {
   2338         ADD_OR_SIZE(ANDROID_CONTROL_AF_AVAILABLE_MODES,
   2339                     availableAfModesFront, sizeof(availableAfModesFront));
   2340     }
   2341 
   2342     static const uint8_t availableVstabModes[] = {
   2343             ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF
   2344     };
   2345     ADD_OR_SIZE(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
   2346             availableVstabModes, sizeof(availableVstabModes));
   2347 
   2348 #undef ADD_OR_SIZE
   2349     /** Allocate metadata if sizing */
   2350     if (sizeRequest) {
   2351         ALOGV("Allocating %zu entries, %zu extra bytes for "
   2352                 "static camera info",
   2353                 entryCount, dataCount);
   2354         *info = allocate_camera_metadata(entryCount, dataCount);
   2355         if (*info == NULL) {
   2356             ALOGE("Unable to allocate camera static info"
   2357                     "(%zu entries, %zu bytes extra data)",
   2358                     entryCount, dataCount);
   2359             return NO_MEMORY;
   2360         }
   2361     }
   2362     return OK;
   2363 }
   2364 
/*
 * Builds (or sizes) the default capture-request metadata for the given
 * request template (CAMERA2_TEMPLATE_PREVIEW, _STILL_CAPTURE,
 * _VIDEO_RECORD, _VIDEO_SNAPSHOT, _ZERO_SHUTTER_LAG, or other/custom).
 *
 * Two-pass protocol driven by sizeRequest:
 *   - sizeRequest == true:  nothing is written; every ADD_OR_SIZE only
 *     accumulates entryCount/dataCount, and *request is allocated at the
 *     end with exactly that capacity.
 *   - sizeRequest == false: *request must already be allocated; every
 *     ADD_OR_SIZE writes its entry into it.
 * Both passes must run the exact same ADD_OR_SIZE sequence so the
 * allocation matches the fill.
 *
 * Returns OK on success, NO_MEMORY if allocation fails during the sizing
 * pass, or the first error propagated from addOrSize().
 */
status_t EmulatedFakeCamera2::constructDefaultRequest(
        int request_template,
        camera_metadata_t **request,
        bool sizeRequest) const {

    size_t entryCount = 0;
    size_t dataCount = 0;
    status_t ret;

// Add the entry (fill pass) or tally its size (sizing pass); bail out of
// the function on the first failure.
#define ADD_OR_SIZE( tag, data, count ) \
    if ( ( ret = addOrSize(*request, sizeRequest, &entryCount, &dataCount, \
            tag, data, count) ) != OK ) return ret

    /** android.request */

    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
    ADD_OR_SIZE(ANDROID_REQUEST_TYPE, &requestType, 1);

    static const uint8_t metadataMode = ANDROID_REQUEST_METADATA_MODE_FULL;
    ADD_OR_SIZE(ANDROID_REQUEST_METADATA_MODE, &metadataMode, 1);

    static const int32_t id = 0;
    ADD_OR_SIZE(ANDROID_REQUEST_ID, &id, 1);

    static const int32_t frameCount = 0;
    ADD_OR_SIZE(ANDROID_REQUEST_FRAME_COUNT, &frameCount, 1);

    // OUTPUT_STREAMS set by user
    // Manually reserve room for the user-supplied entry: one metadata
    // entry plus payload space for up to 5 stream ids.
    entryCount += 1;
    dataCount += 5; // TODO: Should be maximum stream number

    /** android.lens */

    // 0 diopters == focused at infinity.
    static const float focusDistance = 0;
    ADD_OR_SIZE(ANDROID_LENS_FOCUS_DISTANCE, &focusDistance, 1);

    static const float aperture = 2.8f;
    ADD_OR_SIZE(ANDROID_LENS_APERTURE, &aperture, 1);

    static const float focalLength = 5.0f;
    ADD_OR_SIZE(ANDROID_LENS_FOCAL_LENGTH, &focalLength, 1);

    static const float filterDensity = 0;
    ADD_OR_SIZE(ANDROID_LENS_FILTER_DENSITY, &filterDensity, 1);

    static const uint8_t opticalStabilizationMode =
            ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
    ADD_OR_SIZE(ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
            &opticalStabilizationMode, 1);

    // FOCUS_RANGE set only in frame

    /** android.sensor */

    static const int64_t exposureTime = 10 * MSEC;
    ADD_OR_SIZE(ANDROID_SENSOR_EXPOSURE_TIME, &exposureTime, 1);

    static const int64_t frameDuration = 33333333L; // 1/30 s
    ADD_OR_SIZE(ANDROID_SENSOR_FRAME_DURATION, &frameDuration, 1);

    static const int32_t sensitivity = 100;
    ADD_OR_SIZE(ANDROID_SENSOR_SENSITIVITY, &sensitivity, 1);

    // TIMESTAMP set only in frame

    /** android.flash */

    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
    ADD_OR_SIZE(ANDROID_FLASH_MODE, &flashMode, 1);

    static const uint8_t flashPower = 10;
    ADD_OR_SIZE(ANDROID_FLASH_FIRING_POWER, &flashPower, 1);

    static const int64_t firingTime = 0;
    ADD_OR_SIZE(ANDROID_FLASH_FIRING_TIME, &firingTime, 1);

    /** Processing block modes */
    // High-quality processing for still-image templates, fast processing
    // for preview/record (and anything unrecognized).
    uint8_t hotPixelMode = 0;
    uint8_t demosaicMode = 0;
    uint8_t noiseMode = 0;
    uint8_t shadingMode = 0;
    uint8_t colorMode = 0;
    uint8_t tonemapMode = 0;
    uint8_t edgeMode = 0;
    switch (request_template) {
      case CAMERA2_TEMPLATE_STILL_CAPTURE:
        // fall-through
      case CAMERA2_TEMPLATE_VIDEO_SNAPSHOT:
        // fall-through
      case CAMERA2_TEMPLATE_ZERO_SHUTTER_LAG:
        hotPixelMode = ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY;
        demosaicMode = ANDROID_DEMOSAIC_MODE_HIGH_QUALITY;
        noiseMode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
        shadingMode = ANDROID_SHADING_MODE_HIGH_QUALITY;
        colorMode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY;
        tonemapMode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
        edgeMode = ANDROID_EDGE_MODE_HIGH_QUALITY;
        break;
      case CAMERA2_TEMPLATE_PREVIEW:
        // fall-through
      case CAMERA2_TEMPLATE_VIDEO_RECORD:
        // fall-through
      default:
        hotPixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
        demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
        noiseMode = ANDROID_NOISE_REDUCTION_MODE_FAST;
        shadingMode = ANDROID_SHADING_MODE_FAST;
        colorMode = ANDROID_COLOR_CORRECTION_MODE_FAST;
        tonemapMode = ANDROID_TONEMAP_MODE_FAST;
        edgeMode = ANDROID_EDGE_MODE_FAST;
        break;
    }
    ADD_OR_SIZE(ANDROID_HOT_PIXEL_MODE, &hotPixelMode, 1);
    ADD_OR_SIZE(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
    ADD_OR_SIZE(ANDROID_NOISE_REDUCTION_MODE, &noiseMode, 1);
    ADD_OR_SIZE(ANDROID_SHADING_MODE, &shadingMode, 1);
    ADD_OR_SIZE(ANDROID_COLOR_CORRECTION_MODE, &colorMode, 1);
    ADD_OR_SIZE(ANDROID_TONEMAP_MODE, &tonemapMode, 1);
    ADD_OR_SIZE(ANDROID_EDGE_MODE, &edgeMode, 1);

    /** android.noise */
    static const uint8_t noiseStrength = 5;
    ADD_OR_SIZE(ANDROID_NOISE_REDUCTION_STRENGTH, &noiseStrength, 1);

    /** android.color */
    // Identity color matrix: no color correction applied by default.
    static const float colorTransform[9] = {
        1.0f, 0.f, 0.f,
        0.f, 1.f, 0.f,
        0.f, 0.f, 1.f
    };
    ADD_OR_SIZE(ANDROID_COLOR_CORRECTION_TRANSFORM, colorTransform, 9);

    /** android.tonemap */
    // Linear two-point curve (0,0)-(1,1), shared by all three channels.
    static const float tonemapCurve[4] = {
        0.f, 0.f,
        1.f, 1.f
    };
    ADD_OR_SIZE(ANDROID_TONEMAP_CURVE_RED, tonemapCurve, 4);
    ADD_OR_SIZE(ANDROID_TONEMAP_CURVE_GREEN, tonemapCurve, 4);
    ADD_OR_SIZE(ANDROID_TONEMAP_CURVE_BLUE, tonemapCurve, 4);

    /** android.edge */
    static const uint8_t edgeStrength = 5;
    ADD_OR_SIZE(ANDROID_EDGE_STRENGTH, &edgeStrength, 1);

    /** android.scaler */
    // NOTE(review): 3-element crop region (presumably x, y, width as in
    // the legacy camera2 tag layout) — confirm against the metadata
    // definition; height is not stored here.
    static const int32_t cropRegion[3] = {
        0, 0, static_cast<int32_t>(mSensorWidth)
    };
    ADD_OR_SIZE(ANDROID_SCALER_CROP_REGION, cropRegion, 3);

    /** android.jpeg */
    static const int32_t jpegQuality = 80;
    ADD_OR_SIZE(ANDROID_JPEG_QUALITY, &jpegQuality, 1);

    static const int32_t thumbnailSize[2] = {
        640, 480
    };
    ADD_OR_SIZE(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSize, 2);

    static const int32_t thumbnailQuality = 80;
    ADD_OR_SIZE(ANDROID_JPEG_THUMBNAIL_QUALITY, &thumbnailQuality, 1);

    static const double gpsCoordinates[2] = {
        0, 0
    };
    ADD_OR_SIZE(ANDROID_JPEG_GPS_COORDINATES, gpsCoordinates, 2);

    // Fixed-size 32-byte buffer; remainder is zero-filled by the
    // aggregate initialization.
    static const uint8_t gpsProcessingMethod[32] = "None";
    ADD_OR_SIZE(ANDROID_JPEG_GPS_PROCESSING_METHOD, gpsProcessingMethod, 32);

    static const int64_t gpsTimestamp = 0;
    ADD_OR_SIZE(ANDROID_JPEG_GPS_TIMESTAMP, &gpsTimestamp, 1);

    static const int32_t jpegOrientation = 0;
    ADD_OR_SIZE(ANDROID_JPEG_ORIENTATION, &jpegOrientation, 1);

    /** android.stats */

    static const uint8_t faceDetectMode =
        ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
    ADD_OR_SIZE(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);

    static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
    ADD_OR_SIZE(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);

    static const uint8_t sharpnessMapMode =
        ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
    ADD_OR_SIZE(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);

    // faceRectangles, faceScores, faceLandmarks, faceIds, histogram,
    // sharpnessMap only in frames

    /** android.control */

    // Map the request template to the corresponding capture intent.
    uint8_t controlIntent = 0;
    switch (request_template) {
      case CAMERA2_TEMPLATE_PREVIEW:
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
        break;
      case CAMERA2_TEMPLATE_STILL_CAPTURE:
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
        break;
      case CAMERA2_TEMPLATE_VIDEO_RECORD:
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
        break;
      case CAMERA2_TEMPLATE_VIDEO_SNAPSHOT:
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
        break;
      case CAMERA2_TEMPLATE_ZERO_SHUTTER_LAG:
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
        break;
      default:
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
        break;
    }
    ADD_OR_SIZE(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);

    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
    ADD_OR_SIZE(ANDROID_CONTROL_MODE, &controlMode, 1);

    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
    ADD_OR_SIZE(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);

    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
    ADD_OR_SIZE(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);

    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
    ADD_OR_SIZE(ANDROID_CONTROL_AE_MODE, &aeMode, 1);

    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
    ADD_OR_SIZE(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);

    // Full-sensor metering region with weight 1000; reused verbatim for
    // AE, AWB, and AF below.
    static const int32_t controlRegions[5] = {
        0, 0,
        static_cast<int32_t>(mSensorWidth),
        static_cast<int32_t>(mSensorHeight),
        1000
    };
    ADD_OR_SIZE(ANDROID_CONTROL_AE_REGIONS, controlRegions, 5);

    static const int32_t aeExpCompensation = 0;
    ADD_OR_SIZE(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &aeExpCompensation, 1);

    static const int32_t aeTargetFpsRange[2] = {
        10, 30
    };
    ADD_OR_SIZE(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, aeTargetFpsRange, 2);

    static const uint8_t aeAntibandingMode =
            ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
    ADD_OR_SIZE(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &aeAntibandingMode, 1);

    static const uint8_t awbMode =
            ANDROID_CONTROL_AWB_MODE_AUTO;
    ADD_OR_SIZE(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);

    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
    ADD_OR_SIZE(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);

    ADD_OR_SIZE(ANDROID_CONTROL_AWB_REGIONS, controlRegions, 5);

    // Template-specific default AF mode: auto for stills/preview,
    // continuous for video, continuous-picture for ZSL.
    uint8_t afMode = 0;
    switch (request_template) {
      case CAMERA2_TEMPLATE_PREVIEW:
        afMode = ANDROID_CONTROL_AF_MODE_AUTO;
        break;
      case CAMERA2_TEMPLATE_STILL_CAPTURE:
        afMode = ANDROID_CONTROL_AF_MODE_AUTO;
        break;
      case CAMERA2_TEMPLATE_VIDEO_RECORD:
        afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
        break;
      case CAMERA2_TEMPLATE_VIDEO_SNAPSHOT:
        afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
        break;
      case CAMERA2_TEMPLATE_ZERO_SHUTTER_LAG:
        afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
        break;
      default:
        afMode = ANDROID_CONTROL_AF_MODE_AUTO;
        break;
    }
    ADD_OR_SIZE(ANDROID_CONTROL_AF_MODE, &afMode, 1);

    ADD_OR_SIZE(ANDROID_CONTROL_AF_REGIONS, controlRegions, 5);

    static const uint8_t vstabMode =
        ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
    ADD_OR_SIZE(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vstabMode, 1);

    // aeState, awbState, afState only in frame

    /** Allocate metadata if sizing */
    if (sizeRequest) {
        ALOGV("Allocating %zu entries, %zu extra bytes for "
                "request template type %d",
                entryCount, dataCount, request_template);
        *request = allocate_camera_metadata(entryCount, dataCount);
        if (*request == NULL) {
            ALOGE("Unable to allocate new request template type %d "
                    "(%zu entries, %zu bytes extra data)", request_template,
                    entryCount, dataCount);
            return NO_MEMORY;
        }
    }
    return OK;
// Macro is function-local; keep it from leaking to the rest of the file.
#undef ADD_OR_SIZE
}
   2674 
   2675 status_t EmulatedFakeCamera2::addOrSize(camera_metadata_t *request,
   2676         bool sizeRequest,
   2677         size_t *entryCount,
   2678         size_t *dataCount,
   2679         uint32_t tag,
   2680         const void *entryData,
   2681         size_t entryDataCount) {
   2682     status_t res;
   2683     if (!sizeRequest) {
   2684         return add_camera_metadata_entry(request, tag, entryData,
   2685                 entryDataCount);
   2686     } else {
   2687         int type = get_camera_metadata_tag_type(tag);
   2688         if (type < 0 ) return BAD_VALUE;
   2689         (*entryCount)++;
   2690         (*dataCount) += calculate_camera_metadata_entry_data_size(type,
   2691                 entryDataCount);
   2692         return OK;
   2693     }
   2694 }
   2695 
   2696 bool EmulatedFakeCamera2::isStreamInUse(uint32_t id) {
   2697     // Assumes mMutex is locked; otherwise new requests could enter
   2698     // configureThread while readoutThread is being checked
   2699 
   2700     // Order of isStreamInUse calls matters
   2701     if (mConfigureThread->isStreamInUse(id) ||
   2702             mReadoutThread->isStreamInUse(id) ||
   2703             mJpegCompressor->isStreamInUse(id) ) {
   2704         ALOGE("%s: Stream %d is in use in active requests!",
   2705                 __FUNCTION__, id);
   2706         return true;
   2707     }
   2708     return false;
   2709 }
   2710 
   2711 bool EmulatedFakeCamera2::isReprocessStreamInUse(uint32_t id) {
   2712     // TODO: implement
   2713     return false;
   2714 }
   2715 
   2716 const Stream& EmulatedFakeCamera2::getStreamInfo(uint32_t streamId) {
   2717     Mutex::Autolock lock(mMutex);
   2718 
   2719     return mStreams.valueFor(streamId);
   2720 }
   2721 
   2722 const ReprocessStream& EmulatedFakeCamera2::getReprocessStreamInfo(uint32_t streamId) {
   2723     Mutex::Autolock lock(mMutex);
   2724 
   2725     return mReprocessStreams.valueFor(streamId);
   2726 }
   2727 
   2728 };  /* namespace android */
   2729