Home | History | Annotate | Download | only in camera
      1 /*
      2  * Copyright (C) 2012 The Android Open Source Project
      3  *
      4  * Licensed under the Apache License, Version 2.0 (the "License");
      5  * you may not use this file except in compliance with the License.
      6  * You may obtain a copy of the License at
      7  *
      8  *      http://www.apache.org/licenses/LICENSE-2.0
      9  *
     10  * Unless required by applicable law or agreed to in writing, software
     11  * distributed under the License is distributed on an "AS IS" BASIS,
     12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     13  * See the License for the specific language governing permissions and
     14  * limitations under the License.
     15  */
     16 
     17 /*
     18  * Contains implementation of a class EmulatedFakeCamera2 that encapsulates
     19  * functionality of an advanced fake camera.
     20  */
     21 
     22 //#define LOG_NDEBUG 0
     23 #define LOG_TAG "EmulatedCamera_FakeCamera2"
     24 #include <utils/Log.h>
     25 
     26 #include "EmulatedFakeCamera2.h"
     27 #include "EmulatedCameraFactory.h"
     28 #include <ui/Rect.h>
     29 #include <ui/GraphicBufferMapper.h>
     30 #include "gralloc_cb.h"
     31 
     32 #define ERROR_CAMERA_NOT_PRESENT -EPIPE
     33 
     34 #define CAMERA2_EXT_TRIGGER_TESTING_DISCONNECT 0xFFFFFFFF
     35 
     36 namespace android {
     37 
// Time-unit multipliers. The values (USEC = 1000) indicate a nanosecond
// base unit — presumably matching the sensor's ns frame-duration range
// used below; TODO(review): confirm against Sensor's timestamp units.
const int64_t USEC = 1000LL;
const int64_t MSEC = USEC * 1000LL;
const int64_t SEC = MSEC * 1000LL;
     41 
// Pixel formats this fake camera advertises for output streams.
const uint32_t EmulatedFakeCamera2::kAvailableFormats[4] = {
        HAL_PIXEL_FORMAT_RAW_SENSOR,
        HAL_PIXEL_FORMAT_BLOB,
        HAL_PIXEL_FORMAT_RGBA_8888,
        //        HAL_PIXEL_FORMAT_YV12,
        HAL_PIXEL_FORMAT_YCrCb_420_SP
};

// Raw-sensor output sizes, stored as flat (width, height) pairs.
const uint32_t EmulatedFakeCamera2::kAvailableRawSizes[2] = {
    640, 480
    //    Sensor::kResolution[0], Sensor::kResolution[1]
};

// Minimum raw frame duration, taken from the sensor's duration range.
const uint64_t EmulatedFakeCamera2::kAvailableRawMinDurations[1] = {
    Sensor::kFrameDurationRange[0]
};

// Processed (YUV/RGB) output sizes for the back camera, (width, height) pairs.
const uint32_t EmulatedFakeCamera2::kAvailableProcessedSizesBack[4] = {
    640, 480, 320, 240
    //    Sensor::kResolution[0], Sensor::kResolution[1]
};

// Processed (YUV/RGB) output sizes for the front camera, (width, height) pairs.
const uint32_t EmulatedFakeCamera2::kAvailableProcessedSizesFront[4] = {
    320, 240, 160, 120
    //    Sensor::kResolution[0], Sensor::kResolution[1]
};

// Minimum processed frame duration, from the sensor's duration range.
const uint64_t EmulatedFakeCamera2::kAvailableProcessedMinDurations[1] = {
    Sensor::kFrameDurationRange[0]
};

// JPEG (BLOB) output sizes for the back camera, (width, height) pairs.
const uint32_t EmulatedFakeCamera2::kAvailableJpegSizesBack[2] = {
    640, 480
    //    Sensor::kResolution[0], Sensor::kResolution[1]
};

// JPEG (BLOB) output sizes for the front camera, (width, height) pairs.
const uint32_t EmulatedFakeCamera2::kAvailableJpegSizesFront[2] = {
    320, 240
    //    Sensor::kResolution[0], Sensor::kResolution[1]
};


// Minimum JPEG frame duration, from the sensor's duration range.
const uint64_t EmulatedFakeCamera2::kAvailableJpegMinDurations[1] = {
    Sensor::kFrameDurationRange[0]
};
     87 
     88 
     89 EmulatedFakeCamera2::EmulatedFakeCamera2(int cameraId,
     90         bool facingBack,
     91         struct hw_module_t* module)
     92         : EmulatedCamera2(cameraId,module),
     93           mFacingBack(facingBack),
     94           mIsConnected(false)
     95 {
     96     ALOGD("Constructing emulated fake camera 2 facing %s",
     97             facingBack ? "back" : "front");
     98 }
     99 
    100 EmulatedFakeCamera2::~EmulatedFakeCamera2() {
    101     if (mCameraInfo != NULL) {
    102         free_camera_metadata(mCameraInfo);
    103     }
    104 }
    105 
    106 /****************************************************************************
    107  * Public API overrides
    108  ***************************************************************************/
    109 
    110 status_t EmulatedFakeCamera2::Initialize() {
    111     status_t res;
    112 
    113     set_camera_metadata_vendor_tag_ops(
    114             static_cast<vendor_tag_query_ops_t*>(&mVendorTagOps));
    115 
    116     res = constructStaticInfo(&mCameraInfo, true);
    117     if (res != OK) {
    118         ALOGE("%s: Unable to allocate static info: %s (%d)",
    119                 __FUNCTION__, strerror(-res), res);
    120         return res;
    121     }
    122     res = constructStaticInfo(&mCameraInfo, false);
    123     if (res != OK) {
    124         ALOGE("%s: Unable to fill in static info: %s (%d)",
    125                 __FUNCTION__, strerror(-res), res);
    126         return res;
    127     }
    128     if (res != OK) return res;
    129 
    130     mNextStreamId = 1;
    131     mNextReprocessStreamId = 1;
    132     mRawStreamCount = 0;
    133     mProcessedStreamCount = 0;
    134     mJpegStreamCount = 0;
    135     mReprocessStreamCount = 0;
    136 
    137     return NO_ERROR;
    138 }
    139 
    140 /****************************************************************************
    141  * Camera module API overrides
    142  ***************************************************************************/
    143 
    144 status_t EmulatedFakeCamera2::connectCamera(hw_device_t** device) {
    145     status_t res;
    146     ALOGV("%s", __FUNCTION__);
    147 
    148     {
    149         Mutex::Autolock l(mMutex);
    150         if (!mStatusPresent) {
    151             ALOGE("%s: Camera ID %d is unplugged", __FUNCTION__,
    152                   mCameraID);
    153             return -ENODEV;
    154         }
    155     }
    156 
    157     mConfigureThread = new ConfigureThread(this);
    158     mReadoutThread = new ReadoutThread(this);
    159     mControlThread = new ControlThread(this);
    160     mSensor = new Sensor();
    161     mJpegCompressor = new JpegCompressor();
    162 
    163     mNextStreamId = 1;
    164     mNextReprocessStreamId = 1;
    165 
    166     res = mSensor->startUp();
    167     if (res != NO_ERROR) return res;
    168 
    169     res = mConfigureThread->run("EmulatedFakeCamera2::configureThread");
    170     if (res != NO_ERROR) return res;
    171 
    172     res = mReadoutThread->run("EmulatedFakeCamera2::readoutThread");
    173     if (res != NO_ERROR) return res;
    174 
    175     res = mControlThread->run("EmulatedFakeCamera2::controlThread");
    176     if (res != NO_ERROR) return res;
    177 
    178     status_t ret = EmulatedCamera2::connectCamera(device);
    179 
    180     if (ret >= 0) {
    181         mIsConnected = true;
    182     }
    183 
    184     return ret;
    185 }
    186 
    187 status_t EmulatedFakeCamera2::plugCamera() {
    188     {
    189         Mutex::Autolock l(mMutex);
    190 
    191         if (!mStatusPresent) {
    192             ALOGI("%s: Plugged back in", __FUNCTION__);
    193             mStatusPresent = true;
    194         }
    195     }
    196 
    197     return NO_ERROR;
    198 }
    199 
// Marks the camera as physically absent and closes it if open.
status_t EmulatedFakeCamera2::unplugCamera() {
    {
        Mutex::Autolock l(mMutex);

        if (mStatusPresent) {
            ALOGI("%s: Unplugged camera", __FUNCTION__);
            mStatusPresent = false;
        }
    }

    // The lock must be released before this call: closeCamera() acquires
    // mMutex itself, so holding it across the call would deadlock.
    return closeCamera();
}
    212 
    213 camera_device_status_t EmulatedFakeCamera2::getHotplugStatus() {
    214     Mutex::Autolock l(mMutex);
    215     return mStatusPresent ?
    216         CAMERA_DEVICE_STATUS_PRESENT :
    217         CAMERA_DEVICE_STATUS_NOT_PRESENT;
    218 }
    219 
    220 
    221 
// Shuts down the pipeline: stops the sensor, asks all worker threads to
// exit, then joins them. Idempotent — returns immediately if not connected.
status_t EmulatedFakeCamera2::closeCamera() {
    {
        Mutex::Autolock l(mMutex);

        status_t res;
        ALOGV("%s", __FUNCTION__);

        if (!mIsConnected) {
            return NO_ERROR;
        }

        res = mSensor->shutDown();
        if (res != NO_ERROR) {
            ALOGE("%s: Unable to shut down sensor: %d", __FUNCTION__, res);
            return res;
        }

        // Request (but don't wait for) thread exit while still holding the
        // lock; cancel the compressor so a pending JPEG doesn't block exit.
        mConfigureThread->requestExit();
        mReadoutThread->requestExit();
        mControlThread->requestExit();
        mJpegCompressor->cancel();
    }

    // give up the lock since we will now block and the threads
    // can call back into this object
    mConfigureThread->join();
    mReadoutThread->join();
    mControlThread->join();

    ALOGV("%s exit", __FUNCTION__);

    {
        Mutex::Autolock l(mMutex);
        mIsConnected = false;
    }

    return NO_ERROR;
}
    260 
    261 status_t EmulatedFakeCamera2::getCameraInfo(struct camera_info *info) {
    262     info->facing = mFacingBack ? CAMERA_FACING_BACK : CAMERA_FACING_FRONT;
    263     info->orientation = gEmulatedCameraFactory.getFakeCameraOrientation();
    264     return EmulatedCamera2::getCameraInfo(info);
    265 }
    266 
    267 /****************************************************************************
    268  * Camera device API overrides
    269  ***************************************************************************/
    270 
    271 /** Request input queue */
    272 
    273 int EmulatedFakeCamera2::requestQueueNotify() {
    274     ALOGV("Request queue notification received");
    275 
    276     ALOG_ASSERT(mRequestQueueSrc != NULL,
    277             "%s: Request queue src not set, but received queue notification!",
    278             __FUNCTION__);
    279     ALOG_ASSERT(mFrameQueueDst != NULL,
    280             "%s: Request queue src not set, but received queue notification!",
    281             __FUNCTION__);
    282     ALOG_ASSERT(mStreams.size() != 0,
    283             "%s: No streams allocated, but received queue notification!",
    284             __FUNCTION__);
    285     return mConfigureThread->newRequestAvailable();
    286 }
    287 
    288 int EmulatedFakeCamera2::getInProgressCount() {
    289     Mutex::Autolock l(mMutex);
    290 
    291     if (!mStatusPresent) {
    292         ALOGW("%s: Camera was physically disconnected", __FUNCTION__);
    293         return ERROR_CAMERA_NOT_PRESENT;
    294     }
    295 
    296     int requestCount = 0;
    297     requestCount += mConfigureThread->getInProgressCount();
    298     requestCount += mReadoutThread->getInProgressCount();
    299     requestCount += mJpegCompressor->isBusy() ? 1 : 0;
    300 
    301     return requestCount;
    302 }
    303 
    304 int EmulatedFakeCamera2::constructDefaultRequest(
    305         int request_template,
    306         camera_metadata_t **request) {
    307 
    308     if (request == NULL) return BAD_VALUE;
    309     if (request_template < 0 || request_template >= CAMERA2_TEMPLATE_COUNT) {
    310         return BAD_VALUE;
    311     }
    312 
    313     {
    314         Mutex::Autolock l(mMutex);
    315         if (!mStatusPresent) {
    316             ALOGW("%s: Camera was physically disconnected", __FUNCTION__);
    317             return ERROR_CAMERA_NOT_PRESENT;
    318         }
    319     }
    320 
    321     status_t res;
    322     // Pass 1, calculate size and allocate
    323     res = constructDefaultRequest(request_template,
    324             request,
    325             true);
    326     if (res != OK) {
    327         return res;
    328     }
    329     // Pass 2, build request
    330     res = constructDefaultRequest(request_template,
    331             request,
    332             false);
    333     if (res != OK) {
    334         ALOGE("Unable to populate new request for template %d",
    335                 request_template);
    336     }
    337 
    338     return res;
    339 }
    340 
    341 int EmulatedFakeCamera2::allocateStream(
    342         uint32_t width,
    343         uint32_t height,
    344         int format,
    345         const camera2_stream_ops_t *stream_ops,
    346         uint32_t *stream_id,
    347         uint32_t *format_actual,
    348         uint32_t *usage,
    349         uint32_t *max_buffers) {
    350     Mutex::Autolock l(mMutex);
    351 
    352     if (!mStatusPresent) {
    353         ALOGW("%s: Camera was physically disconnected", __FUNCTION__);
    354         return ERROR_CAMERA_NOT_PRESENT;
    355     }
    356 
    357     // Temporary shim until FORMAT_ZSL is removed
    358     if (format == CAMERA2_HAL_PIXEL_FORMAT_ZSL) {
    359         format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
    360     }
    361 
    362     if (format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
    363         unsigned int numFormats = sizeof(kAvailableFormats) / sizeof(uint32_t);
    364         unsigned int formatIdx = 0;
    365         unsigned int sizeOffsetIdx = 0;
    366         for (; formatIdx < numFormats; formatIdx++) {
    367             if (format == (int)kAvailableFormats[formatIdx]) break;
    368         }
    369         if (formatIdx == numFormats) {
    370             ALOGE("%s: Format 0x%x is not supported", __FUNCTION__, format);
    371             return BAD_VALUE;
    372         }
    373     }
    374 
    375     const uint32_t *availableSizes;
    376     size_t availableSizeCount;
    377     switch (format) {
    378         case HAL_PIXEL_FORMAT_RAW_SENSOR:
    379             availableSizes = kAvailableRawSizes;
    380             availableSizeCount = sizeof(kAvailableRawSizes)/sizeof(uint32_t);
    381             break;
    382         case HAL_PIXEL_FORMAT_BLOB:
    383             availableSizes = mFacingBack ?
    384                     kAvailableJpegSizesBack : kAvailableJpegSizesFront;
    385             availableSizeCount = mFacingBack ?
    386                     sizeof(kAvailableJpegSizesBack)/sizeof(uint32_t) :
    387                     sizeof(kAvailableJpegSizesFront)/sizeof(uint32_t);
    388             break;
    389         case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
    390         case HAL_PIXEL_FORMAT_RGBA_8888:
    391         case HAL_PIXEL_FORMAT_YV12:
    392         case HAL_PIXEL_FORMAT_YCrCb_420_SP:
    393             availableSizes = mFacingBack ?
    394                     kAvailableProcessedSizesBack : kAvailableProcessedSizesFront;
    395             availableSizeCount = mFacingBack ?
    396                     sizeof(kAvailableProcessedSizesBack)/sizeof(uint32_t) :
    397                     sizeof(kAvailableProcessedSizesFront)/sizeof(uint32_t);
    398             break;
    399         default:
    400             ALOGE("%s: Unknown format 0x%x", __FUNCTION__, format);
    401             return BAD_VALUE;
    402     }
    403 
    404     unsigned int resIdx = 0;
    405     for (; resIdx < availableSizeCount; resIdx++) {
    406         if (availableSizes[resIdx * 2] == width &&
    407                 availableSizes[resIdx * 2 + 1] == height) break;
    408     }
    409     if (resIdx == availableSizeCount) {
    410         ALOGE("%s: Format 0x%x does not support resolution %d, %d", __FUNCTION__,
    411                 format, width, height);
    412         return BAD_VALUE;
    413     }
    414 
    415     switch (format) {
    416         case HAL_PIXEL_FORMAT_RAW_SENSOR:
    417             if (mRawStreamCount >= kMaxRawStreamCount) {
    418                 ALOGE("%s: Cannot allocate another raw stream (%d already allocated)",
    419                         __FUNCTION__, mRawStreamCount);
    420                 return INVALID_OPERATION;
    421             }
    422             mRawStreamCount++;
    423             break;
    424         case HAL_PIXEL_FORMAT_BLOB:
    425             if (mJpegStreamCount >= kMaxJpegStreamCount) {
    426                 ALOGE("%s: Cannot allocate another JPEG stream (%d already allocated)",
    427                         __FUNCTION__, mJpegStreamCount);
    428                 return INVALID_OPERATION;
    429             }
    430             mJpegStreamCount++;
    431             break;
    432         default:
    433             if (mProcessedStreamCount >= kMaxProcessedStreamCount) {
    434                 ALOGE("%s: Cannot allocate another processed stream (%d already allocated)",
    435                         __FUNCTION__, mProcessedStreamCount);
    436                 return INVALID_OPERATION;
    437             }
    438             mProcessedStreamCount++;
    439     }
    440 
    441     Stream newStream;
    442     newStream.ops = stream_ops;
    443     newStream.width = width;
    444     newStream.height = height;
    445     newStream.format = format;
    446     // TODO: Query stride from gralloc
    447     newStream.stride = width;
    448 
    449     mStreams.add(mNextStreamId, newStream);
    450 
    451     *stream_id = mNextStreamId;
    452     if (format_actual) *format_actual = format;
    453     *usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
    454     *max_buffers = kMaxBufferCount;
    455 
    456     ALOGV("Stream allocated: %d, %d x %d, 0x%x. U: %x, B: %d",
    457             *stream_id, width, height, format, *usage, *max_buffers);
    458 
    459     mNextStreamId++;
    460     return NO_ERROR;
    461 }
    462 
// Records the concrete pixel format of a stream once its gralloc buffers
// are registered (needed when the stream was allocated as
// IMPLEMENTATION_DEFINED).
int EmulatedFakeCamera2::registerStreamBuffers(
            uint32_t stream_id,
            int num_buffers,
            buffer_handle_t *buffers) {
    Mutex::Autolock l(mMutex);

    if (!mStatusPresent) {
        ALOGW("%s: Camera was physically disconnected", __FUNCTION__);
        return ERROR_CAMERA_NOT_PRESENT;
    }

    ALOGV("%s: Stream %d registering %d buffers", __FUNCTION__,
            stream_id, num_buffers);
    // Need to find out what the final concrete pixel format for our stream is
    // Assumes that all buffers have the same format.
    if (num_buffers < 1) {
        ALOGE("%s: Stream %d only has %d buffers!",
                __FUNCTION__, stream_id, num_buffers);
        return BAD_VALUE;
    }
    // NOTE(review): assumes the handles are goldfish gralloc cb_handle_t
    // objects (see gralloc_cb.h include) — the cast is unchecked.
    const cb_handle_t *streamBuffer =
            reinterpret_cast<const cb_handle_t*>(buffers[0]);

    int finalFormat = streamBuffer->format;

    // By registration time the format must have been resolved to something
    // concrete; IMPLEMENTATION_DEFINED is no longer acceptable here.
    if (finalFormat == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
        ALOGE("%s: Stream %d: Bad final pixel format "
                "HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED; "
                "concrete pixel format required!", __FUNCTION__, stream_id);
        return BAD_VALUE;
    }

    ssize_t streamIndex = mStreams.indexOfKey(stream_id);
    if (streamIndex < 0) {
        ALOGE("%s: Unknown stream id %d!", __FUNCTION__, stream_id);
        return BAD_VALUE;
    }

    Stream &stream = mStreams.editValueAt(streamIndex);

    ALOGV("%s: Stream %d format set to %x, previously %x",
            __FUNCTION__, stream_id, finalFormat, stream.format);

    // Persist the concrete format for later pipeline configuration.
    stream.format = finalFormat;

    return NO_ERROR;
}
    510 
    511 int EmulatedFakeCamera2::releaseStream(uint32_t stream_id) {
    512     Mutex::Autolock l(mMutex);
    513 
    514     ssize_t streamIndex = mStreams.indexOfKey(stream_id);
    515     if (streamIndex < 0) {
    516         ALOGE("%s: Unknown stream id %d!", __FUNCTION__, stream_id);
    517         return BAD_VALUE;
    518     }
    519 
    520     if (isStreamInUse(stream_id)) {
    521         ALOGE("%s: Cannot release stream %d; in use!", __FUNCTION__,
    522                 stream_id);
    523         return BAD_VALUE;
    524     }
    525 
    526     switch(mStreams.valueAt(streamIndex).format) {
    527         case HAL_PIXEL_FORMAT_RAW_SENSOR:
    528             mRawStreamCount--;
    529             break;
    530         case HAL_PIXEL_FORMAT_BLOB:
    531             mJpegStreamCount--;
    532             break;
    533         default:
    534             mProcessedStreamCount--;
    535             break;
    536     }
    537 
    538     mStreams.removeItemsAt(streamIndex);
    539 
    540     return NO_ERROR;
    541 }
    542 
    543 int EmulatedFakeCamera2::allocateReprocessStreamFromStream(
    544         uint32_t output_stream_id,
    545         const camera2_stream_in_ops_t *stream_ops,
    546         uint32_t *stream_id) {
    547     Mutex::Autolock l(mMutex);
    548 
    549     if (!mStatusPresent) {
    550         ALOGW("%s: Camera was physically disconnected", __FUNCTION__);
    551         return ERROR_CAMERA_NOT_PRESENT;
    552     }
    553 
    554     ssize_t baseStreamIndex = mStreams.indexOfKey(output_stream_id);
    555     if (baseStreamIndex < 0) {
    556         ALOGE("%s: Unknown output stream id %d!", __FUNCTION__, output_stream_id);
    557         return BAD_VALUE;
    558     }
    559 
    560     const Stream &baseStream = mStreams[baseStreamIndex];
    561 
    562     // We'll reprocess anything we produced
    563 
    564     if (mReprocessStreamCount >= kMaxReprocessStreamCount) {
    565         ALOGE("%s: Cannot allocate another reprocess stream (%d already allocated)",
    566                 __FUNCTION__, mReprocessStreamCount);
    567         return INVALID_OPERATION;
    568     }
    569     mReprocessStreamCount++;
    570 
    571     ReprocessStream newStream;
    572     newStream.ops = stream_ops;
    573     newStream.width = baseStream.width;
    574     newStream.height = baseStream.height;
    575     newStream.format = baseStream.format;
    576     newStream.stride = baseStream.stride;
    577     newStream.sourceStreamId = output_stream_id;
    578 
    579     *stream_id = mNextReprocessStreamId;
    580     mReprocessStreams.add(mNextReprocessStreamId, newStream);
    581 
    582     ALOGV("Reprocess stream allocated: %d: %d, %d, 0x%x. Parent stream: %d",
    583             *stream_id, newStream.width, newStream.height, newStream.format,
    584             output_stream_id);
    585 
    586     mNextReprocessStreamId++;
    587     return NO_ERROR;
    588 }
    589 
    590 int EmulatedFakeCamera2::releaseReprocessStream(uint32_t stream_id) {
    591     Mutex::Autolock l(mMutex);
    592 
    593     ssize_t streamIndex = mReprocessStreams.indexOfKey(stream_id);
    594     if (streamIndex < 0) {
    595         ALOGE("%s: Unknown reprocess stream id %d!", __FUNCTION__, stream_id);
    596         return BAD_VALUE;
    597     }
    598 
    599     if (isReprocessStreamInUse(stream_id)) {
    600         ALOGE("%s: Cannot release reprocessing stream %d; in use!", __FUNCTION__,
    601                 stream_id);
    602         return BAD_VALUE;
    603     }
    604 
    605     mReprocessStreamCount--;
    606     mReprocessStreams.removeItemsAt(streamIndex);
    607 
    608     return NO_ERROR;
    609 }
    610 
    611 int EmulatedFakeCamera2::triggerAction(uint32_t trigger_id,
    612         int32_t ext1,
    613         int32_t ext2) {
    614     Mutex::Autolock l(mMutex);
    615 
    616     if (trigger_id == CAMERA2_EXT_TRIGGER_TESTING_DISCONNECT) {
    617         ALOGI("%s: Disconnect trigger - camera must be closed", __FUNCTION__);
    618         mStatusPresent = false;
    619 
    620         gEmulatedCameraFactory.onStatusChanged(
    621                 mCameraID,
    622                 CAMERA_DEVICE_STATUS_NOT_PRESENT);
    623     }
    624 
    625     if (!mStatusPresent) {
    626         ALOGW("%s: Camera was physically disconnected", __FUNCTION__);
    627         return ERROR_CAMERA_NOT_PRESENT;
    628     }
    629 
    630     return mControlThread->triggerAction(trigger_id,
    631             ext1, ext2);
    632 }
    633 
    634 /** Custom tag definitions */
    635 
// Emulator camera metadata sections
// Vendor sections start at VENDOR_SECTION; END_EMULATOR_SECTIONS is one
// past the last valid section (used as an exclusive bound below).
enum {
    EMULATOR_SCENE = VENDOR_SECTION,
    END_EMULATOR_SECTIONS
};

// Tag values are (section << 16) | index, matching camera_metadata layout.
enum {
    EMULATOR_SCENE_START = EMULATOR_SCENE << 16,
};

// Emulator camera metadata tags
enum {
    // Hour of day to use for lighting calculations (0-23). Default: 12
    EMULATOR_SCENE_HOUROFDAY = EMULATOR_SCENE_START,
    EMULATOR_SCENE_END
};

// [start, end) tag range for each vendor section, indexed by
// (section - VENDOR_SECTION).
unsigned int emulator_metadata_section_bounds[END_EMULATOR_SECTIONS -
        VENDOR_SECTION][2] = {
    { EMULATOR_SCENE_START, EMULATOR_SCENE_END }
};

// Dotted section names reported to the framework, same indexing.
const char *emulator_metadata_section_names[END_EMULATOR_SECTIONS -
        VENDOR_SECTION] = {
    "com.android.emulator.scene"
};

// Per-tag name and wire type for the vendor-tag query ops.
typedef struct emulator_tag_info {
    const char *tag_name;
    uint8_t     tag_type;
} emulator_tag_info_t;

// Tag table for the EMULATOR_SCENE section.
emulator_tag_info_t emulator_scene[EMULATOR_SCENE_END - EMULATOR_SCENE_START] = {
    { "hourOfDay", TYPE_INT32 }
};

// Per-section tag tables, indexed by (section - VENDOR_SECTION).
emulator_tag_info_t *tag_info[END_EMULATOR_SECTIONS -
        VENDOR_SECTION] = {
    emulator_scene
};
    676 
    677 const char* EmulatedFakeCamera2::getVendorSectionName(uint32_t tag) {
    678     ALOGV("%s", __FUNCTION__);
    679     uint32_t section = tag >> 16;
    680     if (section < VENDOR_SECTION || section > END_EMULATOR_SECTIONS) return NULL;
    681     return emulator_metadata_section_names[section - VENDOR_SECTION];
    682 }
    683 
    684 const char* EmulatedFakeCamera2::getVendorTagName(uint32_t tag) {
    685     ALOGV("%s", __FUNCTION__);
    686     uint32_t section = tag >> 16;
    687     if (section < VENDOR_SECTION || section > END_EMULATOR_SECTIONS) return NULL;
    688     uint32_t section_index = section - VENDOR_SECTION;
    689     if (tag >= emulator_metadata_section_bounds[section_index][1]) {
    690         return NULL;
    691     }
    692     uint32_t tag_index = tag & 0xFFFF;
    693     return tag_info[section_index][tag_index].tag_name;
    694 }
    695 
    696 int EmulatedFakeCamera2::getVendorTagType(uint32_t tag) {
    697     ALOGV("%s", __FUNCTION__);
    698     uint32_t section = tag >> 16;
    699     if (section < VENDOR_SECTION || section > END_EMULATOR_SECTIONS) return -1;
    700     uint32_t section_index = section - VENDOR_SECTION;
    701     if (tag >= emulator_metadata_section_bounds[section_index][1]) {
    702         return -1;
    703     }
    704     uint32_t tag_index = tag & 0xFFFF;
    705     return tag_info[section_index][tag_index].tag_type;
    706 }
    707 
    708 /** Shutdown and debug methods */
    709 
    710 int EmulatedFakeCamera2::dump(int fd) {
    711     String8 result;
    712 
    713     result.appendFormat("    Camera HAL device: EmulatedFakeCamera2\n");
    714     result.appendFormat("      Streams:\n");
    715     for (size_t i = 0; i < mStreams.size(); i++) {
    716         int id = mStreams.keyAt(i);
    717         const Stream& s = mStreams.valueAt(i);
    718         result.appendFormat(
    719             "         Stream %d: %d x %d, format 0x%x, stride %d\n",
    720             id, s.width, s.height, s.format, s.stride);
    721     }
    722 
    723     write(fd, result.string(), result.size());
    724 
    725     return NO_ERROR;
    726 }
    727 
// Called by worker threads on unrecoverable pipeline errors. Currently
// only logs; no notification reaches the framework or parent object.
void EmulatedFakeCamera2::signalError() {
    // TODO: Let parent know so we can shut down cleanly
    ALOGE("Worker thread is signaling a serious error");
}
    732 
    733 /** Pipeline control worker thread methods */
    734 
/** Pipeline control worker thread methods */

// Configure thread: dequeues requests and stages them for the sensor.
// Thread(false): not started in "can call Java" mode.
EmulatedFakeCamera2::ConfigureThread::ConfigureThread(EmulatedFakeCamera2 *parent):
        Thread(false),
        mParent(parent),
        mRequestCount(0),
        mNextBuffers(NULL) {
    // mRunning flips to true in readyToRun(); waitUntilRunning() blocks on it.
    mRunning = false;
}

EmulatedFakeCamera2::ConfigureThread::~ConfigureThread() {
}
    745 
// Thread framework hook, runs once before the first threadLoop() call.
// Initializes loop state and signals any waiter in waitUntilRunning().
status_t EmulatedFakeCamera2::ConfigureThread::readyToRun() {
    Mutex::Autolock lock(mInputMutex);

    ALOGV("Starting up ConfigureThread");
    mRequest = NULL;
    mActive  = false;
    mRunning = true;

    mInputSignal.signal();
    return NO_ERROR;
}
    757 
    758 status_t EmulatedFakeCamera2::ConfigureThread::waitUntilRunning() {
    759     Mutex::Autolock lock(mInputMutex);
    760     if (!mRunning) {
    761         ALOGV("Waiting for configure thread to start");
    762         mInputSignal.wait(mInputMutex);
    763     }
    764     return OK;
    765 }
    766 
// Wakes the configure thread to start draining the request queue. Safe to
// call before the thread is up: it first waits for readyToRun to finish.
status_t EmulatedFakeCamera2::ConfigureThread::newRequestAvailable() {
    waitUntilRunning();

    Mutex::Autolock lock(mInputMutex);

    mActive = true;
    mInputSignal.signal();

    return OK;
}
    777 
    778 bool EmulatedFakeCamera2::ConfigureThread::isStreamInUse(uint32_t id) {
    779     Mutex::Autolock lock(mInternalsMutex);
    780 
    781     if (mNextBuffers == NULL) return false;
    782     for (size_t i=0; i < mNextBuffers->size(); i++) {
    783         if ((*mNextBuffers)[i].streamId == (int)id) return true;
    784     }
    785     return false;
    786 }
    787 
// Number of requests this thread has dequeued and not yet handed off.
int EmulatedFakeCamera2::ConfigureThread::getInProgressCount() {
    Mutex::Autolock lock(mInputMutex);
    return mRequestCount;
}
    792 
// Main loop: waits for activity, dequeues the next request, routes it to
// capture or reprocess setup, then paces configuration on the readout
// thread and JPEG compressor before configuring the next frame.
// Returns false only on fatal errors (which also stop the thread).
bool EmulatedFakeCamera2::ConfigureThread::threadLoop() {
    status_t res;

    // Check if we're currently processing or just waiting
    {
        Mutex::Autolock lock(mInputMutex);
        if (!mActive) {
            // Inactive, keep waiting until we've been signaled
            status_t res;
            // Timed wait so requestExit() is noticed within kWaitPerLoop.
            res = mInputSignal.waitRelative(mInputMutex, kWaitPerLoop);
            if (res != NO_ERROR && res != TIMED_OUT) {
                ALOGE("%s: Error waiting for input requests: %d",
                        __FUNCTION__, res);
                return false;
            }
            if (!mActive) return true;
            ALOGV("New request available");
        }
        // Active
    }

    // No request in flight yet: pull the next one off the queue.
    if (mRequest == NULL) {
        Mutex::Autolock il(mInternalsMutex);

        ALOGV("Configure: Getting next request");
        res = mParent->mRequestQueueSrc->dequeue_request(
            mParent->mRequestQueueSrc,
            &mRequest);
        if (res != NO_ERROR) {
            ALOGE("%s: Error dequeuing next request: %d", __FUNCTION__, res);
            mParent->signalError();
            return false;
        }
        if (mRequest == NULL) {
            ALOGV("Configure: Request queue empty, going inactive");
            // No requests available, go into inactive mode
            Mutex::Autolock lock(mInputMutex);
            mActive = false;
            return true;
        } else {
            Mutex::Autolock lock(mInputMutex);
            mRequestCount++;
        }

        // Dispatch on the request type tag embedded in the metadata.
        camera_metadata_entry_t type;
        res = find_camera_metadata_entry(mRequest,
                ANDROID_REQUEST_TYPE,
                &type);
        if (res != NO_ERROR) {
            ALOGE("%s: error reading request type", __FUNCTION__);
            mParent->signalError();
            return false;
        }
        bool success = false;;
        switch (type.data.u8[0]) {
            case ANDROID_REQUEST_TYPE_CAPTURE:
                success = setupCapture();
                break;
            case ANDROID_REQUEST_TYPE_REPROCESS:
                success = setupReprocess();
                break;
            default:
                ALOGE("%s: Unexpected request type %d",
                        __FUNCTION__, type.data.u8[0]);
                mParent->signalError();
                break;
        }
        if (!success) return false;

    }

    // Pace on the readout thread; returning true re-enters the loop so the
    // waits below never block longer than kWaitPerLoop at a time.
    if (mWaitingForReadout) {
        bool readoutDone;
        readoutDone = mParent->mReadoutThread->waitForReady(kWaitPerLoop);
        if (!readoutDone) return true;

        if (mNextNeedsJpeg) {
            ALOGV("Configure: Waiting for JPEG compressor");
        } else {
            ALOGV("Configure: Waiting for sensor");
        }
        mWaitingForReadout = false;
    }

    // If this request produces a JPEG, the compressor must be free first.
    if (mNextNeedsJpeg) {
        bool jpegDone;
        jpegDone = mParent->mJpegCompressor->waitForDone(kWaitPerLoop);
        if (!jpegDone) return true;

        ALOGV("Configure: Waiting for sensor");
        mNextNeedsJpeg = false;
    }

    // Hand the staged request to the appropriate per-frame configurator.
    if (mNextIsCapture) {
        return configureNextCapture();
    } else {
        return configureNextReprocess();
    }
}
    892 
    893 bool EmulatedFakeCamera2::ConfigureThread::setupCapture() {
    894     status_t res;
    895 
    896     mNextIsCapture = true;
    897     // Get necessary parameters for sensor config
    898     mParent->mControlThread->processRequest(mRequest);
    899 
    900     camera_metadata_entry_t streams;
    901     res = find_camera_metadata_entry(mRequest,
    902             ANDROID_REQUEST_OUTPUT_STREAMS,
    903             &streams);
    904     if (res != NO_ERROR) {
    905         ALOGE("%s: error reading output stream tag", __FUNCTION__);
    906         mParent->signalError();
    907         return false;
    908     }
    909 
    910     mNextBuffers = new Buffers;
    911     mNextNeedsJpeg = false;
    912     ALOGV("Configure: Setting up buffers for capture");
    913     for (size_t i = 0; i < streams.count; i++) {
    914         int streamId = streams.data.u8[i];
    915         const Stream &s = mParent->getStreamInfo(streamId);
    916         if (s.format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
    917             ALOGE("%s: Stream %d does not have a concrete pixel format, but "
    918                     "is included in a request!", __FUNCTION__, streamId);
    919             mParent->signalError();
    920             return false;
    921         }
    922         StreamBuffer b;
    923         b.streamId = streams.data.u8[i];
    924         b.width  = s.width;
    925         b.height = s.height;
    926         b.format = s.format;
    927         b.stride = s.stride;
    928         mNextBuffers->push_back(b);
    929         ALOGV("Configure:    Buffer %d: Stream %d, %d x %d, format 0x%x, "
    930                 "stride %d",
    931                 i, b.streamId, b.width, b.height, b.format, b.stride);
    932         if (b.format == HAL_PIXEL_FORMAT_BLOB) {
    933             mNextNeedsJpeg = true;
    934         }
    935     }
    936 
    937     camera_metadata_entry_t e;
    938     res = find_camera_metadata_entry(mRequest,
    939             ANDROID_REQUEST_FRAME_COUNT,
    940             &e);
    941     if (res != NO_ERROR) {
    942         ALOGE("%s: error reading frame count tag: %s (%d)",
    943                 __FUNCTION__, strerror(-res), res);
    944         mParent->signalError();
    945         return false;
    946     }
    947     mNextFrameNumber = *e.data.i32;
    948 
    949     res = find_camera_metadata_entry(mRequest,
    950             ANDROID_SENSOR_EXPOSURE_TIME,
    951             &e);
    952     if (res != NO_ERROR) {
    953         ALOGE("%s: error reading exposure time tag: %s (%d)",
    954                 __FUNCTION__, strerror(-res), res);
    955         mParent->signalError();
    956         return false;
    957     }
    958     mNextExposureTime = *e.data.i64;
    959 
    960     res = find_camera_metadata_entry(mRequest,
    961             ANDROID_SENSOR_FRAME_DURATION,
    962             &e);
    963     if (res != NO_ERROR) {
    964         ALOGE("%s: error reading frame duration tag", __FUNCTION__);
    965         mParent->signalError();
    966         return false;
    967     }
    968     mNextFrameDuration = *e.data.i64;
    969 
    970     if (mNextFrameDuration <
    971             mNextExposureTime + Sensor::kMinVerticalBlank) {
    972         mNextFrameDuration = mNextExposureTime + Sensor::kMinVerticalBlank;
    973     }
    974     res = find_camera_metadata_entry(mRequest,
    975             ANDROID_SENSOR_SENSITIVITY,
    976             &e);
    977     if (res != NO_ERROR) {
    978         ALOGE("%s: error reading sensitivity tag", __FUNCTION__);
    979         mParent->signalError();
    980         return false;
    981     }
    982     mNextSensitivity = *e.data.i32;
    983 
    984     res = find_camera_metadata_entry(mRequest,
    985             EMULATOR_SCENE_HOUROFDAY,
    986             &e);
    987     if (res == NO_ERROR) {
    988         ALOGV("Setting hour: %d", *e.data.i32);
    989         mParent->mSensor->getScene().setHour(*e.data.i32);
    990     }
    991 
    992     // Start waiting on readout thread
    993     mWaitingForReadout = true;
    994     ALOGV("Configure: Waiting for readout thread");
    995 
    996     return true;
    997 }
    998 
    999 bool EmulatedFakeCamera2::ConfigureThread::configureNextCapture() {
   1000     bool vsync = mParent->mSensor->waitForVSync(kWaitPerLoop);
   1001     if (!vsync) return true;
   1002 
   1003     Mutex::Autolock il(mInternalsMutex);
   1004     ALOGV("Configure: Configuring sensor for capture %d", mNextFrameNumber);
   1005     mParent->mSensor->setExposureTime(mNextExposureTime);
   1006     mParent->mSensor->setFrameDuration(mNextFrameDuration);
   1007     mParent->mSensor->setSensitivity(mNextSensitivity);
   1008 
   1009     getBuffers();
   1010 
   1011     ALOGV("Configure: Done configure for capture %d", mNextFrameNumber);
   1012     mParent->mReadoutThread->setNextOperation(true, mRequest, mNextBuffers);
   1013     mParent->mSensor->setDestinationBuffers(mNextBuffers);
   1014 
   1015     mRequest = NULL;
   1016     mNextBuffers = NULL;
   1017 
   1018     Mutex::Autolock lock(mInputMutex);
   1019     mRequestCount--;
   1020 
   1021     return true;
   1022 }
   1023 
   1024 bool EmulatedFakeCamera2::ConfigureThread::setupReprocess() {
   1025     status_t res;
   1026 
   1027     mNextNeedsJpeg = true;
   1028     mNextIsCapture = false;
   1029 
   1030     camera_metadata_entry_t reprocessStreams;
   1031     res = find_camera_metadata_entry(mRequest,
   1032             ANDROID_REQUEST_INPUT_STREAMS,
   1033             &reprocessStreams);
   1034     if (res != NO_ERROR) {
   1035         ALOGE("%s: error reading output stream tag", __FUNCTION__);
   1036         mParent->signalError();
   1037         return false;
   1038     }
   1039 
   1040     mNextBuffers = new Buffers;
   1041 
   1042     ALOGV("Configure: Setting up input buffers for reprocess");
   1043     for (size_t i = 0; i < reprocessStreams.count; i++) {
   1044         int streamId = reprocessStreams.data.u8[i];
   1045         const ReprocessStream &s = mParent->getReprocessStreamInfo(streamId);
   1046         if (s.format != HAL_PIXEL_FORMAT_RGB_888) {
   1047             ALOGE("%s: Only ZSL reprocessing supported!",
   1048                     __FUNCTION__);
   1049             mParent->signalError();
   1050             return false;
   1051         }
   1052         StreamBuffer b;
   1053         b.streamId = -streamId;
   1054         b.width = s.width;
   1055         b.height = s.height;
   1056         b.format = s.format;
   1057         b.stride = s.stride;
   1058         mNextBuffers->push_back(b);
   1059     }
   1060 
   1061     camera_metadata_entry_t streams;
   1062     res = find_camera_metadata_entry(mRequest,
   1063             ANDROID_REQUEST_OUTPUT_STREAMS,
   1064             &streams);
   1065     if (res != NO_ERROR) {
   1066         ALOGE("%s: error reading output stream tag", __FUNCTION__);
   1067         mParent->signalError();
   1068         return false;
   1069     }
   1070 
   1071     ALOGV("Configure: Setting up output buffers for reprocess");
   1072     for (size_t i = 0; i < streams.count; i++) {
   1073         int streamId = streams.data.u8[i];
   1074         const Stream &s = mParent->getStreamInfo(streamId);
   1075         if (s.format != HAL_PIXEL_FORMAT_BLOB) {
   1076             // TODO: Support reprocess to YUV
   1077             ALOGE("%s: Non-JPEG output stream %d for reprocess not supported",
   1078                     __FUNCTION__, streamId);
   1079             mParent->signalError();
   1080             return false;
   1081         }
   1082         StreamBuffer b;
   1083         b.streamId = streams.data.u8[i];
   1084         b.width  = s.width;
   1085         b.height = s.height;
   1086         b.format = s.format;
   1087         b.stride = s.stride;
   1088         mNextBuffers->push_back(b);
   1089         ALOGV("Configure:    Buffer %d: Stream %d, %d x %d, format 0x%x, "
   1090                 "stride %d",
   1091                 i, b.streamId, b.width, b.height, b.format, b.stride);
   1092     }
   1093 
   1094     camera_metadata_entry_t e;
   1095     res = find_camera_metadata_entry(mRequest,
   1096             ANDROID_REQUEST_FRAME_COUNT,
   1097             &e);
   1098     if (res != NO_ERROR) {
   1099         ALOGE("%s: error reading frame count tag: %s (%d)",
   1100                 __FUNCTION__, strerror(-res), res);
   1101         mParent->signalError();
   1102         return false;
   1103     }
   1104     mNextFrameNumber = *e.data.i32;
   1105 
   1106     return true;
   1107 }
   1108 
   1109 bool EmulatedFakeCamera2::ConfigureThread::configureNextReprocess() {
   1110     Mutex::Autolock il(mInternalsMutex);
   1111 
   1112     getBuffers();
   1113 
   1114     ALOGV("Configure: Done configure for reprocess %d", mNextFrameNumber);
   1115     mParent->mReadoutThread->setNextOperation(false, mRequest, mNextBuffers);
   1116 
   1117     mRequest = NULL;
   1118     mNextBuffers = NULL;
   1119 
   1120     Mutex::Autolock lock(mInputMutex);
   1121     mRequestCount--;
   1122 
   1123     return true;
   1124 }
   1125 
/**
 * Acquire and map a gralloc buffer for every entry in mNextBuffers.
 *
 * Positive streamId: dequeue an empty buffer from the output stream and lock
 * it for CPU write. Negative streamId (reprocess input, stored negated):
 * acquire a filled buffer from the reprocess stream and lock it for CPU read.
 * The mapped CPU address is stored in each entry's img field.
 *
 * On any failure, returns the just-obtained buffer to its stream
 * (cancel_buffer / release_buffer), signals a fatal error to the parent, and
 * returns false. NOTE(review): buffers acquired on earlier loop iterations
 * are not unwound here — confirm whether callers rely on signalError()
 * tearing everything down.
 */
bool EmulatedFakeCamera2::ConfigureThread::getBuffers() {
    status_t res;
    /** Get buffers to fill for this frame */
    for (size_t i = 0; i < mNextBuffers->size(); i++) {
        StreamBuffer &b = mNextBuffers->editItemAt(i);

        if (b.streamId > 0) {
            // Output stream: dequeue an empty buffer to render into
            Stream s = mParent->getStreamInfo(b.streamId);
            ALOGV("Configure: Dequeing buffer from stream %d", b.streamId);
            res = s.ops->dequeue_buffer(s.ops, &(b.buffer) );
            if (res != NO_ERROR || b.buffer == NULL) {
                ALOGE("%s: Unable to dequeue buffer from stream %d: %s (%d)",
                        __FUNCTION__, b.streamId, strerror(-res), res);
                mParent->signalError();
                return false;
            }

            /* Lock the buffer from the perspective of the graphics mapper */
            const Rect rect(s.width, s.height);

            res = GraphicBufferMapper::get().lock(*(b.buffer),
                    GRALLOC_USAGE_HW_CAMERA_WRITE,
                    rect, (void**)&(b.img) );

            if (res != NO_ERROR) {
                ALOGE("%s: grbuffer_mapper.lock failure: %s (%d)",
                        __FUNCTION__, strerror(-res), res);
                // Give the un-mappable buffer back to the stream
                s.ops->cancel_buffer(s.ops,
                        b.buffer);
                mParent->signalError();
                return false;
            }
        } else {
            // Reprocess input stream: acquire an already-filled buffer to
            // read from (note the id is stored negated)
            ReprocessStream s = mParent->getReprocessStreamInfo(-b.streamId);
            ALOGV("Configure: Acquiring buffer from reprocess stream %d",
                    -b.streamId);
            res = s.ops->acquire_buffer(s.ops, &(b.buffer) );
            if (res != NO_ERROR || b.buffer == NULL) {
                ALOGE("%s: Unable to acquire buffer from reprocess stream %d: "
                        "%s (%d)", __FUNCTION__, -b.streamId,
                        strerror(-res), res);
                mParent->signalError();
                return false;
            }

            /* Lock the buffer from the perspective of the graphics mapper */
            const Rect rect(s.width, s.height);

            res = GraphicBufferMapper::get().lock(*(b.buffer),
                    GRALLOC_USAGE_HW_CAMERA_READ,
                    rect, (void**)&(b.img) );
            if (res != NO_ERROR) {
                ALOGE("%s: grbuffer_mapper.lock failure: %s (%d)",
                        __FUNCTION__, strerror(-res), res);
                // Give the un-mappable buffer back to the stream
                s.ops->release_buffer(s.ops,
                        b.buffer);
                mParent->signalError();
                return false;
            }
        }
    }
    return true;
}
   1189 
// Readout thread: consumes completed sensor frames, builds output metadata
// frames, and delivers filled buffers to their output streams.
EmulatedFakeCamera2::ReadoutThread::ReadoutThread(EmulatedFakeCamera2 *parent):
        Thread(false),
        mParent(parent),
        mRunning(false),
        mActive(false),
        mRequestCount(0),
        mRequest(NULL),
        mBuffers(NULL) {
    // Circular queue of requests handed over by the configure thread,
    // allocated with new[]; head == tail means the queue is empty.
    mInFlightQueue = new InFlightQueue[kInFlightQueueSize];
    mInFlightHead = 0;
    mInFlightTail = 0;
}
   1202 
   1203 EmulatedFakeCamera2::ReadoutThread::~ReadoutThread() {
   1204     delete mInFlightQueue;
   1205 }
   1206 
// Called once by the Thread framework before the first threadLoop()
// iteration; marks the thread as running and wakes any thread blocked in
// waitUntilRunning().
status_t EmulatedFakeCamera2::ReadoutThread::readyToRun() {
    Mutex::Autolock lock(mInputMutex);
    ALOGV("Starting up ReadoutThread");
    mRunning = true;
    mInputSignal.signal();
    return NO_ERROR;
}
   1214 
   1215 status_t EmulatedFakeCamera2::ReadoutThread::waitUntilRunning() {
   1216     Mutex::Autolock lock(mInputMutex);
   1217     if (!mRunning) {
   1218         ALOGV("Waiting for readout thread to start");
   1219         mInputSignal.wait(mInputMutex);
   1220     }
   1221     return OK;
   1222 }
   1223 
   1224 bool EmulatedFakeCamera2::ReadoutThread::waitForReady(nsecs_t timeout) {
   1225     status_t res;
   1226     Mutex::Autolock lock(mInputMutex);
   1227     while (!readyForNextCapture()) {
   1228         res = mReadySignal.waitRelative(mInputMutex, timeout);
   1229         if (res == TIMED_OUT) return false;
   1230         if (res != OK) {
   1231             ALOGE("%s: Error waiting for ready: %s (%d)", __FUNCTION__,
   1232                     strerror(-res), res);
   1233             return false;
   1234         }
   1235     }
   1236     return true;
   1237 }
   1238 
   1239 bool EmulatedFakeCamera2::ReadoutThread::readyForNextCapture() {
   1240     return (mInFlightTail + 1) % kInFlightQueueSize != mInFlightHead;
   1241 }
   1242 
   1243 void EmulatedFakeCamera2::ReadoutThread::setNextOperation(
   1244         bool isCapture,
   1245         camera_metadata_t *request,
   1246         Buffers *buffers) {
   1247     Mutex::Autolock lock(mInputMutex);
   1248     if ( !readyForNextCapture() ) {
   1249         ALOGE("In flight queue full, dropping captures");
   1250         mParent->signalError();
   1251         return;
   1252     }
   1253     mInFlightQueue[mInFlightTail].isCapture = isCapture;
   1254     mInFlightQueue[mInFlightTail].request = request;
   1255     mInFlightQueue[mInFlightTail].buffers = buffers;
   1256     mInFlightTail = (mInFlightTail + 1) % kInFlightQueueSize;
   1257     mRequestCount++;
   1258 
   1259     if (!mActive) {
   1260         mActive = true;
   1261         mInputSignal.signal();
   1262     }
   1263 }
   1264 
   1265 bool EmulatedFakeCamera2::ReadoutThread::isStreamInUse(uint32_t id) {
   1266     // acquire in same order as threadLoop
   1267     Mutex::Autolock iLock(mInternalsMutex);
   1268     Mutex::Autolock lock(mInputMutex);
   1269 
   1270     size_t i = mInFlightHead;
   1271     while (i != mInFlightTail) {
   1272         for (size_t j = 0; j < mInFlightQueue[i].buffers->size(); j++) {
   1273             if ( (*(mInFlightQueue[i].buffers))[j].streamId == (int)id )
   1274                 return true;
   1275         }
   1276         i = (i + 1) % kInFlightQueueSize;
   1277     }
   1278 
   1279 
   1280     if (mBuffers != NULL) {
   1281         for (i = 0; i < mBuffers->size(); i++) {
   1282             if ( (*mBuffers)[i].streamId == (int)id) return true;
   1283         }
   1284     }
   1285 
   1286     return false;
   1287 }
   1288 
// Number of requests queued for or undergoing readout; used by the parent
// to tell when the pipeline has drained.
int EmulatedFakeCamera2::ReadoutThread::getInProgressCount() {
    Mutex::Autolock lock(mInputMutex);

    return mRequestCount;
}
   1294 
/**
 * Main readout loop: pull the next in-flight request, wait for its sensor
 * frame (captures only), emit the output metadata frame if requested, then
 * deliver image buffers to their streams (handing any BLOB buffer to the
 * JPEG compressor). Returns false only on fatal error; returning true keeps
 * the loop running, including when it is merely polling.
 */
bool EmulatedFakeCamera2::ReadoutThread::threadLoop() {
    static const nsecs_t kWaitPerLoop = 10000000L; // 10 ms
    status_t res;
    int32_t frameNumber;

    // Check if we're currently processing or just waiting
    {
        Mutex::Autolock lock(mInputMutex);
        if (!mActive) {
            // Inactive, keep waiting until we've been signaled
            res = mInputSignal.waitRelative(mInputMutex, kWaitPerLoop);
            if (res != NO_ERROR && res != TIMED_OUT) {
                ALOGE("%s: Error waiting for capture requests: %d",
                        __FUNCTION__, res);
                mParent->signalError();
                return false;
            }
            if (!mActive) return true;
        }
        // Active, see if we need a new request
        if (mRequest == NULL) {
            if (mInFlightHead == mInFlightTail) {
                // Go inactive
                ALOGV("Waiting for sensor data");
                mActive = false;
                return true;
            } else {
                // Pop the head entry off the circular in-flight queue; this
                // thread now owns the request and buffer list. Signal the
                // configure thread that a slot has opened up.
                Mutex::Autolock iLock(mInternalsMutex);
                mReadySignal.signal();
                mIsCapture = mInFlightQueue[mInFlightHead].isCapture;
                mRequest = mInFlightQueue[mInFlightHead].request;
                mBuffers  = mInFlightQueue[mInFlightHead].buffers;
                mInFlightQueue[mInFlightHead].request = NULL;
                mInFlightQueue[mInFlightHead].buffers = NULL;
                mInFlightHead = (mInFlightHead + 1) % kInFlightQueueSize;
                ALOGV("Ready to read out request %p, %d buffers",
                        mRequest, mBuffers->size());
            }
        }
    }

    // Active with request, wait on sensor to complete

    nsecs_t captureTime;

    if (mIsCapture) {
        bool gotFrame;
        gotFrame = mParent->mSensor->waitForNewFrame(kWaitPerLoop,
                &captureTime);

        if (!gotFrame) return true;
    }

    Mutex::Autolock iLock(mInternalsMutex);

    camera_metadata_entry_t entry;
    if (!mIsCapture) {
        // Reprocess requests carry the original capture's timestamp in
        // their metadata rather than getting one from the sensor
        res = find_camera_metadata_entry(mRequest,
                ANDROID_SENSOR_TIMESTAMP,
            &entry);
        if (res != NO_ERROR) {
            ALOGE("%s: error reading reprocessing timestamp: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            mParent->signalError();
            return false;
        }
        captureTime = entry.data.i64[0];
    }

    res = find_camera_metadata_entry(mRequest,
            ANDROID_REQUEST_FRAME_COUNT,
            &entry);
    if (res != NO_ERROR) {
        ALOGE("%s: error reading frame count tag: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        mParent->signalError();
        return false;
    }
    frameNumber = *entry.data.i32;

    res = find_camera_metadata_entry(mRequest,
            ANDROID_REQUEST_METADATA_MODE,
            &entry);
    if (res != NO_ERROR) {
        ALOGE("%s: error reading metadata mode tag: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        mParent->signalError();
        return false;
    }

    // Got sensor data and request, construct frame and send it out
    ALOGV("Readout: Constructing metadata and frames for request %d",
            frameNumber);

    if (*entry.data.u8 == ANDROID_REQUEST_METADATA_MODE_FULL) {
        ALOGV("Readout: Metadata requested, constructing");

        camera_metadata_t *frame = NULL;

        // Size the output frame as the request plus headroom for the
        // entries added below
        size_t frame_entries = get_camera_metadata_entry_count(mRequest);
        size_t frame_data    = get_camera_metadata_data_count(mRequest);

        // TODO: Dynamically calculate based on enabled statistics, etc
        frame_entries += 10;
        frame_data += 100;

        res = mParent->mFrameQueueDst->dequeue_frame(mParent->mFrameQueueDst,
                frame_entries, frame_data, &frame);

        if (res != NO_ERROR || frame == NULL) {
            ALOGE("%s: Unable to dequeue frame metadata buffer", __FUNCTION__);
            mParent->signalError();
            return false;
        }

        // The output frame starts as a copy of the request settings
        res = append_camera_metadata(frame, mRequest);
        if (res != NO_ERROR) {
            ALOGE("Unable to append request metadata");
        }

        if (mIsCapture) {
            add_camera_metadata_entry(frame,
                    ANDROID_SENSOR_TIMESTAMP,
                    &captureTime,
                    1);

            // Report the scene's hour-of-day through the emulator-only
            // vendor tag, adding it if the request didn't include it
            int32_t hourOfDay = (int32_t)mParent->mSensor->getScene().getHour();
            camera_metadata_entry_t requestedHour;
            res = find_camera_metadata_entry(frame,
                    EMULATOR_SCENE_HOUROFDAY,
                    &requestedHour);
            if (res == NAME_NOT_FOUND) {
                res = add_camera_metadata_entry(frame,
                        EMULATOR_SCENE_HOUROFDAY,
                        &hourOfDay, 1);
                if (res != NO_ERROR) {
                    ALOGE("Unable to add vendor tag");
                }
            } else if (res == OK) {
                *requestedHour.data.i32 = hourOfDay;
            } else {
                ALOGE("%s: Error looking up vendor tag", __FUNCTION__);
            }

            collectStatisticsMetadata(frame);
            // TODO: Collect all final values used from sensor in addition to timestamp
        }

        ALOGV("Readout: Enqueue frame %d", frameNumber);
        mParent->mFrameQueueDst->enqueue_frame(mParent->mFrameQueueDst,
                frame);
    }
    ALOGV("Readout: Free request");
    res = mParent->mRequestQueueSrc->free_request(mParent->mRequestQueueSrc, mRequest);
    if (res != NO_ERROR) {
        ALOGE("%s: Unable to return request buffer to queue: %d",
                __FUNCTION__, res);
        mParent->signalError();
        return false;
    }
    mRequest = NULL;

    // Deliver each output buffer; BLOB (JPEG) buffers are deferred to the
    // compressor, everything else is unlocked and enqueued immediately
    int compressedBufferIndex = -1;
    ALOGV("Readout: Processing %d buffers", mBuffers->size());
    for (size_t i = 0; i < mBuffers->size(); i++) {
        const StreamBuffer &b = (*mBuffers)[i];
        ALOGV("Readout:    Buffer %d: Stream %d, %d x %d, format 0x%x, stride %d",
                i, b.streamId, b.width, b.height, b.format, b.stride);
        if (b.streamId > 0) {
            if (b.format == HAL_PIXEL_FORMAT_BLOB) {
                // Assumes only one BLOB buffer type per capture
                compressedBufferIndex = i;
            } else {
                ALOGV("Readout:    Sending image buffer %d (%p) to output stream %d",
                        i, (void*)*(b.buffer), b.streamId);
                GraphicBufferMapper::get().unlock(*(b.buffer));
                const Stream &s = mParent->getStreamInfo(b.streamId);
                res = s.ops->enqueue_buffer(s.ops, captureTime, b.buffer);
                if (res != OK) {
                    ALOGE("Error enqueuing image buffer %p: %s (%d)", b.buffer,
                            strerror(-res), res);
                    mParent->signalError();
                }
            }
        }
    }

    if (compressedBufferIndex == -1) {
        delete mBuffers;
    } else {
        ALOGV("Readout:  Starting JPEG compression for buffer %d, stream %d",
                compressedBufferIndex,
                (*mBuffers)[compressedBufferIndex].streamId);
        mJpegTimestamp = captureTime;
        // Takes ownership of mBuffers
        mParent->mJpegCompressor->start(mBuffers, this);
    }
    mBuffers = NULL;

    Mutex::Autolock l(mInputMutex);
    mRequestCount--;
    ALOGV("Readout: Done with request %d", frameNumber);
    return true;
}
   1499 
   1500 void EmulatedFakeCamera2::ReadoutThread::onJpegDone(
   1501         const StreamBuffer &jpegBuffer, bool success) {
   1502     status_t res;
   1503     if (!success) {
   1504         ALOGE("%s: Error queueing compressed image buffer %p",
   1505                 __FUNCTION__, jpegBuffer.buffer);
   1506         mParent->signalError();
   1507         return;
   1508     }
   1509 
   1510     // Write to JPEG output stream
   1511     ALOGV("%s: Compression complete, pushing to stream %d", __FUNCTION__,
   1512             jpegBuffer.streamId);
   1513 
   1514     GraphicBufferMapper::get().unlock(*(jpegBuffer.buffer));
   1515     const Stream &s = mParent->getStreamInfo(jpegBuffer.streamId);
   1516     res = s.ops->enqueue_buffer(s.ops, mJpegTimestamp, jpegBuffer.buffer);
   1517 }
   1518 
   1519 void EmulatedFakeCamera2::ReadoutThread::onJpegInputDone(
   1520         const StreamBuffer &inputBuffer) {
   1521     status_t res;
   1522     GraphicBufferMapper::get().unlock(*(inputBuffer.buffer));
   1523     const ReprocessStream &s =
   1524             mParent->getReprocessStreamInfo(-inputBuffer.streamId);
   1525     res = s.ops->release_buffer(s.ops, inputBuffer.buffer);
   1526     if (res != OK) {
   1527         ALOGE("Error releasing reprocess buffer %p: %s (%d)",
   1528                 inputBuffer.buffer, strerror(-res), res);
   1529         mParent->signalError();
   1530     }
   1531 }
   1532 
   1533 status_t EmulatedFakeCamera2::ReadoutThread::collectStatisticsMetadata(
   1534         camera_metadata_t *frame) {
   1535     // Completely fake face rectangles, don't correspond to real faces in scene
   1536     ALOGV("Readout:    Collecting statistics metadata");
   1537 
   1538     status_t res;
   1539     camera_metadata_entry_t entry;
   1540     res = find_camera_metadata_entry(frame,
   1541                 ANDROID_STATISTICS_FACE_DETECT_MODE,
   1542                 &entry);
   1543     if (res != OK) {
   1544         ALOGE("%s: Unable to find face detect mode!", __FUNCTION__);
   1545         return BAD_VALUE;
   1546     }
   1547 
   1548     if (entry.data.u8[0] == ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) return OK;
   1549 
   1550     // The coordinate system for the face regions is the raw sensor pixel
   1551     // coordinates. Here, we map from the scene coordinates (0-19 in both axis)
   1552     // to raw pixels, for the scene defined in fake-pipeline2/Scene.cpp. We
   1553     // approximately place two faces on top of the windows of the house. No
   1554     // actual faces exist there, but might one day. Note that this doesn't
   1555     // account for the offsets used to account for aspect ratio differences, so
   1556     // the rectangles don't line up quite right.
   1557     const size_t numFaces = 2;
   1558     int32_t rects[numFaces * 4] = {
   1559             Sensor::kResolution[0] * 10 / 20,
   1560             Sensor::kResolution[1] * 15 / 20,
   1561             Sensor::kResolution[0] * 12 / 20,
   1562             Sensor::kResolution[1] * 17 / 20,
   1563 
   1564             Sensor::kResolution[0] * 16 / 20,
   1565             Sensor::kResolution[1] * 15 / 20,
   1566             Sensor::kResolution[0] * 18 / 20,
   1567             Sensor::kResolution[1] * 17 / 20
   1568     };
   1569     // To simulate some kind of real detection going on, we jitter the rectangles on
   1570     // each frame by a few pixels in each dimension.
   1571     for (size_t i = 0; i < numFaces * 4; i++) {
   1572         rects[i] += (int32_t)(((float)rand() / RAND_MAX) * 6 - 3);
   1573     }
   1574     // The confidence scores (0-100) are similarly jittered.
   1575     uint8_t scores[numFaces] = { 85, 95 };
   1576     for (size_t i = 0; i < numFaces; i++) {
   1577         scores[i] += (int32_t)(((float)rand() / RAND_MAX) * 10 - 5);
   1578     }
   1579 
   1580     res = add_camera_metadata_entry(frame, ANDROID_STATISTICS_FACE_RECTANGLES,
   1581             rects, numFaces * 4);
   1582     if (res != OK) {
   1583         ALOGE("%s: Unable to add face rectangles!", __FUNCTION__);
   1584         return BAD_VALUE;
   1585     }
   1586 
   1587     res = add_camera_metadata_entry(frame, ANDROID_STATISTICS_FACE_SCORES,
   1588             scores, numFaces);
   1589     if (res != OK) {
   1590         ALOGE("%s: Unable to add face scores!", __FUNCTION__);
   1591         return BAD_VALUE;
   1592     }
   1593 
   1594     if (entry.data.u8[0] == ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE) return OK;
   1595 
   1596     // Advanced face detection options - add eye/mouth coordinates.  The
   1597     // coordinates in order are (leftEyeX, leftEyeY, rightEyeX, rightEyeY,
   1598     // mouthX, mouthY). The mapping is the same as the face rectangles.
   1599     int32_t features[numFaces * 6] = {
   1600         Sensor::kResolution[0] * 10.5 / 20,
   1601         Sensor::kResolution[1] * 16 / 20,
   1602         Sensor::kResolution[0] * 11.5 / 20,
   1603         Sensor::kResolution[1] * 16 / 20,
   1604         Sensor::kResolution[0] * 11 / 20,
   1605         Sensor::kResolution[1] * 16.5 / 20,
   1606 
   1607         Sensor::kResolution[0] * 16.5 / 20,
   1608         Sensor::kResolution[1] * 16 / 20,
   1609         Sensor::kResolution[0] * 17.5 / 20,
   1610         Sensor::kResolution[1] * 16 / 20,
   1611         Sensor::kResolution[0] * 17 / 20,
   1612         Sensor::kResolution[1] * 16.5 / 20,
   1613     };
   1614     // Jitter these a bit less than the rects
   1615     for (size_t i = 0; i < numFaces * 6; i++) {
   1616         features[i] += (int32_t)(((float)rand() / RAND_MAX) * 4 - 2);
   1617     }
   1618     // These are unique IDs that are used to identify each face while it's
   1619     // visible to the detector (if a face went away and came back, it'd get a
   1620     // new ID).
   1621     int32_t ids[numFaces] = {
   1622         100, 200
   1623     };
   1624 
   1625     res = add_camera_metadata_entry(frame, ANDROID_STATISTICS_FACE_LANDMARKS,
   1626             features, numFaces * 6);
   1627     if (res != OK) {
   1628         ALOGE("%s: Unable to add face landmarks!", __FUNCTION__);
   1629         return BAD_VALUE;
   1630     }
   1631 
   1632     res = add_camera_metadata_entry(frame, ANDROID_STATISTICS_FACE_IDS,
   1633             ids, numFaces);
   1634     if (res != OK) {
   1635         ALOGE("%s: Unable to add face scores!", __FUNCTION__);
   1636         return BAD_VALUE;
   1637     }
   1638 
   1639     return OK;
   1640 }
   1641 
   1642 EmulatedFakeCamera2::ControlThread::ControlThread(EmulatedFakeCamera2 *parent):
   1643         Thread(false),
   1644         mParent(parent) {
   1645     mRunning = false;
   1646 }
   1647 
// No resources owned directly by the control thread; thread shutdown is
// handled by the Thread base class.
EmulatedFakeCamera2::ControlThread::~ControlThread() {
}
   1650 
// One-time initialization run on the control thread right before its first
// threadLoop() iteration: resets all simulated 3A (AF/AE/AWB) state to the
// defaults and then signals any caller blocked in waitUntilRunning().
status_t EmulatedFakeCamera2::ControlThread::readyToRun() {
    Mutex::Autolock lock(mInputMutex);

    ALOGV("Starting up ControlThread");
    mRunning = true;
    // No pending AF/precapture triggers at startup.
    mStartAf = false;
    mCancelAf = false;
    mStartPrecapture = false;

    mControlMode = ANDROID_CONTROL_MODE_AUTO;

    mEffectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
    mSceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;

    // Autofocus defaults.
    mAfMode = ANDROID_CONTROL_AF_MODE_AUTO;
    mAfModeChange = false;

    // Auto-exposure / auto-white-balance defaults.
    mAeMode = ANDROID_CONTROL_AE_MODE_ON;
    mAwbMode = ANDROID_CONTROL_AWB_MODE_AUTO;

    mAfTriggerId = 0;
    mPrecaptureTriggerId = 0;

    mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
    mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
    mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;

    mExposureTime = kNormalExposureTime;

    // Wake up waitUntilRunning() callers now that mRunning is set.
    mInputSignal.signal();
    return NO_ERROR;
}
   1683 
   1684 status_t EmulatedFakeCamera2::ControlThread::waitUntilRunning() {
   1685     Mutex::Autolock lock(mInputMutex);
   1686     if (!mRunning) {
   1687         ALOGV("Waiting for control thread to start");
   1688         mInputSignal.wait(mInputMutex);
   1689     }
   1690     return OK;
   1691 }
   1692 
   1693 // Override android.control.* fields with 3A values before sending request to sensor
   1694 status_t EmulatedFakeCamera2::ControlThread::processRequest(camera_metadata_t *request) {
   1695     Mutex::Autolock lock(mInputMutex);
   1696     // TODO: Add handling for all android.control.* fields here
   1697     camera_metadata_entry_t mode;
   1698     status_t res;
   1699 
   1700 #define READ_IF_OK(res, what, def)                                             \
   1701     (((res) == OK) ? (what) : (uint8_t)(def))
   1702 
   1703     res = find_camera_metadata_entry(request,
   1704             ANDROID_CONTROL_MODE,
   1705             &mode);
   1706     mControlMode = READ_IF_OK(res, mode.data.u8[0], ANDROID_CONTROL_MODE_OFF);
   1707 
   1708     // disable all 3A
   1709     if (mControlMode == ANDROID_CONTROL_MODE_OFF) {
   1710         mEffectMode =   ANDROID_CONTROL_EFFECT_MODE_OFF;
   1711         mSceneMode =    ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED;
   1712         mAfMode =       ANDROID_CONTROL_AF_MODE_OFF;
   1713         mAeLock =       ANDROID_CONTROL_AE_LOCK_ON;
   1714         mAeMode =       ANDROID_CONTROL_AE_MODE_OFF;
   1715         mAfModeChange = true;
   1716         mStartAf =      false;
   1717         mCancelAf =     true;
   1718         mAeState =      ANDROID_CONTROL_AE_STATE_INACTIVE;
   1719         mAwbMode =      ANDROID_CONTROL_AWB_MODE_OFF;
   1720         return res;
   1721     }
   1722 
   1723     res = find_camera_metadata_entry(request,
   1724             ANDROID_CONTROL_EFFECT_MODE,
   1725             &mode);
   1726     mEffectMode = READ_IF_OK(res, mode.data.u8[0],
   1727                              ANDROID_CONTROL_EFFECT_MODE_OFF);
   1728 
   1729     res = find_camera_metadata_entry(request,
   1730             ANDROID_CONTROL_SCENE_MODE,
   1731             &mode);
   1732     mSceneMode = READ_IF_OK(res, mode.data.u8[0],
   1733                              ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED);
   1734 
   1735     res = find_camera_metadata_entry(request,
   1736             ANDROID_CONTROL_AF_MODE,
   1737             &mode);
   1738     if (mAfMode != mode.data.u8[0]) {
   1739         ALOGV("AF new mode: %d, old mode %d", mode.data.u8[0], mAfMode);
   1740         mAfMode = mode.data.u8[0];
   1741         mAfModeChange = true;
   1742         mStartAf = false;
   1743         mCancelAf = false;
   1744     }
   1745 
   1746     res = find_camera_metadata_entry(request,
   1747             ANDROID_CONTROL_AE_MODE,
   1748             &mode);
   1749     mAeMode = READ_IF_OK(res, mode.data.u8[0],
   1750                              ANDROID_CONTROL_AE_MODE_OFF);
   1751 
   1752     res = find_camera_metadata_entry(request,
   1753             ANDROID_CONTROL_AE_LOCK,
   1754             &mode);
   1755     uint8_t aeLockVal = READ_IF_OK(res, mode.data.u8[0],
   1756                                    ANDROID_CONTROL_AE_LOCK_ON);
   1757     bool aeLock = (aeLockVal == ANDROID_CONTROL_AE_LOCK_ON);
   1758     if (mAeLock && !aeLock) {
   1759         mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
   1760     }
   1761     mAeLock = aeLock;
   1762 
   1763     res = find_camera_metadata_entry(request,
   1764             ANDROID_CONTROL_AWB_MODE,
   1765             &mode);
   1766     mAwbMode = READ_IF_OK(res, mode.data.u8[0],
   1767                           ANDROID_CONTROL_AWB_MODE_OFF);
   1768 
   1769     // TODO: Override more control fields
   1770 
   1771     if (mAeMode != ANDROID_CONTROL_AE_MODE_OFF) {
   1772         camera_metadata_entry_t exposureTime;
   1773         res = find_camera_metadata_entry(request,
   1774                 ANDROID_SENSOR_EXPOSURE_TIME,
   1775                 &exposureTime);
   1776         if (res == OK) {
   1777             exposureTime.data.i64[0] = mExposureTime;
   1778         }
   1779     }
   1780 
   1781 #undef READ_IF_OK
   1782 
   1783     return OK;
   1784 }
   1785 
   1786 status_t EmulatedFakeCamera2::ControlThread::triggerAction(uint32_t msgType,
   1787         int32_t ext1, int32_t ext2) {
   1788     ALOGV("%s: Triggering %d (%d, %d)", __FUNCTION__, msgType, ext1, ext2);
   1789     Mutex::Autolock lock(mInputMutex);
   1790     switch (msgType) {
   1791         case CAMERA2_TRIGGER_AUTOFOCUS:
   1792             mAfTriggerId = ext1;
   1793             mStartAf = true;
   1794             mCancelAf = false;
   1795             break;
   1796         case CAMERA2_TRIGGER_CANCEL_AUTOFOCUS:
   1797             mAfTriggerId = ext1;
   1798             mStartAf = false;
   1799             mCancelAf = true;
   1800             break;
   1801         case CAMERA2_TRIGGER_PRECAPTURE_METERING:
   1802             mPrecaptureTriggerId = ext1;
   1803             mStartPrecapture = true;
   1804             break;
   1805         default:
   1806             ALOGE("%s: Unknown action triggered: %d (arguments %d %d)",
   1807                     __FUNCTION__, msgType, ext1, ext2);
   1808             return BAD_VALUE;
   1809     }
   1810     return OK;
   1811 }
   1812 
   1813 const nsecs_t EmulatedFakeCamera2::ControlThread::kControlCycleDelay = 100 * MSEC;
   1814 const nsecs_t EmulatedFakeCamera2::ControlThread::kMinAfDuration = 500 * MSEC;
   1815 const nsecs_t EmulatedFakeCamera2::ControlThread::kMaxAfDuration = 900 * MSEC;
   1816 const float EmulatedFakeCamera2::ControlThread::kAfSuccessRate = 0.9;
   1817  // Once every 5 seconds
   1818 const float EmulatedFakeCamera2::ControlThread::kContinuousAfStartRate =
   1819         kControlCycleDelay / 5.0 * SEC;
   1820 const nsecs_t EmulatedFakeCamera2::ControlThread::kMinAeDuration = 500 * MSEC;
   1821 const nsecs_t EmulatedFakeCamera2::ControlThread::kMaxAeDuration = 2 * SEC;
   1822 const nsecs_t EmulatedFakeCamera2::ControlThread::kMinPrecaptureAeDuration = 100 * MSEC;
   1823 const nsecs_t EmulatedFakeCamera2::ControlThread::kMaxPrecaptureAeDuration = 400 * MSEC;
   1824  // Once every 3 seconds
   1825 const float EmulatedFakeCamera2::ControlThread::kAeScanStartRate =
   1826     kControlCycleDelay / 3000000000.0;
   1827 
   1828 const nsecs_t EmulatedFakeCamera2::ControlThread::kNormalExposureTime = 10 * MSEC;
   1829 const nsecs_t EmulatedFakeCamera2::ControlThread::kExposureJump = 2 * MSEC;
   1830 const nsecs_t EmulatedFakeCamera2::ControlThread::kMinExposureTime = 1 * MSEC;
   1831 
// One control cycle: snapshot the shared trigger/mode state under the lock,
// advance the AF and AE state machines, publish any state changes, then
// sleep until the next cycle (shortened if a scan ends sooner).
bool EmulatedFakeCamera2::ControlThread::threadLoop() {
    bool afModeChange = false;
    bool afTriggered = false;
    bool afCancelled = false;
    uint8_t afState;
    uint8_t afMode;
    int32_t afTriggerId;
    bool precaptureTriggered = false;
    uint8_t aeState;
    uint8_t aeMode;
    bool    aeLock;
    int32_t precaptureTriggerId;
    nsecs_t nextSleep = kControlCycleDelay;

    // Copy all inputs under the lock and consume one-shot trigger flags, so
    // the state-machine work below runs without holding mInputMutex.
    {
        Mutex::Autolock lock(mInputMutex);
        if (mStartAf) {
            ALOGD("Starting AF trigger processing");
            afTriggered = true;
            mStartAf = false;
        } else if (mCancelAf) {
            ALOGD("Starting cancel AF trigger processing");
            afCancelled = true;
            mCancelAf = false;
        }
        afState = mAfState;
        afMode = mAfMode;
        afModeChange = mAfModeChange;
        mAfModeChange = false;

        afTriggerId = mAfTriggerId;

        if(mStartPrecapture) {
            ALOGD("Starting precapture trigger processing");
            precaptureTriggered = true;
            mStartPrecapture = false;
        }
        aeState = mAeState;
        aeMode = mAeMode;
        aeLock = mAeLock;
        precaptureTriggerId = mPrecaptureTriggerId;
    }

    // A cancel or AF mode switch aborts any in-flight scan and returns the
    // AF state machine to INACTIVE.
    if (afCancelled || afModeChange) {
        ALOGV("Resetting AF state due to cancel/mode change");
        afState = ANDROID_CONTROL_AF_STATE_INACTIVE;
        updateAfState(afState, afTriggerId);
        mAfScanDuration = 0;
        mLockAfterPassiveScan = false;
    }

    // NOTE(review): oldAfState is never read afterwards — appears unused.
    uint8_t oldAfState = afState;

    // AF pipeline: trigger handling, possible passive-scan start, scan
    // progress, then notify on any state change.
    if (afTriggered) {
        afState = processAfTrigger(afMode, afState);
    }

    afState = maybeStartAfScan(afMode, afState);
    afState = updateAfScan(afMode, afState, &nextSleep);
    updateAfState(afState, afTriggerId);

    // AE pipeline, mirroring the AF sequence above.
    if (precaptureTriggered) {
        aeState = processPrecaptureTrigger(aeMode, aeState);
    }

    aeState = maybeStartAeScan(aeMode, aeLock, aeState);
    aeState = updateAeScan(aeMode, aeLock, aeState, &nextSleep);
    updateAeState(aeState, precaptureTriggerId);

    // Sleep out the rest of the cycle; retry nanosleep with the remaining
    // time if it is interrupted. nextSleep <= kControlCycleDelay (100 ms),
    // so it always fits in tv_nsec.
    int ret;
    timespec t;
    t.tv_sec = 0;
    t.tv_nsec = nextSleep;
    do {
        ret = nanosleep(&t, &t);
    } while (ret != 0);

    // Account the elapsed (requested) sleep against any active scans.
    if (mAfScanDuration > 0) {
        mAfScanDuration -= nextSleep;
    }
    if (mAeScanDuration > 0) {
        mAeScanDuration -= nextSleep;
    }

    // Returning true keeps the Thread loop running.
    return true;
}
   1918 
// Apply an AF trigger to the AF state machine and return the new state.
// Behavior depends on the active AF mode: AUTO/MACRO start a timed active
// scan, the CONTINUOUS modes lock onto the current (or pending) passive
// scan result, and OFF/EDOF ignore triggers entirely.
int EmulatedFakeCamera2::ControlThread::processAfTrigger(uint8_t afMode,
        uint8_t afState) {
    switch (afMode) {
        case ANDROID_CONTROL_AF_MODE_OFF:
        case ANDROID_CONTROL_AF_MODE_EDOF:
            // Do nothing
            break;
        case ANDROID_CONTROL_AF_MODE_MACRO:
        case ANDROID_CONTROL_AF_MODE_AUTO:
            switch (afState) {
                case ANDROID_CONTROL_AF_STATE_INACTIVE:
                case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
                case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
                    // Start new focusing cycle with a random duration in
                    // [kMinAfDuration, kMaxAfDuration].
                    mAfScanDuration =  ((double)rand() / RAND_MAX) *
                        (kMaxAfDuration - kMinAfDuration) + kMinAfDuration;
                    afState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
                    ALOGV("%s: AF scan start, duration %lld ms",
                          __FUNCTION__, mAfScanDuration / 1000000);
                    break;
                case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN:
                    // Ignore new request, already scanning
                    break;
                default:
                    ALOGE("Unexpected AF state in AUTO/MACRO AF mode: %d",
                          afState);
            }
            break;
        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
            switch (afState) {
                // Picture mode waits for passive scan to complete;
                // updateAfScan() locks once the scan finishes.
                case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
                    mLockAfterPassiveScan = true;
                    break;
                case ANDROID_CONTROL_AF_STATE_INACTIVE:
                    afState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
                    break;
                case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
                    afState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
                    break;
                case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
                case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
                    // Must cancel to get out of these states
                    break;
                default:
                    ALOGE("Unexpected AF state in CONTINUOUS_PICTURE AF mode: %d",
                          afState);
            }
            break;
        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
            switch (afState) {
                // Video mode does not wait for passive scan to complete:
                // an in-progress scan locks immediately as NOT_FOCUSED.
                case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
                case ANDROID_CONTROL_AF_STATE_INACTIVE:
                    afState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
                    break;
                case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
                    afState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
                    break;
                case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
                case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
                    // Must cancel to get out of these states
                    break;
                default:
                    ALOGE("Unexpected AF state in CONTINUOUS_VIDEO AF mode: %d",
                          afState);
            }
            break;
        default:
            // Unknown AF mode: leave the state untouched.
            break;
    }
    return afState;
}
   1992 
   1993 int EmulatedFakeCamera2::ControlThread::maybeStartAfScan(uint8_t afMode,
   1994         uint8_t afState) {
   1995     if ((afMode == ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO ||
   1996             afMode == ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE) &&
   1997         (afState == ANDROID_CONTROL_AF_STATE_INACTIVE ||
   1998             afState == ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED)) {
   1999 
   2000         bool startScan = ((double)rand() / RAND_MAX) < kContinuousAfStartRate;
   2001         if (startScan) {
   2002             // Start new passive focusing cycle
   2003             mAfScanDuration =  ((double)rand() / RAND_MAX) *
   2004                 (kMaxAfDuration - kMinAfDuration) + kMinAfDuration;
   2005             afState = ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN;
   2006             ALOGV("%s: AF passive scan start, duration %lld ms",
   2007                 __FUNCTION__, mAfScanDuration / 1000000);
   2008         }
   2009     }
   2010     return afState;
   2011 }
   2012 
// Advance an in-progress AF scan. When the scan timer (mAfScanDuration,
// decremented by threadLoop) has expired, resolve it to a terminal state
// according to the AF mode; otherwise shorten *maxSleep so the control
// cycle wakes up exactly when the scan ends.
int EmulatedFakeCamera2::ControlThread::updateAfScan(uint8_t afMode,
        uint8_t afState, nsecs_t *maxSleep) {
    // Only the two scanning states have anything to update.
    if (! (afState == ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN ||
            afState == ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN ) ) {
        return afState;
    }

    if (mAfScanDuration <= 0) {
        ALOGV("%s: AF scan done", __FUNCTION__);
        switch (afMode) {
            case ANDROID_CONTROL_AF_MODE_MACRO:
            case ANDROID_CONTROL_AF_MODE_AUTO: {
                // Active scan succeeds with probability kAfSuccessRate.
                bool success = ((double)rand() / RAND_MAX) < kAfSuccessRate;
                if (success) {
                    afState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
                } else {
                    afState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
                }
                break;
            }
            case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
                // Lock if a trigger arrived during the passive scan
                // (set by processAfTrigger); otherwise stay passive.
                if (mLockAfterPassiveScan) {
                    afState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
                    mLockAfterPassiveScan = false;
                } else {
                    afState = ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED;
                }
                break;
            case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
                afState = ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED;
                break;
            default:
                ALOGE("Unexpected AF mode in scan state");
        }
    } else {
        // Scan still running: don't sleep past its end.
        if (mAfScanDuration <= *maxSleep) {
            *maxSleep = mAfScanDuration;
        }
    }
    return afState;
}
   2054 
   2055 void EmulatedFakeCamera2::ControlThread::updateAfState(uint8_t newState,
   2056         int32_t triggerId) {
   2057     Mutex::Autolock lock(mInputMutex);
   2058     if (mAfState != newState) {
   2059         ALOGV("%s: Autofocus state now %d, id %d", __FUNCTION__,
   2060                 newState, triggerId);
   2061         mAfState = newState;
   2062         mParent->sendNotification(CAMERA2_MSG_AUTOFOCUS,
   2063                 newState, triggerId, 0);
   2064     }
   2065 }
   2066 
   2067 int EmulatedFakeCamera2::ControlThread::processPrecaptureTrigger(uint8_t aeMode,
   2068         uint8_t aeState) {
   2069     switch (aeMode) {
   2070         case ANDROID_CONTROL_AE_MODE_OFF:
   2071             // Don't do anything for these
   2072             return aeState;
   2073         case ANDROID_CONTROL_AE_MODE_ON:
   2074         case ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:
   2075         case ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH:
   2076         case ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE:
   2077             // Trigger a precapture cycle
   2078             aeState = ANDROID_CONTROL_AE_STATE_PRECAPTURE;
   2079             mAeScanDuration = ((double)rand() / RAND_MAX) *
   2080                     (kMaxPrecaptureAeDuration - kMinPrecaptureAeDuration) +
   2081                     kMinPrecaptureAeDuration;
   2082             ALOGD("%s: AE precapture scan start, duration %lld ms",
   2083                     __FUNCTION__, mAeScanDuration / 1000000);
   2084 
   2085     }
   2086     return aeState;
   2087 }
   2088 
   2089 int EmulatedFakeCamera2::ControlThread::maybeStartAeScan(uint8_t aeMode,
   2090         bool aeLocked,
   2091         uint8_t aeState) {
   2092     if (aeLocked) return aeState;
   2093     switch (aeMode) {
   2094         case ANDROID_CONTROL_AE_MODE_OFF:
   2095             break;
   2096         case ANDROID_CONTROL_AE_MODE_ON:
   2097         case ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:
   2098         case ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH:
   2099         case ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE: {
   2100             if (aeState != ANDROID_CONTROL_AE_STATE_INACTIVE &&
   2101                     aeState != ANDROID_CONTROL_AE_STATE_CONVERGED) break;
   2102 
   2103             bool startScan = ((double)rand() / RAND_MAX) < kAeScanStartRate;
   2104             if (startScan) {
   2105                 mAeScanDuration = ((double)rand() / RAND_MAX) *
   2106                 (kMaxAeDuration - kMinAeDuration) + kMinAeDuration;
   2107                 aeState = ANDROID_CONTROL_AE_STATE_SEARCHING;
   2108                 ALOGV("%s: AE scan start, duration %lld ms",
   2109                         __FUNCTION__, mAeScanDuration / 1000000);
   2110             }
   2111         }
   2112     }
   2113 
   2114     return aeState;
   2115 }
   2116 
// Advance an in-progress AE scan. An AE lock (outside precapture) ends the
// scan immediately in LOCKED. Otherwise, while SEARCHING/PRECAPTURE: when
// the scan timer expires, settle to LOCKED/CONVERGED and reset the exposure
// time; while still running, jitter the simulated exposure time and shorten
// *maxSleep so the control cycle wakes when the scan ends.
int EmulatedFakeCamera2::ControlThread::updateAeScan(uint8_t aeMode,
        bool aeLock, uint8_t aeState, nsecs_t *maxSleep) {
    if (aeLock && aeState != ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
        mAeScanDuration = 0;
        aeState = ANDROID_CONTROL_AE_STATE_LOCKED;
    } else if ((aeState == ANDROID_CONTROL_AE_STATE_SEARCHING) ||
            (aeState == ANDROID_CONTROL_AE_STATE_PRECAPTURE ) ) {
        if (mAeScanDuration <= 0) {
            ALOGV("%s: AE scan done", __FUNCTION__);
            aeState = aeLock ?
                    ANDROID_CONTROL_AE_STATE_LOCKED :ANDROID_CONTROL_AE_STATE_CONVERGED;

            // mExposureTime is shared with processRequest(), so take the
            // input lock before touching it.
            Mutex::Autolock lock(mInputMutex);
            mExposureTime = kNormalExposureTime;
        } else {
            // Scan still running: don't sleep past its end.
            if (mAeScanDuration <= *maxSleep) {
                *maxSleep = mAeScanDuration;
            }

            // Random walk of the exposure time in [-kExposureJump,
            // +kExposureJump], clamped below at kMinExposureTime.
            int64_t exposureDelta =
                    ((double)rand() / RAND_MAX) * 2 * kExposureJump -
                    kExposureJump;
            Mutex::Autolock lock(mInputMutex);
            mExposureTime = mExposureTime + exposureDelta;
            if (mExposureTime < kMinExposureTime) mExposureTime = kMinExposureTime;
        }
    }

    return aeState;
}
   2147 
   2148 
   2149 void EmulatedFakeCamera2::ControlThread::updateAeState(uint8_t newState,
   2150         int32_t triggerId) {
   2151     Mutex::Autolock lock(mInputMutex);
   2152     if (mAeState != newState) {
   2153         ALOGV("%s: Autoexposure state now %d, id %d", __FUNCTION__,
   2154                 newState, triggerId);
   2155         mAeState = newState;
   2156         mParent->sendNotification(CAMERA2_MSG_AUTOEXPOSURE,
   2157                 newState, triggerId, 0);
   2158     }
   2159 }
   2160 
   2161 /** Private methods */
   2162 
   2163 status_t EmulatedFakeCamera2::constructStaticInfo(
   2164         camera_metadata_t **info,
   2165         bool sizeRequest) const {
   2166 
   2167     size_t entryCount = 0;
   2168     size_t dataCount = 0;
   2169     status_t ret;
   2170 
   2171 #define ADD_OR_SIZE( tag, data, count ) \
   2172     if ( ( ret = addOrSize(*info, sizeRequest, &entryCount, &dataCount, \
   2173             tag, data, count) ) != OK ) return ret
   2174 
   2175     // android.lens
   2176 
   2177     // 5 cm min focus distance for back camera, infinity (fixed focus) for front
   2178     const float minFocusDistance = mFacingBack ? 1.0/0.05 : 0.0;
   2179     ADD_OR_SIZE(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
   2180             &minFocusDistance, 1);
   2181     // 5 m hyperfocal distance for back camera, infinity (fixed focus) for front
   2182     const float hyperFocalDistance = mFacingBack ? 1.0/5.0 : 0.0;
   2183     ADD_OR_SIZE(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
   2184             &minFocusDistance, 1);
   2185 
   2186     static const float focalLength = 3.30f; // mm
   2187     ADD_OR_SIZE(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
   2188             &focalLength, 1);
   2189     static const float aperture = 2.8f;
   2190     ADD_OR_SIZE(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
   2191             &aperture, 1);
   2192     static const float filterDensity = 0;
   2193     ADD_OR_SIZE(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
   2194             &filterDensity, 1);
   2195     static const uint8_t availableOpticalStabilization =
   2196             ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
   2197     ADD_OR_SIZE(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
   2198             &availableOpticalStabilization, 1);
   2199 
   2200     static const int32_t lensShadingMapSize[] = {1, 1};
   2201     ADD_OR_SIZE(ANDROID_LENS_INFO_SHADING_MAP_SIZE, lensShadingMapSize,
   2202             sizeof(lensShadingMapSize)/sizeof(int32_t));
   2203 
   2204     static const float lensShadingMap[3 * 1 * 1 ] =
   2205             { 1.f, 1.f, 1.f };
   2206     ADD_OR_SIZE(ANDROID_LENS_INFO_SHADING_MAP, lensShadingMap,
   2207             sizeof(lensShadingMap)/sizeof(float));
   2208 
   2209     // Identity transform
   2210     static const int32_t geometricCorrectionMapSize[] = {2, 2};
   2211     ADD_OR_SIZE(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE,
   2212             geometricCorrectionMapSize,
   2213             sizeof(geometricCorrectionMapSize)/sizeof(int32_t));
   2214 
   2215     static const float geometricCorrectionMap[2 * 3 * 2 * 2] = {
   2216             0.f, 0.f,  0.f, 0.f,  0.f, 0.f,
   2217             1.f, 0.f,  1.f, 0.f,  1.f, 0.f,
   2218             0.f, 1.f,  0.f, 1.f,  0.f, 1.f,
   2219             1.f, 1.f,  1.f, 1.f,  1.f, 1.f};
   2220     ADD_OR_SIZE(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP,
   2221             geometricCorrectionMap,
   2222             sizeof(geometricCorrectionMap)/sizeof(float));
   2223 
   2224     int32_t lensFacing = mFacingBack ?
   2225             ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
   2226     ADD_OR_SIZE(ANDROID_LENS_FACING, &lensFacing, 1);
   2227 
   2228     float lensPosition[3];
   2229     if (mFacingBack) {
   2230         // Back-facing camera is center-top on device
   2231         lensPosition[0] = 0;
   2232         lensPosition[1] = 20;
   2233         lensPosition[2] = -5;
   2234     } else {
   2235         // Front-facing camera is center-right on device
   2236         lensPosition[0] = 20;
   2237         lensPosition[1] = 20;
   2238         lensPosition[2] = 0;
   2239     }
   2240     ADD_OR_SIZE(ANDROID_LENS_POSITION, lensPosition, sizeof(lensPosition)/
   2241             sizeof(float));
   2242 
   2243     // android.sensor
   2244 
   2245     ADD_OR_SIZE(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
   2246             Sensor::kExposureTimeRange, 2);
   2247 
   2248     ADD_OR_SIZE(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
   2249             &Sensor::kFrameDurationRange[1], 1);
   2250 
   2251     ADD_OR_SIZE(ANDROID_SENSOR_INFO_AVAILABLE_SENSITIVITIES,
   2252             Sensor::kAvailableSensitivities,
   2253             sizeof(Sensor::kAvailableSensitivities)
   2254             /sizeof(uint32_t));
   2255 
   2256     ADD_OR_SIZE(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
   2257             &Sensor::kColorFilterArrangement, 1);
   2258 
   2259     static const float sensorPhysicalSize[2] = {3.20f, 2.40f}; // mm
   2260     ADD_OR_SIZE(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
   2261             sensorPhysicalSize, 2);
   2262 
   2263     ADD_OR_SIZE(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
   2264             Sensor::kResolution, 2);
   2265 
   2266     ADD_OR_SIZE(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
   2267             Sensor::kResolution, 2);
   2268 
   2269     ADD_OR_SIZE(ANDROID_SENSOR_INFO_WHITE_LEVEL,
   2270             &Sensor::kMaxRawValue, 1);
   2271 
   2272     static const int32_t blackLevelPattern[4] = {
   2273             Sensor::kBlackLevel, Sensor::kBlackLevel,
   2274             Sensor::kBlackLevel, Sensor::kBlackLevel
   2275     };
   2276     ADD_OR_SIZE(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
   2277             blackLevelPattern, sizeof(blackLevelPattern)/sizeof(int32_t));
   2278 
   2279     //TODO: sensor color calibration fields
   2280 
   2281     // android.flash
   2282     static const uint8_t flashAvailable = 0;
   2283     ADD_OR_SIZE(ANDROID_FLASH_INFO_AVAILABLE, &flashAvailable, 1);
   2284 
   2285     static const int64_t flashChargeDuration = 0;
   2286     ADD_OR_SIZE(ANDROID_FLASH_INFO_CHARGE_DURATION, &flashChargeDuration, 1);
   2287 
   2288     // android.tonemap
   2289 
   2290     static const int32_t tonemapCurvePoints = 128;
   2291     ADD_OR_SIZE(ANDROID_TONEMAP_MAX_CURVE_POINTS, &tonemapCurvePoints, 1);
   2292 
   2293     // android.scaler
   2294 
   2295     ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_FORMATS,
   2296             kAvailableFormats,
   2297             sizeof(kAvailableFormats)/sizeof(uint32_t));
   2298 
   2299     ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
   2300             kAvailableRawSizes,
   2301             sizeof(kAvailableRawSizes)/sizeof(uint32_t));
   2302 
   2303     ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
   2304             kAvailableRawMinDurations,
   2305             sizeof(kAvailableRawMinDurations)/sizeof(uint64_t));
   2306 
   2307     if (mFacingBack) {
   2308         ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
   2309                 kAvailableProcessedSizesBack,
   2310                 sizeof(kAvailableProcessedSizesBack)/sizeof(uint32_t));
   2311     } else {
   2312         ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
   2313                 kAvailableProcessedSizesFront,
   2314                 sizeof(kAvailableProcessedSizesFront)/sizeof(uint32_t));
   2315     }
   2316 
   2317     ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
   2318             kAvailableProcessedMinDurations,
   2319             sizeof(kAvailableProcessedMinDurations)/sizeof(uint64_t));
   2320 
   2321     if (mFacingBack) {
   2322         ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
   2323                 kAvailableJpegSizesBack,
   2324                 sizeof(kAvailableJpegSizesBack)/sizeof(uint32_t));
   2325     } else {
   2326         ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
   2327                 kAvailableJpegSizesFront,
   2328                 sizeof(kAvailableJpegSizesFront)/sizeof(uint32_t));
   2329     }
   2330 
   2331     ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
   2332             kAvailableJpegMinDurations,
   2333             sizeof(kAvailableJpegMinDurations)/sizeof(uint64_t));
   2334 
   2335     static const float maxZoom = 10;
   2336     ADD_OR_SIZE(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
   2337             &maxZoom, 1);
   2338 
   2339     // android.jpeg
   2340 
   2341     static const int32_t jpegThumbnailSizes[] = {
   2342             0, 0,
   2343             160, 120,
   2344             320, 240
   2345      };
   2346     ADD_OR_SIZE(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
   2347             jpegThumbnailSizes, sizeof(jpegThumbnailSizes)/sizeof(int32_t));
   2348 
   2349     static const int32_t jpegMaxSize = JpegCompressor::kMaxJpegSize;
   2350     ADD_OR_SIZE(ANDROID_JPEG_MAX_SIZE, &jpegMaxSize, 1);
   2351 
   2352     // android.stats
   2353 
   2354     static const uint8_t availableFaceDetectModes[] = {
   2355         ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
   2356         ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE,
   2357         ANDROID_STATISTICS_FACE_DETECT_MODE_FULL
   2358     };
   2359 
   2360     ADD_OR_SIZE(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
   2361             availableFaceDetectModes,
   2362             sizeof(availableFaceDetectModes));
   2363 
   2364     static const int32_t maxFaceCount = 8;
   2365     ADD_OR_SIZE(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
   2366             &maxFaceCount, 1);
   2367 
   2368     static const int32_t histogramSize = 64;
   2369     ADD_OR_SIZE(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
   2370             &histogramSize, 1);
   2371 
   2372     static const int32_t maxHistogramCount = 1000;
   2373     ADD_OR_SIZE(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
   2374             &maxHistogramCount, 1);
   2375 
   2376     static const int32_t sharpnessMapSize[2] = {64, 64};
   2377     ADD_OR_SIZE(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
   2378             sharpnessMapSize, sizeof(sharpnessMapSize)/sizeof(int32_t));
   2379 
   2380     static const int32_t maxSharpnessMapValue = 1000;
   2381     ADD_OR_SIZE(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
   2382             &maxSharpnessMapValue, 1);
   2383 
   2384     // android.control
   2385 
   2386     static const uint8_t availableSceneModes[] = {
   2387             ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED
   2388     };
   2389     ADD_OR_SIZE(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
   2390             availableSceneModes, sizeof(availableSceneModes));
   2391 
   2392     static const uint8_t availableEffects[] = {
   2393             ANDROID_CONTROL_EFFECT_MODE_OFF
   2394     };
   2395     ADD_OR_SIZE(ANDROID_CONTROL_AVAILABLE_EFFECTS,
   2396             availableEffects, sizeof(availableEffects));
   2397 
   2398     int32_t max3aRegions = 0;
   2399     ADD_OR_SIZE(ANDROID_CONTROL_MAX_REGIONS,
   2400             &max3aRegions, 1);
   2401 
   2402     static const uint8_t availableAeModes[] = {
   2403             ANDROID_CONTROL_AE_MODE_OFF,
   2404             ANDROID_CONTROL_AE_MODE_ON
   2405     };
   2406     ADD_OR_SIZE(ANDROID_CONTROL_AE_AVAILABLE_MODES,
   2407             availableAeModes, sizeof(availableAeModes));
   2408 
   2409     static const camera_metadata_rational exposureCompensationStep = {
   2410             1, 3
   2411     };
   2412     ADD_OR_SIZE(ANDROID_CONTROL_AE_COMPENSATION_STEP,
   2413             &exposureCompensationStep, 1);
   2414 
   2415     int32_t exposureCompensationRange[] = {-9, 9};
   2416     ADD_OR_SIZE(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
   2417             exposureCompensationRange,
   2418             sizeof(exposureCompensationRange)/sizeof(int32_t));
   2419 
   2420     static const int32_t availableTargetFpsRanges[] = {
   2421             5, 30, 15, 30
   2422     };
   2423     ADD_OR_SIZE(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
   2424             availableTargetFpsRanges,
   2425             sizeof(availableTargetFpsRanges)/sizeof(int32_t));
   2426 
   2427     static const uint8_t availableAntibandingModes[] = {
   2428             ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,
   2429             ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO
   2430     };
   2431     ADD_OR_SIZE(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
   2432             availableAntibandingModes, sizeof(availableAntibandingModes));
   2433 
   2434     static const uint8_t availableAwbModes[] = {
   2435             ANDROID_CONTROL_AWB_MODE_OFF,
   2436             ANDROID_CONTROL_AWB_MODE_AUTO,
   2437             ANDROID_CONTROL_AWB_MODE_INCANDESCENT,
   2438             ANDROID_CONTROL_AWB_MODE_FLUORESCENT,
   2439             ANDROID_CONTROL_AWB_MODE_DAYLIGHT,
   2440             ANDROID_CONTROL_AWB_MODE_SHADE
   2441     };
   2442     ADD_OR_SIZE(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
   2443             availableAwbModes, sizeof(availableAwbModes));
   2444 
   2445     static const uint8_t availableAfModesBack[] = {
   2446             ANDROID_CONTROL_AF_MODE_OFF,
   2447             ANDROID_CONTROL_AF_MODE_AUTO,
   2448             ANDROID_CONTROL_AF_MODE_MACRO,
   2449             ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,
   2450             ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE
   2451     };
   2452 
   2453     static const uint8_t availableAfModesFront[] = {
   2454             ANDROID_CONTROL_AF_MODE_OFF
   2455     };
   2456 
   2457     if (mFacingBack) {
   2458         ADD_OR_SIZE(ANDROID_CONTROL_AF_AVAILABLE_MODES,
   2459                     availableAfModesBack, sizeof(availableAfModesBack));
   2460     } else {
   2461         ADD_OR_SIZE(ANDROID_CONTROL_AF_AVAILABLE_MODES,
   2462                     availableAfModesFront, sizeof(availableAfModesFront));
   2463     }
   2464 
   2465     static const uint8_t availableVstabModes[] = {
   2466             ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF
   2467     };
   2468     ADD_OR_SIZE(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
   2469             availableVstabModes, sizeof(availableVstabModes));
   2470 
   2471 #undef ADD_OR_SIZE
   2472     /** Allocate metadata if sizing */
   2473     if (sizeRequest) {
   2474         ALOGV("Allocating %d entries, %d extra bytes for "
   2475                 "static camera info",
   2476                 entryCount, dataCount);
   2477         *info = allocate_camera_metadata(entryCount, dataCount);
   2478         if (*info == NULL) {
   2479             ALOGE("Unable to allocate camera static info"
   2480                     "(%d entries, %d bytes extra data)",
   2481                     entryCount, dataCount);
   2482             return NO_MEMORY;
   2483         }
   2484     }
   2485     return OK;
   2486 }
   2487 
   2488 status_t EmulatedFakeCamera2::constructDefaultRequest(
   2489         int request_template,
   2490         camera_metadata_t **request,
   2491         bool sizeRequest) const {
   2492 
   2493     size_t entryCount = 0;
   2494     size_t dataCount = 0;
   2495     status_t ret;
   2496 
   2497 #define ADD_OR_SIZE( tag, data, count ) \
   2498     if ( ( ret = addOrSize(*request, sizeRequest, &entryCount, &dataCount, \
   2499             tag, data, count) ) != OK ) return ret
   2500 
   2501     /** android.request */
   2502 
   2503     static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
   2504     ADD_OR_SIZE(ANDROID_REQUEST_TYPE, &requestType, 1);
   2505 
   2506     static const uint8_t metadataMode = ANDROID_REQUEST_METADATA_MODE_FULL;
   2507     ADD_OR_SIZE(ANDROID_REQUEST_METADATA_MODE, &metadataMode, 1);
   2508 
   2509     static const int32_t id = 0;
   2510     ADD_OR_SIZE(ANDROID_REQUEST_ID, &id, 1);
   2511 
   2512     static const int32_t frameCount = 0;
   2513     ADD_OR_SIZE(ANDROID_REQUEST_FRAME_COUNT, &frameCount, 1);
   2514 
   2515     // OUTPUT_STREAMS set by user
   2516     entryCount += 1;
   2517     dataCount += 5; // TODO: Should be maximum stream number
   2518 
   2519     /** android.lens */
   2520 
   2521     static const float focusDistance = 0;
   2522     ADD_OR_SIZE(ANDROID_LENS_FOCUS_DISTANCE, &focusDistance, 1);
   2523 
   2524     static const float aperture = 2.8f;
   2525     ADD_OR_SIZE(ANDROID_LENS_APERTURE, &aperture, 1);
   2526 
   2527     static const float focalLength = 5.0f;
   2528     ADD_OR_SIZE(ANDROID_LENS_FOCAL_LENGTH, &focalLength, 1);
   2529 
   2530     static const float filterDensity = 0;
   2531     ADD_OR_SIZE(ANDROID_LENS_FILTER_DENSITY, &filterDensity, 1);
   2532 
   2533     static const uint8_t opticalStabilizationMode =
   2534             ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
   2535     ADD_OR_SIZE(ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
   2536             &opticalStabilizationMode, 1);
   2537 
   2538     // FOCUS_RANGE set only in frame
   2539 
   2540     /** android.sensor */
   2541 
   2542     static const int64_t exposureTime = 10 * MSEC;
   2543     ADD_OR_SIZE(ANDROID_SENSOR_EXPOSURE_TIME, &exposureTime, 1);
   2544 
   2545     static const int64_t frameDuration = 33333333L; // 1/30 s
   2546     ADD_OR_SIZE(ANDROID_SENSOR_FRAME_DURATION, &frameDuration, 1);
   2547 
   2548     static const int32_t sensitivity = 100;
   2549     ADD_OR_SIZE(ANDROID_SENSOR_SENSITIVITY, &sensitivity, 1);
   2550 
   2551     // TIMESTAMP set only in frame
   2552 
   2553     /** android.flash */
   2554 
   2555     static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
   2556     ADD_OR_SIZE(ANDROID_FLASH_MODE, &flashMode, 1);
   2557 
   2558     static const uint8_t flashPower = 10;
   2559     ADD_OR_SIZE(ANDROID_FLASH_FIRING_POWER, &flashPower, 1);
   2560 
   2561     static const int64_t firingTime = 0;
   2562     ADD_OR_SIZE(ANDROID_FLASH_FIRING_TIME, &firingTime, 1);
   2563 
   2564     /** Processing block modes */
   2565     uint8_t hotPixelMode = 0;
   2566     uint8_t demosaicMode = 0;
   2567     uint8_t noiseMode = 0;
   2568     uint8_t shadingMode = 0;
   2569     uint8_t geometricMode = 0;
   2570     uint8_t colorMode = 0;
   2571     uint8_t tonemapMode = 0;
   2572     uint8_t edgeMode = 0;
   2573     switch (request_template) {
   2574       case CAMERA2_TEMPLATE_STILL_CAPTURE:
   2575         // fall-through
   2576       case CAMERA2_TEMPLATE_VIDEO_SNAPSHOT:
   2577         // fall-through
   2578       case CAMERA2_TEMPLATE_ZERO_SHUTTER_LAG:
   2579         hotPixelMode = ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY;
   2580         demosaicMode = ANDROID_DEMOSAIC_MODE_HIGH_QUALITY;
   2581         noiseMode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
   2582         shadingMode = ANDROID_SHADING_MODE_HIGH_QUALITY;
   2583         geometricMode = ANDROID_GEOMETRIC_MODE_HIGH_QUALITY;
   2584         colorMode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY;
   2585         tonemapMode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
   2586         edgeMode = ANDROID_EDGE_MODE_HIGH_QUALITY;
   2587         break;
   2588       case CAMERA2_TEMPLATE_PREVIEW:
   2589         // fall-through
   2590       case CAMERA2_TEMPLATE_VIDEO_RECORD:
   2591         // fall-through
   2592       default:
   2593         hotPixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
   2594         demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
   2595         noiseMode = ANDROID_NOISE_REDUCTION_MODE_FAST;
   2596         shadingMode = ANDROID_SHADING_MODE_FAST;
   2597         geometricMode = ANDROID_GEOMETRIC_MODE_FAST;
   2598         colorMode = ANDROID_COLOR_CORRECTION_MODE_FAST;
   2599         tonemapMode = ANDROID_TONEMAP_MODE_FAST;
   2600         edgeMode = ANDROID_EDGE_MODE_FAST;
   2601         break;
   2602     }
   2603     ADD_OR_SIZE(ANDROID_HOT_PIXEL_MODE, &hotPixelMode, 1);
   2604     ADD_OR_SIZE(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
   2605     ADD_OR_SIZE(ANDROID_NOISE_REDUCTION_MODE, &noiseMode, 1);
   2606     ADD_OR_SIZE(ANDROID_SHADING_MODE, &shadingMode, 1);
   2607     ADD_OR_SIZE(ANDROID_GEOMETRIC_MODE, &geometricMode, 1);
   2608     ADD_OR_SIZE(ANDROID_COLOR_CORRECTION_MODE, &colorMode, 1);
   2609     ADD_OR_SIZE(ANDROID_TONEMAP_MODE, &tonemapMode, 1);
   2610     ADD_OR_SIZE(ANDROID_EDGE_MODE, &edgeMode, 1);
   2611 
   2612     /** android.noise */
   2613     static const uint8_t noiseStrength = 5;
   2614     ADD_OR_SIZE(ANDROID_NOISE_REDUCTION_STRENGTH, &noiseStrength, 1);
   2615 
   2616     /** android.color */
   2617     static const float colorTransform[9] = {
   2618         1.0f, 0.f, 0.f,
   2619         0.f, 1.f, 0.f,
   2620         0.f, 0.f, 1.f
   2621     };
   2622     ADD_OR_SIZE(ANDROID_COLOR_CORRECTION_TRANSFORM, colorTransform, 9);
   2623 
   2624     /** android.tonemap */
   2625     static const float tonemapCurve[4] = {
   2626         0.f, 0.f,
   2627         1.f, 1.f
   2628     };
   2629     ADD_OR_SIZE(ANDROID_TONEMAP_CURVE_RED, tonemapCurve, 4);
   2630     ADD_OR_SIZE(ANDROID_TONEMAP_CURVE_GREEN, tonemapCurve, 4);
   2631     ADD_OR_SIZE(ANDROID_TONEMAP_CURVE_BLUE, tonemapCurve, 4);
   2632 
   2633     /** android.edge */
   2634     static const uint8_t edgeStrength = 5;
   2635     ADD_OR_SIZE(ANDROID_EDGE_STRENGTH, &edgeStrength, 1);
   2636 
   2637     /** android.scaler */
   2638     static const int32_t cropRegion[3] = {
   2639         0, 0, Sensor::kResolution[0]
   2640     };
   2641     ADD_OR_SIZE(ANDROID_SCALER_CROP_REGION, cropRegion, 3);
   2642 
   2643     /** android.jpeg */
   2644     static const int32_t jpegQuality = 80;
   2645     ADD_OR_SIZE(ANDROID_JPEG_QUALITY, &jpegQuality, 1);
   2646 
   2647     static const int32_t thumbnailSize[2] = {
   2648         640, 480
   2649     };
   2650     ADD_OR_SIZE(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSize, 2);
   2651 
   2652     static const int32_t thumbnailQuality = 80;
   2653     ADD_OR_SIZE(ANDROID_JPEG_THUMBNAIL_QUALITY, &thumbnailQuality, 1);
   2654 
   2655     static const double gpsCoordinates[2] = {
   2656         0, 0
   2657     };
   2658     ADD_OR_SIZE(ANDROID_JPEG_GPS_COORDINATES, gpsCoordinates, 2);
   2659 
   2660     static const uint8_t gpsProcessingMethod[32] = "None";
   2661     ADD_OR_SIZE(ANDROID_JPEG_GPS_PROCESSING_METHOD, gpsProcessingMethod, 32);
   2662 
   2663     static const int64_t gpsTimestamp = 0;
   2664     ADD_OR_SIZE(ANDROID_JPEG_GPS_TIMESTAMP, &gpsTimestamp, 1);
   2665 
   2666     static const int32_t jpegOrientation = 0;
   2667     ADD_OR_SIZE(ANDROID_JPEG_ORIENTATION, &jpegOrientation, 1);
   2668 
   2669     /** android.stats */
   2670 
   2671     static const uint8_t faceDetectMode =
   2672         ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
   2673     ADD_OR_SIZE(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
   2674 
   2675     static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
   2676     ADD_OR_SIZE(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
   2677 
   2678     static const uint8_t sharpnessMapMode =
   2679         ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
   2680     ADD_OR_SIZE(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
   2681 
   2682     // faceRectangles, faceScores, faceLandmarks, faceIds, histogram,
   2683     // sharpnessMap only in frames
   2684 
   2685     /** android.control */
   2686 
   2687     uint8_t controlIntent = 0;
   2688     switch (request_template) {
   2689       case CAMERA2_TEMPLATE_PREVIEW:
   2690         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
   2691         break;
   2692       case CAMERA2_TEMPLATE_STILL_CAPTURE:
   2693         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
   2694         break;
   2695       case CAMERA2_TEMPLATE_VIDEO_RECORD:
   2696         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
   2697         break;
   2698       case CAMERA2_TEMPLATE_VIDEO_SNAPSHOT:
   2699         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
   2700         break;
   2701       case CAMERA2_TEMPLATE_ZERO_SHUTTER_LAG:
   2702         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
   2703         break;
   2704       default:
   2705         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
   2706         break;
   2707     }
   2708     ADD_OR_SIZE(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
   2709 
   2710     static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
   2711     ADD_OR_SIZE(ANDROID_CONTROL_MODE, &controlMode, 1);
   2712 
   2713     static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
   2714     ADD_OR_SIZE(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
   2715 
   2716     static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
   2717     ADD_OR_SIZE(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
   2718 
   2719     static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
   2720     ADD_OR_SIZE(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
   2721 
   2722     static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
   2723     ADD_OR_SIZE(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
   2724 
   2725     static const int32_t controlRegions[5] = {
   2726         0, 0, Sensor::kResolution[0], Sensor::kResolution[1], 1000
   2727     };
   2728     ADD_OR_SIZE(ANDROID_CONTROL_AE_REGIONS, controlRegions, 5);
   2729 
   2730     static const int32_t aeExpCompensation = 0;
   2731     ADD_OR_SIZE(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &aeExpCompensation, 1);
   2732 
   2733     static const int32_t aeTargetFpsRange[2] = {
   2734         10, 30
   2735     };
   2736     ADD_OR_SIZE(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, aeTargetFpsRange, 2);
   2737 
   2738     static const uint8_t aeAntibandingMode =
   2739             ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
   2740     ADD_OR_SIZE(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &aeAntibandingMode, 1);
   2741 
   2742     static const uint8_t awbMode =
   2743             ANDROID_CONTROL_AWB_MODE_AUTO;
   2744     ADD_OR_SIZE(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
   2745 
   2746     static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
   2747     ADD_OR_SIZE(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
   2748 
   2749     ADD_OR_SIZE(ANDROID_CONTROL_AWB_REGIONS, controlRegions, 5);
   2750 
   2751     uint8_t afMode = 0;
   2752     switch (request_template) {
   2753       case CAMERA2_TEMPLATE_PREVIEW:
   2754         afMode = ANDROID_CONTROL_AF_MODE_AUTO;
   2755         break;
   2756       case CAMERA2_TEMPLATE_STILL_CAPTURE:
   2757         afMode = ANDROID_CONTROL_AF_MODE_AUTO;
   2758         break;
   2759       case CAMERA2_TEMPLATE_VIDEO_RECORD:
   2760         afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
   2761         break;
   2762       case CAMERA2_TEMPLATE_VIDEO_SNAPSHOT:
   2763         afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
   2764         break;
   2765       case CAMERA2_TEMPLATE_ZERO_SHUTTER_LAG:
   2766         afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
   2767         break;
   2768       default:
   2769         afMode = ANDROID_CONTROL_AF_MODE_AUTO;
   2770         break;
   2771     }
   2772     ADD_OR_SIZE(ANDROID_CONTROL_AF_MODE, &afMode, 1);
   2773 
   2774     ADD_OR_SIZE(ANDROID_CONTROL_AF_REGIONS, controlRegions, 5);
   2775 
   2776     static const uint8_t vstabMode =
   2777         ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
   2778     ADD_OR_SIZE(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vstabMode, 1);
   2779 
   2780     // aeState, awbState, afState only in frame
   2781 
   2782     /** Allocate metadata if sizing */
   2783     if (sizeRequest) {
   2784         ALOGV("Allocating %d entries, %d extra bytes for "
   2785                 "request template type %d",
   2786                 entryCount, dataCount, request_template);
   2787         *request = allocate_camera_metadata(entryCount, dataCount);
   2788         if (*request == NULL) {
   2789             ALOGE("Unable to allocate new request template type %d "
   2790                     "(%d entries, %d bytes extra data)", request_template,
   2791                     entryCount, dataCount);
   2792             return NO_MEMORY;
   2793         }
   2794     }
   2795     return OK;
   2796 #undef ADD_OR_SIZE
   2797 }
   2798 
   2799 status_t EmulatedFakeCamera2::addOrSize(camera_metadata_t *request,
   2800         bool sizeRequest,
   2801         size_t *entryCount,
   2802         size_t *dataCount,
   2803         uint32_t tag,
   2804         const void *entryData,
   2805         size_t entryDataCount) {
   2806     status_t res;
   2807     if (!sizeRequest) {
   2808         return add_camera_metadata_entry(request, tag, entryData,
   2809                 entryDataCount);
   2810     } else {
   2811         int type = get_camera_metadata_tag_type(tag);
   2812         if (type < 0 ) return BAD_VALUE;
   2813         (*entryCount)++;
   2814         (*dataCount) += calculate_camera_metadata_entry_data_size(type,
   2815                 entryDataCount);
   2816         return OK;
   2817     }
   2818 }
   2819 
   2820 bool EmulatedFakeCamera2::isStreamInUse(uint32_t id) {
   2821     // Assumes mMutex is locked; otherwise new requests could enter
   2822     // configureThread while readoutThread is being checked
   2823 
   2824     // Order of isStreamInUse calls matters
   2825     if (mConfigureThread->isStreamInUse(id) ||
   2826             mReadoutThread->isStreamInUse(id) ||
   2827             mJpegCompressor->isStreamInUse(id) ) {
   2828         ALOGE("%s: Stream %d is in use in active requests!",
   2829                 __FUNCTION__, id);
   2830         return true;
   2831     }
   2832     return false;
   2833 }
   2834 
// Reprocess-stream usage is not tracked yet, so every reprocess stream is
// reported as idle regardless of |id|.
bool EmulatedFakeCamera2::isReprocessStreamInUse(uint32_t id) {
    // TODO: implement
    return false;
}
   2839 
// Looks up the Stream record for |streamId| under the camera lock.
// NOTE(review): the returned reference outlives the Autolock below;
// presumably callers rely on streams not being removed concurrently —
// confirm against the stream-release path.
const Stream& EmulatedFakeCamera2::getStreamInfo(uint32_t streamId) {
    Mutex::Autolock lock(mMutex);

    return mStreams.valueFor(streamId);
}
   2845 
// Looks up the ReprocessStream record for |streamId| under the camera lock.
// NOTE(review): as with getStreamInfo(), the returned reference outlives the
// lock — confirm callers cannot race with reprocess-stream removal.
const ReprocessStream& EmulatedFakeCamera2::getReprocessStreamInfo(uint32_t streamId) {
    Mutex::Autolock lock(mMutex);

    return mReprocessStreams.valueFor(streamId);
}
   2851 
   2852 };  /* namespace android */
   2853