Home | History | Annotate | Download | only in HAL3
      1 /* Copyright (c) 2012-2014, The Linux Foundataion. All rights reserved.
      2 *
      3 * Redistribution and use in source and binary forms, with or without
      4 * modification, are permitted provided that the following conditions are
      5 * met:
      6 *     * Redistributions of source code must retain the above copyright
      7 *       notice, this list of conditions and the following disclaimer.
      8 *     * Redistributions in binary form must reproduce the above
      9 *       copyright notice, this list of conditions and the following
     10 *       disclaimer in the documentation and/or other materials provided
     11 *       with the distribution.
     12 *     * Neither the name of The Linux Foundation nor the names of its
     13 *       contributors may be used to endorse or promote products derived
     14 *       from this software without specific prior written permission.
     15 *
     16 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
     17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
     18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
     19 * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
     20 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
     21 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
     22 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
     23 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
     24 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
     25 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
     26 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
     27 *
     28 */
     29 
     30 #define LOG_TAG "QCamera3HWI"
     31 //#define LOG_NDEBUG 0
     32 
     33 #define __STDC_LIMIT_MACROS
     34 #include <cutils/properties.h>
     35 #include <hardware/camera3.h>
     36 #include <camera/CameraMetadata.h>
     37 #include <stdlib.h>
     38 #include <fcntl.h>
     39 #include <stdint.h>
     40 #include <utils/Log.h>
     41 #include <utils/Errors.h>
     42 #include <ui/Fence.h>
     43 #include <gralloc_priv.h>
     44 #include "QCamera3HWI.h"
     45 #include "QCamera3Mem.h"
     46 #include "QCamera3Channel.h"
     47 #include "QCamera3PostProc.h"
     48 #include "QCamera3VendorTags.h"
     49 
using namespace android;

namespace qcamera {

// Simple max macro; file predates consistent use of std::max in this HAL.
#define MAX(a, b) ((a) > (b) ? (a) : (b))

// Shorthand: mapped CPU pointer of buffer INDEX inside a QCamera3 memory object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

// Pipeline depth constants reported to / assumed by the request pipeline.
// NOTE(review): exact consumers are outside this chunk — confirm usage.
#define EMPTY_PIPELINE_DELAY 2
#define CAM_MAX_SYNC_LATENCY 4

// Per-sensor capability and static-metadata caches, indexed by camera id.
// Presumably populated during module init before any HAL instance is
// constructed (the constructor dereferences gCamCapability[cameraId]).
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];

// Guards mCameraSessionActive: only one camera session may be open at a
// time across all instances (enforced in openCamera()).
pthread_mutex_t QCamera3HardwareInterface::mCameraSessionLock =
    PTHREAD_MUTEX_INITIALIZER;
unsigned int QCamera3HardwareInterface::mCameraSessionActive = 0;
     67 
// Translation tables between Android framework enum values (left column)
// and the vendor HAL enum values (right column).  Lookups scan linearly,
// so duplicate keys resolve to the first matching entry.

// ANDROID_CONTROL_EFFECT_MODE_* -> CAM_EFFECT_MODE_*
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

// ANDROID_CONTROL_AWB_MODE_* -> CAM_WB_MODE_*
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

// ANDROID_CONTROL_SCENE_MODE_* -> CAM_SCENE_MODE_*.
// FACE_PRIORITY maps to OFF: face priority is handled by the HAL's face
// detection path rather than a dedicated sensor scene mode.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_OFF },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};

// ANDROID_CONTROL_AF_MODE_* -> CAM_FOCUS_MODE_*.
// AF_MODE_OFF appears twice (OFF and FIXED lenses); a forward lookup
// finds the first entry (CAM_FOCUS_MODE_OFF).
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

// ANDROID_CONTROL_AE_ANTIBANDING_MODE_* -> CAM_ANTIBANDING_MODE_*
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

// AE mode -> flash behavior.  AE_MODE_OFF/ON both disable flash; the
// redeye variant still maps to AUTO flash on the HAL side.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

// ANDROID_FLASH_MODE_* -> CAM_FLASH_MODE_*
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

// ANDROID_STATISTICS_FACE_DETECT_MODE_* -> CAM_FACE_DETECT_MODE_*
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE  },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};

// ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_* -> CAM_FOCUS_*
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
      CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
      CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
      CAM_FOCUS_CALIBRATED }
};

// JPEG thumbnail sizes advertised to the framework, as flat
// (width, height) pairs.  The leading 0x0 entry means "no thumbnail",
// as required by the android.jpeg.availableThumbnailSizes spec.
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             320, 240,
                                             432, 288,
                                             480, 288,
                                             512, 288,
                                             512, 384};

// ANDROID_SENSOR_TEST_PATTERN_MODE_* -> CAM_TEST_PATTERN_*
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,          CAM_TEST_PATTERN_OFF   },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,  CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,   CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,          CAM_TEST_PATTERN_PN9 },
};
    172 
/* ANDROID_SENSOR_REFERENCE_ILLUMINANT1_* -> CAM_AWB_* mapping.
 *
 * Since there is no mapping for all the options, some Android enums are not
 * listed.  The order in this list is important: when mapping from HAL to
 * Android the lookup traverses from lower to higher index, so for HAL values
 * that map to several different Android values the traversal selects the
 * FIRST one found.  Do not reorder entries without checking both lookup
 * directions.
 */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};
    196 
/* Custom tag definitions */

// camera3_device_ops vtable handed to the framework (GCC designated
// initializer labels).  register_stream_buffers and
// get_metadata_vendor_tag_ops are NULL; the device reports
// CAMERA_DEVICE_API_VERSION_3_2 (see constructor), under which these
// entry points are no longer used by the framework.
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    initialize:                         QCamera3HardwareInterface::initialize,
    configure_streams:                  QCamera3HardwareInterface::configure_streams,
    register_stream_buffers:            NULL,
    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    get_metadata_vendor_tag_ops:        NULL,
    dump:                               QCamera3HardwareInterface::dump,
    flush:                              QCamera3HardwareInterface::flush,
    reserved:                           {0},
};

// Maximum number of capture requests allowed in flight at once.
int QCamera3HardwareInterface::kMaxInFlight = 5;
    212 
/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *   @callbacks : module-level callbacks provided by the camera service
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId,
                        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraOpened(false),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mInputStream(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mFirstRequest(false),
      mRepeatingRequest(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      mLoopBackResult(NULL),
      mFlush(false),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      m_pPowerModule(NULL),
      mHdrHint(false),
      mMetaFrameCount(0),
      mCallbacks(callbacks)
{
    // Fill in the camera3_device_t that openCamera() hands back to the
    // framework; priv points back to this instance for the static ops.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_2;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    // Assumes gCamCapability[cameraId] was populated before construction —
    // no NULL check here; TODO confirm module init guarantees this.
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    // Request-tracking synchronization primitives, used with mMutex.
    pthread_cond_init(&mRequestCond, NULL);
    mPendingRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    // Default request templates are built lazily; start all empty.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

#ifdef HAS_MULTIMEDIA_HINTS
    // Power HAL module for performance hints; absence is non-fatal.
    if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
        ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
    }
#endif
}
    274 
/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    ALOGV("%s: E", __func__);
    /* We need to stop all streams before deleting any stream */

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework

    // Pass 1: stop every channel first.  Channels may share backend
    // resources, so no channel is deleted until all are stopped.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    // Pass 2: now it is safe to delete channels and their stream_info
    // wrappers (allocated with malloc in configureStreams, hence free()).
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    // mPictureChannel was owned via mStreamInfo above; just clear the alias.
    mPictureChannel = NULL;

    /* Clean up all channels */
    if (mCameraInitialized) {
        if (mMetadataChannel) {
            mMetadataChannel->stop();
            delete mMetadataChannel;
            mMetadataChannel = NULL;
        }
        // Parameters were set up in initialize(); tear down symmetrically.
        deinitParameters();
    }

    // Close the backend session only if openCamera() succeeded.
    if (mCameraOpened)
        closeCamera();

    mPendingBuffersMap.mPendingBufferList.clear();
    mPendingRequestsList.clear();

    // Release any lazily-built default request templates.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    pthread_cond_destroy(&mRequestCond);

    pthread_mutex_destroy(&mMutex);
    ALOGV("%s: X", __func__);
}
    340 
    341 /*===========================================================================
    342  * FUNCTION   : openCamera
    343  *
    344  * DESCRIPTION: open camera
    345  *
    346  * PARAMETERS :
    347  *   @hw_device  : double ptr for camera device struct
    348  *
    349  * RETURN     : int32_t type of status
    350  *              NO_ERROR  -- success
    351  *              none-zero failure code
    352  *==========================================================================*/
    353 int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
    354 {
    355     int rc = 0;
    356     pthread_mutex_lock(&mCameraSessionLock);
    357     if (mCameraSessionActive) {
    358         ALOGE("%s: multiple simultaneous camera instance not supported", __func__);
    359         pthread_mutex_unlock(&mCameraSessionLock);
    360         return -EUSERS;
    361     }
    362 
    363     if (mCameraOpened) {
    364         *hw_device = NULL;
    365         return PERMISSION_DENIED;
    366     }
    367 
    368     rc = openCamera();
    369     if (rc == 0) {
    370         *hw_device = &mCameraDevice.common;
    371         mCameraSessionActive = 1;
    372     } else
    373         *hw_device = NULL;
    374 
    375 #ifdef HAS_MULTIMEDIA_HINTS
    376     if (rc == 0) {
    377         if (m_pPowerModule) {
    378             if (m_pPowerModule->powerHint) {
    379                 m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
    380                         (void *)"state=1");
    381             }
    382         }
    383     }
    384 #endif
    385     pthread_mutex_unlock(&mCameraSessionLock);
    386     return rc;
    387 }
    388 
    389 /*===========================================================================
    390  * FUNCTION   : openCamera
    391  *
    392  * DESCRIPTION: open camera
    393  *
    394  * PARAMETERS : none
    395  *
    396  * RETURN     : int32_t type of status
    397  *              NO_ERROR  -- success
    398  *              none-zero failure code
    399  *==========================================================================*/
    400 int QCamera3HardwareInterface::openCamera()
    401 {
    402     if (mCameraHandle) {
    403         ALOGE("Failure: Camera already opened");
    404         return ALREADY_EXISTS;
    405     }
    406     mCameraHandle = camera_open(mCameraId);
    407     if (!mCameraHandle) {
    408         ALOGE("camera_open failed.");
    409         return UNKNOWN_ERROR;
    410     }
    411 
    412     mCameraOpened = true;
    413 
    414     return NO_ERROR;
    415 }
    416 
/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * PRECONDITION: mCameraHandle is non-NULL (callers guard on mCameraOpened);
 *               there is no NULL check before the dereference below.
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    int rc = NO_ERROR;

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;
    mCameraOpened = false;

#ifdef HAS_MULTIMEDIA_HINTS
    if (rc == NO_ERROR) {
        if (m_pPowerModule) {
            if (m_pPowerModule->powerHint) {
                // Drop the HDR hint ("state=3") first if it is still active,
                // then signal end of the camera use case ("state=0").
                if(mHdrHint == true) {
                    m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
                            (void *)"state=3");
                    mHdrHint = false;
                }
                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
                        (void *)"state=0");
            }
        }
    }
#endif

    return rc;
}
    454 
    455 /*===========================================================================
    456  * FUNCTION   : initialize
    457  *
    458  * DESCRIPTION: Initialize frameworks callback functions
    459  *
    460  * PARAMETERS :
    461  *   @callback_ops : callback function to frameworks
    462  *
    463  * RETURN     :
    464  *
    465  *==========================================================================*/
    466 int QCamera3HardwareInterface::initialize(
    467         const struct camera3_callback_ops *callback_ops)
    468 {
    469     int rc;
    470 
    471     pthread_mutex_lock(&mMutex);
    472 
    473     rc = initParameters();
    474     if (rc < 0) {
    475         ALOGE("%s: initParamters failed %d", __func__, rc);
    476        goto err1;
    477     }
    478     mCallbackOps = callback_ops;
    479 
    480     pthread_mutex_unlock(&mMutex);
    481     mCameraInitialized = true;
    482     return 0;
    483 
    484 err1:
    485     pthread_mutex_unlock(&mMutex);
    486     return rc;
    487 }
    488 
    489 /*===========================================================================
    490  * FUNCTION   : validateStreamDimensions
    491  *
    492  * DESCRIPTION: Check if the configuration requested are those advertised
    493  *
    494  * PARAMETERS :
    495  *   @stream_list : streams to be configured
    496  *
    497  * RETURN     :
    498  *
    499  *==========================================================================*/
    500 int QCamera3HardwareInterface::validateStreamDimensions(
    501         camera3_stream_configuration_t *streamList)
    502 {
    503     int rc = NO_ERROR;
    504     int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
    505 
    506     /*
    507     * Loop through all streams requested in configuration
    508     * Check if unsupported sizes have been requested on any of them
    509     */
    510     for (size_t j = 0; j < streamList->num_streams; j++){
    511         bool sizeFound = false;
    512         camera3_stream_t *newStream = streamList->streams[j];
    513 
    514         /*
    515         * Sizes are different for each type of stream format check against
    516         * appropriate table.
    517         */
    518         switch (newStream->format) {
    519             case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
    520             case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
    521             case HAL_PIXEL_FORMAT_RAW10:
    522                 for (int i = 0;
    523                       i < gCamCapability[mCameraId]->supported_raw_dim_cnt; i++){
    524                     if (gCamCapability[mCameraId]->raw_dim[i].width
    525                             == (int32_t) newStream->width
    526                         && gCamCapability[mCameraId]->raw_dim[i].height
    527                             == (int32_t) newStream->height) {
    528                         sizeFound = true;
    529                     }
    530                 }
    531                 break;
    532             case HAL_PIXEL_FORMAT_BLOB:
    533                 for (int i = 0;
    534                   i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt;i++){
    535                     if ((int32_t)(newStream->width) ==
    536                         gCamCapability[mCameraId]
    537                             ->picture_sizes_tbl[i].width
    538                     && (int32_t)(newStream->height) ==
    539                         gCamCapability[mCameraId]
    540                             ->picture_sizes_tbl[i].height){
    541                     sizeFound = true;
    542                     break;
    543                     }
    544                 }
    545                 break;
    546 
    547             case HAL_PIXEL_FORMAT_YCbCr_420_888:
    548             case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
    549             default:
    550                 /* ZSL stream will be full active array size validate that*/
    551                 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
    552                     if ((int32_t)(newStream->width) ==
    553                         gCamCapability[mCameraId]->active_array_size.width
    554                         && (int32_t)(newStream->height)  ==
    555                         gCamCapability[mCameraId]->active_array_size.height) {
    556                         sizeFound = true;
    557                     }
    558                     /* We could potentially break here to enforce ZSL stream
    559                      * set from frameworks always has full active array size
    560                      * but it is not clear from spec if framework will always
    561                      * follow that, also we have logic to override to full array
    562                      * size, so keeping this logic lenient at the moment.
    563                      */
    564                 }
    565 
    566                 /* Non ZSL stream still need to conform to advertised sizes*/
    567                 for (int i = 0;
    568                   i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt;i++){
    569                     if ((int32_t)(newStream->width) ==
    570                         gCamCapability[mCameraId]
    571                             ->picture_sizes_tbl[i].width
    572                     && (int32_t)(newStream->height) ==
    573                         gCamCapability[mCameraId]
    574                             ->picture_sizes_tbl[i].height){
    575                     sizeFound = true;
    576                     break;
    577                     }
    578                 }
    579                 break;
    580         } /* End of switch(newStream->format) */
    581 
    582         /* We error out even if a single stream has unsupported size set */
    583         if (!sizeFound) {
    584             ALOGE("%s: Error: Unsupported size of  %d x %d requested for stream"
    585                   "type:%d", __func__, newStream->width, newStream->height,
    586                   newStream->format);
    587             rc = -EINVAL;
    588             break;
    589         }
    590     } /* End of for each stream */
    591     return rc;
    592 }
    593 
    594 /*===========================================================================
    595  * FUNCTION   : configureStreams
    596  *
    597  * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
    598  *              and output streams.
    599  *
    600  * PARAMETERS :
    601  *   @stream_list : streams to be configured
    602  *
    603  * RETURN     :
    604  *
    605  *==========================================================================*/
    606 int QCamera3HardwareInterface::configureStreams(
    607         camera3_stream_configuration_t *streamList)
    608 {
    609     int rc = 0;
    610 
    611     // Sanity check stream_list
    612     if (streamList == NULL) {
    613         ALOGE("%s: NULL stream configuration", __func__);
    614         return BAD_VALUE;
    615     }
    616     if (streamList->streams == NULL) {
    617         ALOGE("%s: NULL stream list", __func__);
    618         return BAD_VALUE;
    619     }
    620 
    621     if (streamList->num_streams < 1) {
    622         ALOGE("%s: Bad number of streams requested: %d", __func__,
    623                 streamList->num_streams);
    624         return BAD_VALUE;
    625     }
    626 
    627     rc = validateStreamDimensions(streamList);
    628     if (rc != NO_ERROR) {
    629         ALOGE("%s: Invalid stream configuration requested!", __func__);
    630         return rc;
    631     }
    632 
    633     /* first invalidate all the steams in the mStreamList
    634      * if they appear again, they will be validated */
    635     for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
    636             it != mStreamInfo.end(); it++) {
    637         QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
    638         channel->stop();
    639         (*it)->status = INVALID;
    640     }
    641     if (mSupportChannel)
    642         mSupportChannel->stop();
    643     if (mMetadataChannel) {
    644         /* If content of mStreamInfo is not 0, there is metadata stream */
    645         mMetadataChannel->stop();
    646     }
    647 
    648 #ifdef HAS_MULTIMEDIA_HINTS
    649     if(mHdrHint == true) {
    650         if (m_pPowerModule) {
    651             if (m_pPowerModule->powerHint) {
    652                 m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
    653                         (void *)"state=3");
    654                 mHdrHint = false;
    655             }
    656         }
    657     }
    658 #endif
    659 
    660     pthread_mutex_lock(&mMutex);
    661 
    662     bool isZsl = false;
    663     camera3_stream_t *inputStream = NULL;
    664     camera3_stream_t *jpegStream = NULL;
    665     cam_stream_size_info_t stream_config_info;
    666 
    667     for (size_t i = 0; i < streamList->num_streams; i++) {
    668         camera3_stream_t *newStream = streamList->streams[i];
    669         ALOGD("%s: newStream type = %d, stream format = %d stream size : %d x %d",
    670                 __func__, newStream->stream_type, newStream->format,
    671                  newStream->width, newStream->height);
    672         //if the stream is in the mStreamList validate it
    673         bool stream_exists = false;
    674         for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
    675                 it != mStreamInfo.end(); it++) {
    676             if ((*it)->stream == newStream) {
    677                 QCamera3Channel *channel =
    678                     (QCamera3Channel*)(*it)->stream->priv;
    679                 stream_exists = true;
    680                 delete channel;
    681                 (*it)->status = VALID;
    682                 (*it)->stream->priv = NULL;
    683                 (*it)->channel = NULL;
    684             }
    685         }
    686         if (!stream_exists) {
    687             //new stream
    688             stream_info_t* stream_info;
    689             stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
    690             stream_info->stream = newStream;
    691             stream_info->status = VALID;
    692             stream_info->channel = NULL;
    693             mStreamInfo.push_back(stream_info);
    694         }
    695         if (newStream->stream_type == CAMERA3_STREAM_INPUT
    696                 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
    697             if (inputStream != NULL) {
    698                 ALOGE("%s: Multiple input streams requested!", __func__);
    699                 pthread_mutex_unlock(&mMutex);
    700                 return BAD_VALUE;
    701             }
    702             inputStream = newStream;
    703         }
    704         if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
    705             jpegStream = newStream;
    706         }
    707     }
    708     mInputStream = inputStream;
    709 
    710     cleanAndSortStreamInfo();
    711     if (mMetadataChannel) {
    712         delete mMetadataChannel;
    713         mMetadataChannel = NULL;
    714     }
    715     if (mSupportChannel) {
    716         delete mSupportChannel;
    717         mSupportChannel = NULL;
    718     }
    719 
    720     //Create metadata channel and initialize it
    721     mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
    722                     mCameraHandle->ops, captureResultCb,
    723                     &gCamCapability[mCameraId]->padding_info, this);
    724     if (mMetadataChannel == NULL) {
    725         ALOGE("%s: failed to allocate metadata channel", __func__);
    726         rc = -ENOMEM;
    727         pthread_mutex_unlock(&mMutex);
    728         return rc;
    729     }
    730     rc = mMetadataChannel->initialize();
    731     if (rc < 0) {
    732         ALOGE("%s: metadata channel initialization failed", __func__);
    733         delete mMetadataChannel;
    734         mMetadataChannel = NULL;
    735         pthread_mutex_unlock(&mMutex);
    736         return rc;
    737     }
    738 
    739     /* Create dummy stream if there is one single raw stream */
    740     if (streamList->num_streams == 1 &&
    741             (streamList->streams[0]->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
    742             streamList->streams[0]->format == HAL_PIXEL_FORMAT_RAW16)) {
    743         mSupportChannel = new QCamera3SupportChannel(
    744                 mCameraHandle->camera_handle,
    745                 mCameraHandle->ops,
    746                 &gCamCapability[mCameraId]->padding_info,
    747                 this);
    748         if (!mSupportChannel) {
    749             ALOGE("%s: dummy channel cannot be created", __func__);
    750             pthread_mutex_unlock(&mMutex);
    751             return -ENOMEM;
    752         }
    753     }
    754 
    755     /* Allocate channel objects for the requested streams */
    756     for (size_t i = 0; i < streamList->num_streams; i++) {
    757         camera3_stream_t *newStream = streamList->streams[i];
    758         uint32_t stream_usage = newStream->usage;
    759         stream_config_info.stream_sizes[i].width = newStream->width;
    760         stream_config_info.stream_sizes[i].height = newStream->height;
    761         if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
    762             newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED && jpegStream){
    763             //for zsl stream the size is active array size
    764             isZsl = true;
    765             stream_config_info.stream_sizes[i].width =
    766                     gCamCapability[mCameraId]->active_array_size.width;
    767             stream_config_info.stream_sizes[i].height =
    768                     gCamCapability[mCameraId]->active_array_size.height;
    769             stream_config_info.type[i] = CAM_STREAM_TYPE_SNAPSHOT;
    770         } else {
    771            //for non zsl streams find out the format
    772            switch (newStream->format) {
    773            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
    774               {
    775                  if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
    776                     stream_config_info.type[i] = CAM_STREAM_TYPE_VIDEO;
    777                  } else {
    778                     stream_config_info.type[i] = CAM_STREAM_TYPE_PREVIEW;
    779                  }
    780               }
    781               break;
    782            case HAL_PIXEL_FORMAT_YCbCr_420_888:
    783               stream_config_info.type[i] = CAM_STREAM_TYPE_CALLBACK;
    784 #ifdef HAS_MULTIMEDIA_HINTS
    785               if (m_pPowerModule) {
    786                   if (m_pPowerModule->powerHint) {
    787                       m_pPowerModule->powerHint(m_pPowerModule,
    788                           POWER_HINT_VIDEO_ENCODE, (void *)"state=2");
    789                       mHdrHint = true;
    790                   }
    791               }
    792 #endif
    793               break;
    794            case HAL_PIXEL_FORMAT_BLOB:
    795               stream_config_info.type[i] = CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT;
    796               break;
    797            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
    798            case HAL_PIXEL_FORMAT_RAW16:
    799               stream_config_info.type[i] = CAM_STREAM_TYPE_RAW;
    800               break;
    801            default:
    802               stream_config_info.type[i] = CAM_STREAM_TYPE_DEFAULT;
    803               break;
    804            }
    805         }
    806         if (newStream->priv == NULL) {
    807             //New stream, construct channel
    808             switch (newStream->stream_type) {
    809             case CAMERA3_STREAM_INPUT:
    810                 newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
    811                 break;
    812             case CAMERA3_STREAM_BIDIRECTIONAL:
    813                 newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
    814                     GRALLOC_USAGE_HW_CAMERA_WRITE;
    815                 break;
    816             case CAMERA3_STREAM_OUTPUT:
    817                 /* For video encoding stream, set read/write rarely
    818                  * flag so that they may be set to un-cached */
    819                 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
    820                     newStream->usage =
    821                          (GRALLOC_USAGE_SW_READ_RARELY |
    822                          GRALLOC_USAGE_SW_WRITE_RARELY |
    823                          GRALLOC_USAGE_HW_CAMERA_WRITE);
    824                 else
    825                     newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
    826                 break;
    827             default:
    828                 ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
    829                 break;
    830             }
    831 
    832             if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
    833                     newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
    834                 QCamera3Channel *channel = NULL;
    835                 switch (newStream->format) {
    836                 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
    837                 case HAL_PIXEL_FORMAT_YCbCr_420_888:
    838                     newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
    839                     channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
    840                             mCameraHandle->ops, captureResultCb,
    841                             &gCamCapability[mCameraId]->padding_info,
    842                             this,
    843                             newStream,
    844                             (cam_stream_type_t) stream_config_info.type[i]);
    845                     if (channel == NULL) {
    846                         ALOGE("%s: allocation of channel failed", __func__);
    847                         pthread_mutex_unlock(&mMutex);
    848                         return -ENOMEM;
    849                     }
    850 
    851                     newStream->priv = channel;
    852                     break;
    853                 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
    854                 case HAL_PIXEL_FORMAT_RAW16:
    855                     newStream->max_buffers = QCamera3RawChannel::kMaxBuffers;
    856                     mRawChannel = new QCamera3RawChannel(
    857                             mCameraHandle->camera_handle,
    858                             mCameraHandle->ops, captureResultCb,
    859                             &gCamCapability[mCameraId]->padding_info,
    860                             this, newStream, (newStream->format == HAL_PIXEL_FORMAT_RAW16));
    861                     if (mRawChannel == NULL) {
    862                         ALOGE("%s: allocation of raw channel failed", __func__);
    863                         pthread_mutex_unlock(&mMutex);
    864                         return -ENOMEM;
    865                     }
    866 
    867                     newStream->priv = (QCamera3Channel*)mRawChannel;
    868                     break;
    869                 case HAL_PIXEL_FORMAT_BLOB:
    870                     newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
    871                     mPictureChannel = new QCamera3PicChannel(mCameraHandle->camera_handle,
    872                             mCameraHandle->ops, captureResultCb,
    873                             &gCamCapability[mCameraId]->padding_info, this, newStream);
    874                     if (mPictureChannel == NULL) {
    875                         ALOGE("%s: allocation of channel failed", __func__);
    876                         pthread_mutex_unlock(&mMutex);
    877                         return -ENOMEM;
    878                     }
    879                     newStream->priv = (QCamera3Channel*)mPictureChannel;
    880                     break;
    881 
    882                 default:
    883                     ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
    884                     break;
    885                 }
    886             }
    887 
    888             for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
    889                     it != mStreamInfo.end(); it++) {
    890                 if ((*it)->stream == newStream) {
    891                     (*it)->channel = (QCamera3Channel*) newStream->priv;
    892                     break;
    893                 }
    894             }
    895         } else {
    896             // Channel already exists for this stream
    897             // Do nothing for now
    898         }
    899     }
    900 
    901     if (isZsl)
    902         mPictureChannel->overrideYuvSize(
    903                 gCamCapability[mCameraId]->active_array_size.width,
    904                 gCamCapability[mCameraId]->active_array_size.height);
    905 
    906     int32_t hal_version = CAM_HAL_V3;
    907     stream_config_info.num_streams = streamList->num_streams;
    908     if (mSupportChannel) {
    909         stream_config_info.stream_sizes[stream_config_info.num_streams] =
    910                 QCamera3SupportChannel::kDim;
    911         stream_config_info.type[stream_config_info.num_streams] =
    912                 CAM_STREAM_TYPE_CALLBACK;
    913         stream_config_info.num_streams++;
    914     }
    915 
    916     // settings/parameters don't carry over for new configureStreams
    917     memset(mParameters, 0, sizeof(metadata_buffer_t));
    918 
    919     mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
    920     AddSetMetaEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
    921                 sizeof(hal_version), &hal_version);
    922 
    923     AddSetMetaEntryToBatch(mParameters, CAM_INTF_META_STREAM_INFO,
    924                 sizeof(stream_config_info), &stream_config_info);
    925 
    926     mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
    927 
    /* Initialize mPendingRequestsList and mPendingBuffersMap */
    929     mPendingRequestsList.clear();
    930     mPendingFrameDropList.clear();
    931     // Initialize/Reset the pending buffers list
    932     mPendingBuffersMap.num_buffers = 0;
    933     mPendingBuffersMap.mPendingBufferList.clear();
    934 
    935     mFirstRequest = true;
    936 
    937     //Get min frame duration for this streams configuration
    938     deriveMinFrameDuration();
    939 
    940     pthread_mutex_unlock(&mMutex);
    941     return rc;
    942 }
    943 
    944 /*===========================================================================
    945  * FUNCTION   : validateCaptureRequest
    946  *
    947  * DESCRIPTION: validate a capture request from camera service
    948  *
    949  * PARAMETERS :
    950  *   @request : request from framework to process
    951  *
    952  * RETURN     :
    953  *
    954  *==========================================================================*/
    955 int QCamera3HardwareInterface::validateCaptureRequest(
    956                     camera3_capture_request_t *request)
    957 {
    958     ssize_t idx = 0;
    959     const camera3_stream_buffer_t *b;
    960     CameraMetadata meta;
    961 
    962     /* Sanity check the request */
    963     if (request == NULL) {
    964         ALOGE("%s: NULL capture request", __func__);
    965         return BAD_VALUE;
    966     }
    967 
    968     if (request->settings == NULL && mFirstRequest) {
    969         /*settings cannot be null for the first request*/
    970         return BAD_VALUE;
    971     }
    972 
    973     uint32_t frameNumber = request->frame_number;
    974     if (request->input_buffer != NULL &&
    975             request->input_buffer->stream != mInputStream) {
    976         ALOGE("%s: Request %d: Input buffer not from input stream!",
    977                 __FUNCTION__, frameNumber);
    978         return BAD_VALUE;
    979     }
    980     if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
    981         ALOGE("%s: Request %d: No output buffers provided!",
    982                 __FUNCTION__, frameNumber);
    983         return BAD_VALUE;
    984     }
    985     if (request->input_buffer != NULL) {
    986         b = request->input_buffer;
    987         QCamera3Channel *channel =
    988             static_cast<QCamera3Channel*>(b->stream->priv);
    989         if (channel == NULL) {
    990             ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
    991                     __func__, frameNumber, idx);
    992             return BAD_VALUE;
    993         }
    994         if (b->status != CAMERA3_BUFFER_STATUS_OK) {
    995             ALOGE("%s: Request %d: Buffer %d: Status not OK!",
    996                     __func__, frameNumber, idx);
    997             return BAD_VALUE;
    998         }
    999         if (b->release_fence != -1) {
   1000             ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
   1001                     __func__, frameNumber, idx);
   1002             return BAD_VALUE;
   1003         }
   1004         if (b->buffer == NULL) {
   1005             ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
   1006                     __func__, frameNumber, idx);
   1007             return BAD_VALUE;
   1008         }
   1009     }
   1010 
   1011     // Validate all buffers
   1012     b = request->output_buffers;
   1013     do {
   1014         QCamera3Channel *channel =
   1015                 static_cast<QCamera3Channel*>(b->stream->priv);
   1016         if (channel == NULL) {
   1017             ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
   1018                     __func__, frameNumber, idx);
   1019             return BAD_VALUE;
   1020         }
   1021         if (b->status != CAMERA3_BUFFER_STATUS_OK) {
   1022             ALOGE("%s: Request %d: Buffer %d: Status not OK!",
   1023                     __func__, frameNumber, idx);
   1024             return BAD_VALUE;
   1025         }
   1026         if (b->release_fence != -1) {
   1027             ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
   1028                     __func__, frameNumber, idx);
   1029             return BAD_VALUE;
   1030         }
   1031         if (b->buffer == NULL) {
   1032             ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
   1033                     __func__, frameNumber, idx);
   1034             return BAD_VALUE;
   1035         }
   1036         idx++;
   1037         b = request->output_buffers + idx;
   1038     } while (idx < (ssize_t)request->num_output_buffers);
   1039 
   1040     return NO_ERROR;
   1041 }
   1042 
   1043 /*===========================================================================
   1044  * FUNCTION   : deriveMinFrameDuration
   1045  *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
   1047  *              on currently configured streams.
   1048  *
   1049  * PARAMETERS : NONE
   1050  *
   1051  * RETURN     : NONE
   1052  *
   1053  *==========================================================================*/
   1054 void QCamera3HardwareInterface::deriveMinFrameDuration()
   1055 {
   1056     int32_t maxJpegDim, maxProcessedDim, maxRawDim;
   1057 
   1058     maxJpegDim = 0;
   1059     maxProcessedDim = 0;
   1060     maxRawDim = 0;
   1061 
   1062     // Figure out maximum jpeg, processed, and raw dimensions
   1063     for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
   1064         it != mStreamInfo.end(); it++) {
   1065 
   1066         // Input stream doesn't have valid stream_type
   1067         if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
   1068             continue;
   1069 
   1070         int32_t dimension = (*it)->stream->width * (*it)->stream->height;
   1071         if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
   1072             if (dimension > maxJpegDim)
   1073                 maxJpegDim = dimension;
   1074         } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
   1075                 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
   1076             if (dimension > maxRawDim)
   1077                 maxRawDim = dimension;
   1078         } else {
   1079             if (dimension > maxProcessedDim)
   1080                 maxProcessedDim = dimension;
   1081         }
   1082     }
   1083 
   1084     //Assume all jpeg dimensions are in processed dimensions.
   1085     if (maxJpegDim > maxProcessedDim)
   1086         maxProcessedDim = maxJpegDim;
   1087     //Find the smallest raw dimension that is greater or equal to jpeg dimension
   1088     if (maxProcessedDim > maxRawDim) {
   1089         maxRawDim = INT32_MAX;
   1090         for (int i = 0; i < gCamCapability[mCameraId]->supported_raw_dim_cnt;
   1091             i++) {
   1092 
   1093             int32_t dimension =
   1094                 gCamCapability[mCameraId]->raw_dim[i].width *
   1095                 gCamCapability[mCameraId]->raw_dim[i].height;
   1096 
   1097             if (dimension >= maxProcessedDim && dimension < maxRawDim)
   1098                 maxRawDim = dimension;
   1099         }
   1100     }
   1101 
   1102     //Find minimum durations for processed, jpeg, and raw
   1103     for (int i = 0; i < gCamCapability[mCameraId]->supported_raw_dim_cnt;
   1104             i++) {
   1105         if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
   1106                 gCamCapability[mCameraId]->raw_dim[i].height) {
   1107             mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
   1108             break;
   1109         }
   1110     }
   1111     for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
   1112         if (maxProcessedDim ==
   1113             gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
   1114             gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
   1115             mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
   1116             mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
   1117             break;
   1118         }
   1119     }
   1120 }
   1121 
   1122 /*===========================================================================
   1123  * FUNCTION   : getMinFrameDuration
   1124  *
 * DESCRIPTION: get minimum frame duration based on the currently derived minimum frame durations
   1126  *              and current request configuration.
   1127  *
 * PARAMETERS : @request: request sent by the framework
   1129  *
 * RETURN     : minimum frame duration for a particular request
   1131  *
   1132  *==========================================================================*/
   1133 int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
   1134 {
   1135     bool hasJpegStream = false;
   1136     bool hasRawStream = false;
   1137     for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
   1138         const camera3_stream_t *stream = request->output_buffers[i].stream;
   1139         if (stream->format == HAL_PIXEL_FORMAT_BLOB)
   1140             hasJpegStream = true;
   1141         else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
   1142                 stream->format == HAL_PIXEL_FORMAT_RAW16)
   1143             hasRawStream = true;
   1144     }
   1145 
   1146     if (!hasJpegStream)
   1147         return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
   1148     else
   1149         return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
   1150 }
   1151 
   1152 /*===========================================================================
   1153  * FUNCTION   : handleMetadataWithLock
   1154  *
   1155  * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
   1156  *
   1157  * PARAMETERS : @metadata_buf: metadata buffer
   1158  *
   1159  * RETURN     :
   1160  *
   1161  *==========================================================================*/
   1162 void QCamera3HardwareInterface::handleMetadataWithLock(
   1163     mm_camera_super_buf_t *metadata_buf)
   1164 {
   1165     metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
   1166     int32_t frame_number_valid = *(int32_t *)
   1167         POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
   1168     uint32_t pending_requests = *(uint32_t *)POINTER_OF(
   1169         CAM_INTF_META_PENDING_REQUESTS, metadata);
   1170     uint32_t frame_number = *(uint32_t *)
   1171         POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
   1172     const struct timeval *tv = (const struct timeval *)
   1173         POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
   1174     nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
   1175         tv->tv_usec * NSEC_PER_USEC;
   1176     cam_frame_dropped_t cam_frame_drop = *(cam_frame_dropped_t *)
   1177         POINTER_OF(CAM_INTF_META_FRAME_DROPPED, metadata);
   1178 
   1179     int32_t urgent_frame_number_valid = *(int32_t *)
   1180         POINTER_OF(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
   1181     uint32_t urgent_frame_number = *(uint32_t *)
   1182         POINTER_OF(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
   1183 
   1184     if (urgent_frame_number_valid) {
   1185         ALOGV("%s: valid urgent frame_number = %d, capture_time = %lld",
   1186           __func__, urgent_frame_number, capture_time);
   1187 
        //Received an urgent frame number, handle it
   1189         //using partial results
   1190         for (List<PendingRequestInfo>::iterator i =
   1191             mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
   1192             camera3_notify_msg_t notify_msg;
   1193             ALOGV("%s: Iterator Frame = %d urgent frame = %d",
   1194                 __func__, i->frame_number, urgent_frame_number);
   1195 
   1196             if (i->frame_number < urgent_frame_number &&
   1197                 i->bNotified == 0) {
   1198                 notify_msg.type = CAMERA3_MSG_SHUTTER;
   1199                 notify_msg.message.shutter.frame_number = i->frame_number;
   1200                 notify_msg.message.shutter.timestamp = capture_time -
   1201                     (urgent_frame_number - i->frame_number) * NSEC_PER_33MSEC;
   1202                 mCallbackOps->notify(mCallbackOps, &notify_msg);
   1203                 i->timestamp = notify_msg.message.shutter.timestamp;
   1204                 i->bNotified = 1;
   1205                 ALOGV("%s: Support notification !!!! notify frame_number = %d, capture_time = %lld",
   1206                     __func__, i->frame_number, notify_msg.message.shutter.timestamp);
   1207             }
   1208 
   1209             if (i->frame_number == urgent_frame_number) {
   1210 
   1211                 camera3_capture_result_t result;
   1212                 memset(&result, 0, sizeof(camera3_capture_result_t));
   1213 
   1214                 // Send shutter notify to frameworks
   1215                 notify_msg.type = CAMERA3_MSG_SHUTTER;
   1216                 notify_msg.message.shutter.frame_number = i->frame_number;
   1217                 notify_msg.message.shutter.timestamp = capture_time;
   1218                 mCallbackOps->notify(mCallbackOps, &notify_msg);
   1219 
   1220                 i->timestamp = capture_time;
   1221                 i->bNotified = 1;
   1222                 i->partial_result_cnt++;
   1223                 // Extract 3A metadata
   1224                 result.result =
   1225                     translateCbUrgentMetadataToResultMetadata(metadata);
   1226                 // Populate metadata result
   1227                 result.frame_number = urgent_frame_number;
   1228                 result.num_output_buffers = 0;
   1229                 result.output_buffers = NULL;
   1230                 result.partial_result = i->partial_result_cnt;
   1231 
   1232                 mCallbackOps->process_capture_result(mCallbackOps, &result);
   1233                 ALOGV("%s: urgent frame_number = %d, capture_time = %lld",
   1234                      __func__, result.frame_number, capture_time);
   1235                 free_camera_metadata((camera_metadata_t *)result.result);
   1236                 break;
   1237             }
   1238         }
   1239     }
   1240 
   1241     if (!frame_number_valid) {
   1242         ALOGV("%s: Not a valid normal frame number, used as SOF only", __func__);
   1243         mMetadataChannel->bufDone(metadata_buf);
   1244         free(metadata_buf);
   1245         goto done_metadata;
   1246     }
   1247     ALOGV("%s: valid normal frame_number = %d, capture_time = %lld", __func__,
   1248             frame_number, capture_time);
   1249 
   1250     // Go through the pending requests info and send shutter/results to frameworks
   1251     for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
   1252         i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
   1253         camera3_capture_result_t result;
   1254         memset(&result, 0, sizeof(camera3_capture_result_t));
   1255         ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);
   1256 
   1257         i->partial_result_cnt++;
   1258         result.partial_result = i->partial_result_cnt;
   1259 
   1260         // Flush out all entries with less or equal frame numbers.
   1261         mPendingRequest--;
   1262 
   1263         // Check whether any stream buffer corresponding to this is dropped or not
   1264         // If dropped, then notify ERROR_BUFFER for the corresponding stream and
   1265         // buffer with CAMERA3_BUFFER_STATUS_ERROR
   1266         if (cam_frame_drop.frame_dropped) {
   1267             camera3_notify_msg_t notify_msg;
   1268             for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
   1269                     j != i->buffers.end(); j++) {
   1270                 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
   1271                 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
   1272                 for (uint32_t k=0; k<cam_frame_drop.cam_stream_ID.num_streams; k++) {
   1273                   if (streamID == cam_frame_drop.cam_stream_ID.streamID[k]) {
   1274                       // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
   1275                       ALOGV("%s: Start of reporting error frame#=%d, streamID=%d",
   1276                              __func__, i->frame_number, streamID);
   1277                       notify_msg.type = CAMERA3_MSG_ERROR;
   1278                       notify_msg.message.error.frame_number = i->frame_number;
   1279                       notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
   1280                       notify_msg.message.error.error_stream = j->stream;
   1281                       mCallbackOps->notify(mCallbackOps, &notify_msg);
   1282                       ALOGV("%s: End of reporting error frame#=%d, streamID=%d",
   1283                              __func__, i->frame_number, streamID);
   1284                       PendingFrameDropInfo PendingFrameDrop;
   1285                       PendingFrameDrop.frame_number=i->frame_number;
   1286                       PendingFrameDrop.stream_ID = streamID;
   1287                       // Add the Frame drop info to mPendingFrameDropList
   1288                       mPendingFrameDropList.push_back(PendingFrameDrop);
   1289                   }
   1290                 }
   1291             }
   1292         }
   1293 
   1294         // Send empty metadata with already filled buffers for dropped metadata
   1295         // and send valid metadata with already filled buffers for current metadata
   1296         if (i->frame_number < frame_number) {
   1297             CameraMetadata dummyMetadata;
   1298             dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
   1299                     &i->timestamp, 1);
   1300             dummyMetadata.update(ANDROID_REQUEST_ID,
   1301                     &(i->request_id), 1);
   1302             result.result = dummyMetadata.release();
   1303         } else {
   1304             uint8_t bufferStalled = *((uint8_t *)
   1305                     POINTER_OF(CAM_INTF_META_FRAMES_STALLED, metadata));
   1306 
   1307             if (bufferStalled) {
   1308                 result.result = NULL; //Metadata should not be sent in this case
   1309                 camera3_notify_msg_t notify_msg;
   1310                 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
   1311                 notify_msg.type = CAMERA3_MSG_ERROR;
   1312                 notify_msg.message.error.frame_number = i->frame_number;
   1313                 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
   1314                 notify_msg.message.error.error_stream = NULL;
   1315                 ALOGE("%s: Buffer stall observed reporting error", __func__);
   1316                 mCallbackOps->notify(mCallbackOps, &notify_msg);
   1317             } else {
   1318                 result.result = translateFromHalMetadata(metadata,
   1319                         i->timestamp, i->request_id, i->jpegMetadata,
   1320                         i->pipeline_depth);
   1321             }
   1322 
   1323             if (i->blob_request) {
   1324                 {
   1325                     //Dump tuning metadata if enabled and available
   1326                     char prop[PROPERTY_VALUE_MAX];
   1327                     memset(prop, 0, sizeof(prop));
   1328                     property_get("persist.camera.dumpmetadata", prop, "0");
   1329                     int32_t enabled = atoi(prop);
   1330                     if (enabled && metadata->is_tuning_params_valid) {
   1331                         dumpMetadataToFile(metadata->tuning_params,
   1332                                mMetaFrameCount,
   1333                                enabled,
   1334                                "Snapshot",
   1335                                frame_number);
   1336                     }
   1337                 }
   1338 
   1339                 //If it is a blob request then send the metadata to the picture channel
   1340                 metadata_buffer_t *reproc_meta =
   1341                         (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
   1342                 if (reproc_meta == NULL) {
   1343                     ALOGE("%s: Failed to allocate memory for reproc data.", __func__);
   1344                     goto done_metadata;
   1345                 }
   1346                 *reproc_meta = *metadata;
   1347                 mPictureChannel->queueReprocMetadata(reproc_meta);
   1348             }
   1349             // Return metadata buffer
   1350             mMetadataChannel->bufDone(metadata_buf);
   1351             free(metadata_buf);
   1352         }
   1353         if (!result.result) {
   1354             ALOGE("%s: metadata is NULL", __func__);
   1355         }
   1356         result.frame_number = i->frame_number;
   1357         result.num_output_buffers = 0;
   1358         result.output_buffers = NULL;
   1359         for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
   1360                     j != i->buffers.end(); j++) {
   1361             if (j->buffer) {
   1362                 result.num_output_buffers++;
   1363             }
   1364         }
   1365 
   1366         if (result.num_output_buffers > 0) {
   1367             camera3_stream_buffer_t *result_buffers =
   1368                 new camera3_stream_buffer_t[result.num_output_buffers];
   1369             if (!result_buffers) {
   1370                 ALOGE("%s: Fatal error: out of memory", __func__);
   1371             }
   1372             size_t result_buffers_idx = 0;
   1373             for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
   1374                     j != i->buffers.end(); j++) {
   1375                 if (j->buffer) {
   1376                     for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
   1377                             m != mPendingFrameDropList.end(); m++) {
   1378                         QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
   1379                         uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
   1380                         if((m->stream_ID==streamID) && (m->frame_number==frame_number)) {
   1381                             j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
   1382                             ALOGV("%s: Stream STATUS_ERROR frame_number=%d, streamID=%d",
   1383                                   __func__, frame_number, streamID);
   1384                             m = mPendingFrameDropList.erase(m);
   1385                             break;
   1386                         }
   1387                     }
   1388 
   1389                     for (List<PendingBufferInfo>::iterator k =
   1390                       mPendingBuffersMap.mPendingBufferList.begin();
   1391                       k != mPendingBuffersMap.mPendingBufferList.end(); k++) {
   1392                       if (k->buffer == j->buffer->buffer) {
   1393                         ALOGV("%s: Found buffer %p in pending buffer List "
   1394                               "for frame %d, Take it out!!", __func__,
   1395                                k->buffer, k->frame_number);
   1396                         mPendingBuffersMap.num_buffers--;
   1397                         k = mPendingBuffersMap.mPendingBufferList.erase(k);
   1398                         break;
   1399                       }
   1400                     }
   1401 
   1402                     result_buffers[result_buffers_idx++] = *(j->buffer);
   1403                     free(j->buffer);
   1404                     j->buffer = NULL;
   1405                 }
   1406             }
   1407             result.output_buffers = result_buffers;
   1408             mCallbackOps->process_capture_result(mCallbackOps, &result);
   1409             ALOGV("%s: meta frame_number = %d, capture_time = %lld",
   1410                     __func__, result.frame_number, i->timestamp);
   1411             free_camera_metadata((camera_metadata_t *)result.result);
   1412             delete[] result_buffers;
   1413         } else {
   1414             mCallbackOps->process_capture_result(mCallbackOps, &result);
   1415             ALOGV("%s: meta frame_number = %d, capture_time = %lld",
   1416                         __func__, result.frame_number, i->timestamp);
   1417             free_camera_metadata((camera_metadata_t *)result.result);
   1418         }
   1419         // erase the element from the list
   1420         i = mPendingRequestsList.erase(i);
   1421     }
   1422 
   1423 done_metadata:
   1424     for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
   1425         i != mPendingRequestsList.end() ;i++) {
   1426         i->pipeline_depth++;
   1427     }
   1428     if (!pending_requests)
   1429         unblockRequestIfNecessary();
   1430 
   1431 }
   1432 
   1433 /*===========================================================================
   1434  * FUNCTION   : handleBufferWithLock
   1435  *
   1436  * DESCRIPTION: Handles image buffer callback with mMutex lock held.
   1437  *
   1438  * PARAMETERS : @buffer: image buffer for the callback
   1439  *              @frame_number: frame number of the image buffer
   1440  *
   1441  * RETURN     :
   1442  *
   1443  *==========================================================================*/
   1444 void QCamera3HardwareInterface::handleBufferWithLock(
   1445     camera3_stream_buffer_t *buffer, uint32_t frame_number)
   1446 {
   1447     // If the frame number doesn't exist in the pending request list,
   1448     // directly send the buffer to the frameworks, and update pending buffers map
   1449     // Otherwise, book-keep the buffer.
   1450     List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
   1451     while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
   1452         i++;
   1453     }
   1454     if (i == mPendingRequestsList.end()) {
   1455         // Verify all pending requests frame_numbers are greater
   1456         for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
   1457                 j != mPendingRequestsList.end(); j++) {
   1458             if (j->frame_number < frame_number) {
   1459                 ALOGE("%s: Error: pending frame number %d is smaller than %d",
   1460                         __func__, j->frame_number, frame_number);
   1461             }
   1462         }
   1463         camera3_capture_result_t result;
   1464         memset(&result, 0, sizeof(camera3_capture_result_t));
   1465         result.result = NULL;
   1466         result.frame_number = frame_number;
   1467         result.num_output_buffers = 1;
   1468         result.partial_result = 0;
   1469         for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
   1470                 m != mPendingFrameDropList.end(); m++) {
   1471             QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
   1472             uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
   1473             if((m->stream_ID==streamID) && (m->frame_number==frame_number)) {
   1474                 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
   1475                 ALOGV("%s: Stream STATUS_ERROR frame_number=%d, streamID=%d",
   1476                         __func__, frame_number, streamID);
   1477                 m = mPendingFrameDropList.erase(m);
   1478                 break;
   1479             }
   1480         }
   1481         result.output_buffers = buffer;
   1482         ALOGV("%s: result frame_number = %d, buffer = %p",
   1483                 __func__, frame_number, buffer->buffer);
   1484 
   1485         for (List<PendingBufferInfo>::iterator k =
   1486                 mPendingBuffersMap.mPendingBufferList.begin();
   1487                 k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
   1488             if (k->buffer == buffer->buffer) {
   1489                 ALOGV("%s: Found Frame buffer, take it out from list",
   1490                         __func__);
   1491 
   1492                 mPendingBuffersMap.num_buffers--;
   1493                 k = mPendingBuffersMap.mPendingBufferList.erase(k);
   1494                 break;
   1495             }
   1496         }
   1497         ALOGV("%s: mPendingBuffersMap.num_buffers = %d",
   1498             __func__, mPendingBuffersMap.num_buffers);
   1499 
   1500         mCallbackOps->process_capture_result(mCallbackOps, &result);
   1501     } else {
   1502         if (i->input_buffer_present) {
   1503             camera3_capture_result result;
   1504             memset(&result, 0, sizeof(camera3_capture_result_t));
   1505             result.result = NULL;
   1506             result.frame_number = frame_number;
   1507             result.num_output_buffers = 1;
   1508             result.output_buffers = buffer;
   1509             result.partial_result = 0;
   1510             mCallbackOps->process_capture_result(mCallbackOps, &result);
   1511             i = mPendingRequestsList.erase(i);
   1512             mPendingRequest--;
   1513         } else {
   1514             for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
   1515                 j != i->buffers.end(); j++) {
   1516                 if (j->stream == buffer->stream) {
   1517                     if (j->buffer != NULL) {
   1518                         ALOGE("%s: Error: buffer is already set", __func__);
   1519                     } else {
   1520                         j->buffer = (camera3_stream_buffer_t *)malloc(
   1521                             sizeof(camera3_stream_buffer_t));
   1522                         *(j->buffer) = *buffer;
   1523                         ALOGV("%s: cache buffer %p at result frame_number %d",
   1524                             __func__, buffer, frame_number);
   1525                     }
   1526                 }
   1527             }
   1528         }
   1529     }
   1530 }
   1531 
   1532 /*===========================================================================
   1533  * FUNCTION   : unblockRequestIfNecessary
   1534  *
   1535  * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
   1536  *              that mMutex is held when this function is called.
   1537  *
   1538  * PARAMETERS :
   1539  *
   1540  * RETURN     :
   1541  *
   1542  *==========================================================================*/
void QCamera3HardwareInterface::unblockRequestIfNecessary()
{
   // Wake any thread blocked in processCaptureRequest() on mRequestCond
   // (it waits there while mPendingRequest >= kMaxInFlight).
   // NOTE(review): per the function header, mMutex is expected to be held
   // by the caller when this is invoked — confirm at call sites.
   pthread_cond_signal(&mRequestCond);
}
   1548 
   1549 /*===========================================================================
   1550  * FUNCTION   : registerStreamBuffers
   1551  *
   1552  * DESCRIPTION: Register buffers for a given stream with the HAL device.
   1553  *
   1554  * PARAMETERS :
   1555  *   @stream_list : streams to be configured
   1556  *
   1557  * RETURN     :
   1558  *
   1559  *==========================================================================*/
   1560 int QCamera3HardwareInterface::registerStreamBuffers(
   1561         const camera3_stream_buffer_set_t * /*buffer_set*/)
   1562 {
   1563     //Deprecated
   1564     return NO_ERROR;
   1565 }
   1566 
   1567 /*===========================================================================
   1568  * FUNCTION   : processCaptureRequest
   1569  *
   1570  * DESCRIPTION: process a capture request from camera service
   1571  *
   1572  * PARAMETERS :
   1573  *   @request : request from framework to process
   1574  *
   1575  * RETURN     :
   1576  *
   1577  *==========================================================================*/
   1578 int QCamera3HardwareInterface::processCaptureRequest(
   1579                     camera3_capture_request_t *request)
   1580 {
   1581     int rc = NO_ERROR;
   1582     int32_t request_id;
   1583     CameraMetadata meta;
   1584 
   1585     pthread_mutex_lock(&mMutex);
   1586 
   1587     rc = validateCaptureRequest(request);
   1588     if (rc != NO_ERROR) {
   1589         ALOGE("%s: incoming request is not valid", __func__);
   1590         pthread_mutex_unlock(&mMutex);
   1591         return rc;
   1592     }
   1593 
   1594     meta = request->settings;
   1595 
   1596     // For first capture request, send capture intent, and
   1597     // stream on all streams
   1598     if (mFirstRequest) {
   1599 
   1600         for (size_t i = 0; i < request->num_output_buffers; i++) {
   1601             const camera3_stream_buffer_t& output = request->output_buffers[i];
   1602             QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
   1603             rc = channel->registerBuffer(output.buffer);
   1604             if (rc < 0) {
   1605                 ALOGE("%s: registerBuffer failed",
   1606                         __func__);
   1607                 pthread_mutex_unlock(&mMutex);
   1608                 return -ENODEV;
   1609             }
   1610         }
   1611 
   1612         if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
   1613             int32_t hal_version = CAM_HAL_V3;
   1614             uint8_t captureIntent =
   1615                 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
   1616 
   1617             memset(mParameters, 0, sizeof(metadata_buffer_t));
   1618             mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
   1619             AddSetMetaEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
   1620                 sizeof(hal_version), &hal_version);
   1621             AddSetMetaEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
   1622                 sizeof(captureIntent), &captureIntent);
   1623             mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
   1624                 mParameters);
   1625         }
   1626 
   1627         //First initialize all streams
   1628         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   1629             it != mStreamInfo.end(); it++) {
   1630             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
   1631             rc = channel->initialize();
   1632             if (NO_ERROR != rc) {
   1633                 ALOGE("%s : Channel initialization failed %d", __func__, rc);
   1634                 pthread_mutex_unlock(&mMutex);
   1635                 return rc;
   1636             }
   1637         }
   1638         if (mSupportChannel) {
   1639             rc = mSupportChannel->initialize();
   1640             if (rc < 0) {
   1641                 ALOGE("%s: Support channel initialization failed", __func__);
   1642                 pthread_mutex_unlock(&mMutex);
   1643                 return rc;
   1644             }
   1645         }
   1646 
   1647         //Then start them.
   1648         ALOGD("%s: Start META Channel", __func__);
   1649         mMetadataChannel->start();
   1650 
   1651         if (mSupportChannel) {
   1652             rc = mSupportChannel->start();
   1653             if (rc < 0) {
   1654                 ALOGE("%s: Support channel start failed", __func__);
   1655                 mMetadataChannel->stop();
   1656                 pthread_mutex_unlock(&mMutex);
   1657                 return rc;
   1658             }
   1659         }
   1660         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   1661             it != mStreamInfo.end(); it++) {
   1662             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
   1663             ALOGD("%s: Start Regular Channel mask=%d", __func__, channel->getStreamTypeMask());
   1664             channel->start();
   1665         }
   1666     }
   1667 
   1668     uint32_t frameNumber = request->frame_number;
   1669     cam_stream_ID_t streamID;
   1670 
   1671     if (meta.exists(ANDROID_REQUEST_ID)) {
   1672         request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
   1673         mCurrentRequestId = request_id;
   1674         ALOGV("%s: Received request with id: %d",__func__, request_id);
   1675     } else if (mFirstRequest || mCurrentRequestId == -1){
   1676         ALOGE("%s: Unable to find request id field, \
   1677                 & no previous id available", __func__);
   1678         return NAME_NOT_FOUND;
   1679     } else {
   1680         ALOGV("%s: Re-using old request id", __func__);
   1681         request_id = mCurrentRequestId;
   1682     }
   1683 
   1684     ALOGV("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
   1685                                     __func__, __LINE__,
   1686                                     request->num_output_buffers,
   1687                                     request->input_buffer,
   1688                                     frameNumber);
   1689     // Acquire all request buffers first
   1690     streamID.num_streams = 0;
   1691     int blob_request = 0;
   1692     for (size_t i = 0; i < request->num_output_buffers; i++) {
   1693         const camera3_stream_buffer_t& output = request->output_buffers[i];
   1694         QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
   1695         sp<Fence> acquireFence = new Fence(output.acquire_fence);
   1696 
   1697         if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
   1698             //Call function to store local copy of jpeg data for encode params.
   1699             blob_request = 1;
   1700         }
   1701 
   1702         rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
   1703         if (rc != OK) {
   1704             ALOGE("%s: fence wait failed %d", __func__, rc);
   1705             pthread_mutex_unlock(&mMutex);
   1706             return rc;
   1707         }
   1708 
   1709         streamID.streamID[streamID.num_streams] =
   1710             channel->getStreamID(channel->getStreamTypeMask());
   1711         streamID.num_streams++;
   1712     }
   1713 
   1714     if(request->input_buffer == NULL) {
   1715        rc = setFrameParameters(request, streamID);
   1716         if (rc < 0) {
   1717             ALOGE("%s: fail to set frame parameters", __func__);
   1718             pthread_mutex_unlock(&mMutex);
   1719             return rc;
   1720         }
   1721     }
   1722 
   1723     /* Update pending request list and pending buffers map */
   1724     PendingRequestInfo pendingRequest;
   1725     pendingRequest.frame_number = frameNumber;
   1726     pendingRequest.num_buffers = request->num_output_buffers;
   1727     pendingRequest.request_id = request_id;
   1728     pendingRequest.blob_request = blob_request;
   1729     pendingRequest.bNotified = 0;
   1730     pendingRequest.input_buffer_present = (request->input_buffer != NULL)? 1 : 0;
   1731     pendingRequest.pipeline_depth = 0;
   1732     pendingRequest.partial_result_cnt = 0;
   1733     extractJpegMetadata(pendingRequest.jpegMetadata, request);
   1734 
   1735     for (size_t i = 0; i < request->num_output_buffers; i++) {
   1736         RequestedBufferInfo requestedBuf;
   1737         requestedBuf.stream = request->output_buffers[i].stream;
   1738         requestedBuf.buffer = NULL;
   1739         pendingRequest.buffers.push_back(requestedBuf);
   1740 
   1741         // Add to buffer handle the pending buffers list
   1742         PendingBufferInfo bufferInfo;
   1743         bufferInfo.frame_number = frameNumber;
   1744         bufferInfo.buffer = request->output_buffers[i].buffer;
   1745         bufferInfo.stream = request->output_buffers[i].stream;
   1746         mPendingBuffersMap.mPendingBufferList.push_back(bufferInfo);
   1747         mPendingBuffersMap.num_buffers++;
   1748         ALOGV("%s: frame = %d, buffer = %p, stream = %p, stream format = %d",
   1749           __func__, frameNumber, bufferInfo.buffer, bufferInfo.stream,
   1750           bufferInfo.stream->format);
   1751     }
   1752     ALOGV("%s: mPendingBuffersMap.num_buffers = %d",
   1753           __func__, mPendingBuffersMap.num_buffers);
   1754 
   1755     mPendingRequestsList.push_back(pendingRequest);
   1756 
   1757     if (mFlush) {
   1758         pthread_mutex_unlock(&mMutex);
   1759         return NO_ERROR;
   1760     }
   1761 
   1762     // Notify metadata channel we receive a request
   1763     mMetadataChannel->request(NULL, frameNumber);
   1764 
   1765     // Call request on other streams
   1766     for (size_t i = 0; i < request->num_output_buffers; i++) {
   1767         const camera3_stream_buffer_t& output = request->output_buffers[i];
   1768         QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
   1769         mm_camera_buf_def_t *pInputBuffer = NULL;
   1770 
   1771         if (channel == NULL) {
   1772             ALOGE("%s: invalid channel pointer for stream", __func__);
   1773             continue;
   1774         }
   1775 
   1776         if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
   1777             QCamera3RegularChannel* inputChannel = NULL;
   1778             if(request->input_buffer != NULL){
   1779 
   1780                 //Try to get the internal format
   1781                 inputChannel = (QCamera3RegularChannel*)
   1782                     request->input_buffer->stream->priv;
   1783                 if(inputChannel == NULL ){
   1784                     ALOGE("%s: failed to get input channel handle", __func__);
   1785                 } else {
   1786                     pInputBuffer =
   1787                         inputChannel->getInternalFormatBuffer(
   1788                                 request->input_buffer->buffer);
   1789                     ALOGD("%s: Input buffer dump",__func__);
   1790                     ALOGD("Stream id: %d", pInputBuffer->stream_id);
   1791                     ALOGD("streamtype:%d", pInputBuffer->stream_type);
   1792                     ALOGD("frame len:%d", pInputBuffer->frame_len);
   1793                     ALOGD("Handle:%p", request->input_buffer->buffer);
   1794                 }
   1795                 rc = channel->request(output.buffer, frameNumber,
   1796                             pInputBuffer, mParameters);
   1797                 if (rc < 0) {
   1798                     ALOGE("%s: Fail to request on picture channel", __func__);
   1799                     pthread_mutex_unlock(&mMutex);
   1800                     return rc;
   1801                 }
   1802 
   1803                 rc = setReprocParameters(request);
   1804                 if (rc < 0) {
   1805                     ALOGE("%s: fail to set reproc parameters", __func__);
   1806                     pthread_mutex_unlock(&mMutex);
   1807                     return rc;
   1808                 }
   1809             } else{
   1810                  ALOGV("%s: %d, snapshot request with buffer %p, frame_number %d", __func__,
   1811                        __LINE__, output.buffer, frameNumber);
   1812                  if (mRepeatingRequest) {
   1813                    rc = channel->request(output.buffer, frameNumber,
   1814                                NULL, mPrevParameters);
   1815                  } else {
   1816                     rc = channel->request(output.buffer, frameNumber,
   1817                                NULL, mParameters);
   1818                  }
   1819             }
   1820         } else {
   1821             ALOGV("%s: %d, request with buffer %p, frame_number %d", __func__,
   1822                 __LINE__, output.buffer, frameNumber);
   1823            rc = channel->request(output.buffer, frameNumber);
   1824         }
   1825         if (rc < 0)
   1826             ALOGE("%s: request failed", __func__);
   1827     }
   1828 
   1829     /*set the parameters to backend*/
   1830     mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
   1831 
   1832     mFirstRequest = false;
   1833     // Added a timed condition wait
   1834     struct timespec ts;
   1835     uint8_t isValidTimeout = 1;
   1836     rc = clock_gettime(CLOCK_REALTIME, &ts);
   1837     if (rc < 0) {
   1838         isValidTimeout = 0;
   1839         ALOGE("%s: Error reading the real time clock!!", __func__);
   1840     }
   1841     else {
   1842         // Make timeout as 5 sec for request to be honored
   1843         ts.tv_sec += 5;
   1844     }
   1845     //Block on conditional variable
   1846 
   1847     mPendingRequest++;
   1848     while (mPendingRequest >= kMaxInFlight) {
   1849         if (!isValidTimeout) {
   1850             ALOGV("%s: Blocking on conditional wait", __func__);
   1851             pthread_cond_wait(&mRequestCond, &mMutex);
   1852         }
   1853         else {
   1854             ALOGV("%s: Blocking on timed conditional wait", __func__);
   1855             rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
   1856             if (rc == ETIMEDOUT) {
   1857                 rc = -ENODEV;
   1858                 ALOGE("%s: Unblocked on timeout!!!!", __func__);
   1859                 break;
   1860             }
   1861         }
   1862         ALOGV("%s: Unblocked", __func__);
   1863     }
   1864     pthread_mutex_unlock(&mMutex);
   1865 
   1866     return rc;
   1867 }
   1868 
   1869 /*===========================================================================
   1870  * FUNCTION   : dump
   1871  *
 * DESCRIPTION: Dump HAL debug state to the given fd. Currently a stub.
   1873  *
   1874  * PARAMETERS :
   1875  *
   1876  *
   1877  * RETURN     :
   1878  *==========================================================================*/
   1879 void QCamera3HardwareInterface::dump(int /*fd*/)
   1880 {
   1881     /*Enable lock when we implement this function*/
   1882     /*
   1883     pthread_mutex_lock(&mMutex);
   1884 
   1885     pthread_mutex_unlock(&mMutex);
   1886     */
   1887     return;
   1888 }
   1889 
   1890 /*===========================================================================
   1891  * FUNCTION   : flush
   1892  *
 * DESCRIPTION: Stop all streams/channels and return every pending buffer
 *              and request to the framework with error status.
   1894  *
   1895  * PARAMETERS :
   1896  *
   1897  *
   1898  * RETURN     :
   1899  *==========================================================================*/
   1900 int QCamera3HardwareInterface::flush()
   1901 {
   1902     unsigned int frameNum = 0;
   1903     camera3_notify_msg_t notify_msg;
   1904     camera3_capture_result_t result;
   1905     camera3_stream_buffer_t *pStream_Buf = NULL;
   1906     FlushMap flushMap;
   1907 
   1908     ALOGV("%s: Unblocking Process Capture Request", __func__);
   1909 
   1910     pthread_mutex_lock(&mMutex);
   1911     mFlush = true;
   1912     pthread_mutex_unlock(&mMutex);
   1913 
   1914     memset(&result, 0, sizeof(camera3_capture_result_t));
   1915 
   1916     // Stop the Streams/Channels
   1917     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   1918         it != mStreamInfo.end(); it++) {
   1919         QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
   1920         channel->stop();
   1921         (*it)->status = INVALID;
   1922     }
   1923 
   1924     if (mSupportChannel) {
   1925         mSupportChannel->stop();
   1926     }
   1927     if (mMetadataChannel) {
   1928         /* If content of mStreamInfo is not 0, there is metadata stream */
   1929         mMetadataChannel->stop();
   1930     }
   1931 
   1932     // Mutex Lock
   1933     pthread_mutex_lock(&mMutex);
   1934 
   1935     // Unblock process_capture_request
   1936     mPendingRequest = 0;
   1937     pthread_cond_signal(&mRequestCond);
   1938 
   1939     List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
   1940     frameNum = i->frame_number;
   1941     ALOGV("%s: Oldest frame num on  mPendingRequestsList = %d",
   1942       __func__, frameNum);
   1943 
   1944     // Go through the pending buffers and group them depending
   1945     // on frame number
   1946     for (List<PendingBufferInfo>::iterator k =
   1947             mPendingBuffersMap.mPendingBufferList.begin();
   1948             k != mPendingBuffersMap.mPendingBufferList.end();) {
   1949 
   1950         if (k->frame_number < frameNum) {
   1951             ssize_t idx = flushMap.indexOfKey(k->frame_number);
   1952             if (idx == NAME_NOT_FOUND) {
   1953                 Vector<PendingBufferInfo> pending;
   1954                 pending.add(*k);
   1955                 flushMap.add(k->frame_number, pending);
   1956             } else {
   1957                 Vector<PendingBufferInfo> &pending =
   1958                         flushMap.editValueFor(k->frame_number);
   1959                 pending.add(*k);
   1960             }
   1961 
   1962             mPendingBuffersMap.num_buffers--;
   1963             k = mPendingBuffersMap.mPendingBufferList.erase(k);
   1964         } else {
   1965             k++;
   1966         }
   1967     }
   1968 
   1969     for (size_t i = 0; i < flushMap.size(); i++) {
   1970         uint32_t frame_number = flushMap.keyAt(i);
   1971         const Vector<PendingBufferInfo> &pending = flushMap.valueAt(i);
   1972 
   1973         // Send Error notify to frameworks for each buffer for which
   1974         // metadata buffer is already sent
   1975         ALOGV("%s: Sending ERROR BUFFER for frame %d number of buffer %d",
   1976           __func__, frame_number, pending.size());
   1977 
   1978         pStream_Buf = new camera3_stream_buffer_t[pending.size()];
   1979         if (NULL == pStream_Buf) {
   1980             ALOGE("%s: No memory for pending buffers array", __func__);
   1981             pthread_mutex_unlock(&mMutex);
   1982             return NO_MEMORY;
   1983         }
   1984 
   1985         for (size_t j = 0; j < pending.size(); j++) {
   1986             const PendingBufferInfo &info = pending.itemAt(j);
   1987             notify_msg.type = CAMERA3_MSG_ERROR;
   1988             notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
   1989             notify_msg.message.error.error_stream = info.stream;
   1990             notify_msg.message.error.frame_number = frame_number;
   1991             pStream_Buf[j].acquire_fence = -1;
   1992             pStream_Buf[j].release_fence = -1;
   1993             pStream_Buf[j].buffer = info.buffer;
   1994             pStream_Buf[j].status = CAMERA3_BUFFER_STATUS_ERROR;
   1995             pStream_Buf[j].stream = info.stream;
   1996             mCallbackOps->notify(mCallbackOps, &notify_msg);
   1997             ALOGV("%s: notify frame_number = %d stream %p", __func__,
   1998                     frame_number, info.stream);
   1999         }
   2000 
   2001         result.result = NULL;
   2002         result.frame_number = frame_number;
   2003         result.num_output_buffers = pending.size();
   2004         result.output_buffers = pStream_Buf;
   2005         mCallbackOps->process_capture_result(mCallbackOps, &result);
   2006 
   2007         delete [] pStream_Buf;
   2008     }
   2009 
   2010     ALOGV("%s:Sending ERROR REQUEST for all pending requests", __func__);
   2011 
   2012     flushMap.clear();
   2013     for (List<PendingBufferInfo>::iterator k =
   2014             mPendingBuffersMap.mPendingBufferList.begin();
   2015             k != mPendingBuffersMap.mPendingBufferList.end();) {
   2016         ssize_t idx = flushMap.indexOfKey(k->frame_number);
   2017         if (idx == NAME_NOT_FOUND) {
   2018             Vector<PendingBufferInfo> pending;
   2019             pending.add(*k);
   2020             flushMap.add(k->frame_number, pending);
   2021         } else {
   2022             Vector<PendingBufferInfo> &pending =
   2023                     flushMap.editValueFor(k->frame_number);
   2024             pending.add(*k);
   2025         }
   2026 
   2027         mPendingBuffersMap.num_buffers--;
   2028         k = mPendingBuffersMap.mPendingBufferList.erase(k);
   2029     }
   2030 
   2031     // Go through the pending requests info and send error request to framework
   2032     for (size_t i = 0; i < flushMap.size(); i++) {
   2033         uint32_t frame_number = flushMap.keyAt(i);
   2034         const Vector<PendingBufferInfo> &pending = flushMap.valueAt(i);
   2035         ALOGV("%s:Sending ERROR REQUEST for frame %d",
   2036               __func__, frame_number);
   2037 
   2038         // Send shutter notify to frameworks
   2039         notify_msg.type = CAMERA3_MSG_ERROR;
   2040         notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
   2041         notify_msg.message.error.error_stream = NULL;
   2042         notify_msg.message.error.frame_number = frame_number;
   2043         mCallbackOps->notify(mCallbackOps, &notify_msg);
   2044 
   2045         pStream_Buf = new camera3_stream_buffer_t[pending.size()];
   2046         if (NULL == pStream_Buf) {
   2047             ALOGE("%s: No memory for pending buffers array", __func__);
   2048             pthread_mutex_unlock(&mMutex);
   2049             return NO_MEMORY;
   2050         }
   2051 
   2052         for (size_t j = 0; j < pending.size(); j++) {
   2053             const PendingBufferInfo &info = pending.itemAt(j);
   2054             pStream_Buf[j].acquire_fence = -1;
   2055             pStream_Buf[j].release_fence = -1;
   2056             pStream_Buf[j].buffer = info.buffer;
   2057             pStream_Buf[j].status = CAMERA3_BUFFER_STATUS_ERROR;
   2058             pStream_Buf[j].stream = info.stream;
   2059         }
   2060 
   2061         result.num_output_buffers = pending.size();
   2062         result.output_buffers = pStream_Buf;
   2063         result.result = NULL;
   2064         result.frame_number = frame_number;
   2065         mCallbackOps->process_capture_result(mCallbackOps, &result);
   2066         delete [] pStream_Buf;
   2067     }
   2068 
   2069     /* Reset pending buffer list and requests list */
   2070     mPendingRequestsList.clear();
   2071     /* Reset pending frame Drop list and requests list */
   2072     mPendingFrameDropList.clear();
   2073 
   2074     flushMap.clear();
   2075     mPendingBuffersMap.num_buffers = 0;
   2076     mPendingBuffersMap.mPendingBufferList.clear();
   2077     ALOGV("%s: Cleared all the pending buffers ", __func__);
   2078 
   2079     mFlush = false;
   2080 
   2081     mFirstRequest = true;
   2082 
   2083     // Start the Streams/Channels
   2084     if (mMetadataChannel) {
   2085         /* If content of mStreamInfo is not 0, there is metadata stream */
   2086         mMetadataChannel->start();
   2087     }
   2088     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   2089         it != mStreamInfo.end(); it++) {
   2090         QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
   2091         channel->start();
   2092     }
   2093     if (mSupportChannel) {
   2094         mSupportChannel->start();
   2095     }
   2096 
   2097     pthread_mutex_unlock(&mMutex);
   2098     return 0;
   2099 }
   2100 
   2101 /*===========================================================================
   2102  * FUNCTION   : captureResultCb
   2103  *
   2104  * DESCRIPTION: Callback handler for all capture result
   2105  *              (streams, as well as metadata)
   2106  *
 * PARAMETERS :
 *   @metadata_buf : metadata information; NULL if this is a buffer callback
 *   @buffer       : actual gralloc buffer to be returned to frameworks.
 *                   NULL if metadata.
 *   @frame_number : frame number of the request this buffer belongs to
 *
 * RETURN     : NONE
   2113  *==========================================================================*/
   2114 void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
   2115                 camera3_stream_buffer_t *buffer, uint32_t frame_number)
   2116 {
   2117     pthread_mutex_lock(&mMutex);
   2118 
   2119     /* Assume flush() is called before any reprocessing. Send
   2120      * notify and result immediately upon receipt of any callback*/
   2121     if (mLoopBackResult) {
   2122         /* Send notify */
   2123         camera3_notify_msg_t notify_msg;
   2124         notify_msg.type = CAMERA3_MSG_SHUTTER;
   2125         notify_msg.message.shutter.frame_number = mLoopBackResult->frame_number;
   2126         notify_msg.message.shutter.timestamp = mLoopBackTimestamp;
   2127         mCallbackOps->notify(mCallbackOps, &notify_msg);
   2128 
   2129         /* Send capture result */
   2130         mCallbackOps->process_capture_result(mCallbackOps, mLoopBackResult);
   2131         free_camera_metadata((camera_metadata_t *)mLoopBackResult->result);
   2132         free(mLoopBackResult);
   2133         mLoopBackResult = NULL;
   2134     }
   2135 
   2136     if (metadata_buf)
   2137         handleMetadataWithLock(metadata_buf);
   2138     else
   2139         handleBufferWithLock(buffer, frame_number);
   2140 
   2141     pthread_mutex_unlock(&mMutex);
   2142     return;
   2143 }
   2144 
   2145 /*===========================================================================
   2146  * FUNCTION   : translateFromHalMetadata
   2147  *
 * DESCRIPTION: Translates metadata reported by the HAL backend into the
 *              camera_metadata_t format expected by the camera framework
   2149  *
   2150  * PARAMETERS :
   2151  *   @metadata : metadata information from callback
   2152  *
   2153  * RETURN     : camera_metadata_t*
   2154  *              metadata in a format specified by fwk
   2155  *==========================================================================*/
   2156 camera_metadata_t*
   2157 QCamera3HardwareInterface::translateFromHalMetadata(
   2158                                  metadata_buffer_t *metadata,
   2159                                  nsecs_t timestamp,
   2160                                  int32_t request_id,
   2161                                  const CameraMetadata& jpegMetadata,
   2162                                  uint8_t pipeline_depth)
   2163 {
   2164     CameraMetadata camMetadata;
   2165     camera_metadata_t* resultMetadata;
   2166 
   2167     if (jpegMetadata.entryCount())
   2168         camMetadata.append(jpegMetadata);
   2169 
   2170     camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
   2171     camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
   2172     camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
   2173 
   2174     uint8_t curr_entry = GET_FIRST_PARAM_ID(metadata);
   2175     uint8_t next_entry;
   2176     while (curr_entry != CAM_INTF_PARM_MAX) {
   2177        switch (curr_entry) {
   2178          case CAM_INTF_META_FRAME_NUMBER:{
   2179              int64_t frame_number = *(uint32_t *) POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
   2180              camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &frame_number, 1);
   2181              break;
   2182          }
   2183          case CAM_INTF_META_FACE_DETECTION:{
   2184              cam_face_detection_data_t *faceDetectionInfo =
   2185                 (cam_face_detection_data_t *)POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
   2186              uint8_t numFaces = faceDetectionInfo->num_faces_detected;
   2187              int32_t faceIds[MAX_ROI];
   2188              uint8_t faceScores[MAX_ROI];
   2189              int32_t faceRectangles[MAX_ROI * 4];
   2190              int j = 0;
   2191              for (int i = 0; i < numFaces; i++) {
   2192                  faceIds[i] = faceDetectionInfo->faces[i].face_id;
   2193                  faceScores[i] = faceDetectionInfo->faces[i].score;
   2194                  convertToRegions(faceDetectionInfo->faces[i].face_boundary,
   2195                          faceRectangles+j, -1);
   2196                  j+= 4;
   2197              }
   2198 
   2199              if (numFaces <= 0) {
   2200                 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
   2201                 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
   2202                 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
   2203              }
   2204 
   2205              camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
   2206              camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
   2207              camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
   2208                faceRectangles, numFaces*4);
   2209             break;
   2210             }
   2211          case CAM_INTF_META_COLOR_CORRECT_MODE:{
   2212              uint8_t  *color_correct_mode =
   2213                            (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
   2214              camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);
   2215              break;
   2216           }
   2217 
   2218          // 3A state is sent in urgent partial result (uses quirk)
   2219          case CAM_INTF_META_AEC_STATE:
   2220          case CAM_INTF_PARM_AEC_LOCK:
   2221          case CAM_INTF_PARM_EV:
   2222          case CAM_INTF_PARM_FOCUS_MODE:
   2223          case CAM_INTF_META_AF_STATE:
   2224          case CAM_INTF_PARM_WHITE_BALANCE:
   2225          case CAM_INTF_META_AWB_REGIONS:
   2226          case CAM_INTF_META_AWB_STATE:
   2227          case CAM_INTF_PARM_AWB_LOCK:
   2228          case CAM_INTF_META_PRECAPTURE_TRIGGER:
   2229          case CAM_INTF_META_AEC_MODE:
   2230          case CAM_INTF_PARM_LED_MODE:
   2231          case CAM_INTF_PARM_REDEYE_REDUCTION:
   2232          case CAM_INTF_META_AF_TRIGGER_NOTICE: {
   2233            ALOGV("%s: 3A metadata: %d, do not process", __func__, curr_entry);
   2234            break;
   2235          }
   2236 
   2237           case CAM_INTF_META_MODE: {
   2238              uint8_t *mode =(uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
   2239              camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);
   2240              break;
   2241           }
   2242 
   2243           case CAM_INTF_META_EDGE_MODE: {
   2244              cam_edge_application_t  *edgeApplication =
   2245                 (cam_edge_application_t *)POINTER_OF(CAM_INTF_META_EDGE_MODE, metadata);
   2246              uint8_t edgeStrength = (uint8_t)edgeApplication->sharpness;
   2247              camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
   2248              camMetadata.update(ANDROID_EDGE_STRENGTH, &edgeStrength, 1);
   2249              break;
   2250           }
   2251           case CAM_INTF_META_FLASH_POWER: {
   2252              uint8_t  *flashPower =
   2253                   (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
   2254              camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);
   2255              break;
   2256           }
   2257           case CAM_INTF_META_FLASH_FIRING_TIME: {
   2258              int64_t  *flashFiringTime =
   2259                   (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
   2260              camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
   2261              break;
   2262           }
   2263           case CAM_INTF_META_FLASH_STATE: {
   2264              uint8_t  flashState =
   2265                 *((uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata));
   2266              if (!gCamCapability[mCameraId]->flash_available) {
   2267                  flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
   2268              }
   2269              camMetadata.update(ANDROID_FLASH_STATE, &flashState, 1);
   2270              break;
   2271           }
   2272           case CAM_INTF_META_FLASH_MODE:{
   2273              uint8_t flashMode = *((uint8_t*)
   2274                  POINTER_OF(CAM_INTF_META_FLASH_MODE, metadata));
   2275              uint8_t fwk_flashMode = lookupFwkName(FLASH_MODES_MAP,
   2276                                           sizeof(FLASH_MODES_MAP),
   2277                                           flashMode);
   2278              camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
   2279              break;
   2280           }
   2281           case CAM_INTF_META_HOTPIXEL_MODE: {
   2282               uint8_t  *hotPixelMode =
   2283                  (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
   2284               camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);
   2285               break;
   2286           }
   2287           case CAM_INTF_META_LENS_APERTURE:{
   2288              float  *lensAperture =
   2289                 (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
   2290              camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
   2291              break;
   2292           }
   2293           case CAM_INTF_META_LENS_FILTERDENSITY: {
   2294              float  *filterDensity =
   2295                 (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
   2296              camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
   2297              break;
   2298           }
   2299           case CAM_INTF_META_LENS_FOCAL_LENGTH:{
   2300              float  *focalLength =
   2301                 (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
   2302              camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
   2303              break;
   2304           }
   2305           case CAM_INTF_META_LENS_FOCUS_DISTANCE: {
   2306              float  *focusDistance =
   2307                 (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
   2308              camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
   2309              break;
   2310           }
   2311           case CAM_INTF_META_LENS_FOCUS_RANGE: {
   2312              float  *focusRange =
   2313                 (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
   2314              camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
   2315              break;
   2316           }
   2317           case CAM_INTF_META_LENS_STATE: {
   2318              uint8_t *lensState = (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_STATE, metadata);
   2319              camMetadata.update(ANDROID_LENS_STATE , lensState, 1);
   2320              break;
   2321           }
   2322           case CAM_INTF_META_LENS_OPT_STAB_MODE: {
   2323              uint8_t  *opticalStab =
   2324                 (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
   2325              camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);
   2326              break;
   2327           }
   2328           case CAM_INTF_META_NOISE_REDUCTION_MODE: {
   2329              uint8_t  *noiseRedMode =
   2330                 (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
   2331              camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);
   2332              break;
   2333           }
   2334           case CAM_INTF_META_NOISE_REDUCTION_STRENGTH: {
   2335              uint8_t  *noiseRedStrength =
   2336                 (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_STRENGTH, metadata);
   2337              camMetadata.update(ANDROID_NOISE_REDUCTION_STRENGTH, noiseRedStrength, 1);
   2338              break;
   2339           }
   2340           case CAM_INTF_META_SCALER_CROP_REGION: {
   2341              cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
   2342              POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
   2343              int32_t scalerCropRegion[4];
   2344              scalerCropRegion[0] = hScalerCropRegion->left;
   2345              scalerCropRegion[1] = hScalerCropRegion->top;
   2346              scalerCropRegion[2] = hScalerCropRegion->width;
   2347              scalerCropRegion[3] = hScalerCropRegion->height;
   2348              camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
   2349              break;
   2350           }
   2351           case CAM_INTF_META_AEC_ROI: {
   2352             cam_area_t  *hAeRegions =
   2353                 (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
   2354             int32_t aeRegions[5];
   2355             convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
   2356             camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);
   2357             ALOGV("%s: Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d, %d, %d, %d] HAL: [%d, %d, %d, %d]",
   2358                 __func__, aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
   2359                 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width, hAeRegions->rect.height);
   2360             break;
   2361           }
   2362           case CAM_INTF_META_AF_ROI:{
   2363             /*af regions*/
   2364             cam_area_t  *hAfRegions =
   2365                 (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
   2366             int32_t afRegions[5];
   2367             convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
   2368             camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);
   2369             ALOGV("%s: Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d, %d, %d, %d] HAL: [%d, %d, %d, %d]",
   2370                 __func__, afRegions[0], afRegions[1], afRegions[2], afRegions[3],
   2371                 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width, hAfRegions->rect.height);
   2372             break;
   2373           }
   2374           case CAM_INTF_META_SENSOR_EXPOSURE_TIME:{
   2375              int64_t  *sensorExpTime =
   2376                 (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
   2377              ALOGV("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
   2378              camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
   2379              break;
   2380           }
   2381           case CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW:{
   2382              int64_t  *sensorRollingShutterSkew =
   2383                 (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW,
   2384                   metadata);
   2385              ALOGV("%s: sensorRollingShutterSkew = %lld", __func__,
   2386                *sensorRollingShutterSkew);
   2387              camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW ,
   2388                sensorRollingShutterSkew, 1);
   2389              break;
   2390           }
   2391           case CAM_INTF_META_SENSOR_FRAME_DURATION:{
   2392              int64_t  *sensorFameDuration =
   2393                 (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
   2394              ALOGV("%s: sensorFameDuration = %lld", __func__, *sensorFameDuration);
   2395              camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
   2396              break;
   2397           }
   2398           case CAM_INTF_META_SENSOR_SENSITIVITY:{
   2399             int32_t sensorSensitivity =
   2400                *((int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata));
   2401             ALOGV("%s: sensorSensitivity = %d", __func__, sensorSensitivity);
   2402             camMetadata.update(ANDROID_SENSOR_SENSITIVITY, &sensorSensitivity, 1);
   2403 
   2404             double noise_profile_S = computeNoiseModelEntryS(sensorSensitivity);
   2405             double noise_profile_O = computeNoiseModelEntryO(sensorSensitivity);
   2406             double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
   2407             for(int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i+=2){
   2408                 noise_profile[i]   = noise_profile_S;
   2409                 noise_profile[i+1] = noise_profile_O;
   2410             }
   2411             camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
   2412                 2 * gCamCapability[mCameraId]->num_color_channels);
   2413             break;
   2414           }
   2415           case CAM_INTF_PARM_BESTSHOT_MODE: {
   2416               uint8_t *sceneMode =
   2417                   (uint8_t *)POINTER_OF(CAM_INTF_PARM_BESTSHOT_MODE, metadata);
   2418               uint8_t fwkSceneMode =
   2419                   (uint8_t)lookupFwkName(SCENE_MODES_MAP,
   2420                   sizeof(SCENE_MODES_MAP)/
   2421                   sizeof(SCENE_MODES_MAP[0]), *sceneMode);
   2422               camMetadata.update(ANDROID_CONTROL_SCENE_MODE,
   2423                    &fwkSceneMode, 1);
   2424               ALOGV("%s: Metadata : ANDROID_CONTROL_SCENE_MODE: %d", __func__, fwkSceneMode);
   2425               break;
   2426           }
   2427 
   2428           case CAM_INTF_META_SHADING_MODE: {
   2429              uint8_t  *shadingMode =
   2430                 (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
   2431              camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);
   2432              break;
   2433           }
   2434 
   2435           case CAM_INTF_META_LENS_SHADING_MAP_MODE: {
   2436              uint8_t  *shadingMapMode =
   2437                 (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata);
   2438              camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, shadingMapMode, 1);
   2439              break;
   2440           }
   2441 
   2442           case CAM_INTF_META_STATS_FACEDETECT_MODE: {
   2443              uint8_t  *faceDetectMode =
   2444                 (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
   2445              uint8_t fwk_faceDetectMode = (uint8_t)lookupFwkName(FACEDETECT_MODES_MAP,
   2446                                                         sizeof(FACEDETECT_MODES_MAP)/sizeof(FACEDETECT_MODES_MAP[0]),
   2447                                                         *faceDetectMode);
   2448              /* Downgrade to simple mode */
   2449              if (fwk_faceDetectMode == ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
   2450                  fwk_faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE;
   2451              }
   2452              camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
   2453              break;
   2454           }
   2455           case CAM_INTF_META_STATS_HISTOGRAM_MODE: {
   2456              uint8_t  *histogramMode =
   2457                 (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
   2458              camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);
   2459              break;
   2460           }
   2461           case CAM_INTF_META_STATS_SHARPNESS_MAP_MODE:{
   2462                uint8_t  *sharpnessMapMode =
   2463                   (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
   2464                camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
   2465                                   sharpnessMapMode, 1);
   2466                break;
   2467            }
   2468           case CAM_INTF_META_STATS_SHARPNESS_MAP:{
   2469                cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
   2470                POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
   2471                camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
   2472                                   (int32_t*)sharpnessMap->sharpness,
   2473                                   CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);
   2474                break;
   2475           }
   2476           case CAM_INTF_META_LENS_SHADING_MAP: {
   2477                cam_lens_shading_map_t *lensShadingMap = (cam_lens_shading_map_t *)
   2478                POINTER_OF(CAM_INTF_META_LENS_SHADING_MAP, metadata);
   2479                int map_height = gCamCapability[mCameraId]->lens_shading_map_size.height;
   2480                int map_width  = gCamCapability[mCameraId]->lens_shading_map_size.width;
   2481                camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
   2482                                   (float*)lensShadingMap->lens_shading,
   2483                                   4*map_width*map_height);
   2484                break;
   2485           }
   2486 
   2487           case CAM_INTF_META_TONEMAP_MODE: {
   2488              uint8_t  *toneMapMode =
   2489                 (uint8_t *)POINTER_OF(CAM_INTF_META_TONEMAP_MODE, metadata);
   2490              camMetadata.update(ANDROID_TONEMAP_MODE, toneMapMode, 1);
   2491              break;
   2492           }
   2493 
   2494           case CAM_INTF_META_TONEMAP_CURVES:{
   2495              //Populate CAM_INTF_META_TONEMAP_CURVES
   2496              /* ch0 = G, ch 1 = B, ch 2 = R*/
   2497              cam_rgb_tonemap_curves *tonemap = (cam_rgb_tonemap_curves *)
   2498              POINTER_OF(CAM_INTF_META_TONEMAP_CURVES, metadata);
   2499              camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
   2500                                 (float*)tonemap->curves[0].tonemap_points,
   2501                                 tonemap->tonemap_points_cnt * 2);
   2502 
   2503              camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
   2504                                 (float*)tonemap->curves[1].tonemap_points,
   2505                                 tonemap->tonemap_points_cnt * 2);
   2506 
   2507              camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
   2508                                 (float*)tonemap->curves[2].tonemap_points,
   2509                                 tonemap->tonemap_points_cnt * 2);
   2510              break;
   2511           }
   2512 
   2513           case CAM_INTF_META_COLOR_CORRECT_GAINS:{
   2514              cam_color_correct_gains_t *colorCorrectionGains = (cam_color_correct_gains_t*)
   2515              POINTER_OF(CAM_INTF_META_COLOR_CORRECT_GAINS, metadata);
   2516              camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains, 4);
   2517              break;
   2518           }
   2519           case CAM_INTF_META_COLOR_CORRECT_TRANSFORM:{
   2520               cam_color_correct_matrix_t *colorCorrectionMatrix = (cam_color_correct_matrix_t*)
   2521               POINTER_OF(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata);
   2522               camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
   2523                        (camera_metadata_rational_t*)colorCorrectionMatrix->transform_matrix, 3*3);
   2524               break;
   2525           }
   2526 
   2527           /* DNG file realted metadata */
   2528           case CAM_INTF_META_PROFILE_TONE_CURVE: {
   2529              cam_profile_tone_curve *toneCurve = (cam_profile_tone_curve *)
   2530              POINTER_OF(CAM_INTF_META_PROFILE_TONE_CURVE, metadata);
   2531              camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
   2532                                 (float*)toneCurve->curve.tonemap_points,
   2533                                 toneCurve->tonemap_points_cnt * 2);
   2534              break;
   2535           }
   2536 
   2537           case CAM_INTF_META_PRED_COLOR_CORRECT_GAINS:{
   2538              cam_color_correct_gains_t *predColorCorrectionGains = (cam_color_correct_gains_t*)
   2539              POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata);
   2540              camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
   2541                        predColorCorrectionGains->gains, 4);
   2542              break;
   2543           }
   2544           case CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM:{
   2545              cam_color_correct_matrix_t *predColorCorrectionMatrix = (cam_color_correct_matrix_t*)
   2546                    POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata);
   2547              camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
   2548                                   (camera_metadata_rational_t*)predColorCorrectionMatrix->transform_matrix, 3*3);
   2549              break;
   2550 
   2551           }
   2552 
   2553           case CAM_INTF_META_OTP_WB_GRGB:{
   2554              float *otpWbGrGb = (float*) POINTER_OF(CAM_INTF_META_OTP_WB_GRGB, metadata);
   2555              camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
   2556              break;
   2557           }
   2558 
   2559           case CAM_INTF_META_BLACK_LEVEL_LOCK:{
   2560              uint8_t *blackLevelLock = (uint8_t*)
   2561                POINTER_OF(CAM_INTF_META_BLACK_LEVEL_LOCK, metadata);
   2562              camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, blackLevelLock, 1);
   2563              break;
   2564           }
   2565           case CAM_INTF_PARM_ANTIBANDING: {
   2566             uint8_t *hal_ab_mode =
   2567               (uint8_t *)POINTER_OF(CAM_INTF_PARM_ANTIBANDING, metadata);
   2568             uint8_t fwk_ab_mode = (uint8_t)lookupFwkName(ANTIBANDING_MODES_MAP,
   2569                      sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
   2570                      *hal_ab_mode);
   2571             camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE,
   2572                 &fwk_ab_mode, 1);
   2573             break;
   2574           }
   2575 
   2576           case CAM_INTF_META_CAPTURE_INTENT:{
   2577              uint8_t *captureIntent = (uint8_t*)
   2578                POINTER_OF(CAM_INTF_META_CAPTURE_INTENT, metadata);
   2579              camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, captureIntent, 1);
   2580              break;
   2581           }
   2582 
   2583           case CAM_INTF_META_SCENE_FLICKER:{
   2584              uint8_t *sceneFlicker = (uint8_t*)
   2585              POINTER_OF(CAM_INTF_META_SCENE_FLICKER, metadata);
   2586              camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, sceneFlicker, 1);
   2587              break;
   2588           }
   2589           case CAM_INTF_PARM_EFFECT: {
   2590              uint8_t *effectMode = (uint8_t*)
   2591                   POINTER_OF(CAM_INTF_PARM_EFFECT, metadata);
   2592              uint8_t fwk_effectMode = (uint8_t)lookupFwkName(EFFECT_MODES_MAP,
   2593                                                     sizeof(EFFECT_MODES_MAP),
   2594                                                     *effectMode);
   2595              camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
   2596              break;
   2597           }
   2598           case CAM_INTF_META_TEST_PATTERN_DATA: {
   2599              cam_test_pattern_data_t *testPatternData = (cam_test_pattern_data_t *)
   2600                  POINTER_OF(CAM_INTF_META_TEST_PATTERN_DATA, metadata);
   2601              int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
   2602                      sizeof(TEST_PATTERN_MAP)/sizeof(TEST_PATTERN_MAP[0]),
   2603                      testPatternData->mode);
   2604              camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE,
   2605                      &fwk_testPatternMode, 1);
   2606             int32_t fwk_testPatternData[4];
   2607             fwk_testPatternData[0] = testPatternData->r;
   2608             fwk_testPatternData[3] = testPatternData->b;
   2609             switch (gCamCapability[mCameraId]->color_arrangement) {
   2610             case CAM_FILTER_ARRANGEMENT_RGGB:
   2611             case CAM_FILTER_ARRANGEMENT_GRBG:
   2612                 fwk_testPatternData[1] = testPatternData->gr;
   2613                 fwk_testPatternData[2] = testPatternData->gb;
   2614                 break;
   2615             case CAM_FILTER_ARRANGEMENT_GBRG:
   2616             case CAM_FILTER_ARRANGEMENT_BGGR:
   2617                 fwk_testPatternData[2] = testPatternData->gr;
   2618                 fwk_testPatternData[1] = testPatternData->gb;
   2619                 break;
   2620             default:
   2621                 ALOGE("%s: color arrangement %d is not supported", __func__,
   2622                     gCamCapability[mCameraId]->color_arrangement);
   2623                 break;
   2624             }
   2625             camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
   2626             break;
   2627 
   2628           }
   2629           case CAM_INTF_META_JPEG_GPS_COORDINATES: {
   2630               double *gps_coords = (double *)POINTER_OF(
   2631                       CAM_INTF_META_JPEG_GPS_COORDINATES, metadata);
   2632               camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
   2633               break;
   2634           }
   2635           case CAM_INTF_META_JPEG_GPS_PROC_METHODS: {
   2636               char *gps_methods = (char *)POINTER_OF(
   2637                       CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata);
   2638               String8 str(gps_methods);
   2639               camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
   2640               break;
   2641           }
   2642           case CAM_INTF_META_JPEG_GPS_TIMESTAMP: {
   2643               int64_t *gps_timestamp = (int64_t *)POINTER_OF(
   2644                       CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata);
   2645               camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
   2646               break;
   2647           }
   2648           case CAM_INTF_META_JPEG_ORIENTATION: {
   2649               int32_t *jpeg_orientation = (int32_t *)POINTER_OF(
   2650                       CAM_INTF_META_JPEG_ORIENTATION, metadata);
   2651               camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
   2652               break;
   2653           }
   2654           case CAM_INTF_META_JPEG_QUALITY: {
   2655               uint8_t *jpeg_quality = (uint8_t *)POINTER_OF(
   2656                       CAM_INTF_META_JPEG_QUALITY, metadata);
   2657               camMetadata.update(ANDROID_JPEG_QUALITY, jpeg_quality, 1);
   2658               break;
   2659           }
   2660           case CAM_INTF_META_JPEG_THUMB_QUALITY: {
   2661               uint8_t *thumb_quality = (uint8_t *)POINTER_OF(
   2662                       CAM_INTF_META_JPEG_THUMB_QUALITY, metadata);
   2663               camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, thumb_quality, 1);
   2664               break;
   2665           }
   2666 
   2667           case CAM_INTF_META_JPEG_THUMB_SIZE: {
   2668               cam_dimension_t *thumb_size = (cam_dimension_t *)POINTER_OF(
   2669                       CAM_INTF_META_JPEG_THUMB_SIZE, metadata);
   2670               camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, (int32_t *)thumb_size, 2);
   2671               break;
   2672           }
   2673 
   2674              break;
   2675           case CAM_INTF_META_PRIVATE_DATA: {
   2676              uint8_t *privateData = (uint8_t *)
   2677                  POINTER_OF(CAM_INTF_META_PRIVATE_DATA, metadata);
   2678              camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
   2679                  privateData, MAX_METADATA_PAYLOAD_SIZE);
   2680              break;
   2681           }
   2682 
   2683           case CAM_INTF_META_NEUTRAL_COL_POINT:{
   2684              cam_neutral_col_point_t *neuColPoint = (cam_neutral_col_point_t*)
   2685                  POINTER_OF(CAM_INTF_META_NEUTRAL_COL_POINT, metadata);
   2686              camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
   2687                      (camera_metadata_rational_t*)neuColPoint->neutral_col_point, 3);
   2688              break;
   2689           }
   2690 
   2691           default:
   2692              ALOGV("%s: This is not a valid metadata type to report to fwk, %d",
   2693                    __func__, curr_entry);
   2694              break;
   2695        }
   2696        next_entry = GET_NEXT_PARAM_ID(curr_entry, metadata);
   2697        curr_entry = next_entry;
   2698     }
   2699 
   2700     /* Constant metadata values to be update*/
   2701     uint8_t vs_mode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
   2702     camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vs_mode, 1);
   2703 
   2704     uint8_t hotPixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
   2705     camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelMode, 1);
   2706 
   2707     uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
   2708     camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
   2709 
   2710     int32_t hotPixelMap[2];
   2711     camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
   2712 
   2713     uint8_t cac = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
   2714     camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
   2715                        &cac,
   2716                        1);
   2717 
   2718     resultMetadata = camMetadata.release();
   2719     return resultMetadata;
   2720 }
   2721 
   2722 /*===========================================================================
   2723  * FUNCTION   : translateCbUrgentMetadataToResultMetadata
   2724  *
 * DESCRIPTION: Translates urgent (3A-related: AE/AF/AWB state, triggers,
 *              locks) metadata from a HAL callback batch into framework
 *              result metadata.
   2726  *
   2727  * PARAMETERS :
   2728  *   @metadata : metadata information from callback
   2729  *
   2730  * RETURN     : camera_metadata_t*
   2731  *              metadata in a format specified by fwk
   2732  *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
                                (metadata_buffer_t *metadata)
{
    CameraMetadata camMetadata;
    // Raw result buffer; ownership is transferred to the caller via release().
    camera_metadata_t* resultMetadata;
    // AE-mode inputs collected while walking the batch below; they are only
    // combined into a single ANDROID_CONTROL_AE_MODE after the loop, since
    // any subset of the three may be present in a given metadata batch.
    uint8_t *aeMode = NULL;
    int32_t *flashMode = NULL;
    int32_t *redeye = NULL;

    // Walk every entry present in this HAL metadata batch and translate the
    // urgent (3A-related) tags into framework result tags. Entries are
    // chained; GET_NEXT_PARAM_ID yields CAM_INTF_PARM_MAX at the end.
    uint8_t curr_entry = GET_FIRST_PARAM_ID(metadata);
    uint8_t next_entry;
    while (curr_entry != CAM_INTF_PARM_MAX) {
      switch (curr_entry) {
        case CAM_INTF_META_AEC_STATE:{
            uint8_t *ae_state =
                (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
            camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AE_STATE", __func__);
            break;
        }
        case CAM_INTF_PARM_AEC_LOCK: {
            uint8_t  *ae_lock =
              (uint8_t *)POINTER_OF(CAM_INTF_PARM_AEC_LOCK, metadata);
            camMetadata.update(ANDROID_CONTROL_AE_LOCK,
                                          ae_lock, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AE_LOCK", __func__);
            break;
        }
        case CAM_INTF_PARM_FPS_RANGE: {
            int32_t fps_range[2];
            // HAL reports float fps; the framework tag takes an int32 pair.
            cam_fps_range_t * float_range =
              (cam_fps_range_t *)POINTER_OF(CAM_INTF_PARM_FPS_RANGE, metadata);
            fps_range[0] = (int32_t)float_range->min_fps;
            fps_range[1] = (int32_t)float_range->max_fps;
            camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
                                          fps_range, 2);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
                __func__, fps_range[0], fps_range[1]);
            break;
        }
        case CAM_INTF_PARM_EV: {
            int32_t  *expCompensation =
              (int32_t *)POINTER_OF(CAM_INTF_PARM_EV, metadata);
            camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
                                          expCompensation, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION",
                __func__);
            break;
        }
        case CAM_INTF_PARM_FOCUS_MODE:{
            uint8_t  *focusMode =
                (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
            // Map HAL focus mode enum to the framework AF mode enum.
            uint8_t fwkAfMode = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
               sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]), *focusMode);
            camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AF_MODE", __func__);
            break;
        }
        case CAM_INTF_META_AF_STATE: {
            uint8_t  *afState =
               (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
            camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AF_STATE", __func__);
            break;
        }
        case CAM_INTF_PARM_WHITE_BALANCE: {
           uint8_t  *whiteBalance =
                (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
             uint8_t fwkWhiteBalanceMode =
                    (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
                    sizeof(WHITE_BALANCE_MODES_MAP)/
                    sizeof(WHITE_BALANCE_MODES_MAP[0]), *whiteBalance);
             camMetadata.update(ANDROID_CONTROL_AWB_MODE,
                 &fwkWhiteBalanceMode, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AWB_MODE", __func__);
             break;
        }

        case CAM_INTF_META_AWB_STATE: {
           uint8_t  *whiteBalanceState =
              (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
           camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);
           ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AWB_STATE", __func__);
           break;
        }


        case CAM_INTF_PARM_AWB_LOCK: {
            uint8_t  *awb_lock =
              (uint8_t *)POINTER_OF(CAM_INTF_PARM_AWB_LOCK, metadata);
            camMetadata.update(ANDROID_CONTROL_AWB_LOCK, awb_lock, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AWB_LOCK", __func__);
            break;
        }
        case CAM_INTF_META_PRECAPTURE_TRIGGER: {
            uint8_t *precaptureTrigger =
                (uint8_t *)POINTER_OF(CAM_INTF_META_PRECAPTURE_TRIGGER, metadata);
            camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
                 precaptureTrigger, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER",
                __func__);
            break;
        }
        case CAM_INTF_META_AF_TRIGGER_NOTICE: {
            uint8_t *af_trigger =
              (uint8_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_NOTICE, metadata);
            camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
                af_trigger, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AF_TRIGGER = %d",
                __func__, *af_trigger);
            break;
        }
        // The next three cases only record pointers; they are folded into
        // ANDROID_CONTROL_AE_MODE after the loop (see below).
        case CAM_INTF_META_AEC_MODE:{
            aeMode = (uint8_t*)
            POINTER_OF(CAM_INTF_META_AEC_MODE, metadata);
            break;
        }
        case CAM_INTF_PARM_LED_MODE:{
            flashMode = (int32_t*)
            POINTER_OF(CAM_INTF_PARM_LED_MODE, metadata);
            break;
        }
        case CAM_INTF_PARM_REDEYE_REDUCTION:{
            redeye = (int32_t*)
            POINTER_OF(CAM_INTF_PARM_REDEYE_REDUCTION, metadata);
            break;
        }
        default:
            // Non-urgent tags are handled by the regular translation path.
            ALOGV("%s: Normal Metadata %d, do not process",
              __func__, curr_entry);
            break;
       }
       next_entry = GET_NEXT_PARAM_ID(curr_entry, metadata);
       curr_entry = next_entry;
    }

    // Derive ANDROID_CONTROL_AE_MODE from the collected pieces.
    // Precedence: redeye reduction > flash auto/on > plain AE on/off.
    // If none of the inputs were present in this batch, the tag is simply
    // not reported (only an error log is emitted).
    uint8_t fwk_aeMode;
    if (redeye != NULL && *redeye == 1) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (flashMode != NULL &&
            ((*flashMode == CAM_FLASH_MODE_AUTO)||
             (*flashMode == CAM_FLASH_MODE_ON))) {
        fwk_aeMode = (uint8_t)lookupFwkName(AE_FLASH_MODE_MAP,
                sizeof(AE_FLASH_MODE_MAP)/sizeof(AE_FLASH_MODE_MAP[0]),*flashMode);
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (aeMode != NULL && *aeMode == CAM_AE_MODE_ON) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (aeMode != NULL && *aeMode == CAM_AE_MODE_OFF) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else {
        ALOGE("%s: Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%p, flashMode:%p, aeMode:%p!!!",__func__, redeye, flashMode, aeMode);
    }

    resultMetadata = camMetadata.release();
    return resultMetadata;
}
   2893 
   2894 /*===========================================================================
   2895  * FUNCTION   : dumpMetadataToFile
   2896  *
   2897  * DESCRIPTION: Dumps tuning metadata to file system
   2898  *
   2899  * PARAMETERS :
   2900  *   @meta           : tuning metadata
   2901  *   @dumpFrameCount : current dump frame count
   2902  *   @enabled        : Enable mask
   2903  *
   2904  *==========================================================================*/
   2905 void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
   2906                                                    uint32_t &dumpFrameCount,
   2907                                                    int32_t enabled,
   2908                                                    const char *type,
   2909                                                    uint32_t frameNumber)
   2910 {
   2911     uint32_t frm_num = 0;
   2912 
   2913     //Some sanity checks
   2914     if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
   2915         ALOGE("%s : Tuning sensor data size bigger than expected %d: %d",
   2916               __func__,
   2917               meta.tuning_sensor_data_size,
   2918               TUNING_SENSOR_DATA_MAX);
   2919         return;
   2920     }
   2921 
   2922     if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
   2923         ALOGE("%s : Tuning VFE data size bigger than expected %d: %d",
   2924               __func__,
   2925               meta.tuning_vfe_data_size,
   2926               TUNING_VFE_DATA_MAX);
   2927         return;
   2928     }
   2929 
   2930     if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
   2931         ALOGE("%s : Tuning CPP data size bigger than expected %d: %d",
   2932               __func__,
   2933               meta.tuning_cpp_data_size,
   2934               TUNING_CPP_DATA_MAX);
   2935         return;
   2936     }
   2937 
   2938     if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
   2939         ALOGE("%s : Tuning CAC data size bigger than expected %d: %d",
   2940               __func__,
   2941               meta.tuning_cac_data_size,
   2942               TUNING_CAC_DATA_MAX);
   2943         return;
   2944     }
   2945     //
   2946 
   2947     if(enabled){
   2948         frm_num = ((enabled & 0xffff0000) >> 16);
   2949         if(frm_num == 0) {
   2950             frm_num = 10; //default 10 frames
   2951         }
   2952         if(frm_num > 256) {
   2953             frm_num = 256; //256 buffers cycle around
   2954         }
   2955         if((frm_num == 256) && (dumpFrameCount >= frm_num)) {
   2956             // reset frame count if cycling
   2957             dumpFrameCount = 0;
   2958         }
   2959         ALOGV("DumpFrmCnt = %d, frm_num = %d",dumpFrameCount, frm_num);
   2960         if (dumpFrameCount < frm_num) {
   2961             char timeBuf[FILENAME_MAX];
   2962             char buf[FILENAME_MAX];
   2963             memset(buf, 0, sizeof(buf));
   2964             memset(timeBuf, 0, sizeof(timeBuf));
   2965             time_t current_time;
   2966             struct tm * timeinfo;
   2967             time (&current_time);
   2968             timeinfo = localtime (&current_time);
   2969             strftime (timeBuf, sizeof(timeBuf),"/data/%Y%m%d%H%M%S", timeinfo);
   2970             String8 filePath(timeBuf);
   2971             snprintf(buf,
   2972                      sizeof(buf),
   2973                      "%d_HAL_META_%s_%d.bin",
   2974                      dumpFrameCount,
   2975                      type,
   2976                      frameNumber);
   2977             filePath.append(buf);
   2978             int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
   2979             if (file_fd >= 0) {
   2980                 int written_len = 0;
   2981                 meta.tuning_data_version = TUNING_DATA_VERSION;
   2982                 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
   2983                 written_len += write(file_fd, data, sizeof(uint32_t));
   2984                 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
   2985                 ALOGV("tuning_sensor_data_size %d",(int)(*(int *)data));
   2986                 written_len += write(file_fd, data, sizeof(uint32_t));
   2987                 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
   2988                 ALOGV("tuning_vfe_data_size %d",(int)(*(int *)data));
   2989                 written_len += write(file_fd, data, sizeof(uint32_t));
   2990                 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
   2991                 ALOGV("tuning_cpp_data_size %d",(int)(*(int *)data));
   2992                 written_len += write(file_fd, data, sizeof(uint32_t));
   2993                 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
   2994                 ALOGV("tuning_cac_data_size %d",(int)(*(int *)data));
   2995                 written_len += write(file_fd, data, sizeof(uint32_t));
   2996                 int total_size = meta.tuning_sensor_data_size;
   2997                 data = (void *)((uint8_t *)&meta.data);
   2998                 written_len += write(file_fd, data, total_size);
   2999                 total_size = meta.tuning_vfe_data_size;
   3000                 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
   3001                 written_len += write(file_fd, data, total_size);
   3002                 total_size = meta.tuning_cpp_data_size;
   3003                 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
   3004                 written_len += write(file_fd, data, total_size);
   3005                 total_size = meta.tuning_cac_data_size;
   3006                 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
   3007                 written_len += write(file_fd, data, total_size);
   3008                 close(file_fd);
   3009             }else {
   3010                 ALOGE("%s: fail t open file for image dumping", __func__);
   3011             }
   3012             dumpFrameCount++;
   3013         }
   3014     }
   3015 }
   3016 
   3017 /*===========================================================================
   3018  * FUNCTION   : cleanAndSortStreamInfo
   3019  *
   3020  * DESCRIPTION: helper method to clean up invalid streams in stream_info,
   3021  *              and sort them such that raw stream is at the end of the list
   3022  *              This is a workaround for camera daemon constraint.
   3023  *
   3024  * PARAMETERS : None
   3025  *
   3026  *==========================================================================*/
   3027 void QCamera3HardwareInterface::cleanAndSortStreamInfo()
   3028 {
   3029     List<stream_info_t *> newStreamInfo;
   3030 
   3031     /*clean up invalid streams*/
   3032     for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
   3033             it != mStreamInfo.end();) {
   3034         if(((*it)->status) == INVALID){
   3035             QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
   3036             delete channel;
   3037             free(*it);
   3038             it = mStreamInfo.erase(it);
   3039         } else {
   3040             it++;
   3041         }
   3042     }
   3043 
   3044     // Move preview/video/callback/snapshot streams into newList
   3045     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   3046             it != mStreamInfo.end();) {
   3047         if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
   3048                 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
   3049             newStreamInfo.push_back(*it);
   3050             it = mStreamInfo.erase(it);
   3051         } else
   3052             it++;
   3053     }
   3054     // Move raw streams into newList
   3055     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   3056             it != mStreamInfo.end();) {
   3057         newStreamInfo.push_back(*it);
   3058         it = mStreamInfo.erase(it);
   3059     }
   3060 
   3061     mStreamInfo = newStreamInfo;
   3062 }
   3063 
   3064 /*===========================================================================
   3065  * FUNCTION   : extractJpegMetadata
   3066  *
   3067  * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
   3068  *              JPEG metadata is cached in HAL, and return as part of capture
   3069  *              result when metadata is returned from camera daemon.
   3070  *
   3071  * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
   3072  *              @request:      capture request
   3073  *
   3074  *==========================================================================*/
   3075 void QCamera3HardwareInterface::extractJpegMetadata(
   3076         CameraMetadata& jpegMetadata,
   3077         const camera3_capture_request_t *request)
   3078 {
   3079     CameraMetadata frame_settings;
   3080     frame_settings = request->settings;
   3081 
   3082     if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
   3083         jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
   3084                 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
   3085                 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
   3086 
   3087     if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
   3088         jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
   3089                 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
   3090                 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
   3091 
   3092     if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
   3093         jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
   3094                 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
   3095                 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
   3096 
   3097     if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
   3098         jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
   3099                 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
   3100                 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
   3101 
   3102     if (frame_settings.exists(ANDROID_JPEG_QUALITY))
   3103         jpegMetadata.update(ANDROID_JPEG_QUALITY,
   3104                 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
   3105                 frame_settings.find(ANDROID_JPEG_QUALITY).count);
   3106 
   3107     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
   3108         jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
   3109                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
   3110                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
   3111 
   3112     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE))
   3113         jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
   3114                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32,
   3115                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
   3116 }
   3117 
   3118 /*===========================================================================
   3119  * FUNCTION   : convertToRegions
   3120  *
   3121  * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
   3122  *
   3123  * PARAMETERS :
   3124  *   @rect   : cam_rect_t struct to convert
   3125  *   @region : int32_t destination array
   3126  *   @weight : if we are converting from cam_area_t, weight is valid
   3127  *             else weight = -1
   3128  *
   3129  *==========================================================================*/
   3130 void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
   3131     region[0] = rect.left;
   3132     region[1] = rect.top;
   3133     region[2] = rect.left + rect.width;
   3134     region[3] = rect.top + rect.height;
   3135     if (weight > -1) {
   3136         region[4] = weight;
   3137     }
   3138 }
   3139 
   3140 /*===========================================================================
   3141  * FUNCTION   : convertFromRegions
   3142  *
 * DESCRIPTION: helper method to populate a cam_area_t from a 5-element
 *              [xmin, ymin, xmax, ymax, weight] metadata region array
 *
 * PARAMETERS :
 *   @roi      : destination cam_area_t to fill in
 *   @settings : capture request settings containing the region tag
 *   @tag      : metadata tag to read the region from
   3150  *
   3151  *==========================================================================*/
   3152 void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
   3153                                                    const camera_metadata_t *settings,
   3154                                                    uint32_t tag){
   3155     CameraMetadata frame_settings;
   3156     frame_settings = settings;
   3157     int32_t x_min = frame_settings.find(tag).data.i32[0];
   3158     int32_t y_min = frame_settings.find(tag).data.i32[1];
   3159     int32_t x_max = frame_settings.find(tag).data.i32[2];
   3160     int32_t y_max = frame_settings.find(tag).data.i32[3];
   3161     roi->weight = frame_settings.find(tag).data.i32[4];
   3162     roi->rect.left = x_min;
   3163     roi->rect.top = y_min;
   3164     roi->rect.width = x_max - x_min;
   3165     roi->rect.height = y_max - y_min;
   3166 }
   3167 
   3168 /*===========================================================================
   3169  * FUNCTION   : resetIfNeededROI
   3170  *
   3171  * DESCRIPTION: helper method to reset the roi if it is greater than scaler
   3172  *              crop region
   3173  *
   3174  * PARAMETERS :
   3175  *   @roi       : cam_area_t struct to resize
   3176  *   @scalerCropRegion : cam_crop_region_t region to compare against
   3177  *
   3178  *
   3179  *==========================================================================*/
   3180 bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
   3181                                                  const cam_crop_region_t* scalerCropRegion)
   3182 {
   3183     int32_t roi_x_max = roi->rect.width + roi->rect.left;
   3184     int32_t roi_y_max = roi->rect.height + roi->rect.top;
   3185     int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
   3186     int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
   3187     if ((roi_x_max < scalerCropRegion->left) ||
   3188         (roi_y_max < scalerCropRegion->top)  ||
   3189         (roi->rect.left > crop_x_max) ||
   3190         (roi->rect.top > crop_y_max)){
   3191         return false;
   3192     }
   3193     if (roi->rect.left < scalerCropRegion->left) {
   3194         roi->rect.left = scalerCropRegion->left;
   3195     }
   3196     if (roi->rect.top < scalerCropRegion->top) {
   3197         roi->rect.top = scalerCropRegion->top;
   3198     }
   3199     if (roi_x_max > crop_x_max) {
   3200         roi_x_max = crop_x_max;
   3201     }
   3202     if (roi_y_max > crop_y_max) {
   3203         roi_y_max = crop_y_max;
   3204     }
   3205     roi->rect.width = roi_x_max - roi->rect.left;
   3206     roi->rect.height = roi_y_max - roi->rect.top;
   3207     return true;
   3208 }
   3209 
   3210 /*===========================================================================
   3211  * FUNCTION   : convertLandmarks
   3212  *
   3213  * DESCRIPTION: helper method to extract the landmarks from face detection info
   3214  *
   3215  * PARAMETERS :
 *   @face   : cam_face_detection_info_t face to read landmarks from
   3217  *   @landmarks : int32_t destination array
   3218  *
   3219  *
   3220  *==========================================================================*/
   3221 void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
   3222 {
   3223     landmarks[0] = face.left_eye_center.x;
   3224     landmarks[1] = face.left_eye_center.y;
   3225     landmarks[2] = face.right_eye_center.x;
   3226     landmarks[3] = face.right_eye_center.y;
   3227     landmarks[4] = face.mouth_center.x;
   3228     landmarks[5] = face.mouth_center.y;
   3229 }
   3230 
   3231 #define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
   3232 /*===========================================================================
   3233  * FUNCTION   : initCapabilities
   3234  *
   3235  * DESCRIPTION: initialize camera capabilities in static data struct
   3236  *
   3237  * PARAMETERS :
   3238  *   @cameraId  : camera Id
   3239  *
   3240  * RETURN     : int32_t type of status
   3241  *              NO_ERROR  -- success
   3242  *              none-zero failure code
   3243  *==========================================================================*/
   3244 int QCamera3HardwareInterface::initCapabilities(int cameraId)
   3245 {
   3246     int rc = 0;
   3247     mm_camera_vtbl_t *cameraHandle = NULL;
   3248     QCamera3HeapMemory *capabilityHeap = NULL;
   3249 
   3250     cameraHandle = camera_open(cameraId);
   3251     if (!cameraHandle) {
   3252         ALOGE("%s: camera_open failed", __func__);
   3253         rc = -1;
   3254         goto open_failed;
   3255     }
   3256 
   3257     capabilityHeap = new QCamera3HeapMemory();
   3258     if (capabilityHeap == NULL) {
   3259         ALOGE("%s: creation of capabilityHeap failed", __func__);
   3260         goto heap_creation_failed;
   3261     }
   3262     /* Allocate memory for capability buffer */
   3263     rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
   3264     if(rc != OK) {
   3265         ALOGE("%s: No memory for cappability", __func__);
   3266         goto allocate_failed;
   3267     }
   3268 
   3269     /* Map memory for capability buffer */
   3270     memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
   3271     rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
   3272                                 CAM_MAPPING_BUF_TYPE_CAPABILITY,
   3273                                 capabilityHeap->getFd(0),
   3274                                 sizeof(cam_capability_t));
   3275     if(rc < 0) {
   3276         ALOGE("%s: failed to map capability buffer", __func__);
   3277         goto map_failed;
   3278     }
   3279 
   3280     /* Query Capability */
   3281     rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
   3282     if(rc < 0) {
   3283         ALOGE("%s: failed to query capability",__func__);
   3284         goto query_failed;
   3285     }
   3286     gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
   3287     if (!gCamCapability[cameraId]) {
   3288         ALOGE("%s: out of memory", __func__);
   3289         goto query_failed;
   3290     }
   3291     memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
   3292                                         sizeof(cam_capability_t));
   3293     rc = 0;
   3294 
   3295 query_failed:
   3296     cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
   3297                             CAM_MAPPING_BUF_TYPE_CAPABILITY);
   3298 map_failed:
   3299     capabilityHeap->deallocate();
   3300 allocate_failed:
   3301     delete capabilityHeap;
   3302 heap_creation_failed:
   3303     cameraHandle->ops->close_camera(cameraHandle->camera_handle);
   3304     cameraHandle = NULL;
   3305 open_failed:
   3306     return rc;
   3307 }
   3308 
   3309 /*===========================================================================
   3310  * FUNCTION   : initParameters
   3311  *
   3312  * DESCRIPTION: initialize camera parameters
   3313  *
   3314  * PARAMETERS :
   3315  *
   3316  * RETURN     : int32_t type of status
   3317  *              NO_ERROR  -- success
   3318  *              none-zero failure code
   3319  *==========================================================================*/
   3320 int QCamera3HardwareInterface::initParameters()
   3321 {
   3322     int rc = 0;
   3323 
   3324     //Allocate Set Param Buffer
   3325     mParamHeap = new QCamera3HeapMemory();
   3326     rc = mParamHeap->allocate(1, sizeof(metadata_buffer_t), false);
   3327     if(rc != OK) {
   3328         rc = NO_MEMORY;
   3329         ALOGE("Failed to allocate SETPARM Heap memory");
   3330         delete mParamHeap;
   3331         mParamHeap = NULL;
   3332         return rc;
   3333     }
   3334 
   3335     //Map memory for parameters buffer
   3336     rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
   3337             CAM_MAPPING_BUF_TYPE_PARM_BUF,
   3338             mParamHeap->getFd(0),
   3339             sizeof(metadata_buffer_t));
   3340     if(rc < 0) {
   3341         ALOGE("%s:failed to map SETPARM buffer",__func__);
   3342         rc = FAILED_TRANSACTION;
   3343         mParamHeap->deallocate();
   3344         delete mParamHeap;
   3345         mParamHeap = NULL;
   3346         return rc;
   3347     }
   3348 
   3349     mParameters = (metadata_buffer_t*) DATA_PTR(mParamHeap,0);
   3350 
   3351     mPrevParameters = (metadata_buffer_t*)malloc(sizeof(metadata_buffer_t));
   3352     return rc;
   3353 }
   3354 
   3355 /*===========================================================================
   3356  * FUNCTION   : deinitParameters
   3357  *
   3358  * DESCRIPTION: de-initialize camera parameters
   3359  *
   3360  * PARAMETERS :
   3361  *
   3362  * RETURN     : NONE
   3363  *==========================================================================*/
   3364 void QCamera3HardwareInterface::deinitParameters()
   3365 {
   3366     mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
   3367             CAM_MAPPING_BUF_TYPE_PARM_BUF);
   3368 
   3369     mParamHeap->deallocate();
   3370     delete mParamHeap;
   3371     mParamHeap = NULL;
   3372 
   3373     mParameters = NULL;
   3374 
   3375     free(mPrevParameters);
   3376     mPrevParameters = NULL;
   3377 }
   3378 
   3379 /*===========================================================================
   3380  * FUNCTION   : calcMaxJpegSize
   3381  *
   3382  * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
   3383  *
   3384  * PARAMETERS :
   3385  *
   3386  * RETURN     : max_jpeg_size
   3387  *==========================================================================*/
   3388 int QCamera3HardwareInterface::calcMaxJpegSize()
   3389 {
   3390     int32_t max_jpeg_size = 0;
   3391     int temp_width, temp_height;
   3392     for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
   3393         temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
   3394         temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
   3395         if (temp_width * temp_height > max_jpeg_size ) {
   3396             max_jpeg_size = temp_width * temp_height;
   3397         }
   3398     }
   3399     max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
   3400     return max_jpeg_size;
   3401 }
   3402 
   3403 /*===========================================================================
   3404  * FUNCTION   : initStaticMetadata
   3405  *
   3406  * DESCRIPTION: initialize the static metadata
   3407  *
   3408  * PARAMETERS :
   3409  *   @cameraId  : camera Id
   3410  *
   3411  * RETURN     : int32_t type of status
   3412  *              0  -- success
   3413  *              non-zero failure code
   3414  *==========================================================================*/
   3415 int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
   3416 {
   3417     int rc = 0;
   3418     CameraMetadata staticInfo;
   3419 
   3420     int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
   3421 
   3422      /* android.info: hardware level */
   3423     uint8_t supportedHardwareLevel = (facingBack)? ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL:
   3424       ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED;
   3425     staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
   3426         &supportedHardwareLevel, 1);
   3427     /*HAL 3 only*/
   3428     staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
   3429                     &gCamCapability[cameraId]->min_focus_distance, 1);
   3430 
   3431     staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
   3432                     &gCamCapability[cameraId]->hyper_focal_distance, 1);
   3433 
   3434     /*should be using focal lengths but sensor doesn't provide that info now*/
   3435     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
   3436                       &gCamCapability[cameraId]->focal_length,
   3437                       1);
   3438 
   3439     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
   3440                       gCamCapability[cameraId]->apertures,
   3441                       gCamCapability[cameraId]->apertures_count);
   3442 
   3443     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
   3444                 gCamCapability[cameraId]->filter_densities,
   3445                 gCamCapability[cameraId]->filter_densities_count);
   3446 
   3447 
   3448     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
   3449                       (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
   3450                       gCamCapability[cameraId]->optical_stab_modes_count);
   3451 
   3452     staticInfo.update(ANDROID_LENS_POSITION,
   3453                       gCamCapability[cameraId]->lens_position,
   3454                       sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float));
   3455 
   3456     int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
   3457                                        gCamCapability[cameraId]->lens_shading_map_size.height};
   3458     staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
   3459                       lens_shading_map_size,
   3460                       sizeof(lens_shading_map_size)/sizeof(int32_t));
   3461 
   3462     staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
   3463             gCamCapability[cameraId]->sensor_physical_size, 2);
   3464 
   3465     staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
   3466             gCamCapability[cameraId]->exposure_time_range, 2);
   3467 
   3468     staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
   3469             &gCamCapability[cameraId]->max_frame_duration, 1);
   3470 
   3471     camera_metadata_rational baseGainFactor = {
   3472             gCamCapability[cameraId]->base_gain_factor.numerator,
   3473             gCamCapability[cameraId]->base_gain_factor.denominator};
   3474     staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
   3475                       &baseGainFactor, 1);
   3476 
   3477     staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
   3478                      (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);
   3479 
   3480     int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
   3481                                   gCamCapability[cameraId]->pixel_array_size.height};
   3482     staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
   3483                       pixel_array_size, 2);
   3484 
   3485     int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
   3486                                                 gCamCapability[cameraId]->active_array_size.top,
   3487                                                 gCamCapability[cameraId]->active_array_size.width,
   3488                                                 gCamCapability[cameraId]->active_array_size.height};
   3489     staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
   3490                       active_array_size, 4);
   3491 
   3492     staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
   3493             &gCamCapability[cameraId]->white_level, 1);
   3494 
   3495     staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
   3496             gCamCapability[cameraId]->black_level_pattern, 4);
   3497 
   3498     staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
   3499                       &gCamCapability[cameraId]->flash_charge_duration, 1);
   3500 
   3501     staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
   3502                       &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
   3503 
   3504     int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
   3505     staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
   3506                       (int32_t*)&maxFaces, 1);
   3507 
   3508     staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
   3509                       &gCamCapability[cameraId]->histogram_size, 1);
   3510 
   3511     staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
   3512             &gCamCapability[cameraId]->max_histogram_count, 1);
   3513 
   3514     int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
   3515                                     gCamCapability[cameraId]->sharpness_map_size.height};
   3516 
   3517     staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
   3518             sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
   3519 
   3520     staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
   3521             &gCamCapability[cameraId]->max_sharpness_map_value, 1);
   3522 
   3523     int32_t scalar_formats[] = {
   3524             ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
   3525             ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
   3526             ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
   3527             ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
   3528             HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
   3529     int scalar_formats_count = sizeof(scalar_formats)/sizeof(int32_t);
   3530     staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
   3531                       scalar_formats,
   3532                       scalar_formats_count);
   3533 
   3534     int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
   3535     makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
   3536               gCamCapability[cameraId]->picture_sizes_tbl_cnt,
   3537               available_processed_sizes);
   3538     staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
   3539                 available_processed_sizes,
   3540                 (gCamCapability[cameraId]->picture_sizes_tbl_cnt) * 2);
   3541 
   3542     int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
   3543     makeTable(gCamCapability[cameraId]->raw_dim,
   3544               gCamCapability[cameraId]->supported_raw_dim_cnt,
   3545               available_raw_sizes);
   3546     staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
   3547                 available_raw_sizes,
   3548                 gCamCapability[cameraId]->supported_raw_dim_cnt * 2);
   3549 
   3550     int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
   3551     makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
   3552                  gCamCapability[cameraId]->fps_ranges_tbl_cnt,
   3553                  available_fps_ranges);
   3554     staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
   3555             available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );
   3556 
   3557     camera_metadata_rational exposureCompensationStep = {
   3558             gCamCapability[cameraId]->exp_compensation_step.numerator,
   3559             gCamCapability[cameraId]->exp_compensation_step.denominator};
   3560     staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
   3561                       &exposureCompensationStep, 1);
   3562 
   3563     /*TO DO*/
   3564     uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
   3565     staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
   3566                       availableVstabModes, sizeof(availableVstabModes));
   3567 
   3568     /*HAL 1 and HAL 3 common*/
   3569     float maxZoom = 4;
   3570     staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
   3571             &maxZoom, 1);
   3572 
   3573     uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_FREEFORM;
   3574     staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
   3575 
   3576     int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
   3577     if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
   3578         max3aRegions[2] = 0; /* AF not supported */
   3579     staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
   3580             max3aRegions, 3);
   3581 
   3582     uint8_t availableFaceDetectModes[] = {
   3583             ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
   3584             ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE };
   3585     staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
   3586                       availableFaceDetectModes,
   3587                       sizeof(availableFaceDetectModes));
   3588 
   3589     int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
   3590                                            gCamCapability[cameraId]->exposure_compensation_max};
   3591     staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
   3592             exposureCompensationRange,
   3593             sizeof(exposureCompensationRange)/sizeof(int32_t));
   3594 
   3595     uint8_t lensFacing = (facingBack) ?
   3596             ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
   3597     staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
   3598 
   3599     staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
   3600                 available_processed_sizes,
   3601                 (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2));
   3602 
   3603     staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
   3604                       available_thumbnail_sizes,
   3605                       sizeof(available_thumbnail_sizes)/sizeof(int32_t));
   3606 
   3607     /*android.scaler.availableStreamConfigurations*/
   3608     int32_t max_stream_configs_size =
   3609             gCamCapability[cameraId]->picture_sizes_tbl_cnt *
   3610             sizeof(scalar_formats)/sizeof(int32_t) * 4;
   3611     int32_t available_stream_configs[max_stream_configs_size];
   3612     int idx = 0;
   3613     for (int j = 0; j < scalar_formats_count; j++) {
   3614         switch (scalar_formats[j]) {
   3615         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
   3616         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
   3617             for (int i = 0;
   3618                 i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
   3619                 available_stream_configs[idx] = scalar_formats[j];
   3620                 available_stream_configs[idx+1] =
   3621                     gCamCapability[cameraId]->raw_dim[i].width;
   3622                 available_stream_configs[idx+2] =
   3623                     gCamCapability[cameraId]->raw_dim[i].height;
   3624                 available_stream_configs[idx+3] =
   3625                     ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;
   3626                 idx+=4;
   3627             }
   3628             break;
   3629         default:
   3630             for (int i = 0;
   3631                 i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
   3632                 available_stream_configs[idx] = scalar_formats[j];
   3633                 available_stream_configs[idx+1] =
   3634                     gCamCapability[cameraId]->picture_sizes_tbl[i].width;
   3635                 available_stream_configs[idx+2] =
   3636                     gCamCapability[cameraId]->picture_sizes_tbl[i].height;
   3637                 available_stream_configs[idx+3] =
   3638                     ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;
   3639                 idx+=4;
   3640             }
   3641 
   3642 
   3643             break;
   3644         }
   3645     }
   3646     staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
   3647                       available_stream_configs, idx);
   3648     static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
   3649     staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
   3650 
   3651     static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
   3652     staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
   3653 
   3654     /* android.scaler.availableMinFrameDurations */
   3655     int64_t available_min_durations[max_stream_configs_size];
   3656     idx = 0;
   3657     for (int j = 0; j < scalar_formats_count; j++) {
   3658         switch (scalar_formats[j]) {
   3659         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
   3660         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
   3661             for (int i = 0;
   3662                 i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
   3663                 available_min_durations[idx] = scalar_formats[j];
   3664                 available_min_durations[idx+1] =
   3665                     gCamCapability[cameraId]->raw_dim[i].width;
   3666                 available_min_durations[idx+2] =
   3667                     gCamCapability[cameraId]->raw_dim[i].height;
   3668                 available_min_durations[idx+3] =
   3669                     gCamCapability[cameraId]->raw_min_duration[i];
   3670                 idx+=4;
   3671             }
   3672             break;
   3673         default:
   3674             for (int i = 0;
   3675                 i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
   3676                 available_min_durations[idx] = scalar_formats[j];
   3677                 available_min_durations[idx+1] =
   3678                     gCamCapability[cameraId]->picture_sizes_tbl[i].width;
   3679                 available_min_durations[idx+2] =
   3680                     gCamCapability[cameraId]->picture_sizes_tbl[i].height;
   3681                 available_min_durations[idx+3] =
   3682                     gCamCapability[cameraId]->picture_min_duration[i];
   3683                 idx+=4;
   3684             }
   3685             break;
   3686         }
   3687     }
   3688     staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
   3689                       &available_min_durations[0], idx);
   3690 
   3691     int32_t max_jpeg_size = 0;
   3692     int temp_width, temp_height;
   3693     for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
   3694         temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
   3695         temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
   3696         if (temp_width * temp_height > max_jpeg_size ) {
   3697             max_jpeg_size = temp_width * temp_height;
   3698         }
   3699     }
   3700     max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
   3701     staticInfo.update(ANDROID_JPEG_MAX_SIZE,
   3702                       &max_jpeg_size, 1);
   3703 
   3704     uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
   3705     size_t size = 0;
   3706     for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
   3707         int32_t val = lookupFwkName(EFFECT_MODES_MAP,
   3708                                    sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
   3709                                    gCamCapability[cameraId]->supported_effects[i]);
   3710         if (val != NAME_NOT_FOUND) {
   3711             avail_effects[size] = (uint8_t)val;
   3712             size++;
   3713         }
   3714     }
   3715     staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
   3716                       avail_effects,
   3717                       size);
   3718 
   3719     uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
   3720     uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
   3721     int32_t supported_scene_modes_cnt = 0;
   3722     for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
   3723         int32_t val = lookupFwkName(SCENE_MODES_MAP,
   3724                                 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
   3725                                 gCamCapability[cameraId]->supported_scene_modes[i]);
   3726         if (val != NAME_NOT_FOUND) {
   3727             avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
   3728             supported_indexes[supported_scene_modes_cnt] = i;
   3729             supported_scene_modes_cnt++;
   3730         }
   3731     }
   3732 
   3733     staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
   3734                       avail_scene_modes,
   3735                       supported_scene_modes_cnt);
   3736 
   3737     uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
   3738     makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
   3739                       supported_scene_modes_cnt,
   3740                       scene_mode_overrides,
   3741                       supported_indexes,
   3742                       cameraId);
   3743     staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
   3744                       scene_mode_overrides,
   3745                       supported_scene_modes_cnt*3);
   3746 
   3747     uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
   3748     size = 0;
   3749     for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
   3750         int32_t val = lookupFwkName(ANTIBANDING_MODES_MAP,
   3751                                  sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
   3752                                  gCamCapability[cameraId]->supported_antibandings[i]);
   3753         if (val != NAME_NOT_FOUND) {
   3754             avail_antibanding_modes[size] = (uint8_t)val;
   3755             size++;
   3756         }
   3757 
   3758     }
   3759     staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
   3760                       avail_antibanding_modes,
   3761                       size);
   3762 
   3763     uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
   3764     size = 0;
   3765     for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
   3766         int32_t val = lookupFwkName(FOCUS_MODES_MAP,
   3767                                 sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
   3768                                 gCamCapability[cameraId]->supported_focus_modes[i]);
   3769         if (val != NAME_NOT_FOUND) {
   3770             avail_af_modes[size] = (uint8_t)val;
   3771             size++;
   3772         }
   3773     }
   3774     staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
   3775                       avail_af_modes,
   3776                       size);
   3777 
   3778     uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
   3779     size = 0;
   3780     for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
   3781         int32_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
   3782                                     sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
   3783                                     gCamCapability[cameraId]->supported_white_balances[i]);
   3784         if (val != NAME_NOT_FOUND) {
   3785             avail_awb_modes[size] = (uint8_t)val;
   3786             size++;
   3787         }
   3788     }
   3789     staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
   3790                       avail_awb_modes,
   3791                       size);
   3792 
   3793     uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
   3794     for (int i = 0; i < gCamCapability[cameraId]->supported_flash_firing_level_cnt; i++)
   3795       available_flash_levels[i] = gCamCapability[cameraId]->supported_firing_levels[i];
   3796 
   3797     staticInfo.update(ANDROID_FLASH_FIRING_POWER,
   3798             available_flash_levels,
   3799             gCamCapability[cameraId]->supported_flash_firing_level_cnt);
   3800 
   3801     uint8_t flashAvailable;
   3802     if (gCamCapability[cameraId]->flash_available)
   3803         flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
   3804     else
   3805         flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
   3806     staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
   3807             &flashAvailable, 1);
   3808 
   3809     uint8_t avail_ae_modes[5];
   3810     size = 0;
   3811     for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
   3812         avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
   3813         size++;
   3814     }
   3815     if (flashAvailable) {
   3816         avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
   3817         avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
   3818         avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
   3819     }
   3820     staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
   3821                       avail_ae_modes,
   3822                       size);
   3823 
   3824     int32_t sensitivity_range[2];
   3825     sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
   3826     sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
   3827     staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
   3828                       sensitivity_range,
   3829                       sizeof(sensitivity_range) / sizeof(int32_t));
   3830 
   3831     staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
   3832                       &gCamCapability[cameraId]->max_analog_sensitivity,
   3833                       1);
   3834 
   3835     int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
   3836     staticInfo.update(ANDROID_SENSOR_ORIENTATION,
   3837                       &sensor_orientation,
   3838                       1);
   3839 
   3840     int32_t max_output_streams[3] = {1, 3, 1};
   3841     staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
   3842                       max_output_streams,
   3843                       3);
   3844 
   3845     uint8_t avail_leds = 0;
   3846     staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
   3847                       &avail_leds, 0);
   3848 
   3849     uint8_t focus_dist_calibrated;
   3850     int32_t val = lookupFwkName(FOCUS_CALIBRATION_MAP,
   3851             sizeof(FOCUS_CALIBRATION_MAP)/sizeof(FOCUS_CALIBRATION_MAP[0]),
   3852             gCamCapability[cameraId]->focus_dist_calibrated);
   3853     if (val != NAME_NOT_FOUND) {
   3854         focus_dist_calibrated = (uint8_t)val;
   3855         staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
   3856                      &focus_dist_calibrated, 1);
   3857     }
   3858 
   3859     int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
   3860     size = 0;
   3861     for (int i = 0; i < gCamCapability[cameraId]->supported_test_pattern_modes_cnt;
   3862             i++) {
   3863         int32_t val = lookupFwkName(TEST_PATTERN_MAP,
   3864                                     sizeof(TEST_PATTERN_MAP)/sizeof(TEST_PATTERN_MAP[0]),
   3865                                     gCamCapability[cameraId]->supported_test_pattern_modes[i]);
   3866         if (val != NAME_NOT_FOUND) {
   3867             avail_testpattern_modes[size] = val;
   3868             size++;
   3869         }
   3870     }
   3871     staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
   3872                       avail_testpattern_modes,
   3873                       size);
   3874 
   3875     uint8_t max_pipeline_depth = kMaxInFlight + EMPTY_PIPELINE_DELAY;
   3876     staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
   3877                       &max_pipeline_depth,
   3878                       1);
   3879 
   3880     int32_t partial_result_count = 2;
   3881     staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
   3882                       &partial_result_count,
   3883                        1);
   3884 
   3885     uint8_t available_capabilities[MAX_AVAILABLE_CAPABILITIES];
   3886     uint8_t available_capabilities_count = 0;
   3887     available_capabilities[available_capabilities_count++] = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE;
   3888     available_capabilities[available_capabilities_count++] = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR;
   3889     available_capabilities[available_capabilities_count++] = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING;
   3890     available_capabilities[available_capabilities_count++] = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS;
   3891     available_capabilities[available_capabilities_count++] = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE;
   3892     if (facingBack) {
   3893         available_capabilities[available_capabilities_count++] = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW;
   3894     }
   3895     staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
   3896                       available_capabilities,
   3897                       available_capabilities_count);
   3898 
   3899     int32_t max_input_streams = 0;
   3900     staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
   3901                       &max_input_streams,
   3902                       1);
   3903 
   3904     int32_t io_format_map[] = {};
   3905     staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
   3906                       io_format_map, 0);
   3907 
   3908     int32_t max_latency = (facingBack)? ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL:CAM_MAX_SYNC_LATENCY;
   3909     staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
   3910                       &max_latency,
   3911                       1);
   3912 
   3913     float optical_axis_angle[2];
   3914     optical_axis_angle[0] = 0; //need to verify
   3915     optical_axis_angle[1] = 0; //need to verify
   3916     staticInfo.update(ANDROID_LENS_OPTICAL_AXIS_ANGLE,
   3917                       optical_axis_angle,
   3918                       2);
   3919 
   3920     uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST};
   3921     staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
   3922                       available_hot_pixel_modes,
   3923                       1);
   3924 
   3925     uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
   3926                                       ANDROID_EDGE_MODE_FAST};
   3927     staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
   3928                       available_edge_modes,
   3929                       2);
   3930 
   3931     uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
   3932                                            ANDROID_NOISE_REDUCTION_MODE_FAST};
   3933     staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
   3934                       available_noise_red_modes,
   3935                       2);
   3936 
   3937     uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
   3938                                          ANDROID_TONEMAP_MODE_FAST};
   3939     staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
   3940                       available_tonemap_modes,
   3941                       2);
   3942 
   3943     uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
   3944     staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
   3945                       available_hot_pixel_map_modes,
   3946                       1);
   3947 
   3948     uint8_t fwkReferenceIlluminant = lookupFwkName(REFERENCE_ILLUMINANT_MAP,
   3949         sizeof(REFERENCE_ILLUMINANT_MAP) / sizeof(REFERENCE_ILLUMINANT_MAP[0]),
   3950         gCamCapability[cameraId]->reference_illuminant1);
   3951     staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1,
   3952                       &fwkReferenceIlluminant, 1);
   3953 
   3954     fwkReferenceIlluminant = lookupFwkName(REFERENCE_ILLUMINANT_MAP,
   3955         sizeof(REFERENCE_ILLUMINANT_MAP) / sizeof(REFERENCE_ILLUMINANT_MAP[0]),
   3956         gCamCapability[cameraId]->reference_illuminant2);
   3957     staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
   3958                       &fwkReferenceIlluminant, 1);
   3959 
   3960     staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1,
   3961                       (camera_metadata_rational_t*)gCamCapability[cameraId]->forward_matrix1,
   3962                       3*3);
   3963 
   3964     staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2,
   3965                       (camera_metadata_rational_t*)gCamCapability[cameraId]->forward_matrix2,
   3966                       3*3);
   3967 
   3968     staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1,
   3969                    (camera_metadata_rational_t*) gCamCapability[cameraId]->color_transform1,
   3970                       3*3);
   3971 
   3972     staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2,
   3973                    (camera_metadata_rational_t*) gCamCapability[cameraId]->color_transform2,
   3974                       3*3);
   3975 
   3976     staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
   3977                    (camera_metadata_rational_t*) gCamCapability[cameraId]->calibration_transform1,
   3978                       3*3);
   3979 
   3980     staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2,
   3981                    (camera_metadata_rational_t*) gCamCapability[cameraId]->calibration_transform2,
   3982                       3*3);
   3983 
   3984     int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
   3985        ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
   3986        ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
   3987        ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
   3988        ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
   3989        ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
   3990        ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
   3991        ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
   3992        ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
   3993        ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
   3994        ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
   3995        ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE, ANDROID_EDGE_STRENGTH,
   3996        ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
   3997        ANDROID_JPEG_GPS_COORDINATES,
   3998        ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
   3999        ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
   4000        ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
   4001        ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
   4002        ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
   4003        ANDROID_NOISE_REDUCTION_STRENGTH, ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
   4004        ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
   4005        ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
   4006        ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
   4007        ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
   4008        ANDROID_SHADING_STRENGTH, ANDROID_STATISTICS_FACE_DETECT_MODE,
   4009        ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
   4010        ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
   4011        ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
   4012        ANDROID_BLACK_LEVEL_LOCK };
   4013 
   4014     size_t request_keys_cnt =
   4015             sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
   4016     //NOTE: Please increase available_request_keys array size before
   4017     //adding any new entries.
   4018     int32_t available_request_keys[request_keys_cnt+1];
   4019     memcpy(available_request_keys, request_keys_basic,
   4020             sizeof(request_keys_basic));
   4021     if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
   4022         available_request_keys[request_keys_cnt++] =
   4023                 ANDROID_CONTROL_AF_REGIONS;
   4024     }
   4025     //NOTE: Please increase available_request_keys array size before
   4026     //adding any new entries.
   4027     staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
   4028                       available_request_keys, request_keys_cnt);
   4029 
   4030     int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
   4031        ANDROID_COLOR_CORRECTION_GAINS, ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
   4032        ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
   4033        ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
   4034        ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
   4035        ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
   4036        ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
   4037        ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
   4038        ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
   4039        ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
   4040        ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
   4041        ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
   4042        ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
   4043        ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
   4044        ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
   4045        ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
   4046        ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
   4047        ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
   4048        ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
   4049        ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
   4050        ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
   4051        ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_IDS,
   4052        ANDROID_STATISTICS_FACE_LANDMARKS, ANDROID_STATISTICS_FACE_RECTANGLES,
   4053        ANDROID_STATISTICS_FACE_SCORES,
   4054        ANDROID_SENSOR_NOISE_PROFILE,
   4055        ANDROID_SENSOR_GREEN_SPLIT};
   4056     size_t result_keys_cnt =
   4057             sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
   4058     //NOTE: Please increase available_result_keys array size before
   4059     //adding any new entries.
   4060     int32_t available_result_keys[result_keys_cnt+1];
   4061     memcpy(available_result_keys, result_keys_basic,
   4062             sizeof(result_keys_basic));
   4063     if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
   4064         available_result_keys[result_keys_cnt++] =
   4065                 ANDROID_CONTROL_AF_REGIONS;
   4066     }
   4067     //NOTE: Please increase available_result_keys array size before
   4068     //adding any new entries.
   4069 
   4070     staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
   4071                       available_result_keys, result_keys_cnt);
   4072 
   4073     int32_t available_characteristics_keys[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
   4074        ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
   4075        ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
   4076        ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
   4077        ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
   4078        ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
   4079        ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
   4080        ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
   4081        ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
   4082        ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
   4083        ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
   4084        ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
   4085        ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
   4086        ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
   4087        ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
   4088        ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
   4089        ANDROID_LENS_FACING, ANDROID_LENS_OPTICAL_AXIS_ANGLE,ANDROID_LENS_POSITION,
   4090        ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
   4091        ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
   4092        ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
   4093        ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
   4094        ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
   4095        ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
   4096        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
   4097        ANDROID_SCALER_CROPPING_TYPE,
   4098        /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
   4099        ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
   4100        ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
   4101        ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
   4102        ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
   4103        ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
   4104        ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
   4105        ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
   4106        ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
   4107        ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
   4108        ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
   4109        ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
   4110        ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
   4111        ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
   4112        ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
   4113        ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
   4114        ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
   4115        ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
   4116        ANDROID_EDGE_AVAILABLE_EDGE_MODES,
   4117        ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
   4118        ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
   4119        ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
   4120        ANDROID_TONEMAP_MAX_CURVE_POINTS, ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
   4121        ANDROID_SYNC_MAX_LATENCY };
   4122     staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
   4123                       available_characteristics_keys,
   4124                       sizeof(available_characteristics_keys)/sizeof(int32_t));
   4125 
   4126     /*available stall durations depend on the hw + sw and will be different for different devices */
   4127     /*have to add for raw after implementation*/
   4128     int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
   4129     size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
   4130 
   4131     size_t available_stall_size = gCamCapability[cameraId]->picture_sizes_tbl_cnt * 4;
   4132     int64_t available_stall_durations[available_stall_size];
   4133     idx = 0;
   4134     for (uint32_t j = 0; j < stall_formats_count; j++) {
   4135        if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
   4136           for (uint32_t i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
   4137              available_stall_durations[idx]   = stall_formats[j];
   4138              available_stall_durations[idx+1] = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
   4139              available_stall_durations[idx+2] = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
   4140              available_stall_durations[idx+3] = gCamCapability[cameraId]->jpeg_stall_durations[i];
   4141              idx+=4;
   4142           }
   4143        } else {
   4144           for (uint32_t i = 0; i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
   4145              available_stall_durations[idx]   = stall_formats[j];
   4146              available_stall_durations[idx+1] = gCamCapability[cameraId]->raw_dim[i].width;
   4147              available_stall_durations[idx+2] = gCamCapability[cameraId]->raw_dim[i].height;
   4148              available_stall_durations[idx+3] = gCamCapability[cameraId]->raw16_stall_durations[i];
   4149              idx+=4;
   4150           }
   4151        }
   4152     }
   4153     staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
   4154                       available_stall_durations,
   4155                       idx);
   4156 
   4157     uint8_t available_correction_modes[] =
   4158         {ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF};
   4159     staticInfo.update(
   4160         ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
   4161         available_correction_modes,
   4162         1);
   4163 
   4164     uint8_t sensor_timestamp_source[] =
   4165         {ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN};
   4166     staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
   4167                       sensor_timestamp_source,
   4168                       1);
   4169 
   4170     //QCAMERA3_OPAQUE_RAW
   4171     uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
   4172     cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
   4173     switch (gCamCapability[cameraId]->opaque_raw_fmt) {
   4174     case LEGACY_RAW:
   4175         if (gCamCapability[cameraId]->white_level == (1<<8)-1)
   4176             fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
   4177         else if (gCamCapability[cameraId]->white_level == (1<<10)-1)
   4178             fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
   4179         else if (gCamCapability[cameraId]->white_level == (1<<12)-1)
   4180             fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
   4181         raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
   4182         break;
   4183     case MIPI_RAW:
   4184         if (gCamCapability[cameraId]->white_level == (1<<8)-1)
   4185             fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
   4186         else if (gCamCapability[cameraId]->white_level == (1<<10)-1)
   4187             fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
   4188         else if (gCamCapability[cameraId]->white_level == (1<<12)-1)
   4189             fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
   4190         raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
   4191         break;
   4192     default:
   4193         ALOGE("%s: unknown opaque_raw_format %d", __func__,
   4194                 gCamCapability[cameraId]->opaque_raw_fmt);
   4195         break;
   4196     }
   4197     staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
   4198 
   4199     int32_t strides[3*gCamCapability[cameraId]->supported_raw_dim_cnt];
   4200     for (size_t i = 0; i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
   4201         cam_stream_buf_plane_info_t buf_planes;
   4202         strides[i*3] = gCamCapability[cameraId]->raw_dim[i].width;
   4203         strides[i*3+1] = gCamCapability[cameraId]->raw_dim[i].height;
   4204         mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
   4205             &gCamCapability[cameraId]->padding_info, &buf_planes);
   4206         strides[i*3+2] = buf_planes.plane_info.mp[0].stride;
   4207     }
   4208     staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides,
   4209             3*gCamCapability[cameraId]->supported_raw_dim_cnt);
   4210 
   4211     gStaticMetadata[cameraId] = staticInfo.release();
   4212     return rc;
   4213 }
   4214 
   4215 /*===========================================================================
   4216  * FUNCTION   : makeTable
   4217  *
   4218  * DESCRIPTION: make a table of sizes
   4219  *
   4220  * PARAMETERS :
   4221  *
   4222  *
   4223  *==========================================================================*/
   4224 void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
   4225                                           int32_t* sizeTable)
   4226 {
   4227     int j = 0;
   4228     for (int i = 0; i < size; i++) {
   4229         sizeTable[j] = dimTable[i].width;
   4230         sizeTable[j+1] = dimTable[i].height;
   4231         j+=2;
   4232     }
   4233 }
   4234 
   4235 /*===========================================================================
   4236  * FUNCTION   : makeFPSTable
   4237  *
   4238  * DESCRIPTION: make a table of fps ranges
   4239  *
   4240  * PARAMETERS :
   4241  *
   4242  *==========================================================================*/
   4243 void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
   4244                                           int32_t* fpsRangesTable)
   4245 {
   4246     int j = 0;
   4247     for (int i = 0; i < size; i++) {
   4248         fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
   4249         fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
   4250         j+=2;
   4251     }
   4252 }
   4253 
   4254 /*===========================================================================
   4255  * FUNCTION   : makeOverridesList
   4256  *
   4257  * DESCRIPTION: make a list of scene mode overrides
   4258  *
   4259  * PARAMETERS :
   4260  *
   4261  *
   4262  *==========================================================================*/
void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
                                                  uint8_t size, uint8_t* overridesList,
                                                  uint8_t* supported_indexes,
                                                  int camera_id)
{
    /*daemon will give a list of overrides for all scene modes.
      However we should send the fwk only the overrides for the scene modes
      supported by the framework*/
    // Output layout: one (AE, AWB, AF) triple per scene mode, so
    // overridesList must hold 3*size bytes. supported_indexes[i] selects
    // which daemon-side entry corresponds to the i-th framework scene mode.
    int j = 0, index = 0, supt = 0;
    uint8_t focus_override;
    for (int i = 0; i < size; i++) {
        supt = 0;
        index = supported_indexes[i];
        // AE override: advertise auto-flash only when this module has a flash unit.
        overridesList[j] = gCamCapability[camera_id]->flash_available ? ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:ANDROID_CONTROL_AE_MODE_ON;
        // AWB override: translate the backend awb enum to the framework enum.
        overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
                                 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
                                                    overridesTable[index].awb_mode);
        focus_override = (uint8_t)overridesTable[index].af_mode;
        // Only report the AF override if this camera actually supports that
        // focus mode; otherwise fall back to AF off.
        for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) {
           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
              supt = 1;
              break;
           }
        }
        if (supt) {
           overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
                                              sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
                                              focus_override);
        } else {
           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
        }
        j+=3;
    }
}
   4297 
/*===========================================================================
 * FUNCTION   : getScalarFormat
 *
 * DESCRIPTION: convert the format from the backend to a type recognized by
 *              the framework
 *
 * PARAMETERS : format : the format from the backend
 *
 * RETURN     : format recognized by the framework
 *
 *==========================================================================*/
   4308 int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
   4309 {
   4310     int32_t halPixelFormat;
   4311 
   4312     switch (format) {
   4313     case CAM_FORMAT_YUV_420_NV12:
   4314         halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
   4315         break;
   4316     case CAM_FORMAT_YUV_420_NV21:
   4317         halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
   4318         break;
   4319     case CAM_FORMAT_YUV_420_NV21_ADRENO:
   4320         halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
   4321         break;
   4322     case CAM_FORMAT_YUV_420_YV12:
   4323         halPixelFormat = HAL_PIXEL_FORMAT_YV12;
   4324         break;
   4325     case CAM_FORMAT_YUV_422_NV16:
   4326     case CAM_FORMAT_YUV_422_NV61:
   4327     default:
   4328         halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
   4329         break;
   4330     }
   4331     return halPixelFormat;
   4332 }
   4333 /*===========================================================================
   4334  * FUNCTION   : computeNoiseModelEntryS
   4335  *
   4336  * DESCRIPTION: function to map a given sensitivity to the S noise
   4337  *              model parameters in the DNG noise model.
   4338  *
   4339  * PARAMETERS : sens : the sensor sensitivity
   4340  *
   4341  ** RETURN    : S (sensor amplification) noise
   4342  *
   4343  *==========================================================================*/
   4344 
   4345 double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
   4346    double s = 1.693069e-06 * sens + 3.480007e-05;
   4347    return s < 0.0 ? 0.0 : s;
   4348 }
   4349 
   4350 /*===========================================================================
   4351  * FUNCTION   : computeNoiseModelEntryO
   4352  *
   4353  * DESCRIPTION: function to map a given sensitivity to the O noise
   4354  *              model parameters in the DNG noise model.
   4355  *
   4356  * PARAMETERS : sens : the sensor sensitivity
   4357  *
   4358  ** RETURN    : O (sensor readout) noise
   4359  *
   4360  *==========================================================================*/
   4361 
   4362 double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
   4363    double o = 1.301416e-07 * sens + -2.262256e-04;
   4364    return o < 0.0 ? 0.0 : o
   4365 ;}
   4366 
   4367 /*===========================================================================
   4368  * FUNCTION   : getSensorSensitivity
   4369  *
   4370  * DESCRIPTION: convert iso_mode to an integer value
   4371  *
   4372  * PARAMETERS : iso_mode : the iso_mode supported by sensor
   4373  *
   4374  ** RETURN    : sensitivity supported by sensor
   4375  *
   4376  *==========================================================================*/
   4377 int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
   4378 {
   4379     int32_t sensitivity;
   4380 
   4381     switch (iso_mode) {
   4382     case CAM_ISO_MODE_100:
   4383         sensitivity = 100;
   4384         break;
   4385     case CAM_ISO_MODE_200:
   4386         sensitivity = 200;
   4387         break;
   4388     case CAM_ISO_MODE_400:
   4389         sensitivity = 400;
   4390         break;
   4391     case CAM_ISO_MODE_800:
   4392         sensitivity = 800;
   4393         break;
   4394     case CAM_ISO_MODE_1600:
   4395         sensitivity = 1600;
   4396         break;
   4397     default:
   4398         sensitivity = -1;
   4399         break;
   4400     }
   4401     return sensitivity;
   4402 }
   4403 
   4404 /*===========================================================================
   4405  * FUNCTION   : AddSetMetaEntryToBatch
   4406  *
   4407  * DESCRIPTION: add set parameter entry into batch
   4408  *
   4409  * PARAMETERS :
   4410  *   @p_table     : ptr to parameter buffer
   4411  *   @paramType   : parameter type
   4412  *   @paramLength : length of parameter value
   4413  *   @paramValue  : ptr to parameter value
   4414  *
   4415  * RETURN     : int32_t type of status
   4416  *              NO_ERROR  -- success
 *              non-zero failure code
   4418  *==========================================================================*/
   4419 int32_t QCamera3HardwareInterface::AddSetMetaEntryToBatch(metadata_buffer_t *p_table,
   4420                                                           unsigned int paramType,
   4421                                                           uint32_t paramLength,
   4422                                                           void *paramValue)
   4423 {
   4424     int position = paramType;
   4425     int current, next;
   4426 
   4427     /*************************************************************************
   4428     *                 Code to take care of linking next flags                *
   4429     *************************************************************************/
   4430     current = GET_FIRST_PARAM_ID(p_table);
   4431     if (position == current){
   4432         //DO NOTHING
   4433     } else if (position < current){
   4434         SET_NEXT_PARAM_ID(position, p_table, current);
   4435         SET_FIRST_PARAM_ID(p_table, position);
   4436     } else {
   4437         /* Search for the position in the linked list where we need to slot in*/
   4438         while (position > GET_NEXT_PARAM_ID(current, p_table))
   4439             current = GET_NEXT_PARAM_ID(current, p_table);
   4440 
   4441         /*If node already exists no need to alter linking*/
   4442         if (position != GET_NEXT_PARAM_ID(current, p_table)) {
   4443             next = GET_NEXT_PARAM_ID(current, p_table);
   4444             SET_NEXT_PARAM_ID(current, p_table, position);
   4445             SET_NEXT_PARAM_ID(position, p_table, next);
   4446         }
   4447     }
   4448 
   4449     /*************************************************************************
   4450     *                   Copy contents into entry                             *
   4451     *************************************************************************/
   4452 
   4453     if (paramLength > sizeof(parm_type_t)) {
   4454         ALOGE("%s:Size of input larger than max entry size",__func__);
   4455         return BAD_VALUE;
   4456     }
   4457     memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
   4458     SET_PARM_VALID_BIT(paramType,p_table,1);
   4459     return NO_ERROR;
   4460 }
   4461 
   4462 /*===========================================================================
   4463  * FUNCTION   : lookupFwkName
   4464  *
   4465  * DESCRIPTION: In case the enum is not same in fwk and backend
 *              make sure the parameter is correctly propagated
   4467  *
   4468  * PARAMETERS  :
   4469  *   @arr      : map between the two enums
   4470  *   @len      : len of the map
   4471  *   @hal_name : name of the hal_parm to map
   4472  *
   4473  * RETURN     : int type of status
   4474  *              fwk_name  -- success
 *              non-zero failure code
   4476  *==========================================================================*/
   4477 int32_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
   4478                                              int len, int hal_name)
   4479 {
   4480 
   4481     for (int i = 0; i < len; i++) {
   4482         if (arr[i].hal_name == hal_name)
   4483             return arr[i].fwk_name;
   4484     }
   4485 
   4486     /* Not able to find matching framework type is not necessarily
   4487      * an error case. This happens when mm-camera supports more attributes
   4488      * than the frameworks do */
   4489     ALOGD("%s: Cannot find matching framework type", __func__);
   4490     return NAME_NOT_FOUND;
   4491 }
   4492 
   4493 /*===========================================================================
   4494  * FUNCTION   : lookupHalName
   4495  *
   4496  * DESCRIPTION: In case the enum is not same in fwk and backend
   4497  *              make sure the parameter is correctly propogated
   4498  *
   4499  * PARAMETERS  :
   4500  *   @arr      : map between the two enums
   4501  *   @len      : len of the map
   4502  *   @fwk_name : name of the hal_parm to map
   4503  *
   4504  * RETURN     : int32_t type of status
   4505  *              hal_name  -- success
   4506  *              none-zero failure code
   4507  *==========================================================================*/
   4508 int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
   4509                                              int len, unsigned int fwk_name)
   4510 {
   4511     for (int i = 0; i < len; i++) {
   4512        if (arr[i].fwk_name == fwk_name)
   4513            return arr[i].hal_name;
   4514     }
   4515     ALOGE("%s: Cannot find matching hal type", __func__);
   4516     return NAME_NOT_FOUND;
   4517 }
   4518 
   4519 /*===========================================================================
   4520  * FUNCTION   : getCapabilities
   4521  *
   4522  * DESCRIPTION: query camera capabilities
   4523  *
   4524  * PARAMETERS :
   4525  *   @cameraId  : camera Id
   4526  *   @info      : camera info struct to be filled in with camera capabilities
   4527  *
   4528  * RETURN     : int32_t type of status
   4529  *              NO_ERROR  -- success
   4530  *              none-zero failure code
   4531  *==========================================================================*/
   4532 int QCamera3HardwareInterface::getCamInfo(int cameraId,
   4533                                     struct camera_info *info)
   4534 {
   4535     int rc = 0;
   4536 
   4537     if (NULL == gCamCapability[cameraId]) {
   4538         rc = initCapabilities(cameraId);
   4539         if (rc < 0) {
   4540             //pthread_mutex_unlock(&g_camlock);
   4541             return rc;
   4542         }
   4543     }
   4544 
   4545     if (NULL == gStaticMetadata[cameraId]) {
   4546         rc = initStaticMetadata(cameraId);
   4547         if (rc < 0) {
   4548             return rc;
   4549         }
   4550     }
   4551 
   4552     switch(gCamCapability[cameraId]->position) {
   4553     case CAM_POSITION_BACK:
   4554         info->facing = CAMERA_FACING_BACK;
   4555         break;
   4556 
   4557     case CAM_POSITION_FRONT:
   4558         info->facing = CAMERA_FACING_FRONT;
   4559         break;
   4560 
   4561     default:
   4562         ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
   4563         rc = -1;
   4564         break;
   4565     }
   4566 
   4567 
   4568     info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
   4569     info->device_version = CAMERA_DEVICE_API_VERSION_3_2;
   4570     info->static_camera_characteristics = gStaticMetadata[cameraId];
   4571 
   4572     return rc;
   4573 }
   4574 
   4575 /*===========================================================================
   4576  * FUNCTION   : translateCapabilityToMetadata
   4577  *
   4578  * DESCRIPTION: translate the capability into camera_metadata_t
   4579  *
   4580  * PARAMETERS : type of the request
   4581  *
   4582  *
   4583  * RETURN     : success: camera_metadata_t*
   4584  *              failure: NULL
   4585  *
   4586  *==========================================================================*/
   4587 camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
   4588 {
   4589     pthread_mutex_lock(&mMutex);
   4590 
   4591     if (mDefaultMetadata[type] != NULL) {
   4592         pthread_mutex_unlock(&mMutex);
   4593         return mDefaultMetadata[type];
   4594     }
   4595     //first time we are handling this request
   4596     //fill up the metadata structure using the wrapper class
   4597     CameraMetadata settings;
   4598     //translate from cam_capability_t to camera_metadata_tag_t
   4599     static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
   4600     settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
   4601     int32_t defaultRequestID = 0;
   4602     settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
   4603 
   4604     uint8_t controlIntent = 0;
   4605     uint8_t focusMode;
   4606     switch (type) {
   4607       case CAMERA3_TEMPLATE_PREVIEW:
   4608         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
   4609         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
   4610         break;
   4611       case CAMERA3_TEMPLATE_STILL_CAPTURE:
   4612         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
   4613         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
   4614         break;
   4615       case CAMERA3_TEMPLATE_VIDEO_RECORD:
   4616         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
   4617         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
   4618         break;
   4619       case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
   4620         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
   4621         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
   4622         break;
   4623       case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
   4624         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
   4625         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
   4626         break;
   4627       case CAMERA3_TEMPLATE_MANUAL:
   4628         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
   4629         focusMode = ANDROID_CONTROL_AF_MODE_OFF;
   4630         break;
   4631       default:
   4632         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
   4633         break;
   4634     }
   4635     settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
   4636 
   4637     if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
   4638         focusMode = ANDROID_CONTROL_AF_MODE_OFF;
   4639     }
   4640     settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
   4641 
   4642     settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
   4643             &gCamCapability[mCameraId]->exposure_compensation_default, 1);
   4644 
   4645     static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
   4646     settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
   4647 
   4648     static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
   4649     settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
   4650 
   4651     static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
   4652     settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
   4653 
   4654     static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
   4655     settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
   4656 
   4657     static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
   4658     settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
   4659 
   4660     static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
   4661     settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
   4662 
   4663     static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
   4664     settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
   4665 
   4666     /*flash*/
   4667     static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
   4668     settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
   4669 
   4670     static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
   4671     settings.update(ANDROID_FLASH_FIRING_POWER,
   4672             &flashFiringLevel, 1);
   4673 
   4674     /* lens */
   4675     float default_aperture = gCamCapability[mCameraId]->apertures[0];
   4676     settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
   4677 
   4678     if (gCamCapability[mCameraId]->filter_densities_count) {
   4679         float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
   4680         settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
   4681                         gCamCapability[mCameraId]->filter_densities_count);
   4682     }
   4683 
   4684     float default_focal_length = gCamCapability[mCameraId]->focal_length;
   4685     settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
   4686 
   4687     float default_focus_distance = 0;
   4688     settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
   4689 
   4690     static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
   4691     settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
   4692 
   4693     static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
   4694     settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
   4695 
   4696     static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
   4697     settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
   4698 
   4699     static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
   4700     settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
   4701 
   4702     static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
   4703     settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
   4704 
   4705     static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
   4706     settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
   4707 
   4708     static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
   4709     settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
   4710 
   4711     /* Lens shading map mode */
   4712     uint8_t shadingMapMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
   4713     if (type == CAMERA3_TEMPLATE_STILL_CAPTURE &&
   4714         gCamCapability[mCameraId]->supported_raw_dim_cnt) {
   4715       shadingMapMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
   4716     }
   4717     settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingMapMode, 1);
   4718 
   4719     static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
   4720     settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
   4721 
   4722     /* Exposure time(Update the Min Exposure Time)*/
   4723     int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
   4724     settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
   4725 
   4726     /* frame duration */
   4727     static const int64_t default_frame_duration = NSEC_PER_33MSEC;
   4728     settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
   4729 
   4730     /* sensitivity */
   4731     static const int32_t default_sensitivity = 100;
   4732     settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
   4733 
   4734     /*edge mode*/
   4735     static const uint8_t edge_mode = ANDROID_EDGE_MODE_FAST;
   4736     settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
   4737 
   4738     /*noise reduction mode*/
   4739     static const uint8_t noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
   4740     settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
   4741 
   4742     /*color correction mode*/
   4743     static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
   4744     settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
   4745 
   4746     /*transform matrix mode*/
   4747     static const uint8_t tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
   4748     settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
   4749 
   4750     uint8_t edge_strength = (uint8_t)gCamCapability[mCameraId]->sharpness_ctrl.def_value;
   4751     settings.update(ANDROID_EDGE_STRENGTH, &edge_strength, 1);
   4752 
   4753     int32_t scaler_crop_region[4];
   4754     scaler_crop_region[0] = 0;
   4755     scaler_crop_region[1] = 0;
   4756     scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
   4757     scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
   4758     settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
   4759 
   4760     static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
   4761     settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
   4762 
   4763     static const uint8_t vs_mode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
   4764     settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vs_mode, 1);
   4765 
   4766     uint8_t opt_stab_mode = (gCamCapability[mCameraId]->optical_stab_modes_count == 2)?
   4767                              ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON :
   4768                              ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
   4769     settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &opt_stab_mode, 1);
   4770 
   4771     /*focus distance*/
   4772     float focus_distance = 0.0;
   4773     settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
   4774 
   4775     /*target fps range: use maximum range for picture, and maximum fixed range for video*/
   4776     float max_range = 0.0;
   4777     float max_fixed_fps = 0.0;
   4778     int32_t fps_range[2] = {0, 0};
   4779     for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
   4780             i++) {
   4781         float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
   4782             gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
   4783         if (type == CAMERA3_TEMPLATE_PREVIEW ||
   4784                 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
   4785                 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
   4786             if (range > max_range) {
   4787                 fps_range[0] =
   4788                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
   4789                 fps_range[1] =
   4790                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
   4791                 max_range = range;
   4792             }
   4793         } else {
   4794             if (range < 0.01 && max_fixed_fps <
   4795                     gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
   4796                 fps_range[0] =
   4797                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
   4798                 fps_range[1] =
   4799                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
   4800                 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
   4801             }
   4802         }
   4803     }
   4804     settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
   4805 
   4806     /*precapture trigger*/
   4807     uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
   4808     settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
   4809 
   4810     /*af trigger*/
   4811     uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
   4812     settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
   4813 
   4814     /* ae & af regions */
   4815     int32_t active_region[] = {
   4816             gCamCapability[mCameraId]->active_array_size.left,
   4817             gCamCapability[mCameraId]->active_array_size.top,
   4818             gCamCapability[mCameraId]->active_array_size.left +
   4819                     gCamCapability[mCameraId]->active_array_size.width,
   4820             gCamCapability[mCameraId]->active_array_size.top +
   4821                     gCamCapability[mCameraId]->active_array_size.height,
   4822             0};
   4823     settings.update(ANDROID_CONTROL_AE_REGIONS, active_region, 5);
   4824     settings.update(ANDROID_CONTROL_AF_REGIONS, active_region, 5);
   4825 
   4826     /* black level lock */
   4827     uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
   4828     settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
   4829 
   4830     //special defaults for manual template
   4831     if (type == CAMERA3_TEMPLATE_MANUAL) {
   4832         static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
   4833         settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
   4834 
   4835         static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
   4836         settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
   4837 
   4838         static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
   4839         settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
   4840 
   4841         static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
   4842         settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
   4843 
   4844         static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
   4845         settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
   4846 
   4847         static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
   4848         settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
   4849     }
   4850     mDefaultMetadata[type] = settings.release();
   4851 
   4852     pthread_mutex_unlock(&mMutex);
   4853     return mDefaultMetadata[type];
   4854 }
   4855 
   4856 /*===========================================================================
   4857  * FUNCTION   : setFrameParameters
   4858  *
   4859  * DESCRIPTION: set parameters per frame as requested in the metadata from
   4860  *              framework
   4861  *
   4862  * PARAMETERS :
   4863  *   @request   : request that needs to be serviced
   4864  *   @streamID : Stream ID of all the requested streams
   4865  *
   4866  * RETURN     : success: NO_ERROR
   4867  *              failure:
   4868  *==========================================================================*/
   4869 int QCamera3HardwareInterface::setFrameParameters(
   4870                     camera3_capture_request_t *request,
   4871                     cam_stream_ID_t streamID)
   4872 {
   4873     /*translate from camera_metadata_t type to parm_type_t*/
   4874     int rc = 0;
   4875     int32_t hal_version = CAM_HAL_V3;
   4876     if (mRepeatingRequest == true) {
   4877        //chain of repeating request
   4878        ALOGV("%s: chain of repeating request", __func__);
   4879     } else {
   4880        memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
   4881     }
   4882 
   4883     memset(mParameters, 0, sizeof(metadata_buffer_t));
   4884     mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
   4885     rc = AddSetMetaEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
   4886                 sizeof(hal_version), &hal_version);
   4887     if (rc < 0) {
   4888         ALOGE("%s: Failed to set hal version in the parameters", __func__);
   4889         return BAD_VALUE;
   4890     }
   4891 
   4892     /*we need to update the frame number in the parameters*/
   4893     rc = AddSetMetaEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
   4894                                 sizeof(request->frame_number), &(request->frame_number));
   4895     if (rc < 0) {
   4896         ALOGE("%s: Failed to set the frame number in the parameters", __func__);
   4897         return BAD_VALUE;
   4898     }
   4899 
   4900     /* Update stream id of all the requested buffers */
   4901     rc = AddSetMetaEntryToBatch(mParameters, CAM_INTF_META_STREAM_ID,
   4902                                 sizeof(cam_stream_ID_t), &streamID);
   4903 
   4904     if (rc < 0) {
   4905         ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
   4906         return BAD_VALUE;
   4907     }
   4908 
   4909     if(request->settings != NULL){
   4910         mRepeatingRequest = false;
   4911         rc = translateToHalMetadata(request, mParameters);
   4912     } else {
   4913        mRepeatingRequest = true;
   4914     }
   4915 
   4916     return rc;
   4917 }
   4918 
   4919 /*===========================================================================
   4920  * FUNCTION   : setReprocParameters
   4921  *
   4922  * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
   4923  *              queue it to picture channel for reprocessing.
   4924  *
   4925  * PARAMETERS :
   4926  *   @request   : request that needs to be serviced
   4927  *
   4928  * RETURN     : success: NO_ERROR
   4929  *              failure: non zero failure code
   4930  *==========================================================================*/
   4931 int QCamera3HardwareInterface::setReprocParameters(
   4932         camera3_capture_request_t *request)
   4933 {
   4934     /*translate from camera_metadata_t type to parm_type_t*/
   4935     int rc = 0;
   4936     metadata_buffer_t *reprocParam = NULL;
   4937 
   4938     if(request->settings != NULL){
   4939         ALOGE("%s: Reprocess settings cannot be NULL", __func__);
   4940         return BAD_VALUE;
   4941     }
   4942     reprocParam = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
   4943     if (!reprocParam) {
   4944         ALOGE("%s: Failed to allocate reprocessing metadata buffer", __func__);
   4945         return NO_MEMORY;
   4946     }
   4947     memset(reprocParam, 0, sizeof(metadata_buffer_t));
   4948     reprocParam->first_flagged_entry = CAM_INTF_PARM_MAX;
   4949 
   4950     /*we need to update the frame number in the parameters*/
   4951     rc = AddSetMetaEntryToBatch(reprocParam, CAM_INTF_META_FRAME_NUMBER,
   4952                                 sizeof(request->frame_number), &(request->frame_number));
   4953     if (rc < 0) {
   4954         ALOGE("%s: Failed to set the frame number in the parameters", __func__);
   4955         return BAD_VALUE;
   4956     }
   4957 
   4958 
   4959     rc = translateToHalMetadata(request, reprocParam);
   4960     if (rc < 0) {
   4961         ALOGE("%s: Failed to translate reproc request", __func__);
   4962         delete reprocParam;
   4963         return rc;
   4964     }
   4965     /*queue metadata for reprocessing*/
   4966     rc = mPictureChannel->queueReprocMetadata(reprocParam);
   4967     if (rc < 0) {
   4968         ALOGE("%s: Failed to queue reprocessing metadata", __func__);
   4969         delete reprocParam;
   4970     }
   4971     return rc;
   4972 }
   4973 
   4974 /*===========================================================================
   4975  * FUNCTION   : translateToHalMetadata
   4976  *
   4977  * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
   4978  *
   4979  *
   4980  * PARAMETERS :
   4981  *   @request  : request sent from framework
   4982  *
   4983  *
   4984  * RETURN     : success: NO_ERROR
   4985  *              failure:
   4986  *==========================================================================*/
   4987 int QCamera3HardwareInterface::translateToHalMetadata
   4988                                   (const camera3_capture_request_t *request,
   4989                                    metadata_buffer_t *hal_metadata)
   4990 {
   4991     int rc = 0;
   4992     CameraMetadata frame_settings;
   4993     frame_settings = request->settings;
   4994 
   4995     /* Do not change the order of the following list unless you know what you are
   4996      * doing.
   4997      * The order is laid out in such a way that parameters in the front of the table
   4998      * may be used to override the parameters later in the table. Examples are:
   4999      * 1. META_MODE should precede AEC/AWB/AF MODE
   5000      * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
   5001      * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
   5002      * 4. Any mode should precede it's corresponding settings
   5003      */
   5004     if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
   5005         uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
   5006         rc = AddSetMetaEntryToBatch(mParameters, CAM_INTF_META_MODE,
   5007                 sizeof(metaMode), &metaMode);
   5008         if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
   5009            uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
   5010            uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP,
   5011                                              sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
   5012                                              fwk_sceneMode);
   5013            rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_BESTSHOT_MODE,
   5014                 sizeof(sceneMode), &sceneMode);
   5015         } else if (metaMode == ANDROID_CONTROL_MODE_OFF) {
   5016            uint8_t sceneMode = CAM_SCENE_MODE_OFF;
   5017            rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_BESTSHOT_MODE,
   5018                 sizeof(sceneMode), &sceneMode);
   5019         } else if (metaMode == ANDROID_CONTROL_MODE_AUTO) {
   5020            uint8_t sceneMode = CAM_SCENE_MODE_OFF;
   5021            rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_BESTSHOT_MODE,
   5022                 sizeof(sceneMode), &sceneMode);
   5023         }
   5024     }
   5025 
   5026     if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
   5027         uint8_t fwk_aeMode =
   5028             frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
   5029         uint8_t aeMode;
   5030         int32_t redeye;
   5031 
   5032         if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
   5033             aeMode = CAM_AE_MODE_OFF;
   5034         } else {
   5035             aeMode = CAM_AE_MODE_ON;
   5036         }
   5037         if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
   5038             redeye = 1;
   5039         } else {
   5040             redeye = 0;
   5041         }
   5042 
   5043         int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
   5044                                           sizeof(AE_FLASH_MODE_MAP),
   5045                                           fwk_aeMode);
   5046         rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_AEC_MODE,
   5047                 sizeof(aeMode), &aeMode);
   5048         rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_LED_MODE,
   5049                 sizeof(flashMode), &flashMode);
   5050         rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION,
   5051                 sizeof(redeye), &redeye);
   5052     }
   5053 
   5054     if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
   5055         uint8_t fwk_whiteLevel =
   5056             frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
   5057         uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
   5058                 sizeof(WHITE_BALANCE_MODES_MAP),
   5059                 fwk_whiteLevel);
   5060         rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE,
   5061                 sizeof(whiteLevel), &whiteLevel);
   5062     }
   5063 
   5064     if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
   5065         uint8_t fwk_focusMode =
   5066             frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
   5067         uint8_t focusMode;
   5068         focusMode = lookupHalName(FOCUS_MODES_MAP,
   5069                                    sizeof(FOCUS_MODES_MAP),
   5070                                    fwk_focusMode);
   5071         rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_FOCUS_MODE,
   5072                 sizeof(focusMode), &focusMode);
   5073     }
   5074 
   5075     if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
   5076         float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
   5077         rc = AddSetMetaEntryToBatch(hal_metadata,
   5078                 CAM_INTF_META_LENS_FOCUS_DISTANCE,
   5079                 sizeof(focalDistance), &focalDistance);
   5080     }
   5081 
   5082     if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
   5083         uint8_t fwk_antibandingMode =
   5084             frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
   5085         uint8_t hal_antibandingMode = lookupHalName(ANTIBANDING_MODES_MAP,
   5086                      sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
   5087                      fwk_antibandingMode);
   5088         rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
   5089                 sizeof(hal_antibandingMode), &hal_antibandingMode);
   5090     }
   5091 
   5092     if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
   5093         int32_t expCompensation = frame_settings.find(
   5094             ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
   5095         if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
   5096             expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
   5097         if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
   5098             expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
   5099         rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_EV,
   5100           sizeof(expCompensation), &expCompensation);
   5101     }
   5102 
   5103     if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
   5104         int32_t expCompensation = frame_settings.find(
   5105             ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
   5106         if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
   5107             expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
   5108         if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
   5109             expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
   5110         rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_EV,
   5111           sizeof(expCompensation), &expCompensation);
   5112     }
   5113 
   5114     if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
   5115         uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
   5116         rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_AEC_LOCK,
   5117                 sizeof(aeLock), &aeLock);
   5118     }
   5119     if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
   5120         cam_fps_range_t fps_range;
   5121         fps_range.min_fps =
   5122             frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
   5123         fps_range.max_fps =
   5124             frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
   5125         rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_FPS_RANGE,
   5126                 sizeof(fps_range), &fps_range);
   5127     }
   5128 
   5129     if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
   5130         uint8_t awbLock =
   5131             frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
   5132         rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_AWB_LOCK,
   5133                 sizeof(awbLock), &awbLock);
   5134     }
   5135 
   5136     if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
   5137         uint8_t fwk_effectMode =
   5138             frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
   5139         uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
   5140                 sizeof(EFFECT_MODES_MAP),
   5141                 fwk_effectMode);
   5142         rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_EFFECT,
   5143                 sizeof(effectMode), &effectMode);
   5144     }
   5145 
   5146     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
   5147         uint8_t colorCorrectMode =
   5148             frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
   5149         rc =
   5150             AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
   5151                     sizeof(colorCorrectMode), &colorCorrectMode);
   5152     }
   5153 
   5154     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
   5155         cam_color_correct_gains_t colorCorrectGains;
   5156         for (int i = 0; i < 4; i++) {
   5157             colorCorrectGains.gains[i] =
   5158                 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
   5159         }
   5160         rc =
   5161             AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
   5162                     sizeof(colorCorrectGains), &colorCorrectGains);
   5163     }
   5164 
   5165     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
   5166         cam_color_correct_matrix_t colorCorrectTransform;
   5167         cam_rational_type_t transform_elem;
   5168         int num = 0;
   5169         for (int i = 0; i < 3; i++) {
   5170            for (int j = 0; j < 3; j++) {
   5171               transform_elem.numerator =
   5172                  frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
   5173               transform_elem.denominator =
   5174                  frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
   5175               colorCorrectTransform.transform_matrix[i][j] = transform_elem;
   5176               num++;
   5177            }
   5178         }
   5179         rc =
   5180             AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
   5181                     sizeof(colorCorrectTransform), &colorCorrectTransform);
   5182     }
   5183 
   5184     if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)) {
   5185         cam_trigger_t aecTrigger;
   5186         aecTrigger.trigger =
   5187             frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
   5188         rc = AddSetMetaEntryToBatch(hal_metadata,
   5189                 CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
   5190                 sizeof(aecTrigger), &aecTrigger);
   5191     }
   5192 
   5193     /*af_trigger must come with a trigger id*/
   5194     if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER)) {
   5195         cam_trigger_t af_trigger;
   5196         af_trigger.trigger =
   5197             frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
   5198         rc = AddSetMetaEntryToBatch(hal_metadata,
   5199                 CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
   5200     }
   5201 
   5202     if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
   5203         int32_t demosaic =
   5204             frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
   5205         rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_DEMOSAIC,
   5206                 sizeof(demosaic), &demosaic);
   5207     }
   5208 
   5209     if (frame_settings.exists(ANDROID_EDGE_MODE)) {
   5210         cam_edge_application_t edge_application;
   5211         edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
   5212         if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
   5213             edge_application.sharpness = 0;
   5214         } else {
   5215             if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
   5216                 uint8_t edgeStrength =
   5217                     frame_settings.find(ANDROID_EDGE_STRENGTH).data.u8[0];
   5218                 edge_application.sharpness = (int32_t)edgeStrength;
   5219             } else {
   5220                 edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
   5221             }
   5222         }
   5223         rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_EDGE_MODE,
   5224                 sizeof(edge_application), &edge_application);
   5225     }
   5226 
   5227     if (frame_settings.exists(ANDROID_FLASH_MODE)) {
   5228         int32_t respectFlashMode = 1;
   5229         if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
   5230             uint8_t fwk_aeMode =
   5231                 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
   5232             if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
   5233                 respectFlashMode = 0;
   5234                 ALOGV("%s: AE Mode controls flash, ignore android.flash.mode",
   5235                     __func__);
   5236             }
   5237         }
   5238         if (respectFlashMode) {
   5239             uint8_t flashMode =
   5240                 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
   5241             flashMode = (int32_t)lookupHalName(FLASH_MODES_MAP,
   5242                                           sizeof(FLASH_MODES_MAP),
   5243                                           flashMode);
   5244             ALOGV("%s: flash mode after mapping %d", __func__, flashMode);
   5245             // To check: CAM_INTF_META_FLASH_MODE usage
   5246             rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_LED_MODE,
   5247                           sizeof(flashMode), &flashMode);
   5248         }
   5249     }
   5250 
   5251     if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
   5252         uint8_t flashPower =
   5253             frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
   5254         rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_FLASH_POWER,
   5255                 sizeof(flashPower), &flashPower);
   5256     }
   5257 
   5258     if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
   5259         int64_t flashFiringTime =
   5260             frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
   5261         rc = AddSetMetaEntryToBatch(hal_metadata,
   5262                 CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
   5263     }
   5264 
   5265     if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
   5266         uint8_t hotPixelMode =
   5267             frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
   5268         rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
   5269                 sizeof(hotPixelMode), &hotPixelMode);
   5270     }
   5271 
   5272     if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
   5273         float lensAperture =
   5274             frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
   5275         rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_LENS_APERTURE,
   5276                 sizeof(lensAperture), &lensAperture);
   5277     }
   5278 
   5279     if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
   5280         float filterDensity =
   5281             frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
   5282         rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
   5283                 sizeof(filterDensity), &filterDensity);
   5284     }
   5285 
   5286     if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
   5287         float focalLength =
   5288             frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
   5289         rc = AddSetMetaEntryToBatch(hal_metadata,
   5290                 CAM_INTF_META_LENS_FOCAL_LENGTH,
   5291                 sizeof(focalLength), &focalLength);
   5292     }
   5293 
   5294     if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
   5295         uint8_t optStabMode =
   5296             frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
   5297         rc = AddSetMetaEntryToBatch(hal_metadata,
   5298                 CAM_INTF_META_LENS_OPT_STAB_MODE,
   5299                 sizeof(optStabMode), &optStabMode);
   5300     }
   5301 
   5302     if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
   5303         uint8_t noiseRedMode =
   5304             frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
   5305         rc = AddSetMetaEntryToBatch(hal_metadata,
   5306                 CAM_INTF_META_NOISE_REDUCTION_MODE,
   5307                 sizeof(noiseRedMode), &noiseRedMode);
   5308     }
   5309 
   5310     if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
   5311         uint8_t noiseRedStrength =
   5312             frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
   5313         rc = AddSetMetaEntryToBatch(hal_metadata,
   5314                 CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
   5315                 sizeof(noiseRedStrength), &noiseRedStrength);
   5316     }
   5317 
   5318     cam_crop_region_t scalerCropRegion;
   5319     bool scalerCropSet = false;
   5320     if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
   5321         scalerCropRegion.left =
   5322             frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
   5323         scalerCropRegion.top =
   5324             frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
   5325         scalerCropRegion.width =
   5326             frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
   5327         scalerCropRegion.height =
   5328             frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
   5329         rc = AddSetMetaEntryToBatch(hal_metadata,
   5330                 CAM_INTF_META_SCALER_CROP_REGION,
   5331                 sizeof(scalerCropRegion), &scalerCropRegion);
   5332         scalerCropSet = true;
   5333     }
   5334 
   5335     if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
   5336         int64_t sensorExpTime =
   5337             frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
   5338         ALOGV("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
   5339         rc = AddSetMetaEntryToBatch(hal_metadata,
   5340                 CAM_INTF_META_SENSOR_EXPOSURE_TIME,
   5341                 sizeof(sensorExpTime), &sensorExpTime);
   5342     }
   5343 
   5344     if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
   5345         int64_t sensorFrameDuration =
   5346             frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
   5347         int64_t minFrameDuration = getMinFrameDuration(request);
   5348         sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
   5349         if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
   5350             sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
   5351         ALOGV("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
   5352         rc = AddSetMetaEntryToBatch(hal_metadata,
   5353                 CAM_INTF_META_SENSOR_FRAME_DURATION,
   5354                 sizeof(sensorFrameDuration), &sensorFrameDuration);
   5355     }
   5356 
   5357     if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
   5358         int32_t sensorSensitivity =
   5359             frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
   5360         if (sensorSensitivity <
   5361                 gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
   5362             sensorSensitivity =
   5363                 gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
   5364         if (sensorSensitivity >
   5365                 gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
   5366             sensorSensitivity =
   5367                 gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
   5368         ALOGV("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
   5369         rc = AddSetMetaEntryToBatch(hal_metadata,
   5370                 CAM_INTF_META_SENSOR_SENSITIVITY,
   5371                 sizeof(sensorSensitivity), &sensorSensitivity);
   5372     }
   5373 
   5374     if (frame_settings.exists(ANDROID_SHADING_MODE)) {
   5375         int32_t shadingMode =
   5376             frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
   5377         rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_SHADING_MODE,
   5378                 sizeof(shadingMode), &shadingMode);
   5379     }
   5380 
   5381     if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
   5382         uint8_t shadingStrength =
   5383             frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
   5384         rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_SHADING_STRENGTH,
   5385                 sizeof(shadingStrength), &shadingStrength);
   5386     }
   5387 
   5388     if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
   5389         uint8_t fwk_facedetectMode =
   5390             frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
   5391         uint8_t facedetectMode =
   5392             lookupHalName(FACEDETECT_MODES_MAP,
   5393                 sizeof(FACEDETECT_MODES_MAP), fwk_facedetectMode);
   5394         rc = AddSetMetaEntryToBatch(hal_metadata,
   5395                 CAM_INTF_META_STATS_FACEDETECT_MODE,
   5396                 sizeof(facedetectMode), &facedetectMode);
   5397     }
   5398 
   5399     if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
   5400         uint8_t histogramMode =
   5401             frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
   5402         rc = AddSetMetaEntryToBatch(hal_metadata,
   5403                 CAM_INTF_META_STATS_HISTOGRAM_MODE,
   5404                 sizeof(histogramMode), &histogramMode);
   5405     }
   5406 
   5407     if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
   5408         uint8_t sharpnessMapMode =
   5409             frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
   5410         rc = AddSetMetaEntryToBatch(hal_metadata,
   5411                 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
   5412                 sizeof(sharpnessMapMode), &sharpnessMapMode);
   5413     }
   5414 
   5415     if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
   5416         uint8_t tonemapMode =
   5417             frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
   5418         rc = AddSetMetaEntryToBatch(hal_metadata,
   5419                 CAM_INTF_META_TONEMAP_MODE,
   5420                 sizeof(tonemapMode), &tonemapMode);
   5421     }
   5422     /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
   5423     /*All tonemap channels will have the same number of points*/
   5424     if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
   5425         frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
   5426         frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
   5427         cam_rgb_tonemap_curves tonemapCurves;
   5428         tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
   5429 
   5430         /* ch0 = G*/
   5431         int point = 0;
   5432         cam_tonemap_curve_t tonemapCurveGreen;
   5433         for (int i = 0; i < tonemapCurves.tonemap_points_cnt ; i++) {
   5434             for (int j = 0; j < 2; j++) {
   5435                tonemapCurveGreen.tonemap_points[i][j] =
   5436                   frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
   5437                point++;
   5438             }
   5439         }
   5440         tonemapCurves.curves[0] = tonemapCurveGreen;
   5441 
   5442         /* ch 1 = B */
   5443         point = 0;
   5444         cam_tonemap_curve_t tonemapCurveBlue;
   5445         for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
   5446             for (int j = 0; j < 2; j++) {
   5447                tonemapCurveBlue.tonemap_points[i][j] =
   5448                   frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
   5449                point++;
   5450             }
   5451         }
   5452         tonemapCurves.curves[1] = tonemapCurveBlue;
   5453 
   5454         /* ch 2 = R */
   5455         point = 0;
   5456         cam_tonemap_curve_t tonemapCurveRed;
   5457         for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
   5458             for (int j = 0; j < 2; j++) {
   5459                tonemapCurveRed.tonemap_points[i][j] =
   5460                   frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
   5461                point++;
   5462             }
   5463         }
   5464         tonemapCurves.curves[2] = tonemapCurveRed;
   5465 
   5466         rc = AddSetMetaEntryToBatch(hal_metadata,
   5467                 CAM_INTF_META_TONEMAP_CURVES,
   5468                 sizeof(tonemapCurves), &tonemapCurves);
   5469     }
   5470 
   5471     if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
   5472         uint8_t captureIntent =
   5473             frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
   5474         rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
   5475                 sizeof(captureIntent), &captureIntent);
   5476     }
   5477 
   5478     if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
   5479         uint8_t blackLevelLock =
   5480             frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
   5481         rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
   5482                 sizeof(blackLevelLock), &blackLevelLock);
   5483     }
   5484 
   5485     if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
   5486         uint8_t lensShadingMapMode =
   5487             frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
   5488         rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
   5489                 sizeof(lensShadingMapMode), &lensShadingMapMode);
   5490     }
   5491 
   5492     if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
   5493         cam_area_t roi;
   5494         bool reset = true;
   5495         convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
   5496         if (scalerCropSet) {
   5497             reset = resetIfNeededROI(&roi, &scalerCropRegion);
   5498         }
   5499         if (reset) {
   5500             rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_AEC_ROI,
   5501                     sizeof(roi), &roi);
   5502         }
   5503     }
   5504 
   5505     if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
   5506         cam_area_t roi;
   5507         bool reset = true;
   5508         convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
   5509         if (scalerCropSet) {
   5510             reset = resetIfNeededROI(&roi, &scalerCropRegion);
   5511         }
   5512         if (reset) {
   5513             rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_AF_ROI,
   5514                     sizeof(roi), &roi);
   5515         }
   5516     }
   5517 
   5518     if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
   5519         cam_test_pattern_data_t testPatternData;
   5520         uint32_t fwk_testPatternMode = frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
   5521         uint8_t testPatternMode = lookupHalName(TEST_PATTERN_MAP,
   5522                sizeof(TEST_PATTERN_MAP), fwk_testPatternMode);
   5523 
   5524         memset(&testPatternData, 0, sizeof(testPatternData));
   5525         testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
   5526         if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
   5527                 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
   5528             int32_t* fwk_testPatternData = frame_settings.find(
   5529                     ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
   5530             testPatternData.r = fwk_testPatternData[0];
   5531             testPatternData.b = fwk_testPatternData[3];
   5532             switch (gCamCapability[mCameraId]->color_arrangement) {
   5533             case CAM_FILTER_ARRANGEMENT_RGGB:
   5534             case CAM_FILTER_ARRANGEMENT_GRBG:
   5535                 testPatternData.gr = fwk_testPatternData[1];
   5536                 testPatternData.gb = fwk_testPatternData[2];
   5537                 break;
   5538             case CAM_FILTER_ARRANGEMENT_GBRG:
   5539             case CAM_FILTER_ARRANGEMENT_BGGR:
   5540                 testPatternData.gr = fwk_testPatternData[2];
   5541                 testPatternData.gb = fwk_testPatternData[1];
   5542                 break;
   5543             default:
   5544                 ALOGE("%s: color arrangement %d is not supported", __func__,
   5545                     gCamCapability[mCameraId]->color_arrangement);
   5546                 break;
   5547             }
   5548         }
   5549         rc = AddSetMetaEntryToBatch(mParameters, CAM_INTF_META_TEST_PATTERN_DATA,
   5550             sizeof(testPatternData), &testPatternData);
   5551     }
   5552 
   5553     if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
   5554         double *gps_coords =
   5555             frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d;
   5556         rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES, sizeof(double)*3, gps_coords);
   5557     }
   5558 
   5559     if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
   5560         char gps_methods[GPS_PROCESSING_METHOD_SIZE];
   5561         const char *gps_methods_src = (const char *)
   5562                 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
   5563         uint32_t count = frame_settings.find(
   5564                 ANDROID_JPEG_GPS_PROCESSING_METHOD).count;
   5565         memset(gps_methods, 0, sizeof(gps_methods));
   5566         strncpy(gps_methods, gps_methods_src, sizeof(gps_methods));
   5567         rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS, sizeof(gps_methods), gps_methods);
   5568     }
   5569 
   5570     if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
   5571         int64_t gps_timestamp =
   5572             frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
   5573         rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP, sizeof(int64_t), &gps_timestamp);
   5574     }
   5575 
   5576     if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
   5577         int32_t orientation =
   5578             frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
   5579         rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, sizeof(orientation), &orientation);
   5580     }
   5581 
   5582     if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
   5583         int8_t quality =
   5584             frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
   5585         rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_QUALITY, sizeof(quality), &quality);
   5586     }
   5587 
   5588     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
   5589         int8_t thumb_quality =
   5590             frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
   5591         rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY, sizeof(thumb_quality), &thumb_quality);
   5592     }
   5593 
   5594     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
   5595         cam_dimension_t dim;
   5596         dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
   5597         dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
   5598         rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, sizeof(dim), &dim);
   5599     }
   5600 
   5601     // Internal metadata
   5602     if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
   5603         uint8_t* privatedata =
   5604             frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS).data.u8;
   5605         rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
   5606             sizeof(uint8_t) * MAX_METADATA_PAYLOAD_SIZE, privatedata);
   5607     }
   5608 
   5609     // EV step
   5610     rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_EV_STEP,
   5611             sizeof(cam_rational_type_t), &(gCamCapability[mCameraId]->exp_compensation_step));
   5612 
   5613     return rc;
   5614 }
   5615 
   5616 /*===========================================================================
   5617  * FUNCTION   : captureResultCb
   5618  *
   5619  * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
   5620  *
   5621  * PARAMETERS :
   5622  *   @frame  : frame information from mm-camera-interface
   5623  *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
   5624  *   @userdata: userdata
   5625  *
   5626  * RETURN     : NONE
   5627  *==========================================================================*/
   5628 void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
   5629                 camera3_stream_buffer_t *buffer,
   5630                 uint32_t frame_number, void *userdata)
   5631 {
   5632     QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
   5633     if (hw == NULL) {
   5634         ALOGE("%s: Invalid hw %p", __func__, hw);
   5635         return;
   5636     }
   5637 
   5638     hw->captureResultCb(metadata, buffer, frame_number);
   5639     return;
   5640 }
   5641 
   5642 
   5643 /*===========================================================================
   5644  * FUNCTION   : initialize
   5645  *
   5646  * DESCRIPTION: Pass framework callback pointers to HAL
   5647  *
   5648  * PARAMETERS :
   5649  *
   5650  *
   5651  * RETURN     : Success : 0
   5652  *              Failure: -ENODEV
   5653  *==========================================================================*/
   5654 
   5655 int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
   5656                                   const camera3_callback_ops_t *callback_ops)
   5657 {
   5658     ALOGV("%s: E", __func__);
   5659     QCamera3HardwareInterface *hw =
   5660         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   5661     if (!hw) {
   5662         ALOGE("%s: NULL camera device", __func__);
   5663         return -ENODEV;
   5664     }
   5665 
   5666     int rc = hw->initialize(callback_ops);
   5667     ALOGV("%s: X", __func__);
   5668     return rc;
   5669 }
   5670 
   5671 /*===========================================================================
   5672  * FUNCTION   : configure_streams
   5673  *
   5674  * DESCRIPTION:
   5675  *
   5676  * PARAMETERS :
   5677  *
   5678  *
   5679  * RETURN     : Success: 0
   5680  *              Failure: -EINVAL (if stream configuration is invalid)
   5681  *                       -ENODEV (fatal error)
   5682  *==========================================================================*/
   5683 
   5684 int QCamera3HardwareInterface::configure_streams(
   5685         const struct camera3_device *device,
   5686         camera3_stream_configuration_t *stream_list)
   5687 {
   5688     ALOGV("%s: E", __func__);
   5689     QCamera3HardwareInterface *hw =
   5690         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   5691     if (!hw) {
   5692         ALOGE("%s: NULL camera device", __func__);
   5693         return -ENODEV;
   5694     }
   5695     int rc = hw->configureStreams(stream_list);
   5696     ALOGV("%s: X", __func__);
   5697     return rc;
   5698 }
   5699 
   5700 /*===========================================================================
   5701  * FUNCTION   : register_stream_buffers
   5702  *
   5703  * DESCRIPTION: Register stream buffers with the device
   5704  *
   5705  * PARAMETERS :
   5706  *
   5707  * RETURN     :
   5708  *==========================================================================*/
   5709 int QCamera3HardwareInterface::register_stream_buffers(
   5710         const struct camera3_device *device,
   5711         const camera3_stream_buffer_set_t *buffer_set)
   5712 {
   5713     ALOGV("%s: E", __func__);
   5714     QCamera3HardwareInterface *hw =
   5715         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   5716     if (!hw) {
   5717         ALOGE("%s: NULL camera device", __func__);
   5718         return -ENODEV;
   5719     }
   5720     int rc = hw->registerStreamBuffers(buffer_set);
   5721     ALOGV("%s: X", __func__);
   5722     return rc;
   5723 }
   5724 
   5725 /*===========================================================================
   5726  * FUNCTION   : construct_default_request_settings
   5727  *
   5728  * DESCRIPTION: Configure a settings buffer to meet the required use case
   5729  *
   5730  * PARAMETERS :
   5731  *
   5732  *
   5733  * RETURN     : Success: Return valid metadata
   5734  *              Failure: Return NULL
   5735  *==========================================================================*/
   5736 const camera_metadata_t* QCamera3HardwareInterface::
   5737     construct_default_request_settings(const struct camera3_device *device,
   5738                                         int type)
   5739 {
   5740 
   5741     ALOGV("%s: E", __func__);
   5742     camera_metadata_t* fwk_metadata = NULL;
   5743     QCamera3HardwareInterface *hw =
   5744         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   5745     if (!hw) {
   5746         ALOGE("%s: NULL camera device", __func__);
   5747         return NULL;
   5748     }
   5749 
   5750     fwk_metadata = hw->translateCapabilityToMetadata(type);
   5751 
   5752     ALOGV("%s: X", __func__);
   5753     return fwk_metadata;
   5754 }
   5755 
   5756 /*===========================================================================
   5757  * FUNCTION   : process_capture_request
   5758  *
   5759  * DESCRIPTION:
   5760  *
   5761  * PARAMETERS :
   5762  *
   5763  *
   5764  * RETURN     :
   5765  *==========================================================================*/
   5766 int QCamera3HardwareInterface::process_capture_request(
   5767                     const struct camera3_device *device,
   5768                     camera3_capture_request_t *request)
   5769 {
   5770     ALOGV("%s: E", __func__);
   5771     QCamera3HardwareInterface *hw =
   5772         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   5773     if (!hw) {
   5774         ALOGE("%s: NULL camera device", __func__);
   5775         return -EINVAL;
   5776     }
   5777 
   5778     int rc = hw->processCaptureRequest(request);
   5779     ALOGV("%s: X", __func__);
   5780     return rc;
   5781 }
   5782 
   5783 /*===========================================================================
   5784  * FUNCTION   : dump
   5785  *
   5786  * DESCRIPTION:
   5787  *
   5788  * PARAMETERS :
   5789  *
   5790  *
   5791  * RETURN     :
   5792  *==========================================================================*/
   5793 
   5794 void QCamera3HardwareInterface::dump(
   5795                 const struct camera3_device *device, int fd)
   5796 {
   5797     ALOGV("%s: E", __func__);
   5798     QCamera3HardwareInterface *hw =
   5799         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   5800     if (!hw) {
   5801         ALOGE("%s: NULL camera device", __func__);
   5802         return;
   5803     }
   5804 
   5805     hw->dump(fd);
   5806     ALOGV("%s: X", __func__);
   5807     return;
   5808 }
   5809 
   5810 /*===========================================================================
   5811  * FUNCTION   : flush
   5812  *
   5813  * DESCRIPTION:
   5814  *
   5815  * PARAMETERS :
   5816  *
   5817  *
   5818  * RETURN     :
   5819  *==========================================================================*/
   5820 
   5821 int QCamera3HardwareInterface::flush(
   5822                 const struct camera3_device *device)
   5823 {
   5824     int rc;
   5825     ALOGV("%s: E", __func__);
   5826     QCamera3HardwareInterface *hw =
   5827         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   5828     if (!hw) {
   5829         ALOGE("%s: NULL camera device", __func__);
   5830         return -EINVAL;
   5831     }
   5832 
   5833     rc = hw->flush();
   5834     ALOGV("%s: X", __func__);
   5835     return rc;
   5836 }
   5837 
   5838 /*===========================================================================
   5839  * FUNCTION   : close_camera_device
   5840  *
   5841  * DESCRIPTION:
   5842  *
   5843  * PARAMETERS :
   5844  *
   5845  *
   5846  * RETURN     :
   5847  *==========================================================================*/
   5848 int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
   5849 {
   5850     ALOGV("%s: E", __func__);
   5851     int ret = NO_ERROR;
   5852     QCamera3HardwareInterface *hw =
   5853         reinterpret_cast<QCamera3HardwareInterface *>(
   5854             reinterpret_cast<camera3_device_t *>(device)->priv);
   5855     if (!hw) {
   5856         ALOGE("NULL camera device");
   5857         return BAD_VALUE;
   5858     }
   5859     delete hw;
   5860 
   5861     pthread_mutex_lock(&mCameraSessionLock);
   5862     mCameraSessionActive = 0;
   5863     pthread_mutex_unlock(&mCameraSessionLock);
   5864     ALOGV("%s: X", __func__);
   5865     return ret;
   5866 }
   5867 
   5868 /*===========================================================================
   5869  * FUNCTION   : getWaveletDenoiseProcessPlate
   5870  *
   5871  * DESCRIPTION: query wavelet denoise process plate
   5872  *
   5873  * PARAMETERS : None
   5874  *
 * RETURN     : WNR process plate value
   5876  *==========================================================================*/
   5877 cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
   5878 {
   5879     char prop[PROPERTY_VALUE_MAX];
   5880     memset(prop, 0, sizeof(prop));
   5881     property_get("persist.denoise.process.plates", prop, "0");
   5882     int processPlate = atoi(prop);
   5883     switch(processPlate) {
   5884     case 0:
   5885         return CAM_WAVELET_DENOISE_YCBCR_PLANE;
   5886     case 1:
   5887         return CAM_WAVELET_DENOISE_CBCR_ONLY;
   5888     case 2:
   5889         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
   5890     case 3:
   5891         return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
   5892     default:
   5893         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
   5894     }
   5895 }
   5896 
   5897 /*===========================================================================
   5898  * FUNCTION   : needRotationReprocess
   5899  *
   5900  * DESCRIPTION: if rotation needs to be done by reprocess in pp
   5901  *
   5902  * PARAMETERS : none
   5903  *
   5904  * RETURN     : true: needed
   5905  *              false: no need
   5906  *==========================================================================*/
bool QCamera3HardwareInterface::needRotationReprocess()
{
    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
        // The pp (post-processing) module advertises rotation capability, so
        // route rotation through the reprocess path.
        // NOTE(review): only the capability mask is tested here; the current
        // rotation value is NOT checked, so this returns true even for a
        // zero rotation -- confirm whether that is intended.
        ALOGD("%s: need do reprocess for rotation", __func__);
        return true;
    }

    return false;
}
   5917 
   5918 /*===========================================================================
   5919  * FUNCTION   : needReprocess
   5920  *
 * DESCRIPTION: if reprocess is needed
   5922  *
   5923  * PARAMETERS : none
   5924  *
   5925  * RETURN     : true: needed
   5926  *              false: no need
   5927  *==========================================================================*/
   5928 bool QCamera3HardwareInterface::needReprocess()
   5929 {
   5930     if (gCamCapability[mCameraId]->min_required_pp_mask > 0) {
   5931         // TODO: add for ZSL HDR later
   5932         // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
   5933         ALOGD("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
   5934         return true;
   5935     }
   5936     return needRotationReprocess();
   5937 }
   5938 
   5939 /*===========================================================================
   5940  * FUNCTION   : addOfflineReprocChannel
   5941  *
   5942  * DESCRIPTION: add a reprocess channel that will do reprocess on frames
   5943  *              coming from input channel
   5944  *
   5945  * PARAMETERS :
   5946  *   @pInputChannel : ptr to input channel whose frames will be post-processed
   5947  *
   5948  * RETURN     : Ptr to the newly created channel obj. NULL if failed.
   5949  *==========================================================================*/
QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
              QCamera3Channel *pInputChannel, QCamera3PicChannel *picChHandle, metadata_buffer_t *metadata)
{
    int32_t rc = NO_ERROR;
    QCamera3ReprocessChannel *pChannel = NULL;
    if (pInputChannel == NULL) {
        ALOGE("%s: input channel obj is NULL", __func__);
        return NULL;
    }

    // Create the reprocess channel bound to this session's camera handle,
    // reusing the input channel's padding requirements.
    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
            mCameraHandle->ops, NULL, pInputChannel->mPaddingInfo, this, picChHandle);
    if (NULL == pChannel) {
        ALOGE("%s: no mem for reprocess channel", __func__);
        return NULL;
    }

    rc = pChannel->initialize();
    if (rc != NO_ERROR) {
        ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
        delete pChannel;
        return NULL;
    }

    // pp feature config: translate the request metadata into the set of
    // post-processing features the reprocess streams should apply.
    cam_pp_feature_config_t pp_config;
    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));

    // Edge mode -> sharpness feature (skipped when the mode is OFF).
    if (IS_PARM_VALID(CAM_INTF_META_EDGE_MODE, metadata)) {
        cam_edge_application_t *edge = (cam_edge_application_t *)
                POINTER_OF(CAM_INTF_META_EDGE_MODE, metadata);
        if (edge->edge_mode != CAM_EDGE_MODE_OFF) {
            pp_config.feature_mask |= CAM_QCOM_FEATURE_SHARPNESS;
            pp_config.sharpness = edge->sharpness;
        }
    }

    // Noise reduction mode -> 2D wavelet denoise (plate chosen via the
    // persist.denoise.process.plates property).
    if (IS_PARM_VALID(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata)) {
        uint8_t *noise_mode = (uint8_t *)POINTER_OF(
                CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
        if (*noise_mode != CAM_NOISE_REDUCTION_MODE_OFF) {
            pp_config.feature_mask |= CAM_QCOM_FEATURE_DENOISE2D;
            pp_config.denoise2d.denoise_enable = 1;
            pp_config.denoise2d.process_plates = getWaveletDenoiseProcessPlate();
        }
    }

    // JPEG orientation -> rotation feature, only when the pp hardware can
    // perform rotation (see needRotationReprocess()).
    // NOTE(review): angles other than 0/90/180/270 set the feature bit but
    // leave pp_config.rotation at its zeroed value -- confirm callers only
    // pass the four canonical angles.
    if (IS_PARM_VALID(CAM_INTF_META_JPEG_ORIENTATION, metadata)) {
        int32_t *rotation = (int32_t *)POINTER_OF(
                CAM_INTF_META_JPEG_ORIENTATION, metadata);

        if (needRotationReprocess()) {
            pp_config.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
            if (*rotation == 0) {
                pp_config.rotation = ROTATE_0;
            } else if (*rotation == 90) {
                pp_config.rotation = ROTATE_90;
            } else if (*rotation == 180) {
                pp_config.rotation = ROTATE_180;
            } else if (*rotation == 270) {
                pp_config.rotation = ROTATE_270;
            }
        }
    }

    // Mirror the source channel's streams onto the reprocess channel with
    // the assembled feature config; tear the channel down on failure.
    rc = pChannel->addReprocStreamsFromSource(pp_config,
                                             pInputChannel,
                                             mMetadataChannel);

    if (rc != NO_ERROR) {
        delete pChannel;
        return NULL;
    }
    return pChannel;
}
   6025 
   6026 }; //end namespace qcamera
   6027