Home | History | Annotate | Download | only in HAL3
      1 /* Copyright (c) 2012-2013, The Linux Foundation. All rights reserved.
      2 *
      3 * Redistribution and use in source and binary forms, with or without
      4 * modification, are permitted provided that the following conditions are
      5 * met:
      6 *     * Redistributions of source code must retain the above copyright
      7 *       notice, this list of conditions and the following disclaimer.
      8 *     * Redistributions in binary form must reproduce the above
      9 *       copyright notice, this list of conditions and the following
     10 *       disclaimer in the documentation and/or other materials provided
     11 *       with the distribution.
     12 *     * Neither the name of The Linux Foundation nor the names of its
     13 *       contributors may be used to endorse or promote products derived
     14 *       from this software without specific prior written permission.
     15 *
     16 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
     17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
     18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
     19 * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
     20 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
     21 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
     22 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
     23 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
     24 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
     25 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
     26 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
     27 *
     28 */
     29 
     30 #define LOG_TAG "QCamera3HWI"
     31 //#define LOG_NDEBUG 0
     32 
     33 #include <cutils/properties.h>
     34 #include <hardware/camera3.h>
     35 #include <camera/CameraMetadata.h>
     36 #include <stdlib.h>
     37 #include <utils/Log.h>
     38 #include <utils/Errors.h>
     39 #include <ui/Fence.h>
     40 #include <gralloc_priv.h>
     41 #include "QCamera3HWI.h"
     42 #include "QCamera3Mem.h"
     43 #include "QCamera3Channel.h"
     44 #include "QCamera3PostProc.h"
     45 
     46 using namespace android;
     47 
     48 namespace qcamera {
     49 
// Larger of two values. NOTE: macro evaluates each argument more than once,
// so do not pass expressions with side effects.
#define MAX(a, b) ((a) > (b) ? (a) : (b))

// Shorthand: CPU-addressable pointer for buffer INDEX of a memory object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
// Per-sensor capability records. NOTE(review): the constructor dereferences
// gCamCapability[cameraId], so these must be populated before a
// QCamera3HardwareInterface is constructed -- confirm with the HAL module code.
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
// Previously applied parameter batch; readers/writers are outside this chunk.
parm_buffer_t *prevSettings;
// Cached static metadata handed to the framework, one entry per sensor.
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];

// Serializes camera session open/close across all instances; only one camera
// session may be active at a time (enforced in openCamera()).
pthread_mutex_t QCamera3HardwareInterface::mCameraSessionLock =
    PTHREAD_MUTEX_INITIALIZER;
// Non-zero while a camera session is open; guarded by mCameraSessionLock.
unsigned int QCamera3HardwareInterface::mCameraSessionActive = 0;
     60 
// Translation table: Android framework color-effect enum -> vendor
// (mm-camera) effect enum.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};
     72 
// Translation table: Android framework AWB-mode enum -> vendor white-balance
// enum.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};
     84 
// Translation table: Android framework scene-mode enum -> vendor scene enum.
// Note the non-obvious entries: FACE_PRIORITY is handled as scene OFF (face
// priority is driven elsewhere), and STEADYPHOTO maps to the vendor's
// ANTISHAKE scene.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_OFF },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};
    103 
// Translation table: Android framework AF-mode enum -> vendor focus enum.
// AF_MODE_OFF is expressed as FIXED focus on the vendor side (no vendor
// "off" value is used here).
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};
    112 
// Translation table: Android framework AE-antibanding enum -> vendor
// antibanding enum (1:1 mapping).
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};
    119 
// Translation table: Android framework AE-mode enum -> vendor flash-mode
// enum. Both AE OFF and plain AE ON map to flash OFF (AE without flash);
// the REDEYE variant is treated as plain AUTO flash here.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};
    127 
// Translation table: Android framework manual flash-mode enum -> vendor
// flash enum.
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};
    133 
// Translation table: Android framework face-detect-mode enum -> vendor
// face-detect enum. Only OFF and FULL are mapped; the framework's SIMPLE
// mode has no entry here (presumably unsupported -- TODO confirm).
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};
    138 
// Supported JPEG thumbnail dimensions, flattened as (width, height) pairs.
// The trailing (0, 0) pair advertises "no thumbnail" as a valid choice.
const int32_t available_thumbnail_sizes[] = {512, 288, 480, 288, 256, 154, 432, 288,
                                             320, 240, 176, 144, 0, 0};
    141 
// camera3 device-ops vtable handed to the framework (wired into
// mCameraDevice.ops in the constructor). Each slot points at a static
// trampoline on QCamera3HardwareInterface. Uses the GNU "label:"
// designated-initializer extension rather than C99 ".field =" syntax.
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    initialize:                         QCamera3HardwareInterface::initialize,
    configure_streams:                  QCamera3HardwareInterface::configure_streams,
    register_stream_buffers:            QCamera3HardwareInterface::register_stream_buffers,
    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    get_metadata_vendor_tag_ops:        QCamera3HardwareInterface::get_metadata_vendor_tag_ops,
    dump:                               QCamera3HardwareInterface::dump,
    flush:                              QCamera3HardwareInterface::flush,
    reserved:                           {0},
};
    153 
// Upper bound on capture requests the HAL keeps in flight at once.
// NOTE(review): consumers are outside this chunk; presumably used to pace
// process_capture_request -- confirm against the request-handling code.
int QCamera3HardwareInterface::kMaxInFlight = 5;
    155 
/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraOpened(false),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mInputStream(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mFirstRequest(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mJpegSettings(NULL),
      mIsZslMode(false),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      m_pPowerModule(NULL),
      mHdrHint(false)
{
    // Populate the camera3_device_t handed back to the framework via
    // openCamera(struct hw_device_t **).
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_0;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    // NOTE(review): unchecked dereference -- assumes gCamCapability[cameraId]
    // was populated before construction; confirm with the HAL module code.
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    pthread_cond_init(&mRequestCond, NULL);
    mPendingRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    // Default request templates start out empty; filled in elsewhere and
    // released in the destructor.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

#ifdef HAS_MULTIMEDIA_HINTS
    // The power HAL module is optional: failure to load is logged, not fatal.
    if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
        ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
    }
#endif
}
    210 
    211 /*===========================================================================
    212  * FUNCTION   : ~QCamera3HardwareInterface
    213  *
    214  * DESCRIPTION: destructor of QCamera3HardwareInterface
    215  *
    216  * PARAMETERS : none
    217  *
    218  * RETURN     : none
    219  *==========================================================================*/
    220 QCamera3HardwareInterface::~QCamera3HardwareInterface()
    221 {
    222     ALOGV("%s: E", __func__);
    223     /* We need to stop all streams before deleting any stream */
    224         /*flush the metadata list*/
    225     if (!mStoredMetadataList.empty()) {
    226         for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
    227               m != mStoredMetadataList.end(); m++) {
    228             mMetadataChannel->bufDone(m->meta_buf);
    229             free(m->meta_buf);
    230             m = mStoredMetadataList.erase(m);
    231         }
    232     }
    233 
    234     // NOTE: 'camera3_stream_t *' objects are already freed at
    235     //        this stage by the framework
    236     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
    237         it != mStreamInfo.end(); it++) {
    238         QCamera3Channel *channel = (*it)->channel;
    239         if (channel) {
    240             channel->stop();
    241         }
    242     }
    243 
    244     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
    245         it != mStreamInfo.end(); it++) {
    246         QCamera3Channel *channel = (*it)->channel;
    247         if ((*it)->registered && (*it)->buffer_set.buffers) {
    248              delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
    249         }
    250         if (channel)
    251             delete channel;
    252         free (*it);
    253     }
    254 
    255     mPictureChannel = NULL;
    256 
    257     if (mJpegSettings != NULL) {
    258         free(mJpegSettings);
    259         mJpegSettings = NULL;
    260     }
    261 
    262     /* Clean up all channels */
    263     if (mCameraInitialized) {
    264         if (mMetadataChannel) {
    265             mMetadataChannel->stop();
    266             delete mMetadataChannel;
    267             mMetadataChannel = NULL;
    268         }
    269         deinitParameters();
    270     }
    271 
    272     if (mCameraOpened)
    273         closeCamera();
    274 
    275     for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
    276         if (mDefaultMetadata[i])
    277             free_camera_metadata(mDefaultMetadata[i]);
    278 
    279     pthread_cond_destroy(&mRequestCond);
    280 
    281     pthread_mutex_destroy(&mMutex);
    282     ALOGV("%s: X", __func__);
    283 }
    284 
    285 /*===========================================================================
    286  * FUNCTION   : openCamera
    287  *
    288  * DESCRIPTION: open camera
    289  *
    290  * PARAMETERS :
    291  *   @hw_device  : double ptr for camera device struct
    292  *
    293  * RETURN     : int32_t type of status
    294  *              NO_ERROR  -- success
    295  *              none-zero failure code
    296  *==========================================================================*/
    297 int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
    298 {
    299     int rc = 0;
    300     pthread_mutex_lock(&mCameraSessionLock);
    301     if (mCameraSessionActive) {
    302         ALOGE("%s: multiple simultaneous camera instance not supported", __func__);
    303         pthread_mutex_unlock(&mCameraSessionLock);
    304         return -EUSERS;
    305     }
    306 
    307     if (mCameraOpened) {
    308         *hw_device = NULL;
    309         return PERMISSION_DENIED;
    310     }
    311 
    312     rc = openCamera();
    313     if (rc == 0) {
    314         *hw_device = &mCameraDevice.common;
    315         mCameraSessionActive = 1;
    316     } else
    317         *hw_device = NULL;
    318 
    319 #ifdef HAS_MULTIMEDIA_HINTS
    320     if (rc == 0) {
    321         if (m_pPowerModule) {
    322             if (m_pPowerModule->powerHint) {
    323                 m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
    324                         (void *)"state=1");
    325             }
    326         }
    327     }
    328 #endif
    329     pthread_mutex_unlock(&mCameraSessionLock);
    330     return rc;
    331 }
    332 
    333 /*===========================================================================
    334  * FUNCTION   : openCamera
    335  *
    336  * DESCRIPTION: open camera
    337  *
    338  * PARAMETERS : none
    339  *
    340  * RETURN     : int32_t type of status
    341  *              NO_ERROR  -- success
    342  *              none-zero failure code
    343  *==========================================================================*/
    344 int QCamera3HardwareInterface::openCamera()
    345 {
    346     if (mCameraHandle) {
    347         ALOGE("Failure: Camera already opened");
    348         return ALREADY_EXISTS;
    349     }
    350     mCameraHandle = camera_open(mCameraId);
    351     if (!mCameraHandle) {
    352         ALOGE("camera_open failed.");
    353         return UNKNOWN_ERROR;
    354     }
    355 
    356     mCameraOpened = true;
    357 
    358     return NO_ERROR;
    359 }
    360 
    361 /*===========================================================================
    362  * FUNCTION   : closeCamera
    363  *
    364  * DESCRIPTION: close camera
    365  *
    366  * PARAMETERS : none
    367  *
    368  * RETURN     : int32_t type of status
    369  *              NO_ERROR  -- success
    370  *              none-zero failure code
    371  *==========================================================================*/
    372 int QCamera3HardwareInterface::closeCamera()
    373 {
    374     int rc = NO_ERROR;
    375 
    376     rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    377     mCameraHandle = NULL;
    378     mCameraOpened = false;
    379 
    380 #ifdef HAS_MULTIMEDIA_HINTS
    381     if (rc == NO_ERROR) {
    382         if (m_pPowerModule) {
    383             if (m_pPowerModule->powerHint) {
    384                 if(mHdrHint == true) {
    385                     m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
    386                             (void *)"state=3");
    387                     mHdrHint = false;
    388                 }
    389                 m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
    390                         (void *)"state=0");
    391             }
    392         }
    393     }
    394 #endif
    395 
    396     return rc;
    397 }
    398 
    399 /*===========================================================================
    400  * FUNCTION   : initialize
    401  *
    402  * DESCRIPTION: Initialize frameworks callback functions
    403  *
    404  * PARAMETERS :
    405  *   @callback_ops : callback function to frameworks
    406  *
    407  * RETURN     :
    408  *
    409  *==========================================================================*/
    410 int QCamera3HardwareInterface::initialize(
    411         const struct camera3_callback_ops *callback_ops)
    412 {
    413     int rc;
    414 
    415     pthread_mutex_lock(&mMutex);
    416 
    417     rc = initParameters();
    418     if (rc < 0) {
    419         ALOGE("%s: initParamters failed %d", __func__, rc);
    420        goto err1;
    421     }
    422     mCallbackOps = callback_ops;
    423 
    424     pthread_mutex_unlock(&mMutex);
    425     mCameraInitialized = true;
    426     return 0;
    427 
    428 err1:
    429     pthread_mutex_unlock(&mMutex);
    430     return rc;
    431 }
    432 
    433 /*===========================================================================
    434  * FUNCTION   : configureStreams
    435  *
    436  * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
    437  *              and output streams.
    438  *
    439  * PARAMETERS :
    440  *   @stream_list : streams to be configured
    441  *
    442  * RETURN     :
    443  *
    444  *==========================================================================*/
    445 int QCamera3HardwareInterface::configureStreams(
    446         camera3_stream_configuration_t *streamList)
    447 {
    448     int rc = 0;
    449     mIsZslMode = false;
    450 
    451     // Sanity check stream_list
    452     if (streamList == NULL) {
    453         ALOGE("%s: NULL stream configuration", __func__);
    454         return BAD_VALUE;
    455     }
    456     if (streamList->streams == NULL) {
    457         ALOGE("%s: NULL stream list", __func__);
    458         return BAD_VALUE;
    459     }
    460 
    461     if (streamList->num_streams < 1) {
    462         ALOGE("%s: Bad number of streams requested: %d", __func__,
    463                 streamList->num_streams);
    464         return BAD_VALUE;
    465     }
    466 
    467     /* first invalidate all the steams in the mStreamList
    468      * if they appear again, they will be validated */
    469     for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
    470             it != mStreamInfo.end(); it++) {
    471         QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
    472         channel->stop();
    473         (*it)->status = INVALID;
    474     }
    475     if (mMetadataChannel) {
    476         /* If content of mStreamInfo is not 0, there is metadata stream */
    477         mMetadataChannel->stop();
    478     }
    479 
    480 #ifdef HAS_MULTIMEDIA_HINTS
    481     if(mHdrHint == true) {
    482         if (m_pPowerModule) {
    483             if (m_pPowerModule->powerHint) {
    484                 m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
    485                         (void *)"state=3");
    486                 mHdrHint = false;
    487             }
    488         }
    489     }
    490 #endif
    491 
    492     pthread_mutex_lock(&mMutex);
    493 
    494     camera3_stream_t *inputStream = NULL;
    495     camera3_stream_t *jpegStream = NULL;
    496     cam_stream_size_info_t stream_config_info;
    497 
    498     for (size_t i = 0; i < streamList->num_streams; i++) {
    499         camera3_stream_t *newStream = streamList->streams[i];
    500         ALOGV("%s: newStream type = %d, stream format = %d stream size : %d x %d",
    501                 __func__, newStream->stream_type, newStream->format,
    502                  newStream->width, newStream->height);
    503         //if the stream is in the mStreamList validate it
    504         bool stream_exists = false;
    505         for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
    506                 it != mStreamInfo.end(); it++) {
    507             if ((*it)->stream == newStream) {
    508                 QCamera3Channel *channel =
    509                     (QCamera3Channel*)(*it)->stream->priv;
    510                 stream_exists = true;
    511                 (*it)->status = RECONFIGURE;
    512                 /*delete the channel object associated with the stream because
    513                   we need to reconfigure*/
    514                 delete channel;
    515                 (*it)->stream->priv = NULL;
    516                 (*it)->channel = NULL;
    517             }
    518         }
    519         if (!stream_exists) {
    520             //new stream
    521             stream_info_t* stream_info;
    522             stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
    523             stream_info->stream = newStream;
    524             stream_info->status = VALID;
    525             stream_info->registered = 0;
    526             stream_info->channel = NULL;
    527             mStreamInfo.push_back(stream_info);
    528         }
    529         if (newStream->stream_type == CAMERA3_STREAM_INPUT
    530                 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
    531             if (inputStream != NULL) {
    532                 ALOGE("%s: Multiple input streams requested!", __func__);
    533                 pthread_mutex_unlock(&mMutex);
    534                 return BAD_VALUE;
    535             }
    536             inputStream = newStream;
    537         }
    538         if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
    539             jpegStream = newStream;
    540         }
    541     }
    542     mInputStream = inputStream;
    543 
    544     /*clean up invalid streams*/
    545     for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
    546             it != mStreamInfo.end();) {
    547         if(((*it)->status) == INVALID){
    548             QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
    549             delete channel;
    550             delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
    551             free(*it);
    552             it = mStreamInfo.erase(it);
    553         } else {
    554             it++;
    555         }
    556     }
    557     if (mMetadataChannel) {
    558         delete mMetadataChannel;
    559         mMetadataChannel = NULL;
    560     }
    561 
    562     //Create metadata channel and initialize it
    563     mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
    564                     mCameraHandle->ops, captureResultCb,
    565                     &gCamCapability[mCameraId]->padding_info, this);
    566     if (mMetadataChannel == NULL) {
    567         ALOGE("%s: failed to allocate metadata channel", __func__);
    568         rc = -ENOMEM;
    569         pthread_mutex_unlock(&mMutex);
    570         return rc;
    571     }
    572     rc = mMetadataChannel->initialize();
    573     if (rc < 0) {
    574         ALOGE("%s: metadata channel initialization failed", __func__);
    575         delete mMetadataChannel;
    576         mMetadataChannel = NULL;
    577         pthread_mutex_unlock(&mMutex);
    578         return rc;
    579     }
    580 
    581     /* Allocate channel objects for the requested streams */
    582     for (size_t i = 0; i < streamList->num_streams; i++) {
    583         camera3_stream_t *newStream = streamList->streams[i];
    584         uint32_t stream_usage = newStream->usage;
    585         stream_config_info.stream_sizes[i].width = newStream->width;
    586         stream_config_info.stream_sizes[i].height = newStream->height;
    587         if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
    588             newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED && jpegStream){
    589             //for zsl stream the size is jpeg size
    590             stream_config_info.stream_sizes[i].width = jpegStream->width;
    591             stream_config_info.stream_sizes[i].height = jpegStream->height;
    592             stream_config_info.type[i] = CAM_STREAM_TYPE_SNAPSHOT;
    593         } else {
    594            //for non zsl streams find out the format
    595            switch (newStream->format) {
    596            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
    597               {
    598                  if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
    599                     stream_config_info.type[i] = CAM_STREAM_TYPE_VIDEO;
    600                  } else {
    601                     stream_config_info.type[i] = CAM_STREAM_TYPE_PREVIEW;
    602                  }
    603               }
    604               break;
    605            case HAL_PIXEL_FORMAT_YCbCr_420_888:
    606               stream_config_info.type[i] = CAM_STREAM_TYPE_CALLBACK;
    607 #ifdef HAS_MULTIMEDIA_HINTS
    608               if (m_pPowerModule) {
    609                   if (m_pPowerModule->powerHint) {
    610                       m_pPowerModule->powerHint(m_pPowerModule,
    611                           POWER_HINT_VIDEO_ENCODE, (void *)"state=2");
    612                       mHdrHint = true;
    613                   }
    614               }
    615 #endif
    616               break;
    617            case HAL_PIXEL_FORMAT_BLOB:
    618               stream_config_info.type[i] = CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT;
    619               break;
    620            default:
    621               stream_config_info.type[i] = CAM_STREAM_TYPE_DEFAULT;
    622               break;
    623            }
    624         }
    625         if (newStream->priv == NULL) {
    626             //New stream, construct channel
    627             switch (newStream->stream_type) {
    628             case CAMERA3_STREAM_INPUT:
    629                 newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
    630                 break;
    631             case CAMERA3_STREAM_BIDIRECTIONAL:
    632                 newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
    633                     GRALLOC_USAGE_HW_CAMERA_WRITE;
    634                 break;
    635             case CAMERA3_STREAM_OUTPUT:
    636                 /* For video encoding stream, set read/write rarely
    637                  * flag so that they may be set to un-cached */
    638                 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
    639                     newStream->usage =
    640                          (GRALLOC_USAGE_SW_READ_RARELY |
    641                          GRALLOC_USAGE_SW_WRITE_RARELY |
    642                          GRALLOC_USAGE_HW_CAMERA_WRITE);
    643                 else
    644                     newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
    645                 break;
    646             default:
    647                 ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
    648                 break;
    649             }
    650 
    651             if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
    652                     newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
    653                 QCamera3Channel *channel;
    654                 switch (newStream->format) {
    655                 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
    656                 case HAL_PIXEL_FORMAT_YCbCr_420_888:
    657                     newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
    658                     if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
    659                         jpegStream) {
    660                         uint32_t width = jpegStream->width;
    661                         uint32_t height = jpegStream->height;
    662                         mIsZslMode = true;
    663                         channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
    664                             mCameraHandle->ops, captureResultCb,
    665                             &gCamCapability[mCameraId]->padding_info, this, newStream,
    666                             width, height);
    667                     } else
    668                         channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
    669                             mCameraHandle->ops, captureResultCb,
    670                             &gCamCapability[mCameraId]->padding_info, this, newStream);
    671                     if (channel == NULL) {
    672                         ALOGE("%s: allocation of channel failed", __func__);
    673                         pthread_mutex_unlock(&mMutex);
    674                         return -ENOMEM;
    675                     }
    676 
    677                     newStream->priv = channel;
    678                     break;
    679                 case HAL_PIXEL_FORMAT_BLOB:
    680                     newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
    681                     mPictureChannel = new QCamera3PicChannel(mCameraHandle->camera_handle,
    682                             mCameraHandle->ops, captureResultCb,
    683                             &gCamCapability[mCameraId]->padding_info, this, newStream);
    684                     if (mPictureChannel == NULL) {
    685                         ALOGE("%s: allocation of channel failed", __func__);
    686                         pthread_mutex_unlock(&mMutex);
    687                         return -ENOMEM;
    688                     }
    689                     newStream->priv = (QCamera3Channel*)mPictureChannel;
    690                     break;
    691 
    692                 //TODO: Add support for app consumed format?
    693                 default:
    694                     ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
    695                     break;
    696                 }
    697             }
    698 
    699             for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
    700                     it != mStreamInfo.end(); it++) {
    701                 if ((*it)->stream == newStream) {
    702                     (*it)->channel = (QCamera3Channel*) newStream->priv;
    703                     break;
    704                 }
    705             }
    706         } else {
    707             // Channel already exists for this stream
    708             // Do nothing for now
    709         }
    710     }
    711 
    712     int32_t hal_version = CAM_HAL_V3;
    713     stream_config_info.num_streams = streamList->num_streams;
    714 
    715     // settings/parameters don't carry over for new configureStreams
    716     memset(mParameters, 0, sizeof(parm_buffer_t));
    717 
    718     mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
    719     AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
    720                 sizeof(hal_version), &hal_version);
    721 
    722     AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_INFO,
    723                 sizeof(stream_config_info), &stream_config_info);
    724 
    725     mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
    726 
    727     /*For the streams to be reconfigured we need to register the buffers
    728       since the framework wont*/
    729     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
    730             it != mStreamInfo.end(); it++) {
    731         if ((*it)->status == RECONFIGURE) {
    732             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
    733             /*only register buffers for streams that have already been
    734               registered*/
    735             if ((*it)->registered) {
    736                 rc = channel->registerBuffers((*it)->buffer_set.num_buffers,
    737                         (*it)->buffer_set.buffers);
    738                 if (rc != NO_ERROR) {
    739                     ALOGE("%s: Failed to register the buffers of old stream,\
    740                             rc = %d", __func__, rc);
    741                 }
    742                 ALOGV("%s: channel %p has %d buffers",
    743                         __func__, channel, (*it)->buffer_set.num_buffers);
    744             }
    745         }
    746 
    747         ssize_t index = mPendingBuffersMap.indexOfKey((*it)->stream);
    748         if (index == NAME_NOT_FOUND) {
    749             mPendingBuffersMap.add((*it)->stream, 0);
    750         } else {
    751             mPendingBuffersMap.editValueAt(index) = 0;
    752         }
    753     }
    754 
    755     /* Initialize mPendingRequestInfo and mPendnigBuffersMap */
    756     mPendingRequestsList.clear();
    757 
    758     mPendingFrameDropList.clear();
    759 
    760     /*flush the metadata list*/
    761     if (!mStoredMetadataList.empty()) {
    762         for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
    763               m != mStoredMetadataList.end(); m++) {
    764             mMetadataChannel->bufDone(m->meta_buf);
    765             free(m->meta_buf);
    766             m = mStoredMetadataList.erase(m);
    767         }
    768     }
    769 
    770     mFirstRequest = true;
    771 
    772     //Get min frame duration for this streams configuration
    773     deriveMinFrameDuration();
    774 
    775     pthread_mutex_unlock(&mMutex);
    776     return rc;
    777 }
    778 
    779 /*===========================================================================
    780  * FUNCTION   : validateCaptureRequest
    781  *
    782  * DESCRIPTION: validate a capture request from camera service
    783  *
    784  * PARAMETERS :
    785  *   @request : request from framework to process
    786  *
    787  * RETURN     :
    788  *
    789  *==========================================================================*/
    790 int QCamera3HardwareInterface::validateCaptureRequest(
    791                     camera3_capture_request_t *request)
    792 {
    793     ssize_t idx = 0;
    794     const camera3_stream_buffer_t *b;
    795     CameraMetadata meta;
    796 
    797     /* Sanity check the request */
    798     if (request == NULL) {
    799         ALOGE("%s: NULL capture request", __func__);
    800         return BAD_VALUE;
    801     }
    802 
    803     uint32_t frameNumber = request->frame_number;
    804     if (request->input_buffer != NULL &&
    805             request->input_buffer->stream != mInputStream) {
    806         ALOGE("%s: Request %d: Input buffer not from input stream!",
    807                 __FUNCTION__, frameNumber);
    808         return BAD_VALUE;
    809     }
    810     if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
    811         ALOGE("%s: Request %d: No output buffers provided!",
    812                 __FUNCTION__, frameNumber);
    813         return BAD_VALUE;
    814     }
    815     if (request->input_buffer != NULL) {
    816         b = request->input_buffer;
    817         QCamera3Channel *channel =
    818             static_cast<QCamera3Channel*>(b->stream->priv);
    819         if (channel == NULL) {
    820             ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
    821                     __func__, frameNumber, idx);
    822             return BAD_VALUE;
    823         }
    824         if (b->status != CAMERA3_BUFFER_STATUS_OK) {
    825             ALOGE("%s: Request %d: Buffer %d: Status not OK!",
    826                     __func__, frameNumber, idx);
    827             return BAD_VALUE;
    828         }
    829         if (b->release_fence != -1) {
    830             ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
    831                     __func__, frameNumber, idx);
    832             return BAD_VALUE;
    833         }
    834         if (b->buffer == NULL) {
    835             ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
    836                     __func__, frameNumber, idx);
    837             return BAD_VALUE;
    838         }
    839     }
    840 
    841     // Validate all buffers
    842     b = request->output_buffers;
    843     do {
    844         QCamera3Channel *channel =
    845                 static_cast<QCamera3Channel*>(b->stream->priv);
    846         if (channel == NULL) {
    847             ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
    848                     __func__, frameNumber, idx);
    849             return BAD_VALUE;
    850         }
    851         if (b->status != CAMERA3_BUFFER_STATUS_OK) {
    852             ALOGE("%s: Request %d: Buffer %d: Status not OK!",
    853                     __func__, frameNumber, idx);
    854             return BAD_VALUE;
    855         }
    856         if (b->release_fence != -1) {
    857             ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
    858                     __func__, frameNumber, idx);
    859             return BAD_VALUE;
    860         }
    861         if (b->buffer == NULL) {
    862             ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
    863                     __func__, frameNumber, idx);
    864             return BAD_VALUE;
    865         }
    866         idx++;
    867         b = request->output_buffers + idx;
    868     } while (idx < (ssize_t)request->num_output_buffers);
    869 
    870     return NO_ERROR;
    871 }
    872 
    873 /*===========================================================================
    874  * FUNCTION   : deriveMinFrameDuration
    875  *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
    877  *              on currently configured streams.
    878  *
    879  * PARAMETERS : NONE
    880  *
    881  * RETURN     : NONE
    882  *
    883  *==========================================================================*/
    884 void QCamera3HardwareInterface::deriveMinFrameDuration()
    885 {
    886     int32_t maxJpegDimension, maxProcessedDimension;
    887 
    888     maxJpegDimension = 0;
    889     maxProcessedDimension = 0;
    890 
    891     // Figure out maximum jpeg, processed, and raw dimensions
    892     for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
    893         it != mStreamInfo.end(); it++) {
    894 
    895         // Input stream doesn't have valid stream_type
    896         if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
    897             continue;
    898 
    899         int32_t dimension = (*it)->stream->width * (*it)->stream->height;
    900         if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
    901             if (dimension > maxJpegDimension)
    902                 maxJpegDimension = dimension;
    903         } else if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_SENSOR) {
    904             if (dimension > maxProcessedDimension)
    905                 maxProcessedDimension = dimension;
    906         }
    907     }
    908 
    909     //Assume all jpeg dimensions are in processed dimensions.
    910     if (maxJpegDimension > maxProcessedDimension)
    911         maxProcessedDimension = maxJpegDimension;
    912 
    913     //Find minimum durations for processed, jpeg, and raw
    914     mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration;
    915     for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
    916         if (maxProcessedDimension ==
    917             gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
    918             gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
    919             mMinProcessedFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
    920             mMinJpegFrameDuration = gCamCapability[mCameraId]->jpeg_min_duration[i];
    921             break;
    922         }
    923     }
    924 }
    925 
    926 /*===========================================================================
    927  * FUNCTION   : getMinFrameDuration
    928  *
 * DESCRIPTION: get minimum frame duration based on the current maximum frame durations
    930  *              and current request configuration.
    931  *
 * PARAMETERS : @request: request sent by the frameworks
    933  *
 * RETURN     : min frame duration for a particular request
    935  *
    936  *==========================================================================*/
    937 int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
    938 {
    939     bool hasJpegStream = false;
    940     for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
    941         const camera3_stream_t *stream = request->output_buffers[i].stream;
    942         if (stream->format == HAL_PIXEL_FORMAT_BLOB)
    943             hasJpegStream = true;
    944     }
    945 
    946     if (!hasJpegStream)
    947         return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
    948     else
    949         return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
    950 }
    951 
    952 /*===========================================================================
    953  * FUNCTION   : handleMetadataWithLock
    954  *
    955  * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
    956  *
    957  * PARAMETERS : @metadata_buf: metadata buffer
    958  *
    959  * RETURN     :
    960  *
    961  *==========================================================================*/
    962 void QCamera3HardwareInterface::handleMetadataWithLock(
    963     mm_camera_super_buf_t *metadata_buf)
    964 {
    965     metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    966     int32_t frame_number_valid = *(int32_t *)
    967         POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    968     uint32_t pending_requests = *(uint32_t *)POINTER_OF(
    969         CAM_INTF_META_PENDING_REQUESTS, metadata);
    970     uint32_t frame_number = *(uint32_t *)
    971         POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
    972     const struct timeval *tv = (const struct timeval *)
    973         POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    974     nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
    975         tv->tv_usec * NSEC_PER_USEC;
    976     cam_frame_dropped_t cam_frame_drop = *(cam_frame_dropped_t *)
    977         POINTER_OF(CAM_INTF_META_FRAME_DROPPED, metadata);
    978 
    979     int32_t urgent_frame_number_valid = *(int32_t *)
    980         POINTER_OF(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    981     uint32_t urgent_frame_number = *(uint32_t *)
    982         POINTER_OF(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
    983 
    984     if (urgent_frame_number_valid) {
    985         ALOGV("%s: valid urgent frame_number = %d, capture_time = %lld",
    986           __func__, urgent_frame_number, capture_time);
    987 
    988         //Recieved an urgent Frame Number, handle it
    989         //using HAL3.1 quirk for partial results
    990         for (List<PendingRequestInfo>::iterator i =
    991             mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
    992             camera3_notify_msg_t notify_msg;
    993             ALOGV("%s: Iterator Frame = %d urgent frame = %d",
    994                 __func__, i->frame_number, urgent_frame_number);
    995 
    996             if (i->frame_number < urgent_frame_number &&
    997                 i->bNotified == 0) {
    998                 notify_msg.type = CAMERA3_MSG_SHUTTER;
    999                 notify_msg.message.shutter.frame_number = i->frame_number;
   1000                 notify_msg.message.shutter.timestamp = capture_time -
   1001                     (urgent_frame_number - i->frame_number) * NSEC_PER_33MSEC;
   1002                 mCallbackOps->notify(mCallbackOps, &notify_msg);
   1003                 i->timestamp = notify_msg.message.shutter.timestamp;
   1004                 i->bNotified = 1;
   1005                 ALOGV("%s: Dummy notification !!!! notify frame_number = %d, capture_time = %lld",
   1006                     __func__, i->frame_number, notify_msg.message.shutter.timestamp);
   1007             }
   1008 
   1009             if (i->frame_number == urgent_frame_number) {
   1010 
   1011                 camera3_capture_result_t result;
   1012 
   1013                 // Send shutter notify to frameworks
   1014                 notify_msg.type = CAMERA3_MSG_SHUTTER;
   1015                 notify_msg.message.shutter.frame_number = i->frame_number;
   1016                 notify_msg.message.shutter.timestamp = capture_time;
   1017                 mCallbackOps->notify(mCallbackOps, &notify_msg);
   1018 
   1019                 i->timestamp = capture_time;
   1020                 i->bNotified = 1;
   1021 
   1022                 // Extract 3A metadata
   1023                 result.result =
   1024                     translateCbUrgentMetadataToResultMetadata(metadata);
   1025                 // Populate metadata result
   1026                 result.frame_number = urgent_frame_number;
   1027                 result.num_output_buffers = 0;
   1028                 result.output_buffers = NULL;
   1029                 mCallbackOps->process_capture_result(mCallbackOps, &result);
   1030                 ALOGV("%s: urgent frame_number = %d, capture_time = %lld",
   1031                      __func__, result.frame_number, capture_time);
   1032                 free_camera_metadata((camera_metadata_t *)result.result);
   1033                 break;
   1034             }
   1035         }
   1036     }
   1037 
   1038     if (!frame_number_valid) {
   1039         ALOGV("%s: Not a valid normal frame number, used as SOF only", __func__);
   1040         mMetadataChannel->bufDone(metadata_buf);
   1041         free(metadata_buf);
   1042         goto done_metadata;
   1043     }
   1044     ALOGV("%s: valid normal frame_number = %d, capture_time = %lld", __func__,
   1045             frame_number, capture_time);
   1046 
   1047     // Go through the pending requests info and send shutter/results to frameworks
   1048     for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
   1049         i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
   1050         camera3_capture_result_t result;
   1051         ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);
   1052 
   1053         // Flush out all entries with less or equal frame numbers.
   1054         mPendingRequest--;
   1055 
   1056         // Check whether any stream buffer corresponding to this is dropped or not
   1057         // If dropped, then notify ERROR_BUFFER for the corresponding stream and
   1058         // buffer with CAMERA3_BUFFER_STATUS_ERROR
   1059         if (cam_frame_drop.frame_dropped) {
   1060             camera3_notify_msg_t notify_msg;
   1061             for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
   1062                     j != i->buffers.end(); j++) {
   1063                 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
   1064                 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
   1065                 for (uint32_t k=0; k<cam_frame_drop.cam_stream_ID.num_streams; k++) {
   1066                   if (streamID == cam_frame_drop.cam_stream_ID.streamID[k]) {
   1067                       // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
   1068                       ALOGV("%s: Start of reporting error frame#=%d, streamID=%d",
   1069                              __func__, i->frame_number, streamID);
   1070                       notify_msg.type = CAMERA3_MSG_ERROR;
   1071                       notify_msg.message.error.frame_number = i->frame_number;
   1072                       notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
   1073                       notify_msg.message.error.error_stream = j->stream;
   1074                       mCallbackOps->notify(mCallbackOps, &notify_msg);
   1075                       ALOGV("%s: End of reporting error frame#=%d, streamID=%d",
   1076                              __func__, i->frame_number, streamID);
   1077                       PendingFrameDropInfo PendingFrameDrop;
   1078                       PendingFrameDrop.frame_number=i->frame_number;
   1079                       PendingFrameDrop.stream_ID = streamID;
   1080                       // Add the Frame drop info to mPendingFrameDropList
   1081                       mPendingFrameDropList.push_back(PendingFrameDrop);
   1082                   }
   1083                 }
   1084             }
   1085         }
   1086 
   1087         // Send empty metadata with already filled buffers for dropped metadata
   1088         // and send valid metadata with already filled buffers for current metadata
   1089         if (i->frame_number < frame_number) {
   1090             CameraMetadata dummyMetadata;
   1091             dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
   1092                     &i->timestamp, 1);
   1093             dummyMetadata.update(ANDROID_REQUEST_ID,
   1094                     &(i->request_id), 1);
   1095             result.result = dummyMetadata.release();
   1096         } else {
   1097             result.result = translateCbMetadataToResultMetadata(metadata,
   1098                     i->timestamp, i->request_id, i->blob_request,
   1099                     &(i->input_jpeg_settings));
   1100             if (mIsZslMode) {
   1101                 int found_metadata = 0;
   1102                 //for ZSL case store the metadata buffer and corresp. ZSL handle ptr
   1103                 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
   1104                     j != i->buffers.end(); j++) {
   1105                     if (j->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
   1106                         //check if corresp. zsl already exists in the stored metadata list
   1107                         for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
   1108                                 m != mStoredMetadataList.begin(); m++) {
   1109                             if (m->frame_number == frame_number) {
   1110                                 m->meta_buf = metadata_buf;
   1111                                 found_metadata = 1;
   1112                                 break;
   1113                             }
   1114                         }
   1115                         if (!found_metadata) {
   1116                             MetadataBufferInfo store_meta_info;
   1117                             store_meta_info.meta_buf = metadata_buf;
   1118                             store_meta_info.frame_number = frame_number;
   1119                             mStoredMetadataList.push_back(store_meta_info);
   1120                             found_metadata = 1;
   1121                         }
   1122                     }
   1123                 }
   1124                 if (!found_metadata) {
   1125                     if (!i->input_buffer_present && i->blob_request) {
   1126                         //livesnapshot or fallback non-zsl snapshot case
   1127                         for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
   1128                                 j != i->buffers.end(); j++){
   1129                             if (j->stream->stream_type == CAMERA3_STREAM_OUTPUT &&
   1130                                 j->stream->format == HAL_PIXEL_FORMAT_BLOB) {
   1131                                 mPictureChannel->queueMetadata(metadata_buf,mMetadataChannel,true);
   1132                                 break;
   1133                             }
   1134                         }
   1135                     } else {
   1136                         //return the metadata immediately
   1137                         mMetadataChannel->bufDone(metadata_buf);
   1138                         free(metadata_buf);
   1139                     }
   1140                 }
   1141             } else if (!mIsZslMode && i->blob_request) {
   1142                 //If it is a blob request then send the metadata to the picture channel
   1143                 mPictureChannel->queueMetadata(metadata_buf,mMetadataChannel,true);
   1144             } else {
   1145                 // Return metadata buffer
   1146                 mMetadataChannel->bufDone(metadata_buf);
   1147                 free(metadata_buf);
   1148             }
   1149         }
   1150         if (!result.result) {
   1151             ALOGE("%s: metadata is NULL", __func__);
   1152         }
   1153         result.frame_number = i->frame_number;
   1154         result.num_output_buffers = 0;
   1155         result.output_buffers = NULL;
   1156         for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
   1157                     j != i->buffers.end(); j++) {
   1158             if (j->buffer) {
   1159                 result.num_output_buffers++;
   1160             }
   1161         }
   1162 
   1163         if (result.num_output_buffers > 0) {
   1164             camera3_stream_buffer_t *result_buffers =
   1165                 new camera3_stream_buffer_t[result.num_output_buffers];
   1166             if (!result_buffers) {
   1167                 ALOGE("%s: Fatal error: out of memory", __func__);
   1168             }
   1169             size_t result_buffers_idx = 0;
   1170             for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
   1171                     j != i->buffers.end(); j++) {
   1172                 if (j->buffer) {
   1173                     for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
   1174                             m != mPendingFrameDropList.end(); m++) {
   1175                         QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
   1176                         uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
   1177                         if((m->stream_ID==streamID) && (m->frame_number==frame_number)) {
   1178                             j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
   1179                             ALOGV("%s: Stream STATUS_ERROR frame_number=%d, streamID=%d",
   1180                                   __func__, frame_number, streamID);
   1181                             m = mPendingFrameDropList.erase(m);
   1182                             break;
   1183                         }
   1184                     }
   1185                     result_buffers[result_buffers_idx++] = *(j->buffer);
   1186                     free(j->buffer);
   1187                     j->buffer = NULL;
   1188                     mPendingBuffersMap.editValueFor(j->stream)--;
   1189                 }
   1190             }
   1191             result.output_buffers = result_buffers;
   1192 
   1193             mCallbackOps->process_capture_result(mCallbackOps, &result);
   1194             ALOGV("%s: meta frame_number = %d, capture_time = %lld",
   1195                     __func__, result.frame_number, i->timestamp);
   1196             free_camera_metadata((camera_metadata_t *)result.result);
   1197             delete[] result_buffers;
   1198         } else {
   1199             mCallbackOps->process_capture_result(mCallbackOps, &result);
   1200             ALOGV("%s: meta frame_number = %d, capture_time = %lld",
   1201                         __func__, result.frame_number, i->timestamp);
   1202             free_camera_metadata((camera_metadata_t *)result.result);
   1203         }
   1204         // erase the element from the list
   1205         i = mPendingRequestsList.erase(i);
   1206     }
   1207 
   1208 done_metadata:
   1209     if (!pending_requests)
   1210         unblockRequestIfNecessary();
   1211 
   1212 }
   1213 
   1214 /*===========================================================================
   1215  * FUNCTION   : handleBufferWithLock
   1216  *
   1217  * DESCRIPTION: Handles image buffer callback with mMutex lock held.
   1218  *
   1219  * PARAMETERS : @buffer: image buffer for the callback
   1220  *              @frame_number: frame number of the image buffer
   1221  *
   1222  * RETURN     :
   1223  *
   1224  *==========================================================================*/
void QCamera3HardwareInterface::handleBufferWithLock(
    camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    // If the frame number doesn't exist in the pending request list,
    // directly send the buffer to the frameworks, and update pending buffers map
    // Otherwise, book-keep the buffer.
    List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i == mPendingRequestsList.end()) {
        // No pending request for this frame number: its metadata result has
        // already been dispatched, so return the buffer on its own.
        // Verify all pending requests frame_numbers are greater
        for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
                j != mPendingRequestsList.end(); j++) {
            if (j->frame_number < frame_number) {
                ALOGE("%s: Error: pending frame number %d is smaller than %d",
                        __func__, j->frame_number, frame_number);
            }
        }
        // Build a buffer-only capture result (no metadata attached).
        camera3_capture_result_t result;
        result.result = NULL;
        result.frame_number = frame_number;
        result.num_output_buffers = 1;
        // If this (frame, stream) pair was recorded as dropped, flag the
        // buffer with CAMERA3_BUFFER_STATUS_ERROR and consume the record.
        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                m != mPendingFrameDropList.end(); m++) {
            QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
            if((m->stream_ID==streamID) && (m->frame_number==frame_number)) {
                buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                ALOGV("%s: Stream STATUS_ERROR frame_number=%d, streamID=%d",
                        __func__, frame_number, streamID);
                m = mPendingFrameDropList.erase(m);
                break;
            }
        }
        result.output_buffers = buffer;
        ALOGV("%s: result frame_number = %d, buffer = %p",
                __func__, frame_number, buffer);
        // One fewer buffer outstanding on this stream.
        mPendingBuffersMap.editValueFor(buffer->stream)--;
        if (buffer->stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
            // ZSL (bidirectional) stream: pair the buffer handle with any
            // stored metadata entry for this frame, creating the entry if
            // the metadata has not arrived yet.
            int found = 0;
            for (List<MetadataBufferInfo>::iterator k = mStoredMetadataList.begin();
                k != mStoredMetadataList.end(); k++) {
                if (k->frame_number == frame_number) {
                    k->zsl_buf_hdl = buffer->buffer;
                    found = 1;
                    break;
                }
            }
            if (!found) {
                MetadataBufferInfo meta_info;
                meta_info.frame_number = frame_number;
                meta_info.zsl_buf_hdl = buffer->buffer;
                mStoredMetadataList.push_back(meta_info);
            }
        }
        mCallbackOps->process_capture_result(mCallbackOps, &result);
    } else {
        // Request is still pending: cache a copy of the buffer so it can be
        // sent together with the metadata when that arrives.
        for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                j != i->buffers.end(); j++) {
            if (j->stream == buffer->stream) {
                if (j->buffer != NULL) {
                    // Duplicate buffer callback for the same stream/frame.
                    ALOGE("%s: Error: buffer is already set", __func__);
                } else {
                    // The malloc'd copy is freed by handleMetadataWithLock
                    // once the combined result is dispatched.
                    j->buffer = (camera3_stream_buffer_t *)malloc(
                            sizeof(camera3_stream_buffer_t));
                    *(j->buffer) = *buffer;
                    ALOGV("%s: cache buffer %p at result frame_number %d",
                            __func__, buffer, frame_number);
                }
            }
        }
    }
}
   1299 
   1300 /*===========================================================================
   1301  * FUNCTION   : unblockRequestIfNecessary
   1302  *
   1303  * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
   1304  *              that mMutex is held when this function is called.
   1305  *
   1306  * PARAMETERS :
   1307  *
   1308  * RETURN     :
   1309  *
   1310  *==========================================================================*/
   1311 void QCamera3HardwareInterface::unblockRequestIfNecessary()
   1312 {
   1313     bool max_buffers_dequeued = false;
   1314     for (size_t i = 0; i < mPendingBuffersMap.size(); i++) {
   1315         const camera3_stream_t *stream = mPendingBuffersMap.keyAt(i);
   1316         uint32_t queued_buffers = mPendingBuffersMap.valueAt(i);
   1317         if (queued_buffers == stream->max_buffers) {
   1318             max_buffers_dequeued = true;
   1319             break;
   1320         }
   1321     }
   1322     if (!max_buffers_dequeued) {
   1323         // Unblock process_capture_request
   1324         pthread_cond_signal(&mRequestCond);
   1325     }
   1326 }
   1327 
   1328 /*===========================================================================
   1329  * FUNCTION   : registerStreamBuffers
   1330  *
   1331  * DESCRIPTION: Register buffers for a given stream with the HAL device.
   1332  *
   1333  * PARAMETERS :
   1334  *   @stream_list : streams to be configured
   1335  *
   1336  * RETURN     :
   1337  *
   1338  *==========================================================================*/
   1339 int QCamera3HardwareInterface::registerStreamBuffers(
   1340         const camera3_stream_buffer_set_t *buffer_set)
   1341 {
   1342     int rc = 0;
   1343 
   1344     pthread_mutex_lock(&mMutex);
   1345 
   1346     if (buffer_set == NULL) {
   1347         ALOGE("%s: Invalid buffer_set parameter.", __func__);
   1348         pthread_mutex_unlock(&mMutex);
   1349         return -EINVAL;
   1350     }
   1351     if (buffer_set->stream == NULL) {
   1352         ALOGE("%s: Invalid stream parameter.", __func__);
   1353         pthread_mutex_unlock(&mMutex);
   1354         return -EINVAL;
   1355     }
   1356     if (buffer_set->num_buffers < 1) {
   1357         ALOGE("%s: Invalid num_buffers %d.", __func__, buffer_set->num_buffers);
   1358         pthread_mutex_unlock(&mMutex);
   1359         return -EINVAL;
   1360     }
   1361     if (buffer_set->buffers == NULL) {
   1362         ALOGE("%s: Invalid buffers parameter.", __func__);
   1363         pthread_mutex_unlock(&mMutex);
   1364         return -EINVAL;
   1365     }
   1366 
   1367     camera3_stream_t *stream = buffer_set->stream;
   1368     QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
   1369 
   1370     //set the buffer_set in the mStreamInfo array
   1371     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   1372             it != mStreamInfo.end(); it++) {
   1373         if ((*it)->stream == stream) {
   1374             uint32_t numBuffers = buffer_set->num_buffers;
   1375             (*it)->buffer_set.stream = buffer_set->stream;
   1376             (*it)->buffer_set.num_buffers = numBuffers;
   1377             (*it)->buffer_set.buffers = new buffer_handle_t*[numBuffers];
   1378             if ((*it)->buffer_set.buffers == NULL) {
   1379                 ALOGE("%s: Failed to allocate buffer_handle_t*", __func__);
   1380                 pthread_mutex_unlock(&mMutex);
   1381                 return -ENOMEM;
   1382             }
   1383             for (size_t j = 0; j < numBuffers; j++){
   1384                 (*it)->buffer_set.buffers[j] = buffer_set->buffers[j];
   1385             }
   1386             (*it)->registered = 1;
   1387         }
   1388     }
   1389     rc = channel->registerBuffers(buffer_set->num_buffers, buffer_set->buffers);
   1390     if (rc < 0) {
   1391         ALOGE("%s: registerBUffers for stream %p failed", __func__, stream);
   1392         pthread_mutex_unlock(&mMutex);
   1393         return -ENODEV;
   1394     }
   1395 
   1396     pthread_mutex_unlock(&mMutex);
   1397     return NO_ERROR;
   1398 }
   1399 
   1400 /*===========================================================================
   1401  * FUNCTION   : processCaptureRequest
   1402  *
   1403  * DESCRIPTION: process a capture request from camera service
   1404  *
   1405  * PARAMETERS :
   1406  *   @request : request from framework to process
   1407  *
   1408  * RETURN     :
   1409  *
   1410  *==========================================================================*/
   1411 int QCamera3HardwareInterface::processCaptureRequest(
   1412                     camera3_capture_request_t *request)
   1413 {
   1414     int rc = NO_ERROR;
   1415     int32_t request_id;
   1416     CameraMetadata meta;
   1417     MetadataBufferInfo reproc_meta;
   1418     int queueMetadata = 0;
   1419 
   1420     pthread_mutex_lock(&mMutex);
   1421 
   1422     rc = validateCaptureRequest(request);
   1423     if (rc != NO_ERROR) {
   1424         ALOGE("%s: incoming request is not valid", __func__);
   1425         pthread_mutex_unlock(&mMutex);
   1426         return rc;
   1427     }
   1428 
   1429     meta = request->settings;
   1430 
   1431     // For first capture request, send capture intent, and
   1432     // stream on all streams
   1433     if (mFirstRequest) {
   1434 
   1435         if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
   1436             int32_t hal_version = CAM_HAL_V3;
   1437             uint8_t captureIntent =
   1438                 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
   1439 
   1440             memset(mParameters, 0, sizeof(parm_buffer_t));
   1441             mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
   1442             AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
   1443                 sizeof(hal_version), &hal_version);
   1444             AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
   1445                 sizeof(captureIntent), &captureIntent);
   1446             mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
   1447                 mParameters);
   1448         }
   1449 
   1450         mMetadataChannel->start();
   1451         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   1452             it != mStreamInfo.end(); it++) {
   1453             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
   1454             channel->start();
   1455         }
   1456     }
   1457 
   1458     uint32_t frameNumber = request->frame_number;
   1459     cam_stream_ID_t streamID;
   1460 
   1461     if (meta.exists(ANDROID_REQUEST_ID)) {
   1462         request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
   1463         mCurrentRequestId = request_id;
   1464         ALOGV("%s: Received request with id: %d",__func__, request_id);
   1465     } else if (mFirstRequest || mCurrentRequestId == -1){
   1466         ALOGE("%s: Unable to find request id field, \
   1467                 & no previous id available", __func__);
   1468         return NAME_NOT_FOUND;
   1469     } else {
   1470         ALOGV("%s: Re-using old request id", __func__);
   1471         request_id = mCurrentRequestId;
   1472     }
   1473 
   1474     ALOGV("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
   1475                                     __func__, __LINE__,
   1476                                     request->num_output_buffers,
   1477                                     request->input_buffer,
   1478                                     frameNumber);
   1479     // Acquire all request buffers first
   1480     int blob_request = 0;
   1481     for (size_t i = 0; i < request->num_output_buffers; i++) {
   1482         const camera3_stream_buffer_t& output = request->output_buffers[i];
   1483         QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
   1484         sp<Fence> acquireFence = new Fence(output.acquire_fence);
   1485 
   1486         if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
   1487         //Call function to store local copy of jpeg data for encode params.
   1488             blob_request = 1;
   1489             rc = getJpegSettings(request->settings);
   1490             if (rc < 0) {
   1491                 ALOGE("%s: failed to get jpeg parameters", __func__);
   1492                 pthread_mutex_unlock(&mMutex);
   1493                 return rc;
   1494             }
   1495         }
   1496 
   1497         rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
   1498         if (rc != OK) {
   1499             ALOGE("%s: fence wait failed %d", __func__, rc);
   1500             pthread_mutex_unlock(&mMutex);
   1501             return rc;
   1502         }
   1503         streamID.streamID[i]=channel->getStreamID(channel->getStreamTypeMask());
   1504     }
   1505     streamID.num_streams=request->num_output_buffers;
   1506 
   1507     rc = setFrameParameters(request, streamID);
   1508     if (rc < 0) {
   1509         ALOGE("%s: fail to set frame parameters", __func__);
   1510         pthread_mutex_unlock(&mMutex);
   1511         return rc;
   1512     }
   1513 
   1514     /* Update pending request list and pending buffers map */
   1515     PendingRequestInfo pendingRequest;
   1516     pendingRequest.frame_number = frameNumber;
   1517     pendingRequest.num_buffers = request->num_output_buffers;
   1518     pendingRequest.request_id = request_id;
   1519     pendingRequest.blob_request = blob_request;
   1520     pendingRequest.bNotified = 0;
   1521     if (blob_request)
   1522         pendingRequest.input_jpeg_settings = *mJpegSettings;
   1523     pendingRequest.input_buffer_present = (request->input_buffer != NULL)? 1 : 0;
   1524 
   1525     for (size_t i = 0; i < request->num_output_buffers; i++) {
   1526         RequestedBufferInfo requestedBuf;
   1527         requestedBuf.stream = request->output_buffers[i].stream;
   1528         requestedBuf.buffer = NULL;
   1529         pendingRequest.buffers.push_back(requestedBuf);
   1530 
   1531         mPendingBuffersMap.editValueFor(requestedBuf.stream)++;
   1532     }
   1533     mPendingRequestsList.push_back(pendingRequest);
   1534 
   1535     // Notify metadata channel we receive a request
   1536     mMetadataChannel->request(NULL, frameNumber);
   1537 
   1538     // Call request on other streams
   1539     for (size_t i = 0; i < request->num_output_buffers; i++) {
   1540         const camera3_stream_buffer_t& output = request->output_buffers[i];
   1541         QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
   1542         mm_camera_buf_def_t *pInputBuffer = NULL;
   1543 
   1544         if (channel == NULL) {
   1545             ALOGE("%s: invalid channel pointer for stream", __func__);
   1546             continue;
   1547         }
   1548 
   1549         if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
   1550             QCamera3RegularChannel* inputChannel = NULL;
   1551             if(request->input_buffer != NULL){
   1552                 //Try to get the internal format
   1553                 inputChannel = (QCamera3RegularChannel*)
   1554                     request->input_buffer->stream->priv;
   1555                 if(inputChannel == NULL ){
   1556                     ALOGE("%s: failed to get input channel handle", __func__);
   1557                 } else {
   1558                     pInputBuffer =
   1559                         inputChannel->getInternalFormatBuffer(
   1560                                 request->input_buffer->buffer);
   1561                     ALOGD("%s: Input buffer dump",__func__);
   1562                     ALOGD("Stream id: %d", pInputBuffer->stream_id);
   1563                     ALOGD("streamtype:%d", pInputBuffer->stream_type);
   1564                     ALOGD("frame len:%d", pInputBuffer->frame_len);
   1565                     ALOGD("Handle:%p", request->input_buffer->buffer);
   1566                     //TODO: need to get corresponding metadata and send it to pproc
   1567                     for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
   1568                          m != mStoredMetadataList.end(); m++) {
   1569                         if (m->zsl_buf_hdl == request->input_buffer->buffer) {
   1570                             reproc_meta.meta_buf = m->meta_buf;
   1571                             queueMetadata = 1;
   1572                             break;
   1573                         }
   1574                     }
   1575                 }
   1576             }
   1577             rc = channel->request(output.buffer, frameNumber, mJpegSettings,
   1578                             pInputBuffer,(QCamera3Channel*)inputChannel);
   1579             if (queueMetadata) {
   1580                 mPictureChannel->queueMetadata(reproc_meta.meta_buf,mMetadataChannel,false);
   1581             }
   1582         } else {
   1583             ALOGV("%s: %d, request with buffer %p, frame_number %d", __func__,
   1584                 __LINE__, output.buffer, frameNumber);
   1585             if (mIsZslMode && output.stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
   1586                 for (List<MetadataBufferInfo>::iterator m = mStoredMetadataList.begin();
   1587                      m != mStoredMetadataList.end(); m++) {
   1588                    for (uint32_t j = 0; j < request->num_output_buffers; j++) {
   1589                         if (m->zsl_buf_hdl == request->output_buffers[j].buffer) {
   1590                             mMetadataChannel->bufDone(m->meta_buf);
   1591                             free(m->meta_buf);
   1592                             m = mStoredMetadataList.erase(m);
   1593                             break;
   1594                         }
   1595                    }
   1596                 }
   1597             }
   1598             rc = channel->request(output.buffer, frameNumber);
   1599         }
   1600         if (rc < 0)
   1601             ALOGE("%s: request failed", __func__);
   1602     }
   1603 
   1604     mFirstRequest = false;
   1605     // Added a timed condition wait
   1606     struct timespec ts;
   1607     uint8_t isValidTimeout = 1;
   1608     rc = clock_gettime(CLOCK_REALTIME, &ts);
   1609     if (rc < 0) {
   1610         isValidTimeout = 0;
   1611         ALOGE("%s: Error reading the real time clock!!", __func__);
   1612     }
   1613     else {
   1614         // Make timeout as 5 sec for request to be honored
   1615         ts.tv_sec += 5;
   1616     }
   1617     //Block on conditional variable
   1618     mPendingRequest++;
   1619     do {
   1620         if (!isValidTimeout) {
   1621             ALOGV("%s: Blocking on conditional wait", __func__);
   1622             pthread_cond_wait(&mRequestCond, &mMutex);
   1623         }
   1624         else {
   1625             ALOGV("%s: Blocking on timed conditional wait", __func__);
   1626             rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
   1627             if (rc == ETIMEDOUT) {
   1628                 rc = -ENODEV;
   1629                 ALOGE("%s: Unblocked on timeout!!!!", __func__);
   1630                 break;
   1631             }
   1632         }
   1633         ALOGV("%s: Unblocked", __func__);
   1634     }while (mPendingRequest >= kMaxInFlight);
   1635 
   1636     pthread_mutex_unlock(&mMutex);
   1637 
   1638     return rc;
   1639 }
   1640 
   1641 /*===========================================================================
   1642  * FUNCTION   : getMetadataVendorTagOps
   1643  *
   1644  * DESCRIPTION:
   1645  *
   1646  * PARAMETERS :
   1647  *
   1648  *
   1649  * RETURN     :
   1650  *==========================================================================*/
   1651 void QCamera3HardwareInterface::getMetadataVendorTagOps(
   1652                     vendor_tag_query_ops_t* /*ops*/)
   1653 {
   1654     /* Enable locks when we eventually add Vendor Tags */
   1655     /*
   1656     pthread_mutex_lock(&mMutex);
   1657 
   1658     pthread_mutex_unlock(&mMutex);
   1659     */
   1660     return;
   1661 }
   1662 
   1663 /*===========================================================================
   1664  * FUNCTION   : dump
   1665  *
   1666  * DESCRIPTION:
   1667  *
   1668  * PARAMETERS :
   1669  *
   1670  *
   1671  * RETURN     :
   1672  *==========================================================================*/
   1673 void QCamera3HardwareInterface::dump(int /*fd*/)
   1674 {
   1675     /*Enable lock when we implement this function*/
   1676     /*
   1677     pthread_mutex_lock(&mMutex);
   1678 
   1679     pthread_mutex_unlock(&mMutex);
   1680     */
   1681     return;
   1682 }
   1683 
   1684 /*===========================================================================
   1685  * FUNCTION   : flush
   1686  *
   1687  * DESCRIPTION:
   1688  *
   1689  * PARAMETERS :
   1690  *
   1691  *
   1692  * RETURN     :
   1693  *==========================================================================*/
   1694 int QCamera3HardwareInterface::flush()
   1695 {
   1696     /*Enable lock when we implement this function*/
   1697     /*
   1698     pthread_mutex_lock(&mMutex);
   1699 
   1700     pthread_mutex_unlock(&mMutex);
   1701     */
   1702     return 0;
   1703 }
   1704 
   1705 /*===========================================================================
   1706  * FUNCTION   : captureResultCb
   1707  *
   1708  * DESCRIPTION: Callback handler for all capture result
   1709  *              (streams, as well as metadata)
   1710  *
   1711  * PARAMETERS :
   1712  *   @metadata : metadata information
   1713  *   @buffer   : actual gralloc buffer to be returned to frameworks.
   1714  *               NULL if metadata.
   1715  *
   1716  * RETURN     : NONE
   1717  *==========================================================================*/
   1718 void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
   1719                 camera3_stream_buffer_t *buffer, uint32_t frame_number)
   1720 {
   1721     pthread_mutex_lock(&mMutex);
   1722 
   1723     if (metadata_buf)
   1724         handleMetadataWithLock(metadata_buf);
   1725     else
   1726         handleBufferWithLock(buffer, frame_number);
   1727 
   1728     pthread_mutex_unlock(&mMutex);
   1729     return;
   1730 }
   1731 
   1732 /*===========================================================================
   1733  * FUNCTION   : translateCbMetadataToResultMetadata
   1734  *
   1735  * DESCRIPTION:
   1736  *
   1737  * PARAMETERS :
   1738  *   @metadata : metadata information from callback
   1739  *
   1740  * RETURN     : camera_metadata_t*
   1741  *              metadata in a format specified by fwk
   1742  *==========================================================================*/
   1743 camera_metadata_t*
   1744 QCamera3HardwareInterface::translateCbMetadataToResultMetadata
   1745                                 (metadata_buffer_t *metadata, nsecs_t timestamp,
   1746                                  int32_t request_id, int32_t BlobRequest,
   1747                                  jpeg_settings_t* inputjpegsettings)
   1748 {
   1749     CameraMetadata camMetadata;
   1750     camera_metadata_t* resultMetadata;
   1751 
   1752     camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
   1753     camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
   1754 
   1755     // Update the JPEG related info
   1756     if (BlobRequest) {
   1757         camMetadata.update(ANDROID_JPEG_ORIENTATION, &(inputjpegsettings->jpeg_orientation), 1);
   1758         camMetadata.update(ANDROID_JPEG_QUALITY, &(inputjpegsettings->jpeg_quality), 1);
   1759 
   1760         int32_t thumbnailSizeTable[2];
   1761         thumbnailSizeTable[0] = inputjpegsettings->thumbnail_size.width;
   1762         thumbnailSizeTable[1] = inputjpegsettings->thumbnail_size.height;
   1763         camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSizeTable, 2);
   1764         ALOGV("%s: Orien=%d, quality=%d wid=%d, height=%d", __func__, inputjpegsettings->jpeg_orientation,
   1765                inputjpegsettings->jpeg_quality,thumbnailSizeTable[0], thumbnailSizeTable[1]);
   1766 
   1767         if (inputjpegsettings->gps_coordinates[0]) {
   1768             double gpsCoordinates[3];
   1769             gpsCoordinates[0]=*(inputjpegsettings->gps_coordinates[0]);
   1770             gpsCoordinates[1]=*(inputjpegsettings->gps_coordinates[1]);
   1771             gpsCoordinates[2]=*(inputjpegsettings->gps_coordinates[2]);
   1772             camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gpsCoordinates, 3);
   1773             ALOGV("%s: gpsCoordinates[0]=%f, 1=%f 2=%f", __func__, gpsCoordinates[0],
   1774                  gpsCoordinates[1],gpsCoordinates[2]);
   1775         }
   1776 
   1777         if (inputjpegsettings->gps_timestamp) {
   1778             camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, inputjpegsettings->gps_timestamp, 1);
   1779             ALOGV("%s: gps_timestamp=%lld", __func__, *(inputjpegsettings->gps_timestamp));
   1780         }
   1781 
   1782         String8 str(inputjpegsettings->gps_processing_method);
   1783         if (strlen(mJpegSettings->gps_processing_method) > 0) {
   1784             camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
   1785         }
   1786     }
   1787     uint8_t curr_entry = GET_FIRST_PARAM_ID(metadata);
   1788     uint8_t next_entry;
   1789     while (curr_entry != CAM_INTF_PARM_MAX) {
   1790        switch (curr_entry) {
   1791          case CAM_INTF_META_FACE_DETECTION:{
   1792              cam_face_detection_data_t *faceDetectionInfo =
   1793                 (cam_face_detection_data_t *)POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
   1794              uint8_t numFaces = faceDetectionInfo->num_faces_detected;
   1795              int32_t faceIds[MAX_ROI];
   1796              uint8_t faceScores[MAX_ROI];
   1797              int32_t faceRectangles[MAX_ROI * 4];
   1798              int32_t faceLandmarks[MAX_ROI * 6];
   1799              int j = 0, k = 0;
   1800              for (int i = 0; i < numFaces; i++) {
   1801                  faceIds[i] = faceDetectionInfo->faces[i].face_id;
   1802                  faceScores[i] = faceDetectionInfo->faces[i].score;
   1803                  convertToRegions(faceDetectionInfo->faces[i].face_boundary,
   1804                          faceRectangles+j, -1);
   1805                  convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
   1806                  j+= 4;
   1807                  k+= 6;
   1808              }
   1809 
   1810              if (numFaces <= 0) {
   1811                 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
   1812                 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
   1813                 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
   1814                 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
   1815              }
   1816 
   1817              camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
   1818              camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
   1819              camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
   1820                faceRectangles, numFaces*4);
   1821              camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
   1822                faceLandmarks, numFaces*6);
   1823 
   1824             break;
   1825             }
   1826          case CAM_INTF_META_COLOR_CORRECT_MODE:{
   1827              uint8_t  *color_correct_mode =
   1828                            (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
   1829              camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);
   1830              break;
   1831           }
   1832 
   1833          // 3A state is sent in urgent partial result (uses quirk)
   1834          case CAM_INTF_META_AEC_PRECAPTURE_ID:
   1835          case CAM_INTF_META_AEC_ROI:
   1836          case CAM_INTF_META_AEC_STATE:
   1837          case CAM_INTF_PARM_FOCUS_MODE:
   1838          case CAM_INTF_META_AF_ROI:
   1839          case CAM_INTF_META_AF_STATE:
   1840          case CAM_INTF_META_AF_TRIGGER_ID:
   1841          case CAM_INTF_PARM_WHITE_BALANCE:
   1842          case CAM_INTF_META_AWB_REGIONS:
   1843          case CAM_INTF_META_AWB_STATE:
   1844          case CAM_INTF_META_MODE: {
   1845            ALOGV("%s: 3A metadata: %d, do not process", __func__, curr_entry);
   1846            break;
   1847          }
   1848 
   1849           case CAM_INTF_META_EDGE_MODE: {
   1850              cam_edge_application_t  *edgeApplication =
   1851                 (cam_edge_application_t *)POINTER_OF(CAM_INTF_META_EDGE_MODE, metadata);
   1852              uint8_t edgeStrength = (uint8_t)edgeApplication->sharpness;
   1853              camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
   1854              camMetadata.update(ANDROID_EDGE_STRENGTH, &edgeStrength, 1);
   1855              break;
   1856           }
   1857           case CAM_INTF_META_FLASH_POWER: {
   1858              uint8_t  *flashPower =
   1859                   (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
   1860              camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);
   1861              break;
   1862           }
   1863           case CAM_INTF_META_FLASH_FIRING_TIME: {
   1864              int64_t  *flashFiringTime =
   1865                   (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
   1866              camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
   1867              break;
   1868           }
   1869           case CAM_INTF_META_FLASH_STATE: {
   1870              uint8_t  *flashState =
   1871                 (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata);
   1872              camMetadata.update(ANDROID_FLASH_STATE, flashState, 1);
   1873              break;
   1874           }
   1875           case CAM_INTF_META_FLASH_MODE:{
   1876              uint8_t *flashMode = (uint8_t*)
   1877                  POINTER_OF(CAM_INTF_META_FLASH_MODE, metadata);
   1878              camMetadata.update(ANDROID_FLASH_MODE, flashMode, 1);
   1879              break;
   1880           }
   1881           case CAM_INTF_META_HOTPIXEL_MODE: {
   1882               uint8_t  *hotPixelMode =
   1883                  (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
   1884               camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);
   1885               break;
   1886           }
   1887           case CAM_INTF_META_LENS_APERTURE:{
   1888              float  *lensAperture =
   1889                 (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
   1890              camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
   1891              break;
   1892           }
   1893           case CAM_INTF_META_LENS_FILTERDENSITY: {
   1894              float  *filterDensity =
   1895                 (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
   1896              camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
   1897              break;
   1898           }
   1899           case CAM_INTF_META_LENS_FOCAL_LENGTH:{
   1900              float  *focalLength =
   1901                 (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
   1902              camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
   1903              break;
   1904           }
   1905           case CAM_INTF_META_LENS_FOCUS_DISTANCE: {
   1906              float  *focusDistance =
   1907                 (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
   1908              camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
   1909              break;
   1910           }
   1911           case CAM_INTF_META_LENS_FOCUS_RANGE: {
   1912              float  *focusRange =
   1913                 (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
   1914              camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
   1915              break;
   1916           }
   1917           case CAM_INTF_META_LENS_STATE: {
   1918              uint8_t *lensState = (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_STATE, metadata);
   1919              camMetadata.update(ANDROID_LENS_STATE , lensState, 1);
   1920              break;
   1921           }
   1922           case CAM_INTF_META_LENS_OPT_STAB_MODE: {
   1923              uint8_t  *opticalStab =
   1924                 (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
   1925              camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);
   1926              break;
   1927           }
   1928           case CAM_INTF_META_NOISE_REDUCTION_MODE: {
   1929              uint8_t  *noiseRedMode =
   1930                 (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
   1931              camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);
   1932              break;
   1933           }
   1934           case CAM_INTF_META_NOISE_REDUCTION_STRENGTH: {
   1935              uint8_t  *noiseRedStrength =
   1936                 (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_STRENGTH, metadata);
   1937              camMetadata.update(ANDROID_NOISE_REDUCTION_STRENGTH, noiseRedStrength, 1);
   1938              break;
   1939           }
   1940           case CAM_INTF_META_SCALER_CROP_REGION: {
   1941              cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
   1942              POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
   1943              int32_t scalerCropRegion[4];
   1944              scalerCropRegion[0] = hScalerCropRegion->left;
   1945              scalerCropRegion[1] = hScalerCropRegion->top;
   1946              scalerCropRegion[2] = hScalerCropRegion->width;
   1947              scalerCropRegion[3] = hScalerCropRegion->height;
   1948              camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
   1949              break;
   1950           }
   1951           case CAM_INTF_META_SENSOR_EXPOSURE_TIME:{
   1952              int64_t  *sensorExpTime =
   1953                 (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
   1954              mMetadataResponse.exposure_time = *sensorExpTime;
   1955              ALOGV("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
   1956              camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
   1957              break;
   1958           }
   1959           case CAM_INTF_META_SENSOR_FRAME_DURATION:{
   1960              int64_t  *sensorFameDuration =
   1961                 (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
   1962              ALOGV("%s: sensorFameDuration = %lld", __func__, *sensorFameDuration);
   1963              camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
   1964              break;
   1965           }
   1966           case CAM_INTF_META_SENSOR_SENSITIVITY:{
   1967              int32_t  *sensorSensitivity =
   1968                 (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
   1969              ALOGV("%s: sensorSensitivity = %d", __func__, *sensorSensitivity);
   1970              mMetadataResponse.iso_speed = *sensorSensitivity;
   1971              camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
   1972              break;
   1973           }
   1974           case CAM_INTF_META_SHADING_MODE: {
   1975              uint8_t  *shadingMode =
   1976                 (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
   1977              camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);
   1978              break;
   1979           }
   1980           case CAM_INTF_META_STATS_FACEDETECT_MODE: {
   1981              uint8_t  *faceDetectMode =
   1982                 (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
   1983              uint8_t fwk_faceDetectMode = lookupFwkName(FACEDETECT_MODES_MAP,
   1984                                                         sizeof(FACEDETECT_MODES_MAP)/sizeof(FACEDETECT_MODES_MAP[0]),
   1985                                                         *faceDetectMode);
   1986              camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
   1987              break;
   1988           }
   1989           case CAM_INTF_META_STATS_HISTOGRAM_MODE: {
   1990              uint8_t  *histogramMode =
   1991                 (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
   1992              camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);
   1993              break;
   1994           }
   1995           case CAM_INTF_META_STATS_SHARPNESS_MAP_MODE:{
   1996                uint8_t  *sharpnessMapMode =
   1997                   (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
   1998                camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
   1999                                   sharpnessMapMode, 1);
   2000                break;
   2001            }
   2002           case CAM_INTF_META_STATS_SHARPNESS_MAP:{
   2003                cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
   2004                POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
   2005                camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
   2006                                   (int32_t*)sharpnessMap->sharpness,
   2007                                   CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);
   2008                break;
   2009           }
   2010           case CAM_INTF_META_LENS_SHADING_MAP: {
   2011                cam_lens_shading_map_t *lensShadingMap = (cam_lens_shading_map_t *)
   2012                POINTER_OF(CAM_INTF_META_LENS_SHADING_MAP, metadata);
   2013                int map_height = gCamCapability[mCameraId]->lens_shading_map_size.height;
   2014                int map_width  = gCamCapability[mCameraId]->lens_shading_map_size.width;
   2015                camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
   2016                                   (float*)lensShadingMap->lens_shading,
   2017                                   4*map_width*map_height);
   2018                break;
   2019           }
   2020           case CAM_INTF_META_TONEMAP_CURVES:{
   2021              //Populate CAM_INTF_META_TONEMAP_CURVES
   2022              /* ch0 = G, ch 1 = B, ch 2 = R*/
   2023              cam_rgb_tonemap_curves *tonemap = (cam_rgb_tonemap_curves *)
   2024              POINTER_OF(CAM_INTF_META_TONEMAP_CURVES, metadata);
   2025              camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
   2026                                 (float*)tonemap->curves[0].tonemap_points,
   2027                                 tonemap->tonemap_points_cnt * 2);
   2028 
   2029              camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
   2030                                 (float*)tonemap->curves[1].tonemap_points,
   2031                                 tonemap->tonemap_points_cnt * 2);
   2032 
   2033              camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
   2034                                 (float*)tonemap->curves[2].tonemap_points,
   2035                                 tonemap->tonemap_points_cnt * 2);
   2036              break;
   2037           }
   2038           case CAM_INTF_META_COLOR_CORRECT_GAINS:{
   2039              cam_color_correct_gains_t *colorCorrectionGains = (cam_color_correct_gains_t*)
   2040              POINTER_OF(CAM_INTF_META_COLOR_CORRECT_GAINS, metadata);
   2041              camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains, 4);
   2042              break;
   2043           }
   2044           case CAM_INTF_META_COLOR_CORRECT_TRANSFORM:{
   2045               cam_color_correct_matrix_t *colorCorrectionMatrix = (cam_color_correct_matrix_t*)
   2046               POINTER_OF(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata);
   2047               camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
   2048                        (camera_metadata_rational_t*)colorCorrectionMatrix->transform_matrix, 3*3);
   2049               break;
   2050           }
   2051           case CAM_INTF_META_PRED_COLOR_CORRECT_GAINS:{
   2052              cam_color_correct_gains_t *predColorCorrectionGains = (cam_color_correct_gains_t*)
   2053              POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata);
   2054              camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
   2055                        predColorCorrectionGains->gains, 4);
   2056              break;
   2057           }
   2058           case CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM:{
   2059              cam_color_correct_matrix_t *predColorCorrectionMatrix = (cam_color_correct_matrix_t*)
   2060                    POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata);
   2061              camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
   2062                                   (camera_metadata_rational_t*)predColorCorrectionMatrix->transform_matrix, 3*3);
   2063              break;
   2064 
   2065           }
   2066           case CAM_INTF_META_BLACK_LEVEL_LOCK:{
   2067              uint8_t *blackLevelLock = (uint8_t*)
   2068                POINTER_OF(CAM_INTF_META_BLACK_LEVEL_LOCK, metadata);
   2069              camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, blackLevelLock, 1);
   2070              break;
   2071           }
   2072           case CAM_INTF_META_SCENE_FLICKER:{
   2073              uint8_t *sceneFlicker = (uint8_t*)
   2074              POINTER_OF(CAM_INTF_META_SCENE_FLICKER, metadata);
   2075              camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, sceneFlicker, 1);
   2076              break;
   2077           }
   2078           case CAM_INTF_PARM_LED_MODE:
   2079              break;
   2080           case CAM_INTF_PARM_EFFECT: {
   2081              uint8_t *effectMode = (uint8_t*)
   2082                   POINTER_OF(CAM_INTF_PARM_EFFECT, metadata);
   2083              uint8_t fwk_effectMode = lookupFwkName(EFFECT_MODES_MAP,
   2084                                                     sizeof(EFFECT_MODES_MAP),
   2085                                                     *effectMode);
   2086              camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
   2087              break;
   2088           }
   2089           default:
   2090              ALOGV("%s: This is not a valid metadata type to report to fwk, %d",
   2091                    __func__, curr_entry);
   2092              break;
   2093        }
   2094        next_entry = GET_NEXT_PARAM_ID(curr_entry, metadata);
   2095        curr_entry = next_entry;
   2096     }
   2097     resultMetadata = camMetadata.release();
   2098     return resultMetadata;
   2099 }
   2100 
   2101 /*===========================================================================
   2102  * FUNCTION   : translateCbUrgentMetadataToResultMetadata
   2103  *
 * DESCRIPTION: translate the urgent (3A partial-result) metadata received from
 *              the HAL into framework result metadata
   2105  *
   2106  * PARAMETERS :
   2107  *   @metadata : metadata information from callback
   2108  *
   2109  * RETURN     : camera_metadata_t*
   2110  *              metadata in a format specified by fwk
   2111  *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
                                (metadata_buffer_t *metadata) {

    CameraMetadata camMetadata;
    camera_metadata_t* resultMetadata;

    // Flag this buffer as a partial result so the framework knows more
    // metadata for the same frame will follow (ANDROID_QUIRKS contract).
    uint8_t partial_result_tag = ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL;
    camMetadata.update(ANDROID_QUIRKS_PARTIAL_RESULT, &partial_result_tag, 1);

    // Walk every valid entry in the HAL metadata buffer; only the 3A-related
    // entries below are translated here, everything else is deferred to the
    // full (non-urgent) metadata path.
    uint8_t curr_entry = GET_FIRST_PARAM_ID(metadata);
    uint8_t next_entry;
    while (curr_entry != CAM_INTF_PARM_MAX) {
      switch (curr_entry) {
        case CAM_INTF_META_AEC_PRECAPTURE_ID: {
            int32_t  *ae_precapture_id =
              (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata);
            camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
                                          ae_precapture_id, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID", __func__);
          break;
        }
        case CAM_INTF_META_AEC_ROI: {
            // AE region: converted to the fwk [xmin, ymin, xmax, ymax, weight]
            // layout by convertToRegions().
            cam_area_t  *hAeRegions =
                (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
            int32_t aeRegions[5];
            convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
            camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AE_REGIONS", __func__);
            break;
        }
        case CAM_INTF_META_AEC_STATE:{
            uint8_t *ae_state =
                (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
            camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AE_STATE", __func__);
            break;
        }
        case CAM_INTF_PARM_FOCUS_MODE:{
            // HAL focus mode is mapped to the framework AF-mode enum via the
            // FOCUS_MODES_MAP lookup table.
            uint8_t  *focusMode =
                (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
            uint8_t fwkAfMode = lookupFwkName(FOCUS_MODES_MAP,
               sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]), *focusMode);
            camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AF_MODE", __func__);
            break;
        }
        case CAM_INTF_META_AF_ROI:{
            /*af regions*/
            cam_area_t  *hAfRegions =
                (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
            int32_t afRegions[5];
            convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
            camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AF_REGIONS", __func__);
            break;
        }
        case CAM_INTF_META_AF_STATE: {
            uint8_t  *afState =
               (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
            camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AF_STATE", __func__);
            break;
        }
        case CAM_INTF_META_AF_TRIGGER_ID: {
            int32_t  *afTriggerId =
                 (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
            camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID", __func__);
            break;
        }
        case CAM_INTF_PARM_WHITE_BALANCE: {
           // HAL white balance mode mapped to the framework AWB-mode enum.
           uint8_t  *whiteBalance =
                (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
             uint8_t fwkWhiteBalanceMode =
                    lookupFwkName(WHITE_BALANCE_MODES_MAP,
                    sizeof(WHITE_BALANCE_MODES_MAP)/
                    sizeof(WHITE_BALANCE_MODES_MAP[0]), *whiteBalance);
             camMetadata.update(ANDROID_CONTROL_AWB_MODE,
                 &fwkWhiteBalanceMode, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AWB_MODE", __func__);
             break;
        }
        case CAM_INTF_META_AWB_REGIONS: {
           /*awb regions*/
           cam_area_t  *hAwbRegions =
               (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
           int32_t awbRegions[5];
           convertToRegions(hAwbRegions->rect, awbRegions,hAwbRegions->weight);
           camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);
           ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AWB_REGIONS", __func__);
           break;
        }
        case CAM_INTF_META_AWB_STATE: {
           uint8_t  *whiteBalanceState =
              (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
           camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);
           ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AWB_STATE", __func__);
           break;
        }
        case CAM_INTF_META_MODE: {
            uint8_t *mode =(uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
            camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);
            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_MODE", __func__);
            break;
        }
        default:
            // Not an error: non-urgent entries are handled by the full
            // metadata translation path.
            ALOGV("%s: Normal Metadata %d, do not process",
              __func__, curr_entry);
       }
       next_entry = GET_NEXT_PARAM_ID(curr_entry, metadata);
       curr_entry = next_entry;
    }
    // Hand ownership of the packed buffer to the caller.
    resultMetadata = camMetadata.release();
    return resultMetadata;
}
   2228 
   2229 /*===========================================================================
   2230  * FUNCTION   : convertToRegions
   2231  *
   2232  * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
   2233  *
   2234  * PARAMETERS :
   2235  *   @rect   : cam_rect_t struct to convert
   2236  *   @region : int32_t destination array
   2237  *   @weight : if we are converting from cam_area_t, weight is valid
   2238  *             else weight = -1
   2239  *
   2240  *==========================================================================*/
   2241 void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
   2242     region[0] = rect.left;
   2243     region[1] = rect.top;
   2244     region[2] = rect.left + rect.width;
   2245     region[3] = rect.top + rect.height;
   2246     if (weight > -1) {
   2247         region[4] = weight;
   2248     }
   2249 }
   2250 
   2251 /*===========================================================================
   2252  * FUNCTION   : convertFromRegions
   2253  *
   2254  * DESCRIPTION: helper method to convert from array to cam_rect_t
   2255  *
   2256  * PARAMETERS :
 *   @roi      : cam_area_t struct to populate from the metadata entry
 *   @settings : framework capture settings containing the region entry
 *   @tag      : metadata tag whose entry holds [x_min, y_min, x_max, y_max, weight]
   2261  *
   2262  *==========================================================================*/
   2263 void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
   2264                                                    const camera_metadata_t *settings,
   2265                                                    uint32_t tag){
   2266     CameraMetadata frame_settings;
   2267     frame_settings = settings;
   2268     int32_t x_min = frame_settings.find(tag).data.i32[0];
   2269     int32_t y_min = frame_settings.find(tag).data.i32[1];
   2270     int32_t x_max = frame_settings.find(tag).data.i32[2];
   2271     int32_t y_max = frame_settings.find(tag).data.i32[3];
   2272     roi->weight = frame_settings.find(tag).data.i32[4];
   2273     roi->rect.left = x_min;
   2274     roi->rect.top = y_min;
   2275     roi->rect.width = x_max - x_min;
   2276     roi->rect.height = y_max - y_min;
   2277 }
   2278 
   2279 /*===========================================================================
   2280  * FUNCTION   : resetIfNeededROI
   2281  *
   2282  * DESCRIPTION: helper method to reset the roi if it is greater than scaler
   2283  *              crop region
   2284  *
   2285  * PARAMETERS :
   2286  *   @roi       : cam_area_t struct to resize
   2287  *   @scalerCropRegion : cam_crop_region_t region to compare against
   2288  *
   2289  *
   2290  *==========================================================================*/
   2291 bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
   2292                                                  const cam_crop_region_t* scalerCropRegion)
   2293 {
   2294     int32_t roi_x_max = roi->rect.width + roi->rect.left;
   2295     int32_t roi_y_max = roi->rect.height + roi->rect.top;
   2296     int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
   2297     int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
   2298     if ((roi_x_max < scalerCropRegion->left) ||
   2299         (roi_y_max < scalerCropRegion->top)  ||
   2300         (roi->rect.left > crop_x_max) ||
   2301         (roi->rect.top > crop_y_max)){
   2302         return false;
   2303     }
   2304     if (roi->rect.left < scalerCropRegion->left) {
   2305         roi->rect.left = scalerCropRegion->left;
   2306     }
   2307     if (roi->rect.top < scalerCropRegion->top) {
   2308         roi->rect.top = scalerCropRegion->top;
   2309     }
   2310     if (roi_x_max > crop_x_max) {
   2311         roi_x_max = crop_x_max;
   2312     }
   2313     if (roi_y_max > crop_y_max) {
   2314         roi_y_max = crop_y_max;
   2315     }
   2316     roi->rect.width = roi_x_max - roi->rect.left;
   2317     roi->rect.height = roi_y_max - roi->rect.top;
   2318     return true;
   2319 }
   2320 
   2321 /*===========================================================================
   2322  * FUNCTION   : convertLandmarks
   2323  *
   2324  * DESCRIPTION: helper method to extract the landmarks from face detection info
   2325  *
   2326  * PARAMETERS :
   2327  *   @face   : cam_rect_t struct to convert
   2328  *   @landmarks : int32_t destination array
   2329  *
   2330  *
   2331  *==========================================================================*/
   2332 void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
   2333 {
   2334     landmarks[0] = face.left_eye_center.x;
   2335     landmarks[1] = face.left_eye_center.y;
   2336     landmarks[2] = face.right_eye_center.x;
   2337     landmarks[3] = face.right_eye_center.y;
   2338     landmarks[4] = face.mouth_center.x;
   2339     landmarks[5] = face.mouth_center.y;
   2340 }
   2341 
   2342 #define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
   2343 /*===========================================================================
   2344  * FUNCTION   : initCapabilities
   2345  *
   2346  * DESCRIPTION: initialize camera capabilities in static data struct
   2347  *
   2348  * PARAMETERS :
   2349  *   @cameraId  : camera Id
   2350  *
   2351  * RETURN     : int32_t type of status
   2352  *              NO_ERROR  -- success
   2353  *              none-zero failure code
   2354  *==========================================================================*/
   2355 int QCamera3HardwareInterface::initCapabilities(int cameraId)
   2356 {
   2357     int rc = 0;
   2358     mm_camera_vtbl_t *cameraHandle = NULL;
   2359     QCamera3HeapMemory *capabilityHeap = NULL;
   2360 
   2361     cameraHandle = camera_open(cameraId);
   2362     if (!cameraHandle) {
   2363         ALOGE("%s: camera_open failed", __func__);
   2364         rc = -1;
   2365         goto open_failed;
   2366     }
   2367 
   2368     capabilityHeap = new QCamera3HeapMemory();
   2369     if (capabilityHeap == NULL) {
   2370         ALOGE("%s: creation of capabilityHeap failed", __func__);
   2371         goto heap_creation_failed;
   2372     }
   2373     /* Allocate memory for capability buffer */
   2374     rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
   2375     if(rc != OK) {
   2376         ALOGE("%s: No memory for cappability", __func__);
   2377         goto allocate_failed;
   2378     }
   2379 
   2380     /* Map memory for capability buffer */
   2381     memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
   2382     rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
   2383                                 CAM_MAPPING_BUF_TYPE_CAPABILITY,
   2384                                 capabilityHeap->getFd(0),
   2385                                 sizeof(cam_capability_t));
   2386     if(rc < 0) {
   2387         ALOGE("%s: failed to map capability buffer", __func__);
   2388         goto map_failed;
   2389     }
   2390 
   2391     /* Query Capability */
   2392     rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
   2393     if(rc < 0) {
   2394         ALOGE("%s: failed to query capability",__func__);
   2395         goto query_failed;
   2396     }
   2397     gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
   2398     if (!gCamCapability[cameraId]) {
   2399         ALOGE("%s: out of memory", __func__);
   2400         goto query_failed;
   2401     }
   2402     memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
   2403                                         sizeof(cam_capability_t));
   2404     rc = 0;
   2405 
   2406 query_failed:
   2407     cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
   2408                             CAM_MAPPING_BUF_TYPE_CAPABILITY);
   2409 map_failed:
   2410     capabilityHeap->deallocate();
   2411 allocate_failed:
   2412     delete capabilityHeap;
   2413 heap_creation_failed:
   2414     cameraHandle->ops->close_camera(cameraHandle->camera_handle);
   2415     cameraHandle = NULL;
   2416 open_failed:
   2417     return rc;
   2418 }
   2419 
   2420 /*===========================================================================
   2421  * FUNCTION   : initParameters
   2422  *
   2423  * DESCRIPTION: initialize camera parameters
   2424  *
   2425  * PARAMETERS :
   2426  *
   2427  * RETURN     : int32_t type of status
   2428  *              NO_ERROR  -- success
   2429  *              none-zero failure code
   2430  *==========================================================================*/
   2431 int QCamera3HardwareInterface::initParameters()
   2432 {
   2433     int rc = 0;
   2434 
   2435     //Allocate Set Param Buffer
   2436     mParamHeap = new QCamera3HeapMemory();
   2437     rc = mParamHeap->allocate(1, sizeof(parm_buffer_t), false);
   2438     if(rc != OK) {
   2439         rc = NO_MEMORY;
   2440         ALOGE("Failed to allocate SETPARM Heap memory");
   2441         delete mParamHeap;
   2442         mParamHeap = NULL;
   2443         return rc;
   2444     }
   2445 
   2446     //Map memory for parameters buffer
   2447     rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
   2448             CAM_MAPPING_BUF_TYPE_PARM_BUF,
   2449             mParamHeap->getFd(0),
   2450             sizeof(parm_buffer_t));
   2451     if(rc < 0) {
   2452         ALOGE("%s:failed to map SETPARM buffer",__func__);
   2453         rc = FAILED_TRANSACTION;
   2454         mParamHeap->deallocate();
   2455         delete mParamHeap;
   2456         mParamHeap = NULL;
   2457         return rc;
   2458     }
   2459 
   2460     mParameters = (parm_buffer_t*) DATA_PTR(mParamHeap,0);
   2461     return rc;
   2462 }
   2463 
   2464 /*===========================================================================
   2465  * FUNCTION   : deinitParameters
   2466  *
   2467  * DESCRIPTION: de-initialize camera parameters
   2468  *
   2469  * PARAMETERS :
   2470  *
   2471  * RETURN     : NONE
   2472  *==========================================================================*/
   2473 void QCamera3HardwareInterface::deinitParameters()
   2474 {
   2475     mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
   2476             CAM_MAPPING_BUF_TYPE_PARM_BUF);
   2477 
   2478     mParamHeap->deallocate();
   2479     delete mParamHeap;
   2480     mParamHeap = NULL;
   2481 
   2482     mParameters = NULL;
   2483 }
   2484 
   2485 /*===========================================================================
   2486  * FUNCTION   : calcMaxJpegSize
   2487  *
   2488  * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
   2489  *
   2490  * PARAMETERS :
   2491  *
   2492  * RETURN     : max_jpeg_size
   2493  *==========================================================================*/
   2494 int QCamera3HardwareInterface::calcMaxJpegSize()
   2495 {
   2496     int32_t max_jpeg_size = 0;
   2497     int temp_width, temp_height;
   2498     for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
   2499         temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
   2500         temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
   2501         if (temp_width * temp_height > max_jpeg_size ) {
   2502             max_jpeg_size = temp_width * temp_height;
   2503         }
   2504     }
   2505     max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
   2506     return max_jpeg_size;
   2507 }
   2508 
   2509 /*===========================================================================
   2510  * FUNCTION   : initStaticMetadata
   2511  *
   2512  * DESCRIPTION: initialize the static metadata
   2513  *
   2514  * PARAMETERS :
   2515  *   @cameraId  : camera Id
   2516  *
   2517  * RETURN     : int32_t type of status
   2518  *              0  -- success
   2519  *              non-zero failure code
   2520  *==========================================================================*/
   2521 int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
   2522 {
   2523     int rc = 0;
   2524     CameraMetadata staticInfo;
   2525 
   2526     /* android.info: hardware level */
   2527     uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
   2528     staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
   2529         &supportedHardwareLevel, 1);
   2530 
   2531     int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
   2532     /*HAL 3 only*/
   2533     /*staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
   2534                     &gCamCapability[cameraId]->min_focus_distance, 1); */
   2535 
   2536     /*hard coded for now but this should come from sensor*/
   2537     float min_focus_distance;
   2538     if(facingBack){
   2539         min_focus_distance = 10;
   2540     } else {
   2541         min_focus_distance = 0;
   2542     }
   2543     staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
   2544                     &min_focus_distance, 1);
   2545 
   2546     staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
   2547                     &gCamCapability[cameraId]->hyper_focal_distance, 1);
   2548 
   2549     /*should be using focal lengths but sensor doesn't provide that info now*/
   2550     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
   2551                       &gCamCapability[cameraId]->focal_length,
   2552                       1);
   2553 
   2554     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
   2555                       gCamCapability[cameraId]->apertures,
   2556                       gCamCapability[cameraId]->apertures_count);
   2557 
   2558     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
   2559                 gCamCapability[cameraId]->filter_densities,
   2560                 gCamCapability[cameraId]->filter_densities_count);
   2561 
   2562 
   2563     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
   2564                       (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
   2565                       gCamCapability[cameraId]->optical_stab_modes_count);
   2566 
   2567     staticInfo.update(ANDROID_LENS_POSITION,
   2568                       gCamCapability[cameraId]->lens_position,
   2569                       sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float));
   2570 
   2571     int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
   2572                                                     gCamCapability[cameraId]->lens_shading_map_size.height};
   2573     staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
   2574                       lens_shading_map_size,
   2575                       sizeof(lens_shading_map_size)/sizeof(int32_t));
   2576 
   2577     int32_t geo_correction_map_size[] = {gCamCapability[cameraId]->geo_correction_map_size.width,
   2578                                                       gCamCapability[cameraId]->geo_correction_map_size.height};
   2579     staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE,
   2580             geo_correction_map_size,
   2581             sizeof(geo_correction_map_size)/sizeof(int32_t));
   2582 
   2583     staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP,
   2584                        gCamCapability[cameraId]->geo_correction_map,
   2585                        sizeof(gCamCapability[cameraId]->geo_correction_map)/sizeof(float));
   2586 
   2587     staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
   2588             gCamCapability[cameraId]->sensor_physical_size, 2);
   2589 
   2590     staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
   2591             gCamCapability[cameraId]->exposure_time_range, 2);
   2592 
   2593     staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
   2594             &gCamCapability[cameraId]->max_frame_duration, 1);
   2595 
   2596     camera_metadata_rational baseGainFactor = {
   2597             gCamCapability[cameraId]->base_gain_factor.numerator,
   2598             gCamCapability[cameraId]->base_gain_factor.denominator};
   2599     staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
   2600                       &baseGainFactor, 1);
   2601 
   2602     staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
   2603                      (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);
   2604 
   2605     int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
   2606                                                gCamCapability[cameraId]->pixel_array_size.height};
   2607     staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
   2608                       pixel_array_size, 2);
   2609 
   2610     int32_t active_array_size[] = {0, 0,
   2611                                                 gCamCapability[cameraId]->active_array_size.width,
   2612                                                 gCamCapability[cameraId]->active_array_size.height};
   2613     staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
   2614                       active_array_size, 4);
   2615 
   2616     staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
   2617             &gCamCapability[cameraId]->white_level, 1);
   2618 
   2619     staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
   2620             gCamCapability[cameraId]->black_level_pattern, 4);
   2621 
   2622     staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
   2623                       &gCamCapability[cameraId]->flash_charge_duration, 1);
   2624 
   2625     staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
   2626                       &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
   2627 
   2628     staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
   2629                       (int*)&gCamCapability[cameraId]->max_num_roi, 1);
   2630 
   2631     staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
   2632                       &gCamCapability[cameraId]->histogram_size, 1);
   2633 
   2634     staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
   2635             &gCamCapability[cameraId]->max_histogram_count, 1);
   2636 
   2637     int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
   2638                                                 gCamCapability[cameraId]->sharpness_map_size.height};
   2639 
   2640     staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
   2641             sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
   2642 
   2643     staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
   2644             &gCamCapability[cameraId]->max_sharpness_map_value, 1);
   2645 
   2646 
   2647     staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
   2648                       &gCamCapability[cameraId]->raw_min_duration,
   2649                        1);
   2650 
   2651     int32_t scalar_formats[] = {HAL_PIXEL_FORMAT_YCbCr_420_888,
   2652                                                 HAL_PIXEL_FORMAT_BLOB};
   2653     int scalar_formats_count = sizeof(scalar_formats)/sizeof(int32_t);
   2654     staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
   2655                       scalar_formats,
   2656                       scalar_formats_count);
   2657 
   2658     int32_t available_processed_sizes[CAM_FORMAT_MAX * 2];
   2659     makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
   2660               gCamCapability[cameraId]->picture_sizes_tbl_cnt,
   2661               available_processed_sizes);
   2662     staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
   2663                 available_processed_sizes,
   2664                 (gCamCapability[cameraId]->picture_sizes_tbl_cnt) * 2);
   2665 
   2666     staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
   2667                       &gCamCapability[cameraId]->jpeg_min_duration[0],
   2668                       gCamCapability[cameraId]->picture_sizes_tbl_cnt);
   2669 
   2670     int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
   2671     makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
   2672                  gCamCapability[cameraId]->fps_ranges_tbl_cnt,
   2673                  available_fps_ranges);
   2674     staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
   2675             available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );
   2676 
   2677     camera_metadata_rational exposureCompensationStep = {
   2678             gCamCapability[cameraId]->exp_compensation_step.numerator,
   2679             gCamCapability[cameraId]->exp_compensation_step.denominator};
   2680     staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
   2681                       &exposureCompensationStep, 1);
   2682 
   2683     /*TO DO*/
   2684     uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
   2685     staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
   2686                       availableVstabModes, sizeof(availableVstabModes));
   2687 
   2688     /** Quirk for urgent 3A state until final interface is worked out */
   2689     uint8_t usePartialResultQuirk = 1;
   2690     staticInfo.update(ANDROID_QUIRKS_USE_PARTIAL_RESULT,
   2691                       &usePartialResultQuirk, 1);
   2692 
   2693     /*HAL 1 and HAL 3 common*/
   2694     float maxZoom = 4;
   2695     staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
   2696             &maxZoom, 1);
   2697 
   2698     int32_t max3aRegions = 1;
   2699     staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
   2700             &max3aRegions, 1);
   2701 
   2702     uint8_t availableFaceDetectModes[] = {
   2703             ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
   2704             ANDROID_STATISTICS_FACE_DETECT_MODE_FULL };
   2705     staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
   2706                       availableFaceDetectModes,
   2707                       sizeof(availableFaceDetectModes));
   2708 
   2709     int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
   2710                                                         gCamCapability[cameraId]->exposure_compensation_max};
   2711     staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
   2712             exposureCompensationRange,
   2713             sizeof(exposureCompensationRange)/sizeof(int32_t));
   2714 
   2715     uint8_t lensFacing = (facingBack) ?
   2716             ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
   2717     staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
   2718 
   2719     staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
   2720                 available_processed_sizes,
   2721                 (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2));
   2722 
   2723     staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
   2724                       available_thumbnail_sizes,
   2725                       sizeof(available_thumbnail_sizes)/sizeof(int32_t));
   2726 
   2727     int32_t max_jpeg_size = 0;
   2728     int temp_width, temp_height;
   2729     for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
   2730         temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
   2731         temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
   2732         if (temp_width * temp_height > max_jpeg_size ) {
   2733             max_jpeg_size = temp_width * temp_height;
   2734         }
   2735     }
   2736     max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
   2737     staticInfo.update(ANDROID_JPEG_MAX_SIZE,
   2738                       &max_jpeg_size, 1);
   2739 
   2740     uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
   2741     int32_t size = 0;
   2742     for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
   2743         int val = lookupFwkName(EFFECT_MODES_MAP,
   2744                                    sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
   2745                                    gCamCapability[cameraId]->supported_effects[i]);
   2746         if (val != NAME_NOT_FOUND) {
   2747             avail_effects[size] = (uint8_t)val;
   2748             size++;
   2749         }
   2750     }
   2751     staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
   2752                       avail_effects,
   2753                       size);
   2754 
   2755     uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
   2756     uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
   2757     int32_t supported_scene_modes_cnt = 0;
   2758     for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
   2759         int val = lookupFwkName(SCENE_MODES_MAP,
   2760                                 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
   2761                                 gCamCapability[cameraId]->supported_scene_modes[i]);
   2762         if (val != NAME_NOT_FOUND) {
   2763             avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
   2764             supported_indexes[supported_scene_modes_cnt] = i;
   2765             supported_scene_modes_cnt++;
   2766         }
   2767     }
   2768 
   2769     staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
   2770                       avail_scene_modes,
   2771                       supported_scene_modes_cnt);
   2772 
   2773     uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
   2774     makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
   2775                       supported_scene_modes_cnt,
   2776                       scene_mode_overrides,
   2777                       supported_indexes,
   2778                       cameraId);
   2779     staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
   2780                       scene_mode_overrides,
   2781                       supported_scene_modes_cnt*3);
   2782 
   2783     uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
   2784     size = 0;
   2785     for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
   2786         int val = lookupFwkName(ANTIBANDING_MODES_MAP,
   2787                                  sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
   2788                                  gCamCapability[cameraId]->supported_antibandings[i]);
   2789         if (val != NAME_NOT_FOUND) {
   2790             avail_antibanding_modes[size] = (uint8_t)val;
   2791             size++;
   2792         }
   2793 
   2794     }
   2795     staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
   2796                       avail_antibanding_modes,
   2797                       size);
   2798 
   2799     uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
   2800     size = 0;
   2801     for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
   2802         int val = lookupFwkName(FOCUS_MODES_MAP,
   2803                                 sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
   2804                                 gCamCapability[cameraId]->supported_focus_modes[i]);
   2805         if (val != NAME_NOT_FOUND) {
   2806             avail_af_modes[size] = (uint8_t)val;
   2807             size++;
   2808         }
   2809     }
   2810     staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
   2811                       avail_af_modes,
   2812                       size);
   2813 
   2814     uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
   2815     size = 0;
   2816     for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
   2817         int8_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
   2818                                     sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
   2819                                     gCamCapability[cameraId]->supported_white_balances[i]);
   2820         if (val != NAME_NOT_FOUND) {
   2821             avail_awb_modes[size] = (uint8_t)val;
   2822             size++;
   2823         }
   2824     }
   2825     staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
   2826                       avail_awb_modes,
   2827                       size);
   2828 
   2829     uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
   2830     for (int i = 0; i < gCamCapability[cameraId]->supported_flash_firing_level_cnt; i++)
   2831       available_flash_levels[i] = gCamCapability[cameraId]->supported_firing_levels[i];
   2832 
   2833     staticInfo.update(ANDROID_FLASH_FIRING_POWER,
   2834             available_flash_levels,
   2835             gCamCapability[cameraId]->supported_flash_firing_level_cnt);
   2836 
   2837 
   2838     uint8_t flashAvailable = gCamCapability[cameraId]->flash_available;
   2839     staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
   2840             &flashAvailable, 1);
   2841 
   2842     uint8_t avail_ae_modes[5];
   2843     size = 0;
   2844     for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
   2845         avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
   2846         size++;
   2847     }
   2848     if (flashAvailable) {
   2849         avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
   2850         avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
   2851         avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
   2852     }
   2853     staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
   2854                       avail_ae_modes,
   2855                       size);
   2856 
   2857     int32_t sensitivity_range[2];
   2858     sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
   2859     sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
   2860     staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
   2861                       sensitivity_range,
   2862                       sizeof(sensitivity_range) / sizeof(int32_t));
   2863 
   2864     staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
   2865                       &gCamCapability[cameraId]->max_analog_sensitivity,
   2866                       1);
   2867 
   2868     staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
   2869                       &gCamCapability[cameraId]->jpeg_min_duration[0],
   2870                       gCamCapability[cameraId]->picture_sizes_tbl_cnt);
   2871 
   2872     int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
   2873     staticInfo.update(ANDROID_SENSOR_ORIENTATION,
   2874                       &sensor_orientation,
   2875                       1);
   2876 
   2877     int32_t max_output_streams[3] = {1, 3, 1};
   2878     staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
   2879                       max_output_streams,
   2880                       3);
   2881 
   2882     gStaticMetadata[cameraId] = staticInfo.release();
   2883     return rc;
   2884 }
   2885 
   2886 /*===========================================================================
   2887  * FUNCTION   : makeTable
   2888  *
   2889  * DESCRIPTION: make a table of sizes
   2890  *
   2891  * PARAMETERS :
   2892  *
   2893  *
   2894  *==========================================================================*/
   2895 void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
   2896                                           int32_t* sizeTable)
   2897 {
   2898     int j = 0;
   2899     for (int i = 0; i < size; i++) {
   2900         sizeTable[j] = dimTable[i].width;
   2901         sizeTable[j+1] = dimTable[i].height;
   2902         j+=2;
   2903     }
   2904 }
   2905 
   2906 /*===========================================================================
   2907  * FUNCTION   : makeFPSTable
   2908  *
   2909  * DESCRIPTION: make a table of fps ranges
   2910  *
   2911  * PARAMETERS :
   2912  *
   2913  *==========================================================================*/
   2914 void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
   2915                                           int32_t* fpsRangesTable)
   2916 {
   2917     int j = 0;
   2918     for (int i = 0; i < size; i++) {
   2919         fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
   2920         fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
   2921         j+=2;
   2922     }
   2923 }
   2924 
   2925 /*===========================================================================
   2926  * FUNCTION   : makeOverridesList
   2927  *
   2928  * DESCRIPTION: make a list of scene mode overrides
   2929  *
   2930  * PARAMETERS :
   2931  *
   2932  *
   2933  *==========================================================================*/
   2934 void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
   2935                                                   uint8_t size, uint8_t* overridesList,
   2936                                                   uint8_t* supported_indexes,
   2937                                                   int camera_id)
   2938 {
   2939     /*daemon will give a list of overrides for all scene modes.
   2940       However we should send the fwk only the overrides for the scene modes
   2941       supported by the framework*/
   2942     int j = 0, index = 0, supt = 0;
   2943     uint8_t focus_override;
   2944     for (int i = 0; i < size; i++) {
   2945         supt = 0;
   2946         index = supported_indexes[i];
   2947         overridesList[j] = gCamCapability[camera_id]->flash_available ? ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:ANDROID_CONTROL_AE_MODE_ON;
   2948         overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
   2949                                  sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
   2950                                                     overridesTable[index].awb_mode);
   2951         focus_override = (uint8_t)overridesTable[index].af_mode;
   2952         for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) {
   2953            if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
   2954               supt = 1;
   2955               break;
   2956            }
   2957         }
   2958         if (supt) {
   2959            overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
   2960                                               sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
   2961                                               focus_override);
   2962         } else {
   2963            overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
   2964         }
   2965         j+=3;
   2966     }
   2967 }
   2968 
   2969 /*===========================================================================
 * FUNCTION   : getScalarFormat
 *
 * DESCRIPTION: convert the format to type recognized by framework
 *
 * PARAMETERS : format : the format from backend
 *
 * RETURN     : format recognized by framework
   2977  *
   2978  *==========================================================================*/
   2979 int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
   2980 {
   2981     int32_t halPixelFormat;
   2982 
   2983     switch (format) {
   2984     case CAM_FORMAT_YUV_420_NV12:
   2985         halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
   2986         break;
   2987     case CAM_FORMAT_YUV_420_NV21:
   2988         halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
   2989         break;
   2990     case CAM_FORMAT_YUV_420_NV21_ADRENO:
   2991         halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
   2992         break;
   2993     case CAM_FORMAT_YUV_420_YV12:
   2994         halPixelFormat = HAL_PIXEL_FORMAT_YV12;
   2995         break;
   2996     case CAM_FORMAT_YUV_422_NV16:
   2997     case CAM_FORMAT_YUV_422_NV61:
   2998     default:
   2999         halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
   3000         break;
   3001     }
   3002     return halPixelFormat;
   3003 }
   3004 
   3005 /*===========================================================================
   3006  * FUNCTION   : getSensorSensitivity
   3007  *
   3008  * DESCRIPTION: convert iso_mode to an integer value
   3009  *
   3010  * PARAMETERS : iso_mode : the iso_mode supported by sensor
   3011  *
 * RETURN    : sensitivity supported by sensor
   3013  *
   3014  *==========================================================================*/
   3015 int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
   3016 {
   3017     int32_t sensitivity;
   3018 
   3019     switch (iso_mode) {
   3020     case CAM_ISO_MODE_100:
   3021         sensitivity = 100;
   3022         break;
   3023     case CAM_ISO_MODE_200:
   3024         sensitivity = 200;
   3025         break;
   3026     case CAM_ISO_MODE_400:
   3027         sensitivity = 400;
   3028         break;
   3029     case CAM_ISO_MODE_800:
   3030         sensitivity = 800;
   3031         break;
   3032     case CAM_ISO_MODE_1600:
   3033         sensitivity = 1600;
   3034         break;
   3035     default:
   3036         sensitivity = -1;
   3037         break;
   3038     }
   3039     return sensitivity;
   3040 }
   3041 
   3042 
   3043 /*===========================================================================
   3044  * FUNCTION   : AddSetParmEntryToBatch
   3045  *
   3046  * DESCRIPTION: add set parameter entry into batch
   3047  *
   3048  * PARAMETERS :
   3049  *   @p_table     : ptr to parameter buffer
   3050  *   @paramType   : parameter type
   3051  *   @paramLength : length of parameter value
   3052  *   @paramValue  : ptr to parameter value
   3053  *
   3054  * RETURN     : int32_t type of status
   3055  *              NO_ERROR  -- success
 *              non-zero failure code
   3057  *==========================================================================*/
int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table,
                                                          cam_intf_parm_type_t paramType,
                                                          uint32_t paramLength,
                                                          void *paramValue)
{
    // The batch keeps its populated entries in a linked list threaded
    // through per-entry "next" ids (see the GET/SET_*_PARAM_ID macros),
    // kept sorted in ascending parameter-id order. Splice the new entry
    // in at its sorted position before copying in the value.
    int position = paramType;
    int current, next;

    /*************************************************************************
    *                 Code to take care of linking next flags                *
    *************************************************************************/
    current = GET_FIRST_PARAM_ID(p_table);
    if (position == current){
        //DO NOTHING
        // Entry is already the list head; links are untouched.
    } else if (position < current){
        // New entry precedes the current head: link it in as the new head.
        SET_NEXT_PARAM_ID(position, p_table, current);
        SET_FIRST_PARAM_ID(p_table, position);
    } else {
        /* Search for the position in the linked list where we need to slot in*/
        while (position > GET_NEXT_PARAM_ID(current, p_table))
            current = GET_NEXT_PARAM_ID(current, p_table);

        /*If node already exists no need to alter linking*/
        if (position != GET_NEXT_PARAM_ID(current, p_table)) {
            next = GET_NEXT_PARAM_ID(current, p_table);
            SET_NEXT_PARAM_ID(current, p_table, position);
            SET_NEXT_PARAM_ID(position, p_table, next);
        }
    }

    /*************************************************************************
    *                   Copy contents into entry                             *
    *************************************************************************/

    // Reject values that would overflow the fixed-size entry slot.
    if (paramLength > sizeof(parm_type_t)) {
        ALOGE("%s:Size of input larger than max entry size",__func__);
        return BAD_VALUE;
    }
    memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
    return NO_ERROR;
}
   3099 
   3100 /*===========================================================================
   3101  * FUNCTION   : lookupFwkName
   3102  *
 * DESCRIPTION: In case the enum is not same in fwk and backend
 *              make sure the parameter is correctly propagated
 *
 * PARAMETERS  :
 *   @arr      : map between the two enums
 *   @len      : len of the map
 *   @hal_name : name of the hal_parm to map
 *
 * RETURN     : int type of status
 *              fwk_name  -- success
 *              non-zero failure code
   3114  *==========================================================================*/
   3115 int8_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
   3116                                              int len, int hal_name)
   3117 {
   3118 
   3119     for (int i = 0; i < len; i++) {
   3120         if (arr[i].hal_name == hal_name)
   3121             return arr[i].fwk_name;
   3122     }
   3123 
   3124     /* Not able to find matching framework type is not necessarily
   3125      * an error case. This happens when mm-camera supports more attributes
   3126      * than the frameworks do */
   3127     ALOGD("%s: Cannot find matching framework type", __func__);
   3128     return NAME_NOT_FOUND;
   3129 }
   3130 
   3131 /*===========================================================================
   3132  * FUNCTION   : lookupHalName
   3133  *
 * DESCRIPTION: In case the enum is not same in fwk and backend
 *              make sure the parameter is correctly propagated
 *
 * PARAMETERS  :
 *   @arr      : map between the two enums
 *   @len      : len of the map
 *   @fwk_name : name of the fwk parameter to map
 *
 * RETURN     : int32_t type of status
 *              hal_name  -- success
 *              non-zero failure code
   3145  *==========================================================================*/
   3146 int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
   3147                                              int len, int fwk_name)
   3148 {
   3149     for (int i = 0; i < len; i++) {
   3150        if (arr[i].fwk_name == fwk_name)
   3151            return arr[i].hal_name;
   3152     }
   3153     ALOGE("%s: Cannot find matching hal type", __func__);
   3154     return NAME_NOT_FOUND;
   3155 }
   3156 
   3157 /*===========================================================================
 * FUNCTION   : getCamInfo
 *
 * DESCRIPTION: query camera capabilities
 *
 * PARAMETERS :
 *   @cameraId  : camera Id
 *   @info      : camera info struct to be filled in with camera capabilities
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
   3169  *==========================================================================*/
   3170 int QCamera3HardwareInterface::getCamInfo(int cameraId,
   3171                                     struct camera_info *info)
   3172 {
   3173     int rc = 0;
   3174 
   3175     if (NULL == gCamCapability[cameraId]) {
   3176         rc = initCapabilities(cameraId);
   3177         if (rc < 0) {
   3178             //pthread_mutex_unlock(&g_camlock);
   3179             return rc;
   3180         }
   3181     }
   3182 
   3183     if (NULL == gStaticMetadata[cameraId]) {
   3184         rc = initStaticMetadata(cameraId);
   3185         if (rc < 0) {
   3186             return rc;
   3187         }
   3188     }
   3189 
   3190     switch(gCamCapability[cameraId]->position) {
   3191     case CAM_POSITION_BACK:
   3192         info->facing = CAMERA_FACING_BACK;
   3193         break;
   3194 
   3195     case CAM_POSITION_FRONT:
   3196         info->facing = CAMERA_FACING_FRONT;
   3197         break;
   3198 
   3199     default:
   3200         ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
   3201         rc = -1;
   3202         break;
   3203     }
   3204 
   3205 
   3206     info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
   3207     info->device_version = CAMERA_DEVICE_API_VERSION_3_0;
   3208     info->static_camera_characteristics = gStaticMetadata[cameraId];
   3209 
   3210     return rc;
   3211 }
   3212 
   3213 /*===========================================================================
 * FUNCTION   : translateCapabilityToMetadata
   3215  *
   3216  * DESCRIPTION: translate the metadata into camera_metadata_t
   3217  *
   3218  * PARAMETERS : type of the request
   3219  *
   3220  *
   3221  * RETURN     : success: camera_metadata_t*
   3222  *              failure: NULL
   3223  *
   3224  *==========================================================================*/
   3225 camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
   3226 {
   3227     pthread_mutex_lock(&mMutex);
   3228 
   3229     if (mDefaultMetadata[type] != NULL) {
   3230         pthread_mutex_unlock(&mMutex);
   3231         return mDefaultMetadata[type];
   3232     }
   3233     //first time we are handling this request
   3234     //fill up the metadata structure using the wrapper class
   3235     CameraMetadata settings;
   3236     //translate from cam_capability_t to camera_metadata_tag_t
   3237     static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
   3238     settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
   3239     int32_t defaultRequestID = 0;
   3240     settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
   3241 
   3242     /*control*/
   3243 
   3244     uint8_t controlIntent = 0;
   3245     switch (type) {
   3246       case CAMERA3_TEMPLATE_PREVIEW:
   3247         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
   3248         break;
   3249       case CAMERA3_TEMPLATE_STILL_CAPTURE:
   3250         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
   3251         break;
   3252       case CAMERA3_TEMPLATE_VIDEO_RECORD:
   3253         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
   3254         break;
   3255       case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
   3256         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
   3257         break;
   3258       case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
   3259         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
   3260         break;
   3261       default:
   3262         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
   3263         break;
   3264     }
   3265     settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
   3266 
   3267     settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
   3268             &gCamCapability[mCameraId]->exposure_compensation_default, 1);
   3269 
   3270     static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
   3271     settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
   3272 
   3273     static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
   3274     settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
   3275 
   3276     static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
   3277     settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
   3278 
   3279     static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
   3280     settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
   3281 
   3282     static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
   3283     settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
   3284 
   3285     static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
   3286     settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
   3287 
   3288     static uint8_t focusMode;
   3289     if (gCamCapability[mCameraId]->supported_focus_modes_cnt > 1) {
   3290         focusMode = ANDROID_CONTROL_AF_MODE_AUTO;
   3291     } else {
   3292         focusMode = ANDROID_CONTROL_AF_MODE_OFF;
   3293     }
   3294     settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
   3295 
   3296     static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
   3297     settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
   3298 
   3299     /*flash*/
   3300     static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
   3301     settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
   3302 
   3303     static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
   3304     settings.update(ANDROID_FLASH_FIRING_POWER,
   3305             &flashFiringLevel, 1);
   3306 
   3307     /* lens */
   3308     float default_aperture = gCamCapability[mCameraId]->apertures[0];
   3309     settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
   3310 
   3311     if (gCamCapability[mCameraId]->filter_densities_count) {
   3312         float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
   3313         settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
   3314                         gCamCapability[mCameraId]->filter_densities_count);
   3315     }
   3316 
   3317     float default_focal_length = gCamCapability[mCameraId]->focal_length;
   3318     settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
   3319 
   3320     /* Exposure time(Update the Min Exposure Time)*/
   3321     int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
   3322     settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
   3323 
   3324     /* frame duration */
   3325     static const int64_t default_frame_duration = NSEC_PER_33MSEC;
   3326     settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
   3327 
   3328     /* sensitivity */
   3329     static const int32_t default_sensitivity = 100;
   3330     settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
   3331 
   3332     /*edge mode*/
   3333     static const uint8_t edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
   3334     settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
   3335 
   3336     /*noise reduction mode*/
   3337     static const uint8_t noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
   3338     settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
   3339 
   3340     /*color correction mode*/
   3341     static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY;
   3342     settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
   3343 
   3344     /*transform matrix mode*/
   3345     static const uint8_t tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
   3346     settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
   3347 
   3348     uint8_t edge_strength = (uint8_t)gCamCapability[mCameraId]->sharpness_ctrl.def_value;
   3349     settings.update(ANDROID_EDGE_STRENGTH, &edge_strength, 1);
   3350 
   3351     int32_t scaler_crop_region[4];
   3352     scaler_crop_region[0] = 0;
   3353     scaler_crop_region[1] = 0;
   3354     scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
   3355     scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
   3356     settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
   3357 
   3358     static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ;
   3359     settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
   3360 
   3361     static const uint8_t vs_mode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
   3362     settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vs_mode, 1);
   3363 
   3364     uint8_t opt_stab_mode = (gCamCapability[mCameraId]->optical_stab_modes_count == 2)?
   3365                              ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON :
   3366                              ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
   3367     settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &opt_stab_mode, 1);
   3368 
   3369     mDefaultMetadata[type] = settings.release();
   3370 
   3371     pthread_mutex_unlock(&mMutex);
   3372     return mDefaultMetadata[type];
   3373 }
   3374 
   3375 /*===========================================================================
   3376  * FUNCTION   : setFrameParameters
   3377  *
   3378  * DESCRIPTION: set parameters per frame as requested in the metadata from
   3379  *              framework
   3380  *
   3381  * PARAMETERS :
   3382  *   @request   : request that needs to be serviced
   3383  *   @streamID : Stream ID of all the requested streams
   3384  *
 * RETURN     : success: NO_ERROR
 *              failure: BAD_VALUE
   3387  *==========================================================================*/
   3388 int QCamera3HardwareInterface::setFrameParameters(camera3_capture_request_t *request,
   3389                     cam_stream_ID_t streamID)
   3390 {
   3391     /*translate from camera_metadata_t type to parm_type_t*/
   3392     int rc = 0;
   3393     if (request->settings == NULL && mFirstRequest) {
   3394         /*settings cannot be null for the first request*/
   3395         return BAD_VALUE;
   3396     }
   3397 
   3398     int32_t hal_version = CAM_HAL_V3;
   3399 
   3400     memset(mParameters, 0, sizeof(parm_buffer_t));
   3401     mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
   3402     rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
   3403                 sizeof(hal_version), &hal_version);
   3404     if (rc < 0) {
   3405         ALOGE("%s: Failed to set hal version in the parameters", __func__);
   3406         return BAD_VALUE;
   3407     }
   3408 
   3409     /*we need to update the frame number in the parameters*/
   3410     rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
   3411                                 sizeof(request->frame_number), &(request->frame_number));
   3412     if (rc < 0) {
   3413         ALOGE("%s: Failed to set the frame number in the parameters", __func__);
   3414         return BAD_VALUE;
   3415     }
   3416 
   3417     /* Update stream id of all the requested buffers */
   3418     rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_ID,
   3419                                 sizeof(cam_stream_ID_t), &streamID);
   3420 
   3421     if (rc < 0) {
   3422         ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
   3423         return BAD_VALUE;
   3424     }
   3425 
   3426     if(request->settings != NULL){
   3427         rc = translateMetadataToParameters(request);
   3428     }
   3429     /*set the parameters to backend*/
   3430     mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
   3431     return rc;
   3432 }
   3433 
   3434 /*===========================================================================
   3435  * FUNCTION   : translateMetadataToParameters
   3436  *
   3437  * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
   3438  *
   3439  *
   3440  * PARAMETERS :
   3441  *   @request  : request sent from framework
   3442  *
   3443  *
 * RETURN     : success: NO_ERROR
 *              failure: a negative error code
   3446  *==========================================================================*/
   3447 int QCamera3HardwareInterface::translateMetadataToParameters
   3448                                   (const camera3_capture_request_t *request)
   3449 {
   3450     int rc = 0;
   3451     CameraMetadata frame_settings;
   3452     frame_settings = request->settings;
   3453 
   3454     /* Do not change the order of the following list unless you know what you are
   3455      * doing.
   3456      * The order is laid out in such a way that parameters in the front of the table
   3457      * may be used to override the parameters later in the table. Examples are:
     * 1. META_MODE should precede AEC/AWB/AF MODE
     * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
     * 4. Any mode should precede its corresponding settings
   3462      */
   3463     if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
   3464         uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
   3465         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_MODE,
   3466                 sizeof(metaMode), &metaMode);
   3467         if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
   3468            uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
   3469            uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP,
   3470                                              sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
   3471                                              fwk_sceneMode);
   3472            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
   3473                 sizeof(sceneMode), &sceneMode);
   3474         } else if (metaMode == ANDROID_CONTROL_MODE_OFF) {
   3475            uint8_t sceneMode = CAM_SCENE_MODE_OFF;
   3476            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
   3477                 sizeof(sceneMode), &sceneMode);
   3478         } else if (metaMode == ANDROID_CONTROL_MODE_AUTO) {
   3479            uint8_t sceneMode = CAM_SCENE_MODE_OFF;
   3480            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
   3481                 sizeof(sceneMode), &sceneMode);
   3482         }
   3483     }
   3484 
   3485     if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
   3486         uint8_t fwk_aeMode =
   3487             frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
   3488         uint8_t aeMode;
   3489         int32_t redeye;
   3490 
   3491         if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
   3492             aeMode = CAM_AE_MODE_OFF;
   3493         } else {
   3494             aeMode = CAM_AE_MODE_ON;
   3495         }
   3496         if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
   3497             redeye = 1;
   3498         } else {
   3499             redeye = 0;
   3500         }
   3501 
   3502         int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
   3503                                           sizeof(AE_FLASH_MODE_MAP),
   3504                                           fwk_aeMode);
   3505         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_MODE,
   3506                 sizeof(aeMode), &aeMode);
   3507         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
   3508                 sizeof(flashMode), &flashMode);
   3509         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_REDEYE_REDUCTION,
   3510                 sizeof(redeye), &redeye);
   3511     }
   3512 
   3513     if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
   3514         uint8_t fwk_whiteLevel =
   3515             frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
   3516         uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
   3517                 sizeof(WHITE_BALANCE_MODES_MAP),
   3518                 fwk_whiteLevel);
   3519         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_WHITE_BALANCE,
   3520                 sizeof(whiteLevel), &whiteLevel);
   3521     }
   3522 
   3523     float focalDistance = -1.0;
   3524     if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
   3525         focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
   3526         rc = AddSetParmEntryToBatch(mParameters,
   3527                 CAM_INTF_META_LENS_FOCUS_DISTANCE,
   3528                 sizeof(focalDistance), &focalDistance);
   3529     }
   3530 
   3531     if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
   3532         uint8_t fwk_focusMode =
   3533             frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
   3534         uint8_t focusMode;
   3535         if (focalDistance == 0.0 && fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
   3536             focusMode = CAM_FOCUS_MODE_INFINITY;
   3537         } else{
   3538          focusMode = lookupHalName(FOCUS_MODES_MAP,
   3539                                    sizeof(FOCUS_MODES_MAP),
   3540                                    fwk_focusMode);
   3541         }
   3542         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FOCUS_MODE,
   3543                 sizeof(focusMode), &focusMode);
   3544     }
   3545 
   3546     if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
   3547         int32_t antibandingMode =
   3548             frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.i32[0];
   3549         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_ANTIBANDING,
   3550                 sizeof(antibandingMode), &antibandingMode);
   3551     }
   3552 
   3553     if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
   3554         int32_t expCompensation = frame_settings.find(
   3555             ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
   3556         if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
   3557             expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
   3558         if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
   3559             expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
   3560         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
   3561           sizeof(expCompensation), &expCompensation);
   3562     }
   3563 
   3564     if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
   3565         uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
   3566         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AEC_LOCK,
   3567                 sizeof(aeLock), &aeLock);
   3568     }
   3569     if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
   3570         cam_fps_range_t fps_range;
   3571         fps_range.min_fps =
   3572             frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
   3573         fps_range.max_fps =
   3574             frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
   3575         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FPS_RANGE,
   3576                 sizeof(fps_range), &fps_range);
   3577     }
   3578 
   3579     if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
   3580         uint8_t awbLock =
   3581             frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
   3582         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AWB_LOCK,
   3583                 sizeof(awbLock), &awbLock);
   3584     }
   3585 
   3586     if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
   3587         uint8_t fwk_effectMode =
   3588             frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
   3589         uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
   3590                 sizeof(EFFECT_MODES_MAP),
   3591                 fwk_effectMode);
   3592         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EFFECT,
   3593                 sizeof(effectMode), &effectMode);
   3594     }
   3595 
   3596     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
   3597         uint8_t colorCorrectMode =
   3598             frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
   3599         rc =
   3600             AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE,
   3601                     sizeof(colorCorrectMode), &colorCorrectMode);
   3602     }
   3603 
   3604     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
   3605         cam_color_correct_gains_t colorCorrectGains;
   3606         for (int i = 0; i < 4; i++) {
   3607             colorCorrectGains.gains[i] =
   3608                 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
   3609         }
   3610         rc =
   3611             AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_GAINS,
   3612                     sizeof(colorCorrectGains), &colorCorrectGains);
   3613     }
   3614 
   3615     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
   3616         cam_color_correct_matrix_t colorCorrectTransform;
   3617         cam_rational_type_t transform_elem;
   3618         int num = 0;
   3619         for (int i = 0; i < 3; i++) {
   3620            for (int j = 0; j < 3; j++) {
   3621               transform_elem.numerator =
   3622                  frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
   3623               transform_elem.denominator =
   3624                  frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
   3625               colorCorrectTransform.transform_matrix[i][j] = transform_elem;
   3626               num++;
   3627            }
   3628         }
   3629         rc =
   3630             AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
   3631                     sizeof(colorCorrectTransform), &colorCorrectTransform);
   3632     }
   3633 
   3634     cam_trigger_t aecTrigger;
   3635     aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
   3636     aecTrigger.trigger_id = -1;
   3637     if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
   3638         frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
   3639         aecTrigger.trigger =
   3640             frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
   3641         aecTrigger.trigger_id =
   3642             frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
   3643     }
   3644     rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
   3645                                 sizeof(aecTrigger), &aecTrigger);
   3646 
   3647     /*af_trigger must come with a trigger id*/
   3648     if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
   3649         frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
   3650         cam_trigger_t af_trigger;
   3651         af_trigger.trigger =
   3652             frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
   3653         af_trigger.trigger_id =
   3654             frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
   3655         rc = AddSetParmEntryToBatch(mParameters,
   3656                 CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
   3657     }
   3658 
   3659     if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
   3660         int32_t demosaic =
   3661             frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
   3662         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_DEMOSAIC,
   3663                 sizeof(demosaic), &demosaic);
   3664     }
   3665 
   3666     if (frame_settings.exists(ANDROID_EDGE_MODE)) {
   3667         cam_edge_application_t edge_application;
   3668         edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
   3669         if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
   3670             edge_application.sharpness = 0;
   3671         } else {
   3672             if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
   3673                 uint8_t edgeStrength =
   3674                     frame_settings.find(ANDROID_EDGE_STRENGTH).data.u8[0];
   3675                 edge_application.sharpness = (int32_t)edgeStrength;
   3676             } else {
   3677                 edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
   3678             }
   3679         }
   3680         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_EDGE_MODE,
   3681                 sizeof(edge_application), &edge_application);
   3682     }
   3683 
   3684     if (frame_settings.exists(ANDROID_FLASH_MODE)) {
   3685         int32_t respectFlashMode = 1;
   3686         if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
   3687             uint8_t fwk_aeMode =
   3688                 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
   3689             if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
   3690                 respectFlashMode = 0;
   3691                 ALOGV("%s: AE Mode controls flash, ignore android.flash.mode",
   3692                     __func__);
   3693             }
   3694         }
   3695         if (respectFlashMode) {
   3696             uint8_t flashMode =
   3697                 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
   3698             flashMode = (int32_t)lookupHalName(FLASH_MODES_MAP,
   3699                                           sizeof(FLASH_MODES_MAP),
   3700                                           flashMode);
   3701             ALOGV("%s: flash mode after mapping %d", __func__, flashMode);
   3702             // To check: CAM_INTF_META_FLASH_MODE usage
   3703             rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
   3704                           sizeof(flashMode), &flashMode);
   3705         }
   3706     }
   3707 
   3708     if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
   3709         uint8_t flashPower =
   3710             frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
   3711         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FLASH_POWER,
   3712                 sizeof(flashPower), &flashPower);
   3713     }
   3714 
   3715     if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
   3716         int64_t flashFiringTime =
   3717             frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
   3718         rc = AddSetParmEntryToBatch(mParameters,
   3719                 CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
   3720     }
   3721 
   3722     if (frame_settings.exists(ANDROID_GEOMETRIC_MODE)) {
   3723         uint8_t geometricMode =
   3724             frame_settings.find(ANDROID_GEOMETRIC_MODE).data.u8[0];
   3725         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_GEOMETRIC_MODE,
   3726                 sizeof(geometricMode), &geometricMode);
   3727     }
   3728 
   3729     if (frame_settings.exists(ANDROID_GEOMETRIC_STRENGTH)) {
   3730         uint8_t geometricStrength =
   3731             frame_settings.find(ANDROID_GEOMETRIC_STRENGTH).data.u8[0];
   3732         rc = AddSetParmEntryToBatch(mParameters,
   3733                 CAM_INTF_META_GEOMETRIC_STRENGTH,
   3734                 sizeof(geometricStrength), &geometricStrength);
   3735     }
   3736 
   3737     if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
   3738         uint8_t hotPixelMode =
   3739             frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
   3740         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_HOTPIXEL_MODE,
   3741                 sizeof(hotPixelMode), &hotPixelMode);
   3742     }
   3743 
   3744     if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
   3745         float lensAperture =
   3746             frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
   3747         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_APERTURE,
   3748                 sizeof(lensAperture), &lensAperture);
   3749     }
   3750 
   3751     if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
   3752         float filterDensity =
   3753             frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
   3754         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_FILTERDENSITY,
   3755                 sizeof(filterDensity), &filterDensity);
   3756     }
   3757 
   3758     if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
   3759         float focalLength =
   3760             frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
   3761         rc = AddSetParmEntryToBatch(mParameters,
   3762                 CAM_INTF_META_LENS_FOCAL_LENGTH,
   3763                 sizeof(focalLength), &focalLength);
   3764     }
   3765 
   3766     if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
   3767         uint8_t optStabMode =
   3768             frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
   3769         rc = AddSetParmEntryToBatch(mParameters,
   3770                 CAM_INTF_META_LENS_OPT_STAB_MODE,
   3771                 sizeof(optStabMode), &optStabMode);
   3772     }
   3773 
   3774     if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
   3775         uint8_t noiseRedMode =
   3776             frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
   3777         rc = AddSetParmEntryToBatch(mParameters,
   3778                 CAM_INTF_META_NOISE_REDUCTION_MODE,
   3779                 sizeof(noiseRedMode), &noiseRedMode);
   3780     }
   3781 
   3782     if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
   3783         uint8_t noiseRedStrength =
   3784             frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
   3785         rc = AddSetParmEntryToBatch(mParameters,
   3786                 CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
   3787                 sizeof(noiseRedStrength), &noiseRedStrength);
   3788     }
   3789 
   3790     cam_crop_region_t scalerCropRegion;
   3791     bool scalerCropSet = false;
   3792     if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
   3793         scalerCropRegion.left =
   3794             frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
   3795         scalerCropRegion.top =
   3796             frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
   3797         scalerCropRegion.width =
   3798             frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
   3799         scalerCropRegion.height =
   3800             frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
   3801         rc = AddSetParmEntryToBatch(mParameters,
   3802                 CAM_INTF_META_SCALER_CROP_REGION,
   3803                 sizeof(scalerCropRegion), &scalerCropRegion);
   3804         scalerCropSet = true;
   3805     }
   3806 
   3807     if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
   3808         int64_t sensorExpTime =
   3809             frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
   3810         ALOGV("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
   3811         rc = AddSetParmEntryToBatch(mParameters,
   3812                 CAM_INTF_META_SENSOR_EXPOSURE_TIME,
   3813                 sizeof(sensorExpTime), &sensorExpTime);
   3814     }
   3815 
   3816     if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
   3817         int64_t sensorFrameDuration =
   3818             frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
   3819         int64_t minFrameDuration = getMinFrameDuration(request);
   3820         sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
   3821         if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
   3822             sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
   3823         ALOGV("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
   3824         rc = AddSetParmEntryToBatch(mParameters,
   3825                 CAM_INTF_META_SENSOR_FRAME_DURATION,
   3826                 sizeof(sensorFrameDuration), &sensorFrameDuration);
   3827     }
   3828 
   3829     if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
   3830         int32_t sensorSensitivity =
   3831             frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
   3832         if (sensorSensitivity <
   3833                 gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
   3834             sensorSensitivity =
   3835                 gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
   3836         if (sensorSensitivity >
   3837                 gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
   3838             sensorSensitivity =
   3839                 gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
   3840         ALOGV("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
   3841         rc = AddSetParmEntryToBatch(mParameters,
   3842                 CAM_INTF_META_SENSOR_SENSITIVITY,
   3843                 sizeof(sensorSensitivity), &sensorSensitivity);
   3844     }
   3845 
   3846     if (frame_settings.exists(ANDROID_SHADING_MODE)) {
   3847         int32_t shadingMode =
   3848             frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
   3849         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_MODE,
   3850                 sizeof(shadingMode), &shadingMode);
   3851     }
   3852 
   3853     if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
   3854         uint8_t shadingStrength =
   3855             frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
   3856         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_STRENGTH,
   3857                 sizeof(shadingStrength), &shadingStrength);
   3858     }
   3859 
   3860     if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
   3861         uint8_t fwk_facedetectMode =
   3862             frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
   3863         uint8_t facedetectMode =
   3864             lookupHalName(FACEDETECT_MODES_MAP,
   3865                 sizeof(FACEDETECT_MODES_MAP), fwk_facedetectMode);
   3866         rc = AddSetParmEntryToBatch(mParameters,
   3867                 CAM_INTF_META_STATS_FACEDETECT_MODE,
   3868                 sizeof(facedetectMode), &facedetectMode);
   3869     }
   3870 
   3871     if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
   3872         uint8_t histogramMode =
   3873             frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
   3874         rc = AddSetParmEntryToBatch(mParameters,
   3875                 CAM_INTF_META_STATS_HISTOGRAM_MODE,
   3876                 sizeof(histogramMode), &histogramMode);
   3877     }
   3878 
   3879     if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
   3880         uint8_t sharpnessMapMode =
   3881             frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
   3882         rc = AddSetParmEntryToBatch(mParameters,
   3883                 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
   3884                 sizeof(sharpnessMapMode), &sharpnessMapMode);
   3885     }
   3886 
   3887     if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
   3888         uint8_t tonemapMode =
   3889             frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
   3890         rc = AddSetParmEntryToBatch(mParameters,
   3891                 CAM_INTF_META_TONEMAP_MODE,
   3892                 sizeof(tonemapMode), &tonemapMode);
   3893     }
   3894     /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
   3895     /*All tonemap channels will have the same number of points*/
   3896     if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
   3897         frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
   3898         frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
   3899         cam_rgb_tonemap_curves tonemapCurves;
   3900         tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
   3901 
   3902         /* ch0 = G*/
   3903         int point = 0;
   3904         cam_tonemap_curve_t tonemapCurveGreen;
   3905         for (int i = 0; i < tonemapCurves.tonemap_points_cnt ; i++) {
   3906             for (int j = 0; j < 2; j++) {
   3907                tonemapCurveGreen.tonemap_points[i][j] =
   3908                   frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
   3909                point++;
   3910             }
   3911         }
   3912         tonemapCurves.curves[0] = tonemapCurveGreen;
   3913 
   3914         /* ch 1 = B */
   3915         point = 0;
   3916         cam_tonemap_curve_t tonemapCurveBlue;
   3917         for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
   3918             for (int j = 0; j < 2; j++) {
   3919                tonemapCurveBlue.tonemap_points[i][j] =
   3920                   frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
   3921                point++;
   3922             }
   3923         }
   3924         tonemapCurves.curves[1] = tonemapCurveBlue;
   3925 
   3926         /* ch 2 = R */
   3927         point = 0;
   3928         cam_tonemap_curve_t tonemapCurveRed;
   3929         for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
   3930             for (int j = 0; j < 2; j++) {
   3931                tonemapCurveRed.tonemap_points[i][j] =
   3932                   frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
   3933                point++;
   3934             }
   3935         }
   3936         tonemapCurves.curves[2] = tonemapCurveRed;
   3937 
   3938         rc = AddSetParmEntryToBatch(mParameters,
   3939                 CAM_INTF_META_TONEMAP_CURVES,
   3940                 sizeof(tonemapCurves), &tonemapCurves);
   3941     }
   3942 
   3943     if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
   3944         uint8_t captureIntent =
   3945             frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
   3946         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
   3947                 sizeof(captureIntent), &captureIntent);
   3948     }
   3949 
   3950     if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
   3951         uint8_t blackLevelLock =
   3952             frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
   3953         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_BLACK_LEVEL_LOCK,
   3954                 sizeof(blackLevelLock), &blackLevelLock);
   3955     }
   3956 
   3957     if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
   3958         uint8_t lensShadingMapMode =
   3959             frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
   3960         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
   3961                 sizeof(lensShadingMapMode), &lensShadingMapMode);
   3962     }
   3963 
   3964     if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
   3965         cam_area_t roi;
   3966         bool reset = true;
   3967         convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
   3968         if (scalerCropSet) {
   3969             reset = resetIfNeededROI(&roi, &scalerCropRegion);
   3970         }
   3971         if (reset) {
   3972             rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_ROI,
   3973                     sizeof(roi), &roi);
   3974         }
   3975     }
   3976 
   3977     if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
   3978         cam_area_t roi;
   3979         bool reset = true;
   3980         convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
   3981         if (scalerCropSet) {
   3982             reset = resetIfNeededROI(&roi, &scalerCropRegion);
   3983         }
   3984         if (reset) {
   3985             rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AF_ROI,
   3986                     sizeof(roi), &roi);
   3987         }
   3988     }
   3989 
   3990     if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) {
   3991         cam_area_t roi;
   3992         bool reset = true;
   3993         convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AWB_REGIONS);
   3994         if (scalerCropSet) {
   3995             reset = resetIfNeededROI(&roi, &scalerCropRegion);
   3996         }
   3997         if (reset) {
   3998             rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AWB_REGIONS,
   3999                     sizeof(roi), &roi);
   4000         }
   4001     }
   4002     return rc;
   4003 }
   4004 
   4005 /*===========================================================================
   4006  * FUNCTION   : getJpegSettings
   4007  *
   4008  * DESCRIPTION: save the jpeg settings in the HAL
   4009  *
   4010  *
   4011  * PARAMETERS :
   4012  *   @settings  : frame settings information from framework
   4013  *
   4014  *
   4015  * RETURN     : success: NO_ERROR
   4016  *              failure:
   4017  *==========================================================================*/
   4018 int QCamera3HardwareInterface::getJpegSettings
   4019                                   (const camera_metadata_t *settings)
   4020 {
   4021     if (mJpegSettings) {
   4022         if (mJpegSettings->gps_timestamp) {
   4023             free(mJpegSettings->gps_timestamp);
   4024             mJpegSettings->gps_timestamp = NULL;
   4025         }
   4026         if (mJpegSettings->gps_coordinates) {
   4027             for (int i = 0; i < 3; i++) {
   4028                 free(mJpegSettings->gps_coordinates[i]);
   4029                 mJpegSettings->gps_coordinates[i] = NULL;
   4030             }
   4031         }
   4032         free(mJpegSettings);
   4033         mJpegSettings = NULL;
   4034     }
   4035     mJpegSettings = (jpeg_settings_t*) malloc(sizeof(jpeg_settings_t));
   4036     CameraMetadata jpeg_settings;
   4037     jpeg_settings = settings;
   4038 
   4039     if (jpeg_settings.exists(ANDROID_JPEG_ORIENTATION)) {
   4040         mJpegSettings->jpeg_orientation =
   4041             jpeg_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
   4042     } else {
   4043         mJpegSettings->jpeg_orientation = 0;
   4044     }
   4045     if (jpeg_settings.exists(ANDROID_JPEG_QUALITY)) {
   4046         mJpegSettings->jpeg_quality =
   4047             jpeg_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
   4048     } else {
   4049         mJpegSettings->jpeg_quality = 85;
   4050     }
   4051     if (jpeg_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
   4052         mJpegSettings->thumbnail_size.width =
   4053             jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
   4054         mJpegSettings->thumbnail_size.height =
   4055             jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
   4056     } else {
   4057         mJpegSettings->thumbnail_size.width = 0;
   4058         mJpegSettings->thumbnail_size.height = 0;
   4059     }
   4060     if (jpeg_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
   4061         for (int i = 0; i < 3; i++) {
   4062             mJpegSettings->gps_coordinates[i] = (double*)malloc(sizeof(double*));
   4063             *(mJpegSettings->gps_coordinates[i]) =
   4064                 jpeg_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[i];
   4065         }
   4066     } else{
   4067        for (int i = 0; i < 3; i++) {
   4068             mJpegSettings->gps_coordinates[i] = NULL;
   4069         }
   4070     }
   4071 
   4072     if (jpeg_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
   4073         mJpegSettings->gps_timestamp = (int64_t*)malloc(sizeof(int64_t*));
   4074         *(mJpegSettings->gps_timestamp) =
   4075             jpeg_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
   4076     } else {
   4077         mJpegSettings->gps_timestamp = NULL;
   4078     }
   4079 
   4080     if (jpeg_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
   4081         int len = jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count;
   4082         for (int i = 0; i < len; i++) {
   4083             mJpegSettings->gps_processing_method[i] =
   4084                 jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8[i];
   4085         }
   4086         if (mJpegSettings->gps_processing_method[len-1] != '\0') {
   4087             mJpegSettings->gps_processing_method[len] = '\0';
   4088         }
   4089     } else {
   4090         mJpegSettings->gps_processing_method[0] = '\0';
   4091     }
   4092 
   4093     if (jpeg_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
   4094         mJpegSettings->sensor_sensitivity =
   4095             jpeg_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
   4096     } else {
   4097         mJpegSettings->sensor_sensitivity = mMetadataResponse.iso_speed;
   4098     }
   4099 
   4100     mJpegSettings->sensor_exposure_time = mMetadataResponse.exposure_time;
   4101 
   4102     if (jpeg_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
   4103         mJpegSettings->lens_focal_length =
   4104             jpeg_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
   4105     }
   4106     if (jpeg_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
   4107         mJpegSettings->exposure_compensation =
   4108             jpeg_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
   4109     }
   4110     mJpegSettings->sharpness = 10; //default value
   4111     if (jpeg_settings.exists(ANDROID_EDGE_MODE)) {
   4112         uint8_t edgeMode = jpeg_settings.find(ANDROID_EDGE_MODE).data.u8[0];
   4113         if (edgeMode == ANDROID_EDGE_MODE_OFF) {
   4114             mJpegSettings->sharpness = 0;
   4115         }
   4116     }
   4117     mJpegSettings->exposure_comp_step = gCamCapability[mCameraId]->exp_compensation_step;
   4118     mJpegSettings->max_jpeg_size = calcMaxJpegSize();
   4119     mJpegSettings->is_jpeg_format = true;
   4120     mJpegSettings->min_required_pp_mask = gCamCapability[mCameraId]->min_required_pp_mask;
   4121     return 0;
   4122 }
   4123 
   4124 /*===========================================================================
   4125  * FUNCTION   : captureResultCb
   4126  *
   4127  * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
   4128  *
   4129  * PARAMETERS :
   4130  *   @frame  : frame information from mm-camera-interface
   4131  *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
   4132  *   @userdata: userdata
   4133  *
   4134  * RETURN     : NONE
   4135  *==========================================================================*/
   4136 void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
   4137                 camera3_stream_buffer_t *buffer,
   4138                 uint32_t frame_number, void *userdata)
   4139 {
   4140     QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
   4141     if (hw == NULL) {
   4142         ALOGE("%s: Invalid hw %p", __func__, hw);
   4143         return;
   4144     }
   4145 
   4146     hw->captureResultCb(metadata, buffer, frame_number);
   4147     return;
   4148 }
   4149 
   4150 
   4151 /*===========================================================================
   4152  * FUNCTION   : initialize
   4153  *
   4154  * DESCRIPTION: Pass framework callback pointers to HAL
   4155  *
   4156  * PARAMETERS :
   4157  *
   4158  *
   4159  * RETURN     : Success : 0
   4160  *              Failure: -ENODEV
   4161  *==========================================================================*/
   4162 
   4163 int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
   4164                                   const camera3_callback_ops_t *callback_ops)
   4165 {
   4166     ALOGV("%s: E", __func__);
   4167     QCamera3HardwareInterface *hw =
   4168         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   4169     if (!hw) {
   4170         ALOGE("%s: NULL camera device", __func__);
   4171         return -ENODEV;
   4172     }
   4173 
   4174     int rc = hw->initialize(callback_ops);
   4175     ALOGV("%s: X", __func__);
   4176     return rc;
   4177 }
   4178 
   4179 /*===========================================================================
   4180  * FUNCTION   : configure_streams
   4181  *
   4182  * DESCRIPTION:
   4183  *
   4184  * PARAMETERS :
   4185  *
   4186  *
   4187  * RETURN     : Success: 0
   4188  *              Failure: -EINVAL (if stream configuration is invalid)
   4189  *                       -ENODEV (fatal error)
   4190  *==========================================================================*/
   4191 
   4192 int QCamera3HardwareInterface::configure_streams(
   4193         const struct camera3_device *device,
   4194         camera3_stream_configuration_t *stream_list)
   4195 {
   4196     ALOGV("%s: E", __func__);
   4197     QCamera3HardwareInterface *hw =
   4198         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   4199     if (!hw) {
   4200         ALOGE("%s: NULL camera device", __func__);
   4201         return -ENODEV;
   4202     }
   4203     int rc = hw->configureStreams(stream_list);
   4204     ALOGV("%s: X", __func__);
   4205     return rc;
   4206 }
   4207 
   4208 /*===========================================================================
   4209  * FUNCTION   : register_stream_buffers
   4210  *
   4211  * DESCRIPTION: Register stream buffers with the device
   4212  *
   4213  * PARAMETERS :
   4214  *
   4215  * RETURN     :
   4216  *==========================================================================*/
   4217 int QCamera3HardwareInterface::register_stream_buffers(
   4218         const struct camera3_device *device,
   4219         const camera3_stream_buffer_set_t *buffer_set)
   4220 {
   4221     ALOGV("%s: E", __func__);
   4222     QCamera3HardwareInterface *hw =
   4223         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   4224     if (!hw) {
   4225         ALOGE("%s: NULL camera device", __func__);
   4226         return -ENODEV;
   4227     }
   4228     int rc = hw->registerStreamBuffers(buffer_set);
   4229     ALOGV("%s: X", __func__);
   4230     return rc;
   4231 }
   4232 
   4233 /*===========================================================================
   4234  * FUNCTION   : construct_default_request_settings
   4235  *
   4236  * DESCRIPTION: Configure a settings buffer to meet the required use case
   4237  *
   4238  * PARAMETERS :
   4239  *
   4240  *
   4241  * RETURN     : Success: Return valid metadata
   4242  *              Failure: Return NULL
   4243  *==========================================================================*/
   4244 const camera_metadata_t* QCamera3HardwareInterface::
   4245     construct_default_request_settings(const struct camera3_device *device,
   4246                                         int type)
   4247 {
   4248 
   4249     ALOGV("%s: E", __func__);
   4250     camera_metadata_t* fwk_metadata = NULL;
   4251     QCamera3HardwareInterface *hw =
   4252         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   4253     if (!hw) {
   4254         ALOGE("%s: NULL camera device", __func__);
   4255         return NULL;
   4256     }
   4257 
   4258     fwk_metadata = hw->translateCapabilityToMetadata(type);
   4259 
   4260     ALOGV("%s: X", __func__);
   4261     return fwk_metadata;
   4262 }
   4263 
   4264 /*===========================================================================
   4265  * FUNCTION   : process_capture_request
   4266  *
   4267  * DESCRIPTION:
   4268  *
   4269  * PARAMETERS :
   4270  *
   4271  *
   4272  * RETURN     :
   4273  *==========================================================================*/
   4274 int QCamera3HardwareInterface::process_capture_request(
   4275                     const struct camera3_device *device,
   4276                     camera3_capture_request_t *request)
   4277 {
   4278     ALOGV("%s: E", __func__);
   4279     QCamera3HardwareInterface *hw =
   4280         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   4281     if (!hw) {
   4282         ALOGE("%s: NULL camera device", __func__);
   4283         return -EINVAL;
   4284     }
   4285 
   4286     int rc = hw->processCaptureRequest(request);
   4287     ALOGV("%s: X", __func__);
   4288     return rc;
   4289 }
   4290 
   4291 /*===========================================================================
   4292  * FUNCTION   : get_metadata_vendor_tag_ops
   4293  *
   4294  * DESCRIPTION:
   4295  *
   4296  * PARAMETERS :
   4297  *
   4298  *
   4299  * RETURN     :
   4300  *==========================================================================*/
   4301 
   4302 void QCamera3HardwareInterface::get_metadata_vendor_tag_ops(
   4303                 const struct camera3_device *device,
   4304                 vendor_tag_query_ops_t* ops)
   4305 {
   4306     ALOGV("%s: E", __func__);
   4307     QCamera3HardwareInterface *hw =
   4308         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   4309     if (!hw) {
   4310         ALOGE("%s: NULL camera device", __func__);
   4311         return;
   4312     }
   4313 
   4314     hw->getMetadataVendorTagOps(ops);
   4315     ALOGV("%s: X", __func__);
   4316     return;
   4317 }
   4318 
   4319 /*===========================================================================
   4320  * FUNCTION   : dump
   4321  *
   4322  * DESCRIPTION:
   4323  *
   4324  * PARAMETERS :
   4325  *
   4326  *
   4327  * RETURN     :
   4328  *==========================================================================*/
   4329 
   4330 void QCamera3HardwareInterface::dump(
   4331                 const struct camera3_device *device, int fd)
   4332 {
   4333     ALOGV("%s: E", __func__);
   4334     QCamera3HardwareInterface *hw =
   4335         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   4336     if (!hw) {
   4337         ALOGE("%s: NULL camera device", __func__);
   4338         return;
   4339     }
   4340 
   4341     hw->dump(fd);
   4342     ALOGV("%s: X", __func__);
   4343     return;
   4344 }
   4345 
   4346 /*===========================================================================
   4347  * FUNCTION   : flush
   4348  *
   4349  * DESCRIPTION:
   4350  *
   4351  * PARAMETERS :
   4352  *
   4353  *
   4354  * RETURN     :
   4355  *==========================================================================*/
   4356 
   4357 int QCamera3HardwareInterface::flush(
   4358                 const struct camera3_device *device)
   4359 {
   4360     int rc;
   4361     ALOGV("%s: E", __func__);
   4362     QCamera3HardwareInterface *hw =
   4363         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   4364     if (!hw) {
   4365         ALOGE("%s: NULL camera device", __func__);
   4366         return -EINVAL;
   4367     }
   4368 
   4369     rc = hw->flush();
   4370     ALOGV("%s: X", __func__);
   4371     return rc;
   4372 }
   4373 
   4374 /*===========================================================================
   4375  * FUNCTION   : close_camera_device
   4376  *
   4377  * DESCRIPTION:
   4378  *
   4379  * PARAMETERS :
   4380  *
   4381  *
   4382  * RETURN     :
   4383  *==========================================================================*/
   4384 int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
   4385 {
   4386     ALOGV("%s: E", __func__);
   4387     int ret = NO_ERROR;
   4388     QCamera3HardwareInterface *hw =
   4389         reinterpret_cast<QCamera3HardwareInterface *>(
   4390             reinterpret_cast<camera3_device_t *>(device)->priv);
   4391     if (!hw) {
   4392         ALOGE("NULL camera device");
   4393         return BAD_VALUE;
   4394     }
   4395     delete hw;
   4396 
   4397     pthread_mutex_lock(&mCameraSessionLock);
   4398     mCameraSessionActive = 0;
   4399     pthread_mutex_unlock(&mCameraSessionLock);
   4400     ALOGV("%s: X", __func__);
   4401     return ret;
   4402 }
   4403 
   4404 /*===========================================================================
   4405  * FUNCTION   : getWaveletDenoiseProcessPlate
   4406  *
   4407  * DESCRIPTION: query wavelet denoise process plate
   4408  *
   4409  * PARAMETERS : None
   4410  *
 * RETURN     : WNR process plate value
   4412  *==========================================================================*/
   4413 cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
   4414 {
   4415     char prop[PROPERTY_VALUE_MAX];
   4416     memset(prop, 0, sizeof(prop));
   4417     property_get("persist.denoise.process.plates", prop, "0");
   4418     int processPlate = atoi(prop);
   4419     switch(processPlate) {
   4420     case 0:
   4421         return CAM_WAVELET_DENOISE_YCBCR_PLANE;
   4422     case 1:
   4423         return CAM_WAVELET_DENOISE_CBCR_ONLY;
   4424     case 2:
   4425         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
   4426     case 3:
   4427         return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
   4428     default:
   4429         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
   4430     }
   4431 }
   4432 
   4433 /*===========================================================================
   4434  * FUNCTION   : needRotationReprocess
   4435  *
   4436  * DESCRIPTION: if rotation needs to be done by reprocess in pp
   4437  *
   4438  * PARAMETERS : none
   4439  *
   4440  * RETURN     : true: needed
   4441  *              false: no need
   4442  *==========================================================================*/
   4443 bool QCamera3HardwareInterface::needRotationReprocess()
   4444 {
   4445 
   4446     if (!mJpegSettings->is_jpeg_format) {
   4447         // RAW image, no need to reprocess
   4448         return false;
   4449     }
   4450 
   4451     if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0 &&
   4452         mJpegSettings->jpeg_orientation > 0) {
   4453         // current rotation is not zero, and pp has the capability to process rotation
   4454         ALOGD("%s: need do reprocess for rotation", __func__);
   4455         return true;
   4456     }
   4457 
   4458     return false;
   4459 }
   4460 
   4461 /*===========================================================================
   4462  * FUNCTION   : needReprocess
   4463  *
 * DESCRIPTION: if reprocess is needed
   4465  *
   4466  * PARAMETERS : none
   4467  *
   4468  * RETURN     : true: needed
   4469  *              false: no need
   4470  *==========================================================================*/
   4471 bool QCamera3HardwareInterface::needReprocess()
   4472 {
   4473     if (!mJpegSettings->is_jpeg_format) {
   4474         // RAW image, no need to reprocess
   4475         return false;
   4476     }
   4477 
   4478     if ((mJpegSettings->min_required_pp_mask > 0) ||
   4479          isWNREnabled()) {
   4480         // TODO: add for ZSL HDR later
   4481         // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
   4482         ALOGD("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
   4483         return true;
   4484     }
   4485     return needRotationReprocess();
   4486 }
   4487 
   4488 /*===========================================================================
   4489  * FUNCTION   : addOnlineReprocChannel
   4490  *
 * DESCRIPTION: add an online reprocess channel that will do reprocess on frames
   4492  *              coming from input channel
   4493  *
   4494  * PARAMETERS :
   4495  *   @pInputChannel : ptr to input channel whose frames will be post-processed
   4496  *
   4497  * RETURN     : Ptr to the newly created channel obj. NULL if failed.
   4498  *==========================================================================*/
QCamera3ReprocessChannel *QCamera3HardwareInterface::addOnlineReprocChannel(
              QCamera3Channel *pInputChannel, QCamera3PicChannel *picChHandle)
{
    int32_t rc = NO_ERROR;
    QCamera3ReprocessChannel *pChannel = NULL;
    if (pInputChannel == NULL) {
        ALOGE("%s: input channel obj is NULL", __func__);
        return NULL;
    }

    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
            mCameraHandle->ops, NULL, pInputChannel->mPaddingInfo, this, picChHandle);
    // NOTE(review): this NULL check is only meaningful if the build uses a
    // non-throwing operator new — confirm against the build flags.
    if (NULL == pChannel) {
        ALOGE("%s: no mem for reprocess channel", __func__);
        return NULL;
    }

    // Capture channel, only need snapshot and postview streams start together
    // NOTE(review): attr is filled in but never passed to the channel —
    // looks like it was intended for initialization; confirm.
    mm_camera_channel_attr_t attr;
    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
    attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
    attr.max_unmatched_frames = getMaxUnmatchedFramesInQueue();
    rc = pChannel->initialize();
    if (rc != NO_ERROR) {
        ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
        delete pChannel;
        return NULL;
    }

    // pp feature config
    // Enable sharpness adjustment when the capability table requires it.
    cam_pp_feature_config_t pp_config;
    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
    if (gCamCapability[mCameraId]->min_required_pp_mask & CAM_QCOM_FEATURE_SHARPNESS) {
        pp_config.feature_mask |= CAM_QCOM_FEATURE_SHARPNESS;
        pp_config.sharpness = mJpegSettings->sharpness;
    }

    // Wavelet denoise, with the plate selection from the persisted property.
    if (isWNREnabled()) {
        pp_config.feature_mask |= CAM_QCOM_FEATURE_DENOISE2D;
        pp_config.denoise2d.denoise_enable = 1;
        pp_config.denoise2d.process_plates = getWaveletDenoiseProcessPlate();
    }
    // Rotation pass; non-right-angle values leave rotation at the memset
    // default (0), which corresponds to no rotation.
    if (needRotationReprocess()) {
        pp_config.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
        int rotation = mJpegSettings->jpeg_orientation;
        if (rotation == 0) {
            pp_config.rotation = ROTATE_0;
        } else if (rotation == 90) {
            pp_config.rotation = ROTATE_90;
        } else if (rotation == 180) {
            pp_config.rotation = ROTATE_180;
        } else if (rotation == 270) {
            pp_config.rotation = ROTATE_270;
        }
    }

   rc = pChannel->addReprocStreamsFromSource(pp_config,
                                             pInputChannel,
                                             mMetadataChannel);

    if (rc != NO_ERROR) {
        delete pChannel;
        return NULL;
    }
    return pChannel;
}
   4565 
int QCamera3HardwareInterface::getMaxUnmatchedFramesInQueue()
{
    // The capability-reported minimum post-processing buffer count doubles
    // as the limit of unmatched frames a channel may hold queued.
    return gCamCapability[mCameraId]->min_num_pp_bufs;
}
   4570 
bool QCamera3HardwareInterface::isWNREnabled() {
    // Wavelet noise reduction support comes straight from this camera's
    // static capability table.
    return gCamCapability[mCameraId]->isWnrSupported;
}
   4574 
   4575 }; //end namespace qcamera
   4576