      1 /* Copyright (c) 2012-2013, The Linux Foundation. All rights reserved.
      2 *
      3 * Redistribution and use in source and binary forms, with or without
      4 * modification, are permitted provided that the following conditions are
      5 * met:
      6 *     * Redistributions of source code must retain the above copyright
      7 *       notice, this list of conditions and the following disclaimer.
      8 *     * Redistributions in binary form must reproduce the above
      9 *       copyright notice, this list of conditions and the following
     10 *       disclaimer in the documentation and/or other materials provided
     11 *       with the distribution.
     12 *     * Neither the name of The Linux Foundation nor the names of its
     13 *       contributors may be used to endorse or promote products derived
     14 *       from this software without specific prior written permission.
     15 *
     16 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
     17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
     18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
     19 * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
     20 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
     21 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
     22 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
     23 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
     24 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
     25 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
     26 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
     27 *
     28 */
     29 
     30 #define LOG_TAG "QCamera3HWI"
     31 
     32 #include <cutils/properties.h>
     33 #include <hardware/camera3.h>
     34 #include <camera/CameraMetadata.h>
     35 #include <stdlib.h>
     36 #include <utils/Log.h>
     37 #include <utils/Errors.h>
     38 #include <ui/Fence.h>
     39 #include <gralloc_priv.h>
     40 #include "QCamera3HWI.h"
     41 #include "QCamera3Mem.h"
     42 #include "QCamera3Channel.h"
     43 #include "QCamera3PostProc.h"
     44 
     45 using namespace android;
     46 
     47 namespace qcamera {
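        // Convenience macro: fetch the data pointer for buffer INDEX from memory object MEM_OBJ.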
     48 #define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
     49 cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
     50 parm_buffer_t *prevSettings;
     51 const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
     52 
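        /* Serializes camera open()/close() across the process; only one camera session may be active at a time. */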
     53 pthread_mutex_t QCamera3HardwareInterface::mCameraSessionLock =
     54     PTHREAD_MUTEX_INITIALIZER;
     55 unsigned int QCamera3HardwareInterface::mCameraSessionActive = 0;
     56 
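        /* Lookup tables translating framework (ANDROID_*) enum values to the corresponding backend (CAM_*) values. */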
     57 const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
     58     { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
     59     { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
     60     { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
     61     { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
     62     { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
     63     { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
     64     { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
     65     { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
     66     { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
     67 };
     68 
     69 const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
     70     { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
     71     { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
     72     { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
     73     { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
     74     { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT, CAM_WB_MODE_WARM_FLUORESCENT },
     75     { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
     76     { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
     77     { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
     78     { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
     79 };
     80 
     81 const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
     82     { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
     83     { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
     84     { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
     85     { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
     86     { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
     87     { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
     88     { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
     89     { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
     90     { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
     91     { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
     92     { ANDROID_CONTROL_SCENE_MODE_FIREWORKS,      CAM_SCENE_MODE_FIREWORKS },
     93     { ANDROID_CONTROL_SCENE_MODE_SPORTS,         CAM_SCENE_MODE_SPORTS },
     94     { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
     95     { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
     96     { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
     97 };
     98 
     99 const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    100     { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    101     { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    102     { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    103     { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    104     { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    105     { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
    106 };
    107 
    108 const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    109     { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    110     { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    111     { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    112     { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
    113 };
    114 
    115 const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    116     { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    117     { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    118     { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    119     { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    120     { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
    121 };
    122 
    123 const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    124     { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    125     { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    126     { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
    127 };
    128 
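        // JPEG thumbnail sizes as flattened (width, height) pairs; the trailing (0, 0) entry allows thumbnail generation to be disabled.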
    129 const int32_t available_thumbnail_sizes[] = {512, 288, 480, 288, 256, 154, 432, 288,
    130                                              320, 240, 176, 144, 0, 0};
    131 
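        // camera3_device_ops entry points exposed to the framework; each entry is a static thunk that forwards to the matching member implementation.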
    132 camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    133     initialize:                         QCamera3HardwareInterface::initialize,
    134     configure_streams:                  QCamera3HardwareInterface::configure_streams,
    135     register_stream_buffers:            QCamera3HardwareInterface::register_stream_buffers,
    136     construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    137     process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    138     get_metadata_vendor_tag_ops:        QCamera3HardwareInterface::get_metadata_vendor_tag_ops,
    139     dump:                               QCamera3HardwareInterface::dump,
    140 };
    141 
    142 
    143 /*===========================================================================
    144  * FUNCTION   : QCamera3HardwareInterface
    145  *
    146  * DESCRIPTION: constructor of QCamera3HardwareInterface
    147  *
    148  * PARAMETERS :
    149  *   @cameraId  : camera ID
    150  *
    151  * RETURN     : none
    152  *==========================================================================*/
    153 QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId)
    154     : mCameraId(cameraId),
    155       mCameraHandle(NULL),
    156       mCameraOpened(false),
    157       mCameraInitialized(false),
    158       mCallbackOps(NULL),
    159       mInputStream(NULL),
    160       mMetadataChannel(NULL),
    161       mPictureChannel(NULL),
    162       mFirstRequest(false),
    163       mParamHeap(NULL),
    164       mParameters(NULL),
    165       mJpegSettings(NULL),
    166       mIsZslMode(false),
    167       m_pPowerModule(NULL)
    168 {
    169     mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    170     mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_0;
    171     mCameraDevice.common.close = close_camera_device;
    172     mCameraDevice.ops = &mCameraOps;
    173     mCameraDevice.priv = this;
    174     gCamCapability[cameraId]->version = CAM_HAL_V3;
    175     // TODO: hardcode for now until mctl adds support for min_num_pp_bufs
    176     // TBD: verify whether this hardcoding is needed by checking whether mctl fills this to 3
    177     gCamCapability[cameraId]->min_num_pp_bufs = 3;
    178 
    179     pthread_cond_init(&mRequestCond, NULL);
    180     mPendingRequest = 0;
    181     mCurrentRequestId = -1;
    182     pthread_mutex_init(&mMutex, NULL);
    183 
    184     for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
    185         mDefaultMetadata[i] = NULL;
    186 
    187 #ifdef HAS_MULTIMEDIA_HINTS
    188     if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
    189         ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
    190     }
    191 #endif
    192 }
    193 
    194 /*===========================================================================
    195  * FUNCTION   : ~QCamera3HardwareInterface
    196  *
    197  * DESCRIPTION: destructor of QCamera3HardwareInterface
    198  *
    199  * PARAMETERS : none
    200  *
    201  * RETURN     : none
    202  *==========================================================================*/
    203 QCamera3HardwareInterface::~QCamera3HardwareInterface()
    204 {
    205     ALOGV("%s: E", __func__);
    206     /* We need to stop all streams before deleting any stream */
    207     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
    208         it != mStreamInfo.end(); it++) {
    209         QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
    210         if (channel)
    211            channel->stop();
    212     }
    213     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
    214         it != mStreamInfo.end(); it++) {
    215         QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
    216         if (channel)
    217             delete channel;
    218         free (*it);
    219     }
    220 
    221     mPictureChannel = NULL;
    222 
    223     if (mJpegSettings != NULL) {
    224         free(mJpegSettings);
    225         mJpegSettings = NULL;
    226     }
    227 
    228     /* Clean up all channels */
    229     if (mCameraInitialized) {
    230         if (mMetadataChannel) {
    231             mMetadataChannel->stop();
    232             delete mMetadataChannel;
    233             mMetadataChannel = NULL;
    234         }
    235         deinitParameters();
    236     }
    237 
    238     if (mCameraOpened)
    239         closeCamera();
    240 
    241     for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
    242         if (mDefaultMetadata[i])
    243             free_camera_metadata(mDefaultMetadata[i]);
    244 
    245     pthread_cond_destroy(&mRequestCond);
    246 
    247     pthread_mutex_destroy(&mMutex);
    248     ALOGV("%s: X", __func__);
    249 }
    250 
    251 /*===========================================================================
    252  * FUNCTION   : openCamera
    253  *
    254  * DESCRIPTION: open camera
    255  *
    256  * PARAMETERS :
    257  *   @hw_device  : double ptr for camera device struct
    258  *
    259  * RETURN     : int32_t type of status
    260  *              NO_ERROR  -- success
    261  *              non-zero failure code
    262  *==========================================================================*/
    263 int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
    264 {
    265     int rc = 0;
    266     pthread_mutex_lock(&mCameraSessionLock);
    267     if (mCameraSessionActive) {
    268         ALOGE("%s: multiple simultaneous camera instances not supported", __func__);
    269         pthread_mutex_unlock(&mCameraSessionLock);
    270         return -EUSERS;
    271     }
    272 
    273     if (mCameraOpened) {
    274         *hw_device = NULL;
            pthread_mutex_unlock(&mCameraSessionLock);
    275         return PERMISSION_DENIED;
    276     }
    277 
    278     rc = openCamera();
    279     if (rc == 0) {
    280         *hw_device = &mCameraDevice.common;
    281         mCameraSessionActive = 1;
    282     } else
    283         *hw_device = NULL;
    284 
    285 #ifdef HAS_MULTIMEDIA_HINTS
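            // Notify the power HAL that an active camera use case has started; the VIDEO_ENCODE hint is reused for this purpose.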
    286     if (rc == 0) {
    287         if (m_pPowerModule) {
    288             if (m_pPowerModule->powerHint) {
    289                 m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
    290                         (void *)"state=1");
    291             }
    292         }
    293     }
    294 #endif
    295     pthread_mutex_unlock(&mCameraSessionLock);
    296     return rc;
    297 }
    298 
    299 /*===========================================================================
    300  * FUNCTION   : openCamera
    301  *
    302  * DESCRIPTION: open camera
    303  *
    304  * PARAMETERS : none
    305  *
    306  * RETURN     : int32_t type of status
    307  *              NO_ERROR  -- success
    308  *              non-zero failure code
    309  *==========================================================================*/
    310 int QCamera3HardwareInterface::openCamera()
    311 {
    312     if (mCameraHandle) {
    313         ALOGE("Failure: Camera already opened");
    314         return ALREADY_EXISTS;
    315     }
    316     mCameraHandle = camera_open(mCameraId);
    317     if (!mCameraHandle) {
    318         ALOGE("camera_open failed.");
    319         return UNKNOWN_ERROR;
    320     }
    321 
    322     mCameraOpened = true;
    323 
    324     return NO_ERROR;
    325 }
    326 
    327 /*===========================================================================
    328  * FUNCTION   : closeCamera
    329  *
    330  * DESCRIPTION: close camera
    331  *
    332  * PARAMETERS : none
    333  *
    334  * RETURN     : int32_t type of status
    335  *              NO_ERROR  -- success
    336  *              non-zero failure code
    337  *==========================================================================*/
    338 int QCamera3HardwareInterface::closeCamera()
    339 {
    340     int rc = NO_ERROR;
    341 
    342     rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    343     mCameraHandle = NULL;
    344     mCameraOpened = false;
    345 
    346 #ifdef HAS_MULTIMEDIA_HINTS
    347     if (rc == NO_ERROR) {
    348         if (m_pPowerModule) {
    349             if (m_pPowerModule->powerHint) {
    350                 m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
    351                         (void *)"state=0");
    352             }
    353         }
    354     }
    355 #endif
    356 
    357     return rc;
    358 }
    359 
    360 /*===========================================================================
    361  * FUNCTION   : initialize
    362  *
    363  * DESCRIPTION: Initialize frameworks callback functions
    364  *
    365  * PARAMETERS :
    366  *   @callback_ops : callback function to frameworks
    367  *
    368  * RETURN     : int32_t type of status: NO_ERROR on success, non-zero failure code
    369  *
    370  *==========================================================================*/
    371 int QCamera3HardwareInterface::initialize(
    372         const struct camera3_callback_ops *callback_ops)
    373 {
    374     int rc;
    375 
    376     pthread_mutex_lock(&mMutex);
    377 
    378     rc = initParameters();
    379     if (rc < 0) {
    380         ALOGE("%s: initParameters failed %d", __func__, rc);
    381         goto err1;
    382     }
    383 
    384     mCallbackOps = callback_ops;
    385 
    386     pthread_mutex_unlock(&mMutex);
    387     mCameraInitialized = true;
    388     return 0;
    389 
    390 err1:
    391     pthread_mutex_unlock(&mMutex);
    392     return rc;
    393 }
    394 
    395 /*===========================================================================
    396  * FUNCTION   : configureStreams
    397  *
    398  * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
    399  *              and output streams.
    400  *
    401  * PARAMETERS :
    402  *   @streamList : streams to be configured
    403  *
    404  * RETURN     : int32_t type of status: NO_ERROR on success, non-zero failure code
    405  *
    406  *==========================================================================*/
    407 int QCamera3HardwareInterface::configureStreams(
    408         camera3_stream_configuration_t *streamList)
    409 {
    410     int rc = 0;
    411     // Sanity check stream_list
    412     if (streamList == NULL) {
    413         ALOGE("%s: NULL stream configuration", __func__);
    414         return BAD_VALUE;
    415     }
    416 
    417     if (streamList->streams == NULL) {
    418         ALOGE("%s: NULL stream list", __func__);
    419         return BAD_VALUE;
    420     }
    421 
    422     if (streamList->num_streams < 1) {
    423         ALOGE("%s: Bad number of streams requested: %d", __func__,
    424                 streamList->num_streams);
    425         return BAD_VALUE;
    426     }
    427 
    428     camera3_stream_t *inputStream = NULL;
    429     camera3_stream_t *jpegStream = NULL;
    430     /* first invalidate all the streams in mStreamInfo;
    431      * if they appear again in streamList, they will be re-validated */
    432     for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
    433             it != mStreamInfo.end(); it++) {
    434         QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
    435         channel->stop();
    436         (*it)->status = INVALID;
    437     }
    438 
    439     if (mMetadataChannel) {
    440         /* If mStreamInfo is not empty, a metadata stream already exists */
    441         mMetadataChannel->stop();
    442     }
    443     // Acquire the mutex after stopping all the channels
    444     pthread_mutex_lock(&mMutex);
    445     for (size_t i = 0; i < streamList->num_streams; i++) {
    446         camera3_stream_t *newStream = streamList->streams[i];
    447         ALOGD("%s: newStream type = %d, stream format = %d stream size : %d x %d",
    448                 __func__, newStream->stream_type, newStream->format,
    449                  newStream->width, newStream->height);
    450         //if the stream is in the mStreamList validate it
    451         bool stream_exists = false;
    452         for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
    453                 it != mStreamInfo.end(); it++) {
    454             if ((*it)->stream == newStream) {
    455                 QCamera3Channel *channel =
    456                     (QCamera3Channel*)(*it)->stream->priv;
    457                 stream_exists = true;
    458                 (*it)->status = RECONFIGURE;
    459                 /*delete the channel object associated with the stream because
    460                   we need to reconfigure*/
    461                 delete channel;
    462                 (*it)->stream->priv = NULL;
    463             }
    464         }
    465         if (!stream_exists) {
    466             //new stream
    467             stream_info_t* stream_info;
    468             stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
    469             stream_info->stream = newStream;
    470             stream_info->status = VALID;
    471             stream_info->registered = 0;
    472             mStreamInfo.push_back(stream_info);
    473         }
    474         if (newStream->stream_type == CAMERA3_STREAM_INPUT
    475                 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
    476             if (inputStream != NULL) {
    477                 ALOGE("%s: Multiple input streams requested!", __func__);
    478                 pthread_mutex_unlock(&mMutex);
    479                 return BAD_VALUE;
    480             }
    481             inputStream = newStream;
    482         }
    483         if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
    484             jpegStream = newStream;
    485         }
    486     }
    487     mInputStream = inputStream;
    488 
    489     /*clean up invalid streams*/
    490     for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
    491             it != mStreamInfo.end();) {
    492         if(((*it)->status) == INVALID){
    493             QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
    494             delete channel;
    495             if ((*it)->registered) delete[] (buffer_handle_t*)(*it)->buffer_set.buffers; // buffer_set is only valid once buffers were registered
    496             free(*it);
    497             it = mStreamInfo.erase(it);
    498         } else {
    499             it++;
    500         }
    501     }
    502 
    503     if (mMetadataChannel) {
    504         delete mMetadataChannel;
    505         mMetadataChannel = NULL;
    506     }
    507 
    508     //Create metadata channel and initialize it
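            // The metadata channel delivers per-frame metadata from the backend; captureResultCb is registered as its callback.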
    509     mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
    510                     mCameraHandle->ops, captureResultCb,
    511                     &gCamCapability[mCameraId]->padding_info, this);
    512     if (mMetadataChannel == NULL) {
    513         ALOGE("%s: failed to allocate metadata channel", __func__);
    514         rc = -ENOMEM;
    515         pthread_mutex_unlock(&mMutex);
    516         return rc;
    517     }
    518     rc = mMetadataChannel->initialize();
    519     if (rc < 0) {
    520         ALOGE("%s: metadata channel initialization failed", __func__);
    521         delete mMetadataChannel;
    522         mMetadataChannel = NULL;
    523         pthread_mutex_unlock(&mMutex);
    524         return rc;
    525     }
    526 
    527     /* Allocate channel objects for the requested streams */
    528     for (size_t i = 0; i < streamList->num_streams; i++) {
    529         camera3_stream_t *newStream = streamList->streams[i];
    530         if (newStream->priv == NULL) {
    531             //New stream, construct channel
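                    // Map the framework stream type to gralloc usage flags so buffers get camera read/write access.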
    532             switch (newStream->stream_type) {
    533             case CAMERA3_STREAM_INPUT:
    534                 newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
    535                 break;
    536             case CAMERA3_STREAM_BIDIRECTIONAL:
    537                 newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
    538                     GRALLOC_USAGE_HW_CAMERA_WRITE;
    539                 break;
    540             case CAMERA3_STREAM_OUTPUT:
    541                 newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
    542                 break;
    543             default:
    544                 ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
    545                 break;
    546             }
    547 
    548             if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
    549                     newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
    550                 QCamera3Channel *channel;
    551                 switch (newStream->format) {
    552                 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
    553                 case HAL_PIXEL_FORMAT_YCbCr_420_888:
    554                     newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
    555                     if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
    556                         jpegStream) {
    557                         uint32_t width = jpegStream->width;
    558                         uint32_t height = jpegStream->height;
    559                         mIsZslMode = true;
    560                         channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
    561                             mCameraHandle->ops, captureResultCb,
    562                             &gCamCapability[mCameraId]->padding_info, this, newStream,
    563                             width, height);
    564                     } else
    565                         channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
    566                             mCameraHandle->ops, captureResultCb,
    567                             &gCamCapability[mCameraId]->padding_info, this, newStream);
    568                     if (channel == NULL) {
    569                         ALOGE("%s: allocation of channel failed", __func__);
    570                         pthread_mutex_unlock(&mMutex);
    571                         return -ENOMEM;
    572                     }
    573 
    574                     newStream->priv = channel;
    575                     break;
    576                 case HAL_PIXEL_FORMAT_BLOB:
    577                     newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
    578                     mPictureChannel = new QCamera3PicChannel(mCameraHandle->camera_handle,
    579                             mCameraHandle->ops, captureResultCb,
    580                             &gCamCapability[mCameraId]->padding_info, this, newStream);
    581                     if (mPictureChannel == NULL) {
    582                         ALOGE("%s: allocation of channel failed", __func__);
    583                         pthread_mutex_unlock(&mMutex);
    584                         return -ENOMEM;
    585                     }
    586                     newStream->priv = (QCamera3Channel*)mPictureChannel;
    587                     break;
    588 
    589                 //TODO: Add support for app consumed format?
    590                 default:
    591                     ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
    592                     break;
    593                 }
    594             }
    595         } else {
    596             // Channel already exists for this stream
    597             // Do nothing for now
    598         }
    599     }
    600     /*For the streams to be reconfigured we need to register the buffers
    601       since the framework won't*/
    602     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
    603             it != mStreamInfo.end(); it++) {
    604         if ((*it)->status == RECONFIGURE) {
    605             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
    606             /*only register buffers for streams that have already been
    607               registered*/
    608             if ((*it)->registered) {
    609                 rc = channel->registerBuffers((*it)->buffer_set.num_buffers,
    610                         (*it)->buffer_set.buffers);
    611                 if (rc != NO_ERROR) {
    612                     ALOGE("%s: Failed to register the buffers of old stream, rc = %d",
    613                             __func__, rc);
    614                 }
    615                 ALOGV("%s: channel %p has %d buffers",
    616                         __func__, channel, (*it)->buffer_set.num_buffers);
    617             }
    618         }
    619 
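                // Reset the count of outstanding (dequeued) buffers for this stream.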
    620         ssize_t index = mPendingBuffersMap.indexOfKey((*it)->stream);
    621         if (index == NAME_NOT_FOUND) {
    622             mPendingBuffersMap.add((*it)->stream, 0);
    623         } else {
    624             mPendingBuffersMap.editValueAt(index) = 0;
    625         }
    626     }
    627 
    628     /* Initialize mPendingRequestsList and mPendingBuffersMap */
    629     mPendingRequestsList.clear();
    630 
    631     //settings/parameters don't carry over for new configureStreams
    632     memset(mParameters, 0, sizeof(parm_buffer_t));
    633     mFirstRequest = true;
    634 
    635     pthread_mutex_unlock(&mMutex);
    636     return rc;
    637 }
    638 
    639 /*===========================================================================
    640  * FUNCTION   : validateCaptureRequest
    641  *
    642  * DESCRIPTION: validate a capture request from camera service
    643  *
    644  * PARAMETERS :
    645  *   @request : request from framework to process
    646  *
    647  * RETURN     : int32_t type of status: NO_ERROR on success, BAD_VALUE on invalid request
    648  *
    649  *==========================================================================*/
    650 int QCamera3HardwareInterface::validateCaptureRequest(
    651                     camera3_capture_request_t *request)
    652 {
    653     ssize_t idx = 0;
    654     const camera3_stream_buffer_t *b;
    655     CameraMetadata meta;
    656 
    657     /* Sanity check the request */
    658     if (request == NULL) {
    659         ALOGE("%s: NULL capture request", __func__);
    660         return BAD_VALUE;
    661     }
    662 
    663     uint32_t frameNumber = request->frame_number;
    664     if (request->input_buffer != NULL &&
    665             request->input_buffer->stream != mInputStream) {
    666         ALOGE("%s: Request %d: Input buffer not from input stream!",
    667                 __FUNCTION__, frameNumber);
    668         return BAD_VALUE;
    669     }
    670     if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
    671         ALOGE("%s: Request %d: No output buffers provided!",
    672                 __FUNCTION__, frameNumber);
    673         return BAD_VALUE;
    674     }
    675     if (request->input_buffer != NULL) {
    676         b = request->input_buffer;
    677         QCamera3Channel *channel =
    678             static_cast<QCamera3Channel*>(b->stream->priv);
    679         if (channel == NULL) {
    680             ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
    681                     __func__, frameNumber, idx);
    682             return BAD_VALUE;
    683         }
    684         if (b->status != CAMERA3_BUFFER_STATUS_OK) {
    685             ALOGE("%s: Request %d: Buffer %d: Status not OK!",
    686                     __func__, frameNumber, idx);
    687             return BAD_VALUE;
    688         }
    689         if (b->release_fence != -1) {
    690             ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
    691                     __func__, frameNumber, idx);
    692             return BAD_VALUE;
    693         }
    694         if (b->buffer == NULL) {
    695             ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
    696                     __func__, frameNumber, idx);
    697             return BAD_VALUE;
    698         }
    699     }
    700 
    701     // Validate all buffers
    702     b = request->output_buffers;
    703     do {
    704         QCamera3Channel *channel =
    705                 static_cast<QCamera3Channel*>(b->stream->priv);
    706         if (channel == NULL) {
    707             ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
    708                     __func__, frameNumber, idx);
    709             return BAD_VALUE;
    710         }
    711         if (b->status != CAMERA3_BUFFER_STATUS_OK) {
    712             ALOGE("%s: Request %d: Buffer %d: Status not OK!",
    713                     __func__, frameNumber, idx);
    714             return BAD_VALUE;
    715         }
    716         if (b->release_fence != -1) {
    717             ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
    718                     __func__, frameNumber, idx);
    719             return BAD_VALUE;
    720         }
    721         if (b->buffer == NULL) {
    722             ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
    723                     __func__, frameNumber, idx);
    724             return BAD_VALUE;
    725         }
    726         idx++;
    727         b = request->output_buffers + idx;
    728     } while (idx < (ssize_t)request->num_output_buffers);
    729 
    730     return NO_ERROR;
    731 }
    732 
    733 /*===========================================================================
    734  * FUNCTION   : registerStreamBuffers
    735  *
    736  * DESCRIPTION: Register buffers for a given stream with the HAL device.
    737  *
    738  * PARAMETERS :
    739  *   @buffer_set : stream and the set of gralloc buffers to register for it
    740  *
    741  * RETURN     : int32_t type of status: NO_ERROR on success, non-zero failure code
    742  *
    743  *==========================================================================*/
    744 int QCamera3HardwareInterface::registerStreamBuffers(
    745         const camera3_stream_buffer_set_t *buffer_set)
    746 {
    747     int rc = 0;
    748 
    749     pthread_mutex_lock(&mMutex);
    750 
    751     if (buffer_set == NULL) {
    752         ALOGE("%s: Invalid buffer_set parameter.", __func__);
    753         pthread_mutex_unlock(&mMutex);
    754         return -EINVAL;
    755     }
    756     if (buffer_set->stream == NULL) {
    757         ALOGE("%s: Invalid stream parameter.", __func__);
    758         pthread_mutex_unlock(&mMutex);
    759         return -EINVAL;
    760     }
    761     if (buffer_set->num_buffers < 1) {
    762         ALOGE("%s: Invalid num_buffers %d.", __func__, buffer_set->num_buffers);
    763         pthread_mutex_unlock(&mMutex);
    764         return -EINVAL;
    765     }
    766     if (buffer_set->buffers == NULL) {
    767         ALOGE("%s: Invalid buffers parameter.", __func__);
    768         pthread_mutex_unlock(&mMutex);
    769         return -EINVAL;
    770     }
    771 
    772     camera3_stream_t *stream = buffer_set->stream;
    773     QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
    774 
    775     //set the buffer_set in the mStreamInfo array
    776     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
    777             it != mStreamInfo.end(); it++) {
    778         if ((*it)->stream == stream) {
    779             uint32_t numBuffers = buffer_set->num_buffers;
    780             (*it)->buffer_set.stream = buffer_set->stream;
    781             (*it)->buffer_set.num_buffers = numBuffers;
    782             (*it)->buffer_set.buffers = new buffer_handle_t*[numBuffers];
    783             if ((*it)->buffer_set.buffers == NULL) {
    784                 ALOGE("%s: Failed to allocate buffer_handle_t*", __func__);
    785                 pthread_mutex_unlock(&mMutex);
    786                 return -ENOMEM;
    787             }
    788             for (size_t j = 0; j < numBuffers; j++){
    789                 (*it)->buffer_set.buffers[j] = buffer_set->buffers[j];
    790             }
    791             (*it)->registered = 1;
    792         }
    793     }
    794     rc = channel->registerBuffers(buffer_set->num_buffers, buffer_set->buffers);
    795     if (rc < 0) {
    796         ALOGE("%s: registerBuffers for stream %p failed", __func__, stream);
    797         pthread_mutex_unlock(&mMutex);
    798         return -ENODEV;
    799     }
    800 
    801     pthread_mutex_unlock(&mMutex);
    802     return NO_ERROR;
    803 }
    804 
    805 /*===========================================================================
    806  * FUNCTION   : processCaptureRequest
    807  *
    808  * DESCRIPTION: process a capture request from camera service
    809  *
    810  * PARAMETERS :
    811  *   @request : request from framework to process
    812  *
    813  * RETURN     : int32_t type of status: NO_ERROR on success, non-zero failure code
    814  *
    815  *==========================================================================*/
    816 int QCamera3HardwareInterface::processCaptureRequest(
    817                     camera3_capture_request_t *request)
    818 {
    819     int rc = NO_ERROR;
    820     int32_t request_id;
    821     CameraMetadata meta;
    822 
    823     pthread_mutex_lock(&mMutex);
    824 
    825     rc = validateCaptureRequest(request);
    826     if (rc != NO_ERROR) {
    827         ALOGE("%s: incoming request is not valid", __func__);
    828         pthread_mutex_unlock(&mMutex);
    829         return rc;
    830     }
    831 
    832     uint32_t frameNumber = request->frame_number;
    833     uint32_t streamTypeMask = 0;
    834 
    835     meta = request->settings;
    836     if (meta.exists(ANDROID_REQUEST_ID)) {
    837         request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
    838         mCurrentRequestId = request_id;
    839         ALOGV("%s: Received request with id: %d",__func__, request_id);
    840     } else if (mFirstRequest || mCurrentRequestId == -1){
    841         ALOGE("%s: Unable to find request id field & no previous id available", __func__);
    842         pthread_mutex_unlock(&mMutex);
    843         return NAME_NOT_FOUND;
    844     } else {
    845         ALOGV("%s: Re-using old request id", __func__);
    846         request_id = mCurrentRequestId;
    847     }
    848 
    849     ALOGV("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
    850                                     __func__, __LINE__,
    851                                     request->num_output_buffers,
    852                                     request->input_buffer,
    853                                     frameNumber);
    854     // Acquire all request buffers first
    855     int blob_request = 0;
    856     for (size_t i = 0; i < request->num_output_buffers; i++) {
    857         const camera3_stream_buffer_t& output = request->output_buffers[i];
    858         QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
    859         sp<Fence> acquireFence = new Fence(output.acquire_fence);
    860 
    861         if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
    862             // Store a local copy of the JPEG settings to use as encode parameters.
    863             blob_request = 1;
    864             rc = getJpegSettings(request->settings);
    865             if (rc < 0) {
    866                 ALOGE("%s: failed to get jpeg parameters", __func__);
    867                 pthread_mutex_unlock(&mMutex);
    868                 return rc;
    869             }
    870         }
    871 
    872         rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
    873         if (rc != OK) {
    874             ALOGE("%s: fence wait failed %d", __func__, rc);
    875             pthread_mutex_unlock(&mMutex);
    876             return rc;
    877         }
    878         streamTypeMask |= channel->getStreamTypeMask();
    879     }
    880 
    881     rc = setFrameParameters(request->frame_number, request->settings, streamTypeMask);
    882     if (rc < 0) {
    883         ALOGE("%s: failed to set frame parameters", __func__);
    884         pthread_mutex_unlock(&mMutex);
    885         return rc;
    886     }
    887 
    888     /* Update pending request list and pending buffers map */
    889     PendingRequestInfo pendingRequest;
    890     pendingRequest.frame_number = frameNumber;
    891     pendingRequest.num_buffers = request->num_output_buffers;
    892     pendingRequest.request_id = request_id;
    893     pendingRequest.blob_request = blob_request;
    894 
    895     for (size_t i = 0; i < request->num_output_buffers; i++) {
    896         RequestedBufferInfo requestedBuf;
    897         requestedBuf.stream = request->output_buffers[i].stream;
    898         requestedBuf.buffer = NULL;
    899         pendingRequest.buffers.push_back(requestedBuf);
    900 
    901         mPendingBuffersMap.editValueFor(requestedBuf.stream)++;
    902     }
    903     mPendingRequestsList.push_back(pendingRequest);
    904 
    905     // Notify metadata channel we receive a request
    906     mMetadataChannel->request(NULL, frameNumber);
    907 
    908     // Call request on other streams
    909     for (size_t i = 0; i < request->num_output_buffers; i++) {
    910         const camera3_stream_buffer_t& output = request->output_buffers[i];
    911         QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
    912         mm_camera_buf_def_t *pInputBuffer = NULL;
    913 
    914         if (channel == NULL) {
    915             ALOGE("%s: invalid channel pointer for stream", __func__);
    916             continue;
    917         }
    918 
    919         if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
    920             QCamera3RegularChannel* inputChannel = NULL;
    921             if(request->input_buffer != NULL){
    922 
    923                 //Try to get the internal format
    924                 inputChannel = (QCamera3RegularChannel*)
    925                     request->input_buffer->stream->priv;
    926                 if(inputChannel == NULL ){
    927                     ALOGE("%s: failed to get input channel handle", __func__);
    928                 } else {
    929                     pInputBuffer =
    930                         inputChannel->getInternalFormatBuffer(
    931                                 request->input_buffer->buffer);
    932                     ALOGD("%s: Input buffer dump",__func__);
    933                     ALOGD("Stream id: %d", pInputBuffer->stream_id);
    934                     ALOGD("streamtype:%d", pInputBuffer->stream_type);
    935                     ALOGD("frame len:%d", pInputBuffer->frame_len);
    936                 }
    937             }
    938             rc = channel->request(output.buffer, frameNumber, mJpegSettings,
    939                             pInputBuffer,(QCamera3Channel*)inputChannel);
    940         } else {
    941             ALOGV("%s: %d, request with buffer %p, frame_number %d", __func__,
    942                 __LINE__, output.buffer, frameNumber);
    943             rc = channel->request(output.buffer, frameNumber);
    944         }
    945         if (rc < 0)
    946             ALOGE("%s: request failed", __func__);
    947     }
    948 
    949     mFirstRequest = false;
    950 
    951     //Block on conditional variable
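            // captureResultCb() clears mPendingRequest once no stream has max_buffers outstanding and the backend reports no pending requests.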
    952     mPendingRequest = 1;
    953     while (mPendingRequest == 1) {
    954         pthread_cond_wait(&mRequestCond, &mMutex);
    955     }
    956 
    957     pthread_mutex_unlock(&mMutex);
    958     return rc;
    959 }
    960 
    961 /*===========================================================================
    962  * FUNCTION   : getMetadataVendorTagOps
    963  *
    964  * DESCRIPTION: Query vendor tag metadata operations; no vendor tags are defined yet, so this is a no-op.
    965  *
    966  * PARAMETERS :
    967  *
    968  *
    969  * RETURN     :
    970  *==========================================================================*/
    971 void QCamera3HardwareInterface::getMetadataVendorTagOps(
    972                     vendor_tag_query_ops_t* /*ops*/)
    973 {
    974     /* Enable locks when we eventually add Vendor Tags */
    975     /*
    976     pthread_mutex_lock(&mMutex);
    977 
    978     pthread_mutex_unlock(&mMutex);
    979     */
    980     return;
    981 }
    982 
    983 /*===========================================================================
    984  * FUNCTION   : dump
    985  *
    986  * DESCRIPTION: Dump HAL state for debugging; currently not implemented.
    987  *
    988  * PARAMETERS :
    989  *
    990  *
    991  * RETURN     :
    992  *==========================================================================*/
    993 void QCamera3HardwareInterface::dump(int /*fd*/)
    994 {
    995     /*Enable lock when we implement this function*/
    996     /*
    997     pthread_mutex_lock(&mMutex);
    998 
    999     pthread_mutex_unlock(&mMutex);
   1000     */
   1001     return;
   1002 }
   1003 
   1004 
   1005 /*===========================================================================
   1006  * FUNCTION   : captureResultCb
   1007  *
   1008  * DESCRIPTION: Callback handler for all capture result
   1009  *              (streams, as well as metadata)
   1010  *
   1011  * PARAMETERS :
   1012  *   @metadata_buf : metadata buffer from the backend; NULL for buffer callbacks
   1013  *   @buffer       : gralloc buffer to be returned to frameworks; NULL for metadata callbacks
   1014  *   @frame_number : frame number associated with a buffer-only callback
   1015  *
   1016  * RETURN     : NONE
   1017  *==========================================================================*/
   1018 void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
   1019                 camera3_stream_buffer_t *buffer, uint32_t frame_number)
   1020 {
   1021     pthread_mutex_lock(&mMutex);
   1022 
   1023     if (metadata_buf) {
   1024         metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
   1025         int32_t frame_number_valid = *(int32_t *)
   1026             POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
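                // Number of requests still pending in the backend; used below to decide when to unblock processCaptureRequest().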
   1027         uint32_t pending_requests = *(uint32_t *)POINTER_OF(
   1028             CAM_INTF_META_PENDING_REQUESTS, metadata);
   1029         uint32_t frame_number = *(uint32_t *)
   1030             POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
   1031         const struct timeval *tv = (const struct timeval *)
   1032             POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
   1033         nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
   1034             tv->tv_usec * NSEC_PER_USEC;
   1035         bool frame_number_exists = FALSE;
   1036 
   1037         if (!frame_number_valid) {
   1038             ALOGV("%s: Not a valid frame number, used as SOF only", __func__);
   1039             mMetadataChannel->bufDone(metadata_buf);
   1040             free(metadata_buf);
   1041             goto done_metadata;
   1042         }
   1043         ALOGV("%s: valid frame_number = %d, capture_time = %lld", __func__,
   1044                 frame_number, capture_time);
   1045 
   1046         // Go through the pending requests info and send shutter/results to frameworks
   1047         for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
   1048                 i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
   1049             camera3_capture_result_t result;
   1050             camera3_notify_msg_t notify_msg;
   1051             ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);
   1052             frame_number_exists = TRUE; // This frame number exists in Pending list
   1053             // Flush out all entries with less or equal frame numbers.
   1054 
   1055             //TODO: Make sure shutter timestamp really reflects shutter timestamp.
   1056             //Right now it's the same as metadata timestamp
   1057 
   1058             //TODO: When there is a metadata drop, how do we derive the timestamp of
   1059             //dropped frames? For now, fake the dropped timestamp by subtracting
   1060             //from the reported timestamp.
   1061             nsecs_t current_capture_time = capture_time -
   1062                 (frame_number - i->frame_number) * NSEC_PER_33MSEC;
   1063 
   1064             // Send shutter notify to frameworks
   1065             notify_msg.type = CAMERA3_MSG_SHUTTER;
   1066             notify_msg.message.shutter.frame_number = i->frame_number;
   1067             notify_msg.message.shutter.timestamp = current_capture_time;
   1068             mCallbackOps->notify(mCallbackOps, &notify_msg);
   1069             ALOGV("%s: notify frame_number = %d, capture_time = %lld", __func__,
   1070                     i->frame_number, capture_time);
   1071 
   1072             // Send empty metadata with already filled buffers for dropped metadata
   1073             // and send valid metadata with already filled buffers for current metadata
   1074             if (i->frame_number < frame_number) {
   1075                 CameraMetadata dummyMetadata;
   1076                 dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
   1077                         &current_capture_time, 1);
   1078                 dummyMetadata.update(ANDROID_REQUEST_ID,
   1079                         &(i->request_id), 1);
   1080                 result.result = dummyMetadata.release();
   1081             } else {
   1082                 result.result = translateCbMetadataToResultMetadata(metadata,
   1083                         current_capture_time, i->request_id);
   1084                 if (i->blob_request && needReprocess()) {
   1085                    //If it is a blob request then send the metadata to the picture channel
   1086                    mPictureChannel->queueMetadata(metadata_buf);
   1087 
   1088                 } else {
   1089                    // Return metadata buffer
   1090                    mMetadataChannel->bufDone(metadata_buf);
   1091                    free(metadata_buf);
   1092                 }
   1093             }
   1094             if (!result.result) {
   1095                 ALOGE("%s: metadata is NULL", __func__);
   1096             }
   1097             result.frame_number = i->frame_number;
   1098             result.num_output_buffers = 0;
   1099             result.output_buffers = NULL;
   1100             for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
   1101                     j != i->buffers.end(); j++) {
   1102                 if (j->buffer) {
   1103                     result.num_output_buffers++;
   1104                 }
   1105             }
   1106 
   1107             if (result.num_output_buffers > 0) {
   1108                 camera3_stream_buffer_t *result_buffers =
   1109                     new camera3_stream_buffer_t[result.num_output_buffers];
   1110                 if (!result_buffers) {
   1111                     ALOGE("%s: Fatal error: out of memory", __func__);
   1112                 }
   1113                 size_t result_buffers_idx = 0;
   1114                 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
   1115                         j != i->buffers.end(); j++) {
   1116                     if (j->buffer) {
   1117                         result_buffers[result_buffers_idx++] = *(j->buffer);
   1118                         free(j->buffer);
   1119                         j->buffer = NULL;
   1120                         mPendingBuffersMap.editValueFor(j->stream)--;
   1121                     }
   1122                 }
   1123                 result.output_buffers = result_buffers;
   1124 
   1125                 mCallbackOps->process_capture_result(mCallbackOps, &result);
   1126                 ALOGV("%s: meta frame_number = %d, capture_time = %lld",
   1127                         __func__, result.frame_number, current_capture_time);
   1128                 free_camera_metadata((camera_metadata_t *)result.result);
   1129                 delete[] result_buffers;
   1130             } else {
   1131                 mCallbackOps->process_capture_result(mCallbackOps, &result);
   1132                 ALOGV("%s: meta frame_number = %d, capture_time = %lld",
   1133                         __func__, result.frame_number, current_capture_time);
   1134                 free_camera_metadata((camera_metadata_t *)result.result);
   1135             }
   1136             // erase the element from the list
   1137             i = mPendingRequestsList.erase(i);
   1138         }
   1139         if (!frame_number_exists) {
   1140             ALOGD("%s: Frame number# %d not in the Pending Request list", __func__,
   1141                     frame_number);
   1142             // Race condition where in Metadata Frame# is valid but its not in Pending list
   1143             mMetadataChannel->bufDone(metadata_buf);
   1144             free(metadata_buf);
   1145         }
   1146 
   1147 done_metadata:
   1148         bool max_buffers_dequeued = false;
   1149         for (size_t i = 0; i < mPendingBuffersMap.size(); i++) {
   1150             const camera3_stream_t *stream = mPendingBuffersMap.keyAt(i);
   1151             uint32_t queued_buffers = mPendingBuffersMap.valueAt(i);
   1152             if (queued_buffers == stream->max_buffers) {
   1153                 max_buffers_dequeued = true;
   1154                 break;
   1155             }
   1156         }
   1157         if (!max_buffers_dequeued && !pending_requests) {
   1158             // Unblock process_capture_request
   1159             mPendingRequest = 0;
   1160             pthread_cond_signal(&mRequestCond);
   1161         }
   1162     } else {
   1163         // If the frame number doesn't exist in the pending request list,
   1164         // directly send the buffer to the frameworks, and update pending buffers map
   1165         // Otherwise, book-keep the buffer.
   1166         List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
   1167         while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
   1168             i++;
   1169         }
   1170         if (i == mPendingRequestsList.end()) {
   1171             // Verify all pending requests frame_numbers are greater
   1172             for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
   1173                     j != mPendingRequestsList.end(); j++) {
   1174                 if (j->frame_number < frame_number) {
   1175                     ALOGE("%s: Error: pending frame number %d is smaller than %d",
   1176                             __func__, j->frame_number, frame_number);
   1177                 }
   1178             }
   1179             camera3_capture_result_t result;
   1180             result.result = NULL;
   1181             result.frame_number = frame_number;
   1182             result.num_output_buffers = 1;
   1183             result.output_buffers = buffer;
   1184             ALOGV("%s: result frame_number = %d, buffer = %p",
   1185                     __func__, frame_number, buffer);
   1186             mPendingBuffersMap.editValueFor(buffer->stream)--;
   1187             mCallbackOps->process_capture_result(mCallbackOps, &result);
   1188         } else {
   1189             for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
   1190                     j != i->buffers.end(); j++) {
   1191                 if (j->stream == buffer->stream) {
   1192                     if (j->buffer != NULL) {
   1193                         ALOGE("%s: Error: buffer is already set", __func__);
   1194                     } else {
   1195                         j->buffer = (camera3_stream_buffer_t *)malloc(
   1196                                 sizeof(camera3_stream_buffer_t));
   1197                         *(j->buffer) = *buffer;
   1198                         ALOGV("%s: cache buffer %p at result frame_number %d",
   1199                                 __func__, buffer, frame_number);
   1200                     }
   1201                 }
   1202             }
   1203         }
   1204     }
   1205     pthread_mutex_unlock(&mMutex);
   1206     return;
   1207 }
   1208 
   1209 /*===========================================================================
   1210  * FUNCTION   : translateCbMetadataToResultMetadata
   1211  *
   1212  * DESCRIPTION:
   1213  *
   1214  * PARAMETERS :
   1215  *   @metadata : metadata information from callback
   1216  *
   1217  * RETURN     : camera_metadata_t*
   1218  *              metadata in a format specified by fwk
   1219  *==========================================================================*/
   1220 camera_metadata_t*
   1221 QCamera3HardwareInterface::translateCbMetadataToResultMetadata
   1222                                 (metadata_buffer_t *metadata, nsecs_t timestamp,
   1223                                  int32_t request_id)
   1224 {
   1225     CameraMetadata camMetadata;
   1226     camera_metadata_t* resultMetadata;
   1227 
   1228     camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
   1229     camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
   1230 
   1231     /*CAM_INTF_META_HISTOGRAM - TODO*/
   1232     /*cam_hist_stats_t  *histogram =
   1233       (cam_hist_stats_t *)POINTER_OF(CAM_INTF_META_HISTOGRAM,
   1234       metadata);*/
   1235 
   1236     /*face detection*/
   1237     cam_face_detection_data_t *faceDetectionInfo =(cam_face_detection_data_t *)
   1238         POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
   1239     uint8_t numFaces = faceDetectionInfo->num_faces_detected;
   1240     int32_t faceIds[numFaces];
   1241     uint8_t faceScores[numFaces];
   1242     int32_t faceRectangles[numFaces * 4];
   1243     int32_t faceLandmarks[numFaces * 6];
   1244     int j = 0, k = 0;
   1245     for (int i = 0; i < numFaces; i++) {
   1246         faceIds[i] = faceDetectionInfo->faces[i].face_id;
   1247         faceScores[i] = faceDetectionInfo->faces[i].score;
   1248         convertToRegions(faceDetectionInfo->faces[i].face_boundary,
   1249                 faceRectangles+j, -1);
   1250         convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
   1251         j+= 4;
   1252         k+= 6;
   1253     }
   1254     if (numFaces > 0) {
   1255         camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
   1256         camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
   1257         camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
   1258             faceRectangles, numFaces*4);
   1259         camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
   1260             faceLandmarks, numFaces*6);
   1261     }
   1262 
   1263     uint8_t  *color_correct_mode =
   1264         (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
   1265     camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);
   1266 
   1267     int32_t  *ae_precapture_id =
   1268         (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata);
   1269     camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID, ae_precapture_id, 1);
   1270 
   1271     /*aec regions*/
   1272     cam_area_t  *hAeRegions =
   1273         (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
   1274     int32_t aeRegions[5];
   1275     convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
   1276     camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);
   1277     if(mIsZslMode) {
   1278         uint8_t ae_state = ANDROID_CONTROL_AE_STATE_CONVERGED;
   1279         camMetadata.update(ANDROID_CONTROL_AE_STATE, &ae_state, 1);
   1280     } else {
   1281         uint8_t *ae_state =
   1282             (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
   1283         camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);
   1284     }
   1285     uint8_t  *focusMode =
   1286         (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
   1287     camMetadata.update(ANDROID_CONTROL_AF_MODE, focusMode, 1);
   1288 
   1289     /*af regions*/
   1290     cam_area_t  *hAfRegions =
   1291         (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
   1292     int32_t afRegions[5];
   1293     convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
   1294     camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);
   1295 
   1296     uint8_t  *afState = (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
   1297     camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);
   1298 
   1299     int32_t  *afTriggerId =
   1300         (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
   1301     camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);
   1302 
   1303     uint8_t  *whiteBalance =
   1304         (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
   1305     camMetadata.update(ANDROID_CONTROL_AWB_MODE, whiteBalance, 1);
   1306 
   1307     /*awb regions*/
   1308     cam_area_t  *hAwbRegions =
   1309         (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
   1310     int32_t awbRegions[5];
   1311     convertToRegions(hAwbRegions->rect, awbRegions, hAwbRegions->weight);
   1312     camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);
   1313 
   1314     uint8_t  *whiteBalanceState =
   1315         (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
   1316     camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);
   1317 
   1318     uint8_t  *mode = (uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
   1319     camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);
   1320 
   1321     uint8_t  *edgeMode = (uint8_t *)POINTER_OF(CAM_INTF_META_EDGE_MODE, metadata);
   1322     camMetadata.update(ANDROID_EDGE_MODE, edgeMode, 1);
   1323 
   1324     uint8_t  *flashPower =
   1325         (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
   1326     camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);
   1327 
   1328     int64_t  *flashFiringTime =
   1329         (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
   1330     camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
   1331 
   1332     /*int32_t  *ledMode =
   1333       (int32_t *)POINTER_OF(CAM_INTF_PARM_LED_MODE, metadata);
   1334       camMetadata.update(ANDROID_FLASH_FIRING_TIME, ledMode, 1);*/
   1335 
   1336     uint8_t  *flashState =
   1337         (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata);
   1338     camMetadata.update(ANDROID_FLASH_STATE, flashState, 1);
   1339 
   1340     uint8_t  *hotPixelMode =
   1341         (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
   1342     camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);
   1343 
   1344     float  *lensAperture =
   1345         (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
   1346     camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
   1347 
   1348     float  *filterDensity =
   1349         (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
   1350     camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
   1351 
   1352     float  *focalLength =
   1353         (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
   1354     camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
   1355 
   1356     float  *focusDistance =
   1357         (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
   1358     camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
   1359 
   1360     float  *focusRange =
   1361         (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
   1362     camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 1);
   1363 
   1364     uint8_t  *opticalStab =
   1365         (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
   1366     camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);
   1367 
   1368     /*int32_t  *focusState =
   1369       (int32_t *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_STATE, metadata);
   1370       camMetadata.update(ANDROID_LENS_STATE , focusState, 1); //check */
   1371 
   1372     uint8_t  *noiseRedMode =
   1373         (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
   1374     camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);
   1375 
   1376     /*CAM_INTF_META_SCALER_CROP_REGION - check size*/
   1377 
   1378     cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
   1379         POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
   1380     int32_t scalerCropRegion[4];
   1381     scalerCropRegion[0] = hScalerCropRegion->left;
   1382     scalerCropRegion[1] = hScalerCropRegion->top;
   1383     scalerCropRegion[2] = hScalerCropRegion->width;
   1384     scalerCropRegion[3] = hScalerCropRegion->height;
   1385     camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
   1386 
   1387     int64_t  *sensorExpTime =
   1388         (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
   1389     mMetadataResponse.exposure_time = *sensorExpTime;
   1390     camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
   1391 
   1392 
    1393     int64_t  *sensorFrameDuration =
    1394         (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
    1395     camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
   1396 
   1397     int32_t  *sensorSensitivity =
   1398         (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
   1399     mMetadataResponse.iso_speed = *sensorSensitivity;
   1400     camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
   1401 
   1402     uint8_t  *shadingMode =
   1403         (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
   1404     camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);
   1405 
   1406     uint8_t  *faceDetectMode =
   1407         (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
   1408     camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, faceDetectMode, 1);
   1409 
   1410     uint8_t  *histogramMode =
   1411         (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
   1412     camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);
   1413 
   1414     uint8_t  *sharpnessMapMode =
   1415         (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
   1416     camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
   1417             sharpnessMapMode, 1);
   1418 
   1419     /*CAM_INTF_META_STATS_SHARPNESS_MAP - check size*/
   1420     cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
   1421         POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
   1422     camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
   1423             (int32_t*)sharpnessMap->sharpness,
   1424             CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);
   1425 
   1426     cam_lens_shading_map_t *lensShadingMap = (cam_lens_shading_map_t *)
   1427         POINTER_OF(CAM_INTF_META_LENS_SHADING_MAP, metadata);
   1428     int map_height = gCamCapability[mCameraId]->lens_shading_map_size.height;
   1429     int map_width  = gCamCapability[mCameraId]->lens_shading_map_size.width;
   1430     camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
   1431                        (float*)lensShadingMap->lens_shading,
   1432                        4*map_width*map_height);
   1433 
   1434     cam_color_correct_gains_t *colorCorrectionGains = (cam_color_correct_gains_t*)
   1435         POINTER_OF(CAM_INTF_META_COLOR_CORRECT_GAINS, metadata);
   1436     camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains, 4);
   1437 
   1438     cam_color_correct_matrix_t *colorCorrectionMatrix = (cam_color_correct_matrix_t*)
   1439         POINTER_OF(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata);
   1440     camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
   1441                        (camera_metadata_rational_t*)colorCorrectionMatrix->transform_matrix, 3*3);
   1442 
   1443     cam_color_correct_gains_t *predColorCorrectionGains = (cam_color_correct_gains_t*)
   1444         POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata);
   1445     camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
   1446                        predColorCorrectionGains->gains, 4);
   1447 
   1448     cam_color_correct_matrix_t *predColorCorrectionMatrix = (cam_color_correct_matrix_t*)
   1449         POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata);
   1450     camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
   1451                        (camera_metadata_rational_t*)predColorCorrectionMatrix->transform_matrix, 3*3);
   1452 
   1453     uint8_t *blackLevelLock = (uint8_t*)
   1454         POINTER_OF(CAM_INTF_META_BLACK_LEVEL_LOCK, metadata);
   1455     camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, blackLevelLock, 1);
   1456 
   1457     uint8_t *sceneFlicker = (uint8_t*)
   1458         POINTER_OF(CAM_INTF_META_SCENE_FLICKER, metadata);
   1459     camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, sceneFlicker, 1);
   1460 
   1461 
   1462     resultMetadata = camMetadata.release();
   1463     return resultMetadata;
   1464 }
   1465 
   1466 /*===========================================================================
   1467  * FUNCTION   : convertToRegions
   1468  *
   1469  * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
   1470  *
   1471  * PARAMETERS :
   1472  *   @rect   : cam_rect_t struct to convert
   1473  *   @region : int32_t destination array
   1474  *   @weight : if we are converting from cam_area_t, weight is valid
   1475  *             else weight = -1
   1476  *
   1477  *==========================================================================*/
   1478 void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
   1479     region[0] = rect.left;
   1480     region[1] = rect.top;
   1481     region[2] = rect.left + rect.width;
   1482     region[3] = rect.top + rect.height;
   1483     if (weight > -1) {
   1484         region[4] = weight;
   1485     }
   1486 }
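
/* Illustrative sketch (not part of the driver flow; values are hypothetical):
 * convertToRegions maps a backend cam_rect_t (left, top, width, height) into
 * the framework layout [xmin, ymin, xmax, ymax, weight].
 *
 *   cam_rect_t rect = {100, 200, 50, 60};   // left, top, width, height
 *   int32_t region[5];
 *   convertToRegions(rect, region, 1);      // weight = 1
 *   // region == {100, 200, 150, 260, 1}
 *
 *   convertToRegions(rect, region, -1);     // weight = -1: region[4] untouched
 */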
   1487 
   1488 /*===========================================================================
   1489  * FUNCTION   : convertFromRegions
   1490  *
    1491  * DESCRIPTION: helper method to convert a framework region array in the
    1492  *              request settings into a cam_area_t
    1493  * PARAMETERS :
    1494  *   @roi      : destination cam_area_t struct
    1495  *   @settings : capture request settings to read the region from
    1496  *   @tag      : metadata tag of the region entry
    1497  *              (e.g. ANDROID_CONTROL_AE_REGIONS)
   1498  *
   1499  *==========================================================================*/
   1500 void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
   1501                                                    const camera_metadata_t *settings,
   1502                                                    uint32_t tag){
   1503     CameraMetadata frame_settings;
   1504     frame_settings = settings;
   1505     int32_t x_min = frame_settings.find(tag).data.i32[0];
   1506     int32_t y_min = frame_settings.find(tag).data.i32[1];
   1507     int32_t x_max = frame_settings.find(tag).data.i32[2];
   1508     int32_t y_max = frame_settings.find(tag).data.i32[3];
   1509     roi->weight = frame_settings.find(tag).data.i32[4];
   1510     roi->rect.left = x_min;
   1511     roi->rect.top = y_min;
   1512     roi->rect.width = x_max - x_min;
   1513     roi->rect.height = y_max - y_min;
   1514 }
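
/* Illustrative sketch (hypothetical values): convertFromRegions performs the
 * inverse mapping, reading the framework [xmin, ymin, xmax, ymax, weight]
 * entry for the given tag out of the request settings and filling a
 * cam_area_t with (left, top, width, height) plus weight.
 *
 *   // settings contain ANDROID_CONTROL_AE_REGIONS = {100, 200, 150, 260, 1}
 *   cam_area_t aeRoi;
 *   convertFromRegions(&aeRoi, settings, ANDROID_CONTROL_AE_REGIONS);
 *   // aeRoi.rect == {100, 200, 50, 60}, aeRoi.weight == 1
 */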
   1515 
   1516 /*===========================================================================
   1517  * FUNCTION   : resetIfNeededROI
   1518  *
    1519  * DESCRIPTION: helper method to clamp the roi to the scaler crop region;
    1520  *              returns false if the roi does not overlap the crop region
   1521  *
   1522  * PARAMETERS :
   1523  *   @roi       : cam_area_t struct to resize
   1524  *   @scalerCropRegion : cam_crop_region_t region to compare against
   1525  *
   1526  *
   1527  *==========================================================================*/
   1528 bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
   1529                                                  const cam_crop_region_t* scalerCropRegion)
   1530 {
   1531     int32_t roi_x_max = roi->rect.width + roi->rect.left;
   1532     int32_t roi_y_max = roi->rect.height + roi->rect.top;
    1533     int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
    1534     int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
   1535     if ((roi_x_max < scalerCropRegion->left) ||
   1536         (roi_y_max < scalerCropRegion->top)  ||
   1537         (roi->rect.left > crop_x_max) ||
   1538         (roi->rect.top > crop_y_max)){
   1539         return false;
   1540     }
   1541     if (roi->rect.left < scalerCropRegion->left) {
   1542         roi->rect.left = scalerCropRegion->left;
   1543     }
   1544     if (roi->rect.top < scalerCropRegion->top) {
   1545         roi->rect.top = scalerCropRegion->top;
   1546     }
   1547     if (roi_x_max > crop_x_max) {
   1548         roi_x_max = crop_x_max;
   1549     }
   1550     if (roi_y_max > crop_y_max) {
   1551         roi_y_max = crop_y_max;
   1552     }
   1553     roi->rect.width = roi_x_max - roi->rect.left;
   1554     roi->rect.height = roi_y_max - roi->rect.top;
   1555     return true;
   1556 }
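
/* Illustrative sketch (hypothetical values): resetIfNeededROI clips an ROI to
 * the scaler crop region and returns false only when the two rectangles do
 * not overlap at all.
 *
 *   cam_crop_region_t crop = {0, 0, 1000, 750};   // left, top, width, height
 *   cam_area_t roi;
 *   roi.rect = {-50, -50, 200, 200};              // partially outside the crop
 *   resetIfNeededROI(&roi, &crop);                // returns true
 *   // roi.rect == {0, 0, 150, 150}  (clamped to the crop region)
 */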
   1557 
   1558 /*===========================================================================
   1559  * FUNCTION   : convertLandmarks
   1560  *
   1561  * DESCRIPTION: helper method to extract the landmarks from face detection info
   1562  *
   1563  * PARAMETERS :
    1564  *   @face   : cam_face_detection_info_t struct to convert
   1565  *   @landmarks : int32_t destination array
   1566  *
   1567  *
   1568  *==========================================================================*/
   1569 void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
   1570 {
   1571     landmarks[0] = face.left_eye_center.x;
   1572     landmarks[1] = face.left_eye_center.y;
    1573     landmarks[2] = face.right_eye_center.x;
   1574     landmarks[3] = face.right_eye_center.y;
   1575     landmarks[4] = face.mouth_center.x;
   1576     landmarks[5] = face.mouth_center.y;
   1577 }
   1578 
   1579 #define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
   1580 /*===========================================================================
   1581  * FUNCTION   : initCapabilities
   1582  *
   1583  * DESCRIPTION: initialize camera capabilities in static data struct
   1584  *
   1585  * PARAMETERS :
   1586  *   @cameraId  : camera Id
   1587  *
   1588  * RETURN     : int32_t type of status
   1589  *              NO_ERROR  -- success
    1590  *              non-zero failure code
   1591  *==========================================================================*/
   1592 int QCamera3HardwareInterface::initCapabilities(int cameraId)
   1593 {
   1594     int rc = 0;
   1595     mm_camera_vtbl_t *cameraHandle = NULL;
   1596     QCamera3HeapMemory *capabilityHeap = NULL;
   1597 
   1598     cameraHandle = camera_open(cameraId);
   1599     if (!cameraHandle) {
   1600         ALOGE("%s: camera_open failed", __func__);
   1601         rc = -1;
   1602         goto open_failed;
   1603     }
   1604 
   1605     capabilityHeap = new QCamera3HeapMemory();
   1606     if (capabilityHeap == NULL) {
   1607         ALOGE("%s: creation of capabilityHeap failed", __func__);
   1608         goto heap_creation_failed;
   1609     }
   1610     /* Allocate memory for capability buffer */
   1611     rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
   1612     if(rc != OK) {
    1613         ALOGE("%s: No memory for capability", __func__);
   1614         goto allocate_failed;
   1615     }
   1616 
   1617     /* Map memory for capability buffer */
   1618     memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
   1619     rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
   1620                                 CAM_MAPPING_BUF_TYPE_CAPABILITY,
   1621                                 capabilityHeap->getFd(0),
   1622                                 sizeof(cam_capability_t));
   1623     if(rc < 0) {
   1624         ALOGE("%s: failed to map capability buffer", __func__);
   1625         goto map_failed;
   1626     }
   1627 
   1628     /* Query Capability */
   1629     rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
   1630     if(rc < 0) {
   1631         ALOGE("%s: failed to query capability",__func__);
   1632         goto query_failed;
   1633     }
   1634     gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
   1635     if (!gCamCapability[cameraId]) {
   1636         ALOGE("%s: out of memory", __func__);
   1637         goto query_failed;
   1638     }
   1639     memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
   1640                                         sizeof(cam_capability_t));
   1641     rc = 0;
   1642 
   1643 query_failed:
   1644     cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
   1645                             CAM_MAPPING_BUF_TYPE_CAPABILITY);
   1646 map_failed:
   1647     capabilityHeap->deallocate();
   1648 allocate_failed:
   1649     delete capabilityHeap;
   1650 heap_creation_failed:
   1651     cameraHandle->ops->close_camera(cameraHandle->camera_handle);
   1652     cameraHandle = NULL;
   1653 open_failed:
   1654     return rc;
   1655 }
   1656 
   1657 /*===========================================================================
   1658  * FUNCTION   : initParameters
   1659  *
   1660  * DESCRIPTION: initialize camera parameters
   1661  *
   1662  * PARAMETERS :
   1663  *
   1664  * RETURN     : int32_t type of status
   1665  *              NO_ERROR  -- success
    1666  *              non-zero failure code
   1667  *==========================================================================*/
   1668 int QCamera3HardwareInterface::initParameters()
   1669 {
   1670     int rc = 0;
   1671 
   1672     //Allocate Set Param Buffer
   1673     mParamHeap = new QCamera3HeapMemory();
   1674     rc = mParamHeap->allocate(1, sizeof(parm_buffer_t), false);
   1675     if(rc != OK) {
   1676         rc = NO_MEMORY;
   1677         ALOGE("Failed to allocate SETPARM Heap memory");
   1678         delete mParamHeap;
   1679         mParamHeap = NULL;
   1680         return rc;
   1681     }
   1682 
   1683     //Map memory for parameters buffer
   1684     rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
   1685             CAM_MAPPING_BUF_TYPE_PARM_BUF,
   1686             mParamHeap->getFd(0),
   1687             sizeof(parm_buffer_t));
   1688     if(rc < 0) {
   1689         ALOGE("%s:failed to map SETPARM buffer",__func__);
   1690         rc = FAILED_TRANSACTION;
   1691         mParamHeap->deallocate();
   1692         delete mParamHeap;
   1693         mParamHeap = NULL;
   1694         return rc;
   1695     }
   1696 
   1697     mParameters = (parm_buffer_t*) DATA_PTR(mParamHeap,0);
   1698     return rc;
   1699 }
   1700 
   1701 /*===========================================================================
   1702  * FUNCTION   : deinitParameters
   1703  *
   1704  * DESCRIPTION: de-initialize camera parameters
   1705  *
   1706  * PARAMETERS :
   1707  *
   1708  * RETURN     : NONE
   1709  *==========================================================================*/
   1710 void QCamera3HardwareInterface::deinitParameters()
   1711 {
   1712     mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
   1713             CAM_MAPPING_BUF_TYPE_PARM_BUF);
   1714 
   1715     mParamHeap->deallocate();
   1716     delete mParamHeap;
   1717     mParamHeap = NULL;
   1718 
   1719     mParameters = NULL;
   1720 }
   1721 
   1722 /*===========================================================================
   1723  * FUNCTION   : calcMaxJpegSize
   1724  *
   1725  * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
   1726  *
   1727  * PARAMETERS :
   1728  *
   1729  * RETURN     : max_jpeg_size
   1730  *==========================================================================*/
   1731 int QCamera3HardwareInterface::calcMaxJpegSize()
   1732 {
   1733     int32_t max_jpeg_size = 0;
   1734     int temp_width, temp_height;
   1735     for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
   1736         temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
   1737         temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
   1738         if (temp_width * temp_height > max_jpeg_size ) {
   1739             max_jpeg_size = temp_width * temp_height;
   1740         }
   1741     }
   1742     max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
   1743     return max_jpeg_size;
   1744 }
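
/* Worked example (hypothetical sensor): for a largest picture size of
 * 4160x3120, the YUV 4:2:0 payload is 4160 * 3120 * 3/2 = 19,468,800 bytes,
 * so calcMaxJpegSize() reports 19,468,800 + sizeof(camera3_jpeg_blob_t),
 * i.e. a worst-case buffer big enough for the encoded data plus the JPEG
 * blob header placed at the end of the buffer.
 */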
   1745 
   1746 /*===========================================================================
   1747  * FUNCTION   : initStaticMetadata
   1748  *
   1749  * DESCRIPTION: initialize the static metadata
   1750  *
   1751  * PARAMETERS :
   1752  *   @cameraId  : camera Id
   1753  *
   1754  * RETURN     : int32_t type of status
   1755  *              0  -- success
   1756  *              non-zero failure code
   1757  *==========================================================================*/
   1758 int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
   1759 {
   1760     int rc = 0;
   1761     CameraMetadata staticInfo;
   1762 
   1763     /* android.info: hardware level */
   1764     uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED;
   1765     staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
   1766         &supportedHardwareLevel, 1);
   1767 
   1768     int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
   1769     /*HAL 3 only*/
   1770     /*staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
   1771                     &gCamCapability[cameraId]->min_focus_distance, 1); */
   1772 
   1773     /*hard coded for now but this should come from sensor*/
   1774     float min_focus_distance;
   1775     if(facingBack){
   1776         min_focus_distance = 10;
   1777     } else {
   1778         min_focus_distance = 0;
   1779     }
   1780     staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
   1781                     &min_focus_distance, 1);
   1782 
   1783     staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
   1784                     &gCamCapability[cameraId]->hyper_focal_distance, 1);
   1785 
   1786     /*should be using focal lengths but sensor doesn't provide that info now*/
   1787     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
   1788                       &gCamCapability[cameraId]->focal_length,
   1789                       1);
   1790 
   1791     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
   1792                       gCamCapability[cameraId]->apertures,
   1793                       gCamCapability[cameraId]->apertures_count);
   1794 
   1795     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
   1796                 gCamCapability[cameraId]->filter_densities,
   1797                 gCamCapability[cameraId]->filter_densities_count);
   1798 
   1799 
   1800     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
   1801                       (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
   1802                       gCamCapability[cameraId]->optical_stab_modes_count);
   1803 
   1804     staticInfo.update(ANDROID_LENS_POSITION,
   1805                       gCamCapability[cameraId]->lens_position,
   1806                       sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float));
   1807 
   1808     int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
   1809                                                     gCamCapability[cameraId]->lens_shading_map_size.height};
   1810     staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
   1811                       lens_shading_map_size,
   1812                       sizeof(lens_shading_map_size)/sizeof(int32_t));
   1813 
   1814     int32_t geo_correction_map_size[] = {gCamCapability[cameraId]->geo_correction_map_size.width,
   1815                                                       gCamCapability[cameraId]->geo_correction_map_size.height};
   1816     staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE,
   1817             geo_correction_map_size,
   1818             sizeof(geo_correction_map_size)/sizeof(int32_t));
   1819 
   1820     staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP,
   1821                        gCamCapability[cameraId]->geo_correction_map,
   1822                        sizeof(gCamCapability[cameraId]->geo_correction_map)/sizeof(float));
   1823 
   1824     staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
   1825             gCamCapability[cameraId]->sensor_physical_size, 2);
   1826 
   1827     staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
   1828             gCamCapability[cameraId]->exposure_time_range, 2);
   1829 
   1830     staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
   1831             &gCamCapability[cameraId]->max_frame_duration, 1);
   1832 
   1833     camera_metadata_rational baseGainFactor = {
   1834             gCamCapability[cameraId]->base_gain_factor.numerator,
   1835             gCamCapability[cameraId]->base_gain_factor.denominator};
   1836     staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
   1837             &baseGainFactor, 1);
   1838 
   1839     staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
   1840                      (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);
   1841 
   1842     int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
   1843                                                gCamCapability[cameraId]->pixel_array_size.height};
   1844     staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
   1845                       pixel_array_size, 2);
   1846 
   1847     int32_t active_array_size[] = {0, 0,
   1848                                                 gCamCapability[cameraId]->active_array_size.width,
   1849                                                 gCamCapability[cameraId]->active_array_size.height};
   1850     staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
   1851                       active_array_size, 4);
   1852 
   1853     staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
   1854             &gCamCapability[cameraId]->white_level, 1);
   1855 
   1856     staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
   1857             gCamCapability[cameraId]->black_level_pattern, 4);
   1858 
   1859     staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
   1860                       &gCamCapability[cameraId]->flash_charge_duration, 1);
   1861 
   1862     staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
   1863                       &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
   1864 
   1865     /*staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
   1866                       (int*)&gCamCapability[cameraId]->max_face_detection_count, 1);*/
   1867     /*hardcode 0 for now*/
   1868     int32_t max_face_count = 0;
   1869     staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
   1870                       &max_face_count, 1);
   1871 
   1872     staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
   1873                       &gCamCapability[cameraId]->histogram_size, 1);
   1874 
   1875     staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
   1876             &gCamCapability[cameraId]->max_histogram_count, 1);
   1877 
   1878     int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
   1879                                                 gCamCapability[cameraId]->sharpness_map_size.height};
   1880 
   1881     staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
   1882             sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
   1883 
   1884     staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
   1885             &gCamCapability[cameraId]->max_sharpness_map_value, 1);
   1886 
   1887 
   1888     staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
   1889                       &gCamCapability[cameraId]->raw_min_duration,
   1890                        1);
   1891 
   1892     int32_t scalar_formats[] = {HAL_PIXEL_FORMAT_YCbCr_420_888,
   1893                                                 HAL_PIXEL_FORMAT_BLOB};
   1894     int scalar_formats_count = sizeof(scalar_formats)/sizeof(int32_t);
   1895     staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
   1896                       scalar_formats,
   1897                       scalar_formats_count);
   1898 
   1899     int32_t available_processed_sizes[CAM_FORMAT_MAX * 2];
   1900     makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
   1901               gCamCapability[cameraId]->picture_sizes_tbl_cnt,
   1902               available_processed_sizes);
   1903     staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
   1904                 available_processed_sizes,
   1905                 (gCamCapability[cameraId]->picture_sizes_tbl_cnt) * 2);
   1906 
   1907     staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
   1908                       &gCamCapability[cameraId]->jpeg_min_duration[0],
   1909                       gCamCapability[cameraId]->picture_sizes_tbl_cnt);
   1910 
   1911     int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
   1912     makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
   1913                  gCamCapability[cameraId]->fps_ranges_tbl_cnt,
   1914                  available_fps_ranges);
   1915     staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
   1916             available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );
   1917 
   1918     camera_metadata_rational exposureCompensationStep = {
   1919             gCamCapability[cameraId]->exp_compensation_step.numerator,
   1920             gCamCapability[cameraId]->exp_compensation_step.denominator};
   1921     staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
   1922                       &exposureCompensationStep, 1);
   1923 
   1924     /*TO DO*/
   1925     uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
   1926     staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
   1927                       availableVstabModes, sizeof(availableVstabModes));
   1928 
   1929     /*HAL 1 and HAL 3 common*/
   1930     float maxZoom = 4;
   1931     staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
   1932             &maxZoom, 1);
   1933 
   1934     int32_t max3aRegions = 1;
   1935     staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
   1936             &max3aRegions, 1);
   1937 
   1938     uint8_t availableFaceDetectModes[] = {
   1939             ANDROID_STATISTICS_FACE_DETECT_MODE_OFF };
   1940     staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
   1941                       availableFaceDetectModes,
   1942                       sizeof(availableFaceDetectModes));
   1943 
   1944     int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
   1945                                                         gCamCapability[cameraId]->exposure_compensation_max};
   1946     staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
   1947             exposureCompensationRange,
   1948             sizeof(exposureCompensationRange)/sizeof(int32_t));
   1949 
   1950     uint8_t lensFacing = (facingBack) ?
   1951             ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
   1952     staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
   1953 
   1954     staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
   1955                 available_processed_sizes,
   1956                 (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2));
   1957 
   1958     staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
   1959                       available_thumbnail_sizes,
   1960                       sizeof(available_thumbnail_sizes)/sizeof(int32_t));
   1961 
   1962     int32_t max_jpeg_size = 0;
   1963     int temp_width, temp_height;
   1964     for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
   1965         temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
   1966         temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
   1967         if (temp_width * temp_height > max_jpeg_size ) {
   1968             max_jpeg_size = temp_width * temp_height;
   1969         }
   1970     }
   1971     max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
   1972     staticInfo.update(ANDROID_JPEG_MAX_SIZE,
   1973                       &max_jpeg_size, 1);
   1974 
   1975     uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
   1976     int32_t size = 0;
   1977     for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
   1978         int val = lookupFwkName(EFFECT_MODES_MAP,
   1979                                    sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
   1980                                    gCamCapability[cameraId]->supported_effects[i]);
   1981         if (val != NAME_NOT_FOUND) {
   1982             avail_effects[size] = (uint8_t)val;
   1983             size++;
   1984         }
   1985     }
   1986     staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
   1987                       avail_effects,
   1988                       size);
   1989 
   1990     uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
   1991     uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
   1992     int32_t supported_scene_modes_cnt = 0;
   1993     for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
   1994         int val = lookupFwkName(SCENE_MODES_MAP,
   1995                                 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
   1996                                 gCamCapability[cameraId]->supported_scene_modes[i]);
   1997         if (val != NAME_NOT_FOUND) {
   1998             avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
   1999             supported_indexes[supported_scene_modes_cnt] = i;
   2000             supported_scene_modes_cnt++;
   2001         }
   2002     }
   2003 
   2004     staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
   2005                       avail_scene_modes,
   2006                       supported_scene_modes_cnt);
   2007 
   2008     uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
   2009     makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
   2010                       supported_scene_modes_cnt,
   2011                       scene_mode_overrides,
   2012                       supported_indexes,
   2013                       cameraId);
   2014     staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
   2015                       scene_mode_overrides,
   2016                       supported_scene_modes_cnt*3);
   2017 
   2018     uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
   2019     size = 0;
   2020     for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
   2021         int val = lookupFwkName(ANTIBANDING_MODES_MAP,
   2022                                  sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
   2023                                  gCamCapability[cameraId]->supported_antibandings[i]);
   2024         if (val != NAME_NOT_FOUND) {
   2025             avail_antibanding_modes[size] = (uint8_t)val;
   2026             size++;
   2027         }
   2028 
   2029     }
   2030     staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
   2031                       avail_antibanding_modes,
   2032                       size);
   2033 
   2034     uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
   2035     size = 0;
   2036     for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
   2037         int val = lookupFwkName(FOCUS_MODES_MAP,
   2038                                 sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
   2039                                 gCamCapability[cameraId]->supported_focus_modes[i]);
   2040         if (val != NAME_NOT_FOUND) {
   2041             avail_af_modes[size] = (uint8_t)val;
   2042             size++;
   2043         }
   2044     }
   2045     staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
   2046                       avail_af_modes,
   2047                       size);
   2048 
   2049     uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
   2050     size = 0;
   2051     for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
   2052         int8_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
   2053                                     sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
   2054                                     gCamCapability[cameraId]->supported_white_balances[i]);
   2055         if (val != NAME_NOT_FOUND) {
   2056             avail_awb_modes[size] = (uint8_t)val;
   2057             size++;
   2058         }
   2059     }
   2060     staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
   2061                       avail_awb_modes,
   2062                       size);
   2063 
   2064     uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
   2065     for (int i = 0; i < gCamCapability[cameraId]->supported_flash_firing_level_cnt; i++)
   2066       available_flash_levels[i] = gCamCapability[cameraId]->supported_firing_levels[i];
   2067 
   2068     staticInfo.update(ANDROID_FLASH_FIRING_POWER,
   2069             available_flash_levels,
   2070             gCamCapability[cameraId]->supported_flash_firing_level_cnt);
   2071 
   2072 
   2073     uint8_t flashAvailable = gCamCapability[cameraId]->flash_available;
   2074     staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
   2075             &flashAvailable, 1);
   2076 
   2077     uint8_t avail_ae_modes[5];
   2078     size = 0;
   2079     for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
   2080         avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
   2081         size++;
   2082     }
   2083     if (flashAvailable) {
   2084         avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
   2085         avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
   2086         avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
   2087     }
   2088     staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
   2089                       avail_ae_modes,
   2090                       size);
   2091 
   2092     int32_t sensitivity_range[2];
   2093     sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
   2094     sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
   2095     staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
   2096                       sensitivity_range,
   2097                       sizeof(sensitivity_range) / sizeof(int32_t));
   2098 
   2099     staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
   2100                       &gCamCapability[cameraId]->max_analog_sensitivity,
   2101                       1);
   2102 
   2103     staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
   2104                       &gCamCapability[cameraId]->jpeg_min_duration[0],
   2105                       gCamCapability[cameraId]->picture_sizes_tbl_cnt);
   2106 
   2107     int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
   2108     staticInfo.update(ANDROID_SENSOR_ORIENTATION,
   2109                       &sensor_orientation,
   2110                       1);
   2111 
   2112     int32_t max_output_streams[3] = {1, 3, 1};
   2113     staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
   2114                       max_output_streams,
   2115                       3);
   2116 
   2117     gStaticMetadata[cameraId] = staticInfo.release();
   2118     return rc;
   2119 }
   2120 
   2121 /*===========================================================================
   2122  * FUNCTION   : makeTable
   2123  *
    2124  * DESCRIPTION: make a flattened table of sizes (width, height pairs)
    2125  *
    2126  * PARAMETERS :
    2127  *   @dimTable  : array of cam_dimension_t; @size : number of entries
    2128  *   @sizeTable : destination int32_t array of length size * 2
   2129  *==========================================================================*/
   2130 void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
   2131                                           int32_t* sizeTable)
   2132 {
   2133     int j = 0;
   2134     for (int i = 0; i < size; i++) {
   2135         sizeTable[j] = dimTable[i].width;
   2136         sizeTable[j+1] = dimTable[i].height;
   2137         j+=2;
   2138     }
   2139 }
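
/* Illustrative sketch (hypothetical sizes): makeTable flattens an array of
 * cam_dimension_t into the interleaved {w0, h0, w1, h1, ...} layout expected
 * by the ANDROID_SCALER_AVAILABLE_* static metadata tags.
 *
 *   cam_dimension_t dims[2] = {{1920, 1080}, {1280, 720}};
 *   int32_t table[4];
 *   makeTable(dims, 2, table);
 *   // table == {1920, 1080, 1280, 720}
 */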
   2140 
   2141 /*===========================================================================
   2142  * FUNCTION   : makeFPSTable
   2143  *
    2144  * DESCRIPTION: make a flattened table of fps ranges (min, max pairs)
    2145  *
    2146  * PARAMETERS :
    2147  *   @fpsTable : cam_fps_range_t array; @size : count; @fpsRangesTable : dest
   2148  *==========================================================================*/
   2149 void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
   2150                                           int32_t* fpsRangesTable)
   2151 {
   2152     int j = 0;
   2153     for (int i = 0; i < size; i++) {
   2154         fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
   2155         fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
   2156         j+=2;
   2157     }
   2158 }
   2159 
   2160 /*===========================================================================
   2161  * FUNCTION   : makeOverridesList
   2162  *
   2163  * DESCRIPTION: make a list of scene mode overrides
   2164  *
   2165  * PARAMETERS :
   2166  *
   2167  *
   2168  *==========================================================================*/
   2169 void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
   2170                                                   uint8_t size, uint8_t* overridesList,
   2171                                                   uint8_t* supported_indexes,
   2172                                                   int camera_id)
   2173 {
   2174     /*daemon will give a list of overrides for all scene modes.
   2175       However we should send the fwk only the overrides for the scene modes
   2176       supported by the framework*/
   2177     int j = 0, index = 0, supt = 0;
   2178     uint8_t focus_override;
   2179     for (int i = 0; i < size; i++) {
   2180         supt = 0;
   2181         index = supported_indexes[i];
   2182         overridesList[j] = gCamCapability[camera_id]->flash_available ? ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:ANDROID_CONTROL_AE_MODE_ON;
   2183         overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
   2184                                  sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
   2185                                                     overridesTable[index].awb_mode);
   2186         focus_override = (uint8_t)overridesTable[index].af_mode;
   2187         for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) {
   2188            if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
   2189               supt = 1;
   2190               break;
   2191            }
   2192         }
   2193         if (supt) {
   2194            overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
   2195                                               sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
   2196                                               focus_override);
   2197         } else {
   2198            overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
   2199         }
   2200         j+=3;
   2201     }
   2202 }
   2203 
   2204 /*===========================================================================
    2205  * FUNCTION   : getScalarFormat
   2206  *
   2207  * DESCRIPTION: convert the format to type recognized by framework
   2208  *
   2209  * PARAMETERS : format : the format from backend
   2210  *
    2211  * RETURN     : format recognized by framework
   2212  *
   2213  *==========================================================================*/
   2214 int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
   2215 {
   2216     int32_t halPixelFormat;
   2217 
   2218     switch (format) {
   2219     case CAM_FORMAT_YUV_420_NV12:
   2220         halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
   2221         break;
   2222     case CAM_FORMAT_YUV_420_NV21:
   2223         halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
   2224         break;
   2225     case CAM_FORMAT_YUV_420_NV21_ADRENO:
   2226         halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
   2227         break;
   2228     case CAM_FORMAT_YUV_420_YV12:
   2229         halPixelFormat = HAL_PIXEL_FORMAT_YV12;
   2230         break;
   2231     case CAM_FORMAT_YUV_422_NV16:
   2232     case CAM_FORMAT_YUV_422_NV61:
   2233     default:
   2234         halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
   2235         break;
   2236     }
   2237     return halPixelFormat;
   2238 }
   2239 
   2240 /*===========================================================================
   2241  * FUNCTION   : getSensorSensitivity
   2242  *
   2243  * DESCRIPTION: convert iso_mode to an integer value
   2244  *
   2245  * PARAMETERS : iso_mode : the iso_mode supported by sensor
   2246  *
    2247  * RETURN     : sensitivity supported by sensor
   2248  *
   2249  *==========================================================================*/
   2250 int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
   2251 {
   2252     int32_t sensitivity;
   2253 
   2254     switch (iso_mode) {
   2255     case CAM_ISO_MODE_100:
   2256         sensitivity = 100;
   2257         break;
   2258     case CAM_ISO_MODE_200:
   2259         sensitivity = 200;
   2260         break;
   2261     case CAM_ISO_MODE_400:
   2262         sensitivity = 400;
   2263         break;
   2264     case CAM_ISO_MODE_800:
   2265         sensitivity = 800;
   2266         break;
   2267     case CAM_ISO_MODE_1600:
   2268         sensitivity = 1600;
   2269         break;
   2270     default:
   2271         sensitivity = -1;
   2272         break;
   2273     }
   2274     return sensitivity;
   2275 }
   2276 
   2277 
   2278 /*===========================================================================
   2279  * FUNCTION   : AddSetParmEntryToBatch
   2280  *
   2281  * DESCRIPTION: add set parameter entry into batch
   2282  *
   2283  * PARAMETERS :
   2284  *   @p_table     : ptr to parameter buffer
   2285  *   @paramType   : parameter type
   2286  *   @paramLength : length of parameter value
   2287  *   @paramValue  : ptr to parameter value
   2288  *
   2289  * RETURN     : int32_t type of status
   2290  *              NO_ERROR  -- success
    2291  *              non-zero failure code
   2292  *==========================================================================*/
   2293 int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table,
   2294                                                           cam_intf_parm_type_t paramType,
   2295                                                           uint32_t paramLength,
   2296                                                           void *paramValue)
   2297 {
   2298     int position = paramType;
   2299     int current, next;
   2300 
   2301     /*************************************************************************
   2302     *                 Code to take care of linking next flags                *
   2303     *************************************************************************/
   2304     current = GET_FIRST_PARAM_ID(p_table);
   2305     if (position == current){
   2306         //DO NOTHING
   2307     } else if (position < current){
   2308         SET_NEXT_PARAM_ID(position, p_table, current);
   2309         SET_FIRST_PARAM_ID(p_table, position);
   2310     } else {
   2311         /* Search for the position in the linked list where we need to slot in*/
   2312         while (position > GET_NEXT_PARAM_ID(current, p_table))
   2313             current = GET_NEXT_PARAM_ID(current, p_table);
   2314 
   2315         /*If node already exists no need to alter linking*/
   2316         if (position != GET_NEXT_PARAM_ID(current, p_table)) {
   2317             next = GET_NEXT_PARAM_ID(current, p_table);
   2318             SET_NEXT_PARAM_ID(current, p_table, position);
   2319             SET_NEXT_PARAM_ID(position, p_table, next);
   2320         }
   2321     }
   2322 
   2323     /*************************************************************************
   2324     *                   Copy contents into entry                             *
   2325     *************************************************************************/
   2326 
   2327     if (paramLength > sizeof(parm_type_t)) {
   2328         ALOGE("%s:Size of input larger than max entry size",__func__);
   2329         return BAD_VALUE;
   2330     }
   2331     memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
   2332     return NO_ERROR;
   2333 }
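
/* Usage sketch (values are hypothetical): the parameter batch is kept as a
 * singly linked list ordered by parameter id, so entries can be added in any
 * order and the backend walks them via GET_FIRST_PARAM_ID / GET_NEXT_PARAM_ID.
 * A typical caller looks like:
 *
 *   uint8_t wbMode = CAM_WB_MODE_AUTO;
 *   rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_WHITE_BALANCE,
 *                               sizeof(wbMode), &wbMode);
 */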
   2334 
   2335 /*===========================================================================
   2336  * FUNCTION   : lookupFwkName
   2337  *
    2338  * DESCRIPTION: In case the enum is not the same in fwk and backend,
    2339  *              make sure the parameter is correctly propagated
   2340  *
   2341  * PARAMETERS  :
   2342  *   @arr      : map between the two enums
   2343  *   @len      : len of the map
   2344  *   @hal_name : name of the hal_parm to map
   2345  *
   2346  * RETURN     : int type of status
   2347  *              fwk_name  -- success
    2348  *              NAME_NOT_FOUND -- failure
   2349  *==========================================================================*/
   2350 int8_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
   2351                                              int len, int hal_name)
   2352 {
   2353 
   2354     for (int i = 0; i < len; i++) {
   2355         if (arr[i].hal_name == hal_name)
   2356             return arr[i].fwk_name;
   2357     }
   2358 
    2359     /* Failing to find a matching framework type is not necessarily
    2360      * an error. It happens when mm-camera supports more attributes
    2361      * than the framework does */
   2362     ALOGD("%s: Cannot find matching framework type", __func__);
   2363     return NAME_NOT_FOUND;
   2364 }
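
/* Illustrative sketch: lookupFwkName translates a backend (CAM_*) enum into
 * the corresponding framework (ANDROID_*) enum using one of the static maps,
 * e.g. EFFECT_MODES_MAP:
 *
 *   int val = lookupFwkName(EFFECT_MODES_MAP,
 *                           sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
 *                           CAM_EFFECT_MODE_SEPIA);
 *   // val == ANDROID_CONTROL_EFFECT_MODE_SEPIA, or NAME_NOT_FOUND if unmapped
 */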
   2365 
   2366 /*===========================================================================
   2367  * FUNCTION   : lookupHalName
   2368  *
    2369  * DESCRIPTION: In case the enum is not the same in fwk and backend,
    2370  *              make sure the parameter is correctly propagated
   2371  *
   2372  * PARAMETERS  :
   2373  *   @arr      : map between the two enums
   2374  *   @len      : len of the map
    2375  *   @fwk_name : name of the fwk parameter to map
   2376  *
   2377  * RETURN     : int32_t type of status
   2378  *              hal_name  -- success
    2379  *              NAME_NOT_FOUND -- failure
   2380  *==========================================================================*/
   2381 int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
   2382                                              int len, int fwk_name)
   2383 {
   2384     for (int i = 0; i < len; i++) {
   2385        if (arr[i].fwk_name == fwk_name)
   2386            return arr[i].hal_name;
   2387     }
   2388     ALOGE("%s: Cannot find matching hal type", __func__);
   2389     return NAME_NOT_FOUND;
   2390 }
   2391 
   2392 /*===========================================================================
    2393  * FUNCTION   : getCamInfo
   2394  *
   2395  * DESCRIPTION: query camera capabilities
   2396  *
   2397  * PARAMETERS :
   2398  *   @cameraId  : camera Id
   2399  *   @info      : camera info struct to be filled in with camera capabilities
   2400  *
   2401  * RETURN     : int32_t type of status
   2402  *              NO_ERROR  -- success
    2403  *              non-zero failure code
   2404  *==========================================================================*/
   2405 int QCamera3HardwareInterface::getCamInfo(int cameraId,
   2406                                     struct camera_info *info)
   2407 {
   2408     int rc = 0;
   2409 
   2410     if (NULL == gCamCapability[cameraId]) {
   2411         rc = initCapabilities(cameraId);
   2412         if (rc < 0) {
   2413             //pthread_mutex_unlock(&g_camlock);
   2414             return rc;
   2415         }
   2416     }
   2417 
   2418     if (NULL == gStaticMetadata[cameraId]) {
   2419         rc = initStaticMetadata(cameraId);
   2420         if (rc < 0) {
   2421             return rc;
   2422         }
   2423     }
   2424 
   2425     switch(gCamCapability[cameraId]->position) {
   2426     case CAM_POSITION_BACK:
   2427         info->facing = CAMERA_FACING_BACK;
   2428         break;
   2429 
   2430     case CAM_POSITION_FRONT:
   2431         info->facing = CAMERA_FACING_FRONT;
   2432         break;
   2433 
   2434     default:
   2435         ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
   2436         rc = -1;
   2437         break;
   2438     }
   2439 
   2440 
   2441     info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
   2442     info->device_version = HARDWARE_DEVICE_API_VERSION(3, 0);
   2443     info->static_camera_characteristics = gStaticMetadata[cameraId];
   2444 
   2445     return rc;
   2446 }
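         // Rough usage sketch (assumed wiring, for orientation only): the
         // camera module's get_camera_info() entry point is expected to
         // forward to this helper, e.g.
         //
         //   struct camera_info info;
         //   if (QCamera3HardwareInterface::getCamInfo(cameraId, &info) == 0) {
         //       // info.facing, info.orientation and
         //       // info.static_camera_characteristics are now valid.
         //   }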
   2447 
   2448 /*===========================================================================
    2449  * FUNCTION   : translateCapabilityToMetadata
    2450  *
    2451  * DESCRIPTION: translate the static capability into default request
    2452  *              settings (camera_metadata_t) for the given template
    2453  * PARAMETERS : @type : request template type (CAMERA3_TEMPLATE_*)
   2454  *
   2455  *
   2456  * RETURN     : success: camera_metadata_t*
   2457  *              failure: NULL
   2458  *
   2459  *==========================================================================*/
   2460 camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
   2461 {
   2462     pthread_mutex_lock(&mMutex);
   2463 
   2464     if (mDefaultMetadata[type] != NULL) {
   2465         pthread_mutex_unlock(&mMutex);
   2466         return mDefaultMetadata[type];
   2467     }
   2468     //first time we are handling this request
   2469     //fill up the metadata structure using the wrapper class
   2470     CameraMetadata settings;
   2471     //translate from cam_capability_t to camera_metadata_tag_t
   2472     static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
   2473     settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
   2474     int32_t defaultRequestID = 0;
   2475     settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
   2476 
   2477     /*control*/
   2478 
   2479     uint8_t controlIntent = 0;
   2480     switch (type) {
   2481       case CAMERA3_TEMPLATE_PREVIEW:
   2482         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
   2483         break;
   2484       case CAMERA3_TEMPLATE_STILL_CAPTURE:
   2485         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
   2486         break;
   2487       case CAMERA3_TEMPLATE_VIDEO_RECORD:
   2488         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
   2489         break;
   2490       case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
   2491         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
   2492         break;
   2493       case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
   2494         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
   2495         break;
   2496       default:
   2497         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
   2498         break;
   2499     }
   2500     settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
   2501 
   2502     settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
   2503             &gCamCapability[mCameraId]->exposure_compensation_default, 1);
   2504 
   2505     static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
   2506     settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
   2507 
   2508     static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
   2509     settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
   2510 
   2511     static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
   2512     settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
   2513 
   2514     static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
   2515     settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
   2516 
   2517     static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
   2518     settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
   2519 
   2520     static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; //similar to AUTO?
   2521     settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
   2522 
    2523     uint8_t focusMode;
    2524     if (gCamCapability[mCameraId]->supported_focus_modes_cnt > 1) {
    2525         ALOGV("%s: Setting focus mode to auto", __func__);
    2526         focusMode = ANDROID_CONTROL_AF_MODE_AUTO;
    2527     } else {
    2528         ALOGV("%s: Setting focus mode to off", __func__);
    2529         focusMode = ANDROID_CONTROL_AF_MODE_OFF;
    2530     }
   2531     settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
   2532 
   2533     static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
   2534     settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
   2535 
   2536     /*flash*/
   2537     static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
   2538     settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
   2539 
   2540     static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
   2541     settings.update(ANDROID_FLASH_FIRING_POWER,
   2542             &flashFiringLevel, 1);
   2543 
   2544     /* lens */
   2545     float default_aperture = gCamCapability[mCameraId]->apertures[0];
   2546     settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
   2547 
   2548     if (gCamCapability[mCameraId]->filter_densities_count) {
   2549         float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
   2550         settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
    2551                         1 /*count: single default value*/);
   2552     }
   2553 
   2554     float default_focal_length = gCamCapability[mCameraId]->focal_length;
   2555     settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
   2556 
    2557     /* Exposure time (default to the minimum supported exposure time) */
   2558     int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
   2559     settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
   2560 
   2561     /* sensitivity */
   2562     static const int32_t default_sensitivity = 100;
   2563     settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
   2564 
   2565     mDefaultMetadata[type] = settings.release();
   2566 
   2567     pthread_mutex_unlock(&mMutex);
   2568     return mDefaultMetadata[type];
   2569 }
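         // Call-path sketch (illustrative): the framework requests per-template
         // defaults through construct_default_request_settings(), which
         // forwards to this method, e.g.
         //
         //   const camera_metadata_t *defaults =
         //       hw->translateCapabilityToMetadata(CAMERA3_TEMPLATE_PREVIEW);
         //   // "defaults" now carries the PREVIEW capture intent, AE/AWB/AF
         //   // modes and lens defaults, and is cached in mDefaultMetadata[].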
   2570 
   2571 /*===========================================================================
   2572  * FUNCTION   : setFrameParameters
   2573  *
   2574  * DESCRIPTION: set parameters per frame as requested in the metadata from
   2575  *              framework
   2576  *
   2577  * PARAMETERS :
   2578  *   @frame_id  : frame number for this particular request
   2579  *   @settings  : frame settings information from framework
   2580  *   @streamTypeMask : bit mask of stream types on which buffers are requested
   2581  *
   2582  * RETURN     : success: NO_ERROR
   2583  *              failure:
   2584  *==========================================================================*/
   2585 int QCamera3HardwareInterface::setFrameParameters(int frame_id,
   2586                     const camera_metadata_t *settings, uint32_t streamTypeMask)
   2587 {
   2588     /*translate from camera_metadata_t type to parm_type_t*/
   2589     int rc = 0;
   2590     if (settings == NULL && mFirstRequest) {
   2591         /*settings cannot be null for the first request*/
   2592         return BAD_VALUE;
   2593     }
   2594 
   2595     int32_t hal_version = CAM_HAL_V3;
   2596 
   2597     memset(mParameters, 0, sizeof(parm_buffer_t));
   2598     mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
   2599     AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
   2600                 sizeof(hal_version), &hal_version);
   2601 
   2602     /*we need to update the frame number in the parameters*/
   2603     rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
   2604                                 sizeof(frame_id), &frame_id);
   2605     if (rc < 0) {
   2606         ALOGE("%s: Failed to set the frame number in the parameters", __func__);
   2607         return BAD_VALUE;
   2608     }
   2609 
   2610     /* Update stream id mask where buffers are requested */
   2611     rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_TYPE_MASK,
   2612                                 sizeof(streamTypeMask), &streamTypeMask);
   2613     if (rc < 0) {
   2614         ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
   2615         return BAD_VALUE;
   2616     }
   2617 
    2618     if (settings != NULL) {
   2619         rc = translateMetadataToParameters(settings);
   2620     }
   2621     /*set the parameters to backend*/
   2622     mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
   2623     return rc;
   2624 }
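         // Per-request flow sketch (assumed, based on the HAL3 contract):
         // processCaptureRequest() is expected to call this once per request
         // before queueing buffers, e.g.
         //
         //   rc = setFrameParameters(request->frame_number,
         //                           request->settings, streamTypeMask);
         //   // The first request must carry non-NULL settings; subsequent
         //   // requests may pass NULL to keep the previously applied settings.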
   2625 
   2626 /*===========================================================================
   2627  * FUNCTION   : translateMetadataToParameters
   2628  *
   2629  * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
   2630  *
   2631  *
   2632  * PARAMETERS :
   2633  *   @settings  : frame settings information from framework
   2634  *
   2635  *
   2636  * RETURN     : success: NO_ERROR
   2637  *              failure:
   2638  *==========================================================================*/
   2639 int QCamera3HardwareInterface::translateMetadataToParameters
   2640                                   (const camera_metadata_t *settings)
   2641 {
   2642     int rc = 0;
   2643     CameraMetadata frame_settings;
   2644     frame_settings = settings;
   2645 
   2646 
   2647     if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
   2648         int32_t antibandingMode =
   2649             frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.i32[0];
   2650         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_ANTIBANDING,
   2651                 sizeof(antibandingMode), &antibandingMode);
   2652     }
   2653 
   2654     if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
   2655         int32_t expCompensation = frame_settings.find(
   2656             ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
   2657         if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
   2658             expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
   2659         if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
   2660             expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
   2661         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
   2662           sizeof(expCompensation), &expCompensation);
   2663     }
   2664 
   2665     if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
   2666         uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
   2667         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AEC_LOCK,
   2668                 sizeof(aeLock), &aeLock);
   2669     }
   2670     if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
   2671         cam_fps_range_t fps_range;
   2672         fps_range.min_fps =
   2673             frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
   2674         fps_range.max_fps =
   2675             frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
   2676         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FPS_RANGE,
   2677                 sizeof(fps_range), &fps_range);
   2678     }
   2679 
   2680     float focalDistance = -1.0;
   2681     if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
   2682         focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
   2683         rc = AddSetParmEntryToBatch(mParameters,
   2684                 CAM_INTF_META_LENS_FOCUS_DISTANCE,
   2685                 sizeof(focalDistance), &focalDistance);
   2686     }
   2687 
   2688     if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
   2689         uint8_t fwk_focusMode =
   2690             frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
   2691         uint8_t focusMode;
   2692         if (focalDistance == 0.0 && fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
   2693             focusMode = CAM_FOCUS_MODE_INFINITY;
    2694         } else {
    2695             focusMode = lookupHalName(FOCUS_MODES_MAP,
    2696                     sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
    2697                     fwk_focusMode);
   2698         }
   2699         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FOCUS_MODE,
   2700                 sizeof(focusMode), &focusMode);
   2701     }
   2702 
   2703     if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
   2704         uint8_t awbLock =
   2705             frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
   2706         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AWB_LOCK,
   2707                 sizeof(awbLock), &awbLock);
   2708     }
   2709 
   2710     if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
   2711         uint8_t fwk_whiteLevel =
   2712             frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
   2713         uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
    2714                 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
   2715                 fwk_whiteLevel);
   2716         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_WHITE_BALANCE,
   2717                 sizeof(whiteLevel), &whiteLevel);
   2718     }
   2719 
   2720     if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
   2721         uint8_t fwk_effectMode =
   2722             frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
   2723         uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
    2724                 sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
   2725                 fwk_effectMode);
   2726         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EFFECT,
   2727                 sizeof(effectMode), &effectMode);
   2728     }
   2729 
   2730     if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
   2731         uint8_t fwk_aeMode =
   2732             frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
   2733         uint8_t aeMode;
   2734         int32_t redeye;
   2735 
   2736         if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
   2737             aeMode = CAM_AE_MODE_OFF;
   2738         } else {
   2739             aeMode = CAM_AE_MODE_ON;
   2740         }
   2741         if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
   2742             redeye = 1;
   2743         } else {
   2744             redeye = 0;
   2745         }
   2746 
   2747         int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
    2748                                           sizeof(AE_FLASH_MODE_MAP)/sizeof(AE_FLASH_MODE_MAP[0]),
   2749                                           fwk_aeMode);
   2750         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_MODE,
   2751                 sizeof(aeMode), &aeMode);
   2752         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
   2753                 sizeof(flashMode), &flashMode);
   2754         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_REDEYE_REDUCTION,
   2755                 sizeof(redeye), &redeye);
   2756     }
   2757 
   2758     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
   2759         uint8_t colorCorrectMode =
   2760             frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
   2761         rc =
   2762             AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE,
   2763                     sizeof(colorCorrectMode), &colorCorrectMode);
   2764     }
   2765 
   2766     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
   2767         cam_color_correct_gains_t colorCorrectGains;
   2768         for (int i = 0; i < 4; i++) {
   2769             colorCorrectGains.gains[i] =
   2770                 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
   2771         }
   2772         rc =
   2773             AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_GAINS,
   2774                     sizeof(colorCorrectGains), &colorCorrectGains);
   2775     }
   2776 
   2777     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
   2778         cam_color_correct_matrix_t colorCorrectTransform;
   2779         cam_rational_type_t transform_elem;
   2780         int num = 0;
   2781         for (int i = 0; i < 3; i++) {
   2782            for (int j = 0; j < 3; j++) {
   2783               transform_elem.numerator =
   2784                  frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
   2785               transform_elem.denominator =
   2786                  frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
   2787               colorCorrectTransform.transform_matrix[i][j] = transform_elem;
   2788               num++;
   2789            }
   2790         }
   2791         rc =
   2792             AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
   2793                     sizeof(colorCorrectTransform), &colorCorrectTransform);
   2794     }
   2795 
   2796     cam_trigger_t aecTrigger;
   2797     aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
   2798     aecTrigger.trigger_id = -1;
   2799     if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
   2800         frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
   2801         aecTrigger.trigger =
   2802             frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
   2803         aecTrigger.trigger_id =
   2804             frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
   2805     }
   2806     rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
   2807                                 sizeof(aecTrigger), &aecTrigger);
   2808 
   2809     /*af_trigger must come with a trigger id*/
   2810     if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
   2811         frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
   2812         cam_trigger_t af_trigger;
   2813         af_trigger.trigger =
   2814             frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
   2815         af_trigger.trigger_id =
   2816             frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
   2817         rc = AddSetParmEntryToBatch(mParameters,
   2818                 CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
   2819     }
   2820 
   2821     if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
   2822         uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
   2823         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_MODE,
   2824                 sizeof(metaMode), &metaMode);
   2825         if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
   2826            uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
   2827            uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP,
   2828                                              sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
   2829                                              fwk_sceneMode);
   2830            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
   2831                 sizeof(sceneMode), &sceneMode);
   2832         } else if (metaMode == ANDROID_CONTROL_MODE_OFF) {
   2833            uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
   2834            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
   2835                 sizeof(sceneMode), &sceneMode);
   2836         } else if (metaMode == ANDROID_CONTROL_MODE_AUTO) {
   2837            uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
   2838            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
   2839                 sizeof(sceneMode), &sceneMode);
   2840         }
   2841     }
   2842 
   2843     if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
   2844         int32_t demosaic =
   2845             frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
   2846         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_DEMOSAIC,
   2847                 sizeof(demosaic), &demosaic);
   2848     }
   2849 
   2850     if (frame_settings.exists(ANDROID_EDGE_MODE)) {
   2851         uint8_t edgeMode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
   2852         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_EDGE_MODE,
   2853                 sizeof(edgeMode), &edgeMode);
   2854     }
   2855 
   2856     if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
   2857         int32_t edgeStrength =
   2858             frame_settings.find(ANDROID_EDGE_STRENGTH).data.i32[0];
   2859         rc = AddSetParmEntryToBatch(mParameters,
   2860                 CAM_INTF_META_SHARPNESS_STRENGTH, sizeof(edgeStrength), &edgeStrength);
   2861     }
   2862 
   2863     if (frame_settings.exists(ANDROID_FLASH_MODE)) {
   2864         int32_t respectFlashMode = 1;
   2865         if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
   2866             uint8_t fwk_aeMode =
   2867                 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
   2868             if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
   2869                 respectFlashMode = 0;
   2870                 ALOGI("%s: AE Mode controls flash, ignore android.flash.mode",
   2871                     __func__);
   2872             }
   2873         }
   2874         if (respectFlashMode) {
   2875             uint8_t flashMode =
   2876                 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
   2877             flashMode = (int32_t)lookupHalName(FLASH_MODES_MAP,
    2878                                           sizeof(FLASH_MODES_MAP)/sizeof(FLASH_MODES_MAP[0]),
   2879                                           flashMode);
   2880             ALOGI("%s: flash mode after mapping %d", __func__, flashMode);
   2881             // To check: CAM_INTF_META_FLASH_MODE usage
   2882             rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
   2883                           sizeof(flashMode), &flashMode);
   2884         }
   2885     }
   2886 
   2887     if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
   2888         uint8_t flashPower =
   2889             frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
   2890         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FLASH_POWER,
   2891                 sizeof(flashPower), &flashPower);
   2892     }
   2893 
   2894     if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
   2895         int64_t flashFiringTime =
   2896             frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
   2897         rc = AddSetParmEntryToBatch(mParameters,
   2898                 CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
   2899     }
   2900 
   2901     if (frame_settings.exists(ANDROID_GEOMETRIC_MODE)) {
   2902         uint8_t geometricMode =
   2903             frame_settings.find(ANDROID_GEOMETRIC_MODE).data.u8[0];
   2904         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_GEOMETRIC_MODE,
   2905                 sizeof(geometricMode), &geometricMode);
   2906     }
   2907 
   2908     if (frame_settings.exists(ANDROID_GEOMETRIC_STRENGTH)) {
   2909         uint8_t geometricStrength =
   2910             frame_settings.find(ANDROID_GEOMETRIC_STRENGTH).data.u8[0];
   2911         rc = AddSetParmEntryToBatch(mParameters,
   2912                 CAM_INTF_META_GEOMETRIC_STRENGTH,
   2913                 sizeof(geometricStrength), &geometricStrength);
   2914     }
   2915 
   2916     if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
   2917         uint8_t hotPixelMode =
   2918             frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
   2919         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_HOTPIXEL_MODE,
   2920                 sizeof(hotPixelMode), &hotPixelMode);
   2921     }
   2922 
   2923     if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
   2924         float lensAperture =
   2925             frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
   2926         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_APERTURE,
   2927                 sizeof(lensAperture), &lensAperture);
   2928     }
   2929 
   2930     if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
   2931         float filterDensity =
   2932             frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
   2933         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_FILTERDENSITY,
   2934                 sizeof(filterDensity), &filterDensity);
   2935     }
   2936 
   2937     if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
   2938         float focalLength =
   2939             frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
   2940         rc = AddSetParmEntryToBatch(mParameters,
   2941                 CAM_INTF_META_LENS_FOCAL_LENGTH,
   2942                 sizeof(focalLength), &focalLength);
   2943     }
   2944 
   2945     if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
   2946         uint8_t optStabMode =
   2947             frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
   2948         rc = AddSetParmEntryToBatch(mParameters,
   2949                 CAM_INTF_META_LENS_OPT_STAB_MODE,
   2950                 sizeof(optStabMode), &optStabMode);
   2951     }
   2952 
   2953     if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
   2954         uint8_t noiseRedMode =
   2955             frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
   2956         rc = AddSetParmEntryToBatch(mParameters,
   2957                 CAM_INTF_META_NOISE_REDUCTION_MODE,
   2958                 sizeof(noiseRedMode), &noiseRedMode);
   2959     }
   2960 
   2961     if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
   2962         uint8_t noiseRedStrength =
   2963             frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
   2964         rc = AddSetParmEntryToBatch(mParameters,
   2965                 CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
   2966                 sizeof(noiseRedStrength), &noiseRedStrength);
   2967     }
   2968 
   2969     cam_crop_region_t scalerCropRegion;
   2970     bool scalerCropSet = false;
   2971     if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
   2972         scalerCropRegion.left =
   2973             frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
   2974         scalerCropRegion.top =
   2975             frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
   2976         scalerCropRegion.width =
   2977             frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
   2978         scalerCropRegion.height =
   2979             frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
   2980         rc = AddSetParmEntryToBatch(mParameters,
   2981                 CAM_INTF_META_SCALER_CROP_REGION,
   2982                 sizeof(scalerCropRegion), &scalerCropRegion);
   2983         scalerCropSet = true;
   2984     }
   2985 
   2986     if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
   2987         int64_t sensorExpTime =
   2988             frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
   2989         rc = AddSetParmEntryToBatch(mParameters,
   2990                 CAM_INTF_META_SENSOR_EXPOSURE_TIME,
   2991                 sizeof(sensorExpTime), &sensorExpTime);
   2992     }
   2993 
   2994     if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
   2995         int64_t sensorFrameDuration =
   2996             frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
   2997         if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
   2998             sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
   2999         rc = AddSetParmEntryToBatch(mParameters,
   3000                 CAM_INTF_META_SENSOR_FRAME_DURATION,
   3001                 sizeof(sensorFrameDuration), &sensorFrameDuration);
   3002     }
   3003 
   3004     if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
   3005         int32_t sensorSensitivity =
   3006             frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
   3007         if (sensorSensitivity <
   3008                 gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
   3009             sensorSensitivity =
   3010                 gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
   3011         if (sensorSensitivity >
   3012                 gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
   3013             sensorSensitivity =
   3014                 gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
   3015         rc = AddSetParmEntryToBatch(mParameters,
   3016                 CAM_INTF_META_SENSOR_SENSITIVITY,
   3017                 sizeof(sensorSensitivity), &sensorSensitivity);
   3018     }
   3019 
   3020     if (frame_settings.exists(ANDROID_SHADING_MODE)) {
   3021         int32_t shadingMode =
   3022             frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
   3023         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_MODE,
   3024                 sizeof(shadingMode), &shadingMode);
   3025     }
   3026 
   3027     if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
   3028         uint8_t shadingStrength =
   3029             frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
   3030         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_STRENGTH,
   3031                 sizeof(shadingStrength), &shadingStrength);
   3032     }
   3033 
   3034     if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
   3035         uint8_t facedetectMode =
   3036             frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
   3037         rc = AddSetParmEntryToBatch(mParameters,
   3038                 CAM_INTF_META_STATS_FACEDETECT_MODE,
   3039                 sizeof(facedetectMode), &facedetectMode);
   3040     }
   3041 
   3042     if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
   3043         uint8_t histogramMode =
   3044             frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
   3045         rc = AddSetParmEntryToBatch(mParameters,
   3046                 CAM_INTF_META_STATS_HISTOGRAM_MODE,
   3047                 sizeof(histogramMode), &histogramMode);
   3048     }
   3049 
   3050     if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
   3051         uint8_t sharpnessMapMode =
   3052             frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
   3053         rc = AddSetParmEntryToBatch(mParameters,
   3054                 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
   3055                 sizeof(sharpnessMapMode), &sharpnessMapMode);
   3056     }
   3057 
   3058     if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
   3059         uint8_t tonemapMode =
   3060             frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
   3061         rc = AddSetParmEntryToBatch(mParameters,
   3062                 CAM_INTF_META_TONEMAP_MODE,
   3063                 sizeof(tonemapMode), &tonemapMode);
   3064     }
   3065     int point = 0;
   3066     if (frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE)) {
   3067         cam_tonemap_curve_t tonemapCurveBlue;
   3068         tonemapCurveBlue.tonemap_points_cnt =
   3069            gCamCapability[mCameraId]->max_tone_map_curve_points;
   3070         for (int i = 0; i < tonemapCurveBlue.tonemap_points_cnt; i++) {
   3071             for (int j = 0; j < 2; j++) {
   3072                tonemapCurveBlue.tonemap_points[i][j] =
   3073                   frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
   3074                point++;
   3075             }
   3076         }
   3077         rc = AddSetParmEntryToBatch(mParameters,
   3078                 CAM_INTF_META_TONEMAP_CURVE_BLUE,
   3079                 sizeof(tonemapCurveBlue), &tonemapCurveBlue);
   3080     }
   3081     point = 0;
   3082     if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN)) {
   3083         cam_tonemap_curve_t tonemapCurveGreen;
   3084         tonemapCurveGreen.tonemap_points_cnt =
   3085            gCamCapability[mCameraId]->max_tone_map_curve_points;
   3086         for (int i = 0; i < tonemapCurveGreen.tonemap_points_cnt; i++) {
   3087             for (int j = 0; j < 2; j++) {
   3088                tonemapCurveGreen.tonemap_points[i][j] =
   3089                   frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
   3090                point++;
   3091             }
   3092         }
   3093         rc = AddSetParmEntryToBatch(mParameters,
   3094                 CAM_INTF_META_TONEMAP_CURVE_GREEN,
   3095                 sizeof(tonemapCurveGreen), &tonemapCurveGreen);
   3096     }
   3097     point = 0;
   3098     if (frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
   3099         cam_tonemap_curve_t tonemapCurveRed;
   3100         tonemapCurveRed.tonemap_points_cnt =
   3101            gCamCapability[mCameraId]->max_tone_map_curve_points;
   3102         for (int i = 0; i < tonemapCurveRed.tonemap_points_cnt; i++) {
   3103             for (int j = 0; j < 2; j++) {
   3104                tonemapCurveRed.tonemap_points[i][j] =
   3105                   frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
   3106                point++;
   3107             }
   3108         }
   3109         rc = AddSetParmEntryToBatch(mParameters,
   3110                 CAM_INTF_META_TONEMAP_CURVE_RED,
   3111                 sizeof(tonemapCurveRed), &tonemapCurveRed);
   3112     }
   3113 
   3114     if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
   3115         uint8_t captureIntent =
   3116             frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
   3117         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
   3118                 sizeof(captureIntent), &captureIntent);
   3119     }
   3120 
   3121     if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
   3122         uint8_t blackLevelLock =
   3123             frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
   3124         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_BLACK_LEVEL_LOCK,
   3125                 sizeof(blackLevelLock), &blackLevelLock);
   3126     }
   3127 
   3128     if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
   3129         uint8_t lensShadingMapMode =
   3130             frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
   3131         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
   3132                 sizeof(lensShadingMapMode), &lensShadingMapMode);
   3133     }
   3134 
   3135     if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
   3136         cam_area_t roi;
   3137         bool reset = true;
   3138         convertFromRegions(&roi, settings, ANDROID_CONTROL_AE_REGIONS);
   3139         if (scalerCropSet) {
   3140             reset = resetIfNeededROI(&roi, &scalerCropRegion);
   3141         }
   3142         if (reset) {
   3143             rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_ROI,
   3144                     sizeof(roi), &roi);
   3145         }
   3146     }
   3147 
   3148     if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
   3149         cam_area_t roi;
   3150         bool reset = true;
   3151         convertFromRegions(&roi, settings, ANDROID_CONTROL_AF_REGIONS);
   3152         if (scalerCropSet) {
   3153             reset = resetIfNeededROI(&roi, &scalerCropRegion);
   3154         }
   3155         if (reset) {
   3156             rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AF_ROI,
   3157                     sizeof(roi), &roi);
   3158         }
   3159     }
   3160 
   3161     if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) {
   3162         cam_area_t roi;
   3163         bool reset = true;
   3164         convertFromRegions(&roi, settings, ANDROID_CONTROL_AWB_REGIONS);
   3165         if (scalerCropSet) {
   3166             reset = resetIfNeededROI(&roi, &scalerCropRegion);
   3167         }
   3168         if (reset) {
   3169             rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AWB_REGIONS,
   3170                     sizeof(roi), &roi);
   3171         }
   3172     }
   3173     return rc;
   3174 }
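         // Pattern note: each translation above follows the same shape, so a
         // new tag can be wired up along the lines of the sketch below (the
         // tag and parameter names here are placeholders, not real symbols):
         //
         //   if (frame_settings.exists(ANDROID_SOME_TAG)) {
         //       uint8_t value = frame_settings.find(ANDROID_SOME_TAG).data.u8[0];
         //       rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_SOME_PARM,
         //               sizeof(value), &value);
         //   }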
   3175 
   3176 /*===========================================================================
   3177  * FUNCTION   : getJpegSettings
   3178  *
   3179  * DESCRIPTION: save the jpeg settings in the HAL
   3180  *
   3181  *
   3182  * PARAMETERS :
   3183  *   @settings  : frame settings information from framework
   3184  *
   3185  *
   3186  * RETURN     : success: NO_ERROR
   3187  *              failure:
   3188  *==========================================================================*/
   3189 int QCamera3HardwareInterface::getJpegSettings
   3190                                   (const camera_metadata_t *settings)
   3191 {
   3192     if (mJpegSettings) {
   3193         if (mJpegSettings->gps_timestamp) {
   3194             free(mJpegSettings->gps_timestamp);
   3195             mJpegSettings->gps_timestamp = NULL;
   3196         }
   3197         if (mJpegSettings->gps_coordinates) {
   3198             for (int i = 0; i < 3; i++) {
   3199                 free(mJpegSettings->gps_coordinates[i]);
   3200                 mJpegSettings->gps_coordinates[i] = NULL;
   3201             }
   3202         }
   3203         free(mJpegSettings);
   3204         mJpegSettings = NULL;
   3205     }
    3206     mJpegSettings = (jpeg_settings_t*) malloc(sizeof(jpeg_settings_t));
             if (mJpegSettings == NULL) {
                 ALOGE("%s: No memory for jpeg settings", __func__);
                 return NO_MEMORY;
             }
   3207     CameraMetadata jpeg_settings;
   3208     jpeg_settings = settings;
   3209 
   3210     if (jpeg_settings.exists(ANDROID_JPEG_ORIENTATION)) {
   3211         mJpegSettings->jpeg_orientation =
   3212             jpeg_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
   3213     } else {
   3214         mJpegSettings->jpeg_orientation = 0;
   3215     }
   3216     if (jpeg_settings.exists(ANDROID_JPEG_QUALITY)) {
   3217         mJpegSettings->jpeg_quality =
   3218             jpeg_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
   3219     } else {
   3220         mJpegSettings->jpeg_quality = 85;
   3221     }
   3222     if (jpeg_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
   3223         mJpegSettings->thumbnail_size.width =
   3224             jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
   3225         mJpegSettings->thumbnail_size.height =
   3226             jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
   3227     } else {
   3228         mJpegSettings->thumbnail_size.width = 0;
   3229         mJpegSettings->thumbnail_size.height = 0;
   3230     }
   3231     if (jpeg_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
   3232         for (int i = 0; i < 3; i++) {
    3233             mJpegSettings->gps_coordinates[i] = (double*)malloc(sizeof(double));
   3234             *(mJpegSettings->gps_coordinates[i]) =
   3235                 jpeg_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[i];
   3236         }
   3237     } else{
   3238        for (int i = 0; i < 3; i++) {
   3239             mJpegSettings->gps_coordinates[i] = NULL;
   3240         }
   3241     }
   3242 
   3243     if (jpeg_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
    3244         mJpegSettings->gps_timestamp = (int64_t*)malloc(sizeof(int64_t));
   3245         *(mJpegSettings->gps_timestamp) =
   3246             jpeg_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
   3247     } else {
   3248         mJpegSettings->gps_timestamp = NULL;
   3249     }
   3250 
   3251     if (jpeg_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
   3252         int len = jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count;
   3253         for (int i = 0; i < len; i++) {
   3254             mJpegSettings->gps_processing_method[i] =
   3255                 jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8[i];
   3256         }
   3257         if (mJpegSettings->gps_processing_method[len-1] != '\0') {
   3258             mJpegSettings->gps_processing_method[len] = '\0';
   3259         }
   3260     } else {
   3261         mJpegSettings->gps_processing_method[0] = '\0';
   3262     }
   3263 
   3264     mJpegSettings->sensor_sensitivity = mMetadataResponse.iso_speed;
   3265 
   3266     mJpegSettings->sensor_exposure_time = mMetadataResponse.exposure_time;
   3267 
   3268     if (jpeg_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
   3269         mJpegSettings->lens_focal_length =
   3270             jpeg_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
   3271     }
   3272     if (jpeg_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
   3273         mJpegSettings->exposure_compensation =
   3274             jpeg_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
   3275     }
   3276     mJpegSettings->exposure_comp_step = gCamCapability[mCameraId]->exp_compensation_step;
   3277     mJpegSettings->max_jpeg_size = calcMaxJpegSize();
   3278     mJpegSettings->is_jpeg_format = true;
   3279     mJpegSettings->min_required_pp_mask = gCamCapability[mCameraId]->min_required_pp_mask;
   3280     return 0;
   3281 }
   3282 
   3283 /*===========================================================================
   3284  * FUNCTION   : captureResultCb
   3285  *
   3286  * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
   3287  *
   3288  * PARAMETERS :
    3289  * PARAMETERS :
    3290  *   @metadata     : metadata super buffer from mm-camera-interface
    3291  *   @buffer       : actual gralloc buffer to be returned to framework. NULL if metadata.
    3292  *   @frame_number : frame number the result belongs to
    3293  *   @userdata     : opaque pointer back to the QCamera3HardwareInterface instance
   3293  * RETURN     : NONE
   3294  *==========================================================================*/
   3295 void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
   3296                 camera3_stream_buffer_t *buffer,
   3297                 uint32_t frame_number, void *userdata)
   3298 {
   3299     QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
   3300     if (hw == NULL) {
   3301         ALOGE("%s: Invalid hw %p", __func__, hw);
   3302         return;
   3303     }
   3304 
   3305     hw->captureResultCb(metadata, buffer, frame_number);
   3306     return;
   3307 }
   3308 
   3309 
   3310 /*===========================================================================
   3311  * FUNCTION   : initialize
   3312  *
   3313  * DESCRIPTION: Pass framework callback pointers to HAL
   3314  *
   3315  * PARAMETERS :
   3316  *
   3317  *
   3318  * RETURN     : Success : 0
   3319  *              Failure: -ENODEV
   3320  *==========================================================================*/
   3321 
   3322 int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
   3323                                   const camera3_callback_ops_t *callback_ops)
   3324 {
   3325     ALOGV("%s: E", __func__);
   3326     QCamera3HardwareInterface *hw =
   3327         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   3328     if (!hw) {
   3329         ALOGE("%s: NULL camera device", __func__);
   3330         return -ENODEV;
   3331     }
   3332 
   3333     int rc = hw->initialize(callback_ops);
   3334     ALOGV("%s: X", __func__);
   3335     return rc;
   3336 }
   3337 
   3338 /*===========================================================================
   3339  * FUNCTION   : configure_streams
   3340  *
    3341  * DESCRIPTION: Configure the input/output streams for this camera device
   3342  *
   3343  * PARAMETERS :
   3344  *
   3345  *
   3346  * RETURN     : Success: 0
   3347  *              Failure: -EINVAL (if stream configuration is invalid)
   3348  *                       -ENODEV (fatal error)
   3349  *==========================================================================*/
   3350 
   3351 int QCamera3HardwareInterface::configure_streams(
   3352         const struct camera3_device *device,
   3353         camera3_stream_configuration_t *stream_list)
   3354 {
   3355     ALOGV("%s: E", __func__);
   3356     QCamera3HardwareInterface *hw =
   3357         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   3358     if (!hw) {
   3359         ALOGE("%s: NULL camera device", __func__);
   3360         return -ENODEV;
   3361     }
   3362     int rc = hw->configureStreams(stream_list);
   3363     ALOGV("%s: X", __func__);
   3364     return rc;
   3365 }
   3366 
   3367 /*===========================================================================
   3368  * FUNCTION   : register_stream_buffers
   3369  *
   3370  * DESCRIPTION: Register stream buffers with the device
   3371  *
   3372  * PARAMETERS :
   3373  *
   3374  * RETURN     :
   3375  *==========================================================================*/
   3376 int QCamera3HardwareInterface::register_stream_buffers(
   3377         const struct camera3_device *device,
   3378         const camera3_stream_buffer_set_t *buffer_set)
   3379 {
   3380     ALOGV("%s: E", __func__);
   3381     QCamera3HardwareInterface *hw =
   3382         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   3383     if (!hw) {
   3384         ALOGE("%s: NULL camera device", __func__);
   3385         return -ENODEV;
   3386     }
   3387     int rc = hw->registerStreamBuffers(buffer_set);
   3388     ALOGV("%s: X", __func__);
   3389     return rc;
   3390 }
   3391 
   3392 /*===========================================================================
   3393  * FUNCTION   : construct_default_request_settings
   3394  *
   3395  * DESCRIPTION: Configure a settings buffer to meet the required use case
   3396  *
   3397  * PARAMETERS :
   3398  *
   3399  *
   3400  * RETURN     : Success: Return valid metadata
   3401  *              Failure: Return NULL
   3402  *==========================================================================*/
   3403 const camera_metadata_t* QCamera3HardwareInterface::
   3404     construct_default_request_settings(const struct camera3_device *device,
   3405                                         int type)
   3406 {
   3407 
   3408     ALOGV("%s: E", __func__);
   3409     camera_metadata_t* fwk_metadata = NULL;
   3410     QCamera3HardwareInterface *hw =
   3411         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   3412     if (!hw) {
   3413         ALOGE("%s: NULL camera device", __func__);
   3414         return NULL;
   3415     }
   3416 
   3417     fwk_metadata = hw->translateCapabilityToMetadata(type);
   3418 
   3419     ALOGV("%s: X", __func__);
   3420     return fwk_metadata;
   3421 }
   3422 
   3423 /*===========================================================================
   3424  * FUNCTION   : process_capture_request
   3425  *
    3426  * DESCRIPTION: Send a new capture request to the HAL for processing
   3427  *
   3428  * PARAMETERS :
   3429  *
   3430  *
   3431  * RETURN     :
   3432  *==========================================================================*/
   3433 int QCamera3HardwareInterface::process_capture_request(
   3434                     const struct camera3_device *device,
   3435                     camera3_capture_request_t *request)
   3436 {
   3437     ALOGV("%s: E", __func__);
   3438     QCamera3HardwareInterface *hw =
   3439         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   3440     if (!hw) {
   3441         ALOGE("%s: NULL camera device", __func__);
   3442         return -EINVAL;
   3443     }
   3444 
   3445     int rc = hw->processCaptureRequest(request);
   3446     ALOGV("%s: X", __func__);
   3447     return rc;
   3448 }
   3449 
   3450 /*===========================================================================
   3451  * FUNCTION   : get_metadata_vendor_tag_ops
   3452  *
    3453  * DESCRIPTION: Query methods for vendor extension metadata tags
   3454  *
   3455  * PARAMETERS :
   3456  *
   3457  *
   3458  * RETURN     :
   3459  *==========================================================================*/
   3460 
   3461 void QCamera3HardwareInterface::get_metadata_vendor_tag_ops(
   3462                 const struct camera3_device *device,
   3463                 vendor_tag_query_ops_t* ops)
   3464 {
   3465     ALOGV("%s: E", __func__);
   3466     QCamera3HardwareInterface *hw =
   3467         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   3468     if (!hw) {
   3469         ALOGE("%s: NULL camera device", __func__);
   3470         return;
   3471     }
   3472 
   3473     hw->getMetadataVendorTagOps(ops);
   3474     ALOGV("%s: X", __func__);
   3475     return;
   3476 }
   3477 
   3478 /*===========================================================================
   3479  * FUNCTION   : dump
   3480  *
    3481  * DESCRIPTION: Dump camera device debugging state to the given fd
   3482  *
   3483  * PARAMETERS :
   3484  *
   3485  *
   3486  * RETURN     :
   3487  *==========================================================================*/
   3488 
   3489 void QCamera3HardwareInterface::dump(
   3490                 const struct camera3_device *device, int fd)
   3491 {
   3492     ALOGV("%s: E", __func__);
   3493     QCamera3HardwareInterface *hw =
   3494         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   3495     if (!hw) {
   3496         ALOGE("%s: NULL camera device", __func__);
   3497         return;
   3498     }
   3499 
   3500     hw->dump(fd);
   3501     ALOGV("%s: X", __func__);
   3502     return;
   3503 }
   3504 
   3505 /*===========================================================================
   3506  * FUNCTION   : close_camera_device
   3507  *
    3508  * DESCRIPTION: Close the camera device and release the HAL instance
   3509  *
   3510  * PARAMETERS :
   3511  *
   3512  *
   3513  * RETURN     :
   3514  *==========================================================================*/
   3515 int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
   3516 {
   3517     ALOGV("%s: E", __func__);
   3518     int ret = NO_ERROR;
   3519     QCamera3HardwareInterface *hw =
   3520         reinterpret_cast<QCamera3HardwareInterface *>(
   3521             reinterpret_cast<camera3_device_t *>(device)->priv);
   3522     if (!hw) {
   3523         ALOGE("NULL camera device");
   3524         return BAD_VALUE;
   3525     }
   3526     delete hw;
   3527 
   3528     pthread_mutex_lock(&mCameraSessionLock);
   3529     mCameraSessionActive = 0;
   3530     pthread_mutex_unlock(&mCameraSessionLock);
   3531     ALOGV("%s: X", __func__);
   3532     return ret;
   3533 }
   3534 
   3535 /*===========================================================================
   3536  * FUNCTION   : getWaveletDenoiseProcessPlate
   3537  *
   3538  * DESCRIPTION: query wavelet denoise process plate
   3539  *
   3540  * PARAMETERS : None
   3541  *
    3542  * RETURN     : WNR process plate value
   3543  *==========================================================================*/
   3544 cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
   3545 {
   3546     char prop[PROPERTY_VALUE_MAX];
   3547     memset(prop, 0, sizeof(prop));
   3548     property_get("persist.denoise.process.plates", prop, "0");
   3549     int processPlate = atoi(prop);
   3550     switch(processPlate) {
   3551     case 0:
   3552         return CAM_WAVELET_DENOISE_YCBCR_PLANE;
   3553     case 1:
   3554         return CAM_WAVELET_DENOISE_CBCR_ONLY;
   3555     case 2:
   3556         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
   3557     case 3:
   3558         return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
   3559     default:
   3560         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
   3561     }
   3562 }
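         // Example: the plate selection can be overridden at runtime through
         // the property read above, e.g. from an adb shell:
         //
         //   adb shell setprop persist.denoise.process.plates 1
         //
         // 0 -> YCbCr plane, 1 -> CbCr only, 2 -> streamlined YCbCr,
         // 3 -> streamlined CbCr; any other value falls back to streamlined YCbCr.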
   3563 
   3564 /*===========================================================================
   3565  * FUNCTION   : needRotationReprocess
   3566  *
    3567  * DESCRIPTION: check whether rotation needs to be done by reprocess in pp
   3568  *
   3569  * PARAMETERS : none
   3570  *
   3571  * RETURN     : true: needed
   3572  *              false: no need
   3573  *==========================================================================*/
   3574 bool QCamera3HardwareInterface::needRotationReprocess()
   3575 {
   3576 
   3577     if (!mJpegSettings->is_jpeg_format) {
   3578         // RAW image, no need to reprocess
   3579         return false;
   3580     }
   3581 
   3582     if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0 &&
   3583         mJpegSettings->jpeg_orientation > 0) {
   3584         // current rotation is not zero, and pp has the capability to process rotation
    3585         ALOGD("%s: need to do reprocess for rotation", __func__);
   3586         return true;
   3587     }
   3588 
   3589     return false;
   3590 }
   3591 
   3592 /*===========================================================================
   3593  * FUNCTION   : needReprocess
   3594  *
    3595  * DESCRIPTION: check whether reprocess is needed
   3596  *
   3597  * PARAMETERS : none
   3598  *
   3599  * RETURN     : true: needed
   3600  *              false: no need
   3601  *==========================================================================*/
   3602 bool QCamera3HardwareInterface::needReprocess()
   3603 {
   3604     if (!mJpegSettings->is_jpeg_format) {
   3605         // RAW image, no need to reprocess
   3606         return false;
   3607     }
   3608 
   3609     if ((mJpegSettings->min_required_pp_mask > 0) ||
   3610          isWNREnabled()) {
   3611         // TODO: add for ZSL HDR later
   3612         // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
    3613         ALOGD("%s: need to do reprocess for ZSL WNR or min PP reprocess", __func__);
   3614         return true;
   3615     }
   3616     return needRotationReprocess();
   3617 }
   3618 
   3619 /*===========================================================================
   3620  * FUNCTION   : addOnlineReprocChannel
   3621  *
    3622  * DESCRIPTION: add an online reprocess channel that will do reprocess on frames
   3623  *              coming from input channel
   3624  *
   3625  * PARAMETERS :
   3626  *   @pInputChannel : ptr to input channel whose frames will be post-processed
   3627  *
   3628  * RETURN     : Ptr to the newly created channel obj. NULL if failed.
   3629  *==========================================================================*/
   3630 QCamera3ReprocessChannel *QCamera3HardwareInterface::addOnlineReprocChannel(
   3631                                                       QCamera3Channel *pInputChannel, QCamera3PicChannel *picChHandle)
   3632 {
   3633     int32_t rc = NO_ERROR;
   3634     QCamera3ReprocessChannel *pChannel = NULL;
   3635     if (pInputChannel == NULL) {
   3636         ALOGE("%s: input channel obj is NULL", __func__);
   3637         return NULL;
   3638     }
   3639 
   3640     pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
   3641             mCameraHandle->ops, NULL, pInputChannel->mPaddingInfo, this, picChHandle);
   3642     if (NULL == pChannel) {
   3643         ALOGE("%s: no mem for reprocess channel", __func__);
   3644         return NULL;
   3645     }
   3646 
    3647     // Capture channel: only the snapshot and postview streams need to start together
   3648     mm_camera_channel_attr_t attr;
   3649     memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
   3650     attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
   3651     attr.max_unmatched_frames = getMaxUnmatchedFramesInQueue();
   3652     rc = pChannel->initialize();
   3653     if (rc != NO_ERROR) {
   3654         ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
   3655         delete pChannel;
   3656         return NULL;
   3657     }
   3658 
   3659     // pp feature config
   3660     cam_pp_feature_config_t pp_config;
   3661     memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
   3662     if (gCamCapability[mCameraId]->min_required_pp_mask & CAM_QCOM_FEATURE_SHARPNESS) {
   3663         pp_config.feature_mask |= CAM_QCOM_FEATURE_SHARPNESS;
   3664         pp_config.sharpness = 10;
   3665     }
   3666 
   3667     if (isWNREnabled()) {
   3668         pp_config.feature_mask |= CAM_QCOM_FEATURE_DENOISE2D;
   3669         pp_config.denoise2d.denoise_enable = 1;
   3670         pp_config.denoise2d.process_plates = getWaveletDenoiseProcessPlate();
   3671     }
   3672     if (needRotationReprocess()) {
   3673         pp_config.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
   3674         int rotation = mJpegSettings->jpeg_orientation;
   3675         if (rotation == 0) {
   3676             pp_config.rotation = ROTATE_0;
   3677         } else if (rotation == 90) {
   3678             pp_config.rotation = ROTATE_90;
   3679         } else if (rotation == 180) {
   3680             pp_config.rotation = ROTATE_180;
   3681         } else if (rotation == 270) {
   3682             pp_config.rotation = ROTATE_270;
   3683         }
   3684     }
   3685 
   3686    rc = pChannel->addReprocStreamsFromSource(pp_config,
   3687                                              pInputChannel,
   3688                                              mMetadataChannel);
   3689 
   3690     if (rc != NO_ERROR) {
   3691         delete pChannel;
   3692         return NULL;
   3693     }
   3694     return pChannel;
   3695 }
   3696 
   3697 int QCamera3HardwareInterface::getMaxUnmatchedFramesInQueue()
   3698 {
   3699     return gCamCapability[mCameraId]->min_num_pp_bufs;
   3700 }
   3701 
   3702 bool QCamera3HardwareInterface::isWNREnabled() {
   3703     return gCamCapability[mCameraId]->isWnrSupported;
   3704 }
   3705 
   3706 }; //end namespace qcamera
   3707