      1 /* Copyright (c) 2012-2013, 2015, The Linux Foundation. All rights reserved.
      2 *
      3 * Redistribution and use in source and binary forms, with or without
      4 * modification, are permitted provided that the following conditions are
      5 * met:
      6 *     * Redistributions of source code must retain the above copyright
      7 *       notice, this list of conditions and the following disclaimer.
      8 *     * Redistributions in binary form must reproduce the above
      9 *       copyright notice, this list of conditions and the following
     10 *       disclaimer in the documentation and/or other materials provided
     11 *       with the distribution.
     12 *     * Neither the name of The Linux Foundation nor the names of its
     13 *       contributors may be used to endorse or promote products derived
     14 *       from this software without specific prior written permission.
     15 *
     16 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
     17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
     18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
     19 * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
     20 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
     21 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
     22 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
     23 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
     24 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
     25 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
     26 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
     27 *
     28 */
     29 
     30 #define LOG_TAG "QCamera3HWI"
     31 
     32 #include <cutils/properties.h>
     33 #include <hardware/camera3.h>
     34 #include <camera/CameraMetadata.h>
     35 #include <stdlib.h>
     36 #include <utils/Log.h>
     37 #include <utils/Errors.h>
     38 #include <ui/Fence.h>
     39 #include <gralloc_priv.h>
     40 #include "QCamera3HWI.h"
     41 #include "QCamera3Mem.h"
     42 #include "QCamera3Channel.h"
     43 #include "QCamera3PostProc.h"
     44 
     45 using namespace android;
     46 
     47 namespace qcamera {
     48 #define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
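        // Per-camera capability tables and cached static metadata (indexed by camera
        // ID), plus the most recently applied parameter buffer, kept at file scope.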
     49 cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
     50 parm_buffer_t *prevSettings;
     51 const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
     52 
     53 pthread_mutex_t QCamera3HardwareInterface::mCameraSessionLock =
     54     PTHREAD_MUTEX_INITIALIZER;
     55 unsigned int QCamera3HardwareInterface::mCameraSessionActive = 0;
     56 
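        // Translation tables between Android control-metadata enum values and the
        // corresponding mm-camera (CAM_*) enums understood by the backend.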
     57 const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
     58     { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
     59     { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
     60     { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
     61     { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
     62     { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
     63     { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
     64     { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
     65     { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
     66     { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
     67 };
     68 
     69 const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
     70     { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
     71     { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
     72     { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
     73     { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
     74     { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
     75     { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
     76     { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
     77     { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
     78     { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
     79 };
     80 
     81 const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
     82     { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
     83     { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
     84     { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
     85     { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
     86     { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
     87     { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
     88     { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
     89     { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
     90     { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
     91     { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
     92     { ANDROID_CONTROL_SCENE_MODE_FIREWORKS,      CAM_SCENE_MODE_FIREWORKS },
     93     { ANDROID_CONTROL_SCENE_MODE_SPORTS,         CAM_SCENE_MODE_SPORTS },
     94     { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
     95     { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
     96     { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
     97 };
     98 
     99 const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    100     { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    101     { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    102     { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    103     { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    104     { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    105     { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
    106 };
    107 
    108 const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    109     { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    110     { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    111     { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    112     { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
    113 };
    114 
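        // AE modes are expressed to the backend purely in terms of flash behaviour:
        // both OFF and ON (flash disabled) collapse to CAM_FLASH_MODE_OFF, and the
        // red-eye variant is treated as plain auto-flash.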
    115 const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    116     { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    117     { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    118     { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    119     { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    120     { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
    121 };
    122 
    123 const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    124     { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    125     { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    126     { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
    127 };
    128 
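        // Supported JPEG thumbnail sizes, flattened as (width, height) pairs; the
        // trailing (0, 0) entry conventionally advertises that thumbnail generation
        // can be disabled.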
    129 const int32_t available_thumbnail_sizes[] = {512, 288, 480, 288, 256, 154, 432, 288,
    130                                              320, 240, 176, 144, 0, 0};
    131 
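        // camera3_device_ops vtable handed to the camera framework.  Each entry is a
        // static trampoline; a typical wrapper (sketch only; the real ones appear
        // later in this file) recovers the instance from camera3_device_t::priv:
        //     QCamera3HardwareInterface *hw =
        //         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
        // which works because the constructor stores `this` in mCameraDevice.priv.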
    132 camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    133     initialize:                         QCamera3HardwareInterface::initialize,
    134     configure_streams:                  QCamera3HardwareInterface::configure_streams,
    135     register_stream_buffers:            QCamera3HardwareInterface::register_stream_buffers,
    136     construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    137     process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    138     get_metadata_vendor_tag_ops:        QCamera3HardwareInterface::get_metadata_vendor_tag_ops,
    139     dump:                               QCamera3HardwareInterface::dump,
    140 };
    141 
    142 
    143 /*===========================================================================
    144  * FUNCTION   : QCamera3HardwareInterface
    145  *
    146  * DESCRIPTION: constructor of QCamera3HardwareInterface
    147  *
    148  * PARAMETERS :
    149  *   @cameraId  : camera ID
    150  *
    151  * RETURN     : none
    152  *==========================================================================*/
    153 QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId)
    154     : mCameraId(cameraId),
    155       mCameraHandle(NULL),
    156       mCameraOpened(false),
    157       mCameraInitialized(false),
    158       mCallbackOps(NULL),
    159       mInputStream(NULL),
    160       mMetadataChannel(NULL),
    161       mPictureChannel(NULL),
    162       mFirstRequest(false),
    163       mParamHeap(NULL),
    164       mParameters(NULL),
    165       mJpegSettings(NULL),
    166       mIsZslMode(false),
    167       m_pPowerModule(NULL),
    168       mPrecaptureId(0)
    169 {
    170     mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    171     mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_0;
    172     mCameraDevice.common.close = close_camera_device;
    173     mCameraDevice.ops = &mCameraOps;
    174     mCameraDevice.priv = this;
    175     gCamCapability[cameraId]->version = CAM_HAL_V3;
    176     // TODO: hardcoded for now until mctl adds support for min_num_pp_bufs
    177     // TBD: verify whether this hardcoding is still needed, i.e. whether mctl already fills this to 3
    178     gCamCapability[cameraId]->min_num_pp_bufs = 3;
    179 
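            // mRequestCond/mPendingRequest implement the back-pressure used by
            // processCaptureRequest(): it blocks on this condition until
            // captureResultCb() indicates the pipeline can accept another request.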
    180     pthread_cond_init(&mRequestCond, NULL);
    181     mPendingRequest = 0;
    182     mCurrentRequestId = -1;
    183     pthread_mutex_init(&mMutex, NULL);
    184 
    185     for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
    186         mDefaultMetadata[i] = NULL;
    187 
    188 #ifdef HAS_MULTIMEDIA_HINTS
    189     if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
    190         ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
    191     }
    192 #endif
    193 }
    194 
    195 /*===========================================================================
    196  * FUNCTION   : ~QCamera3HardwareInterface
    197  *
    198  * DESCRIPTION: destructor of QCamera3HardwareInterface
    199  *
    200  * PARAMETERS : none
    201  *
    202  * RETURN     : none
    203  *==========================================================================*/
    204 QCamera3HardwareInterface::~QCamera3HardwareInterface()
    205 {
    206     ALOGV("%s: E", __func__);
    207     /* We need to stop all streams before deleting any stream */
    208     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
    209         it != mStreamInfo.end(); it++) {
    210         QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
    211         if (channel)
    212            channel->stop();
    213     }
    214     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
    215         it != mStreamInfo.end(); it++) {
    216         QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
    217         if (channel)
    218             delete channel;
    219         free (*it);
    220     }
    221 
    222     mPictureChannel = NULL;
    223 
    224     if (mJpegSettings != NULL) {
    225         free(mJpegSettings);
    226         mJpegSettings = NULL;
    227     }
    228 
    229     /* Clean up all channels */
    230     if (mCameraInitialized) {
    231         if (mMetadataChannel) {
    232             mMetadataChannel->stop();
    233             delete mMetadataChannel;
    234             mMetadataChannel = NULL;
    235         }
    236         deinitParameters();
    237     }
    238 
    239     if (mCameraOpened)
    240         closeCamera();
    241 
    242     for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
    243         if (mDefaultMetadata[i])
    244             free_camera_metadata(mDefaultMetadata[i]);
    245 
    246     pthread_cond_destroy(&mRequestCond);
    247 
    248     pthread_mutex_destroy(&mMutex);
    249     ALOGV("%s: X", __func__);
    250 }
    251 
    252 /*===========================================================================
    253  * FUNCTION   : openCamera
    254  *
    255  * DESCRIPTION: open camera
    256  *
    257  * PARAMETERS :
    258  *   @hw_device  : double ptr for camera device struct
    259  *
    260  * RETURN     : int32_t type of status
    261  *              NO_ERROR  -- success
    262  *              non-zero failure code
    263  *==========================================================================*/
    264 int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
    265 {
    266     int rc = 0;
    267     pthread_mutex_lock(&mCameraSessionLock);
    268     if (mCameraSessionActive) {
    269         ALOGE("%s: multiple simultaneous camera instances not supported", __func__);
    270         pthread_mutex_unlock(&mCameraSessionLock);
    271         return -EUSERS;
    272     }
    273 
    274     if (mCameraOpened) {
    275         *hw_device = NULL;
                pthread_mutex_unlock(&mCameraSessionLock);
    276         return PERMISSION_DENIED;
    277     }
    278 
    279     rc = openCamera();
    280     if (rc == 0) {
    281         *hw_device = &mCameraDevice.common;
    282         mCameraSessionActive = 1;
    283     } else
    284         *hw_device = NULL;
    285 
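            /* Hint the power HAL that a sustained, encode-like use case is starting;
             * the matching "state=0" hint is sent from closeCamera(). */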
    286 #ifdef HAS_MULTIMEDIA_HINTS
    287     if (rc == 0) {
    288         if (m_pPowerModule) {
    289             if (m_pPowerModule->powerHint) {
    290                 m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
    291                         (void *)"state=1");
    292             }
    293         }
    294     }
    295 #endif
    296     pthread_mutex_unlock(&mCameraSessionLock);
    297     return rc;
    298 }
    299 
    300 /*===========================================================================
    301  * FUNCTION   : openCamera
    302  *
    303  * DESCRIPTION: open camera
    304  *
    305  * PARAMETERS : none
    306  *
    307  * RETURN     : int32_t type of status
    308  *              NO_ERROR  -- success
    309  *              non-zero failure code
    310  *==========================================================================*/
    311 int QCamera3HardwareInterface::openCamera()
    312 {
    313     if (mCameraHandle) {
    314         ALOGE("Failure: Camera already opened");
    315         return ALREADY_EXISTS;
    316     }
    317     mCameraHandle = camera_open(mCameraId);
    318     if (!mCameraHandle) {
    319         ALOGE("camera_open failed.");
    320         return UNKNOWN_ERROR;
    321     }
    322 
    323     mCameraOpened = true;
    324 
    325     return NO_ERROR;
    326 }
    327 
    328 /*===========================================================================
    329  * FUNCTION   : closeCamera
    330  *
    331  * DESCRIPTION: close camera
    332  *
    333  * PARAMETERS : none
    334  *
    335  * RETURN     : int32_t type of status
    336  *              NO_ERROR  -- success
    337  *              non-zero failure code
    338  *==========================================================================*/
    339 int QCamera3HardwareInterface::closeCamera()
    340 {
    341     int rc = NO_ERROR;
    342 
    343     rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    344     mCameraHandle = NULL;
    345     mCameraOpened = false;
    346 
    347 #ifdef HAS_MULTIMEDIA_HINTS
    348     if (rc == NO_ERROR) {
    349         if (m_pPowerModule) {
    350             if (m_pPowerModule->powerHint) {
    351                 m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
    352                         (void *)"state=0");
    353             }
    354         }
    355     }
    356 #endif
    357 
    358     return rc;
    359 }
    360 
    361 /*===========================================================================
    362  * FUNCTION   : initialize
    363  *
    364  * DESCRIPTION: Initialize frameworks callback functions
    365  *
    366  * PARAMETERS :
    367  *   @callback_ops : callback function to frameworks
    368  *
    369  * RETURN     :
    370  *
    371  *==========================================================================*/
    372 int QCamera3HardwareInterface::initialize(
    373         const struct camera3_callback_ops *callback_ops)
    374 {
    375     int rc;
    376 
    377     pthread_mutex_lock(&mMutex);
    378 
    379     rc = initParameters();
    380     if (rc < 0) {
    381         ALOGE("%s: initParameters failed %d", __func__, rc);
    382         goto err1;
    383     }
    384 
    385     mCallbackOps = callback_ops;
    386 
    387     pthread_mutex_unlock(&mMutex);
    388     mCameraInitialized = true;
    389     return 0;
    390 
    391 err1:
    392     pthread_mutex_unlock(&mMutex);
    393     return rc;
    394 }
    395 
    396 /*===========================================================================
    397  * FUNCTION   : configureStreams
    398  *
    399  * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
    400  *              and output streams.
    401  *
    402  * PARAMETERS :
    403  *   @stream_list : streams to be configured
    404  *
    405  * RETURN     :
    406  *
    407  *==========================================================================*/
    408 int QCamera3HardwareInterface::configureStreams(
    409         camera3_stream_configuration_t *streamList)
    410 {
    411     int rc = 0;
    412     // Sanity check stream_list
    413     if (streamList == NULL) {
    414         ALOGE("%s: NULL stream configuration", __func__);
    415         return BAD_VALUE;
    416     }
    417 
    418     if (streamList->streams == NULL) {
    419         ALOGE("%s: NULL stream list", __func__);
    420         return BAD_VALUE;
    421     }
    422 
    423     if (streamList->num_streams < 1) {
    424         ALOGE("%s: Bad number of streams requested: %d", __func__,
    425                 streamList->num_streams);
    426         return BAD_VALUE;
    427     }
    428 
    429     camera3_stream_t *inputStream = NULL;
    430     camera3_stream_t *jpegStream = NULL;
    431     /* First invalidate all the streams in mStreamInfo;
    432      * if they appear again, they will be validated */
    433     for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
    434             it != mStreamInfo.end(); it++) {
    435         QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
    436         channel->stop();
    437         (*it)->status = INVALID;
    438     }
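            /* After this pass each known stream is in one of three states:
             *   INVALID     - configured previously but absent from the new list
             *                 (its channel and bookkeeping are torn down below)
             *   RECONFIGURE - present in both lists; its channel is recreated and its
             *                 buffers re-registered further down
             *   VALID       - a brand new stream in this configuration */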
    439 
    440     if (mMetadataChannel) {
    441         /* If mStreamInfo is not empty, there is also a metadata stream; stop it too */
    442         mMetadataChannel->stop();
    443     }
    444     // Acquire the mutex after stopping all the channels
    445     pthread_mutex_lock(&mMutex);
    446     for (size_t i = 0; i < streamList->num_streams; i++) {
    447         camera3_stream_t *newStream = streamList->streams[i];
    448         ALOGD("%s: newStream type = %d, stream format = %d stream size : %d x %d",
    449                 __func__, newStream->stream_type, newStream->format,
    450                  newStream->width, newStream->height);
    451         // If the stream is already in mStreamInfo, validate it
    452         bool stream_exists = false;
    453         for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
    454                 it != mStreamInfo.end(); it++) {
    455             if ((*it)->stream == newStream) {
    456                 QCamera3Channel *channel =
    457                     (QCamera3Channel*)(*it)->stream->priv;
    458                 stream_exists = true;
    459                 (*it)->status = RECONFIGURE;
    460                 /*delete the channel object associated with the stream because
    461                   we need to reconfigure*/
    462                 delete channel;
    463                 (*it)->stream->priv = NULL;
    464             }
    465         }
    466         if (!stream_exists) {
    467             //new stream
    468             stream_info_t* stream_info;
    469             stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
    470             stream_info->stream = newStream;
    471             stream_info->status = VALID;
    472             stream_info->registered = 0;
    473             mStreamInfo.push_back(stream_info);
    474         }
    475         if (newStream->stream_type == CAMERA3_STREAM_INPUT
    476                 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
    477             if (inputStream != NULL) {
    478                 ALOGE("%s: Multiple input streams requested!", __func__);
    479                 pthread_mutex_unlock(&mMutex);
    480                 return BAD_VALUE;
    481             }
    482             inputStream = newStream;
    483         }
    484         if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
    485             jpegStream = newStream;
    486         }
    487     }
    488     mInputStream = inputStream;
    489 
    490     /*clean up invalid streams*/
    491     for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
    492             it != mStreamInfo.end();) {
    493         if(((*it)->status) == INVALID){
    494             QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
    495             delete channel;
    496             delete[] (buffer_handle_t*)(*it)->buffer_set.buffers;
    497             free(*it);
    498             it = mStreamInfo.erase(it);
    499         } else {
    500             it++;
    501         }
    502     }
    503 
    504     if (mMetadataChannel) {
    505         delete mMetadataChannel;
    506         mMetadataChannel = NULL;
    507     }
    508 
    509     //Create metadata channel and initialize it
    510     mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
    511                     mCameraHandle->ops, captureResultCb,
    512                     &gCamCapability[mCameraId]->padding_info, this);
    513     if (mMetadataChannel == NULL) {
    514         ALOGE("%s: failed to allocate metadata channel", __func__);
    515         rc = -ENOMEM;
    516         pthread_mutex_unlock(&mMutex);
    517         return rc;
    518     }
    519     rc = mMetadataChannel->initialize();
    520     if (rc < 0) {
    521         ALOGE("%s: metadata channel initialization failed", __func__);
    522         delete mMetadataChannel;
    523         mMetadataChannel = NULL;
    524         pthread_mutex_unlock(&mMutex);
    525         return rc;
    526     }
    527 
    528     /* Allocate channel objects for the requested streams */
    529     for (size_t i = 0; i < streamList->num_streams; i++) {
    530         camera3_stream_t *newStream = streamList->streams[i];
    531         if (newStream->priv == NULL) {
    532             //New stream, construct channel
    533             switch (newStream->stream_type) {
    534             case CAMERA3_STREAM_INPUT:
    535                 newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
    536                 break;
    537             case CAMERA3_STREAM_BIDIRECTIONAL:
    538                 newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
    539                     GRALLOC_USAGE_HW_CAMERA_WRITE;
    540                 break;
    541             case CAMERA3_STREAM_OUTPUT:
    542                 newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
    543                 break;
    544             default:
    545                 ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
    546                 break;
    547             }
    548 
    549             if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
    550                     newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
    551                 QCamera3Channel *channel;
    552                 switch (newStream->format) {
    553                 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
    554                 case HAL_PIXEL_FORMAT_YCbCr_420_888:
    555                     newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
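                            /* A bidirectional stream combined with a BLOB (JPEG) stream implies
                             * ZSL: the channel is sized to the JPEG resolution, presumably so
                             * full-resolution snapshots can be served from these buffers. */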
    556                     if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
    557                         jpegStream) {
    558                         uint32_t width = jpegStream->width;
    559                         uint32_t height = jpegStream->height;
    560                         mIsZslMode = true;
    561                         channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
    562                             mCameraHandle->ops, captureResultCb,
    563                             &gCamCapability[mCameraId]->padding_info, this, newStream,
    564                             width, height);
    565                     } else
    566                         channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
    567                             mCameraHandle->ops, captureResultCb,
    568                             &gCamCapability[mCameraId]->padding_info, this, newStream);
    569                     if (channel == NULL) {
    570                         ALOGE("%s: allocation of channel failed", __func__);
    571                         pthread_mutex_unlock(&mMutex);
    572                         return -ENOMEM;
    573                     }
    574 
    575                     newStream->priv = channel;
    576                     break;
    577                 case HAL_PIXEL_FORMAT_BLOB:
    578                     newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
    579                     mPictureChannel = new QCamera3PicChannel(mCameraHandle->camera_handle,
    580                             mCameraHandle->ops, captureResultCb,
    581                             &gCamCapability[mCameraId]->padding_info, this, newStream);
    582                     if (mPictureChannel == NULL) {
    583                         ALOGE("%s: allocation of channel failed", __func__);
    584                         pthread_mutex_unlock(&mMutex);
    585                         return -ENOMEM;
    586                     }
    587                     newStream->priv = (QCamera3Channel*)mPictureChannel;
    588                     break;
    589 
    590                 //TODO: Add support for app consumed format?
    591                 default:
    592                     ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
    593                     break;
    594                 }
    595             }
    596         } else {
    597             // Channel already exists for this stream
    598             // Do nothing for now
    599         }
    600     }
    601 
    602     mPendingBuffersMap.clear();
    603     /* For the streams to be reconfigured we need to register the buffers
    604        since the framework won't */
    605     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
    606             it != mStreamInfo.end(); it++) {
    607         if ((*it)->status == RECONFIGURE) {
    608             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
    609             /*only register buffers for streams that have already been
    610               registered*/
    611             if ((*it)->registered) {
    612                 rc = channel->registerBuffers((*it)->buffer_set.num_buffers,
    613                         (*it)->buffer_set.buffers);
    614                 if (rc != NO_ERROR) {
    615                     ALOGE("%s: Failed to register the buffers of old stream,"
    616                             " rc = %d", __func__, rc);
    617                 }
    618                 ALOGV("%s: channel %p has %d buffers",
    619                         __func__, channel, (*it)->buffer_set.num_buffers);
    620             }
    621         }
    622 
    623         mPendingBuffersMap.add((*it)->stream, 0);
    624     }
    625 
    626     /* Initialize mPendingRequestInfo and mPendingBuffersMap */
    627     mPendingRequestsList.clear();
    628 
    629     //settings/parameters don't carry over for new configureStreams
    630     memset(mParameters, 0, sizeof(parm_buffer_t));
    631     mFirstRequest = true;
    632 
    633     pthread_mutex_unlock(&mMutex);
    634     return rc;
    635 }
    636 
    637 /*===========================================================================
    638  * FUNCTION   : validateCaptureRequest
    639  *
    640  * DESCRIPTION: validate a capture request from camera service
    641  *
    642  * PARAMETERS :
    643  *   @request : request from framework to process
    644  *
    645  * RETURN     :
    646  *
    647  *==========================================================================*/
    648 int QCamera3HardwareInterface::validateCaptureRequest(
    649                     camera3_capture_request_t *request)
    650 {
    651     ssize_t idx = 0;
    652     const camera3_stream_buffer_t *b;
    653     CameraMetadata meta;
    654 
    655     /* Sanity check the request */
    656     if (request == NULL) {
    657         ALOGE("%s: NULL capture request", __func__);
    658         return BAD_VALUE;
    659     }
    660 
    661     uint32_t frameNumber = request->frame_number;
    662     if (request->input_buffer != NULL &&
    663             request->input_buffer->stream != mInputStream) {
    664         ALOGE("%s: Request %d: Input buffer not from input stream!",
    665                 __FUNCTION__, frameNumber);
    666         return BAD_VALUE;
    667     }
    668     if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
    669         ALOGE("%s: Request %d: No output buffers provided!",
    670                 __FUNCTION__, frameNumber);
    671         return BAD_VALUE;
    672     }
    673     if (request->input_buffer != NULL) {
    674         b = request->input_buffer;
    675         QCamera3Channel *channel =
    676             static_cast<QCamera3Channel*>(b->stream->priv);
    677         if (channel == NULL) {
    678             ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
    679                     __func__, frameNumber, idx);
    680             return BAD_VALUE;
    681         }
    682         if (b->status != CAMERA3_BUFFER_STATUS_OK) {
    683             ALOGE("%s: Request %d: Buffer %d: Status not OK!",
    684                     __func__, frameNumber, idx);
    685             return BAD_VALUE;
    686         }
    687         if (b->release_fence != -1) {
    688             ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
    689                     __func__, frameNumber, idx);
    690             return BAD_VALUE;
    691         }
    692         if (b->buffer == NULL) {
    693             ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
    694                     __func__, frameNumber, idx);
    695             return BAD_VALUE;
    696         }
    697     }
    698 
    699     // Validate all buffers
    700     b = request->output_buffers;
    701     do {
    702         QCamera3Channel *channel =
    703                 static_cast<QCamera3Channel*>(b->stream->priv);
    704         if (channel == NULL) {
    705             ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
    706                     __func__, frameNumber, idx);
    707             return BAD_VALUE;
    708         }
    709         if (b->status != CAMERA3_BUFFER_STATUS_OK) {
    710             ALOGE("%s: Request %d: Buffer %d: Status not OK!",
    711                     __func__, frameNumber, idx);
    712             return BAD_VALUE;
    713         }
    714         if (b->release_fence != -1) {
    715             ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
    716                     __func__, frameNumber, idx);
    717             return BAD_VALUE;
    718         }
    719         if (b->buffer == NULL) {
    720             ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
    721                     __func__, frameNumber, idx);
    722             return BAD_VALUE;
    723         }
    724         idx++;
    725         b = request->output_buffers + idx;
    726     } while (idx < (ssize_t)request->num_output_buffers);
    727 
    728     return NO_ERROR;
    729 }
    730 
    731 /*===========================================================================
    732  * FUNCTION   : registerStreamBuffers
    733  *
    734  * DESCRIPTION: Register buffers for a given stream with the HAL device.
    735  *
    736  * PARAMETERS :
    737  *   @buffer_set : set of buffers to be registered with the HAL for a stream
    738  *
    739  * RETURN     :
    740  *
    741  *==========================================================================*/
    742 int QCamera3HardwareInterface::registerStreamBuffers(
    743         const camera3_stream_buffer_set_t *buffer_set)
    744 {
    745     int rc = 0;
    746 
    747     pthread_mutex_lock(&mMutex);
    748 
    749     if (buffer_set == NULL) {
    750         ALOGE("%s: Invalid buffer_set parameter.", __func__);
    751         pthread_mutex_unlock(&mMutex);
    752         return -EINVAL;
    753     }
    754     if (buffer_set->stream == NULL) {
    755         ALOGE("%s: Invalid stream parameter.", __func__);
    756         pthread_mutex_unlock(&mMutex);
    757         return -EINVAL;
    758     }
    759     if (buffer_set->num_buffers < 1) {
    760         ALOGE("%s: Invalid num_buffers %d.", __func__, buffer_set->num_buffers);
    761         pthread_mutex_unlock(&mMutex);
    762         return -EINVAL;
    763     }
    764     if (buffer_set->buffers == NULL) {
    765         ALOGE("%s: Invalid buffers parameter.", __func__);
    766         pthread_mutex_unlock(&mMutex);
    767         return -EINVAL;
    768     }
    769 
    770     camera3_stream_t *stream = buffer_set->stream;
    771     QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
    772 
    773     //set the buffer_set in the mStreamInfo array
    774     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
    775             it != mStreamInfo.end(); it++) {
    776         if ((*it)->stream == stream) {
    777             uint32_t numBuffers = buffer_set->num_buffers;
    778             (*it)->buffer_set.stream = buffer_set->stream;
    779             (*it)->buffer_set.num_buffers = numBuffers;
    780             (*it)->buffer_set.buffers = new buffer_handle_t*[numBuffers];
    781             if ((*it)->buffer_set.buffers == NULL) {
    782                 ALOGE("%s: Failed to allocate buffer_handle_t*", __func__);
    783                 pthread_mutex_unlock(&mMutex);
    784                 return -ENOMEM;
    785             }
    786             for (size_t j = 0; j < numBuffers; j++){
    787                 (*it)->buffer_set.buffers[j] = buffer_set->buffers[j];
    788             }
    789             (*it)->registered = 1;
    790         }
    791     }
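            /* The copy kept in mStreamInfo above lets configureStreams() re-register
             * these buffers itself for RECONFIGUREd streams, since the framework does
             * not call register_stream_buffers again for streams it already registered. */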
    792     rc = channel->registerBuffers(buffer_set->num_buffers, buffer_set->buffers);
    793     if (rc < 0) {
    794         ALOGE("%s: registerBuffers for stream %p failed", __func__, stream);
    795         pthread_mutex_unlock(&mMutex);
    796         return -ENODEV;
    797     }
    798 
    799     pthread_mutex_unlock(&mMutex);
    800     return NO_ERROR;
    801 }
    802 
    803 /*===========================================================================
    804  * FUNCTION   : processCaptureRequest
    805  *
    806  * DESCRIPTION: process a capture request from camera service
    807  *
    808  * PARAMETERS :
    809  *   @request : request from framework to process
    810  *
    811  * RETURN     :
    812  *
    813  *==========================================================================*/
    814 int QCamera3HardwareInterface::processCaptureRequest(
    815                     camera3_capture_request_t *request)
    816 {
    817     int rc = NO_ERROR;
    818     int32_t request_id;
    819     CameraMetadata meta;
    820 
    821     pthread_mutex_lock(&mMutex);
    822 
    823     rc = validateCaptureRequest(request);
    824     if (rc != NO_ERROR) {
    825         ALOGE("%s: incoming request is not valid", __func__);
    826         pthread_mutex_unlock(&mMutex);
    827         return rc;
    828     }
    829 
    830     uint32_t frameNumber = request->frame_number;
    831     uint32_t streamTypeMask = 0;
    832 
    833     meta = request->settings;
    834     if (meta.exists(ANDROID_REQUEST_ID)) {
    835         request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
    836         mCurrentRequestId = request_id;
    837         ALOGV("%s: Received request with id: %d",__func__, request_id);
    838     } else if (mFirstRequest || mCurrentRequestId == -1){
    839         ALOGE("%s: Unable to find request id field,"
    840                 " & no previous id available", __func__);
                pthread_mutex_unlock(&mMutex);
    841         return NAME_NOT_FOUND;
    842     } else {
    843         ALOGV("%s: Re-using old request id", __func__);
    844         request_id = mCurrentRequestId;
    845     }
    846 
    847     ALOGV("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
    848                                     __func__, __LINE__,
    849                                     request->num_output_buffers,
    850                                     request->input_buffer,
    851                                     frameNumber);
    852     // Acquire all request buffers first
    853     int blob_request = 0;
    854     for (size_t i = 0; i < request->num_output_buffers; i++) {
    855         const camera3_stream_buffer_t& output = request->output_buffers[i];
    856         QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
    857         sp<Fence> acquireFence = new Fence(output.acquire_fence);
    858 
    859         if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
    860             // Store a local copy of the JPEG settings to use as encode parameters.
    861             blob_request = 1;
    862             rc = getJpegSettings(request->settings);
    863             if (rc < 0) {
    864                 ALOGE("%s: failed to get jpeg parameters", __func__);
    865                 pthread_mutex_unlock(&mMutex);
    866                 return rc;
    867             }
    868         }
    869 
    870         rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
    871         if (rc != OK) {
    872             ALOGE("%s: fence wait failed %d", __func__, rc);
    873             pthread_mutex_unlock(&mMutex);
    874             return rc;
    875         }
    876         streamTypeMask |= channel->getStreamTypeMask();
    877     }
    878 
    879     PendingRequestInfo pendingRequest;
    880     pendingRequest.frame_number = frameNumber;
    881     pendingRequest.num_buffers = request->num_output_buffers;
    882     pendingRequest.request_id = request_id;
    883     pendingRequest.blob_request = blob_request;
    884     pendingRequest.ae_trigger.trigger_id = mPrecaptureId;
    885     pendingRequest.ae_trigger.trigger = CAM_AEC_TRIGGER_IDLE;
    886 
    887     rc = setFrameParameters(request->frame_number, request->settings,
    888             streamTypeMask, pendingRequest.ae_trigger);
    889     if (rc < 0) {
    890         ALOGE("%s: fail to set frame parameters", __func__);
    891         pthread_mutex_unlock(&mMutex);
    892         return rc;
    893     }
    894 
    895     for (size_t i = 0; i < request->num_output_buffers; i++) {
    896         RequestedBufferInfo requestedBuf;
    897         requestedBuf.stream = request->output_buffers[i].stream;
    898         requestedBuf.buffer = NULL;
    899         pendingRequest.buffers.push_back(requestedBuf);
    900 
    901         mPendingBuffersMap.editValueFor(requestedBuf.stream)++;
    902     }
    903     mPendingRequestsList.push_back(pendingRequest);
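            // mPendingRequestsList and mPendingBuffersMap now record what is in flight;
            // captureResultCb() pops list entries as metadata arrives and compares the
            // per-stream buffer counts against max_buffers before unblocking us below.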
    904 
    905     // Notify metadata channel we receive a request
    906     mMetadataChannel->request(NULL, frameNumber);
    907 
    908     // Call request on other streams
    909     for (size_t i = 0; i < request->num_output_buffers; i++) {
    910         const camera3_stream_buffer_t& output = request->output_buffers[i];
    911         QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
    912         mm_camera_buf_def_t *pInputBuffer = NULL;
    913 
    914         if (channel == NULL) {
    915             ALOGE("%s: invalid channel pointer for stream", __func__);
    916             continue;
    917         }
    918 
    919         if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
    920             QCamera3RegularChannel* inputChannel = NULL;
    921             if(request->input_buffer != NULL){
    922 
    923                 //Try to get the internal format
    924                 inputChannel = (QCamera3RegularChannel*)
    925                     request->input_buffer->stream->priv;
    926                 if(inputChannel == NULL ){
    927                     ALOGE("%s: failed to get input channel handle", __func__);
    928                 } else {
    929                     pInputBuffer =
    930                         inputChannel->getInternalFormatBuffer(
    931                                 request->input_buffer->buffer);
    932                     ALOGD("%s: Input buffer dump",__func__);
    933                     ALOGD("Stream id: %d", pInputBuffer->stream_id);
    934                     ALOGD("streamtype:%d", pInputBuffer->stream_type);
    935                     ALOGD("frame len:%d", pInputBuffer->frame_len);
    936                 }
    937             }
    938             rc = channel->request(output.buffer, frameNumber, mJpegSettings,
    939                             pInputBuffer,(QCamera3Channel*)inputChannel);
    940         } else {
    941             ALOGV("%s: %d, request with buffer %p, frame_number %d", __func__,
    942                 __LINE__, output.buffer, frameNumber);
    943             rc = channel->request(output.buffer, frameNumber);
    944         }
    945         if (rc < 0)
    946             ALOGE("%s: request failed", __func__);
    947     }
    948 
    949     mFirstRequest = false;
    950 
    951     // Block on the condition variable until captureResultCb() signals that the pipeline can accept another request
    952     mPendingRequest = 1;
    953     while (mPendingRequest == 1) {
    954         pthread_cond_wait(&mRequestCond, &mMutex);
    955     }
    956 
    957     pthread_mutex_unlock(&mMutex);
    958     return rc;
    959 }
    960 
    961 /*===========================================================================
    962  * FUNCTION   : getMetadataVendorTagOps
    963  *
    964  * DESCRIPTION:
    965  *
    966  * PARAMETERS :
    967  *
    968  *
    969  * RETURN     :
    970  *==========================================================================*/
    971 void QCamera3HardwareInterface::getMetadataVendorTagOps(
    972                     vendor_tag_query_ops_t* /*ops*/)
    973 {
    974     /* Enable locks when we eventually add Vendor Tags */
    975     /*
    976     pthread_mutex_lock(&mMutex);
    977 
    978     pthread_mutex_unlock(&mMutex);
    979     */
    980     return;
    981 }
    982 
    983 /*===========================================================================
    984  * FUNCTION   : dump
    985  *
    986  * DESCRIPTION:
    987  *
    988  * PARAMETERS :
    989  *
    990  *
    991  * RETURN     :
    992  *==========================================================================*/
    993 void QCamera3HardwareInterface::dump(int /*fd*/)
    994 {
    995     /*Enable lock when we implement this function*/
    996     /*
    997     pthread_mutex_lock(&mMutex);
    998 
    999     pthread_mutex_unlock(&mMutex);
   1000     */
   1001     return;
   1002 }
   1003 
   1004 
   1005 /*===========================================================================
   1006  * FUNCTION   : captureResultCb
   1007  *
   1008  * DESCRIPTION: Callback handler for all capture result
   1009  *              (streams, as well as metadata)
   1010  *
   1011  * PARAMETERS :
   1012  *   @metadata : metadata information
   1013  *   @buffer   : actual gralloc buffer to be returned to frameworks.
   1014  *               NULL if metadata.
   1015  *
   1016  * RETURN     : NONE
   1017  *==========================================================================*/
   1018 void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
   1019                 camera3_stream_buffer_t *buffer, uint32_t frame_number)
   1020 {
   1021     pthread_mutex_lock(&mMutex);
   1022 
   1023     if (metadata_buf) {
   1024         metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
   1025         int32_t frame_number_valid = *(int32_t *)
   1026             POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
   1027         uint32_t pending_requests = *(uint32_t *)POINTER_OF(
   1028             CAM_INTF_META_PENDING_REQUESTS, metadata);
   1029         uint32_t frame_number = *(uint32_t *)
   1030             POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
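                // Note: this frame_number, reported by the backend metadata, shadows the
                // frame_number parameter, which is only meaningful for buffer callbacks
                // (the else branch below).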
   1031         const struct timeval *tv = (const struct timeval *)
   1032             POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
   1033         nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
   1034             tv->tv_usec * NSEC_PER_USEC;
   1035         bool frame_number_exists = FALSE;
   1036 
   1037         if (!frame_number_valid) {
   1038             ALOGV("%s: Not a valid frame number, used as SOF only", __func__);
   1039             mMetadataChannel->bufDone(metadata_buf);
   1040             free(metadata_buf);
   1041             goto done_metadata;
   1042         }
   1043         ALOGV("%s: valid frame_number = %d, capture_time = %lld", __func__,
   1044                 frame_number, capture_time);
   1045 
   1046         // Go through the pending requests info and send shutter/results to frameworks
   1047         for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
   1048                 i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
   1049             camera3_capture_result_t result;
   1050             camera3_notify_msg_t notify_msg;
   1051             ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);
   1052             frame_number_exists = TRUE; // This frame number exists in Pending list
   1053             // Flush out all entries with less or equal frame numbers.
   1054 
   1055             //TODO: Make sure shutter timestamp really reflects shutter timestamp.
   1056             //Right now it's the same as metadata timestamp
   1057 
   1058             //TODO: When there is metadata drop, how do we derive the timestamp of
   1059             //dropped frames? For now, we fake the dropped timestamp by subtracting
   1060             //from the reported timestamp
   1061             nsecs_t current_capture_time = capture_time -
   1062                 (frame_number - i->frame_number) * NSEC_PER_33MSEC;
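                    // (NSEC_PER_33MSEC is one ~30fps frame interval, so each dropped frame
                    //  is back-dated by one frame time relative to the reported timestamp.)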
   1063 
   1064             // Send shutter notify to frameworks
   1065             notify_msg.type = CAMERA3_MSG_SHUTTER;
   1066             notify_msg.message.shutter.frame_number = i->frame_number;
   1067             notify_msg.message.shutter.timestamp = current_capture_time;
   1068             mCallbackOps->notify(mCallbackOps, &notify_msg);
   1069             ALOGV("%s: notify frame_number = %d, capture_time = %lld", __func__,
   1070                     i->frame_number, capture_time);
   1071 
   1072             // Send empty metadata with already filled buffers for dropped metadata
   1073             // and send valid metadata with already filled buffers for current metadata
   1074             if (i->frame_number < frame_number) {
   1075                 CameraMetadata dummyMetadata;
   1076                 dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
   1077                         &current_capture_time, 1);
   1078                 dummyMetadata.update(ANDROID_REQUEST_ID,
   1079                         &(i->request_id), 1);
   1080                 result.result = dummyMetadata.release();
   1081             } else {
   1082                 result.result = translateCbMetadataToResultMetadata(metadata,
   1083                         current_capture_time, i->request_id, i->ae_trigger);
   1084 
   1085                 if (i->blob_request && needReprocess()) {
   1086                    //If it is a blob request then send the metadata to the picture channel
   1087                    mPictureChannel->queueMetadata(metadata_buf);
   1088 
   1089                 } else {
   1090                    // Return metadata buffer
   1091                    mMetadataChannel->bufDone(metadata_buf);
   1092                    free(metadata_buf);
   1093                 }
   1094             }
   1095             if (!result.result) {
   1096                 ALOGE("%s: metadata is NULL", __func__);
   1097             }
   1098             result.frame_number = i->frame_number;
   1099             result.num_output_buffers = 0;
   1100             result.output_buffers = NULL;
   1101             result.input_buffer = NULL;
   1102             for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
   1103                     j != i->buffers.end(); j++) {
   1104                 if (j->buffer) {
   1105                     result.num_output_buffers++;
   1106                 }
   1107             }
   1108 
   1109             if (result.num_output_buffers > 0) {
   1110                 camera3_stream_buffer_t *result_buffers =
   1111                     new camera3_stream_buffer_t[result.num_output_buffers];
   1112                 if (!result_buffers) {
   1113                     ALOGE("%s: Fatal error: out of memory", __func__);
   1114                 }
   1115                 size_t result_buffers_idx = 0;
   1116                 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
   1117                         j != i->buffers.end(); j++) {
   1118                     if (j->buffer) {
   1119                         result_buffers[result_buffers_idx++] = *(j->buffer);
   1120                         free(j->buffer);
   1121                         j->buffer = NULL;
   1122                         mPendingBuffersMap.editValueFor(j->stream)--;
   1123                     }
   1124                 }
   1125                 result.output_buffers = result_buffers;
   1126 
   1127                 mCallbackOps->process_capture_result(mCallbackOps, &result);
   1128                 ALOGV("%s: meta frame_number = %d, capture_time = %lld",
   1129                         __func__, result.frame_number, current_capture_time);
   1130                 free_camera_metadata((camera_metadata_t *)result.result);
   1131                 delete[] result_buffers;
   1132             } else {
   1133                 mCallbackOps->process_capture_result(mCallbackOps, &result);
   1134                 ALOGV("%s: meta frame_number = %d, capture_time = %lld",
   1135                         __func__, result.frame_number, current_capture_time);
   1136                 free_camera_metadata((camera_metadata_t *)result.result);
   1137             }
   1138             // erase the element from the list
   1139             i = mPendingRequestsList.erase(i);
   1140         }
   1141         if (!frame_number_exists) {
   1142             ALOGD("%s: Frame number# %d not in the Pending Request list", __func__,
   1143                     frame_number);
   1144             // Race condition where in Metadata Frame# is valid but its not in Pending list
   1145             // Race condition wherein the metadata frame number is valid but not in the pending list
   1146             free(metadata_buf);
   1147         }
   1148 
   1149 done_metadata:
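                // Only unblock process_capture_request() when no stream has all of its
                // max_buffers still in flight and the metadata reports no pending
                // requests; otherwise the caller stays parked on mRequestCond.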
   1150         bool max_buffers_dequeued = false;
   1151         for (size_t i = 0; i < mPendingBuffersMap.size(); i++) {
   1152             const camera3_stream_t *stream = mPendingBuffersMap.keyAt(i);
   1153             uint32_t queued_buffers = mPendingBuffersMap.valueAt(i);
   1154             if (queued_buffers == stream->max_buffers) {
   1155                 max_buffers_dequeued = true;
   1156                 break;
   1157             }
   1158         }
   1159         if (!max_buffers_dequeued && !pending_requests) {
   1160             // Unblock process_capture_request
   1161             mPendingRequest = 0;
   1162             pthread_cond_signal(&mRequestCond);
   1163         }
   1164     } else {
   1165         // If the frame number doesn't exist in the pending request list,
   1166         // directly send the buffer to the frameworks and update the pending buffers map.
   1167         // Otherwise, book-keep the buffer.
   1168         List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
   1169         while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
   1170             i++;
   1171         }
   1172         if (i == mPendingRequestsList.end()) {
   1173             // Verify all pending requests frame_numbers are greater
   1174             for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
   1175                     j != mPendingRequestsList.end(); j++) {
   1176                 if (j->frame_number < frame_number) {
   1177                     ALOGE("%s: Error: pending frame number %d is smaller than %d",
   1178                             __func__, j->frame_number, frame_number);
   1179                 }
   1180             }
   1181             camera3_capture_result_t result;
   1182             result.result = NULL;
   1183             result.frame_number = frame_number;
   1184             result.num_output_buffers = 1;
   1185             result.output_buffers = buffer;
   1186             result.input_buffer = NULL;
   1187             ALOGV("%s: result frame_number = %d, buffer = %p",
   1188                     __func__, frame_number, buffer);
   1189             mPendingBuffersMap.editValueFor(buffer->stream)--;
   1190             mCallbackOps->process_capture_result(mCallbackOps, &result);
   1191         } else {
   1192             for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
   1193                     j != i->buffers.end(); j++) {
   1194                 if (j->stream == buffer->stream) {
   1195                     if (j->buffer != NULL) {
   1196                         ALOGE("%s: Error: buffer is already set", __func__);
   1197                     } else {
   1198                         j->buffer = (camera3_stream_buffer_t *)malloc(
   1199                                 sizeof(camera3_stream_buffer_t));
   1200                         *(j->buffer) = *buffer;
   1201                         ALOGV("%s: cache buffer %p at result frame_number %d",
   1202                                 __func__, buffer, frame_number);
   1203                     }
   1204                 }
   1205             }
   1206         }
   1207     }
   1208     pthread_mutex_unlock(&mMutex);
   1209     return;
   1210 }
   1211 
   1212 /*===========================================================================
    1213  * FUNCTION   : translateCbMetadataToResultMetadata
    1214  *
    1215  * DESCRIPTION: translate backend callback metadata into framework result metadata
    1216  * PARAMETERS :
    1217  *   @metadata   : metadata information from callback
    1218  *   @timestamp  : sensor timestamp for this result
    1219  *   @request_id : ID of the originating capture request
    1220  *   @aeTrigger  : AE precapture trigger carried by the request
    1221  * RETURN     : camera_metadata_t* -- metadata in a format specified by fwk
    1222  *==========================================================================*/
   1223 camera_metadata_t*
   1224 QCamera3HardwareInterface::translateCbMetadataToResultMetadata
   1225                                 (metadata_buffer_t *metadata, nsecs_t timestamp,
   1226                                  int32_t request_id, const cam_trigger_t &aeTrigger)
   1227 {
   1228     CameraMetadata camMetadata;
   1229     camera_metadata_t* resultMetadata;
   1230 
   1231     camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
   1232     camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
   1233 
   1234     /*CAM_INTF_META_HISTOGRAM - TODO*/
   1235     /*cam_hist_stats_t  *histogram =
   1236       (cam_hist_stats_t *)POINTER_OF(CAM_INTF_META_HISTOGRAM,
   1237       metadata);*/
   1238 
   1239     /*face detection*/
   1240     cam_face_detection_data_t *faceDetectionInfo =(cam_face_detection_data_t *)
   1241         POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
   1242     uint8_t numFaces = faceDetectionInfo->num_faces_detected;
   1243     int32_t faceIds[numFaces];
   1244     uint8_t faceScores[numFaces];
   1245     int32_t faceRectangles[numFaces * 4];
   1246     int32_t faceLandmarks[numFaces * 6];
   1247     int j = 0, k = 0;
   1248     for (int i = 0; i < numFaces; i++) {
   1249         faceIds[i] = faceDetectionInfo->faces[i].face_id;
   1250         faceScores[i] = faceDetectionInfo->faces[i].score;
   1251         convertToRegions(faceDetectionInfo->faces[i].face_boundary,
   1252                 faceRectangles+j, -1);
   1253         convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
   1254         j+= 4;
   1255         k+= 6;
   1256     }
   1257     if (numFaces > 0) {
   1258         camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
   1259         camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
   1260         camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
   1261             faceRectangles, numFaces*4);
   1262         camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
   1263             faceLandmarks, numFaces*6);
   1264     }
   1265 
   1266     uint8_t  *color_correct_mode =
   1267         (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
   1268     camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);
   1269 
   1270     camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
   1271             &aeTrigger.trigger_id, 1);
   1272 
   1273     /*aec regions*/
   1274     cam_area_t  *hAeRegions =
   1275         (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
   1276     int32_t aeRegions[5];
   1277     convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
   1278     camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);
   1279     if(mIsZslMode) {
   1280         uint8_t ae_state = ANDROID_CONTROL_AE_STATE_CONVERGED;
   1281         camMetadata.update(ANDROID_CONTROL_AE_STATE, &ae_state, 1);
   1282     } else {
   1283         uint8_t ae_state =
   1284             *(uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
   1285         //Override AE state for front(YUV) sensor if corresponding request
   1286         //contain a precapture trigger. This is to work around the precapture
   1287         //trigger timeout for YUV sensor.
   1288         if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT &&
   1289                 aeTrigger.trigger_id > 0 && aeTrigger.trigger ==
   1290                 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_START) {
   1291             ae_state = ANDROID_CONTROL_AE_STATE_CONVERGED;
   1292         }
   1293         camMetadata.update(ANDROID_CONTROL_AE_STATE, &ae_state, 1);
   1294     }
   1295     uint8_t  *focusMode =
   1296         (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
   1297     camMetadata.update(ANDROID_CONTROL_AF_MODE, focusMode, 1);
   1298 
   1299     /*af regions*/
   1300     cam_area_t  *hAfRegions =
   1301         (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
   1302     int32_t afRegions[5];
   1303     convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
   1304     camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);
   1305 
   1306     uint8_t  *afState = (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
   1307     camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);
   1308 
   1309     int32_t  *afTriggerId =
   1310         (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
   1311     camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);
   1312 
   1313     uint8_t  *whiteBalance =
   1314         (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
   1315     camMetadata.update(ANDROID_CONTROL_AWB_MODE, whiteBalance, 1);
   1316 
   1317     /*awb regions*/
   1318     cam_area_t  *hAwbRegions =
   1319         (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
   1320     int32_t awbRegions[5];
   1321     convertToRegions(hAwbRegions->rect, awbRegions, hAwbRegions->weight);
   1322     camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);
   1323 
   1324     uint8_t  *whiteBalanceState =
   1325         (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
   1326     camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);
   1327 
   1328     uint8_t  *mode = (uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
   1329     camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);
   1330 
   1331     uint8_t  *edgeMode = (uint8_t *)POINTER_OF(CAM_INTF_META_EDGE_MODE, metadata);
   1332     camMetadata.update(ANDROID_EDGE_MODE, edgeMode, 1);
   1333 
   1334     uint8_t  *flashPower =
   1335         (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
   1336     camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);
   1337 
   1338     int64_t  *flashFiringTime =
   1339         (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
   1340     camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
   1341 
   1342     /*int32_t  *ledMode =
   1343       (int32_t *)POINTER_OF(CAM_INTF_PARM_LED_MODE, metadata);
   1344       camMetadata.update(ANDROID_FLASH_FIRING_TIME, ledMode, 1);*/
   1345 
   1346     uint8_t  *flashState =
   1347         (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata);
   1348     camMetadata.update(ANDROID_FLASH_STATE, flashState, 1);
   1349 
   1350     uint8_t  *hotPixelMode =
   1351         (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
   1352     camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);
   1353 
   1354     float  *lensAperture =
   1355         (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
   1356     camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
   1357 
   1358     float  *filterDensity =
   1359         (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
   1360     camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
   1361 
   1362     float  *focalLength =
   1363         (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
   1364     camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
   1365 
   1366     float  *focusDistance =
   1367         (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
   1368     camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
   1369 
   1370     float  *focusRange =
   1371         (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
   1372     camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 1);
   1373 
   1374     uint8_t  *opticalStab =
   1375         (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
   1376     camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);
   1377 
   1378     /*int32_t  *focusState =
   1379       (int32_t *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_STATE, metadata);
   1380       camMetadata.update(ANDROID_LENS_STATE , focusState, 1); //check */
   1381 
   1382     uint8_t  *noiseRedMode =
   1383         (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
   1384     camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);
   1385 
   1386     /*CAM_INTF_META_SCALER_CROP_REGION - check size*/
   1387 
   1388     cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
   1389         POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
   1390     int32_t scalerCropRegion[4];
   1391     scalerCropRegion[0] = hScalerCropRegion->left;
   1392     scalerCropRegion[1] = hScalerCropRegion->top;
   1393     scalerCropRegion[2] = hScalerCropRegion->width;
   1394     scalerCropRegion[3] = hScalerCropRegion->height;
   1395     camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
   1396 
   1397     int64_t  *sensorExpTime =
   1398         (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
   1399     mMetadataResponse.exposure_time = *sensorExpTime;
   1400     camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
   1401 
   1402 
    1403     int64_t  *sensorFrameDuration =
    1404         (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
    1405     camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
   1406 
   1407     int32_t  *sensorSensitivity =
   1408         (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
   1409     mMetadataResponse.iso_speed = *sensorSensitivity;
   1410     camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
   1411 
   1412     uint8_t  *shadingMode =
   1413         (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
   1414     camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);
   1415 
   1416     uint8_t  *faceDetectMode =
   1417         (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
   1418     camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, faceDetectMode, 1);
   1419 
   1420     uint8_t  *histogramMode =
   1421         (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
   1422     camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);
   1423 
   1424     uint8_t  *sharpnessMapMode =
   1425         (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
   1426     camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
   1427             sharpnessMapMode, 1);
   1428 
   1429     /*CAM_INTF_META_STATS_SHARPNESS_MAP - check size*/
   1430     cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
   1431         POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
   1432     camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
   1433             (int32_t*)sharpnessMap->sharpness,
   1434             CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);
   1435 
   1436     cam_lens_shading_map_t *lensShadingMap = (cam_lens_shading_map_t *)
   1437         POINTER_OF(CAM_INTF_META_LENS_SHADING_MAP, metadata);
   1438     int map_height = gCamCapability[mCameraId]->lens_shading_map_size.height;
   1439     int map_width  = gCamCapability[mCameraId]->lens_shading_map_size.width;
   1440     camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
   1441                        (float*)lensShadingMap->lens_shading,
   1442                        4*map_width*map_height);
   1443 
   1444     cam_color_correct_gains_t *colorCorrectionGains = (cam_color_correct_gains_t*)
   1445         POINTER_OF(CAM_INTF_META_COLOR_CORRECT_GAINS, metadata);
   1446     camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains, 4);
   1447 
   1448     cam_color_correct_matrix_t *colorCorrectionMatrix = (cam_color_correct_matrix_t*)
   1449         POINTER_OF(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata);
   1450     camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
   1451                        (camera_metadata_rational_t*)colorCorrectionMatrix->transform_matrix, 3*3);
   1452 
   1453     cam_color_correct_gains_t *predColorCorrectionGains = (cam_color_correct_gains_t*)
   1454         POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata);
   1455     camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
   1456                        predColorCorrectionGains->gains, 4);
   1457 
   1458     cam_color_correct_matrix_t *predColorCorrectionMatrix = (cam_color_correct_matrix_t*)
   1459         POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata);
   1460     camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
   1461                        (camera_metadata_rational_t*)predColorCorrectionMatrix->transform_matrix, 3*3);
   1462 
   1463     uint8_t *blackLevelLock = (uint8_t*)
   1464         POINTER_OF(CAM_INTF_META_BLACK_LEVEL_LOCK, metadata);
   1465     camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, blackLevelLock, 1);
   1466 
   1467     uint8_t *sceneFlicker = (uint8_t*)
   1468         POINTER_OF(CAM_INTF_META_SCENE_FLICKER, metadata);
   1469     camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, sceneFlicker, 1);
   1470 
   1471 
   1472     resultMetadata = camMetadata.release();
   1473     return resultMetadata;
   1474 }
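
         /* Usage sketch (editorial addition, not part of the original flow): the
          * camera_metadata_t returned above is typically packed into a capture
          * result before being handed back to the framework, e.g.
          *
          *   camera3_capture_result_t result;
          *   result.frame_number       = frame_number;   // from the pending request
          *   result.result             = resultMetadata; // returned by this function
          *   result.num_output_buffers = 0;              // metadata-only result
          *   result.output_buffers     = NULL;
          *   result.input_buffer       = NULL;
          *   mCallbackOps->process_capture_result(mCallbackOps, &result);
          *
          * Per the camera3 contract the metadata only needs to stay valid for the
          * duration of the callback, so the HAL frees it afterwards. The variables
          * above (frame_number, resultMetadata) are illustrative only.
          */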
   1475 
   1476 /*===========================================================================
   1477  * FUNCTION   : convertToRegions
   1478  *
   1479  * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
   1480  *
   1481  * PARAMETERS :
   1482  *   @rect   : cam_rect_t struct to convert
   1483  *   @region : int32_t destination array
   1484  *   @weight : if we are converting from cam_area_t, weight is valid
   1485  *             else weight = -1
   1486  *
   1487  *==========================================================================*/
   1488 void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
   1489     region[0] = rect.left;
   1490     region[1] = rect.top;
   1491     region[2] = rect.left + rect.width;
   1492     region[3] = rect.top + rect.height;
   1493     if (weight > -1) {
   1494         region[4] = weight;
   1495     }
   1496 }
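
         /* Worked example (editorial addition, illustrative values): a rectangle of
          * left = 100, top = 200, width = 300, height = 400 with weight = 1 becomes
          * the framework's [x_min, y_min, x_max, y_max, weight] layout:
          *
          *   cam_rect_t rect;
          *   rect.left = 100; rect.top = 200; rect.width = 300; rect.height = 400;
          *   int32_t region[5];
          *   convertToRegions(rect, region, 1);
          *   // region = {100, 200, 400, 600, 1}
          *
          * Passing weight = -1 (as the face-rectangle path above does) leaves
          * region[4] untouched, since plain rectangles carry no weight.
          */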
   1497 
   1498 /*===========================================================================
   1499  * FUNCTION   : convertFromRegions
   1500  *
    1501  * DESCRIPTION: helper method to convert a framework region array into cam_area_t
    1502  *
    1503  * PARAMETERS :
    1504  *   @roi      : destination cam_area_t to fill
    1505  *   @settings : capture request settings containing the region tag
    1506  *   @tag      : metadata tag holding the [x_min, y_min, x_max, y_max, weight]
    1507  *               values to read
   1508  *
   1509  *==========================================================================*/
   1510 void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
   1511                                                    const camera_metadata_t *settings,
   1512                                                    uint32_t tag){
   1513     CameraMetadata frame_settings;
   1514     frame_settings = settings;
   1515     int32_t x_min = frame_settings.find(tag).data.i32[0];
   1516     int32_t y_min = frame_settings.find(tag).data.i32[1];
   1517     int32_t x_max = frame_settings.find(tag).data.i32[2];
   1518     int32_t y_max = frame_settings.find(tag).data.i32[3];
   1519     roi->weight = frame_settings.find(tag).data.i32[4];
   1520     roi->rect.left = x_min;
   1521     roi->rect.top = y_min;
   1522     roi->rect.width = x_max - x_min;
   1523     roi->rect.height = y_max - y_min;
   1524 }
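
         /* Note (editorial addition): each 3A region tag (e.g.
          * ANDROID_CONTROL_AE_REGIONS) is encoded by the framework as five int32
          * values, [x_min, y_min, x_max, y_max, weight], so this helper is the
          * inverse of convertToRegions() above:
          *
          *   cam_area_t roi;
          *   convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
          *   // roi.rect   = {x_min, y_min, x_max - x_min, y_max - y_min}
          *   // roi.weight = weight
          *
          * `request` stands for the current camera3_capture_request_t and is shown
          * only for illustration.
          */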
   1525 
   1526 /*===========================================================================
   1527  * FUNCTION   : resetIfNeededROI
   1528  *
   1529  * DESCRIPTION: helper method to reset the roi if it is greater than scaler
   1530  *              crop region
   1531  *
   1532  * PARAMETERS :
   1533  *   @roi       : cam_area_t struct to resize
   1534  *   @scalerCropRegion : cam_crop_region_t region to compare against
    1535  * RETURN     : true  -- ROI (possibly clamped) overlaps the crop region
    1536  *              false -- ROI lies completely outside the crop region
   1537  *==========================================================================*/
   1538 bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
   1539                                                  const cam_crop_region_t* scalerCropRegion)
   1540 {
   1541     int32_t roi_x_max = roi->rect.width + roi->rect.left;
   1542     int32_t roi_y_max = roi->rect.height + roi->rect.top;
    1543     int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
    1544     int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
   1545     if ((roi_x_max < scalerCropRegion->left) ||
   1546         (roi_y_max < scalerCropRegion->top)  ||
   1547         (roi->rect.left > crop_x_max) ||
   1548         (roi->rect.top > crop_y_max)){
   1549         return false;
   1550     }
   1551     if (roi->rect.left < scalerCropRegion->left) {
   1552         roi->rect.left = scalerCropRegion->left;
   1553     }
   1554     if (roi->rect.top < scalerCropRegion->top) {
   1555         roi->rect.top = scalerCropRegion->top;
   1556     }
   1557     if (roi_x_max > crop_x_max) {
   1558         roi_x_max = crop_x_max;
   1559     }
   1560     if (roi_y_max > crop_y_max) {
   1561         roi_y_max = crop_y_max;
   1562     }
   1563     roi->rect.width = roi_x_max - roi->rect.left;
   1564     roi->rect.height = roi_y_max - roi->rect.top;
   1565     return true;
   1566 }
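
         /* Worked example (editorial addition, illustrative values): with a scaler
          * crop region of {left = 0, top = 0, width = 2000, height = 1500} and an
          * ROI of {left = 1900, top = 100, width = 300, height = 300}:
          *
          *   roi_x_max = 1900 + 300 = 2200  -> clamped to crop_x_max = 2000
          *   roi_y_max =  100 + 300 =  400  -> already within crop_y_max = 1500
          *   result: roi = {left = 1900, top = 100, width = 100, height = 300}, true
          *
          * An ROI that lies completely outside the crop region is left untouched
          * and the function returns false so the caller can discard it.
          */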
   1567 
   1568 /*===========================================================================
   1569  * FUNCTION   : convertLandmarks
   1570  *
   1571  * DESCRIPTION: helper method to extract the landmarks from face detection info
   1572  *
   1573  * PARAMETERS :
    1574  *   @face   : cam_face_detection_info_t entry to read the landmarks from
   1575  *   @landmarks : int32_t destination array
   1576  *
   1577  *
   1578  *==========================================================================*/
   1579 void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
   1580 {
   1581     landmarks[0] = face.left_eye_center.x;
   1582     landmarks[1] = face.left_eye_center.y;
    1583     landmarks[2] = face.right_eye_center.x;
   1584     landmarks[3] = face.right_eye_center.y;
   1585     landmarks[4] = face.mouth_center.x;
   1586     landmarks[5] = face.mouth_center.y;
   1587 }
   1588 
   1589 #define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
   1590 /*===========================================================================
   1591  * FUNCTION   : initCapabilities
   1592  *
   1593  * DESCRIPTION: initialize camera capabilities in static data struct
   1594  *
   1595  * PARAMETERS :
   1596  *   @cameraId  : camera Id
   1597  *
   1598  * RETURN     : int32_t type of status
   1599  *              NO_ERROR  -- success
    1600  *              non-zero failure code
   1601  *==========================================================================*/
   1602 int QCamera3HardwareInterface::initCapabilities(int cameraId)
   1603 {
   1604     int rc = 0;
   1605     mm_camera_vtbl_t *cameraHandle = NULL;
   1606     QCamera3HeapMemory *capabilityHeap = NULL;
   1607 
   1608     cameraHandle = camera_open(cameraId);
   1609     if (!cameraHandle) {
   1610         ALOGE("%s: camera_open failed", __func__);
   1611         rc = -1;
   1612         goto open_failed;
   1613     }
   1614 
   1615     capabilityHeap = new QCamera3HeapMemory();
    1616     if (capabilityHeap == NULL) {
    1617         ALOGE("%s: creation of capabilityHeap failed", __func__);
                 rc = NO_MEMORY;  // report failure instead of falling through with rc == 0
    1618         goto heap_creation_failed;
    1619     }
   1620     /* Allocate memory for capability buffer */
   1621     rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
   1622     if(rc != OK) {
    1623         ALOGE("%s: No memory for capability", __func__);
   1624         goto allocate_failed;
   1625     }
   1626 
   1627     /* Map memory for capability buffer */
   1628     memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
   1629     rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
   1630                                 CAM_MAPPING_BUF_TYPE_CAPABILITY,
   1631                                 capabilityHeap->getFd(0),
   1632                                 sizeof(cam_capability_t));
   1633     if(rc < 0) {
   1634         ALOGE("%s: failed to map capability buffer", __func__);
   1635         goto map_failed;
   1636     }
   1637 
   1638     /* Query Capability */
   1639     rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
   1640     if(rc < 0) {
   1641         ALOGE("%s: failed to query capability",__func__);
   1642         goto query_failed;
   1643     }
   1644     gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
   1645     if (!gCamCapability[cameraId]) {
   1646         ALOGE("%s: out of memory", __func__);
   1647         goto query_failed;
   1648     }
   1649     memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
   1650                                         sizeof(cam_capability_t));
   1651     rc = 0;
   1652 
   1653 query_failed:
   1654     cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
   1655                             CAM_MAPPING_BUF_TYPE_CAPABILITY);
   1656 map_failed:
   1657     capabilityHeap->deallocate();
   1658 allocate_failed:
   1659     delete capabilityHeap;
   1660 heap_creation_failed:
   1661     cameraHandle->ops->close_camera(cameraHandle->camera_handle);
   1662     cameraHandle = NULL;
   1663 open_failed:
   1664     return rc;
   1665 }
   1666 
   1667 /*===========================================================================
   1668  * FUNCTION   : initParameters
   1669  *
   1670  * DESCRIPTION: initialize camera parameters
   1671  *
   1672  * PARAMETERS :
   1673  *
   1674  * RETURN     : int32_t type of status
   1675  *              NO_ERROR  -- success
    1676  *              non-zero failure code
   1677  *==========================================================================*/
   1678 int QCamera3HardwareInterface::initParameters()
   1679 {
   1680     int rc = 0;
   1681 
   1682     //Allocate Set Param Buffer
   1683     mParamHeap = new QCamera3HeapMemory();
   1684     rc = mParamHeap->allocate(1, sizeof(parm_buffer_t), false);
   1685     if(rc != OK) {
   1686         rc = NO_MEMORY;
   1687         ALOGE("Failed to allocate SETPARM Heap memory");
   1688         delete mParamHeap;
   1689         mParamHeap = NULL;
   1690         return rc;
   1691     }
   1692 
   1693     //Map memory for parameters buffer
   1694     rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
   1695             CAM_MAPPING_BUF_TYPE_PARM_BUF,
   1696             mParamHeap->getFd(0),
   1697             sizeof(parm_buffer_t));
   1698     if(rc < 0) {
   1699         ALOGE("%s:failed to map SETPARM buffer",__func__);
   1700         rc = FAILED_TRANSACTION;
   1701         mParamHeap->deallocate();
   1702         delete mParamHeap;
   1703         mParamHeap = NULL;
   1704         return rc;
   1705     }
   1706 
   1707     mParameters = (parm_buffer_t*) DATA_PTR(mParamHeap,0);
   1708     return rc;
   1709 }
   1710 
   1711 /*===========================================================================
   1712  * FUNCTION   : deinitParameters
   1713  *
   1714  * DESCRIPTION: de-initialize camera parameters
   1715  *
   1716  * PARAMETERS :
   1717  *
   1718  * RETURN     : NONE
   1719  *==========================================================================*/
   1720 void QCamera3HardwareInterface::deinitParameters()
   1721 {
   1722     mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
   1723             CAM_MAPPING_BUF_TYPE_PARM_BUF);
   1724 
   1725     mParamHeap->deallocate();
   1726     delete mParamHeap;
   1727     mParamHeap = NULL;
   1728 
   1729     mParameters = NULL;
   1730 }
   1731 
   1732 /*===========================================================================
   1733  * FUNCTION   : calcMaxJpegSize
   1734  *
   1735  * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
   1736  *
   1737  * PARAMETERS :
   1738  *
   1739  * RETURN     : max_jpeg_size
   1740  *==========================================================================*/
   1741 int QCamera3HardwareInterface::calcMaxJpegSize()
   1742 {
   1743     int32_t max_jpeg_size = 0;
   1744     int temp_width, temp_height;
   1745     for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
   1746         temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
   1747         temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
   1748         if (temp_width * temp_height > max_jpeg_size ) {
   1749             max_jpeg_size = temp_width * temp_height;
   1750         }
   1751     }
   1752     max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
   1753     return max_jpeg_size;
   1754 }
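
         /* Worked example (editorial addition, hypothetical sensor): if the largest
          * entry in picture_sizes_tbl is 4208 x 3120, then
          *
          *   max_jpeg_size = 4208 * 3120 * 3 / 2 + sizeof(camera3_jpeg_blob_t)
          *                 = 19,693,440 bytes + blob header
          *
          * i.e. the worst-case YUV420 footprint of the largest picture size plus
          * room for the camera3_jpeg_blob_t trailer that marks the end of the
          * encoded JPEG data in the output buffer.
          */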
   1755 
   1756 /*===========================================================================
   1757  * FUNCTION   : initStaticMetadata
   1758  *
   1759  * DESCRIPTION: initialize the static metadata
   1760  *
   1761  * PARAMETERS :
   1762  *   @cameraId  : camera Id
   1763  *
   1764  * RETURN     : int32_t type of status
   1765  *              0  -- success
   1766  *              non-zero failure code
   1767  *==========================================================================*/
   1768 int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
   1769 {
   1770     int rc = 0;
   1771     CameraMetadata staticInfo;
   1772 
   1773     /* android.info: hardware level */
   1774     uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED;
   1775     staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
   1776         &supportedHardwareLevel, 1);
   1777 
   1778     int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
   1779     /*HAL 3 only*/
   1780     /*staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
   1781                     &gCamCapability[cameraId]->min_focus_distance, 1); */
   1782 
   1783     /*hard coded for now but this should come from sensor*/
   1784     float min_focus_distance;
   1785     if(facingBack){
   1786         min_focus_distance = 10;
   1787     } else {
   1788         min_focus_distance = 0;
   1789     }
   1790     staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
   1791                     &min_focus_distance, 1);
   1792 
   1793     staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
   1794                     &gCamCapability[cameraId]->hyper_focal_distance, 1);
   1795 
   1796     /*should be using focal lengths but sensor doesn't provide that info now*/
   1797     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
   1798                       &gCamCapability[cameraId]->focal_length,
   1799                       1);
   1800 
   1801     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
   1802                       gCamCapability[cameraId]->apertures,
   1803                       gCamCapability[cameraId]->apertures_count);
   1804 
   1805     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
   1806                 gCamCapability[cameraId]->filter_densities,
   1807                 gCamCapability[cameraId]->filter_densities_count);
   1808 
   1809 
   1810     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
   1811                       (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
   1812                       gCamCapability[cameraId]->optical_stab_modes_count);
   1813 
   1814     staticInfo.update(ANDROID_LENS_POSITION,
   1815                       gCamCapability[cameraId]->lens_position,
   1816                       sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float));
   1817 
   1818     int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
   1819                                                     gCamCapability[cameraId]->lens_shading_map_size.height};
   1820     staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
   1821                       lens_shading_map_size,
   1822                       sizeof(lens_shading_map_size)/sizeof(int32_t));
   1823 
   1824     staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
   1825             gCamCapability[cameraId]->sensor_physical_size, 2);
   1826 
   1827     staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
   1828             gCamCapability[cameraId]->exposure_time_range, 2);
   1829 
   1830     staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
   1831             &gCamCapability[cameraId]->max_frame_duration, 1);
   1832 
   1833     camera_metadata_rational baseGainFactor = {
   1834             gCamCapability[cameraId]->base_gain_factor.numerator,
   1835             gCamCapability[cameraId]->base_gain_factor.denominator};
   1836     staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
   1837             &baseGainFactor, 1);
   1838 
   1839     staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
   1840                      (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);
   1841 
   1842     int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
   1843                                                gCamCapability[cameraId]->pixel_array_size.height};
   1844     staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
   1845                       pixel_array_size, 2);
   1846 
   1847     int32_t active_array_size[] = {0, 0,
   1848                                                 gCamCapability[cameraId]->active_array_size.width,
   1849                                                 gCamCapability[cameraId]->active_array_size.height};
   1850     staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
   1851                       active_array_size, 4);
   1852 
   1853     staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
   1854             &gCamCapability[cameraId]->white_level, 1);
   1855 
   1856     staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
   1857             gCamCapability[cameraId]->black_level_pattern, 4);
   1858 
   1859     staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
   1860                       &gCamCapability[cameraId]->flash_charge_duration, 1);
   1861 
   1862     staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
   1863                       &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
   1864 
   1865     /*staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
   1866                       (int*)&gCamCapability[cameraId]->max_face_detection_count, 1);*/
   1867     /*hardcode 0 for now*/
   1868     int32_t max_face_count = 0;
   1869     staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
   1870                       &max_face_count, 1);
   1871 
   1872     staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
   1873                       &gCamCapability[cameraId]->histogram_size, 1);
   1874 
   1875     staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
   1876             &gCamCapability[cameraId]->max_histogram_count, 1);
   1877 
   1878     int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
   1879                                                 gCamCapability[cameraId]->sharpness_map_size.height};
   1880 
   1881     staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
   1882             sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
   1883 
   1884     staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
   1885             &gCamCapability[cameraId]->max_sharpness_map_value, 1);
   1886 
   1887 
   1888     staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
   1889                       &gCamCapability[cameraId]->raw_min_duration,
   1890                        1);
   1891 
   1892     int32_t scalar_formats[] = {HAL_PIXEL_FORMAT_YCbCr_420_888,
   1893                                                 HAL_PIXEL_FORMAT_BLOB};
   1894     int scalar_formats_count = sizeof(scalar_formats)/sizeof(int32_t);
   1895     staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
   1896                       scalar_formats,
   1897                       scalar_formats_count);
   1898 
    1899     int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
   1900     makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
   1901               gCamCapability[cameraId]->picture_sizes_tbl_cnt,
   1902               available_processed_sizes);
   1903     staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
   1904                 available_processed_sizes,
   1905                 (gCamCapability[cameraId]->picture_sizes_tbl_cnt) * 2);
   1906 
   1907     staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
   1908                       &gCamCapability[cameraId]->jpeg_min_duration[0],
   1909                       gCamCapability[cameraId]->picture_sizes_tbl_cnt);
   1910 
   1911     int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
   1912     makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
   1913                  gCamCapability[cameraId]->fps_ranges_tbl_cnt,
   1914                  available_fps_ranges);
   1915     staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
   1916             available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );
   1917 
   1918     camera_metadata_rational exposureCompensationStep = {
   1919             gCamCapability[cameraId]->exp_compensation_step.numerator,
   1920             gCamCapability[cameraId]->exp_compensation_step.denominator};
   1921     staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
   1922                       &exposureCompensationStep, 1);
   1923 
   1924     /*TO DO*/
   1925     uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
   1926     staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
   1927                       availableVstabModes, sizeof(availableVstabModes));
   1928 
   1929     /*HAL 1 and HAL 3 common*/
   1930     float maxZoom = 4;
   1931     staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
   1932             &maxZoom, 1);
   1933 
   1934     int32_t max3aRegions[] = {/*AE*/ 1,/*AWB*/ 0,/*AF*/ 1};
   1935     staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
   1936             max3aRegions, 3);
   1937 
   1938     uint8_t availableFaceDetectModes[] = {
   1939             ANDROID_STATISTICS_FACE_DETECT_MODE_OFF };
   1940     staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
   1941                       availableFaceDetectModes,
   1942                       sizeof(availableFaceDetectModes));
   1943 
   1944     int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
   1945                                                         gCamCapability[cameraId]->exposure_compensation_max};
   1946     staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
   1947             exposureCompensationRange,
   1948             sizeof(exposureCompensationRange)/sizeof(int32_t));
   1949 
   1950     uint8_t lensFacing = (facingBack) ?
   1951             ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
   1952     staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
   1953 
   1954     staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
   1955                 available_processed_sizes,
   1956                 (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2));
   1957 
   1958     staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
   1959                       available_thumbnail_sizes,
   1960                       sizeof(available_thumbnail_sizes)/sizeof(int32_t));
   1961 
   1962     int32_t max_jpeg_size = 0;
   1963     int temp_width, temp_height;
   1964     for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
   1965         temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
   1966         temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
   1967         if (temp_width * temp_height > max_jpeg_size ) {
   1968             max_jpeg_size = temp_width * temp_height;
   1969         }
   1970     }
   1971     max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
   1972     staticInfo.update(ANDROID_JPEG_MAX_SIZE,
   1973                       &max_jpeg_size, 1);
   1974 
   1975     uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
   1976     int32_t size = 0;
   1977     for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
   1978         int val = lookupFwkName(EFFECT_MODES_MAP,
   1979                                    sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
   1980                                    gCamCapability[cameraId]->supported_effects[i]);
   1981         if (val != NAME_NOT_FOUND) {
   1982             avail_effects[size] = (uint8_t)val;
   1983             size++;
   1984         }
   1985     }
   1986     staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
   1987                       avail_effects,
   1988                       size);
   1989 
   1990     uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
   1991     uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
   1992     int32_t supported_scene_modes_cnt = 0;
   1993     for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
   1994         int val = lookupFwkName(SCENE_MODES_MAP,
   1995                                 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
   1996                                 gCamCapability[cameraId]->supported_scene_modes[i]);
   1997         if (val != NAME_NOT_FOUND) {
   1998             avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
   1999             supported_indexes[supported_scene_modes_cnt] = i;
   2000             supported_scene_modes_cnt++;
   2001         }
   2002     }
   2003 
   2004     staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
   2005                       avail_scene_modes,
   2006                       supported_scene_modes_cnt);
   2007 
   2008     uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
   2009     makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
   2010                       supported_scene_modes_cnt,
   2011                       scene_mode_overrides,
   2012                       supported_indexes,
   2013                       cameraId);
   2014     staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
   2015                       scene_mode_overrides,
   2016                       supported_scene_modes_cnt*3);
   2017 
   2018     uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
   2019     size = 0;
   2020     for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
   2021         int val = lookupFwkName(ANTIBANDING_MODES_MAP,
   2022                                  sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
   2023                                  gCamCapability[cameraId]->supported_antibandings[i]);
   2024         if (val != NAME_NOT_FOUND) {
   2025             avail_antibanding_modes[size] = (uint8_t)val;
   2026             size++;
   2027         }
   2028 
   2029     }
   2030     staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
   2031                       avail_antibanding_modes,
   2032                       size);
   2033 
   2034     uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
   2035     size = 0;
   2036     for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
   2037         int val = lookupFwkName(FOCUS_MODES_MAP,
   2038                                 sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
   2039                                 gCamCapability[cameraId]->supported_focus_modes[i]);
   2040         if (val != NAME_NOT_FOUND) {
   2041             avail_af_modes[size] = (uint8_t)val;
   2042             size++;
   2043         }
   2044     }
   2045     staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
   2046                       avail_af_modes,
   2047                       size);
   2048 
   2049     uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
   2050     size = 0;
   2051     for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
   2052         int8_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
   2053                                     sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
   2054                                     gCamCapability[cameraId]->supported_white_balances[i]);
   2055         if (val != NAME_NOT_FOUND) {
   2056             avail_awb_modes[size] = (uint8_t)val;
   2057             size++;
   2058         }
   2059     }
   2060     staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
   2061                       avail_awb_modes,
   2062                       size);
   2063 
   2064     uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
   2065     for (int i = 0; i < gCamCapability[cameraId]->supported_flash_firing_level_cnt; i++)
   2066       available_flash_levels[i] = gCamCapability[cameraId]->supported_firing_levels[i];
   2067 
   2068     staticInfo.update(ANDROID_FLASH_FIRING_POWER,
   2069             available_flash_levels,
   2070             gCamCapability[cameraId]->supported_flash_firing_level_cnt);
   2071 
   2072 
   2073     uint8_t flashAvailable = gCamCapability[cameraId]->flash_available;
   2074     staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
   2075             &flashAvailable, 1);
   2076 
   2077     uint8_t avail_ae_modes[5];
   2078     size = 0;
   2079     for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
   2080         avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
   2081         size++;
   2082     }
   2083     if (flashAvailable) {
   2084         avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
   2085         avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
   2086         avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
   2087     }
   2088     staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
   2089                       avail_ae_modes,
   2090                       size);
   2091 
   2092     int32_t sensitivity_range[2];
   2093     sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
   2094     sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
   2095     staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
   2096                       sensitivity_range,
   2097                       sizeof(sensitivity_range) / sizeof(int32_t));
   2098 
   2099     staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
   2100                       &gCamCapability[cameraId]->max_analog_sensitivity,
   2101                       1);
   2102 
   2103     staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
   2104                       &gCamCapability[cameraId]->jpeg_min_duration[0],
   2105                       gCamCapability[cameraId]->picture_sizes_tbl_cnt);
   2106 
   2107     int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
   2108     staticInfo.update(ANDROID_SENSOR_ORIENTATION,
   2109                       &sensor_orientation,
   2110                       1);
   2111 
   2112     int32_t max_output_streams[3] = {1, 3, 1};
   2113     staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
   2114                       max_output_streams,
   2115                       3);
   2116 
   2117     gStaticMetadata[cameraId] = staticInfo.release();
   2118     return rc;
   2119 }
   2120 
   2121 /*===========================================================================
   2122  * FUNCTION   : makeTable
   2123  *
   2124  * DESCRIPTION: make a table of sizes
   2125  *
   2126  * PARAMETERS :
   2127  *
   2128  *
   2129  *==========================================================================*/
   2130 void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
   2131                                           int32_t* sizeTable)
   2132 {
   2133     int j = 0;
   2134     for (int i = 0; i < size; i++) {
   2135         sizeTable[j] = dimTable[i].width;
   2136         sizeTable[j+1] = dimTable[i].height;
   2137         j+=2;
   2138     }
   2139 }
   2140 
   2141 /*===========================================================================
   2142  * FUNCTION   : makeFPSTable
   2143  *
   2144  * DESCRIPTION: make a table of fps ranges
   2145  *
   2146  * PARAMETERS :
   2147  *
   2148  *==========================================================================*/
   2149 void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
   2150                                           int32_t* fpsRangesTable)
   2151 {
   2152     int j = 0;
   2153     for (int i = 0; i < size; i++) {
   2154         fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
   2155         fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
   2156         j+=2;
   2157     }
   2158 }
   2159 
   2160 /*===========================================================================
   2161  * FUNCTION   : makeOverridesList
   2162  *
   2163  * DESCRIPTION: make a list of scene mode overrides
   2164  *
   2165  * PARAMETERS :
   2166  *
   2167  *
   2168  *==========================================================================*/
   2169 void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
   2170                                                   uint8_t size, uint8_t* overridesList,
   2171                                                   uint8_t* supported_indexes,
   2172                                                   int camera_id)
   2173 {
    2174     /*The daemon gives a list of overrides for all scene modes. However, we
    2175       should send the fwk only the overrides for the scene modes actually
    2176       supported by the framework.*/
   2177     int j = 0, index = 0, supt = 0;
   2178     uint8_t focus_override;
   2179     for (int i = 0; i < size; i++) {
   2180         supt = 0;
   2181         index = supported_indexes[i];
   2182         overridesList[j] = gCamCapability[camera_id]->flash_available ? ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:ANDROID_CONTROL_AE_MODE_ON;
   2183         overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
   2184                                  sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
   2185                                                     overridesTable[index].awb_mode);
   2186         focus_override = (uint8_t)overridesTable[index].af_mode;
   2187         for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) {
   2188            if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
   2189               supt = 1;
   2190               break;
   2191            }
   2192         }
   2193         if (supt) {
   2194            overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
   2195                                               sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
   2196                                               focus_override);
   2197         } else {
   2198            overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
   2199         }
   2200         j+=3;
   2201     }
   2202 }
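
         /* Layout note (editorial addition): the list is packed as three consecutive
          * bytes per supported scene mode, matching what
          * ANDROID_CONTROL_SCENE_MODE_OVERRIDES expects:
          *
          *   overridesList[3*i + 0] = AE mode override
          *   overridesList[3*i + 1] = AWB mode override
          *   overridesList[3*i + 2] = AF mode override
          *
          * which is why initStaticMetadata() publishes it with a count of
          * supported_scene_modes_cnt * 3.
          */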
   2203 
   2204 /*===========================================================================
    2205  * FUNCTION   : getScalarFormat
    2206  *
    2207  * DESCRIPTION: convert the backend format into a HAL pixel format recognized
    2208  *              by the framework
    2209  * PARAMETERS : @format : the format from backend
    2210  *
    2211  * RETURN     : HAL pixel format recognized by the framework
   2212  *
   2213  *==========================================================================*/
   2214 int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
   2215 {
   2216     int32_t halPixelFormat;
   2217 
   2218     switch (format) {
   2219     case CAM_FORMAT_YUV_420_NV12:
   2220         halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
   2221         break;
   2222     case CAM_FORMAT_YUV_420_NV21:
   2223         halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
   2224         break;
   2225     case CAM_FORMAT_YUV_420_NV21_ADRENO:
   2226         halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
   2227         break;
   2228     case CAM_FORMAT_YUV_420_YV12:
   2229         halPixelFormat = HAL_PIXEL_FORMAT_YV12;
   2230         break;
   2231     case CAM_FORMAT_YUV_422_NV16:
   2232     case CAM_FORMAT_YUV_422_NV61:
   2233     default:
   2234         halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
   2235         break;
   2236     }
   2237     return halPixelFormat;
   2238 }
   2239 
   2240 /*===========================================================================
   2241  * FUNCTION   : getSensorSensitivity
   2242  *
   2243  * DESCRIPTION: convert iso_mode to an integer value
   2244  *
   2245  * PARAMETERS : iso_mode : the iso_mode supported by sensor
   2246  *
    2247  * RETURN     : sensitivity supported by sensor
   2248  *
   2249  *==========================================================================*/
   2250 int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
   2251 {
   2252     int32_t sensitivity;
   2253 
   2254     switch (iso_mode) {
   2255     case CAM_ISO_MODE_100:
   2256         sensitivity = 100;
   2257         break;
   2258     case CAM_ISO_MODE_200:
   2259         sensitivity = 200;
   2260         break;
   2261     case CAM_ISO_MODE_400:
   2262         sensitivity = 400;
   2263         break;
   2264     case CAM_ISO_MODE_800:
   2265         sensitivity = 800;
   2266         break;
   2267     case CAM_ISO_MODE_1600:
   2268         sensitivity = 1600;
   2269         break;
   2270     default:
   2271         sensitivity = -1;
   2272         break;
   2273     }
   2274     return sensitivity;
   2275 }
   2276 
   2277 
   2278 /*===========================================================================
   2279  * FUNCTION   : AddSetParmEntryToBatch
   2280  *
   2281  * DESCRIPTION: add set parameter entry into batch
   2282  *
   2283  * PARAMETERS :
   2284  *   @p_table     : ptr to parameter buffer
   2285  *   @paramType   : parameter type
   2286  *   @paramLength : length of parameter value
   2287  *   @paramValue  : ptr to parameter value
   2288  *
   2289  * RETURN     : int32_t type of status
   2290  *              NO_ERROR  -- success
    2291  *              non-zero failure code
   2292  *==========================================================================*/
   2293 int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table,
   2294                                                           cam_intf_parm_type_t paramType,
   2295                                                           uint32_t paramLength,
   2296                                                           void *paramValue)
   2297 {
   2298     int position = paramType;
   2299     int current, next;
   2300 
   2301     /*************************************************************************
   2302     *                 Code to take care of linking next flags                *
   2303     *************************************************************************/
   2304     current = GET_FIRST_PARAM_ID(p_table);
   2305     if (position == current){
   2306         //DO NOTHING
   2307     } else if (position < current){
   2308         SET_NEXT_PARAM_ID(position, p_table, current);
   2309         SET_FIRST_PARAM_ID(p_table, position);
   2310     } else {
   2311         /* Search for the position in the linked list where we need to slot in*/
   2312         while (position > GET_NEXT_PARAM_ID(current, p_table))
   2313             current = GET_NEXT_PARAM_ID(current, p_table);
   2314 
   2315         /*If node already exists no need to alter linking*/
   2316         if (position != GET_NEXT_PARAM_ID(current, p_table)) {
   2317             next = GET_NEXT_PARAM_ID(current, p_table);
   2318             SET_NEXT_PARAM_ID(current, p_table, position);
   2319             SET_NEXT_PARAM_ID(position, p_table, next);
   2320         }
   2321     }
   2322 
   2323     /*************************************************************************
   2324     *                   Copy contents into entry                             *
   2325     *************************************************************************/
   2326 
   2327     if (paramLength > sizeof(parm_type_t)) {
   2328         ALOGE("%s:Size of input larger than max entry size",__func__);
   2329         return BAD_VALUE;
   2330     }
   2331     memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
   2332     return NO_ERROR;
   2333 }
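
         /* Worked example (editorial addition): the parameter buffer behaves like a
          * sparse table threaded by a singly linked list of parameter IDs, so the
          * daemon only visits entries that were actually set. Assuming the table
          * starts out with its first-ID field set to the end-of-list sentinel,
          * adding IDs 7, 3 and 9 (in that order) produces:
          *
          *   after add(7):  first = 7,           7 -> end
          *   after add(3):  first = 3,  3 -> 7,  7 -> end
          *   after add(9):  first = 3,  3 -> 7,  7 -> 9,  9 -> end
          *
          * The payload itself is memcpy'd into the slot addressed by
          * POINTER_OF(paramType, p_table), independent of the link order.
          */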
   2334 
   2335 /*===========================================================================
   2336  * FUNCTION   : lookupFwkName
   2337  *
    2338  * DESCRIPTION: In case the enum is not the same in the fwk and the backend,
    2339  *              make sure the parameter is correctly propagated
   2340  *
   2341  * PARAMETERS  :
   2342  *   @arr      : map between the two enums
   2343  *   @len      : len of the map
   2344  *   @hal_name : name of the hal_parm to map
   2345  *
    2346  * RETURN     : int8_t type of status
    2347  *              fwk_name       -- success
    2348  *              NAME_NOT_FOUND -- no matching framework value
   2349  *==========================================================================*/
   2350 int8_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
   2351                                              int len, int hal_name)
   2352 {
   2353 
   2354     for (int i = 0; i < len; i++) {
   2355         if (arr[i].hal_name == hal_name)
   2356             return arr[i].fwk_name;
   2357     }
   2358 
    2359     /* Not being able to find a matching framework type is not necessarily
    2360      * an error. This happens when mm-camera supports more attributes
    2361      * than the framework does. */
   2362     ALOGD("%s: Cannot find matching framework type", __func__);
   2363     return NAME_NOT_FOUND;
   2364 }
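
         /* Usage sketch (editorial addition): callers walk a backend capability list
          * and advertise only the values that have a framework equivalent, as the
          * effect-mode loop in initStaticMetadata() does:
          *
          *   int val = lookupFwkName(EFFECT_MODES_MAP,
          *                           sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
          *                           backend_value);   // backend_value is hypothetical
          *   if (val != NAME_NOT_FOUND) {
          *       // safe to report (uint8_t)val to the framework
          *   }
          *
          * A NAME_NOT_FOUND return is expected here and simply means the backend
          * value has no framework counterpart.
          */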
   2365 
   2366 /*===========================================================================
   2367  * FUNCTION   : lookupHalName
   2368  *
    2369  * DESCRIPTION: In case the enum is not the same in the fwk and the backend,
    2370  *              make sure the parameter is correctly propagated
   2371  *
   2372  * PARAMETERS  :
   2373  *   @arr      : map between the two enums
   2374  *   @len      : len of the map
    2375  *   @fwk_name : framework enum value to map
    2376  *
    2377  * RETURN     : int8_t type of status
    2378  *              hal_name       -- success
    2379  *              NAME_NOT_FOUND -- no matching HAL value
   2380  *==========================================================================*/
   2381 int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
   2382                                              int len, int fwk_name)
   2383 {
   2384     for (int i = 0; i < len; i++) {
   2385        if (arr[i].fwk_name == fwk_name)
   2386            return arr[i].hal_name;
   2387     }
   2388     ALOGE("%s: Cannot find matching hal type", __func__);
   2389     return NAME_NOT_FOUND;
   2390 }
   2391 
   2392 /*===========================================================================
    2393  * FUNCTION   : getCamInfo
   2394  *
   2395  * DESCRIPTION: query camera capabilities
   2396  *
   2397  * PARAMETERS :
   2398  *   @cameraId  : camera Id
   2399  *   @info      : camera info struct to be filled in with camera capabilities
   2400  *
   2401  * RETURN     : int32_t type of status
   2402  *              NO_ERROR  -- success
    2403  *              non-zero failure code
   2404  *==========================================================================*/
   2405 int QCamera3HardwareInterface::getCamInfo(int cameraId,
   2406                                     struct camera_info *info)
   2407 {
   2408     int rc = 0;
   2409 
   2410     if (NULL == gCamCapability[cameraId]) {
   2411         rc = initCapabilities(cameraId);
   2412         if (rc < 0) {
   2413             //pthread_mutex_unlock(&g_camlock);
   2414             return rc;
   2415         }
   2416     }
   2417 
   2418     if (NULL == gStaticMetadata[cameraId]) {
   2419         rc = initStaticMetadata(cameraId);
   2420         if (rc < 0) {
   2421             return rc;
   2422         }
   2423     }
   2424 
   2425     switch(gCamCapability[cameraId]->position) {
   2426     case CAM_POSITION_BACK:
   2427         info->facing = CAMERA_FACING_BACK;
   2428         break;
   2429 
   2430     case CAM_POSITION_FRONT:
   2431         info->facing = CAMERA_FACING_FRONT;
   2432         break;
   2433 
   2434     default:
   2435         ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
   2436         rc = -1;
   2437         break;
   2438     }
   2439 
   2440 
   2441     info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
   2442     info->device_version = HARDWARE_DEVICE_API_VERSION(3, 0);
   2443     info->static_camera_characteristics = gStaticMetadata[cameraId];
   2444 
   2445     return rc;
   2446 }
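
         /*
          * Illustrative sketch (assumes getCamInfo() is exposed as a static member
          * and reached through the camera module's get_camera_info() hook): the
          * fields filled in above are consumed by the framework roughly as follows.
          *
          *     struct camera_info info;
          *     if (QCamera3HardwareInterface::getCamInfo(0, &info) == 0) {
          *         // info.facing      -> CAMERA_FACING_BACK or CAMERA_FACING_FRONT
          *         // info.orientation -> sensor mount angle, in degrees
          *         // info.device_version -> HARDWARE_DEVICE_API_VERSION(3, 0)
          *         // info.static_camera_characteristics -> cached static metadata
          *     }
          */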
   2447 
   2448 /*===========================================================================
    2449  * FUNCTION   : translateCapabilityToMetadata
   2450  *
    2451  * DESCRIPTION: translate static capabilities into a default camera_metadata_t
    2452  *
    2453  * PARAMETERS : @type : request template type (CAMERA3_TEMPLATE_*)
   2454  *
   2455  *
   2456  * RETURN     : success: camera_metadata_t*
   2457  *              failure: NULL
   2458  *
   2459  *==========================================================================*/
   2460 camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
   2461 {
   2462     pthread_mutex_lock(&mMutex);
   2463 
   2464     if (mDefaultMetadata[type] != NULL) {
   2465         pthread_mutex_unlock(&mMutex);
   2466         return mDefaultMetadata[type];
   2467     }
   2468     //first time we are handling this request
   2469     //fill up the metadata structure using the wrapper class
   2470     CameraMetadata settings;
   2471     //translate from cam_capability_t to camera_metadata_tag_t
   2472     static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
   2473     settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
   2474     int32_t defaultRequestID = 0;
   2475     settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
   2476 
   2477     /*control*/
   2478 
   2479     uint8_t controlIntent = 0;
   2480     switch (type) {
   2481       case CAMERA3_TEMPLATE_PREVIEW:
   2482         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
   2483         break;
   2484       case CAMERA3_TEMPLATE_STILL_CAPTURE:
   2485         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
   2486         break;
   2487       case CAMERA3_TEMPLATE_VIDEO_RECORD:
   2488         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
   2489         break;
   2490       case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
   2491         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
   2492         break;
   2493       case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
   2494         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
   2495         break;
   2496       default:
   2497         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
   2498         break;
   2499     }
   2500     settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
   2501 
   2502     settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
   2503             &gCamCapability[mCameraId]->exposure_compensation_default, 1);
   2504 
   2505     static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
   2506     settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
   2507 
   2508     static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
   2509     settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
   2510 
   2511     static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
   2512     settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
   2513 
   2514     static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
   2515     settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
   2516 
   2517     static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
   2518     settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
   2519 
    2520     static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; // behaves like AUTO with face detection
   2521     settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
   2522 
   2523     static uint8_t focusMode;
   2524     if (gCamCapability[mCameraId]->supported_focus_modes_cnt > 1) {
   2525         ALOGE("%s: Setting focus mode to auto", __func__);
   2526         focusMode = ANDROID_CONTROL_AF_MODE_AUTO;
   2527     } else {
   2528         ALOGE("%s: Setting focus mode to off", __func__);
   2529         focusMode = ANDROID_CONTROL_AF_MODE_OFF;
   2530     }
   2531     settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
   2532 
   2533     static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
   2534     settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
   2535 
   2536     /*flash*/
   2537     static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
   2538     settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
   2539 
   2540     static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
   2541     settings.update(ANDROID_FLASH_FIRING_POWER,
   2542             &flashFiringLevel, 1);
   2543 
   2544     /* lens */
   2545     float default_aperture = gCamCapability[mCameraId]->apertures[0];
   2546     settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
   2547 
    2548     if (gCamCapability[mCameraId]->filter_densities_count) {
    2549         float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
    2550         /* publish a single default value in the template */
    2551         settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density, 1);
    2552     }
   2553 
   2554     float default_focal_length = gCamCapability[mCameraId]->focal_length;
   2555     settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
   2556 
    2557     /* Exposure time (default to the minimum supported exposure time) */
   2558     int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
   2559     settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
   2560 
   2561     /* sensitivity */
   2562     static const int32_t default_sensitivity = 100;
   2563     settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
   2564 
   2565     mDefaultMetadata[type] = settings.release();
   2566 
   2567     pthread_mutex_unlock(&mMutex);
   2568     return mDefaultMetadata[type];
   2569 }
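
         /*
          * Illustrative sketch (assumptions: driven through the HAL3 entry point
          * construct_default_request_settings() defined later in this file; "hw" is
          * a hypothetical QCamera3HardwareInterface instance): the framework fetches
          * a per-template default once, then overrides entries on its own copy.
          *
          *     const camera_metadata_t *def =
          *         hw->translateCapabilityToMetadata(CAMERA3_TEMPLATE_PREVIEW);
          *     CameraMetadata request;
          *     request = def;   // CameraMetadata::operator= clones the buffer
          *     uint8_t awb = ANDROID_CONTROL_AWB_MODE_DAYLIGHT;
          *     request.update(ANDROID_CONTROL_AWB_MODE, &awb, 1);
          */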
   2570 
   2571 /*===========================================================================
   2572  * FUNCTION   : setFrameParameters
   2573  *
   2574  * DESCRIPTION: set parameters per frame as requested in the metadata from
   2575  *              framework
   2576  *
   2577  * PARAMETERS :
   2578  *   @frame_id  : frame number for this particular request
   2579  *   @settings  : frame settings information from framework
   2580  *   @streamTypeMask : bit mask of stream types on which buffers are requested
   2581  *   @aeTrigger : Return aeTrigger if it exists in the request
   2582  *
   2583  * RETURN     : success: NO_ERROR
    2584  *              failure: non-zero failure code
   2585  *==========================================================================*/
   2586 int QCamera3HardwareInterface::setFrameParameters(int frame_id,
   2587         const camera_metadata_t *settings, uint32_t streamTypeMask,
   2588         cam_trigger_t &aeTrigger)
   2589 {
   2590     /*translate from camera_metadata_t type to parm_type_t*/
   2591     int rc = 0;
   2592     if (settings == NULL && mFirstRequest) {
   2593         /*settings cannot be null for the first request*/
   2594         return BAD_VALUE;
   2595     }
   2596 
   2597     int32_t hal_version = CAM_HAL_V3;
   2598 
   2599     memset(mParameters, 0, sizeof(parm_buffer_t));
   2600     mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
   2601     AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
   2602                 sizeof(hal_version), &hal_version);
   2603 
   2604     /*we need to update the frame number in the parameters*/
   2605     rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
   2606                                 sizeof(frame_id), &frame_id);
   2607     if (rc < 0) {
   2608         ALOGE("%s: Failed to set the frame number in the parameters", __func__);
   2609         return BAD_VALUE;
   2610     }
   2611 
   2612     /* Update stream id mask where buffers are requested */
   2613     rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_TYPE_MASK,
   2614                                 sizeof(streamTypeMask), &streamTypeMask);
   2615     if (rc < 0) {
   2616         ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
   2617         return BAD_VALUE;
   2618     }
   2619 
   2620     if(settings != NULL){
   2621         rc = translateMetadataToParameters(settings, aeTrigger);
   2622     }
   2623     /*set the parameters to backend*/
   2624     mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
   2625     return rc;
   2626 }
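
         /*
          * Illustrative sketch (assumptions: invoked from processCaptureRequest();
          * getStreamTypeMask() is a hypothetical per-channel helper standing in for
          * however the stream type bits are actually collected): building the stream
          * mask for one request and applying the frame parameters.
          *
          *     uint32_t streamTypeMask = 0;
          *     for (uint32_t i = 0; i < request->num_output_buffers; i++) {
          *         QCamera3Channel *channel = (QCamera3Channel *)
          *                 request->output_buffers[i].stream->priv;
          *         streamTypeMask |= channel->getStreamTypeMask(); // hypothetical
          *     }
          *     cam_trigger_t aeTrigger;
          *     memset(&aeTrigger, 0, sizeof(aeTrigger));
          *     setFrameParameters(request->frame_number, request->settings,
          *             streamTypeMask, aeTrigger);
          */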
   2627 
   2628 /*===========================================================================
   2629  * FUNCTION   : translateMetadataToParameters
   2630  *
   2631  * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
   2632  *
   2633  *
   2634  * PARAMETERS :
   2635  *   @settings  : frame settings information from framework
   2636  *   @aeTrigger : output ae trigger if it's set in request
   2637  *
   2638  * RETURN     : success: NO_ERROR
    2639  *              failure: non-zero failure code
   2640  *==========================================================================*/
   2641 int QCamera3HardwareInterface::translateMetadataToParameters(
   2642         const camera_metadata_t *settings, cam_trigger_t &aeTrigger)
   2643 {
   2644     int rc = 0;
   2645     CameraMetadata frame_settings;
   2646     frame_settings = settings;
   2647 
   2648 
   2649     if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
   2650         int32_t antibandingMode =
   2651             frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.i32[0];
   2652         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_ANTIBANDING,
   2653                 sizeof(antibandingMode), &antibandingMode);
   2654     }
   2655 
   2656     if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
   2657         int32_t expCompensation = frame_settings.find(
   2658             ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
   2659         if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
   2660             expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
   2661         if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
   2662             expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
   2663         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
   2664           sizeof(expCompensation), &expCompensation);
   2665     }
   2666 
   2667     if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
   2668         uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
   2669         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AEC_LOCK,
   2670                 sizeof(aeLock), &aeLock);
   2671     }
   2672     if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
   2673         cam_fps_range_t fps_range;
   2674         fps_range.min_fps =
   2675             frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
   2676         fps_range.max_fps =
   2677             frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
   2678         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FPS_RANGE,
   2679                 sizeof(fps_range), &fps_range);
   2680     }
   2681 
   2682     float focalDistance = -1.0;
   2683     if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
   2684         focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
   2685         rc = AddSetParmEntryToBatch(mParameters,
   2686                 CAM_INTF_META_LENS_FOCUS_DISTANCE,
   2687                 sizeof(focalDistance), &focalDistance);
   2688     }
   2689 
   2690     if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
   2691         uint8_t fwk_focusMode =
   2692             frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
   2693         uint8_t focusMode;
   2694         if (focalDistance == 0.0 && fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
   2695             focusMode = CAM_FOCUS_MODE_INFINITY;
    2696         } else {
    2697             focusMode = lookupHalName(FOCUS_MODES_MAP,
    2698                     sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
    2699                     fwk_focusMode);
    2700         }
   2701         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FOCUS_MODE,
   2702                 sizeof(focusMode), &focusMode);
   2703     }
   2704 
   2705     if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
   2706         uint8_t awbLock =
   2707             frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
   2708         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AWB_LOCK,
   2709                 sizeof(awbLock), &awbLock);
   2710     }
   2711 
   2712     if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
   2713         uint8_t fwk_whiteLevel =
   2714             frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
   2715         uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
    2716                 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
   2717                 fwk_whiteLevel);
   2718         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_WHITE_BALANCE,
   2719                 sizeof(whiteLevel), &whiteLevel);
   2720     }
   2721 
   2722     if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
   2723         uint8_t fwk_effectMode =
   2724             frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
   2725         uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
    2726                 sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
   2727                 fwk_effectMode);
   2728         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EFFECT,
   2729                 sizeof(effectMode), &effectMode);
   2730     }
   2731 
   2732     if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
   2733         uint8_t fwk_aeMode =
   2734             frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
   2735         uint8_t aeMode;
   2736         int32_t redeye;
   2737 
   2738         if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
   2739             aeMode = CAM_AE_MODE_OFF;
   2740         } else {
   2741             aeMode = CAM_AE_MODE_ON;
   2742         }
   2743         if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
   2744             redeye = 1;
   2745         } else {
   2746             redeye = 0;
   2747         }
   2748 
   2749         int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
    2750                                           sizeof(AE_FLASH_MODE_MAP)/sizeof(AE_FLASH_MODE_MAP[0]),
   2751                                           fwk_aeMode);
   2752         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_MODE,
   2753                 sizeof(aeMode), &aeMode);
   2754         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
   2755                 sizeof(flashMode), &flashMode);
   2756         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_REDEYE_REDUCTION,
   2757                 sizeof(redeye), &redeye);
   2758     }
   2759 
   2760     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
   2761         uint8_t colorCorrectMode =
   2762             frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
   2763         rc =
   2764             AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE,
   2765                     sizeof(colorCorrectMode), &colorCorrectMode);
   2766     }
   2767 
   2768     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
   2769         cam_color_correct_gains_t colorCorrectGains;
   2770         for (int i = 0; i < 4; i++) {
   2771             colorCorrectGains.gains[i] =
   2772                 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
   2773         }
   2774         rc =
   2775             AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_GAINS,
   2776                     sizeof(colorCorrectGains), &colorCorrectGains);
   2777     }
   2778 
   2779     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
   2780         cam_color_correct_matrix_t colorCorrectTransform;
   2781         cam_rational_type_t transform_elem;
   2782         int num = 0;
   2783         for (int i = 0; i < 3; i++) {
   2784            for (int j = 0; j < 3; j++) {
   2785               transform_elem.numerator =
   2786                  frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
   2787               transform_elem.denominator =
   2788                  frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
   2789               colorCorrectTransform.transform_matrix[i][j] = transform_elem;
   2790               num++;
   2791            }
   2792         }
   2793         rc =
   2794             AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
   2795                     sizeof(colorCorrectTransform), &colorCorrectTransform);
   2796     }
   2797 
    2798     if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER) &&
   2799         frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
   2800         aeTrigger.trigger =
   2801             frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
   2802         aeTrigger.trigger_id =
   2803             frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
   2804         mPrecaptureId = aeTrigger.trigger_id;
   2805     }
   2806     rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
   2807                                 sizeof(aeTrigger), &aeTrigger);
   2808 
   2809     /*af_trigger must come with a trigger id*/
   2810     if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
   2811         frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
   2812         cam_trigger_t af_trigger;
   2813         af_trigger.trigger =
   2814             frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
   2815         af_trigger.trigger_id =
   2816             frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
   2817         rc = AddSetParmEntryToBatch(mParameters,
   2818                 CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
   2819     }
   2820 
   2821     if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
   2822         uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
   2823         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_MODE,
   2824                 sizeof(metaMode), &metaMode);
   2825         if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
   2826            uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
   2827            uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP,
   2828                                              sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
   2829                                              fwk_sceneMode);
   2830            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
   2831                 sizeof(sceneMode), &sceneMode);
   2832         } else if (metaMode == ANDROID_CONTROL_MODE_OFF) {
   2833            uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
   2834            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
   2835                 sizeof(sceneMode), &sceneMode);
   2836         } else if (metaMode == ANDROID_CONTROL_MODE_AUTO) {
   2837            uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF;
   2838            rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE,
   2839                 sizeof(sceneMode), &sceneMode);
   2840         }
   2841     }
   2842 
   2843     if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
   2844         int32_t demosaic =
   2845             frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
   2846         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_DEMOSAIC,
   2847                 sizeof(demosaic), &demosaic);
   2848     }
   2849 
   2850     if (frame_settings.exists(ANDROID_EDGE_MODE)) {
   2851         uint8_t edgeMode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
   2852         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_EDGE_MODE,
   2853                 sizeof(edgeMode), &edgeMode);
   2854     }
   2855 
   2856     if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
   2857         int32_t edgeStrength =
   2858             frame_settings.find(ANDROID_EDGE_STRENGTH).data.i32[0];
   2859         rc = AddSetParmEntryToBatch(mParameters,
   2860                 CAM_INTF_META_SHARPNESS_STRENGTH, sizeof(edgeStrength), &edgeStrength);
   2861     }
   2862 
   2863     if (frame_settings.exists(ANDROID_FLASH_MODE)) {
   2864         int32_t respectFlashMode = 1;
   2865         if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
   2866             uint8_t fwk_aeMode =
   2867                 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
   2868             if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
   2869                 respectFlashMode = 0;
   2870                 ALOGI("%s: AE Mode controls flash, ignore android.flash.mode",
   2871                     __func__);
   2872             }
   2873         }
   2874         if (respectFlashMode) {
   2875             uint8_t flashMode =
   2876                 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
   2877             flashMode = (int32_t)lookupHalName(FLASH_MODES_MAP,
    2878                                           sizeof(FLASH_MODES_MAP)/sizeof(FLASH_MODES_MAP[0]),
   2879                                           flashMode);
   2880             ALOGI("%s: flash mode after mapping %d", __func__, flashMode);
   2881             // To check: CAM_INTF_META_FLASH_MODE usage
   2882             rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE,
   2883                           sizeof(flashMode), &flashMode);
   2884         }
   2885     }
   2886 
   2887     if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
   2888         uint8_t flashPower =
   2889             frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
   2890         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FLASH_POWER,
   2891                 sizeof(flashPower), &flashPower);
   2892     }
   2893 
   2894     if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
   2895         int64_t flashFiringTime =
   2896             frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
   2897         rc = AddSetParmEntryToBatch(mParameters,
   2898                 CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
   2899     }
   2900 
   2901     if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
   2902         uint8_t hotPixelMode =
   2903             frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
   2904         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_HOTPIXEL_MODE,
   2905                 sizeof(hotPixelMode), &hotPixelMode);
   2906     }
   2907 
   2908     if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
   2909         float lensAperture =
   2910             frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
   2911         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_APERTURE,
   2912                 sizeof(lensAperture), &lensAperture);
   2913     }
   2914 
   2915     if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
   2916         float filterDensity =
   2917             frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
   2918         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_FILTERDENSITY,
   2919                 sizeof(filterDensity), &filterDensity);
   2920     }
   2921 
   2922     if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
   2923         float focalLength =
   2924             frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
   2925         rc = AddSetParmEntryToBatch(mParameters,
   2926                 CAM_INTF_META_LENS_FOCAL_LENGTH,
   2927                 sizeof(focalLength), &focalLength);
   2928     }
   2929 
   2930     if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
   2931         uint8_t optStabMode =
   2932             frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
   2933         rc = AddSetParmEntryToBatch(mParameters,
   2934                 CAM_INTF_META_LENS_OPT_STAB_MODE,
   2935                 sizeof(optStabMode), &optStabMode);
   2936     }
   2937 
   2938     if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
   2939         uint8_t noiseRedMode =
   2940             frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
   2941         rc = AddSetParmEntryToBatch(mParameters,
   2942                 CAM_INTF_META_NOISE_REDUCTION_MODE,
   2943                 sizeof(noiseRedMode), &noiseRedMode);
   2944     }
   2945 
   2946     if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
   2947         uint8_t noiseRedStrength =
   2948             frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
   2949         rc = AddSetParmEntryToBatch(mParameters,
   2950                 CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
   2951                 sizeof(noiseRedStrength), &noiseRedStrength);
   2952     }
   2953 
   2954     cam_crop_region_t scalerCropRegion;
   2955     bool scalerCropSet = false;
   2956     if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
   2957         scalerCropRegion.left =
   2958             frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
   2959         scalerCropRegion.top =
   2960             frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
   2961         scalerCropRegion.width =
   2962             frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
   2963         scalerCropRegion.height =
   2964             frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
   2965         rc = AddSetParmEntryToBatch(mParameters,
   2966                 CAM_INTF_META_SCALER_CROP_REGION,
   2967                 sizeof(scalerCropRegion), &scalerCropRegion);
   2968         scalerCropSet = true;
   2969     }
   2970 
   2971     if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
   2972         int64_t sensorExpTime =
   2973             frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
   2974         rc = AddSetParmEntryToBatch(mParameters,
   2975                 CAM_INTF_META_SENSOR_EXPOSURE_TIME,
   2976                 sizeof(sensorExpTime), &sensorExpTime);
   2977     }
   2978 
   2979     if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
   2980         int64_t sensorFrameDuration =
   2981             frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
   2982         if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
   2983             sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
   2984         rc = AddSetParmEntryToBatch(mParameters,
   2985                 CAM_INTF_META_SENSOR_FRAME_DURATION,
   2986                 sizeof(sensorFrameDuration), &sensorFrameDuration);
   2987     }
   2988 
   2989     if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
   2990         int32_t sensorSensitivity =
   2991             frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
   2992         if (sensorSensitivity <
   2993                 gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
   2994             sensorSensitivity =
   2995                 gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
   2996         if (sensorSensitivity >
   2997                 gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
   2998             sensorSensitivity =
   2999                 gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
   3000         rc = AddSetParmEntryToBatch(mParameters,
   3001                 CAM_INTF_META_SENSOR_SENSITIVITY,
   3002                 sizeof(sensorSensitivity), &sensorSensitivity);
   3003     }
   3004 
   3005     if (frame_settings.exists(ANDROID_SHADING_MODE)) {
   3006         int32_t shadingMode =
   3007             frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
   3008         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_MODE,
   3009                 sizeof(shadingMode), &shadingMode);
   3010     }
   3011 
   3012     if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
   3013         uint8_t shadingStrength =
   3014             frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
   3015         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_STRENGTH,
   3016                 sizeof(shadingStrength), &shadingStrength);
   3017     }
   3018 
   3019     if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
   3020         uint8_t facedetectMode =
   3021             frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
   3022         rc = AddSetParmEntryToBatch(mParameters,
   3023                 CAM_INTF_META_STATS_FACEDETECT_MODE,
   3024                 sizeof(facedetectMode), &facedetectMode);
   3025     }
   3026 
   3027     if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
   3028         uint8_t histogramMode =
   3029             frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
   3030         rc = AddSetParmEntryToBatch(mParameters,
   3031                 CAM_INTF_META_STATS_HISTOGRAM_MODE,
   3032                 sizeof(histogramMode), &histogramMode);
   3033     }
   3034 
   3035     if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
   3036         uint8_t sharpnessMapMode =
   3037             frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
   3038         rc = AddSetParmEntryToBatch(mParameters,
   3039                 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
   3040                 sizeof(sharpnessMapMode), &sharpnessMapMode);
   3041     }
   3042 
   3043     if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
   3044         uint8_t tonemapMode =
   3045             frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
   3046         rc = AddSetParmEntryToBatch(mParameters,
   3047                 CAM_INTF_META_TONEMAP_MODE,
   3048                 sizeof(tonemapMode), &tonemapMode);
   3049     }
   3050     int point = 0;
   3051     if (frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE)) {
   3052         cam_tonemap_curve_t tonemapCurveBlue;
   3053         tonemapCurveBlue.tonemap_points_cnt =
   3054            gCamCapability[mCameraId]->max_tone_map_curve_points;
   3055         for (int i = 0; i < tonemapCurveBlue.tonemap_points_cnt; i++) {
   3056             for (int j = 0; j < 2; j++) {
   3057                tonemapCurveBlue.tonemap_points[i][j] =
   3058                   frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
   3059                point++;
   3060             }
   3061         }
   3062         rc = AddSetParmEntryToBatch(mParameters,
   3063                 CAM_INTF_META_TONEMAP_CURVE_BLUE,
   3064                 sizeof(tonemapCurveBlue), &tonemapCurveBlue);
   3065     }
   3066     point = 0;
   3067     if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN)) {
   3068         cam_tonemap_curve_t tonemapCurveGreen;
   3069         tonemapCurveGreen.tonemap_points_cnt =
   3070            gCamCapability[mCameraId]->max_tone_map_curve_points;
   3071         for (int i = 0; i < tonemapCurveGreen.tonemap_points_cnt; i++) {
   3072             for (int j = 0; j < 2; j++) {
   3073                tonemapCurveGreen.tonemap_points[i][j] =
   3074                   frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
   3075                point++;
   3076             }
   3077         }
   3078         rc = AddSetParmEntryToBatch(mParameters,
   3079                 CAM_INTF_META_TONEMAP_CURVE_GREEN,
   3080                 sizeof(tonemapCurveGreen), &tonemapCurveGreen);
   3081     }
   3082     point = 0;
   3083     if (frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
   3084         cam_tonemap_curve_t tonemapCurveRed;
   3085         tonemapCurveRed.tonemap_points_cnt =
   3086            gCamCapability[mCameraId]->max_tone_map_curve_points;
   3087         for (int i = 0; i < tonemapCurveRed.tonemap_points_cnt; i++) {
   3088             for (int j = 0; j < 2; j++) {
   3089                tonemapCurveRed.tonemap_points[i][j] =
   3090                   frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
   3091                point++;
   3092             }
   3093         }
   3094         rc = AddSetParmEntryToBatch(mParameters,
   3095                 CAM_INTF_META_TONEMAP_CURVE_RED,
   3096                 sizeof(tonemapCurveRed), &tonemapCurveRed);
   3097     }
   3098 
   3099     if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
   3100         uint8_t captureIntent =
   3101             frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
   3102         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
   3103                 sizeof(captureIntent), &captureIntent);
   3104     }
   3105 
   3106     if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
   3107         uint8_t blackLevelLock =
   3108             frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
   3109         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_BLACK_LEVEL_LOCK,
   3110                 sizeof(blackLevelLock), &blackLevelLock);
   3111     }
   3112 
   3113     if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
   3114         uint8_t lensShadingMapMode =
   3115             frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
   3116         rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
   3117                 sizeof(lensShadingMapMode), &lensShadingMapMode);
   3118     }
   3119 
   3120     if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
   3121         cam_area_t roi;
   3122         bool reset = true;
   3123         convertFromRegions(&roi, settings, ANDROID_CONTROL_AE_REGIONS);
   3124         if (scalerCropSet) {
   3125             reset = resetIfNeededROI(&roi, &scalerCropRegion);
   3126         }
   3127         if (reset) {
   3128             rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_ROI,
   3129                     sizeof(roi), &roi);
   3130         }
   3131     }
   3132 
   3133     if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
   3134         cam_area_t roi;
   3135         bool reset = true;
   3136         convertFromRegions(&roi, settings, ANDROID_CONTROL_AF_REGIONS);
   3137         if (scalerCropSet) {
   3138             reset = resetIfNeededROI(&roi, &scalerCropRegion);
   3139         }
   3140         if (reset) {
   3141             rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AF_ROI,
   3142                     sizeof(roi), &roi);
   3143         }
   3144     }
   3145 
   3146     if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) {
   3147         cam_area_t roi;
   3148         bool reset = true;
   3149         convertFromRegions(&roi, settings, ANDROID_CONTROL_AWB_REGIONS);
   3150         if (scalerCropSet) {
   3151             reset = resetIfNeededROI(&roi, &scalerCropRegion);
   3152         }
   3153         if (reset) {
   3154             rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AWB_REGIONS,
   3155                     sizeof(roi), &roi);
   3156         }
   3157     }
   3158     return rc;
   3159 }
   3160 
   3161 /*===========================================================================
   3162  * FUNCTION   : getJpegSettings
   3163  *
   3164  * DESCRIPTION: save the jpeg settings in the HAL
   3165  *
   3166  *
   3167  * PARAMETERS :
   3168  *   @settings  : frame settings information from framework
   3169  *
   3170  *
   3171  * RETURN     : success: NO_ERROR
   3172  *              failure:
   3173  *==========================================================================*/
   3174 int QCamera3HardwareInterface::getJpegSettings
   3175                                   (const camera_metadata_t *settings)
   3176 {
   3177     if (mJpegSettings) {
   3178         if (mJpegSettings->gps_timestamp) {
   3179             free(mJpegSettings->gps_timestamp);
   3180             mJpegSettings->gps_timestamp = NULL;
   3181         }
   3182         if (mJpegSettings->gps_coordinates) {
   3183             for (int i = 0; i < 3; i++) {
   3184                 free(mJpegSettings->gps_coordinates[i]);
   3185                 mJpegSettings->gps_coordinates[i] = NULL;
   3186             }
   3187         }
   3188         free(mJpegSettings);
   3189         mJpegSettings = NULL;
   3190     }
   3191     mJpegSettings = (jpeg_settings_t*) malloc(sizeof(jpeg_settings_t));
   3192     CameraMetadata jpeg_settings;
   3193     jpeg_settings = settings;
   3194 
   3195     if (jpeg_settings.exists(ANDROID_JPEG_ORIENTATION)) {
   3196         mJpegSettings->jpeg_orientation =
   3197             jpeg_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
   3198     } else {
   3199         mJpegSettings->jpeg_orientation = 0;
   3200     }
   3201     if (jpeg_settings.exists(ANDROID_JPEG_QUALITY)) {
   3202         mJpegSettings->jpeg_quality =
   3203             jpeg_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
   3204     } else {
   3205         mJpegSettings->jpeg_quality = 85;
   3206     }
   3207     if (jpeg_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
   3208         mJpegSettings->thumbnail_size.width =
   3209             jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
   3210         mJpegSettings->thumbnail_size.height =
   3211             jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
   3212     } else {
   3213         mJpegSettings->thumbnail_size.width = 0;
   3214         mJpegSettings->thumbnail_size.height = 0;
   3215     }
   3216     if (jpeg_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
   3217         for (int i = 0; i < 3; i++) {
    3218             mJpegSettings->gps_coordinates[i] = (double*)malloc(sizeof(double));
   3219             *(mJpegSettings->gps_coordinates[i]) =
   3220                 jpeg_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[i];
   3221         }
   3222     } else{
   3223        for (int i = 0; i < 3; i++) {
   3224             mJpegSettings->gps_coordinates[i] = NULL;
   3225         }
   3226     }
   3227 
   3228     if (jpeg_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
    3229         mJpegSettings->gps_timestamp = (int64_t*)malloc(sizeof(int64_t));
   3230         *(mJpegSettings->gps_timestamp) =
   3231             jpeg_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
   3232     } else {
   3233         mJpegSettings->gps_timestamp = NULL;
   3234     }
   3235 
   3236     if (jpeg_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
   3237         int len = jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count;
   3238         for (int i = 0; i < len; i++) {
   3239             mJpegSettings->gps_processing_method[i] =
   3240                 jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8[i];
   3241         }
   3242         if (mJpegSettings->gps_processing_method[len-1] != '\0') {
   3243             mJpegSettings->gps_processing_method[len] = '\0';
   3244         }
   3245     } else {
   3246         mJpegSettings->gps_processing_method[0] = '\0';
   3247     }
   3248 
   3249     mJpegSettings->sensor_sensitivity = mMetadataResponse.iso_speed;
   3250 
   3251     mJpegSettings->sensor_exposure_time = mMetadataResponse.exposure_time;
   3252 
   3253     if (jpeg_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
   3254         mJpegSettings->lens_focal_length =
   3255             jpeg_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
   3256     }
   3257     if (jpeg_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
   3258         mJpegSettings->exposure_compensation =
   3259             jpeg_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
   3260     }
   3261     mJpegSettings->exposure_comp_step = gCamCapability[mCameraId]->exp_compensation_step;
   3262     mJpegSettings->max_jpeg_size = calcMaxJpegSize();
   3263     mJpegSettings->is_jpeg_format = true;
   3264     mJpegSettings->min_required_pp_mask = gCamCapability[mCameraId]->min_required_pp_mask;
   3265     mJpegSettings->f_number = gCamCapability[mCameraId]->apertures[0];
   3266 
   3267     if (jpeg_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
   3268         mJpegSettings->wb =
   3269             jpeg_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
   3270     } else {
   3271         mJpegSettings->wb = 0;
   3272     }
   3273 
   3274     if (jpeg_settings.exists(ANDROID_FLASH_MODE)) {
   3275         mJpegSettings->flash =
   3276             jpeg_settings.find(ANDROID_FLASH_MODE).data.u8[0];
   3277     } else {
   3278         mJpegSettings->flash = 0;
   3279     }
   3280 
   3281 
   3282     return 0;
   3283 }
   3284 
   3285 /*===========================================================================
   3286  * FUNCTION   : captureResultCb
   3287  *
   3288  * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
   3289  *
   3290  * PARAMETERS :
   3291  *   @frame  : frame information from mm-camera-interface
   3292  *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
   3293  *   @userdata: userdata
   3294  *
   3295  * RETURN     : NONE
   3296  *==========================================================================*/
   3297 void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
   3298                 camera3_stream_buffer_t *buffer,
   3299                 uint32_t frame_number, void *userdata)
   3300 {
   3301     QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
   3302     if (hw == NULL) {
   3303         ALOGE("%s: Invalid hw %p", __func__, hw);
   3304         return;
   3305     }
   3306 
   3307     hw->captureResultCb(metadata, buffer, frame_number);
   3308     return;
   3309 }
   3310 
   3311 
   3312 /*===========================================================================
   3313  * FUNCTION   : initialize
   3314  *
   3315  * DESCRIPTION: Pass framework callback pointers to HAL
   3316  *
   3317  * PARAMETERS :
   3318  *
   3319  *
   3320  * RETURN     : Success : 0
   3321  *              Failure: -ENODEV
   3322  *==========================================================================*/
   3323 
   3324 int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
   3325                                   const camera3_callback_ops_t *callback_ops)
   3326 {
   3327     ALOGV("%s: E", __func__);
   3328     QCamera3HardwareInterface *hw =
   3329         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   3330     if (!hw) {
   3331         ALOGE("%s: NULL camera device", __func__);
   3332         return -ENODEV;
   3333     }
   3334 
   3335     int rc = hw->initialize(callback_ops);
   3336     ALOGV("%s: X", __func__);
   3337     return rc;
   3338 }
   3339 
   3340 /*===========================================================================
   3341  * FUNCTION   : configure_streams
   3342  *
    3343  * DESCRIPTION: Entry point to set up the streams requested by the framework
   3344  *
   3345  * PARAMETERS :
   3346  *
   3347  *
   3348  * RETURN     : Success: 0
   3349  *              Failure: -EINVAL (if stream configuration is invalid)
   3350  *                       -ENODEV (fatal error)
   3351  *==========================================================================*/
   3352 
   3353 int QCamera3HardwareInterface::configure_streams(
   3354         const struct camera3_device *device,
   3355         camera3_stream_configuration_t *stream_list)
   3356 {
   3357     ALOGV("%s: E", __func__);
   3358     QCamera3HardwareInterface *hw =
   3359         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   3360     if (!hw) {
   3361         ALOGE("%s: NULL camera device", __func__);
   3362         return -ENODEV;
   3363     }
   3364     int rc = hw->configureStreams(stream_list);
   3365     ALOGV("%s: X", __func__);
   3366     return rc;
   3367 }
   3368 
   3369 /*===========================================================================
   3370  * FUNCTION   : register_stream_buffers
   3371  *
   3372  * DESCRIPTION: Register stream buffers with the device
   3373  *
   3374  * PARAMETERS :
   3375  *
    3376  * RETURN     : 0 on success; negative error code on failure
   3377  *==========================================================================*/
   3378 int QCamera3HardwareInterface::register_stream_buffers(
   3379         const struct camera3_device *device,
   3380         const camera3_stream_buffer_set_t *buffer_set)
   3381 {
   3382     ALOGV("%s: E", __func__);
   3383     QCamera3HardwareInterface *hw =
   3384         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   3385     if (!hw) {
   3386         ALOGE("%s: NULL camera device", __func__);
   3387         return -ENODEV;
   3388     }
   3389     int rc = hw->registerStreamBuffers(buffer_set);
   3390     ALOGV("%s: X", __func__);
   3391     return rc;
   3392 }
   3393 
   3394 /*===========================================================================
   3395  * FUNCTION   : construct_default_request_settings
   3396  *
   3397  * DESCRIPTION: Configure a settings buffer to meet the required use case
   3398  *
   3399  * PARAMETERS :
   3400  *
   3401  *
   3402  * RETURN     : Success: Return valid metadata
   3403  *              Failure: Return NULL
   3404  *==========================================================================*/
   3405 const camera_metadata_t* QCamera3HardwareInterface::
   3406     construct_default_request_settings(const struct camera3_device *device,
   3407                                         int type)
   3408 {
   3409 
   3410     ALOGV("%s: E", __func__);
   3411     camera_metadata_t* fwk_metadata = NULL;
   3412     QCamera3HardwareInterface *hw =
   3413         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   3414     if (!hw) {
   3415         ALOGE("%s: NULL camera device", __func__);
   3416         return NULL;
   3417     }
   3418 
   3419     fwk_metadata = hw->translateCapabilityToMetadata(type);
   3420 
   3421     ALOGV("%s: X", __func__);
   3422     return fwk_metadata;
   3423 }
   3424 
   3425 /*===========================================================================
   3426  * FUNCTION   : process_capture_request
   3427  *
    3428  * DESCRIPTION: Entry point to submit a single capture request to the HAL
   3429  *
   3430  * PARAMETERS :
   3431  *
   3432  *
    3433  * RETURN     : 0 on success; negative error code on failure
   3434  *==========================================================================*/
   3435 int QCamera3HardwareInterface::process_capture_request(
   3436                     const struct camera3_device *device,
   3437                     camera3_capture_request_t *request)
   3438 {
   3439     ALOGV("%s: E", __func__);
   3440     QCamera3HardwareInterface *hw =
   3441         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   3442     if (!hw) {
   3443         ALOGE("%s: NULL camera device", __func__);
   3444         return -EINVAL;
   3445     }
   3446 
   3447     int rc = hw->processCaptureRequest(request);
   3448     ALOGV("%s: X", __func__);
   3449     return rc;
   3450 }
   3451 
   3452 /*===========================================================================
   3453  * FUNCTION   : get_metadata_vendor_tag_ops
   3454  *
    3455  * DESCRIPTION: Query the HAL for vendor metadata tag query operations
   3456  *
   3457  * PARAMETERS :
   3458  *
   3459  *
    3460  * RETURN     : NONE
   3461  *==========================================================================*/
   3462 
   3463 void QCamera3HardwareInterface::get_metadata_vendor_tag_ops(
   3464                 const struct camera3_device *device,
   3465                 vendor_tag_query_ops_t* ops)
   3466 {
   3467     ALOGV("%s: E", __func__);
   3468     QCamera3HardwareInterface *hw =
   3469         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   3470     if (!hw) {
   3471         ALOGE("%s: NULL camera device", __func__);
   3472         return;
   3473     }
   3474 
   3475     hw->getMetadataVendorTagOps(ops);
   3476     ALOGV("%s: X", __func__);
   3477     return;
   3478 }
   3479 
   3480 /*===========================================================================
   3481  * FUNCTION   : dump
   3482  *
    3483  * DESCRIPTION: Dump HAL debug state to the given file descriptor
   3484  *
   3485  * PARAMETERS :
   3486  *
   3487  *
    3488  * RETURN     : NONE
   3489  *==========================================================================*/
   3490 
   3491 void QCamera3HardwareInterface::dump(
   3492                 const struct camera3_device *device, int fd)
   3493 {
   3494     ALOGV("%s: E", __func__);
   3495     QCamera3HardwareInterface *hw =
   3496         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   3497     if (!hw) {
   3498         ALOGE("%s: NULL camera device", __func__);
   3499         return;
   3500     }
   3501 
   3502     hw->dump(fd);
   3503     ALOGV("%s: X", __func__);
   3504     return;
   3505 }
   3506 
   3507 /*===========================================================================
   3508  * FUNCTION   : close_camera_device
   3509  *
    3510  * DESCRIPTION: Close the camera device and mark the session inactive
   3511  *
   3512  * PARAMETERS :
   3513  *
   3514  *
    3515  * RETURN     : NO_ERROR on success; BAD_VALUE if device is invalid
   3516  *==========================================================================*/
   3517 int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
   3518 {
   3519     ALOGV("%s: E", __func__);
   3520     int ret = NO_ERROR;
   3521     QCamera3HardwareInterface *hw =
   3522         reinterpret_cast<QCamera3HardwareInterface *>(
   3523             reinterpret_cast<camera3_device_t *>(device)->priv);
   3524     if (!hw) {
   3525         ALOGE("NULL camera device");
   3526         return BAD_VALUE;
   3527     }
   3528     delete hw;
   3529 
   3530     pthread_mutex_lock(&mCameraSessionLock);
   3531     mCameraSessionActive = 0;
   3532     pthread_mutex_unlock(&mCameraSessionLock);
   3533     ALOGV("%s: X", __func__);
   3534     return ret;
   3535 }
   3536 
   3537 /*===========================================================================
   3538  * FUNCTION   : getWaveletDenoiseProcessPlate
   3539  *
   3540  * DESCRIPTION: query wavelet denoise process plate
   3541  *
   3542  * PARAMETERS : None
   3543  *
    3544  * RETURN     : WNR process plate value
   3545  *==========================================================================*/
   3546 cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
   3547 {
   3548     char prop[PROPERTY_VALUE_MAX];
   3549     memset(prop, 0, sizeof(prop));
   3550     property_get("persist.denoise.process.plates", prop, "0");
   3551     int processPlate = atoi(prop);
   3552     switch(processPlate) {
   3553     case 0:
   3554         return CAM_WAVELET_DENOISE_YCBCR_PLANE;
   3555     case 1:
   3556         return CAM_WAVELET_DENOISE_CBCR_ONLY;
   3557     case 2:
   3558         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
   3559     case 3:
   3560         return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
   3561     default:
   3562         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
   3563     }
   3564 }
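
         /*
          * Illustrative note (assumes the standard Android property tooling): the
          * plate selection can be overridden at runtime before the camera is opened,
          * e.g. from an adb shell:
          *
          *     adb shell setprop persist.denoise.process.plates 2
          *
          * With the property unset, the default "0" selects
          * CAM_WAVELET_DENOISE_YCBCR_PLANE; integer values outside 0-3 fall through
          * to the default case (CAM_WAVELET_DENOISE_STREAMLINE_YCBCR).
          */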
   3565 
   3566 /*===========================================================================
   3567  * FUNCTION   : needRotationReprocess
   3568  *
    3569  * DESCRIPTION: check whether rotation needs to be done by reprocess in pp
   3570  *
   3571  * PARAMETERS : none
   3572  *
   3573  * RETURN     : true: needed
   3574  *              false: no need
   3575  *==========================================================================*/
   3576 bool QCamera3HardwareInterface::needRotationReprocess()
   3577 {
   3578 
   3579     if (!mJpegSettings->is_jpeg_format) {
   3580         // RAW image, no need to reprocess
   3581         return false;
   3582     }
   3583 
   3584     if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0 &&
   3585         mJpegSettings->jpeg_orientation > 0) {
   3586         // current rotation is not zero, and pp has the capability to process rotation
   3587         ALOGD("%s: need do reprocess for rotation", __func__);
   3588         return true;
   3589     }
   3590 
   3591     return false;
   3592 }
   3593 
   3594 /*===========================================================================
   3595  * FUNCTION   : needReprocess
   3596  *
    3597  * DESCRIPTION: check whether reprocess is needed
   3598  *
   3599  * PARAMETERS : none
   3600  *
   3601  * RETURN     : true: needed
   3602  *              false: no need
   3603  *==========================================================================*/
   3604 bool QCamera3HardwareInterface::needReprocess()
   3605 {
   3606     if (!mJpegSettings->is_jpeg_format) {
   3607         // RAW image, no need to reprocess
   3608         return false;
   3609     }
   3610 
   3611     if ((mJpegSettings->min_required_pp_mask > 0) ||
   3612          isWNREnabled()) {
   3613         // TODO: add for ZSL HDR later
   3614         // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
   3615         ALOGD("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
   3616         return true;
   3617     }
   3618     return needRotationReprocess();
   3619 }
   3620 
   3621 /*===========================================================================
   3622  * FUNCTION   : addOnlineReprocChannel
   3623  *
    3624  * DESCRIPTION: add an online reprocess channel that will do reprocess on frames
   3625  *              coming from input channel
   3626  *
   3627  * PARAMETERS :
   3628  *   @pInputChannel : ptr to input channel whose frames will be post-processed
   3629  *
   3630  * RETURN     : Ptr to the newly created channel obj. NULL if failed.
   3631  *==========================================================================*/
   3632 QCamera3ReprocessChannel *QCamera3HardwareInterface::addOnlineReprocChannel(
   3633                                                       QCamera3Channel *pInputChannel, QCamera3PicChannel *picChHandle)
   3634 {
   3635     int32_t rc = NO_ERROR;
   3636     QCamera3ReprocessChannel *pChannel = NULL;
   3637     if (pInputChannel == NULL) {
   3638         ALOGE("%s: input channel obj is NULL", __func__);
   3639         return NULL;
   3640     }
   3641 
   3642     pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
   3643             mCameraHandle->ops, NULL, pInputChannel->mPaddingInfo, this, picChHandle);
   3644     if (NULL == pChannel) {
   3645         ALOGE("%s: no mem for reprocess channel", __func__);
   3646         return NULL;
   3647     }
   3648 
    3649     // Capture channel: only the snapshot and postview streams need to start together
   3650     mm_camera_channel_attr_t attr;
   3651     memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
   3652     attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
   3653     attr.max_unmatched_frames = getMaxUnmatchedFramesInQueue();
   3654     rc = pChannel->initialize();
   3655     if (rc != NO_ERROR) {
   3656         ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
   3657         delete pChannel;
   3658         return NULL;
   3659     }
   3660 
   3661     // pp feature config
   3662     cam_pp_feature_config_t pp_config;
   3663     memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
   3664     if (gCamCapability[mCameraId]->min_required_pp_mask & CAM_QCOM_FEATURE_SHARPNESS) {
   3665         pp_config.feature_mask |= CAM_QCOM_FEATURE_SHARPNESS;
   3666         pp_config.sharpness = 10;
   3667     }
   3668 
   3669     if (isWNREnabled()) {
   3670         pp_config.feature_mask |= CAM_QCOM_FEATURE_DENOISE2D;
   3671         pp_config.denoise2d.denoise_enable = 1;
   3672         pp_config.denoise2d.process_plates = getWaveletDenoiseProcessPlate();
   3673     }
   3674     if (needRotationReprocess()) {
   3675         pp_config.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
   3676         int rotation = mJpegSettings->jpeg_orientation;
   3677         if (rotation == 0) {
   3678             pp_config.rotation = ROTATE_0;
   3679         } else if (rotation == 90) {
   3680             pp_config.rotation = ROTATE_90;
   3681         } else if (rotation == 180) {
   3682             pp_config.rotation = ROTATE_180;
   3683         } else if (rotation == 270) {
   3684             pp_config.rotation = ROTATE_270;
   3685         }
   3686     }
   3687 
    3688     rc = pChannel->addReprocStreamsFromSource(pp_config,
    3689                                               pInputChannel,
    3690                                               mMetadataChannel);
   3691 
   3692     if (rc != NO_ERROR) {
   3693         delete pChannel;
   3694         return NULL;
   3695     }
   3696     return pChannel;
   3697 }
   3698 
   3699 int QCamera3HardwareInterface::getMaxUnmatchedFramesInQueue()
   3700 {
   3701     return gCamCapability[mCameraId]->min_num_pp_bufs;
   3702 }
   3703 
   3704 bool QCamera3HardwareInterface::isWNREnabled() {
   3705     return gCamCapability[mCameraId]->isWnrSupported;
   3706 }
   3707 
    3708 } // end namespace qcamera
   3709