Home | History | Annotate | Download | only in HAL3
      1 /* Copyright (c) 2012-2015, The Linux Foundataion. All rights reserved.
      2 *
      3 * Redistribution and use in source and binary forms, with or without
      4 * modification, are permitted provided that the following conditions are
      5 * met:
      6 *     * Redistributions of source code must retain the above copyright
      7 *       notice, this list of conditions and the following disclaimer.
      8 *     * Redistributions in binary form must reproduce the above
      9 *       copyright notice, this list of conditions and the following
     10 *       disclaimer in the documentation and/or other materials provided
     11 *       with the distribution.
     12 *     * Neither the name of The Linux Foundation nor the names of its
     13 *       contributors may be used to endorse or promote products derived
     14 *       from this software without specific prior written permission.
     15 *
     16 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
     17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
     18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
     19 * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
     20 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
     21 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
     22 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
     23 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
     24 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
     25 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
     26 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
     27 *
     28 */
     29 
     30 #define ATRACE_TAG ATRACE_TAG_CAMERA
     31 #define LOG_TAG "QCamera3HWI"
     32 //#define LOG_NDEBUG 0
     33 
     34 #define __STDC_LIMIT_MACROS
     35 #include <cutils/properties.h>
     36 #include <hardware/camera3.h>
     37 #include <camera/CameraMetadata.h>
     38 #include <stdio.h>
     39 #include <stdlib.h>
     40 #include <fcntl.h>
     41 #include <stdint.h>
     42 #include <utils/Log.h>
     43 #include <utils/Errors.h>
     44 #include <utils/Trace.h>
     45 #include <sync/sync.h>
     46 #include <gralloc_priv.h>
     47 #include "util/QCameraFlash.h"
     48 #include "QCamera3HWI.h"
     49 #include "QCamera3Mem.h"
     50 #include "QCamera3Channel.h"
     51 #include "QCamera3PostProc.h"
     52 #include "QCamera3VendorTags.h"
     53 
     54 using namespace android;
     55 
     56 namespace qcamera {
     57 
     58 #define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
     59 
     60 #define EMPTY_PIPELINE_DELAY 2
     61 #define PARTIAL_RESULT_COUNT 2
     62 #define FRAME_SKIP_DELAY     0
     63 #define CAM_MAX_SYNC_LATENCY 4
     64 
     65 #define VIDEO_4K_WIDTH  3840
     66 #define VIDEO_4K_HEIGHT 2160
     67 
     68 #define MAX_RAW_STREAMS        1
     69 #define MAX_STALLING_STREAMS   1
     70 #define MAX_PROCESSED_STREAMS  3
     71 #define TIMEOUT_NEVER -1
     72 
     73 cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
     74 const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
     75 volatile uint32_t gCamHal3LogLevel = 1;
     76 
     77 const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
     78     {"On",  CAM_CDS_MODE_ON},
     79     {"Off", CAM_CDS_MODE_OFF},
     80     {"Auto",CAM_CDS_MODE_AUTO}
     81 };
     82 
     83 const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
     84     { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
     85     { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
     86     { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
     87     { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
     88     { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
     89     { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
     90     { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
     91     { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
     92     { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
     93 };
     94 
     95 const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
     96     { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
     97     { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
     98     { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
     99     { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    100     { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    101     { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    102     { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    103     { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    104     { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
    105 };
    106 
    107 const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    108     { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    109     { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    110     { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    111     { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    112     { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    113     { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    114     { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    115     { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    116     { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    117     { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    118     { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    119     { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    120     { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    121     { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    122     { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    123     { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
    124 };
    125 
    126 const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    127     { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    128     { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    129     { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    130     { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    131     { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    132     { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    133     { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
    134 };
    135 
    136 const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    137     { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
    138             CAM_COLOR_CORRECTION_ABERRATION_OFF },
    139     { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
    140             CAM_COLOR_CORRECTION_ABERRATION_FAST },
    141     { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
    142             CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
    143 };
    144 
    145 const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    146     { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    147     { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    148     { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    149     { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
    150 };
    151 
    152 const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    153     { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    154     { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    155     { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    156     { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    157     { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
    158 };
    159 
    160 const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    161     { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    162     { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    163     { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
    164 };
    165 
    166 const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    167     { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    168     { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
    169 };
    170 
    171 const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    172     { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
    173       CAM_FOCUS_UNCALIBRATED },
    174     { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
    175       CAM_FOCUS_APPROXIMATE },
    176     { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
    177       CAM_FOCUS_CALIBRATED }
    178 };
    179 
    180 const int32_t available_thumbnail_sizes[] = {0, 0,
    181                                              176, 144,
    182                                              320, 240,
    183                                              432, 288,
    184                                              480, 288,
    185                                              512, 288,
    186                                              512, 384};
    187 
    188 const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    189     { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,          CAM_TEST_PATTERN_OFF   },
    190     { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,  CAM_TEST_PATTERN_SOLID_COLOR },
    191     { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,   CAM_TEST_PATTERN_COLOR_BARS },
    192     { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    193     { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,          CAM_TEST_PATTERN_PN9 },
    194 };
    195 
    196 /* Since there is no mapping for all the options some Android enum are not listed.
    197  * Also, the order in this list is important because while mapping from HAL to Android it will
    198  * traverse from lower to higher index which means that for HAL values that are map to different
    199  * Android values, the traverse logic will select the first one found.
    200  */
    201 const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    202     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    203     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    204     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    205     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    206     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    207     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    208     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    209     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    210     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    211     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    212     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    213     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    214     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    215     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    216     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    217     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
    218 };
    219 
    220 camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    221     initialize:                         QCamera3HardwareInterface::initialize,
    222     configure_streams:                  QCamera3HardwareInterface::configure_streams,
    223     register_stream_buffers:            NULL,
    224     construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
    225     process_capture_request:            QCamera3HardwareInterface::process_capture_request,
    226     get_metadata_vendor_tag_ops:        NULL,
    227     dump:                               QCamera3HardwareInterface::dump,
    228     flush:                              QCamera3HardwareInterface::flush,
    229     reserved:                           {0},
    230 };
    231 
    232 /*===========================================================================
    233  * FUNCTION   : QCamera3HardwareInterface
    234  *
    235  * DESCRIPTION: constructor of QCamera3HardwareInterface
    236  *
    237  * PARAMETERS :
    238  *   @cameraId  : camera ID
    239  *
    240  * RETURN     : none
    241  *==========================================================================*/
    242 QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId,
    243                         const camera_module_callbacks_t *callbacks)
    244     : mCameraId(cameraId),
    245       mCameraHandle(NULL),
    246       mCameraOpened(false),
    247       mCameraInitialized(false),
    248       mCallbackOps(NULL),
    249       mMetadataChannel(NULL),
    250       mPictureChannel(NULL),
    251       mRawChannel(NULL),
    252       mSupportChannel(NULL),
    253       mRawDumpChannel(NULL),
    254       mFirstRequest(false),
    255       mFlush(false),
    256       mParamHeap(NULL),
    257       mParameters(NULL),
    258       mPrevParameters(NULL),
    259       m_bIsVideo(false),
    260       m_bIs4KVideo(false),
    261       mEisEnable(0),
    262       mLoopBackResult(NULL),
    263       mMinProcessedFrameDuration(0),
    264       mMinJpegFrameDuration(0),
    265       mMinRawFrameDuration(0),
    266       m_pPowerModule(NULL),
    267       mMetaFrameCount(0),
    268       mCallbacks(callbacks),
    269       mCaptureIntent(0)
    270 {
    271     getLogLevel();
    272     mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    273     mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
    274     mCameraDevice.common.close = close_camera_device;
    275     mCameraDevice.ops = &mCameraOps;
    276     mCameraDevice.priv = this;
    277     gCamCapability[cameraId]->version = CAM_HAL_V3;
    278     // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    279     //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    280     gCamCapability[cameraId]->min_num_pp_bufs = 3;
    281 
    282     pthread_cond_init(&mRequestCond, NULL);
    283     mPendingRequest = 0;
    284     mCurrentRequestId = -1;
    285     pthread_mutex_init(&mMutex, NULL);
    286 
    287     for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
    288         mDefaultMetadata[i] = NULL;
    289 
    290 #ifdef HAS_MULTIMEDIA_HINTS
    291     if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
    292         ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
    293     }
    294 #endif
    295 
    296     char prop[PROPERTY_VALUE_MAX];
    297     property_get("persist.camera.raw.dump", prop, "0");
    298     mEnableRawDump = atoi(prop);
    299     if (mEnableRawDump)
    300         CDBG("%s: Raw dump from Camera HAL enabled", __func__);
    301 
    302     mPendingBuffersMap.num_buffers = 0;
    303     mPendingBuffersMap.last_frame_number = -1;
    304 }
    305 
    306 /*===========================================================================
    307  * FUNCTION   : ~QCamera3HardwareInterface
    308  *
    309  * DESCRIPTION: destructor of QCamera3HardwareInterface
    310  *
    311  * PARAMETERS : none
    312  *
    313  * RETURN     : none
    314  *==========================================================================*/
    315 QCamera3HardwareInterface::~QCamera3HardwareInterface()
    316 {
    317     CDBG("%s: E", __func__);
    318     /* We need to stop all streams before deleting any stream */
    319 
    320     bool hasPendingBuffers = (mPendingBuffersMap.num_buffers > 0);
    321 
    322     if (mRawDumpChannel) {
    323         mRawDumpChannel->stop();
    324     }
    325 
    326     // NOTE: 'camera3_stream_t *' objects are already freed at
    327     //        this stage by the framework
    328     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
    329         it != mStreamInfo.end(); it++) {
    330         QCamera3Channel *channel = (*it)->channel;
    331         if (channel) {
    332             channel->stop();
    333         }
    334     }
    335     if (mSupportChannel)
    336         mSupportChannel->stop();
    337 
    338     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
    339         it != mStreamInfo.end(); it++) {
    340         QCamera3Channel *channel = (*it)->channel;
    341         if (channel)
    342             delete channel;
    343         free (*it);
    344     }
    345     if (mSupportChannel) {
    346         delete mSupportChannel;
    347         mSupportChannel = NULL;
    348     }
    349 
    350     if (mRawDumpChannel) {
    351         delete mRawDumpChannel;
    352         mRawDumpChannel = NULL;
    353     }
    354     mPictureChannel = NULL;
    355 
    356     /* Clean up all channels */
    357     if (mCameraInitialized) {
    358         if (mMetadataChannel) {
    359             mMetadataChannel->stop();
    360             delete mMetadataChannel;
    361             mMetadataChannel = NULL;
    362         }
    363 
    364         memset(mParameters, 0, sizeof(parm_buffer_t));
    365         // Check if there is still pending buffer not yet returned.
    366         if (hasPendingBuffers) {
    367             for (auto& pendingBuffer : mPendingBuffersMap.mPendingBufferList) {
    368                 ALOGE("%s: Buffer not yet returned for stream. Frame number %d, format 0x%x, width %d, height %d",
    369                         __func__, pendingBuffer.frame_number, pendingBuffer.stream->format, pendingBuffer.stream->width,
    370                         pendingBuffer.stream->height);
    371             }
    372             ALOGE("%s: Last requested frame number is %d", __func__, mPendingBuffersMap.last_frame_number);
    373 
    374             uint8_t restart = TRUE;
    375             AddSetParmEntryToBatch(mParameters, CAM_INTF_META_DAEMON_RESTART,
    376                     sizeof(restart), &restart);
    377         }
    378 
    379         int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
    380         if (rc < 0) {
    381             ALOGE("%s: set_parms failed for unconfigure", __func__);
    382         }
    383         deinitParameters();
    384     }
    385 
    386     if (mCameraOpened)
    387         closeCamera();
    388 
    389     mPendingBuffersMap.mPendingBufferList.clear();
    390     mPendingRequestsList.clear();
    391     mPendingReprocessResultList.clear();
    392 
    393     for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
    394         if (mDefaultMetadata[i])
    395             free_camera_metadata(mDefaultMetadata[i]);
    396 
    397     pthread_cond_destroy(&mRequestCond);
    398 
    399     pthread_mutex_destroy(&mMutex);
    400 
    401     if (hasPendingBuffers) {
    402         ALOGE("%s: Not all buffers were returned. Notified the camera daemon process to restart."
    403                 " Exiting here...", __func__);
    404         exit(EXIT_FAILURE);
    405     }
    406     CDBG("%s: X", __func__);
    407 }
    408 
    409 /*===========================================================================
    410  * FUNCTION   : camEvtHandle
    411  *
    412  * DESCRIPTION: Function registered to mm-camera-interface to handle events
    413  *
    414  * PARAMETERS :
    415  *   @camera_handle : interface layer camera handle
    416  *   @evt           : ptr to event
    417  *   @user_data     : user data ptr
    418  *
    419  * RETURN     : none
    420  *==========================================================================*/
    421 void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
    422                                           mm_camera_event_t *evt,
    423                                           void *user_data)
    424 {
    425     QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    426     camera3_notify_msg_t notify_msg;
    427     if (obj && evt) {
    428         switch(evt->server_event_type) {
    429             case CAM_EVENT_TYPE_DAEMON_DIED:
    430                 ALOGE("%s: Fatal, camera daemon died", __func__);
    431 
    432                 //close the camera backend
    433                 if (obj->mCameraHandle && obj->mCameraHandle->camera_handle
    434                         && obj->mCameraHandle->ops) {
    435                     obj->mCameraHandle->ops->error_close_camera(obj->mCameraHandle->camera_handle);
    436                 } else {
    437                     ALOGE("%s: Could not close camera on error because the handle or ops is NULL",
    438                             __func__);
    439                 }
    440                 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
    441                 notify_msg.type = CAMERA3_MSG_ERROR;
    442                 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
    443                 notify_msg.message.error.error_stream = NULL;
    444                 notify_msg.message.error.frame_number = 0;
    445                 obj->mCallbackOps->notify(obj->mCallbackOps, &notify_msg);
    446                 break;
    447 
    448             case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
    449                 CDBG("%s: HAL got request pull from Daemon", __func__);
    450                 pthread_mutex_lock(&obj->mMutex);
    451                 obj->mWokenUpByDaemon = true;
    452                 obj->unblockRequestIfNecessary();
    453                 pthread_mutex_unlock(&obj->mMutex);
    454                 break;
    455 
    456             default:
    457                 CDBG_HIGH("%s: Warning: Unhandled event %d", __func__,
    458                         evt->server_event_type);
    459                 break;
    460         }
    461     } else {
    462         ALOGE("%s: NULL user_data/evt", __func__);
    463     }
    464 }
    465 
    466 /*===========================================================================
    467  * FUNCTION   : openCamera
    468  *
    469  * DESCRIPTION: open camera
    470  *
    471  * PARAMETERS :
    472  *   @hw_device  : double ptr for camera device struct
    473  *
    474  * RETURN     : int32_t type of status
    475  *              NO_ERROR  -- success
    476  *              none-zero failure code
    477  *==========================================================================*/
    478 int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
    479 {
    480     int rc = 0;
    481     if (mCameraOpened) {
    482         *hw_device = NULL;
    483         return PERMISSION_DENIED;
    484     }
    485 
    486     rc = openCamera();
    487     if (rc == 0) {
    488         *hw_device = &mCameraDevice.common;
    489     } else
    490         *hw_device = NULL;
    491 
    492 #ifdef HAS_MULTIMEDIA_HINTS
    493     if (rc == 0) {
    494         if (m_pPowerModule) {
    495             if (m_pPowerModule->powerHint) {
    496                 m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
    497                         (void *)"state=1");
    498             }
    499         }
    500     }
    501 #endif
    502     return rc;
    503 }
    504 
    505 /*===========================================================================
    506  * FUNCTION   : openCamera
    507  *
    508  * DESCRIPTION: open camera
    509  *
    510  * PARAMETERS : none
    511  *
    512  * RETURN     : int32_t type of status
    513  *              NO_ERROR  -- success
    514  *              none-zero failure code
    515  *==========================================================================*/
    516 int QCamera3HardwareInterface::openCamera()
    517 {
    518     int rc = 0;
    519 
    520     ATRACE_CALL();
    521     if (mCameraHandle) {
    522         ALOGE("Failure: Camera already opened");
    523         return ALREADY_EXISTS;
    524     }
    525 
    526     rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    527     if (rc < 0) {
    528         ALOGE("%s: Failed to reserve flash for camera id: %d",
    529                 __func__,
    530                 mCameraId);
    531         return UNKNOWN_ERROR;
    532     }
    533 
    534     mCameraHandle = camera_open(mCameraId);
    535     if (!mCameraHandle) {
    536         ALOGE("camera_open failed.");
    537         return UNKNOWN_ERROR;
    538     }
    539 
    540     mCameraOpened = true;
    541 
    542     rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
    543             camEvtHandle, (void *)this);
    544 
    545     if (rc < 0) {
    546         ALOGE("%s: Error, failed to register event callback", __func__);
    547         /* Not closing camera here since it is already handled in destructor */
    548         return FAILED_TRANSACTION;
    549     }
    550 
    551     return NO_ERROR;
    552 }
    553 
    554 /*===========================================================================
    555  * FUNCTION   : closeCamera
    556  *
    557  * DESCRIPTION: close camera
    558  *
    559  * PARAMETERS : none
    560  *
    561  * RETURN     : int32_t type of status
    562  *              NO_ERROR  -- success
    563  *              none-zero failure code
    564  *==========================================================================*/
    565 int QCamera3HardwareInterface::closeCamera()
    566 {
    567     ATRACE_CALL();
    568     int rc = NO_ERROR;
    569 
    570     rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    571     mCameraHandle = NULL;
    572     mCameraOpened = false;
    573 
    574 #ifdef HAS_MULTIMEDIA_HINTS
    575     if (rc == NO_ERROR) {
    576         if (m_pPowerModule) {
    577             if (m_pPowerModule->powerHint) {
    578                 m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
    579                         (void *)"state=0");
    580             }
    581         }
    582     }
    583 #endif
    584 
    585     if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
    586         CDBG("%s: Failed to release flash for camera id: %d",
    587                 __func__,
    588                 mCameraId);
    589     }
    590 
    591     return rc;
    592 }
    593 
    594 /*===========================================================================
    595  * FUNCTION   : initialize
    596  *
    597  * DESCRIPTION: Initialize frameworks callback functions
    598  *
    599  * PARAMETERS :
    600  *   @callback_ops : callback function to frameworks
    601  *
    602  * RETURN     :
    603  *
    604  *==========================================================================*/
    605 int QCamera3HardwareInterface::initialize(
    606         const struct camera3_callback_ops *callback_ops)
    607 {
    608     ATRACE_CALL();
    609     int rc;
    610 
    611     pthread_mutex_lock(&mMutex);
    612 
    613     rc = initParameters();
    614     if (rc < 0) {
    615         ALOGE("%s: initParamters failed %d", __func__, rc);
    616        goto err1;
    617     }
    618     mCallbackOps = callback_ops;
    619 
    620     pthread_mutex_unlock(&mMutex);
    621     mCameraInitialized = true;
    622     return 0;
    623 
    624 err1:
    625     pthread_mutex_unlock(&mMutex);
    626     return rc;
    627 }
    628 
    629 /*===========================================================================
    630  * FUNCTION   : validateStreamDimensions
    631  *
    632  * DESCRIPTION: Check if the configuration requested are those advertised
    633  *
    634  * PARAMETERS :
    635  *   @stream_list : streams to be configured
    636  *
    637  * RETURN     :
    638  *
    639  *==========================================================================*/
    640 int QCamera3HardwareInterface::validateStreamDimensions(
    641         camera3_stream_configuration_t *streamList)
    642 {
    643     int rc = NO_ERROR;
    644     int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
    645     int32_t available_jpeg_sizes[MAX_SIZES_CNT * 2];
    646     uint8_t jpeg_sizes_cnt = 0;
    647 
    648     camera3_stream_t *inputStream = NULL;
    649     /*
    650     * Loop through all streams to find input stream if it exists*
    651     */
    652     for (size_t i = 0; i< streamList->num_streams; i++) {
    653         if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
    654             if (inputStream != NULL) {
    655                 ALOGE("%s: Error, Multiple input streams requested", __func__);
    656                 return -EINVAL;
    657             }
    658             inputStream = streamList->streams[i];
    659         }
    660     }
    661     /*
    662     * Loop through all streams requested in configuration
    663     * Check if unsupported sizes have been requested on any of them
    664     */
    665     for (size_t j = 0; j < streamList->num_streams; j++){
    666         bool sizeFound = false;
    667         camera3_stream_t *newStream = streamList->streams[j];
    668 
    669         uint32_t rotatedHeight = newStream->height;
    670         uint32_t rotatedWidth = newStream->width;
    671         if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
    672                 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
    673             rotatedHeight = newStream->width;
    674             rotatedWidth = newStream->height;
    675         }
    676 
    677         /*
    678         * Sizes are different for each type of stream format check against
    679         * appropriate table.
    680         */
    681         switch (newStream->format) {
    682         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
    683         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
    684         case HAL_PIXEL_FORMAT_RAW10:
    685             for (int i = 0;
    686                     i < gCamCapability[mCameraId]->supported_raw_dim_cnt; i++){
    687                 if (gCamCapability[mCameraId]->raw_dim[i].width
    688                         == (int32_t) rotatedWidth
    689                     && gCamCapability[mCameraId]->raw_dim[i].height
    690                         == (int32_t) rotatedHeight) {
    691                     sizeFound = true;
    692                     break;
    693                 }
    694             }
    695             break;
    696         case HAL_PIXEL_FORMAT_BLOB:
    697             /* Generate JPEG sizes table */
    698             makeTable(gCamCapability[mCameraId]->picture_sizes_tbl,
    699                     gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
    700                     available_processed_sizes);
    701             jpeg_sizes_cnt = filterJpegSizes(
    702                     available_jpeg_sizes,
    703                     available_processed_sizes,
    704                     (gCamCapability[mCameraId]->picture_sizes_tbl_cnt) * 2,
    705                     MAX_SIZES_CNT * 2,
    706                     gCamCapability[mCameraId]->active_array_size,
    707                     gCamCapability[mCameraId]->max_downscale_factor);
    708 
    709             /* Verify set size against generated sizes table */
    710             for (int i = 0;i < jpeg_sizes_cnt/2; i++) {
    711                 if ((int32_t) rotatedWidth == available_jpeg_sizes[i*2] &&
    712                     (int32_t) rotatedHeight == available_jpeg_sizes[i*2+1]) {
    713                     sizeFound = true;
    714                     break;
    715                 }
    716             }
    717             break;
    718 
    719 
    720         case HAL_PIXEL_FORMAT_YCbCr_420_888:
    721         case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
    722         default:
    723             /* ZSL stream will be full active array size validate that*/
    724             if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
    725                 || newStream->stream_type == CAMERA3_STREAM_INPUT
    726                 || newStream->usage & GRALLOC_USAGE_HW_CAMERA_ZSL){
    727                 if ((int32_t) rotatedWidth ==
    728                     gCamCapability[mCameraId]->active_array_size.width
    729                     && (int32_t) rotatedHeight  ==
    730                     gCamCapability[mCameraId]->active_array_size.height) {
    731                     sizeFound = true;
    732                 }
    733                 /* We could potentially break here to enforce ZSL stream
    734                  * set from frameworks always has full active array size
    735                  * but it is not clear from spec if framework will always
    736                  * follow that, also we have logic to override to full array
    737                  * size, so keeping this logic lenient at the moment.
    738                  */
    739             }
    740 
    741             /* Non ZSL stream still need to conform to advertised sizes*/
    742             for (int i = 0;
    743                 i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt;i++){
    744                 if ((int32_t) rotatedWidth ==
    745                         gCamCapability[mCameraId]->picture_sizes_tbl[i].width
    746                     && (int32_t) rotatedHeight ==
    747                         gCamCapability[mCameraId]->picture_sizes_tbl[i].height){
    748                     sizeFound = true;
    749                 break;
    750                 }
    751             }
    752             break;
    753         } /* End of switch(newStream->format) */
    754 
    755         /* We error out even if a single stream has unsupported size set */
    756         if (!sizeFound) {
    757             ALOGE("%s: Error: Unsupported size of  %d x %d requested for stream"
    758                   "type:%d", __func__, rotatedWidth, rotatedHeight,
    759                   newStream->format);
    760             ALOGE("%s: Active array size is  %d x %d", __func__,
    761                     gCamCapability[mCameraId]->active_array_size.width,
    762                     gCamCapability[mCameraId]->active_array_size.height);
    763             rc = -EINVAL;
    764             break;
    765         }
    766     } /* End of for each stream */
    767     return rc;
    768 }
    769 
    770 /*==============================================================================
    771  * FUNCTION   : isSupportChannelNeeded
    772  *
    773  * DESCRIPTION: Simple heuristic func to determine if support channels is needed
    774  *
    775  * PARAMETERS :
    776  *   @stream_list : streams to be configured
    777  *
    778  * RETURN     : Boolen true/false decision
    779  *
    780  *==========================================================================*/
    781 bool QCamera3HardwareInterface::isSupportChannelNeeded(camera3_stream_configuration_t *streamList,
    782         cam_stream_size_info_t stream_config_info)
    783 {
    784     uint32_t i;
    785     bool bSuperSetPresent = false;
    786     /* Check for conditions where PProc pipeline does not have any streams*/
    787     for (i = 0; i < stream_config_info.num_streams; i++) {
    788         if (stream_config_info.postprocess_mask[i] == CAM_QCOM_FEATURE_PP_SUPERSET) {
    789             bSuperSetPresent = true;
    790             break;
    791         }
    792     }
    793 
    794     if (bSuperSetPresent == false )
    795         return true;
    796 
    797     /* Dummy stream needed if only raw or jpeg streams present */
    798     for (i = 0;i < streamList->num_streams;i++) {
    799         switch(streamList->streams[i]->format) {
    800             case HAL_PIXEL_FORMAT_RAW_OPAQUE:
    801             case HAL_PIXEL_FORMAT_RAW10:
    802             case HAL_PIXEL_FORMAT_RAW16:
    803             case HAL_PIXEL_FORMAT_BLOB:
    804                 break;
    805             default:
    806                 return false;
    807         }
    808     }
    809     return true;
    810 }
    811 
    812 
    813 /*===========================================================================
    814  * FUNCTION   : configureStreams
    815  *
    816  * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
    817  *              and output streams.
    818  *
    819  * PARAMETERS :
    820  *   @stream_list : streams to be configured
    821  *
    822  * RETURN     :
    823  *
    824  *==========================================================================*/
    825 int QCamera3HardwareInterface::configureStreams(
    826         camera3_stream_configuration_t *streamList)
    827 {
    828     ATRACE_CALL();
    829     int rc = 0;
    830 
    831     // Sanity check stream_list
    832     if (streamList == NULL) {
    833         ALOGE("%s: NULL stream configuration", __func__);
    834         return BAD_VALUE;
    835     }
    836     if (streamList->streams == NULL) {
    837         ALOGE("%s: NULL stream list", __func__);
    838         return BAD_VALUE;
    839     }
    840 
    841     if (streamList->num_streams < 1) {
    842         ALOGE("%s: Bad number of streams requested: %d", __func__,
    843                 streamList->num_streams);
    844         return BAD_VALUE;
    845     }
    846 
    847     /* first invalidate all the steams in the mStreamList
    848      * if they appear again, they will be validated */
    849     for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
    850             it != mStreamInfo.end(); it++) {
    851         QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
    852         channel->stop();
    853         (*it)->status = INVALID;
    854     }
    855 
    856     if (mRawDumpChannel) {
    857         mRawDumpChannel->stop();
    858         delete mRawDumpChannel;
    859         mRawDumpChannel = NULL;
    860     }
    861 
    862     if (mSupportChannel)
    863         mSupportChannel->stop();
    864     if (mMetadataChannel) {
    865         /* If content of mStreamInfo is not 0, there is metadata stream */
    866         mMetadataChannel->stop();
    867     }
    868 
    869     pthread_mutex_lock(&mMutex);
    870 
    871     /* Check whether we have video stream */
    872     m_bIs4KVideo = false;
    873     m_bIsVideo = false;
    874     bool isZsl = false;
    875     size_t videoWidth = 0;
    876     size_t videoHeight = 0;
    877     size_t rawStreamCnt = 0;
    878     size_t stallStreamCnt = 0;
    879     size_t processedStreamCnt = 0;
    880     // Number of streams on ISP encoder path
    881     size_t numStreamsOnEncoder = 0;
    882     cam_dimension_t maxViewfinderSize;
    883     bool bJpegExceeds4K = false;
    884     bool bUseCommonFeatureMask = false;
    885     uint32_t commonFeatureMask = 0;
    886     maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
    887     camera3_stream_t *inputStream = NULL;
    888 
    889     for (size_t i = 0; i < streamList->num_streams; i++) {
    890         camera3_stream_t *newStream = streamList->streams[i];
    891         CDBG_HIGH("%s: stream[%d] type = %d, format = %d, width = %d, "
    892                 "height = %d, rotation = %d",
    893                 __func__, i, newStream->stream_type, newStream->format,
    894                 newStream->width, newStream->height, newStream->rotation);
    895 
    896         if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
    897                 newStream->stream_type == CAMERA3_STREAM_INPUT){
    898             isZsl = true;
    899         }
    900         if (newStream->stream_type == CAMERA3_STREAM_INPUT){
    901             inputStream = newStream;
    902         }
    903 
    904         if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
    905             if (newStream->width > VIDEO_4K_WIDTH ||
    906                     newStream->height > VIDEO_4K_HEIGHT)
    907                 bJpegExceeds4K = true;
    908         }
    909 
    910         if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
    911                 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
    912             m_bIsVideo = true;
    913 
    914             if ((VIDEO_4K_WIDTH <= newStream->width) &&
    915                     (VIDEO_4K_HEIGHT <= newStream->height)) {
    916                 videoWidth = newStream->width;
    917                 videoHeight = newStream->height;
    918                 m_bIs4KVideo = true;
    919             }
    920         }
    921         if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
    922                 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
    923             switch (newStream->format) {
    924             case HAL_PIXEL_FORMAT_BLOB:
    925                 stallStreamCnt++;
    926                 if (newStream->width > (uint32_t)maxViewfinderSize.width ||
    927                         newStream->height > (uint32_t)maxViewfinderSize.height) {
    928                     commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
    929                     numStreamsOnEncoder++;
    930                 }
    931                 break;
    932             case HAL_PIXEL_FORMAT_RAW10:
    933             case HAL_PIXEL_FORMAT_RAW_OPAQUE:
    934             case HAL_PIXEL_FORMAT_RAW16:
    935                 rawStreamCnt++;
    936                 break;
    937             case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
    938                 processedStreamCnt++;
    939                 if (newStream->width > (uint32_t)maxViewfinderSize.width ||
    940                         newStream->height > (uint32_t)maxViewfinderSize.height) {
    941                     if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
    942                             newStream->usage & GRALLOC_USAGE_HW_CAMERA_ZSL) {
    943                         commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
    944                     } else {
    945                         commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET;
    946                     }
    947                     numStreamsOnEncoder++;
    948                 }
    949                 break;
    950             case HAL_PIXEL_FORMAT_YCbCr_420_888:
    951             default:
    952                 processedStreamCnt++;
    953                 if (newStream->width > (uint32_t)maxViewfinderSize.width ||
    954                         newStream->height > (uint32_t)maxViewfinderSize.height) {
    955                     commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET;
    956                     numStreamsOnEncoder++;
    957                 }
    958                 break;
    959             }
    960 
    961         }
    962     }
    963 
    964     /* Check if num_streams is sane */
    965     if (stallStreamCnt > MAX_STALLING_STREAMS ||
    966             rawStreamCnt > MAX_RAW_STREAMS ||
    967             processedStreamCnt > MAX_PROCESSED_STREAMS) {
    968         ALOGE("%s: Invalid stream configu: stall: %d, raw: %d, processed %d",
    969                 __func__, stallStreamCnt, rawStreamCnt, processedStreamCnt);
    970         pthread_mutex_unlock(&mMutex);
    971         return -EINVAL;
    972     }
    973     /* Check whether we have zsl stream or 4k video case */
    974     if (isZsl && m_bIsVideo) {
    975         ALOGE("%s: Currently invalid configuration ZSL&Video!", __func__);
    976         pthread_mutex_unlock(&mMutex);
    977         return -EINVAL;
    978     }
    979     /* Check if stream sizes are sane */
    980     if (numStreamsOnEncoder > 2) {
    981         ALOGE("%s: Number of streams on ISP encoder path exceeds limits of 2",
    982                 __func__);
    983         pthread_mutex_unlock(&mMutex);
    984         return -EINVAL;
    985     } else if (1 < numStreamsOnEncoder){
    986         bUseCommonFeatureMask = true;
    987         CDBG_HIGH("%s: Multiple streams above max viewfinder size, common mask needed",
    988                 __func__);
    989     }
    990     /* Check if BLOB size is greater than 4k in 4k recording case */
    991     if (m_bIs4KVideo && bJpegExceeds4K) {
    992         ALOGE("%s: HAL doesn't support Blob size greater than 4k in 4k recording",
    993                 __func__);
    994         pthread_mutex_unlock(&mMutex);
    995         return -EINVAL;
    996     }
    997 
    998     rc = validateStreamDimensions(streamList);
    999     if (rc == NO_ERROR) {
   1000         rc = validateStreamRotations(streamList);
   1001     }
   1002     if (rc != NO_ERROR) {
   1003         ALOGE("%s: Invalid stream configuration requested!", __func__);
   1004         pthread_mutex_unlock(&mMutex);
   1005         return rc;
   1006     }
   1007 
   1008     camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
   1009     camera3_stream_t *jpegStream = NULL;
   1010     cam_stream_size_info_t stream_config_info;
   1011     memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
   1012     for (size_t i = 0; i < streamList->num_streams; i++) {
   1013         camera3_stream_t *newStream = streamList->streams[i];
   1014         CDBG_HIGH("%s: newStream type = %d, stream format = %d "
   1015                 "stream size : %d x %d, stream rotation = %d",
   1016                 __func__, newStream->stream_type, newStream->format,
   1017                 newStream->width, newStream->height, newStream->rotation);
   1018         //if the stream is in the mStreamList validate it
   1019         bool stream_exists = false;
   1020         for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
   1021                 it != mStreamInfo.end(); it++) {
   1022             if ((*it)->stream == newStream) {
   1023                 QCamera3Channel *channel =
   1024                     (QCamera3Channel*)(*it)->stream->priv;
   1025                 stream_exists = true;
   1026                 if (channel)
   1027                     delete channel;
   1028                 (*it)->status = VALID;
   1029                 (*it)->stream->priv = NULL;
   1030                 (*it)->channel = NULL;
   1031             }
   1032         }
   1033         if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
   1034             //new stream
   1035             stream_info_t* stream_info;
   1036             stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
   1037             stream_info->stream = newStream;
   1038             stream_info->status = VALID;
   1039             stream_info->channel = NULL;
   1040             mStreamInfo.push_back(stream_info);
   1041         }
   1042         /* Covers Opaque ZSL and API1 F/W ZSL */
   1043         if (newStream->usage & GRALLOC_USAGE_HW_CAMERA_ZSL
   1044                 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
   1045             if (zslStream != NULL) {
   1046                 ALOGE("%s: Multiple input/reprocess streams requested!", __func__);
   1047                 pthread_mutex_unlock(&mMutex);
   1048                 return BAD_VALUE;
   1049             }
   1050             zslStream = newStream;
   1051         }
   1052         /* Covers YUV reprocess */
   1053         if (inputStream != NULL) {
   1054             if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
   1055                     && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
   1056                     && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
   1057                     && inputStream->width == newStream->width
   1058                     && inputStream->height == newStream->height) {
   1059                 if (zslStream != NULL) {
   1060                     /* This scenario indicates multiple YUV streams with same size
   1061                      * as input stream have been requested, since zsl stream handle
   1062                      * is solely use for the purpose of overriding the size of streams
   1063                      * which share h/w streams we will just make a guess here as to
   1064                      * which of the stream is a ZSL stream, this will be refactored
   1065                      * once we make generic logic for streams sharing encoder output
   1066                      */
   1067                     CDBG_HIGH("%s: Warning, Multiple ip/reprocess streams requested!", __func__);
   1068                 }
   1069                 zslStream = newStream;
   1070             }
   1071         }
   1072         if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
   1073             jpegStream = newStream;
   1074         }
   1075     }
   1076 
   1077     cleanAndSortStreamInfo();
   1078     if (mMetadataChannel) {
   1079         delete mMetadataChannel;
   1080         mMetadataChannel = NULL;
   1081     }
   1082     if (mSupportChannel) {
   1083         delete mSupportChannel;
   1084         mSupportChannel = NULL;
   1085     }
   1086 
   1087     //Create metadata channel and initialize it
   1088     mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
   1089                     mCameraHandle->ops, captureResultCb,
   1090                     &gCamCapability[mCameraId]->padding_info, CAM_QCOM_FEATURE_NONE, this);
   1091     if (mMetadataChannel == NULL) {
   1092         ALOGE("%s: failed to allocate metadata channel", __func__);
   1093         rc = -ENOMEM;
   1094         pthread_mutex_unlock(&mMutex);
   1095         return rc;
   1096     }
   1097     rc = mMetadataChannel->initialize(IS_TYPE_NONE);
   1098     if (rc < 0) {
   1099         ALOGE("%s: metadata channel initialization failed", __func__);
   1100         delete mMetadataChannel;
   1101         mMetadataChannel = NULL;
   1102         pthread_mutex_unlock(&mMutex);
   1103         return rc;
   1104     }
   1105 
   1106     bool isRawStreamRequested = false;
   1107     /* Allocate channel objects for the requested streams */
   1108     for (size_t i = 0; i < streamList->num_streams; i++) {
   1109         camera3_stream_t *newStream = streamList->streams[i];
   1110         uint32_t stream_usage = newStream->usage;
   1111         stream_config_info.stream_sizes[stream_config_info.num_streams].width = newStream->width;
   1112         stream_config_info.stream_sizes[stream_config_info.num_streams].height = newStream->height;
   1113         if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
   1114                 || newStream->usage & GRALLOC_USAGE_HW_CAMERA_ZSL)
   1115                 && newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED
   1116                 && jpegStream){
   1117             //for zsl stream the size is active array size
   1118             // FIXME remove this for api zsl bidirectional is alway active array size
   1119             // and for hal 3.3 reprocess, we will *indirectly* control using input size.
   1120             // there is a grey area however when the application whimsically decides to create
   1121             // a standalone zsl stream whose size < jpeg blob size
   1122             /*
   1123             stream_config_info.stream_sizes[stream_config_info.num_streams].width =
   1124                     gCamCapability[mCameraId]->active_array_size.width;
   1125             stream_config_info.stream_sizes[stream_config_info.num_streams].height =
   1126                     gCamCapability[mCameraId]->active_array_size.height;
   1127             */
   1128             stream_config_info.type[stream_config_info.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
   1129             stream_config_info.postprocess_mask[stream_config_info.num_streams] = CAM_QCOM_FEATURE_NONE;
   1130         } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
   1131                 CDBG_HIGH("%s: Input stream configured, reprocess config", __func__);
   1132         } else {
   1133             //for non zsl streams find out the format
   1134             switch (newStream->format) {
   1135             case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
   1136               {
   1137                  if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
   1138                     stream_config_info.type[stream_config_info.num_streams] = CAM_STREAM_TYPE_VIDEO;
   1139                  } else {
   1140                     stream_config_info.type[stream_config_info.num_streams] = CAM_STREAM_TYPE_PREVIEW;
   1141                  }
   1142                  stream_config_info.postprocess_mask[stream_config_info.num_streams] = CAM_QCOM_FEATURE_PP_SUPERSET;
   1143 
   1144                  if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
   1145                          (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
   1146                      stream_config_info.stream_sizes[stream_config_info.num_streams].width =
   1147                              newStream->height;
   1148                      stream_config_info.stream_sizes[stream_config_info.num_streams].height =
   1149                              newStream->width;
   1150                  }
   1151               }
   1152               break;
   1153             case HAL_PIXEL_FORMAT_YCbCr_420_888:
   1154               stream_config_info.type[stream_config_info.num_streams] = CAM_STREAM_TYPE_CALLBACK;
   1155               stream_config_info.postprocess_mask[stream_config_info.num_streams] = CAM_QCOM_FEATURE_PP_SUPERSET;
   1156               break;
   1157             case HAL_PIXEL_FORMAT_BLOB:
   1158               stream_config_info.type[stream_config_info.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
   1159               if (m_bIs4KVideo && !isZsl) {
   1160                   stream_config_info.postprocess_mask[stream_config_info.num_streams] = CAM_QCOM_FEATURE_PP_SUPERSET;
   1161               } else {
   1162                   if (bUseCommonFeatureMask &&
   1163                           (newStream->width > (uint32_t)maxViewfinderSize.width ||
   1164                                   newStream->height > (uint32_t)maxViewfinderSize.height)) {
   1165                       stream_config_info.postprocess_mask[stream_config_info.num_streams] = commonFeatureMask;
   1166                   } else {
   1167                       stream_config_info.postprocess_mask[stream_config_info.num_streams] = CAM_QCOM_FEATURE_NONE;
   1168                   }
   1169               }
   1170               if (isZsl) {
   1171                   if (zslStream) {
   1172                       stream_config_info.stream_sizes[stream_config_info.num_streams].width =
   1173                               zslStream->width;
   1174                       stream_config_info.stream_sizes[stream_config_info.num_streams].height =
   1175                               zslStream->height;
   1176                   } else {
   1177                       ALOGE("%s: Error, No ZSL stream identified",__func__);
   1178                       pthread_mutex_unlock(&mMutex);
   1179                       return -EINVAL;
   1180                   }
   1181               } else if (m_bIs4KVideo) {
   1182                   stream_config_info.stream_sizes[stream_config_info.num_streams].width = videoWidth;
   1183                   stream_config_info.stream_sizes[stream_config_info.num_streams].height = videoHeight;
   1184               }
   1185               break;
   1186             case HAL_PIXEL_FORMAT_RAW_OPAQUE:
   1187             case HAL_PIXEL_FORMAT_RAW16:
   1188             case HAL_PIXEL_FORMAT_RAW10:
   1189               stream_config_info.type[stream_config_info.num_streams] = CAM_STREAM_TYPE_RAW;
   1190               isRawStreamRequested = true;
   1191               break;
   1192             default:
   1193               stream_config_info.type[stream_config_info.num_streams] = CAM_STREAM_TYPE_DEFAULT;
   1194               stream_config_info.postprocess_mask[stream_config_info.num_streams] = CAM_QCOM_FEATURE_NONE;
   1195               break;
   1196             }
   1197 
   1198         }
   1199         if (newStream->priv == NULL) {
   1200             //New stream, construct channel
   1201             switch (newStream->stream_type) {
   1202             case CAMERA3_STREAM_INPUT:
   1203                 newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
   1204                 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
   1205                 break;
   1206             case CAMERA3_STREAM_BIDIRECTIONAL:
   1207                 newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
   1208                     GRALLOC_USAGE_HW_CAMERA_WRITE;
   1209                 break;
   1210             case CAMERA3_STREAM_OUTPUT:
   1211                 /* For video encoding stream, set read/write rarely
   1212                  * flag so that they may be set to un-cached */
   1213                 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
   1214                     newStream->usage =
   1215                          (GRALLOC_USAGE_SW_READ_RARELY |
   1216                          GRALLOC_USAGE_SW_WRITE_RARELY |
   1217                          GRALLOC_USAGE_HW_CAMERA_WRITE);
   1218                 else if (newStream->usage & GRALLOC_USAGE_HW_CAMERA_ZSL)
   1219                     CDBG("%s: ZSL usage flag skipping", __func__);
   1220                 else
   1221                     newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
   1222                 break;
   1223             default:
   1224                 ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
   1225                 break;
   1226             }
   1227 
   1228             if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
   1229                     newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
   1230                 QCamera3Channel *channel = NULL;
   1231                 switch (newStream->format) {
   1232                 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
   1233                 case HAL_PIXEL_FORMAT_YCbCr_420_888:
   1234                     newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
   1235                     channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
   1236                             mCameraHandle->ops, captureResultCb,
   1237                             &gCamCapability[mCameraId]->padding_info,
   1238                             this,
   1239                             newStream,
   1240                             (cam_stream_type_t) stream_config_info.type[stream_config_info.num_streams],
   1241                             stream_config_info.postprocess_mask[stream_config_info.num_streams]);
   1242                     if (channel == NULL) {
   1243                         ALOGE("%s: allocation of channel failed", __func__);
   1244                         pthread_mutex_unlock(&mMutex);
   1245                         return -ENOMEM;
   1246                     }
   1247 
   1248                     newStream->priv = channel;
   1249                     break;
   1250                 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
   1251                 case HAL_PIXEL_FORMAT_RAW16:
   1252                 case HAL_PIXEL_FORMAT_RAW10:
   1253                     newStream->max_buffers = QCamera3RawChannel::kMaxBuffers;
   1254                     mRawChannel = new QCamera3RawChannel(
   1255                             mCameraHandle->camera_handle,
   1256                             mCameraHandle->ops, captureResultCb,
   1257                             &gCamCapability[mCameraId]->padding_info,
   1258                             this, newStream, CAM_QCOM_FEATURE_NONE,
   1259                             (newStream->format == HAL_PIXEL_FORMAT_RAW16));
   1260                     if (mRawChannel == NULL) {
   1261                         ALOGE("%s: allocation of raw channel failed", __func__);
   1262                         pthread_mutex_unlock(&mMutex);
   1263                         return -ENOMEM;
   1264                     }
   1265 
   1266                     newStream->priv = (QCamera3Channel*)mRawChannel;
   1267                     break;
   1268                 case HAL_PIXEL_FORMAT_BLOB:
   1269                     newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
   1270                     mPictureChannel = new QCamera3PicChannel(mCameraHandle->camera_handle,
   1271                             mCameraHandle->ops, captureResultCb,
   1272                             &gCamCapability[mCameraId]->padding_info, this, newStream,
   1273                             stream_config_info.postprocess_mask[stream_config_info.num_streams],
   1274                             m_bIs4KVideo, mMetadataChannel);
   1275                     if (mPictureChannel == NULL) {
   1276                         ALOGE("%s: allocation of channel failed", __func__);
   1277                         pthread_mutex_unlock(&mMutex);
   1278                         return -ENOMEM;
   1279                     }
   1280                     newStream->priv = (QCamera3Channel*)mPictureChannel;
   1281                     break;
   1282 
   1283                 default:
   1284                     ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
   1285                     break;
   1286                 }
   1287             } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
   1288                 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
   1289             } else {
   1290                 ALOGE("%s: Error, Unknown stream type", __func__);
   1291                 return -EINVAL;
   1292             }
   1293 
   1294             for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
   1295                     it != mStreamInfo.end(); it++) {
   1296                 if ((*it)->stream == newStream) {
   1297                     (*it)->channel = (QCamera3Channel*) newStream->priv;
   1298                     break;
   1299                 }
   1300             }
   1301         } else {
   1302             // Channel already exists for this stream
   1303             // Do nothing for now
   1304         }
   1305 
   1306     /* Do not add entries for input stream in metastream info
   1307          * since there is no real stream associated with it
   1308          */
   1309         if (newStream->stream_type != CAMERA3_STREAM_INPUT)
   1310             stream_config_info.num_streams++;
   1311     }
   1312 
   1313     if (isZsl) {
   1314         if (zslStream == NULL) {
   1315             ALOGE("%s: Error Zsl stream handle missing", __func__);
   1316             pthread_mutex_unlock(&mMutex);
   1317             return -EINVAL;
   1318         }
   1319         /* This override is possible since the f/w gaurantees that the ZSL
   1320            stream will always be the active array size in case of Bidirectional
   1321            or will be limited to the max i/p stream size which we can control to
   1322            be equal to be the largest YUV/Opaque stream size
   1323            */
   1324         if (mPictureChannel) {
   1325            mPictureChannel->overrideYuvSize(zslStream->width, zslStream->height);
   1326         }
   1327     } else if (mPictureChannel && m_bIs4KVideo) {
   1328         mPictureChannel->overrideYuvSize(videoWidth, videoHeight);
   1329     }
   1330 
   1331     if (isSupportChannelNeeded(streamList, stream_config_info)) {
   1332         mSupportChannel = new QCamera3SupportChannel(
   1333                 mCameraHandle->camera_handle,
   1334                 mCameraHandle->ops,
   1335                 &gCamCapability[mCameraId]->padding_info,
   1336                 CAM_QCOM_FEATURE_NONE,
   1337                 this);
   1338         if (!mSupportChannel) {
   1339             ALOGE("%s: dummy channel cannot be created", __func__);
   1340             pthread_mutex_unlock(&mMutex);
   1341             return -ENOMEM;
   1342         }
   1343     }
   1344 
   1345     //RAW DUMP channel
   1346     if (mEnableRawDump && isRawStreamRequested == false){
   1347         cam_dimension_t rawDumpSize;
   1348         rawDumpSize = getMaxRawSize(mCameraId);
   1349         mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
   1350                                   mCameraHandle->ops,
   1351                                   rawDumpSize,
   1352                                   &gCamCapability[mCameraId]->padding_info,
   1353                                   this, CAM_QCOM_FEATURE_NONE);
   1354         if (!mRawDumpChannel) {
   1355             ALOGE("%s: Raw Dump channel cannot be created", __func__);
   1356             pthread_mutex_unlock(&mMutex);
   1357             return -ENOMEM;
   1358         }
   1359     }
   1360 
   1361 
   1362     if (mSupportChannel) {
   1363         stream_config_info.stream_sizes[stream_config_info.num_streams] =
   1364                 QCamera3SupportChannel::kDim;
   1365         stream_config_info.type[stream_config_info.num_streams] =
   1366                 CAM_STREAM_TYPE_CALLBACK;
   1367         stream_config_info.postprocess_mask[stream_config_info.num_streams] =
   1368                 CAM_QCOM_FEATURE_PP_SUPERSET;
   1369         stream_config_info.num_streams++;
   1370     }
   1371 
   1372     if (mRawDumpChannel) {
   1373         cam_dimension_t rawSize;
   1374         rawSize = getMaxRawSize(mCameraId);
   1375         stream_config_info.stream_sizes[stream_config_info.num_streams] =
   1376                 rawSize;
   1377         stream_config_info.type[stream_config_info.num_streams] =
   1378                 CAM_STREAM_TYPE_RAW;
   1379         stream_config_info.num_streams++;
   1380     }
   1381 
   1382     mStreamConfigInfo = stream_config_info;
   1383 
   1384     /* Initialize mPendingRequestInfo and mPendnigBuffersMap */
   1385     mPendingRequestsList.clear();
   1386     mPendingFrameDropList.clear();
   1387     // Initialize/Reset the pending buffers list
   1388     mPendingBuffersMap.num_buffers = 0;
   1389     mPendingBuffersMap.mPendingBufferList.clear();
   1390     mPendingReprocessResultList.clear();
   1391 
   1392     mFirstRequest = true;
   1393 
   1394     //Get min frame duration for this streams configuration
   1395     deriveMinFrameDuration();
   1396 
   1397     pthread_mutex_unlock(&mMutex);
   1398     return rc;
   1399 }
   1400 
   1401 /*===========================================================================
   1402  * FUNCTION   : validateCaptureRequest
   1403  *
   1404  * DESCRIPTION: validate a capture request from camera service
   1405  *
   1406  * PARAMETERS :
   1407  *   @request : request from framework to process
   1408  *
   1409  * RETURN     :
   1410  *
   1411  *==========================================================================*/
   1412 int QCamera3HardwareInterface::validateCaptureRequest(
   1413                     camera3_capture_request_t *request)
   1414 {
   1415     ssize_t idx = 0;
   1416     const camera3_stream_buffer_t *b;
   1417     CameraMetadata meta;
   1418 
   1419     /* Sanity check the request */
   1420     if (request == NULL) {
   1421         ALOGE("%s: NULL capture request", __func__);
   1422         return BAD_VALUE;
   1423     }
   1424 
   1425     if (request->settings == NULL && mFirstRequest) {
   1426         /*settings cannot be null for the first request*/
   1427         return BAD_VALUE;
   1428     }
   1429 
   1430     uint32_t frameNumber = request->frame_number;
   1431     if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
   1432         ALOGE("%s: Request %d: No output buffers provided!",
   1433                 __FUNCTION__, frameNumber);
   1434         return BAD_VALUE;
   1435     }
   1436     if (request->input_buffer != NULL) {
   1437         b = request->input_buffer;
   1438         if (b->status != CAMERA3_BUFFER_STATUS_OK) {
   1439             ALOGE("%s: Request %d: Buffer %ld: Status not OK!",
   1440                     __func__, frameNumber, (long)idx);
   1441             return BAD_VALUE;
   1442         }
   1443         if (b->release_fence != -1) {
   1444             ALOGE("%s: Request %d: Buffer %ld: Has a release fence!",
   1445                     __func__, frameNumber, (long)idx);
   1446             return BAD_VALUE;
   1447         }
   1448         if (b->buffer == NULL) {
   1449             ALOGE("%s: Request %d: Buffer %ld: NULL buffer handle!",
   1450                     __func__, frameNumber, (long)idx);
   1451             return BAD_VALUE;
   1452         }
   1453     }
   1454 
   1455     // Validate all buffers
   1456     b = request->output_buffers;
   1457     do {
   1458         QCamera3Channel *channel =
   1459                 static_cast<QCamera3Channel*>(b->stream->priv);
   1460         if (channel == NULL) {
   1461             ALOGE("%s: Request %d: Buffer %ld: Unconfigured stream!",
   1462                     __func__, frameNumber, (long)idx);
   1463             return BAD_VALUE;
   1464         }
   1465         if (b->status != CAMERA3_BUFFER_STATUS_OK) {
   1466             ALOGE("%s: Request %d: Buffer %ld: Status not OK!",
   1467                     __func__, frameNumber, (long)idx);
   1468             return BAD_VALUE;
   1469         }
   1470         if (b->release_fence != -1) {
   1471             ALOGE("%s: Request %d: Buffer %ld: Has a release fence!",
   1472                     __func__, frameNumber, (long)idx);
   1473             return BAD_VALUE;
   1474         }
   1475         if (b->buffer == NULL) {
   1476             ALOGE("%s: Request %d: Buffer %ld: NULL buffer handle!",
   1477                     __func__, frameNumber, (long)idx);
   1478             return BAD_VALUE;
   1479         }
   1480         if (*(b->buffer) == NULL) {
   1481             ALOGE("%s: Request %d: Buffer %ld: NULL private handle!",
   1482                     __func__, frameNumber, (long)idx);
   1483             return BAD_VALUE;
   1484         }
   1485         idx++;
   1486         b = request->output_buffers + idx;
   1487     } while (idx < (ssize_t)request->num_output_buffers);
   1488 
   1489     return NO_ERROR;
   1490 }
   1491 
   1492 /*===========================================================================
   1493  * FUNCTION   : deriveMinFrameDuration
   1494  *
   1495  * DESCRIPTION: derive mininum processed, jpeg, and raw frame durations based
   1496  *              on currently configured streams.
   1497  *
   1498  * PARAMETERS : NONE
   1499  *
   1500  * RETURN     : NONE
   1501  *
   1502  *==========================================================================*/
   1503 void QCamera3HardwareInterface::deriveMinFrameDuration()
   1504 {
   1505     int32_t maxJpegDim, maxProcessedDim, maxRawDim;
   1506 
   1507     maxJpegDim = 0;
   1508     maxProcessedDim = 0;
   1509     maxRawDim = 0;
   1510 
   1511     // Figure out maximum jpeg, processed, and raw dimensions
   1512     for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
   1513         it != mStreamInfo.end(); it++) {
   1514 
   1515         // Input stream doesn't have valid stream_type
   1516         if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
   1517             continue;
   1518 
   1519         int32_t dimension = (*it)->stream->width * (*it)->stream->height;
   1520         if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
   1521             if (dimension > maxJpegDim)
   1522                 maxJpegDim = dimension;
   1523         } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
   1524                 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
   1525                 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
   1526             if (dimension > maxRawDim)
   1527                 maxRawDim = dimension;
   1528         } else {
   1529             if (dimension > maxProcessedDim)
   1530                 maxProcessedDim = dimension;
   1531         }
   1532     }
   1533 
   1534     //Assume all jpeg dimensions are in processed dimensions.
   1535     if (maxJpegDim > maxProcessedDim)
   1536         maxProcessedDim = maxJpegDim;
   1537     //Find the smallest raw dimension that is greater or equal to jpeg dimension
   1538     if (maxProcessedDim > maxRawDim) {
   1539         maxRawDim = INT32_MAX;
   1540         for (int i = 0; i < gCamCapability[mCameraId]->supported_raw_dim_cnt;
   1541             i++) {
   1542 
   1543             int32_t dimension =
   1544                 gCamCapability[mCameraId]->raw_dim[i].width *
   1545                 gCamCapability[mCameraId]->raw_dim[i].height;
   1546 
   1547             if (dimension >= maxProcessedDim && dimension < maxRawDim)
   1548                 maxRawDim = dimension;
   1549         }
   1550     }
   1551 
   1552     //Find minimum durations for processed, jpeg, and raw
   1553     for (int i = 0; i < gCamCapability[mCameraId]->supported_raw_dim_cnt;
   1554             i++) {
   1555         if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
   1556                 gCamCapability[mCameraId]->raw_dim[i].height) {
   1557             mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
   1558             break;
   1559         }
   1560     }
   1561     for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
   1562         if (maxProcessedDim ==
   1563             gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
   1564             gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
   1565             mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
   1566             mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
   1567             break;
   1568         }
   1569     }
   1570 }
   1571 
   1572 /*===========================================================================
   1573  * FUNCTION   : getMinFrameDuration
   1574  *
   1575  * DESCRIPTION: get minimum frame draution based on the current maximum frame durations
   1576  *              and current request configuration.
   1577  *
   1578  * PARAMETERS : @request: requset sent by the frameworks
   1579  *
   1580  * RETURN     : min farme duration for a particular request
   1581  *
   1582  *==========================================================================*/
   1583 int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
   1584 {
   1585     bool hasJpegStream = false;
   1586     for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
   1587         const camera3_stream_t *stream = request->output_buffers[i].stream;
   1588         if (stream->format == HAL_PIXEL_FORMAT_BLOB)
   1589             hasJpegStream = true;
   1590     }
   1591 
   1592     if (!hasJpegStream)
   1593         return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
   1594     else
   1595         return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
   1596 }
   1597 
   1598 /*===========================================================================
   1599  * FUNCTION   : handlePendingReprocResults
   1600  *
   1601  * DESCRIPTION: check and notify on any pending reprocess results
   1602  *
   1603  * PARAMETERS :
   1604  *   @frame_number   : Pending request frame number
   1605  *
   1606  * RETURN     : int32_t type of status
   1607  *              NO_ERROR  -- success
   1608  *              none-zero failure code
   1609  *==========================================================================*/
   1610 int32_t QCamera3HardwareInterface::handlePendingReprocResults(uint32_t frame_number)
   1611 {
   1612     for (List<PendingReprocessResult>::iterator j = mPendingReprocessResultList.begin();
   1613             j != mPendingReprocessResultList.end(); j++) {
   1614         if (j->frame_number == frame_number) {
   1615             mCallbackOps->notify(mCallbackOps, &j->notify_msg);
   1616 
   1617             CDBG("%s: Delayed reprocess notify %d", __func__,
   1618                     frame_number);
   1619 
   1620             for (List<PendingRequestInfo>::iterator k = mPendingRequestsList.begin();
   1621                 k != mPendingRequestsList.end(); k++) {
   1622 
   1623                 if (k->frame_number == j->frame_number) {
   1624                     CDBG("%s: Found reprocess frame number %d in pending reprocess List "
   1625                             "Take it out!!", __func__,
   1626                             k->frame_number);
   1627 
   1628                     camera3_capture_result result;
   1629                     memset(&result, 0, sizeof(camera3_capture_result));
   1630                     result.frame_number = frame_number;
   1631                     result.num_output_buffers = 1;
   1632                     result.output_buffers =  &j->buffer;
   1633                     result.input_buffer = k->input_buffer;
   1634                     result.result = k->settings;
   1635                     result.partial_result = PARTIAL_RESULT_COUNT;
   1636                     mCallbackOps->process_capture_result(mCallbackOps, &result);
   1637 
   1638                     mPendingRequestsList.erase(k);
   1639                     mPendingRequest--;
   1640                     break;
   1641                 }
   1642             }
   1643             mPendingReprocessResultList.erase(j);
   1644             break;
   1645         }
   1646     }
   1647     return NO_ERROR;
   1648 }
   1649 
   1650 /*===========================================================================
   1651  * FUNCTION   : handleMetadataWithLock
   1652  *
   1653  * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
   1654  *
   1655  * PARAMETERS : @metadata_buf: metadata buffer
   1656  *
   1657  * RETURN     :
   1658  *
   1659  *==========================================================================*/
   1660 void QCamera3HardwareInterface::handleMetadataWithLock(
   1661     mm_camera_super_buf_t *metadata_buf)
   1662 {
   1663     ATRACE_CALL();
   1664 
   1665     int32_t  frame_number_valid        = 0;
   1666     uint32_t frame_number              = 0;
   1667     int64_t  capture_time              = 0;
   1668     int32_t  urgent_frame_number_valid = 0;
   1669     uint32_t urgent_frame_number       = 0;
   1670 
   1671     metadata_buffer_t   *metadata      = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
   1672     cam_frame_dropped_t cam_frame_drop =
   1673             *(cam_frame_dropped_t *) POINTER_OF_META(CAM_INTF_META_FRAME_DROPPED, metadata);
   1674 
   1675     int32_t  *p_frame_number_valid        =
   1676             (int32_t *) POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
   1677     uint32_t *p_frame_number              =
   1678             (uint32_t *) POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
   1679     int64_t  *p_capture_time              =
   1680             (int64_t *) POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
   1681     int32_t  *p_urgent_frame_number_valid =
   1682             (int32_t *) POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
   1683     uint32_t *p_urgent_frame_number       =
   1684             (uint32_t *) POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
   1685 
   1686     if ((NULL == p_frame_number_valid)        ||
   1687             (NULL == p_frame_number)              ||
   1688             (NULL == p_capture_time)              ||
   1689             (NULL == p_urgent_frame_number_valid) ||
   1690             (NULL == p_urgent_frame_number))
   1691     {
   1692         mMetadataChannel->bufDone(metadata_buf);
   1693         free(metadata_buf);
   1694         goto done_metadata;
   1695     }
   1696     else
   1697     {
   1698         frame_number_valid        = *p_frame_number_valid;
   1699         frame_number              = *p_frame_number;
   1700         capture_time              = *p_capture_time;
   1701         urgent_frame_number_valid = *p_urgent_frame_number_valid;
   1702         urgent_frame_number       = *p_urgent_frame_number;
   1703     }
   1704 
   1705     if (urgent_frame_number_valid) {
   1706         CDBG("%s: valid urgent frame_number = %d, capture_time = %lld",
   1707           __func__, urgent_frame_number, capture_time);
   1708 
   1709         //Recieved an urgent Frame Number, handle it
   1710         //using partial results
   1711         for (List<PendingRequestInfo>::iterator i =
   1712             mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
   1713             CDBG("%s: Iterator Frame = %d urgent frame = %d",
   1714                 __func__, i->frame_number, urgent_frame_number);
   1715 
   1716             if (i->frame_number < urgent_frame_number &&
   1717                 i->partial_result_cnt == 0) {
   1718                 ALOGE("%s: Error: HAL missed urgent metadata for frame number %d",
   1719                     __func__, i->frame_number);
   1720             }
   1721 
   1722             if (i->frame_number == urgent_frame_number &&
   1723                      i->bUrgentReceived == 0) {
   1724 
   1725                 camera3_capture_result_t result;
   1726                 memset(&result, 0, sizeof(camera3_capture_result_t));
   1727 
   1728                 i->partial_result_cnt++;
   1729                 i->bUrgentReceived = 1;
   1730                 // Extract 3A metadata
   1731                 result.result =
   1732                     translateCbUrgentMetadataToResultMetadata(metadata);
   1733 
   1734                 if (result.result == NULL)
   1735                 {
   1736                     CameraMetadata dummyMetadata;
   1737                     dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
   1738                             &i->timestamp, 1);
   1739                     dummyMetadata.update(ANDROID_REQUEST_ID,
   1740                             &(i->request_id), 1);
   1741                     result.result = dummyMetadata.release();
   1742                 }
   1743 
   1744                 // Populate metadata result
   1745                 result.frame_number = urgent_frame_number;
   1746                 result.num_output_buffers = 0;
   1747                 result.output_buffers = NULL;
   1748                 result.partial_result = i->partial_result_cnt;
   1749 
   1750                 mCallbackOps->process_capture_result(mCallbackOps, &result);
   1751                 CDBG("%s: urgent frame_number = %d, capture_time = %lld",
   1752                      __func__, result.frame_number, capture_time);
   1753                 free_camera_metadata((camera_metadata_t *)result.result);
   1754                 break;
   1755             }
   1756         }
   1757     }
   1758 
   1759     if (!frame_number_valid) {
   1760         CDBG("%s: Not a valid normal frame number, used as SOF only", __func__);
   1761         mMetadataChannel->bufDone(metadata_buf);
   1762         free(metadata_buf);
   1763         goto done_metadata;
   1764     }
   1765     CDBG("%s: valid frame_number = %d, capture_time = %lld", __func__,
   1766             frame_number, capture_time);
   1767 
   1768     // Go through the pending requests info and send shutter/results to frameworks
   1769     for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
   1770         i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
   1771         camera3_capture_result_t result;
   1772         memset(&result, 0, sizeof(camera3_capture_result_t));
   1773 
   1774         CDBG("%s: frame_number in the list is %d", __func__, i->frame_number);
   1775         i->partial_result_cnt++;
   1776         result.partial_result = i->partial_result_cnt;
   1777 
   1778         // Flush out all entries with less or equal frame numbers.
   1779         mPendingRequest--;
   1780 
   1781         // Check whether any stream buffer corresponding to this is dropped or not
   1782         // If dropped, then send the ERROR_BUFFER for the corresponding stream
   1783         if (cam_frame_drop.frame_dropped) {
   1784             camera3_notify_msg_t notify_msg;
   1785             for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
   1786                     j != i->buffers.end(); j++) {
   1787                 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
   1788                 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
   1789                 for (uint32_t k = 0; k < cam_frame_drop.cam_stream_ID.num_streams; k++) {
   1790                    if (streamID == cam_frame_drop.cam_stream_ID.streamID[k]) {
   1791                        // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
   1792                        ALOGW("%s: Start of reporting error frame#=%d, streamID=%d",
   1793                               __func__, i->frame_number, streamID);
   1794                        notify_msg.type = CAMERA3_MSG_ERROR;
   1795                        notify_msg.message.error.frame_number = i->frame_number;
   1796                        notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
   1797                        notify_msg.message.error.error_stream = j->stream;
   1798                        mCallbackOps->notify(mCallbackOps, &notify_msg);
   1799                        ALOGW("%s: End of reporting error frame#=%d, streamID=%d",
   1800                               __func__, i->frame_number, streamID);
   1801                        PendingFrameDropInfo PendingFrameDrop;
   1802                        PendingFrameDrop.frame_number=i->frame_number;
   1803                        PendingFrameDrop.stream_ID = streamID;
   1804                        // Add the Frame drop info to mPendingFrameDropList
   1805                        mPendingFrameDropList.push_back(PendingFrameDrop);
   1806                    }
   1807                 }
   1808             }
   1809         }
   1810 
   1811         // Send empty metadata with already filled buffers for dropped metadata
   1812         // and send valid metadata with already filled buffers for current metadata
   1813         if (i->frame_number < frame_number) {
   1814             camera3_notify_msg_t notify_msg;
   1815             notify_msg.type = CAMERA3_MSG_SHUTTER;
   1816             notify_msg.message.shutter.frame_number = i->frame_number;
   1817             notify_msg.message.shutter.timestamp = capture_time -
   1818                     (urgent_frame_number - i->frame_number) * NSEC_PER_33MSEC;
   1819             mCallbackOps->notify(mCallbackOps, &notify_msg);
   1820             i->timestamp = notify_msg.message.shutter.timestamp;
   1821             CDBG("%s: Support notification !!!! notify frame_number = %d, capture_time = %lld",
   1822                     __func__, i->frame_number, notify_msg.message.shutter.timestamp);
   1823 
   1824             CameraMetadata dummyMetadata;
   1825             dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
   1826                     &i->timestamp, 1);
   1827             dummyMetadata.update(ANDROID_REQUEST_ID,
   1828                     &(i->request_id), 1);
   1829             result.result = dummyMetadata.release();
   1830         } else {
   1831             // Send shutter notify to frameworks
   1832             camera3_notify_msg_t notify_msg;
   1833             notify_msg.type = CAMERA3_MSG_SHUTTER;
   1834             notify_msg.message.shutter.frame_number = i->frame_number;
   1835             notify_msg.message.shutter.timestamp = capture_time;
   1836             mCallbackOps->notify(mCallbackOps, &notify_msg);
   1837 
   1838             i->timestamp = capture_time;
   1839 
   1840             result.result = translateFromHalMetadata(metadata,
   1841                     i->timestamp, i->request_id, i->jpegMetadata, i->pipeline_depth,
   1842                     i->capture_intent);
   1843 
   1844             if (i->blob_request) {
   1845                 {
   1846                     //Dump tuning metadata if enabled and available
   1847                     char prop[PROPERTY_VALUE_MAX];
   1848                     memset(prop, 0, sizeof(prop));
   1849                     property_get("persist.camera.dumpmetadata", prop, "0");
   1850                     int32_t enabled = atoi(prop);
   1851                     if (enabled && metadata->is_tuning_params_valid) {
   1852                         dumpMetadataToFile(metadata->tuning_params,
   1853                                mMetaFrameCount,
   1854                                enabled,
   1855                                "Snapshot",
   1856                                frame_number);
   1857                     }
   1858                 }
   1859 
   1860 
   1861                 mPictureChannel->queueReprocMetadata(metadata_buf);
   1862             } else {
   1863                 // Return metadata buffer
   1864                 mMetadataChannel->bufDone(metadata_buf);
   1865                 free(metadata_buf);
   1866             }
   1867         }
   1868         if (!result.result) {
   1869             ALOGE("%s: metadata is NULL", __func__);
   1870         }
   1871         result.frame_number = i->frame_number;
   1872         result.num_output_buffers = 0;
   1873         result.output_buffers = NULL;
   1874         for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
   1875                     j != i->buffers.end(); j++) {
   1876             if (j->buffer) {
   1877                 result.num_output_buffers++;
   1878             }
   1879         }
   1880 
   1881         if (result.num_output_buffers > 0) {
   1882             camera3_stream_buffer_t *result_buffers =
   1883                 new camera3_stream_buffer_t[result.num_output_buffers];
   1884             if (!result_buffers) {
   1885                 ALOGE("%s: Fatal error: out of memory", __func__);
   1886             }
   1887             size_t result_buffers_idx = 0;
   1888             for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
   1889                     j != i->buffers.end(); j++) {
   1890                 if (j->buffer) {
   1891                     for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
   1892                             m != mPendingFrameDropList.end(); m++) {
   1893                         QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
   1894                         uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
   1895                         if((m->stream_ID == streamID) && (m->frame_number==frame_number)) {
   1896                             j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
   1897                             CDBG("%s: Stream STATUS_ERROR frame_number=%d, streamID=%d",
   1898                                   __func__, frame_number, streamID);
   1899                             m = mPendingFrameDropList.erase(m);
   1900                             break;
   1901                         }
   1902                     }
   1903 
   1904                     for (List<PendingBufferInfo>::iterator k =
   1905                       mPendingBuffersMap.mPendingBufferList.begin();
   1906                       k != mPendingBuffersMap.mPendingBufferList.end(); k++) {
   1907                       if (k->buffer == j->buffer->buffer) {
   1908                         CDBG("%s: Found buffer %p in pending buffer List "
   1909                               "for frame %d, Take it out!!", __func__,
   1910                                k->buffer, k->frame_number);
   1911                         mPendingBuffersMap.num_buffers--;
   1912                         k = mPendingBuffersMap.mPendingBufferList.erase(k);
   1913                         break;
   1914                       }
   1915                     }
   1916 
   1917                     result_buffers[result_buffers_idx++] = *(j->buffer);
   1918                     free(j->buffer);
   1919                     j->buffer = NULL;
   1920                 }
   1921             }
   1922             result.output_buffers = result_buffers;
   1923             mCallbackOps->process_capture_result(mCallbackOps, &result);
   1924             CDBG("%s: meta frame_number = %d, capture_time = %lld",
   1925                     __func__, result.frame_number, i->timestamp);
   1926             free_camera_metadata((camera_metadata_t *)result.result);
   1927             delete[] result_buffers;
   1928         } else {
   1929             mCallbackOps->process_capture_result(mCallbackOps, &result);
   1930             CDBG("%s: meta frame_number = %d, capture_time = %lld",
   1931                         __func__, result.frame_number, i->timestamp);
   1932             free_camera_metadata((camera_metadata_t *)result.result);
   1933         }
   1934         // erase the element from the list
   1935         i = mPendingRequestsList.erase(i);
   1936 
   1937         if (!mPendingReprocessResultList.empty()) {
   1938             handlePendingReprocResults(frame_number + 1);
   1939         }
   1940     }
   1941 
   1942 done_metadata:
   1943     for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
   1944         i != mPendingRequestsList.end() ;i++) {
   1945         i->pipeline_depth++;
   1946     }
   1947     unblockRequestIfNecessary();
   1948 
   1949 }
   1950 
   1951 /*===========================================================================
   1952  * FUNCTION   : handleBufferWithLock
   1953  *
   1954  * DESCRIPTION: Handles image buffer callback with mMutex lock held.
   1955  *
   1956  * PARAMETERS : @buffer: image buffer for the callback
   1957  *              @frame_number: frame number of the image buffer
   1958  *
   1959  * RETURN     :
   1960  *
   1961  *==========================================================================*/
   1962 void QCamera3HardwareInterface::handleBufferWithLock(
   1963     camera3_stream_buffer_t *buffer, uint32_t frame_number)
   1964 {
   1965     ATRACE_CALL();
   1966     // If the frame number doesn't exist in the pending request list,
   1967     // directly send the buffer to the frameworks, and update pending buffers map
   1968     // Otherwise, book-keep the buffer.
   1969     List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
   1970     while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
   1971         i++;
   1972     }
   1973     if (i == mPendingRequestsList.end()) {
   1974         // Verify all pending requests frame_numbers are greater
   1975         for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
   1976                 j != mPendingRequestsList.end(); j++) {
   1977             if (j->frame_number < frame_number) {
   1978                 ALOGE("%s: Error: pending frame number %d is smaller than %d",
   1979                         __func__, j->frame_number, frame_number);
   1980             }
   1981         }
   1982         camera3_capture_result_t result;
   1983         memset(&result, 0, sizeof(camera3_capture_result_t));
   1984         result.result = NULL;
   1985         result.frame_number = frame_number;
   1986         result.num_output_buffers = 1;
   1987         result.partial_result = 0;
   1988         for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
   1989                 m != mPendingFrameDropList.end(); m++) {
   1990             QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
   1991             uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
   1992             if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
   1993                 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
   1994                 CDBG("%s: Stream STATUS_ERROR frame_number=%d, streamID=%d",
   1995                         __func__, frame_number, streamID);
   1996                 m = mPendingFrameDropList.erase(m);
   1997                 break;
   1998             }
   1999         }
   2000         result.output_buffers = buffer;
   2001         CDBG("%s: result frame_number = %d, buffer = %p",
   2002                 __func__, frame_number, buffer->buffer);
   2003 
   2004         for (List<PendingBufferInfo>::iterator k =
   2005                 mPendingBuffersMap.mPendingBufferList.begin();
   2006                 k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
   2007             if (k->buffer == buffer->buffer) {
   2008                 CDBG("%s: Found Frame buffer, take it out from list",
   2009                         __func__);
   2010 
   2011                 mPendingBuffersMap.num_buffers--;
   2012                 k = mPendingBuffersMap.mPendingBufferList.erase(k);
   2013                 break;
   2014             }
   2015         }
   2016         CDBG("%s: mPendingBuffersMap.num_buffers = %d",
   2017             __func__, mPendingBuffersMap.num_buffers);
   2018 
   2019         mCallbackOps->process_capture_result(mCallbackOps, &result);
   2020     } else {
   2021         if (i->input_buffer) {
   2022             CameraMetadata settings;
   2023             camera3_notify_msg_t notify_msg;
   2024             memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
   2025             nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
   2026             if(i->settings) {
   2027                 settings = i->settings;
   2028                 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
   2029                     capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
   2030                 } else {
   2031                     ALOGE("%s: No timestamp in input settings! Using current one.",
   2032                             __func__);
   2033                 }
   2034             } else {
   2035                 ALOGE("%s: Input settings missing!", __func__);
   2036             }
   2037 
   2038             notify_msg.type = CAMERA3_MSG_SHUTTER;
   2039             notify_msg.message.shutter.frame_number = frame_number;
   2040             notify_msg.message.shutter.timestamp = capture_time;
   2041 
   2042             if (i->input_buffer->release_fence != -1) {
   2043                int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
   2044                close(i->input_buffer->release_fence);
   2045                if (rc != OK) {
   2046                ALOGE("%s: input buffer sync wait failed %d", __func__, rc);
   2047                }
   2048             }
   2049 
   2050             for (List<PendingBufferInfo>::iterator k =
   2051                     mPendingBuffersMap.mPendingBufferList.begin();
   2052                     k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
   2053                 if (k->buffer == buffer->buffer) {
   2054                     CDBG("%s: Found Frame buffer, take it out from list",
   2055                             __func__);
   2056 
   2057                     mPendingBuffersMap.num_buffers--;
   2058                     k = mPendingBuffersMap.mPendingBufferList.erase(k);
   2059                     break;
   2060                 }
   2061             }
   2062             CDBG("%s: mPendingBuffersMap.num_buffers = %d",
   2063                 __func__, mPendingBuffersMap.num_buffers);
   2064 
   2065             bool notifyNow = true;
   2066             for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
   2067                     j != mPendingRequestsList.end(); j++) {
   2068                 if (j->frame_number < frame_number) {
   2069                     notifyNow = false;
   2070                     break;
   2071                 }
   2072             }
   2073 
   2074             if (notifyNow) {
   2075                 camera3_capture_result result;
   2076                 memset(&result, 0, sizeof(camera3_capture_result));
   2077                 result.frame_number = frame_number;
   2078                 result.result = i->settings;
   2079                 result.input_buffer = i->input_buffer;
   2080                 result.num_output_buffers = 1;
   2081                 result.output_buffers = buffer;
   2082                 result.partial_result = PARTIAL_RESULT_COUNT;
   2083 
   2084                 mCallbackOps->notify(mCallbackOps, &notify_msg);
   2085                 mCallbackOps->process_capture_result(mCallbackOps, &result);
   2086                 CDBG("%s: Notify reprocess now %d!", __func__, frame_number);
   2087                 i = mPendingRequestsList.erase(i);
   2088                 mPendingRequest--;
   2089             } else {
   2090                 // Cache reprocess result for later
   2091                 PendingReprocessResult pendingResult;
   2092                 memset(&pendingResult, 0, sizeof(PendingReprocessResult));
   2093                 pendingResult.notify_msg = notify_msg;
   2094                 pendingResult.buffer = *buffer;
   2095                 pendingResult.frame_number = frame_number;
   2096                 mPendingReprocessResultList.push_back(pendingResult);
   2097                 CDBG("%s: Cache reprocess result %d!", __func__, frame_number);
   2098             }
   2099         } else {
   2100             for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
   2101                 j != i->buffers.end(); j++) {
   2102                 if (j->stream == buffer->stream) {
   2103                     if (j->buffer != NULL) {
   2104                         ALOGE("%s: Error: buffer is already set", __func__);
   2105                     } else {
   2106                         j->buffer = (camera3_stream_buffer_t *)malloc(
   2107                             sizeof(camera3_stream_buffer_t));
   2108                         *(j->buffer) = *buffer;
   2109                         CDBG("%s: cache buffer %p at result frame_number %d",
   2110                             __func__, buffer, frame_number);
   2111                     }
   2112                 }
   2113             }
   2114         }
   2115     }
   2116 }
   2117 
   2118 /*===========================================================================
   2119  * FUNCTION   : unblockRequestIfNecessary
   2120  *
   2121  * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
   2122  *              that mMutex is held when this function is called.
   2123  *
   2124  * PARAMETERS :
   2125  *
   2126  * RETURN     :
   2127  *
   2128  *==========================================================================*/
   2129 void QCamera3HardwareInterface::unblockRequestIfNecessary()
   2130 {
   2131    // Unblock process_capture_request
   2132    pthread_cond_signal(&mRequestCond);
   2133 }
   2134 
   2135 /*===========================================================================
   2136  * FUNCTION   : processCaptureRequest
   2137  *
   2138  * DESCRIPTION: process a capture request from camera service
   2139  *
   2140  * PARAMETERS :
   2141  *   @request : request from framework to process
   2142  *
   2143  * RETURN     :
   2144  *
   2145  *==========================================================================*/
   2146 int QCamera3HardwareInterface::processCaptureRequest(
   2147                     camera3_capture_request_t *request)
   2148 {
   2149     ATRACE_CALL();
   2150     int rc = NO_ERROR;
   2151     int32_t request_id;
   2152     CameraMetadata meta;
   2153 
   2154     pthread_mutex_lock(&mMutex);
   2155 
   2156     rc = validateCaptureRequest(request);
   2157     if (rc != NO_ERROR) {
   2158         ALOGE("%s: incoming request is not valid", __func__);
   2159         pthread_mutex_unlock(&mMutex);
   2160         return rc;
   2161     }
   2162 
   2163     meta = request->settings;
   2164 
   2165     // For first capture request, send capture intent, and
   2166     // stream on all streams
   2167     if (mFirstRequest) {
   2168 
   2169         // settings/parameters don't carry over for new configureStreams
   2170         int32_t hal_version = CAM_HAL_V3;
   2171         memset(mParameters, 0, sizeof(metadata_buffer_t));
   2172 
   2173         AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
   2174                 sizeof(hal_version), &hal_version);
   2175 
   2176         AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_INFO,
   2177                 sizeof(cam_stream_size_info_t), &mStreamConfigInfo);
   2178 
   2179         for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
   2180 		     CDBG_HIGH("%s STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x",
   2181                      __func__, mStreamConfigInfo.type[i],
   2182                      mStreamConfigInfo.stream_sizes[i].width,
   2183                      mStreamConfigInfo.stream_sizes[i].height,
   2184                      mStreamConfigInfo.postprocess_mask[i]);
   2185         }
   2186 
   2187         int32_t tintless_value = 1;
   2188         AddSetParmEntryToBatch(mParameters,CAM_INTF_PARM_TINTLESS,
   2189                 sizeof(tintless_value), &tintless_value);
   2190 
   2191         mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
   2192          /* get eis information for stream configuration */
   2193         cam_is_type_t is_type;
   2194         char is_type_value[PROPERTY_VALUE_MAX];
   2195         property_get("camera.is_type", is_type_value, "0");
   2196         is_type = static_cast<cam_is_type_t>(atoi(is_type_value));
   2197 
   2198         if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
   2199             int32_t hal_version = CAM_HAL_V3;
   2200             uint8_t captureIntent =
   2201                 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
   2202             mCaptureIntent = captureIntent;
   2203             memset(mParameters, 0, sizeof(parm_buffer_t));
   2204             AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
   2205                 sizeof(hal_version), &hal_version);
   2206             AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
   2207                 sizeof(captureIntent), &captureIntent);
   2208         }
   2209 
   2210         //If EIS is enabled, turn it on for video
   2211         //for camera use case, front camcorder and 4k video, no eis
   2212         bool setEis = mEisEnable && (gCamCapability[mCameraId]->position == CAM_POSITION_BACK &&
   2213             (mCaptureIntent ==  CAMERA3_TEMPLATE_VIDEO_RECORD ||
   2214              mCaptureIntent == CAMERA3_TEMPLATE_VIDEO_SNAPSHOT));
   2215         int32_t vsMode;
   2216         vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
   2217         rc = AddSetParmEntryToBatch(mParameters,
   2218                 CAM_INTF_PARM_DIS_ENABLE,
   2219                 sizeof(vsMode), &vsMode);
   2220 
   2221         //IS type will be 0 unless EIS is supported. If EIS is supported
   2222         //it could either be 1 or 4 depending on the stream and video size
   2223         if (setEis){
   2224             if (m_bIs4KVideo) {
   2225                 is_type = IS_TYPE_DIS;
   2226             } else {
   2227                 is_type = IS_TYPE_EIS_2_0;
   2228             }
   2229         }
   2230 
   2231         for (size_t i = 0; i < request->num_output_buffers; i++) {
   2232             const camera3_stream_buffer_t& output = request->output_buffers[i];
   2233             QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
   2234             /*for livesnapshot stream is_type will be DIS*/
   2235             if (setEis && output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
   2236                 rc = channel->registerBuffer(output.buffer, IS_TYPE_DIS);
   2237             } else {
   2238                 rc = channel->registerBuffer(output.buffer, is_type);
   2239             }
   2240             if (rc < 0) {
   2241                 ALOGE("%s: registerBuffer failed",
   2242                         __func__);
   2243                 pthread_mutex_unlock(&mMutex);
   2244                 return -ENODEV;
   2245             }
   2246         }
   2247 
   2248         /*set the capture intent, hal version and dis enable parameters to the backend*/
   2249         mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
   2250                     mParameters);
   2251 
   2252 
   2253         //First initialize all streams
   2254         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   2255             it != mStreamInfo.end(); it++) {
   2256             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
   2257             if (setEis && (*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
   2258                 rc = channel->initialize(IS_TYPE_DIS);
   2259             } else {
   2260                 rc = channel->initialize(is_type);
   2261             }
   2262             if (NO_ERROR != rc) {
   2263                 ALOGE("%s : Channel initialization failed %d", __func__, rc);
   2264                 pthread_mutex_unlock(&mMutex);
   2265                 return rc;
   2266             }
   2267         }
   2268 
   2269         if (mRawDumpChannel) {
   2270             rc = mRawDumpChannel->initialize(is_type);
   2271             if (rc != NO_ERROR) {
   2272                 ALOGE("%s: Error: Raw Dump Channel init failed", __func__);
   2273                 pthread_mutex_unlock(&mMutex);
   2274                 return rc;
   2275             }
   2276         }
   2277         if (mSupportChannel) {
   2278             rc = mSupportChannel->initialize(is_type);
   2279             if (rc < 0) {
   2280                 ALOGE("%s: Support channel initialization failed", __func__);
   2281                 pthread_mutex_unlock(&mMutex);
   2282                 return rc;
   2283             }
   2284         }
   2285 
   2286         //Then start them.
   2287         CDBG_HIGH("%s: Start META Channel", __func__);
   2288         rc = mMetadataChannel->start();
   2289         if (rc < 0) {
   2290             ALOGE("%s: META channel start failed", __func__);
   2291             pthread_mutex_unlock(&mMutex);
   2292             return rc;
   2293         }
   2294 
   2295         if (mSupportChannel) {
   2296             rc = mSupportChannel->start();
   2297             if (rc < 0) {
   2298                 ALOGE("%s: Support channel start failed", __func__);
   2299                 mMetadataChannel->stop();
   2300                 pthread_mutex_unlock(&mMutex);
   2301                 return rc;
   2302             }
   2303         }
   2304         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   2305             it != mStreamInfo.end(); it++) {
   2306             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
   2307             CDBG_HIGH("%s: Start Regular Channel mask=%d", __func__, channel->getStreamTypeMask());
   2308             rc = channel->start();
   2309             if (rc < 0) {
   2310                 ALOGE("%s: channel start failed", __func__);
   2311                 pthread_mutex_unlock(&mMutex);
   2312                 return rc;
   2313             }
   2314         }
   2315 
   2316         if (mRawDumpChannel) {
   2317             CDBG("%s: Starting raw dump stream",__func__);
   2318             rc = mRawDumpChannel->start();
   2319             if (rc != NO_ERROR) {
   2320                 ALOGE("%s: Error Starting Raw Dump Channel", __func__);
   2321                 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   2322                       it != mStreamInfo.end(); it++) {
   2323                     QCamera3Channel *channel =
   2324                         (QCamera3Channel *)(*it)->stream->priv;
   2325                     ALOGE("%s: Stopping Regular Channel mask=%d", __func__,
   2326                         channel->getStreamTypeMask());
   2327                     channel->stop();
   2328                 }
   2329                 if (mSupportChannel)
   2330                     mSupportChannel->stop();
   2331                 mMetadataChannel->stop();
   2332                 pthread_mutex_unlock(&mMutex);
   2333                 return rc;
   2334             }
   2335         }
   2336         mWokenUpByDaemon = false;
   2337         mPendingRequest = 0;
   2338     }
   2339 
   2340     uint32_t frameNumber = request->frame_number;
   2341     cam_stream_ID_t streamID;
   2342 
   2343     if (meta.exists(ANDROID_REQUEST_ID)) {
   2344         request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
   2345         mCurrentRequestId = request_id;
   2346         CDBG("%s: Received request with id: %d",__func__, request_id);
   2347     } else if (mFirstRequest || mCurrentRequestId == -1){
   2348         ALOGE("%s: Unable to find request id field, \
   2349                 & no previous id available", __func__);
   2350         return NAME_NOT_FOUND;
   2351     } else {
   2352         CDBG("%s: Re-using old request id", __func__);
   2353         request_id = mCurrentRequestId;
   2354     }
   2355 
   2356     CDBG("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
   2357                                     __func__, __LINE__,
   2358                                     request->num_output_buffers,
   2359                                     request->input_buffer,
   2360                                     frameNumber);
   2361     // Acquire all request buffers first
   2362     streamID.num_streams = 0;
   2363     int blob_request = 0;
   2364     uint32_t snapshotStreamId = 0;
   2365     for (size_t i = 0; i < request->num_output_buffers; i++) {
   2366         const camera3_stream_buffer_t& output = request->output_buffers[i];
   2367         QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
   2368 
   2369         if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
   2370             //Call function to store local copy of jpeg data for encode params.
   2371             blob_request = 1;
   2372             snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
   2373         }
   2374 
   2375         if (output.acquire_fence != -1) {
   2376            rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
   2377            close(output.acquire_fence);
   2378            if (rc != OK) {
   2379               ALOGE("%s: sync wait failed %d", __func__, rc);
   2380               pthread_mutex_unlock(&mMutex);
   2381               return rc;
   2382            }
   2383         }
   2384 
   2385         streamID.streamID[streamID.num_streams] =
   2386             channel->getStreamID(channel->getStreamTypeMask());
   2387         streamID.num_streams++;
   2388 
   2389 
   2390     }
   2391 
   2392     if (blob_request && mRawDumpChannel) {
   2393         CDBG("%s: Trigger Raw based on blob request if Raw dump is enabled", __func__);
   2394         streamID.streamID[streamID.num_streams] =
   2395             mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
   2396         streamID.num_streams++;
   2397     }
   2398 
   2399     if(request->input_buffer == NULL) {
   2400        rc = setFrameParameters(request, streamID, blob_request, snapshotStreamId);
   2401         if (rc < 0) {
   2402             ALOGE("%s: fail to set frame parameters", __func__);
   2403             pthread_mutex_unlock(&mMutex);
   2404             return rc;
   2405         }
   2406     } else {
   2407 
   2408         if (request->input_buffer->acquire_fence != -1) {
   2409            rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
   2410            close(request->input_buffer->acquire_fence);
   2411            if (rc != OK) {
   2412               ALOGE("%s: input buffer sync wait failed %d", __func__, rc);
   2413               pthread_mutex_unlock(&mMutex);
   2414               return rc;
   2415            }
   2416         }
   2417     }
   2418 
   2419     /* Update pending request list and pending buffers map */
   2420     PendingRequestInfo pendingRequest;
   2421     pendingRequest.frame_number = frameNumber;
   2422     pendingRequest.num_buffers = request->num_output_buffers;
   2423     pendingRequest.request_id = request_id;
   2424     pendingRequest.blob_request = blob_request;
   2425     pendingRequest.bUrgentReceived = 0;
   2426 
   2427     pendingRequest.input_buffer = request->input_buffer;
   2428     pendingRequest.settings = request->settings;
   2429     pendingRequest.pipeline_depth = 0;
   2430     pendingRequest.partial_result_cnt = 0;
   2431     extractJpegMetadata(pendingRequest.jpegMetadata, request);
   2432 
   2433     //extract capture intent
   2434     if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
   2435         mCaptureIntent =
   2436                 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
   2437     }
   2438     pendingRequest.capture_intent = mCaptureIntent;
   2439 
   2440     for (size_t i = 0; i < request->num_output_buffers; i++) {
   2441         RequestedBufferInfo requestedBuf;
   2442         requestedBuf.stream = request->output_buffers[i].stream;
   2443         requestedBuf.buffer = NULL;
   2444         pendingRequest.buffers.push_back(requestedBuf);
   2445 
   2446         // Add to buffer handle the pending buffers list
   2447         PendingBufferInfo bufferInfo;
   2448         bufferInfo.frame_number = frameNumber;
   2449         bufferInfo.buffer = request->output_buffers[i].buffer;
   2450         bufferInfo.stream = request->output_buffers[i].stream;
   2451         mPendingBuffersMap.mPendingBufferList.push_back(bufferInfo);
   2452         mPendingBuffersMap.num_buffers++;
   2453         CDBG("%s: frame = %d, buffer = %p, stream = %p, stream format = %d",
   2454           __func__, frameNumber, bufferInfo.buffer, bufferInfo.stream,
   2455           bufferInfo.stream->format);
   2456     }
   2457     CDBG("%s: mPendingBuffersMap.num_buffers = %d",
   2458           __func__, mPendingBuffersMap.num_buffers);
   2459 
   2460     mPendingBuffersMap.last_frame_number = frameNumber;
   2461     mPendingRequestsList.push_back(pendingRequest);
   2462 
   2463     if(mFlush) {
   2464         pthread_mutex_unlock(&mMutex);
   2465         return NO_ERROR;
   2466     }
   2467 
   2468     // Notify metadata channel we receive a request
   2469     mMetadataChannel->request(NULL, frameNumber);
   2470 
   2471     metadata_buffer_t reproc_meta;
   2472     memset(&reproc_meta, 0, sizeof(metadata_buffer_t));
   2473 
   2474     if(request->input_buffer != NULL){
   2475         rc = setReprocParameters(request, &reproc_meta, snapshotStreamId);
   2476         if (NO_ERROR != rc) {
   2477             ALOGE("%s: fail to set reproc parameters", __func__);
   2478             pthread_mutex_unlock(&mMutex);
   2479             return rc;
   2480         }
   2481     }
   2482 
   2483     // Call request on other streams
   2484     for (size_t i = 0; i < request->num_output_buffers; i++) {
   2485         const camera3_stream_buffer_t& output = request->output_buffers[i];
   2486         QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
   2487 
   2488         if (channel == NULL) {
   2489             ALOGE("%s: invalid channel pointer for stream", __func__);
   2490             continue;
   2491         }
   2492 
   2493         if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
   2494             if (request->input_buffer) {
   2495                 rc = channel->request(output.buffer, frameNumber,
   2496                         request->input_buffer, &reproc_meta);
   2497             } else if (!request->settings) {
   2498                 rc = channel->request(output.buffer, frameNumber,
   2499                         NULL, mPrevParameters);
   2500             } else {
   2501                 rc = channel->request(output.buffer, frameNumber,
   2502                         NULL, mParameters);
   2503             }
   2504             if (rc < 0) {
   2505                 ALOGE("%s: Fail to request on picture channel", __func__);
   2506                 pthread_mutex_unlock(&mMutex);
   2507                 return rc;
   2508             }
   2509         } else {
   2510             CDBG("%s: %d, request with buffer %p, frame_number %d", __func__,
   2511                 __LINE__, output.buffer, frameNumber);
   2512             rc = channel->request(output.buffer, frameNumber);
   2513         }
   2514         if (rc < 0)
   2515             ALOGE("%s: request failed", __func__);
   2516     }
   2517 
   2518     if(request->input_buffer == NULL) {
   2519         /*set the parameters to backend*/
   2520         mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
   2521     }
   2522 
   2523     mFirstRequest = false;
   2524     // Added a timed condition wait
   2525     struct timespec ts;
   2526     uint8_t isValidTimeout = 1;
   2527     rc = clock_gettime(CLOCK_REALTIME, &ts);
   2528     if (rc < 0) {
   2529       isValidTimeout = 0;
   2530       ALOGE("%s: Error reading the real time clock!!", __func__);
   2531     }
   2532     else {
   2533       // Make timeout as 5 sec for request to be honored
   2534       ts.tv_sec += 5;
   2535     }
   2536     //Block on conditional variable
   2537 
   2538     mPendingRequest++;
   2539     while (mPendingRequest >= MIN_INFLIGHT_REQUESTS) {
   2540         if (!isValidTimeout) {
   2541             CDBG("%s: Blocking on conditional wait", __func__);
   2542             pthread_cond_wait(&mRequestCond, &mMutex);
   2543         }
   2544         else {
   2545             CDBG("%s: Blocking on timed conditional wait", __func__);
   2546             rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
   2547             if (rc == ETIMEDOUT) {
   2548                 rc = -ENODEV;
   2549                 ALOGE("%s: Unblocked on timeout!!!!", __func__);
   2550                 break;
   2551             }
   2552         }
   2553         CDBG("%s: Unblocked", __func__);
   2554         if (mWokenUpByDaemon) {
   2555             mWokenUpByDaemon = false;
   2556             if (mPendingRequest < MAX_INFLIGHT_REQUESTS)
   2557                 break;
   2558         }
   2559     }
   2560     pthread_mutex_unlock(&mMutex);
   2561 
   2562     return rc;
   2563 }
   2564 
   2565 /*===========================================================================
   2566  * FUNCTION   : dump
   2567  *
   2568  * DESCRIPTION:
   2569  *
   2570  * PARAMETERS :
   2571  *
   2572  *
   2573  * RETURN     :
   2574  *==========================================================================*/
   2575 void QCamera3HardwareInterface::dump(int fd)
   2576 {
   2577     pthread_mutex_lock(&mMutex);
   2578     dprintf(fd, "\n Camera HAL3 information Begin \n");
   2579 
   2580     dprintf(fd, "\nNumber of pending requests: %d \n",
   2581         mPendingRequestsList.size());
   2582     dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
   2583     dprintf(fd, " Frame | Number of Buffers |   Req Id:   | Blob Req | Input buffer present\n");
   2584     dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
   2585     for(List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
   2586         i != mPendingRequestsList.end(); i++) {
   2587         dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
   2588         i->frame_number, i->num_buffers, i->request_id, i->blob_request,
   2589         i->input_buffer);
   2590     }
   2591     dprintf(fd, "\nPending buffer map: Number of buffers: %d\n",
   2592                 mPendingBuffersMap.num_buffers);
   2593     dprintf(fd, "-------+-------------\n");
   2594     dprintf(fd, " Frame | Stream type \n");
   2595     dprintf(fd, "-------+-------------\n");
   2596     for(List<PendingBufferInfo>::iterator i =
   2597         mPendingBuffersMap.mPendingBufferList.begin();
   2598         i != mPendingBuffersMap.mPendingBufferList.end(); i++) {
   2599         dprintf(fd, " %5d | %11d \n",
   2600             i->frame_number, i->stream->stream_type);
   2601     }
   2602     dprintf(fd, "-------+-------------\n");
   2603 
   2604     dprintf(fd, "\nPending frame drop list: %d\n",
   2605         mPendingFrameDropList.size());
   2606     dprintf(fd, "-------+-----------\n");
   2607     dprintf(fd, " Frame | Stream ID \n");
   2608     dprintf(fd, "-------+-----------\n");
   2609     for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
   2610         i != mPendingFrameDropList.end(); i++) {
   2611         dprintf(fd, " %5d | %9d \n",
   2612             i->frame_number, i->stream_ID);
   2613     }
   2614     dprintf(fd, "-------+-----------\n");
   2615 
   2616     dprintf(fd, "\n Camera HAL3 information End \n");
   2617     pthread_mutex_unlock(&mMutex);
   2618     return;
   2619 }
   2620 
   2621 /*===========================================================================
   2622  * FUNCTION   : flush
   2623  *
   2624  * DESCRIPTION:
   2625  *
   2626  * PARAMETERS :
   2627  *
   2628  *
   2629  * RETURN     :
   2630  *==========================================================================*/
   2631 int QCamera3HardwareInterface::flush()
   2632 {
   2633     ATRACE_CALL();
   2634     unsigned int frameNum = 0;
   2635     camera3_notify_msg_t notify_msg;
   2636     camera3_capture_result_t result;
   2637     camera3_stream_buffer_t *pStream_Buf = NULL;
   2638     FlushMap flushMap;
   2639 
   2640     CDBG("%s: Unblocking Process Capture Request", __func__);
   2641     pthread_mutex_lock(&mMutex);
   2642     mFlush = true;
   2643     pthread_mutex_unlock(&mMutex);
   2644 
   2645     memset(&result, 0, sizeof(camera3_capture_result_t));
   2646 
   2647     // Stop the Streams/Channels
   2648     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   2649         it != mStreamInfo.end(); it++) {
   2650         QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
   2651         channel->stop();
   2652         (*it)->status = INVALID;
   2653     }
   2654 
   2655     if (mSupportChannel) {
   2656         mSupportChannel->stop();
   2657     }
   2658     if (mRawDumpChannel) {
   2659         mRawDumpChannel->stop();
   2660     }
   2661     if (mMetadataChannel) {
   2662         /* If content of mStreamInfo is not 0, there is metadata stream */
   2663         mMetadataChannel->stop();
   2664     }
   2665 
   2666     // Mutex Lock
   2667     pthread_mutex_lock(&mMutex);
   2668 
   2669     // Unblock process_capture_request
   2670     mPendingRequest = 0;
   2671     pthread_cond_signal(&mRequestCond);
   2672 
   2673     List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
   2674     frameNum = i->frame_number;
   2675     CDBG("%s: Oldest frame num on  mPendingRequestsList = %d",
   2676       __func__, frameNum);
   2677 
   2678     // Go through the pending buffers and group them depending
   2679     // on frame number
   2680     for (List<PendingBufferInfo>::iterator k =
   2681             mPendingBuffersMap.mPendingBufferList.begin();
   2682             k != mPendingBuffersMap.mPendingBufferList.end();) {
   2683 
   2684         if (k->frame_number < frameNum) {
   2685             ssize_t idx = flushMap.indexOfKey(k->frame_number);
   2686             if (idx == NAME_NOT_FOUND) {
   2687                 Vector<PendingBufferInfo> pending;
   2688                 pending.add(*k);
   2689                 flushMap.add(k->frame_number, pending);
   2690             } else {
   2691                 Vector<PendingBufferInfo> &pending =
   2692                         flushMap.editValueFor(k->frame_number);
   2693                 pending.add(*k);
   2694             }
   2695 
   2696             mPendingBuffersMap.num_buffers--;
   2697             k = mPendingBuffersMap.mPendingBufferList.erase(k);
   2698         } else {
   2699             k++;
   2700         }
   2701     }
   2702 
   2703     for (size_t i = 0; i < flushMap.size(); i++) {
   2704         uint32_t frame_number = flushMap.keyAt(i);
   2705         const Vector<PendingBufferInfo> &pending = flushMap.valueAt(i);
   2706 
   2707         // Send Error notify to frameworks for each buffer for which
   2708         // metadata buffer is already sent
   2709         CDBG("%s: Sending ERROR BUFFER for frame %d number of buffer %d",
   2710           __func__, frame_number, pending.size());
   2711 
   2712         pStream_Buf = new camera3_stream_buffer_t[pending.size()];
   2713         if (NULL == pStream_Buf) {
   2714             ALOGE("%s: No memory for pending buffers array", __func__);
   2715             pthread_mutex_unlock(&mMutex);
   2716             return NO_MEMORY;
   2717         }
   2718 
   2719         for (size_t j = 0; j < pending.size(); j++) {
   2720             const PendingBufferInfo &info = pending.itemAt(j);
   2721             notify_msg.type = CAMERA3_MSG_ERROR;
   2722             notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
   2723             notify_msg.message.error.error_stream = info.stream;
   2724             notify_msg.message.error.frame_number = frame_number;
   2725             pStream_Buf[j].acquire_fence = -1;
   2726             pStream_Buf[j].release_fence = -1;
   2727             pStream_Buf[j].buffer = info.buffer;
   2728             pStream_Buf[j].status = CAMERA3_BUFFER_STATUS_ERROR;
   2729             pStream_Buf[j].stream = info.stream;
   2730             mCallbackOps->notify(mCallbackOps, &notify_msg);
   2731             CDBG("%s: notify frame_number = %d stream %p", __func__,
   2732                     frame_number, info.stream);
   2733         }
   2734 
   2735         result.result = NULL;
   2736         result.frame_number = frame_number;
   2737         result.num_output_buffers = pending.size();
   2738         result.output_buffers = pStream_Buf;
   2739         mCallbackOps->process_capture_result(mCallbackOps, &result);
   2740 
   2741         delete [] pStream_Buf;
   2742     }
   2743 
   2744     CDBG("%s:Sending ERROR REQUEST for all pending requests", __func__);
   2745 
   2746     flushMap.clear();
   2747     for (List<PendingBufferInfo>::iterator k =
   2748             mPendingBuffersMap.mPendingBufferList.begin();
   2749             k != mPendingBuffersMap.mPendingBufferList.end();) {
   2750         ssize_t idx = flushMap.indexOfKey(k->frame_number);
   2751         if (idx == NAME_NOT_FOUND) {
   2752             Vector<PendingBufferInfo> pending;
   2753             pending.add(*k);
   2754             flushMap.add(k->frame_number, pending);
   2755         } else {
   2756             Vector<PendingBufferInfo> &pending =
   2757                     flushMap.editValueFor(k->frame_number);
   2758             pending.add(*k);
   2759         }
   2760 
   2761         mPendingBuffersMap.num_buffers--;
   2762         k = mPendingBuffersMap.mPendingBufferList.erase(k);
   2763     }
   2764 
   2765     // Go through the pending requests info and send error request to framework
   2766     for (size_t i = 0; i < flushMap.size(); i++) {
   2767         uint32_t frame_number = flushMap.keyAt(i);
   2768         const Vector<PendingBufferInfo> &pending = flushMap.valueAt(i);
   2769         CDBG("%s:Sending ERROR REQUEST for frame %d",
   2770               __func__, frame_number);
   2771 
   2772         // Send shutter notify to frameworks
   2773         notify_msg.type = CAMERA3_MSG_ERROR;
   2774         notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
   2775         notify_msg.message.error.error_stream = NULL;
   2776         notify_msg.message.error.frame_number = frame_number;
   2777         mCallbackOps->notify(mCallbackOps, &notify_msg);
   2778 
   2779         pStream_Buf = new camera3_stream_buffer_t[pending.size()];
   2780         if (NULL == pStream_Buf) {
   2781             ALOGE("%s: No memory for pending buffers array", __func__);
   2782             pthread_mutex_unlock(&mMutex);
   2783             return NO_MEMORY;
   2784         }
   2785 
   2786         for (size_t j = 0; j < pending.size(); j++) {
   2787             const PendingBufferInfo &info = pending.itemAt(j);
   2788             pStream_Buf[j].acquire_fence = -1;
   2789             pStream_Buf[j].release_fence = -1;
   2790             pStream_Buf[j].buffer = info.buffer;
   2791             pStream_Buf[j].status = CAMERA3_BUFFER_STATUS_ERROR;
   2792             pStream_Buf[j].stream = info.stream;
   2793         }
   2794 
   2795         result.num_output_buffers = pending.size();
   2796         result.output_buffers = pStream_Buf;
   2797         result.result = NULL;
   2798         result.frame_number = frame_number;
   2799         mCallbackOps->process_capture_result(mCallbackOps, &result);
   2800         delete [] pStream_Buf;
   2801     }
   2802 
   2803     /* Reset pending buffer list and requests list */
   2804     mPendingRequestsList.clear();
   2805     /* Reset pending frame Drop list and requests list */
   2806     mPendingFrameDropList.clear();
   2807 
   2808     flushMap.clear();
   2809     mPendingBuffersMap.num_buffers = 0;
   2810     mPendingBuffersMap.mPendingBufferList.clear();
   2811     mPendingReprocessResultList.clear();
   2812     CDBG("%s: Cleared all the pending buffers ", __func__);
   2813 
   2814     mFlush = false;
   2815 
   2816     // Start the Streams/Channels
   2817     int rc = NO_ERROR;
   2818     if (mMetadataChannel) {
   2819         /* If content of mStreamInfo is not 0, there is metadata stream */
   2820         rc = mMetadataChannel->start();
   2821         if (rc < 0) {
   2822             ALOGE("%s: META channel start failed", __func__);
   2823             pthread_mutex_unlock(&mMutex);
   2824             return rc;
   2825         }
   2826     }
   2827     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   2828         it != mStreamInfo.end(); it++) {
   2829         QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
   2830         rc = channel->start();
   2831         if (rc < 0) {
   2832             ALOGE("%s: channel start failed", __func__);
   2833             pthread_mutex_unlock(&mMutex);
   2834             return rc;
   2835         }
   2836     }
   2837     if (mSupportChannel) {
   2838         rc = mSupportChannel->start();
   2839         if (rc < 0) {
   2840             ALOGE("%s: Support channel start failed", __func__);
   2841             pthread_mutex_unlock(&mMutex);
   2842             return rc;
   2843         }
   2844     }
   2845     if (mRawDumpChannel) {
   2846         rc = mRawDumpChannel->start();
   2847         if (rc < 0) {
   2848             ALOGE("%s: RAW dump channel start failed", __func__);
   2849             pthread_mutex_unlock(&mMutex);
   2850             return rc;
   2851         }
   2852     }
   2853 
   2854     pthread_mutex_unlock(&mMutex);
   2855 
   2856     return 0;
   2857 }
   2858 
   2859 /*===========================================================================
   2860  * FUNCTION   : captureResultCb
   2861  *
   2862  * DESCRIPTION: Callback handler for all capture result
   2863  *              (streams, as well as metadata)
   2864  *
   2865  * PARAMETERS :
   2866  *   @metadata : metadata information
   2867  *   @buffer   : actual gralloc buffer to be returned to frameworks.
   2868  *               NULL if metadata.
   2869  *
   2870  * RETURN     : NONE
   2871  *==========================================================================*/
   2872 void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
   2873                 camera3_stream_buffer_t *buffer, uint32_t frame_number)
   2874 {
   2875     pthread_mutex_lock(&mMutex);
   2876 
   2877     /* Assume flush() is called before any reprocessing. Send
   2878      * notify and result immediately upon receipt of any callback*/
   2879     if (mLoopBackResult) {
   2880         /* Send notify */
   2881         camera3_notify_msg_t notify_msg;
   2882         notify_msg.type = CAMERA3_MSG_SHUTTER;
   2883         notify_msg.message.shutter.frame_number = mLoopBackResult->frame_number;
   2884         notify_msg.message.shutter.timestamp = mLoopBackTimestamp;
   2885         mCallbackOps->notify(mCallbackOps, &notify_msg);
   2886         /* Send capture result */
   2887         mCallbackOps->process_capture_result(mCallbackOps, mLoopBackResult);
   2888         free_camera_metadata((camera_metadata_t *)mLoopBackResult->result);
   2889         free(mLoopBackResult);
   2890         mLoopBackResult = NULL;
   2891     }
   2892 
   2893     if (metadata_buf)
   2894         handleMetadataWithLock(metadata_buf);
   2895     else
   2896         handleBufferWithLock(buffer, frame_number);
   2897     pthread_mutex_unlock(&mMutex);
   2898 }
   2899 
   2900 /*===========================================================================
   2901  * FUNCTION   : translateFromHalMetadata
   2902  *
   2903  * DESCRIPTION:
   2904  *
   2905  * PARAMETERS :
   2906  *   @metadata : metadata information from callback
   2907  *   @timestamp: metadata buffer timestamp
   2908  *   @request_id: request id
   2909  *   @jpegMetadata: additional jpeg metadata
   2910  *
   2911  * RETURN     : camera_metadata_t*
   2912  *              metadata in a format specified by fwk
   2913  *==========================================================================*/
   2914 camera_metadata_t*
   2915 QCamera3HardwareInterface::translateFromHalMetadata(
   2916                                  metadata_buffer_t *metadata,
   2917                                  nsecs_t timestamp,
   2918                                  int32_t request_id,
   2919                                  const CameraMetadata& jpegMetadata,
   2920                                  uint8_t pipeline_depth,
   2921                                  uint8_t capture_intent)
   2922 {
   2923     CameraMetadata camMetadata;
   2924     camera_metadata_t* resultMetadata;
   2925 
   2926     if (jpegMetadata.entryCount())
   2927         camMetadata.append(jpegMetadata);
   2928 
   2929     camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
   2930     camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
   2931     camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
   2932     camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
   2933 
   2934     if (IS_META_AVAILABLE(CAM_INTF_META_FRAME_NUMBER, metadata)) {
   2935         int64_t frame_number = *(uint32_t *) POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
   2936         camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &frame_number, 1);
   2937     }
   2938 
   2939 
   2940     if (IS_META_AVAILABLE(CAM_INTF_PARM_FPS_RANGE, metadata)) {
   2941         int32_t fps_range[2];
   2942         cam_fps_range_t * float_range =
   2943           (cam_fps_range_t *)POINTER_OF_PARAM(CAM_INTF_PARM_FPS_RANGE, metadata);
   2944         fps_range[0] = (int32_t)float_range->min_fps;
   2945         fps_range[1] = (int32_t)float_range->max_fps;
   2946         camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
   2947                                       fps_range, 2);
   2948         CDBG("%s: urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
   2949             __func__, fps_range[0], fps_range[1]);
   2950     }
   2951 
   2952 
   2953     if (IS_META_AVAILABLE(CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata)) {
   2954         int32_t  *expCompensation =
   2955           (int32_t *)POINTER_OF_META(CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata);
   2956         camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
   2957                                       expCompensation, 1);
   2958     }
   2959 
   2960     if (IS_META_AVAILABLE(CAM_INTF_PARM_BESTSHOT_MODE, metadata)) {
   2961         uint8_t sceneMode =
   2962                 *((uint32_t *)POINTER_OF_META(CAM_INTF_PARM_BESTSHOT_MODE, metadata));
   2963         uint8_t fwkSceneMode =
   2964             (uint8_t)lookupFwkName(SCENE_MODES_MAP,
   2965             sizeof(SCENE_MODES_MAP)/
   2966             sizeof(SCENE_MODES_MAP[0]), sceneMode);
   2967         camMetadata.update(ANDROID_CONTROL_SCENE_MODE,
   2968              &fwkSceneMode, 1);
   2969     }
   2970 
   2971     if (IS_META_AVAILABLE(CAM_INTF_PARM_AEC_LOCK, metadata)) {
   2972         uint8_t  ae_lock =
   2973                 *((uint32_t *)POINTER_OF_META(CAM_INTF_PARM_AEC_LOCK, metadata));
   2974         camMetadata.update(ANDROID_CONTROL_AE_LOCK,
   2975                 &ae_lock, 1);
   2976     }
   2977 
   2978     if (IS_META_AVAILABLE(CAM_INTF_PARM_AWB_LOCK, metadata)) {
   2979         uint8_t awb_lock =
   2980                 *((uint32_t *)POINTER_OF_META(CAM_INTF_PARM_AWB_LOCK, metadata));
   2981         camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &awb_lock, 1);
   2982     }
   2983 
   2984     if (IS_META_AVAILABLE(CAM_INTF_META_FACE_DETECTION, metadata)){
   2985         cam_face_detection_data_t *faceDetectionInfo =
   2986             (cam_face_detection_data_t *)POINTER_OF_META(CAM_INTF_META_FACE_DETECTION, metadata);
   2987         uint8_t numFaces = MIN(faceDetectionInfo->num_faces_detected, MAX_ROI);
   2988         int32_t faceIds[MAX_ROI];
   2989         uint8_t faceScores[MAX_ROI];
   2990         int32_t faceRectangles[MAX_ROI * 4];
   2991         int32_t faceLandmarks[MAX_ROI * 6];
   2992         int j = 0, k = 0;
   2993         for (int i = 0; i < numFaces; i++) {
   2994             faceIds[i] = faceDetectionInfo->faces[i].face_id;
   2995             faceScores[i] = faceDetectionInfo->faces[i].score;
   2996             convertToRegions(faceDetectionInfo->faces[i].face_boundary,
   2997                 faceRectangles+j, -1);
   2998             convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
   2999             j+= 4;
   3000             k+= 6;
   3001         }
   3002         if (numFaces <= 0) {
   3003             memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
   3004             memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
   3005             memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
   3006             memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
   3007         }
   3008         camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
   3009         camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
   3010         camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
   3011             faceRectangles, numFaces*4);
   3012         camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
   3013             faceLandmarks, numFaces*6);
   3014     }
   3015     if (IS_META_AVAILABLE(CAM_INTF_META_COLOR_CORRECT_MODE, metadata)){
   3016         uint8_t  *color_correct_mode =
   3017             (uint8_t *)POINTER_OF_META(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
   3018         camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);
   3019     }
   3020     if (IS_META_AVAILABLE(CAM_INTF_META_EDGE_MODE, metadata)) {
   3021         cam_edge_application_t  *edgeApplication =
   3022             (cam_edge_application_t *)POINTER_OF_META(CAM_INTF_META_EDGE_MODE, metadata);
   3023         uint8_t edgeStrength = (uint8_t)edgeApplication->sharpness;
   3024         camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
   3025         camMetadata.update(ANDROID_EDGE_STRENGTH, &edgeStrength, 1);
   3026     }
   3027     if (IS_META_AVAILABLE(CAM_INTF_META_FLASH_POWER, metadata)) {
   3028         uint8_t  *flashPower =
   3029             (uint8_t *)POINTER_OF_META(CAM_INTF_META_FLASH_POWER, metadata);
   3030         camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);
   3031     }
   3032     if (IS_META_AVAILABLE(CAM_INTF_META_FLASH_FIRING_TIME, metadata)) {
   3033         int64_t  *flashFiringTime =
   3034             (int64_t *)POINTER_OF_META(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
   3035         camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
   3036     }
   3037     if (IS_META_AVAILABLE(CAM_INTF_META_FLASH_STATE, metadata)) {
   3038         uint8_t  flashState =
   3039             *((uint8_t *)POINTER_OF_META(CAM_INTF_META_FLASH_STATE, metadata));
   3040         if (!gCamCapability[mCameraId]->flash_available) {
   3041             flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
   3042         }
   3043         camMetadata.update(ANDROID_FLASH_STATE, &flashState, 1);
   3044     }
   3045     if (IS_META_AVAILABLE(CAM_INTF_META_FLASH_MODE, metadata)){
   3046         uint8_t flashMode = *((uint8_t*)
   3047             POINTER_OF_META(CAM_INTF_META_FLASH_MODE, metadata));
   3048         uint8_t fwk_flashMode = lookupFwkName(FLASH_MODES_MAP,
   3049             sizeof(FLASH_MODES_MAP), flashMode);
   3050         camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
   3051     }
   3052     if (IS_META_AVAILABLE(CAM_INTF_META_HOTPIXEL_MODE, metadata)) {
   3053         uint8_t  *hotPixelMode =
   3054             (uint8_t *)POINTER_OF_META(CAM_INTF_META_HOTPIXEL_MODE, metadata);
   3055         camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);
   3056     }
   3057     if (IS_META_AVAILABLE(CAM_INTF_META_LENS_APERTURE, metadata)){
   3058         float  *lensAperture =
   3059             (float *)POINTER_OF_META(CAM_INTF_META_LENS_APERTURE, metadata);
   3060         camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
   3061     }
   3062     if (IS_META_AVAILABLE(CAM_INTF_META_LENS_FILTERDENSITY, metadata)) {
   3063         float  *filterDensity =
   3064             (float *)POINTER_OF_META(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
   3065         camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
   3066     }
   3067     if (IS_META_AVAILABLE(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata)){
   3068         float  *focalLength =
   3069             (float *)POINTER_OF_META(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
   3070         camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
   3071     }
   3072 
   3073     if (IS_META_AVAILABLE(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata)) {
   3074         uint8_t  *opticalStab =
   3075             (uint8_t *)POINTER_OF_META(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
   3076         camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);
   3077     }
   3078     if (IS_META_AVAILABLE(CAM_INTF_PARM_DIS_ENABLE, metadata)) {
   3079         uint8_t *vsMode =
   3080             (uint8_t *)POINTER_OF_META(CAM_INTF_PARM_DIS_ENABLE, metadata);
   3081         camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, vsMode, 1);
   3082     }
   3083 
   3084     if (IS_META_AVAILABLE(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata)) {
   3085         uint8_t  *noiseRedMode =
   3086             (uint8_t *)POINTER_OF_META(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
   3087         camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);
   3088     }
   3089     if (IS_META_AVAILABLE(CAM_INTF_META_NOISE_REDUCTION_STRENGTH, metadata)) {
   3090         uint8_t  *noiseRedStrength =
   3091             (uint8_t *)POINTER_OF_META(CAM_INTF_META_NOISE_REDUCTION_STRENGTH, metadata);
   3092         camMetadata.update(ANDROID_NOISE_REDUCTION_STRENGTH, noiseRedStrength, 1);
   3093     }
   3094     if (IS_META_AVAILABLE(CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata)) {
   3095         float  *effectiveExposureFactor =
   3096             (float *)POINTER_OF_META(CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata);
   3097         camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
   3098     }
   3099     if (IS_META_AVAILABLE(CAM_INTF_META_SCALER_CROP_REGION, metadata)) {
   3100         cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
   3101             POINTER_OF_META(CAM_INTF_META_SCALER_CROP_REGION, metadata);
   3102         int32_t scalerCropRegion[4];
   3103         scalerCropRegion[0] = hScalerCropRegion->left;
   3104         scalerCropRegion[1] = hScalerCropRegion->top;
   3105         scalerCropRegion[2] = hScalerCropRegion->width;
   3106         scalerCropRegion[3] = hScalerCropRegion->height;
   3107         camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
   3108     }
   3109     if (IS_META_AVAILABLE(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata)){
   3110         int64_t  *sensorExpTime =
   3111             (int64_t *)POINTER_OF_META(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
   3112         CDBG("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
   3113         camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
   3114     }
   3115     if (IS_META_AVAILABLE(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata)){
   3116         int64_t  *sensorFameDuration =
   3117             (int64_t *)POINTER_OF_META(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
   3118         CDBG("%s: sensorFameDuration = %lld", __func__, *sensorFameDuration);
   3119         camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
   3120     }
   3121     if (IS_META_AVAILABLE(CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata)){
   3122         int64_t  *sensorRollingShutterSkew =
   3123             (int64_t *)POINTER_OF_META(CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW,
   3124                 metadata);
   3125         CDBG("%s: sensorRollingShutterSkew = %lld", __func__, *sensorRollingShutterSkew);
   3126         camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
   3127                 sensorRollingShutterSkew, 1);
   3128     }
   3129 
   3130     if (IS_META_AVAILABLE(CAM_INTF_META_SENSOR_SENSITIVITY, metadata)){
   3131         int32_t sensorSensitivity =
   3132             *((int32_t *)POINTER_OF_META(CAM_INTF_META_SENSOR_SENSITIVITY, metadata));
   3133         CDBG("%s: sensorSensitivity = %d", __func__, sensorSensitivity);
   3134         camMetadata.update(ANDROID_SENSOR_SENSITIVITY, &sensorSensitivity, 1);
   3135 
   3136         //calculate the noise profile based on sensitivity
   3137         double noise_profile_S = computeNoiseModelEntryS(sensorSensitivity);
   3138         double noise_profile_O = computeNoiseModelEntryO(sensorSensitivity);
   3139         double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
   3140         for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i +=2) {
   3141            noise_profile[i]   = noise_profile_S;
   3142            noise_profile[i+1] = noise_profile_O;
   3143         }
   3144         CDBG("%s: noise model entry (S, O) is (%f, %f)", __func__,
   3145              noise_profile_S, noise_profile_O);
   3146         camMetadata.update( ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
   3147                             2 * gCamCapability[mCameraId]->num_color_channels);
   3148     }
   3149 
   3150 
   3151     if (IS_META_AVAILABLE(CAM_INTF_META_SHADING_MODE, metadata)) {
   3152         uint8_t  *shadingMode =
   3153             (uint8_t *)POINTER_OF_META(CAM_INTF_META_SHADING_MODE, metadata);
   3154      camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);
   3155     }
   3156     if (IS_META_AVAILABLE(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata)) {
   3157         uint8_t  *faceDetectMode =
   3158             (uint8_t *)POINTER_OF_META(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
   3159         uint8_t fwk_faceDetectMode = (uint8_t)lookupFwkName(FACEDETECT_MODES_MAP,
   3160             sizeof(FACEDETECT_MODES_MAP)/sizeof(FACEDETECT_MODES_MAP[0]), *faceDetectMode);
   3161         camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
   3162     }
   3163     if (IS_META_AVAILABLE(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata)) {
   3164         uint8_t  *histogramMode =
   3165             (uint8_t *)POINTER_OF_META(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
   3166          camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);
   3167     }
   3168     if (IS_META_AVAILABLE(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata)){
   3169        uint8_t  *sharpnessMapMode =
   3170           (uint8_t *)POINTER_OF_META(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
   3171        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
   3172                           sharpnessMapMode, 1);
   3173     }
   3174     if (IS_META_AVAILABLE(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata)){
   3175        cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
   3176        POINTER_OF_META(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
   3177        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
   3178                           (int32_t*)sharpnessMap->sharpness,
   3179                           CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);
   3180     }
   3181     if (IS_META_AVAILABLE(CAM_INTF_META_LENS_SHADING_MAP, metadata)) {
   3182        cam_lens_shading_map_t *lensShadingMap = (cam_lens_shading_map_t *)
   3183        POINTER_OF_META(CAM_INTF_META_LENS_SHADING_MAP, metadata);
   3184        int map_height = gCamCapability[mCameraId]->lens_shading_map_size.height;
   3185        int map_width  = gCamCapability[mCameraId]->lens_shading_map_size.width;
   3186        camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
   3187                           (float*)lensShadingMap->lens_shading,
   3188                           4*map_width*map_height);
   3189     }
   3190     if (IS_META_AVAILABLE(CAM_INTF_META_TONEMAP_MODE, metadata)) {
   3191         uint8_t  *toneMapMode =
   3192             (uint8_t *)POINTER_OF_META(CAM_INTF_META_TONEMAP_MODE, metadata);
   3193         camMetadata.update(ANDROID_TONEMAP_MODE, toneMapMode, 1);
   3194     }
   3195     if (IS_META_AVAILABLE(CAM_INTF_META_TONEMAP_CURVES, metadata)){
   3196         //Populate CAM_INTF_META_TONEMAP_CURVES
   3197         /* ch0 = G, ch 1 = B, ch 2 = R*/
   3198         cam_rgb_tonemap_curves *tonemap = (cam_rgb_tonemap_curves *)
   3199                 POINTER_OF_META(CAM_INTF_META_TONEMAP_CURVES, metadata);
   3200         if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
   3201             ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d",
   3202                     __func__, tonemap->tonemap_points_cnt,
   3203                     CAM_MAX_TONEMAP_CURVE_SIZE);
   3204             tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
   3205         }
   3206 
   3207         camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
   3208                         (float*)tonemap->curves[0].tonemap_points,
   3209                         tonemap->tonemap_points_cnt * 2);
   3210 
   3211         camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
   3212                         (float*)tonemap->curves[1].tonemap_points,
   3213                         tonemap->tonemap_points_cnt * 2);
   3214 
   3215         camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
   3216                         (float*)tonemap->curves[2].tonemap_points,
   3217                         tonemap->tonemap_points_cnt * 2);
   3218     }
   3219     if (IS_META_AVAILABLE(CAM_INTF_META_COLOR_CORRECT_GAINS, metadata)){
   3220         cam_color_correct_gains_t *colorCorrectionGains = (cam_color_correct_gains_t*)
   3221             POINTER_OF_META(CAM_INTF_META_COLOR_CORRECT_GAINS, metadata);
   3222         camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains, 4);
   3223     }
   3224     if (IS_META_AVAILABLE(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata)){
   3225         cam_color_correct_matrix_t *colorCorrectionMatrix = (cam_color_correct_matrix_t*)
   3226         POINTER_OF_META(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata);
   3227         camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
   3228             (camera_metadata_rational_t*)colorCorrectionMatrix->transform_matrix, 3*3);
   3229     }
   3230     if (IS_META_AVAILABLE(CAM_INTF_META_PROFILE_TONE_CURVE, metadata)) {
   3231         cam_profile_tone_curve *toneCurve = (cam_profile_tone_curve *)
   3232                 POINTER_OF_META(CAM_INTF_META_PROFILE_TONE_CURVE, metadata);
   3233         if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
   3234             ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d",
   3235                     __func__, toneCurve->tonemap_points_cnt,
   3236                     CAM_MAX_TONEMAP_CURVE_SIZE);
   3237             toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
   3238         }
   3239         camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
   3240                 (float*)toneCurve->curve.tonemap_points,
   3241                 toneCurve->tonemap_points_cnt * 2);
   3242     }
   3243     if (IS_META_AVAILABLE(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata)){
   3244         cam_color_correct_gains_t *predColorCorrectionGains = (cam_color_correct_gains_t*)
   3245             POINTER_OF_META(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata);
   3246         camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
   3247             predColorCorrectionGains->gains, 4);
   3248     }
   3249     if (IS_META_AVAILABLE(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata)){
   3250         cam_color_correct_matrix_t *predColorCorrectionMatrix = (cam_color_correct_matrix_t*)
   3251             POINTER_OF_META(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata);
   3252         camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
   3253             (camera_metadata_rational_t*)predColorCorrectionMatrix->transform_matrix, 3*3);
   3254     }
   3255     if (IS_META_AVAILABLE(CAM_INTF_META_OTP_WB_GRGB, metadata)) {
   3256         float *otpWbGrGb = (float*) POINTER_OF_META(
   3257                 CAM_INTF_META_OTP_WB_GRGB, metadata);
   3258         camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
   3259     }
   3260     if (IS_META_AVAILABLE(CAM_INTF_META_BLACK_LEVEL_LOCK, metadata)){
   3261         uint8_t *blackLevelLock = (uint8_t*)
   3262             POINTER_OF_META(CAM_INTF_META_BLACK_LEVEL_LOCK, metadata);
   3263         camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, blackLevelLock, 1);
   3264     }
   3265     if (IS_META_AVAILABLE(CAM_INTF_META_SCENE_FLICKER, metadata)){
   3266         uint8_t *sceneFlicker = (uint8_t*)
   3267             POINTER_OF_META(CAM_INTF_META_SCENE_FLICKER, metadata);
   3268         camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, sceneFlicker, 1);
   3269     }
   3270     if (IS_META_AVAILABLE(CAM_INTF_PARM_EFFECT, metadata)) {
   3271         uint8_t *effectMode = (uint8_t*)
   3272             POINTER_OF_META(CAM_INTF_PARM_EFFECT, metadata);
   3273         uint8_t fwk_effectMode = (uint8_t)lookupFwkName(EFFECT_MODES_MAP,
   3274                                             sizeof(EFFECT_MODES_MAP),
   3275                                             *effectMode);
   3276         camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
   3277     }
   3278     if (IS_META_AVAILABLE(CAM_INTF_META_TEST_PATTERN_DATA, metadata)) {
   3279         cam_test_pattern_data_t *testPatternData = (cam_test_pattern_data_t *)
   3280             POINTER_OF_META(CAM_INTF_META_TEST_PATTERN_DATA, metadata);
   3281         int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
   3282                 sizeof(TEST_PATTERN_MAP)/sizeof(TEST_PATTERN_MAP[0]),
   3283                 testPatternData->mode);
   3284         camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE,
   3285                 &fwk_testPatternMode, 1);
   3286         int32_t fwk_testPatternData[4];
   3287         fwk_testPatternData[0] = testPatternData->r;
   3288         fwk_testPatternData[3] = testPatternData->b;
   3289         switch (gCamCapability[mCameraId]->color_arrangement) {
   3290         case CAM_FILTER_ARRANGEMENT_RGGB:
   3291         case CAM_FILTER_ARRANGEMENT_GRBG:
   3292             fwk_testPatternData[1] = testPatternData->gr;
   3293             fwk_testPatternData[2] = testPatternData->gb;
   3294             break;
   3295         case CAM_FILTER_ARRANGEMENT_GBRG:
   3296         case CAM_FILTER_ARRANGEMENT_BGGR:
   3297             fwk_testPatternData[2] = testPatternData->gr;
   3298             fwk_testPatternData[1] = testPatternData->gb;
   3299             break;
   3300         default:
   3301             ALOGE("%s: color arrangement %d is not supported", __func__,
   3302                 gCamCapability[mCameraId]->color_arrangement);
   3303             break;
   3304         }
   3305         camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
   3306     }
   3307     if (IS_META_AVAILABLE(CAM_INTF_META_JPEG_GPS_COORDINATES, metadata)) {
   3308         double *gps_coords = (double *)POINTER_OF_META(
   3309             CAM_INTF_META_JPEG_GPS_COORDINATES, metadata);
   3310         camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
   3311     }
   3312     if (IS_META_AVAILABLE(CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata)) {
   3313         char *gps_methods = (char *)POINTER_OF_META(
   3314             CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata);
   3315         String8 str(gps_methods);
   3316         camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
   3317     }
   3318     if (IS_META_AVAILABLE(CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata)) {
   3319         int64_t *gps_timestamp = (int64_t *)POINTER_OF_META(
   3320                 CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata);
   3321         camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
   3322     }
   3323     if (IS_META_AVAILABLE(CAM_INTF_META_JPEG_ORIENTATION, metadata)) {
   3324         int32_t *jpeg_orientation = (int32_t *)POINTER_OF_META(
   3325                 CAM_INTF_META_JPEG_ORIENTATION, metadata);
   3326         camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
   3327     }
   3328     if (IS_META_AVAILABLE(CAM_INTF_META_JPEG_QUALITY, metadata)) {
   3329         uint8_t *jpeg_quality = (uint8_t *)POINTER_OF_META(
   3330                 CAM_INTF_META_JPEG_QUALITY, metadata);
   3331         camMetadata.update(ANDROID_JPEG_QUALITY, jpeg_quality, 1);
   3332     }
   3333     if (IS_META_AVAILABLE(CAM_INTF_META_JPEG_THUMB_QUALITY, metadata)) {
   3334         uint8_t *thumb_quality = (uint8_t *)POINTER_OF_META(
   3335                 CAM_INTF_META_JPEG_THUMB_QUALITY, metadata);
   3336         camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, thumb_quality, 1);
   3337     }
   3338     if (IS_META_AVAILABLE(CAM_INTF_META_JPEG_THUMB_SIZE, metadata)) {
   3339         cam_dimension_t *thumb_size = (cam_dimension_t *)POINTER_OF_META(
   3340                 CAM_INTF_META_JPEG_THUMB_SIZE, metadata);
   3341         // Note: cam_dimension_t should have the right layout, but for safety just copy it.
   3342         int32_t thumbnail_size[2];
   3343         thumbnail_size[0] = thumb_size->width;
   3344         thumbnail_size[1] = thumb_size->height;
   3345         camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size, 2);
   3346     }
   3347     if (IS_META_AVAILABLE(CAM_INTF_META_PRIVATE_DATA, metadata)) {
   3348         int32_t *privateData = (int32_t *)
   3349                 POINTER_OF_META(CAM_INTF_META_PRIVATE_DATA, metadata);
   3350         camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
   3351                 privateData, MAX_METADATA_PRIVATE_PAYLOAD_SIZE);
   3352     }
   3353     if (metadata->is_tuning_params_valid) {
   3354         uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
   3355         uint8_t *data = (uint8_t*)&tuning_meta_data_blob[0];
   3356         metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
   3357 
   3358 
   3359         memcpy(data, ((uint8_t*)&metadata->tuning_params.tuning_data_version),
   3360                 sizeof(uint32_t));
   3361         data += sizeof(uint32_t);
   3362 
   3363         memcpy(data, ((uint8_t*)&metadata->tuning_params.tuning_sensor_data_size),
   3364                 sizeof(uint32_t));
   3365         CDBG("tuning_sensor_data_size %d",(int)(*(int *)data));
   3366         data += sizeof(uint32_t);
   3367 
   3368         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
   3369                 sizeof(uint32_t));
   3370         CDBG("tuning_vfe_data_size %d",(int)(*(int *)data));
   3371         data += sizeof(uint32_t);
   3372 
   3373         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
   3374                 sizeof(uint32_t));
   3375         CDBG("tuning_cpp_data_size %d",(int)(*(int *)data));
   3376         data += sizeof(uint32_t);
   3377 
   3378         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
   3379                 sizeof(uint32_t));
   3380         CDBG("tuning_cac_data_size %d",(int)(*(int *)data));
   3381         data += sizeof(uint32_t);
   3382 
   3383         metadata->tuning_params.tuning_mod3_data_size = 0;
   3384         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
   3385                 sizeof(uint32_t));
   3386         CDBG("tuning_mod3_data_size %d",(int)(*(int *)data));
   3387         data += sizeof(uint32_t);
   3388 
   3389         memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
   3390                 metadata->tuning_params.tuning_sensor_data_size);
   3391         data += metadata->tuning_params.tuning_sensor_data_size;
   3392 
   3393         memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
   3394                 metadata->tuning_params.tuning_vfe_data_size);
   3395         data += metadata->tuning_params.tuning_vfe_data_size;
   3396 
   3397         memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
   3398                 metadata->tuning_params.tuning_cpp_data_size);
   3399         data += metadata->tuning_params.tuning_cpp_data_size;
   3400 
   3401 
   3402         memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
   3403                 metadata->tuning_params.tuning_cac_data_size);
   3404         data += metadata->tuning_params.tuning_cac_data_size;
   3405 
   3406         camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
   3407             (int32_t*)tuning_meta_data_blob, (data-tuning_meta_data_blob)/sizeof(uint32_t));
   3408     }
   3409     if (IS_META_AVAILABLE(CAM_INTF_META_NEUTRAL_COL_POINT, metadata)) {
   3410         cam_neutral_col_point_t *neuColPoint = (cam_neutral_col_point_t*)
   3411                 POINTER_OF_META(CAM_INTF_META_NEUTRAL_COL_POINT, metadata);
   3412         camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
   3413                 (camera_metadata_rational_t*)neuColPoint->neutral_col_point, 3);
   3414     }
   3415 
   3416     if (IS_META_AVAILABLE(CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata)) {
   3417          uint8_t  shadingMapMode =
   3418                  *((uint32_t *)POINTER_OF_META(CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata));
   3419          camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingMapMode, 1);
   3420     }
   3421 
   3422     if (IS_META_AVAILABLE(CAM_INTF_META_AEC_ROI, metadata)) {
   3423         cam_area_t  *hAeRegions =
   3424                 (cam_area_t *)POINTER_OF_META(CAM_INTF_META_AEC_ROI, metadata);
   3425         int32_t aeRegions[5];
   3426         convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
   3427         camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);
   3428         CDBG("%s: Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
   3429                 __func__, aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
   3430                 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
   3431                 hAeRegions->rect.height);
   3432     }
   3433 
   3434     if (IS_META_AVAILABLE(CAM_INTF_META_AF_ROI, metadata)) {
   3435         /*af regions*/
   3436         cam_area_t  *hAfRegions =
   3437                 (cam_area_t *)POINTER_OF_META(CAM_INTF_META_AF_ROI, metadata);
   3438         int32_t afRegions[5];
   3439         convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
   3440         camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);
   3441         CDBG("%s: Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
   3442                 __func__, afRegions[0], afRegions[1], afRegions[2], afRegions[3],
   3443                 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
   3444                 hAfRegions->rect.height);
   3445     }
   3446 
   3447     if (IS_META_AVAILABLE(CAM_INTF_PARM_ANTIBANDING, metadata)) {
   3448         uint8_t hal_ab_mode =
   3449                 *((uint32_t *)POINTER_OF_META(CAM_INTF_PARM_ANTIBANDING, metadata));
   3450         uint8_t fwk_ab_mode = (uint8_t)lookupFwkName(ANTIBANDING_MODES_MAP,
   3451                 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
   3452                 hal_ab_mode);
   3453         camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE,
   3454                 &fwk_ab_mode, 1);
   3455     }
   3456 
   3457     if (IS_META_AVAILABLE(CAM_INTF_META_MODE, metadata)) {
   3458          uint8_t mode =
   3459                  *((uint32_t *)POINTER_OF_META(CAM_INTF_META_MODE, metadata));
   3460          camMetadata.update(ANDROID_CONTROL_MODE, &mode, 1);
   3461     }
   3462 
   3463     /* Constant metadata values to be update*/
   3464     uint8_t hotPixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
   3465     camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelMode, 1);
   3466 
   3467     uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
   3468     camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
   3469 
   3470     int32_t hotPixelMap[2];
   3471     camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
   3472 
   3473     uint8_t vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
   3474     camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
   3475 
   3476     // CDS
   3477     if (IS_META_AVAILABLE(CAM_INTF_PARM_CDS_MODE, metadata)) {
   3478         cam_cds_mode_type_t *cds = (cam_cds_mode_type_t *)
   3479                 POINTER_OF_META(CAM_INTF_PARM_CDS_MODE, metadata);
   3480         int32_t mode = *cds;
   3481         camMetadata.update(QCAMERA3_CDS_MODE,
   3482                 &mode, 1);
   3483     }
   3484 
   3485     // Reprocess crop data
   3486     if (IS_META_AVAILABLE(CAM_INTF_META_CROP_DATA, metadata)) {
   3487         cam_crop_data_t *crop_data = (cam_crop_data_t *)
   3488                 POINTER_OF_PARAM(CAM_INTF_META_CROP_DATA, metadata);
   3489         uint8_t cnt = crop_data->num_of_streams;
   3490         if ((0 < cnt) && (cnt < MAX_NUM_STREAMS)) {
   3491             int rc = NO_ERROR;
   3492             int32_t *crop = new int32_t[cnt*4];
   3493             if (NULL == crop) {
   3494                 rc = NO_MEMORY;
   3495             }
   3496 
   3497             int32_t *crop_stream_ids = new int32_t[cnt];
   3498             if (NULL == crop_stream_ids) {
   3499                 rc = NO_MEMORY;
   3500             }
   3501 
   3502             if (NO_ERROR == rc) {
   3503                 int32_t steams_found = 0;
   3504                 for (size_t i = 0; i < cnt; i++) {
   3505                     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   3506                         it != mStreamInfo.end(); it++) {
   3507                         QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
   3508                         if (NULL != channel) {
   3509                             if (crop_data->crop_info[i].stream_id ==
   3510                                     channel->mStreams[0]->getMyServerID()) {
   3511                                 crop[steams_found*4] = crop_data->crop_info[i].crop.left;
   3512                                 crop[steams_found*4 + 1] = crop_data->crop_info[i].crop.top;
   3513                                 crop[steams_found*4 + 2] = crop_data->crop_info[i].crop.width;
   3514                                 crop[steams_found*4 + 3] = crop_data->crop_info[i].crop.height;
   3515                                 // In a more general case we may want to generate
   3516                                 // unique id depending on width, height, stream, private
   3517                                 // data etc.
   3518                                 crop_stream_ids[steams_found] = (int32_t)(*it)->stream;
   3519                                 steams_found++;
   3520                                 CDBG("%s: Adding reprocess crop data for stream %p %dx%d, %dx%d",
   3521                                         __func__,
   3522                                         (*it)->stream,
   3523                                         crop_data->crop_info[i].crop.left,
   3524                                         crop_data->crop_info[i].crop.top,
   3525                                         crop_data->crop_info[i].crop.width,
   3526                                         crop_data->crop_info[i].crop.height);
   3527                                 break;
   3528                             }
   3529                         }
   3530                     }
   3531                 }
   3532 
   3533                 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
   3534                         &steams_found, 1);
   3535                 camMetadata.update(QCAMERA3_CROP_REPROCESS,
   3536                         crop, steams_found*4);
   3537                 camMetadata.update(QCAMERA3_CROP_STREAM_ID_REPROCESS,
   3538                         crop_stream_ids, steams_found);
   3539             }
   3540 
   3541             if (crop) {
   3542                 delete [] crop;
   3543             }
   3544             if (crop_stream_ids) {
   3545                 delete [] crop_stream_ids;
   3546             }
   3547         } else {
   3548             // mm-qcamera-daemon only posts crop_data for streams
   3549             // not linked to pproc. So no valid crop metadata is not
   3550             // necessarily an error case.
   3551             CDBG("%s: No valid crop metadata entries", __func__);
   3552         }
   3553     }
   3554 
   3555     if (IS_PARAM_AVAILABLE(CAM_INTF_PARM_CAC, metadata)) {
   3556         cam_aberration_mode_t  *cacMode = (cam_aberration_mode_t *)
   3557                 POINTER_OF_PARAM(CAM_INTF_PARM_CAC, metadata);
   3558         int32_t cac = lookupFwkName(COLOR_ABERRATION_MAP,
   3559                 sizeof(COLOR_ABERRATION_MAP)/sizeof(COLOR_ABERRATION_MAP[0]),
   3560                 *cacMode);
   3561         if (NAME_NOT_FOUND != cac) {
   3562             uint8_t val = (uint8_t) cac;
   3563             camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
   3564                     &val,
   3565                     1);
   3566         } else {
   3567             ALOGE("%s: Invalid CAC camera parameter: %d", __func__, *cacMode);
   3568         }
   3569     }
   3570 
   3571     resultMetadata = camMetadata.release();
   3572     return resultMetadata;
   3573 }
   3574 
   3575 /*===========================================================================
   3576  * FUNCTION   : translateCbUrgentMetadataToResultMetadata
   3577  *
   3578  * DESCRIPTION:
   3579  *
   3580  * PARAMETERS :
   3581  *   @metadata : metadata information from callback
   3582  *
   3583  * RETURN     : camera_metadata_t*
   3584  *              metadata in a format specified by fwk
   3585  *==========================================================================*/
   3586 camera_metadata_t*
   3587 QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
   3588                                 (metadata_buffer_t *metadata)
   3589 {
   3590     CameraMetadata camMetadata;
   3591     camera_metadata_t* resultMetadata;
   3592     uint8_t aeMode = CAM_AE_MODE_MAX;
   3593     int32_t *flashMode = NULL;
   3594     int32_t *redeye = NULL;
   3595 
   3596     if (IS_META_AVAILABLE(CAM_INTF_META_AEC_STATE, metadata)) {
   3597         uint8_t *ae_state = (uint8_t *)
   3598             POINTER_OF_META(CAM_INTF_META_AEC_STATE, metadata);
   3599         camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);
   3600         CDBG("%s: urgent Metadata : ANDROID_CONTROL_AE_STATE", __func__);
   3601     }
   3602 
   3603     if (IS_META_AVAILABLE(CAM_INTF_META_AF_STATE, metadata)) {
   3604         uint8_t  *afState = (uint8_t *)
   3605             POINTER_OF_META(CAM_INTF_META_AF_STATE, metadata);
   3606         camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);
   3607         CDBG("%s: urgent Metadata : ANDROID_CONTROL_AF_STATE %d", __func__, *afState);
   3608     }
   3609 
   3610     if (IS_META_AVAILABLE(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata)) {
   3611         float  *focusDistance =
   3612             (float *)POINTER_OF_META(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
   3613         camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
   3614     }
   3615 
   3616     if (IS_META_AVAILABLE(CAM_INTF_META_LENS_FOCUS_RANGE, metadata)) {
   3617         float  *focusRange =
   3618             (float *)POINTER_OF_META(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
   3619         camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
   3620     }
   3621 
   3622     if (IS_META_AVAILABLE(CAM_INTF_META_AWB_STATE, metadata)) {
   3623         uint8_t  *whiteBalanceState = (uint8_t *)
   3624             POINTER_OF_META(CAM_INTF_META_AWB_STATE, metadata);
   3625         camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);
   3626         CDBG("%s: urgent Metadata : ANDROID_CONTROL_AWB_STATE", __func__);
   3627     }
   3628 
   3629     if (IS_META_AVAILABLE(CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata)) {
   3630         cam_trigger_t *aecTrigger =
   3631                 (cam_trigger_t *)POINTER_OF_META(CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata);
   3632         camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
   3633                 &aecTrigger->trigger, 1);
   3634         camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
   3635                 &aecTrigger->trigger_id, 1);
   3636     }
   3637 
   3638     if (IS_META_AVAILABLE(CAM_INTF_PARM_FOCUS_MODE, metadata)) {
   3639         uint8_t  *focusMode = (uint8_t *)
   3640             POINTER_OF_META(CAM_INTF_PARM_FOCUS_MODE, metadata);
   3641         uint8_t fwkAfMode = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
   3642             sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]), *focusMode);
   3643         camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
   3644     }
   3645 
   3646     if (IS_META_AVAILABLE(CAM_INTF_META_AF_TRIGGER, metadata)) {
   3647         cam_trigger_t *af_trigger =
   3648                 (cam_trigger_t *)POINTER_OF_META(CAM_INTF_META_AF_TRIGGER, metadata);
   3649         camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
   3650                 &af_trigger->trigger, 1);
   3651         camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
   3652     }
   3653 
   3654     if (IS_META_AVAILABLE(CAM_INTF_PARM_WHITE_BALANCE, metadata)) {
   3655         uint8_t  *whiteBalance = (uint8_t *)
   3656             POINTER_OF_META(CAM_INTF_PARM_WHITE_BALANCE, metadata);
   3657         uint8_t fwkWhiteBalanceMode =
   3658             (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
   3659                 sizeof(WHITE_BALANCE_MODES_MAP)/
   3660                 sizeof(WHITE_BALANCE_MODES_MAP[0]), *whiteBalance);
   3661         camMetadata.update(ANDROID_CONTROL_AWB_MODE,
   3662             &fwkWhiteBalanceMode, 1);
   3663     }
   3664 
   3665     if (IS_META_AVAILABLE(CAM_INTF_META_AEC_MODE, metadata)) {
   3666         aeMode = *((uint32_t*) POINTER_OF_META(CAM_INTF_META_AEC_MODE, metadata));
   3667     }
   3668     if (IS_META_AVAILABLE(CAM_INTF_PARM_LED_MODE, metadata)) {
   3669         flashMode = (int32_t*)
   3670                 POINTER_OF_PARAM(CAM_INTF_PARM_LED_MODE, metadata);
   3671     }
   3672     if (IS_META_AVAILABLE(CAM_INTF_PARM_REDEYE_REDUCTION, metadata)) {
   3673         redeye = (int32_t*)
   3674                 POINTER_OF_PARAM(CAM_INTF_PARM_REDEYE_REDUCTION, metadata);
   3675     }
   3676 
   3677     uint8_t fwk_aeMode;
   3678     if (redeye != NULL && *redeye == 1) {
   3679         fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
   3680         camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
   3681     } else if (flashMode != NULL &&
   3682             ((*flashMode == CAM_FLASH_MODE_AUTO)||
   3683              (*flashMode == CAM_FLASH_MODE_ON))) {
   3684         fwk_aeMode = (uint8_t)lookupFwkName(AE_FLASH_MODE_MAP,
   3685                 sizeof(AE_FLASH_MODE_MAP)/sizeof(AE_FLASH_MODE_MAP[0]),*flashMode);
   3686         camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
   3687     } else if (aeMode == CAM_AE_MODE_ON) {
   3688         fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
   3689         camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
   3690     } else if (aeMode == CAM_AE_MODE_OFF) {
   3691         fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
   3692         camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
   3693     } else {
   3694         ALOGE("%s: Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%p, flashMode:%p, aeMode:%d!!!",__func__,
   3695               redeye, flashMode, aeMode);
   3696     }
   3697 
   3698     if (IS_META_AVAILABLE(CAM_INTF_META_LENS_STATE, metadata)) {
   3699         uint8_t *lensState = (uint8_t *)POINTER_OF_META(CAM_INTF_META_LENS_STATE, metadata);
   3700         camMetadata.update(ANDROID_LENS_STATE , lensState, 1);
   3701     }
   3702 
   3703     resultMetadata = camMetadata.release();
   3704     return resultMetadata;
   3705 }
   3706 
   3707 /*===========================================================================
   3708  * FUNCTION   : dumpMetadataToFile
   3709  *
   3710  * DESCRIPTION: Dumps tuning metadata to file system
   3711  *
   3712  * PARAMETERS :
   3713  *   @meta           : tuning metadata
   3714  *   @dumpFrameCount : current dump frame count
   3715  *   @enabled        : Enable mask
   3716  *
   3717  *==========================================================================*/
   3718 void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
   3719                                                    uint32_t &dumpFrameCount,
   3720                                                    int32_t enabled,
   3721                                                    const char *type,
   3722                                                    uint32_t frameNumber)
   3723 {
   3724     //Some sanity checks
   3725     if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
   3726         ALOGE("%s : Tuning sensor data size bigger than expected %d: %d",
   3727               __func__,
   3728               meta.tuning_sensor_data_size,
   3729               TUNING_SENSOR_DATA_MAX);
   3730         return;
   3731     }
   3732 
   3733     if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
   3734         ALOGE("%s : Tuning VFE data size bigger than expected %d: %d",
   3735               __func__,
   3736               meta.tuning_vfe_data_size,
   3737               TUNING_VFE_DATA_MAX);
   3738         return;
   3739     }
   3740 
   3741     if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
   3742         ALOGE("%s : Tuning CPP data size bigger than expected %d: %d",
   3743               __func__,
   3744               meta.tuning_cpp_data_size,
   3745               TUNING_CPP_DATA_MAX);
   3746         return;
   3747     }
   3748 
   3749     if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
   3750         ALOGE("%s : Tuning CAC data size bigger than expected %d: %d",
   3751               __func__,
   3752               meta.tuning_cac_data_size,
   3753               TUNING_CAC_DATA_MAX);
   3754         return;
   3755     }
   3756     //
   3757 
   3758     if(enabled){
   3759         char timeBuf[FILENAME_MAX];
   3760         char buf[FILENAME_MAX];
   3761         memset(buf, 0, sizeof(buf));
   3762         memset(timeBuf, 0, sizeof(timeBuf));
   3763         time_t current_time;
   3764         struct tm * timeinfo;
   3765         time (&current_time);
   3766         timeinfo = localtime (&current_time);
   3767         strftime (timeBuf, sizeof(timeBuf),"/data/%Y%m%d%H%M%S", timeinfo);
   3768         String8 filePath(timeBuf);
   3769         snprintf(buf,
   3770                 sizeof(buf),
   3771                 "%dm_%s_%d.bin",
   3772                 dumpFrameCount,
   3773                 type,
   3774                 frameNumber);
   3775         filePath.append(buf);
   3776         int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
   3777         if (file_fd >= 0) {
   3778             int written_len = 0;
   3779             meta.tuning_data_version = TUNING_DATA_VERSION;
   3780             void *data = (void *)((uint8_t *)&meta.tuning_data_version);
   3781             written_len += write(file_fd, data, sizeof(uint32_t));
   3782             data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
   3783             CDBG("tuning_sensor_data_size %d",(int)(*(int *)data));
   3784             written_len += write(file_fd, data, sizeof(uint32_t));
   3785             data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
   3786             CDBG("tuning_vfe_data_size %d",(int)(*(int *)data));
   3787             written_len += write(file_fd, data, sizeof(uint32_t));
   3788             data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
   3789             CDBG("tuning_cpp_data_size %d",(int)(*(int *)data));
   3790             written_len += write(file_fd, data, sizeof(uint32_t));
   3791             data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
   3792             CDBG("tuning_cac_data_size %d",(int)(*(int *)data));
   3793             written_len += write(file_fd, data, sizeof(uint32_t));
   3794             meta.tuning_mod3_data_size = 0;
   3795             data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
   3796             CDBG("tuning_mod3_data_size %d",(int)(*(int *)data));
   3797             written_len += write(file_fd, data, sizeof(uint32_t));
   3798             int total_size = meta.tuning_sensor_data_size;
   3799             data = (void *)((uint8_t *)&meta.data);
   3800             written_len += write(file_fd, data, total_size);
   3801             total_size = meta.tuning_vfe_data_size;
   3802             data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
   3803             written_len += write(file_fd, data, total_size);
   3804             total_size = meta.tuning_cpp_data_size;
   3805             data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
   3806             written_len += write(file_fd, data, total_size);
   3807             total_size = meta.tuning_cac_data_size;
   3808             data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
   3809             written_len += write(file_fd, data, total_size);
   3810             close(file_fd);
   3811         }else {
   3812             ALOGE("%s: fail to open file for metadata dumping", __func__);
   3813         }
   3814     }
   3815 }
   3816 
   3817 /*===========================================================================
   3818  * FUNCTION   : cleanAndSortStreamInfo
   3819  *
   3820  * DESCRIPTION: helper method to clean up invalid streams in stream_info,
   3821  *              and sort them such that raw stream is at the end of the list
   3822  *              This is a workaround for camera daemon constraint.
   3823  *
   3824  * PARAMETERS : None
   3825  *
   3826  *==========================================================================*/
   3827 void QCamera3HardwareInterface::cleanAndSortStreamInfo()
   3828 {
   3829     List<stream_info_t *> newStreamInfo;
   3830 
   3831     /*clean up invalid streams*/
   3832     for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
   3833             it != mStreamInfo.end();) {
   3834         if(((*it)->status) == INVALID){
   3835             QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
   3836             delete channel;
   3837             free(*it);
   3838             it = mStreamInfo.erase(it);
   3839         } else {
   3840             it++;
   3841         }
   3842     }
   3843 
   3844     // Move preview/video/callback/snapshot streams into newList
   3845     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   3846             it != mStreamInfo.end();) {
   3847         if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
   3848                 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
   3849                 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
   3850             newStreamInfo.push_back(*it);
   3851             it = mStreamInfo.erase(it);
   3852         } else
   3853             it++;
   3854     }
   3855     // Move raw streams into newList
   3856     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   3857             it != mStreamInfo.end();) {
   3858         newStreamInfo.push_back(*it);
   3859         it = mStreamInfo.erase(it);
   3860     }
   3861 
   3862     mStreamInfo = newStreamInfo;
   3863 }
   3864 
   3865 /*===========================================================================
   3866  * FUNCTION   : extractJpegMetadata
   3867  *
   3868  * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
   3869  *              JPEG metadata is cached in HAL, and return as part of capture
   3870  *              result when metadata is returned from camera daemon.
   3871  *
   3872  * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
   3873  *              @request:      capture request
   3874  *
   3875  *==========================================================================*/
   3876 void QCamera3HardwareInterface::extractJpegMetadata(
   3877         CameraMetadata& jpegMetadata,
   3878         const camera3_capture_request_t *request)
   3879 {
   3880     CameraMetadata frame_settings;
   3881     frame_settings = request->settings;
   3882 
   3883     if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
   3884         jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
   3885                 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
   3886                 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
   3887 
   3888     if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
   3889         jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
   3890                 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
   3891                 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
   3892 
   3893     if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
   3894         jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
   3895                 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
   3896                 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
   3897 
   3898     if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
   3899         jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
   3900                 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
   3901                 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
   3902 
   3903     if (frame_settings.exists(ANDROID_JPEG_QUALITY))
   3904         jpegMetadata.update(ANDROID_JPEG_QUALITY,
   3905                 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
   3906                 frame_settings.find(ANDROID_JPEG_QUALITY).count);
   3907 
   3908     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
   3909         jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
   3910                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
   3911                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
   3912 
   3913     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
   3914         int32_t thumbnail_size[2];
   3915         thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
   3916         thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
   3917         if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
   3918             int32_t orientation =
   3919                   frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
   3920             if ((orientation == 90) || (orientation == 270)) {
   3921                //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
   3922                int32_t temp;
   3923                temp = thumbnail_size[0];
   3924                thumbnail_size[0] = thumbnail_size[1];
   3925                thumbnail_size[1] = temp;
   3926             }
   3927          }
   3928          jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
   3929                 thumbnail_size,
   3930                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
   3931     }
   3932 }
   3933 
   3934 /*===========================================================================
   3935  * FUNCTION   : convertToRegions
   3936  *
   3937  * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
   3938  *
   3939  * PARAMETERS :
   3940  *   @rect   : cam_rect_t struct to convert
   3941  *   @region : int32_t destination array
   3942  *   @weight : if we are converting from cam_area_t, weight is valid
   3943  *             else weight = -1
   3944  *
   3945  *==========================================================================*/
   3946 void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
   3947     region[0] = rect.left;
   3948     region[1] = rect.top;
   3949     region[2] = rect.left + rect.width;
   3950     region[3] = rect.top + rect.height;
   3951     if (weight > -1) {
   3952         region[4] = weight;
   3953     }
   3954 }
   3955 
   3956 /*===========================================================================
   3957  * FUNCTION   : convertFromRegions
   3958  *
   3959  * DESCRIPTION: helper method to convert from array to cam_rect_t
   3960  *
   3961  * PARAMETERS :
   3962  *   @rect   : cam_rect_t struct to convert
   3963  *   @region : int32_t destination array
   3964  *   @weight : if we are converting from cam_area_t, weight is valid
   3965  *             else weight = -1
   3966  *
   3967  *==========================================================================*/
   3968 void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
   3969                                                    const camera_metadata_t *settings,
   3970                                                    uint32_t tag){
   3971     CameraMetadata frame_settings;
   3972     frame_settings = settings;
   3973     int32_t x_min = frame_settings.find(tag).data.i32[0];
   3974     int32_t y_min = frame_settings.find(tag).data.i32[1];
   3975     int32_t x_max = frame_settings.find(tag).data.i32[2];
   3976     int32_t y_max = frame_settings.find(tag).data.i32[3];
   3977     roi->weight = frame_settings.find(tag).data.i32[4];
   3978     roi->rect.left = x_min;
   3979     roi->rect.top = y_min;
   3980     roi->rect.width = x_max - x_min;
   3981     roi->rect.height = y_max - y_min;
   3982 }
   3983 
   3984 /*===========================================================================
   3985  * FUNCTION   : resetIfNeededROI
   3986  *
   3987  * DESCRIPTION: helper method to reset the roi if it is greater than scaler
   3988  *              crop region
   3989  *
   3990  * PARAMETERS :
   3991  *   @roi       : cam_area_t struct to resize
   3992  *   @scalerCropRegion : cam_crop_region_t region to compare against
   3993  *
   3994  *
   3995  *==========================================================================*/
   3996 bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
   3997                                                  const cam_crop_region_t* scalerCropRegion)
   3998 {
   3999     int32_t roi_x_max = roi->rect.width + roi->rect.left;
   4000     int32_t roi_y_max = roi->rect.height + roi->rect.top;
   4001     int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
   4002     int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
   4003 
   4004     /* According to spec weight = 0 is used to indicate roi needs to be disabled
   4005      * without having this check the calculations below to validate if the roi
   4006      * is inside scalar crop region will fail resulting in the roi not being
   4007      * reset causing algorithm to continue to use stale roi window
   4008      */
   4009     if (roi->weight == 0) {
   4010         return true;
   4011     }
   4012 
   4013     if ((roi_x_max < scalerCropRegion->left) ||
   4014         // right edge of roi window is left of scalar crop's left edge
   4015         (roi_y_max < scalerCropRegion->top)  ||
   4016         // bottom edge of roi window is above scalar crop's top edge
   4017         (roi->rect.left > crop_x_max) ||
   4018         // left edge of roi window is beyond(right) of scalar crop's right edge
   4019         (roi->rect.top > crop_y_max)){
   4020         // top edge of roi windo is above scalar crop's top edge
   4021         return false;
   4022     }
   4023     if (roi->rect.left < scalerCropRegion->left) {
   4024         roi->rect.left = scalerCropRegion->left;
   4025     }
   4026     if (roi->rect.top < scalerCropRegion->top) {
   4027         roi->rect.top = scalerCropRegion->top;
   4028     }
   4029     if (roi_x_max > crop_x_max) {
   4030         roi_x_max = crop_x_max;
   4031     }
   4032     if (roi_y_max > crop_y_max) {
   4033         roi_y_max = crop_y_max;
   4034     }
   4035     roi->rect.width = roi_x_max - roi->rect.left;
   4036     roi->rect.height = roi_y_max - roi->rect.top;
   4037     return true;
   4038 }
   4039 
   4040 /*===========================================================================
   4041  * FUNCTION   : convertLandmarks
   4042  *
   4043  * DESCRIPTION: helper method to extract the landmarks from face detection info
   4044  *
   4045  * PARAMETERS :
   4046  *   @face   : cam_rect_t struct to convert
   4047  *   @landmarks : int32_t destination array
   4048  *
   4049  *
   4050  *==========================================================================*/
   4051 void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
   4052 {
   4053     landmarks[0] = face.left_eye_center.x;
   4054     landmarks[1] = face.left_eye_center.y;
   4055     landmarks[2] = face.right_eye_center.x;
   4056     landmarks[3] = face.right_eye_center.y;
   4057     landmarks[4] = face.mouth_center.x;
   4058     landmarks[5] = face.mouth_center.y;
   4059 }
   4060 
   4061 #define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
   4062 /*===========================================================================
   4063  * FUNCTION   : initCapabilities
   4064  *
   4065  * DESCRIPTION: initialize camera capabilities in static data struct
   4066  *
   4067  * PARAMETERS :
   4068  *   @cameraId  : camera Id
   4069  *
   4070  * RETURN     : int32_t type of status
   4071  *              NO_ERROR  -- success
   4072  *              none-zero failure code
   4073  *==========================================================================*/
   4074 int QCamera3HardwareInterface::initCapabilities(int cameraId)
   4075 {
   4076     int rc = 0;
   4077     mm_camera_vtbl_t *cameraHandle = NULL;
   4078     QCamera3HeapMemory *capabilityHeap = NULL;
   4079 
   4080     cameraHandle = camera_open(cameraId);
   4081     if (!cameraHandle) {
   4082         ALOGE("%s: camera_open failed", __func__);
   4083         rc = -1;
   4084         goto open_failed;
   4085     }
   4086 
   4087     capabilityHeap = new QCamera3HeapMemory();
   4088     if (capabilityHeap == NULL) {
   4089         ALOGE("%s: creation of capabilityHeap failed", __func__);
   4090         goto heap_creation_failed;
   4091     }
   4092     /* Allocate memory for capability buffer */
   4093     rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
   4094     if(rc != OK) {
   4095         ALOGE("%s: No memory for cappability", __func__);
   4096         goto allocate_failed;
   4097     }
   4098 
   4099     /* Map memory for capability buffer */
   4100     memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
   4101     rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
   4102                                 CAM_MAPPING_BUF_TYPE_CAPABILITY,
   4103                                 capabilityHeap->getFd(0),
   4104                                 sizeof(cam_capability_t));
   4105     if(rc < 0) {
   4106         ALOGE("%s: failed to map capability buffer", __func__);
   4107         goto map_failed;
   4108     }
   4109 
   4110     /* Query Capability */
   4111     rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
   4112     if(rc < 0) {
   4113         ALOGE("%s: failed to query capability",__func__);
   4114         goto query_failed;
   4115     }
   4116     gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
   4117     if (!gCamCapability[cameraId]) {
   4118         ALOGE("%s: out of memory", __func__);
   4119         goto query_failed;
   4120     }
   4121     memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
   4122                                         sizeof(cam_capability_t));
   4123     rc = 0;
   4124 
   4125 query_failed:
   4126     cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
   4127                             CAM_MAPPING_BUF_TYPE_CAPABILITY);
   4128 map_failed:
   4129     capabilityHeap->deallocate();
   4130 allocate_failed:
   4131     delete capabilityHeap;
   4132 heap_creation_failed:
   4133     cameraHandle->ops->close_camera(cameraHandle->camera_handle);
   4134     cameraHandle = NULL;
   4135 open_failed:
   4136     return rc;
   4137 }
   4138 
   4139 /*===========================================================================
   4140  * FUNCTION   : initParameters
   4141  *
   4142  * DESCRIPTION: initialize camera parameters
   4143  *
   4144  * PARAMETERS :
   4145  *
   4146  * RETURN     : int32_t type of status
   4147  *              NO_ERROR  -- success
   4148  *              none-zero failure code
   4149  *==========================================================================*/
   4150 int QCamera3HardwareInterface::initParameters()
   4151 {
   4152     int rc = 0;
   4153 
   4154     //Allocate Set Param Buffer
   4155     mParamHeap = new QCamera3HeapMemory();
   4156     rc = mParamHeap->allocate(1, sizeof(metadata_buffer_t), false);
   4157     if(rc != OK) {
   4158         rc = NO_MEMORY;
   4159         ALOGE("Failed to allocate SETPARM Heap memory");
   4160         delete mParamHeap;
   4161         mParamHeap = NULL;
   4162         return rc;
   4163     }
   4164 
   4165     //Map memory for parameters buffer
   4166     rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
   4167             CAM_MAPPING_BUF_TYPE_PARM_BUF,
   4168             mParamHeap->getFd(0),
   4169             sizeof(metadata_buffer_t));
   4170     if(rc < 0) {
   4171         ALOGE("%s:failed to map SETPARM buffer",__func__);
   4172         rc = FAILED_TRANSACTION;
   4173         mParamHeap->deallocate();
   4174         delete mParamHeap;
   4175         mParamHeap = NULL;
   4176         return rc;
   4177     }
   4178 
   4179     mParameters = (metadata_buffer_t*) DATA_PTR(mParamHeap,0);
   4180     mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
   4181     return rc;
   4182 }
   4183 
   4184 /*===========================================================================
   4185  * FUNCTION   : deinitParameters
   4186  *
   4187  * DESCRIPTION: de-initialize camera parameters
   4188  *
   4189  * PARAMETERS :
   4190  *
   4191  * RETURN     : NONE
   4192  *==========================================================================*/
   4193 void QCamera3HardwareInterface::deinitParameters()
   4194 {
   4195     mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
   4196             CAM_MAPPING_BUF_TYPE_PARM_BUF);
   4197 
   4198     mParamHeap->deallocate();
   4199     delete mParamHeap;
   4200     mParamHeap = NULL;
   4201 
   4202     mParameters = NULL;
   4203 
   4204     free(mPrevParameters);
   4205     mPrevParameters = NULL;
   4206 }
   4207 
   4208 /*===========================================================================
   4209  * FUNCTION   : calcMaxJpegSize
   4210  *
   4211  * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
   4212  *
   4213  * PARAMETERS :
   4214  *
   4215  * RETURN     : max_jpeg_size
   4216  *==========================================================================*/
   4217 int QCamera3HardwareInterface::calcMaxJpegSize(uint8_t camera_id)
   4218 {
   4219     int32_t max_jpeg_size = 0;
   4220     int temp_width, temp_height;
   4221     for (int i = 0; i < gCamCapability[camera_id]->picture_sizes_tbl_cnt; i++) {
   4222         temp_width = gCamCapability[camera_id]->picture_sizes_tbl[i].width;
   4223         temp_height = gCamCapability[camera_id]->picture_sizes_tbl[i].height;
   4224         if (temp_width * temp_height > max_jpeg_size ) {
   4225             max_jpeg_size = temp_width * temp_height;
   4226         }
   4227     }
   4228     max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
   4229     return max_jpeg_size;
   4230 }
   4231 
   4232 /*===========================================================================
   4233  * FUNCTION   : getMaxRawSize
   4234  *
   4235  * DESCRIPTION: Fetches maximum raw size supported by the cameraId
   4236  *
   4237  * PARAMETERS :
   4238  *
   4239  * RETURN     : Largest supported Raw Dimension
   4240  *==========================================================================*/
   4241 cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint8_t camera_id)
   4242 {
   4243     int max_width = 0;
   4244     cam_dimension_t maxRawSize;
   4245 
   4246     memset(&maxRawSize, 0, sizeof(cam_dimension_t));
   4247     for (int i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
   4248         if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
   4249             max_width = gCamCapability[camera_id]->raw_dim[i].width;
   4250             maxRawSize = gCamCapability[camera_id]->raw_dim[i];
   4251         }
   4252     }
   4253     return maxRawSize;
   4254 }
   4255 
   4256 
   4257 /*===========================================================================
   4258  * FUNCTION   : calcMaxJpegDim
   4259  *
   4260  * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
   4261  *
   4262  * PARAMETERS :
   4263  *
   4264  * RETURN     : max_jpeg_dim
   4265  *==========================================================================*/
   4266 cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
   4267 {
   4268     cam_dimension_t max_jpeg_dim;
   4269     cam_dimension_t curr_jpeg_dim;
   4270     max_jpeg_dim.width = 0;
   4271     max_jpeg_dim.height = 0;
   4272     curr_jpeg_dim.width = 0;
   4273     curr_jpeg_dim.height = 0;
   4274     for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
   4275         curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
   4276         curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
   4277         if (curr_jpeg_dim.width * curr_jpeg_dim.height >
   4278             max_jpeg_dim.width * max_jpeg_dim.height ) {
   4279             max_jpeg_dim.width = curr_jpeg_dim.width;
   4280             max_jpeg_dim.height = curr_jpeg_dim.height;
   4281         }
   4282     }
   4283     return max_jpeg_dim;
   4284 }
   4285 
   4286 
   4287 /*===========================================================================
   4288  * FUNCTION   : initStaticMetadata
   4289  *
   4290  * DESCRIPTION: initialize the static metadata
   4291  *
   4292  * PARAMETERS :
   4293  *   @cameraId  : camera Id
   4294  *
   4295  * RETURN     : int32_t type of status
   4296  *              0  -- success
   4297  *              non-zero failure code
   4298  *==========================================================================*/
   4299 int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
   4300 {
   4301     int rc = 0;
   4302     CameraMetadata staticInfo;
   4303 
   4304     bool facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
   4305     if (!facingBack)
   4306         gCamCapability[cameraId]->supported_raw_dim_cnt = 0;
   4307 
   4308      /* android.info: hardware level */
   4309     uint8_t supportedHardwareLevel = (facingBack)? ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL:
   4310       ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED;
   4311     staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
   4312         &supportedHardwareLevel, 1);
   4313     /*HAL 3 only*/
   4314     staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
   4315                     &gCamCapability[cameraId]->min_focus_distance, 1);
   4316 
   4317     uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
   4318             ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
   4319 
   4320     staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
   4321             &aeLockAvailable, 1);
   4322 
   4323     uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
   4324             ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
   4325 
   4326     staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
   4327             &awbLockAvailable, 1);
   4328 
   4329     staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
   4330                     &gCamCapability[cameraId]->hyper_focal_distance, 1);
   4331 
   4332     /*should be using focal lengths but sensor doesn't provide that info now*/
   4333     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
   4334                       &gCamCapability[cameraId]->focal_length,
   4335                       1);
   4336 
   4337     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
   4338                       gCamCapability[cameraId]->apertures,
   4339                       gCamCapability[cameraId]->apertures_count);
   4340 
   4341     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
   4342                 gCamCapability[cameraId]->filter_densities,
   4343                 gCamCapability[cameraId]->filter_densities_count);
   4344 
   4345 
   4346     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
   4347                       (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
   4348                       gCamCapability[cameraId]->optical_stab_modes_count);
   4349 
   4350     int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
   4351                                        gCamCapability[cameraId]->lens_shading_map_size.height};
   4352     staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
   4353                       lens_shading_map_size,
   4354                       sizeof(lens_shading_map_size)/sizeof(int32_t));
   4355 
   4356     staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
   4357             gCamCapability[cameraId]->sensor_physical_size, 2);
   4358 
   4359     staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
   4360             gCamCapability[cameraId]->exposure_time_range, 2);
   4361 
   4362     staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
   4363             &gCamCapability[cameraId]->max_frame_duration, 1);
   4364 
   4365     camera_metadata_rational baseGainFactor = {
   4366             gCamCapability[cameraId]->base_gain_factor.numerator,
   4367             gCamCapability[cameraId]->base_gain_factor.denominator};
   4368     staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
   4369                       &baseGainFactor, 1);
   4370 
   4371     staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
   4372                      (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);
   4373 
   4374     int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
   4375                                   gCamCapability[cameraId]->pixel_array_size.height};
   4376     staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
   4377                       pixel_array_size, 2);
   4378 
   4379     int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
   4380                                                 gCamCapability[cameraId]->active_array_size.top,
   4381                                                 gCamCapability[cameraId]->active_array_size.width,
   4382                                                 gCamCapability[cameraId]->active_array_size.height};
   4383     staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
   4384                       active_array_size, 4);
   4385 
   4386     staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
   4387             &gCamCapability[cameraId]->white_level, 1);
   4388 
   4389     staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
   4390             gCamCapability[cameraId]->black_level_pattern, 4);
   4391 
   4392     staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
   4393                       &gCamCapability[cameraId]->flash_charge_duration, 1);
   4394 
   4395     staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
   4396                       &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
   4397 
   4398     int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
   4399     staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
   4400                       (int32_t*)&maxFaces, 1);
   4401 
   4402     uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN;
   4403     if (0 && gCamCapability[cameraId]->isTimestampCalibrated) {
   4404         timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME;
   4405     }
   4406     staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
   4407             &timestampSource, 1);
   4408 
   4409     staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
   4410                       &gCamCapability[cameraId]->histogram_size, 1);
   4411 
   4412     staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
   4413             &gCamCapability[cameraId]->max_histogram_count, 1);
   4414 
   4415     int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
   4416                                     gCamCapability[cameraId]->sharpness_map_size.height};
   4417 
   4418     staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
   4419             sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
   4420 
   4421     staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
   4422             &gCamCapability[cameraId]->max_sharpness_map_value, 1);
   4423 
   4424     int32_t scalar_formats[] = {
   4425             ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
   4426             ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
   4427             ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
   4428             ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
   4429             HAL_PIXEL_FORMAT_RAW10,
   4430             HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
   4431     int scalar_formats_count = sizeof(scalar_formats)/sizeof(int32_t);
   4432     staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
   4433                       scalar_formats,
   4434                       scalar_formats_count);
   4435 
   4436     int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
   4437     makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
   4438               gCamCapability[cameraId]->picture_sizes_tbl_cnt,
   4439               available_processed_sizes);
   4440     staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
   4441                 available_processed_sizes,
   4442                 (gCamCapability[cameraId]->picture_sizes_tbl_cnt) * 2);
   4443 
   4444     int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
   4445     makeTable(gCamCapability[cameraId]->raw_dim,
   4446               gCamCapability[cameraId]->supported_raw_dim_cnt,
   4447               available_raw_sizes);
   4448     staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
   4449                 available_raw_sizes,
   4450                 gCamCapability[cameraId]->supported_raw_dim_cnt * 2);
   4451 
   4452     int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
   4453     makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
   4454                  gCamCapability[cameraId]->fps_ranges_tbl_cnt,
   4455                  available_fps_ranges);
   4456     staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
   4457             available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );
   4458 
   4459     camera_metadata_rational exposureCompensationStep = {
   4460             gCamCapability[cameraId]->exp_compensation_step.numerator,
   4461             gCamCapability[cameraId]->exp_compensation_step.denominator};
   4462     staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
   4463                       &exposureCompensationStep, 1);
   4464 
   4465     uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
   4466     staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
   4467                       availableVstabModes, sizeof(availableVstabModes));
   4468 
   4469     /*HAL 1 and HAL 3 common*/
   4470     float maxZoom = 4;
   4471     staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
   4472             &maxZoom, 1);
   4473 
   4474     uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_FREEFORM;
   4475     staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
   4476 
   4477     int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
   4478     if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
   4479         max3aRegions[2] = 0; /* AF not supported */
   4480     staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
   4481             max3aRegions, 3);
   4482 
   4483     uint8_t availableFaceDetectModes[] = {
   4484             ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
   4485             ANDROID_STATISTICS_FACE_DETECT_MODE_FULL };
   4486     staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
   4487                       availableFaceDetectModes,
   4488                       sizeof(availableFaceDetectModes));
   4489 
   4490     int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
   4491                                            gCamCapability[cameraId]->exposure_compensation_max};
   4492     staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
   4493             exposureCompensationRange,
   4494             sizeof(exposureCompensationRange)/sizeof(int32_t));
   4495 
   4496     uint8_t lensFacing = (facingBack) ?
   4497             ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
   4498     staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
   4499 
   4500     staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
   4501                       available_thumbnail_sizes,
   4502                       sizeof(available_thumbnail_sizes)/sizeof(int32_t));
   4503 
   4504     /*all sizes will be clubbed into this tag*/
   4505     int32_t available_jpeg_sizes[MAX_SIZES_CNT * 2];
   4506     uint8_t jpeg_sizes_cnt = filterJpegSizes(available_jpeg_sizes, available_processed_sizes,
   4507             (gCamCapability[cameraId]->picture_sizes_tbl_cnt) * 2,
   4508              MAX_SIZES_CNT * 2,
   4509              gCamCapability[cameraId]->active_array_size,
   4510              gCamCapability[cameraId]->max_downscale_factor);
   4511     /*android.scaler.availableStreamConfigurations*/
   4512     int32_t max_stream_configs_size =
   4513             gCamCapability[cameraId]->picture_sizes_tbl_cnt *
   4514             sizeof(scalar_formats)/sizeof(int32_t) * 4;
   4515     int32_t available_stream_configs[max_stream_configs_size];
   4516     int idx = 0;
   4517 
   4518     /* Add input/output stream configurations for each scalar formats*/
   4519     for (int j = 0; j < scalar_formats_count; j++) {
   4520         switch (scalar_formats[j]) {
   4521         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
   4522         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
   4523         case HAL_PIXEL_FORMAT_RAW10:
   4524             for (int i = 0;
   4525                 i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
   4526                 available_stream_configs[idx] = scalar_formats[j];
   4527                 available_stream_configs[idx+1] =
   4528                     gCamCapability[cameraId]->raw_dim[i].width;
   4529                 available_stream_configs[idx+2] =
   4530                     gCamCapability[cameraId]->raw_dim[i].height;
   4531                 available_stream_configs[idx+3] =
   4532                     ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;
   4533                 idx+=4;
   4534             }
   4535             break;
   4536         case HAL_PIXEL_FORMAT_BLOB:
   4537             for (int i = 0; i < jpeg_sizes_cnt/2; i++) {
   4538                 available_stream_configs[idx] = scalar_formats[j];
   4539                 available_stream_configs[idx+1] = available_jpeg_sizes[i*2];
   4540                 available_stream_configs[idx+2] = available_jpeg_sizes[i*2+1];
   4541                 available_stream_configs[idx+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;
   4542                 idx+=4;
   4543             }
   4544             break;
   4545 
   4546         case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
   4547         case HAL_PIXEL_FORMAT_YCbCr_420_888:
   4548         default:
   4549             cam_dimension_t largest_picture_size;
   4550             memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
   4551             for (int i = 0;
   4552                 i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
   4553                 available_stream_configs[idx] = scalar_formats[j];
   4554                 available_stream_configs[idx+1] =
   4555                     gCamCapability[cameraId]->picture_sizes_tbl[i].width;
   4556                 available_stream_configs[idx+2] =
   4557                     gCamCapability[cameraId]->picture_sizes_tbl[i].height;
   4558                 available_stream_configs[idx+3] =
   4559                     ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;
   4560                 idx+=4;
   4561 
   4562                 /* Book keep largest */
   4563                 if (gCamCapability[cameraId]->picture_sizes_tbl[i].width
   4564                         >= largest_picture_size.width &&
   4565                         gCamCapability[cameraId]->picture_sizes_tbl[i].height
   4566                         >= largest_picture_size.height)
   4567                     largest_picture_size = gCamCapability[cameraId]->picture_sizes_tbl[i];
   4568             }
   4569 
   4570             break;
   4571         }
   4572     }
   4573 
   4574     staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
   4575                       available_stream_configs, idx);
   4576     static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
   4577     staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
   4578 
   4579     static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
   4580     staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
   4581 
   4582     /* android.scaler.availableMinFrameDurations */
   4583     int64_t available_min_durations[max_stream_configs_size];
   4584     idx = 0;
   4585     for (int j = 0; j < scalar_formats_count; j++) {
   4586         switch (scalar_formats[j]) {
   4587         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
   4588         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
   4589         case HAL_PIXEL_FORMAT_RAW10:
   4590             for (int i = 0;
   4591                 i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
   4592                 available_min_durations[idx] = scalar_formats[j];
   4593                 available_min_durations[idx+1] =
   4594                     gCamCapability[cameraId]->raw_dim[i].width;
   4595                 available_min_durations[idx+2] =
   4596                     gCamCapability[cameraId]->raw_dim[i].height;
   4597                 available_min_durations[idx+3] =
   4598                     gCamCapability[cameraId]->raw_min_duration[i];
   4599                 idx+=4;
   4600             }
   4601             break;
   4602         default:
   4603             for (int i = 0;
   4604                 i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
   4605                 available_min_durations[idx] = scalar_formats[j];
   4606                 available_min_durations[idx+1] =
   4607                     gCamCapability[cameraId]->picture_sizes_tbl[i].width;
   4608                 available_min_durations[idx+2] =
   4609                     gCamCapability[cameraId]->picture_sizes_tbl[i].height;
   4610                 available_min_durations[idx+3] =
   4611                     gCamCapability[cameraId]->picture_min_duration[i];
   4612                 idx+=4;
   4613             }
   4614             break;
   4615         }
   4616     }
   4617     staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
   4618                       &available_min_durations[0], idx);
   4619 
   4620     int32_t max_jpeg_size = calcMaxJpegSize(cameraId);
   4621     staticInfo.update(ANDROID_JPEG_MAX_SIZE,
   4622                       &max_jpeg_size, 1);
   4623 
   4624     uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
   4625     size_t size = 0;
   4626     for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
   4627         int32_t val = lookupFwkName(EFFECT_MODES_MAP,
   4628                                    sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
   4629                                    gCamCapability[cameraId]->supported_effects[i]);
   4630         if (val != NAME_NOT_FOUND) {
   4631             avail_effects[size] = (uint8_t)val;
   4632             size++;
   4633         }
   4634     }
   4635     staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
   4636                       avail_effects,
   4637                       size);
   4638 
   4639     uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
   4640     uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
   4641     int32_t supported_scene_modes_cnt = 0;
   4642     for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
   4643         int32_t val = lookupFwkName(SCENE_MODES_MAP,
   4644                                 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
   4645                                 gCamCapability[cameraId]->supported_scene_modes[i]);
   4646         if (val != NAME_NOT_FOUND) {
   4647             avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
   4648             supported_indexes[supported_scene_modes_cnt] = i;
   4649             supported_scene_modes_cnt++;
   4650         }
   4651     }
   4652 
   4653     staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
   4654                       avail_scene_modes,
   4655                       supported_scene_modes_cnt);
   4656 
   4657     uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
   4658     makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
   4659                       supported_scene_modes_cnt,
   4660                       scene_mode_overrides,
   4661                       supported_indexes,
   4662                       cameraId);
   4663     staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
   4664                       scene_mode_overrides,
   4665                       supported_scene_modes_cnt*3);
   4666 
   4667     uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
   4668                                          ANDROID_CONTROL_MODE_AUTO,
   4669                                          ANDROID_CONTROL_MODE_USE_SCENE_MODE};
   4670     staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
   4671             available_control_modes,
   4672             3);
   4673 
   4674     uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
   4675     size = 0;
   4676     for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
   4677         int32_t val = lookupFwkName(ANTIBANDING_MODES_MAP,
   4678                                  sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
   4679                                  gCamCapability[cameraId]->supported_antibandings[i]);
   4680         if (val != NAME_NOT_FOUND) {
   4681             avail_antibanding_modes[size] = (uint8_t)val;
   4682             size++;
   4683         }
   4684 
   4685     }
   4686     staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
   4687                       avail_antibanding_modes,
   4688                       size);
   4689 
   4690     uint8_t avail_abberation_modes[CAM_COLOR_CORRECTION_ABERRATION_MAX];
   4691     size = 0;
   4692     if (0 == gCamCapability[cameraId]->aberration_modes_count) {
   4693         avail_abberation_modes[0] =
   4694                 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
   4695         size++;
   4696     } else {
   4697         for (size_t i = 0; i < gCamCapability[cameraId]->aberration_modes_count; i++) {
   4698             int32_t val = lookupFwkName(COLOR_ABERRATION_MAP,
   4699                     sizeof(COLOR_ABERRATION_MAP)/sizeof(COLOR_ABERRATION_MAP[0]),
   4700                     gCamCapability[cameraId]->aberration_modes[i]);
   4701             if (val != NAME_NOT_FOUND) {
   4702                 avail_abberation_modes[size] = (uint8_t)val;
   4703                 size++;
   4704             } else {
   4705                 ALOGE("%s: Invalid CAC mode %d", __func__,
   4706                         gCamCapability[cameraId]->aberration_modes[i]);
   4707                 break;
   4708             }
   4709         }
   4710 
   4711     }
   4712     staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
   4713             avail_abberation_modes,
   4714             size);
   4715 
   4716     char cafProp[PROPERTY_VALUE_MAX];
   4717     memset(cafProp, 0, sizeof(cafProp));
   4718     property_get("persist.camera.caf.disable", cafProp, "0");
   4719     uint8_t cafDisabled = atoi(cafProp);
   4720 
   4721     uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
   4722     size = 0;
   4723     for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
   4724         if (cafDisabled &&
   4725             ((gCamCapability[cameraId]->supported_focus_modes[i]
   4726               == CAM_FOCUS_MODE_CONTINOUS_PICTURE) ||
   4727              (gCamCapability[cameraId]->supported_focus_modes[i]
   4728               == CAM_FOCUS_MODE_CONTINOUS_VIDEO)))
   4729             continue;
   4730 
   4731         int32_t val = lookupFwkName(FOCUS_MODES_MAP,
   4732                                 sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
   4733                                 gCamCapability[cameraId]->supported_focus_modes[i]);
   4734         if (val != NAME_NOT_FOUND) {
   4735             avail_af_modes[size] = (uint8_t)val;
   4736             size++;
   4737         }
   4738     }
   4739     staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
   4740                       avail_af_modes,
   4741                       size);
   4742 
   4743     uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
   4744     size = 0;
   4745     for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
   4746         int32_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
   4747                                     sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
   4748                                     gCamCapability[cameraId]->supported_white_balances[i]);
   4749         if (val != NAME_NOT_FOUND) {
   4750             avail_awb_modes[size] = (uint8_t)val;
   4751             size++;
   4752         }
   4753     }
   4754     staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
   4755                       avail_awb_modes,
   4756                       size);
   4757 
   4758     uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
   4759     for (int i = 0; i < gCamCapability[cameraId]->supported_flash_firing_level_cnt; i++)
   4760       available_flash_levels[i] = gCamCapability[cameraId]->supported_firing_levels[i];
   4761 
   4762     staticInfo.update(ANDROID_FLASH_FIRING_POWER,
   4763             available_flash_levels,
   4764             gCamCapability[cameraId]->supported_flash_firing_level_cnt);
   4765 
   4766     uint8_t flashAvailable;
   4767     if (gCamCapability[cameraId]->flash_available)
   4768         flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
   4769     else
   4770         flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
   4771     staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
   4772             &flashAvailable, 1);
   4773 
   4774     uint8_t avail_ae_modes[5];
   4775     size = 0;
   4776     for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
   4777         avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
   4778         size++;
   4779     }
   4780     if (flashAvailable) {
   4781         avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
   4782         avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
   4783     }
   4784     staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
   4785                       avail_ae_modes,
   4786                       size);
   4787 
   4788     int32_t sensitivity_range[2];
   4789     sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
   4790     sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
   4791     staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
   4792                       sensitivity_range,
   4793                       sizeof(sensitivity_range) / sizeof(int32_t));
   4794 
   4795     staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
   4796                       &gCamCapability[cameraId]->max_analog_sensitivity,
   4797                       1);
   4798 
   4799     int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
   4800     staticInfo.update(ANDROID_SENSOR_ORIENTATION,
   4801                       &sensor_orientation,
   4802                       1);
   4803 
   4804     int32_t max_output_streams[3] = {
   4805             MAX_STALLING_STREAMS,
   4806             MAX_PROCESSED_STREAMS,
   4807             MAX_RAW_STREAMS};
   4808     staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
   4809                       max_output_streams,
   4810                       3);
   4811 
   4812     uint8_t avail_leds = 0;
   4813     staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
   4814                       &avail_leds, 0);
   4815 
   4816     uint8_t focus_dist_calibrated;
   4817     int32_t val = lookupFwkName(FOCUS_CALIBRATION_MAP,
   4818             sizeof(FOCUS_CALIBRATION_MAP)/sizeof(FOCUS_CALIBRATION_MAP[0]),
   4819             gCamCapability[cameraId]->focus_dist_calibrated);
   4820     if (val != NAME_NOT_FOUND) {
   4821         focus_dist_calibrated = (uint8_t)val;
   4822         staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
   4823                      &focus_dist_calibrated, 1);
   4824     }
   4825 
   4826     int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
   4827     size = 0;
   4828     for (int i = 0; i < gCamCapability[cameraId]->supported_test_pattern_modes_cnt;
   4829             i++) {
   4830         int32_t val = lookupFwkName(TEST_PATTERN_MAP,
   4831                                     sizeof(TEST_PATTERN_MAP)/sizeof(TEST_PATTERN_MAP[0]),
   4832                                     gCamCapability[cameraId]->supported_test_pattern_modes[i]);
   4833         if (val != NAME_NOT_FOUND) {
   4834             avail_testpattern_modes[size] = val;
   4835             size++;
   4836         }
   4837     }
   4838     staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
   4839                       avail_testpattern_modes,
   4840                       size);
   4841 
   4842     uint8_t max_pipeline_depth = MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY;
   4843     staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
   4844                       &max_pipeline_depth,
   4845                       1);
   4846 
   4847     int32_t partial_result_count = PARTIAL_RESULT_COUNT;
   4848     staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
   4849                       &partial_result_count,
   4850                        1);
   4851 
   4852     int32_t max_stall_duration = MAX_REPROCESS_STALL;
   4853     staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
   4854 
   4855     uint8_t available_capabilities[MAX_AVAILABLE_CAPABILITIES];
   4856     uint8_t available_capabilities_count = 0;
   4857     available_capabilities[available_capabilities_count++] = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE;
   4858     available_capabilities[available_capabilities_count++] = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR;
   4859     available_capabilities[available_capabilities_count++] = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING;
   4860     available_capabilities[available_capabilities_count++] = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS;
   4861     available_capabilities[available_capabilities_count++] = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE;
   4862     if (facingBack) {
   4863         available_capabilities[available_capabilities_count++] = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW;
   4864     }
   4865     staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
   4866                       available_capabilities,
   4867                       available_capabilities_count);
   4868 
   4869     int32_t max_input_streams = 0;
   4870     staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
   4871                       &max_input_streams,
   4872                       1);
   4873 
   4874     int32_t io_format_map[] = {};
   4875 ;
   4876     staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
   4877                       io_format_map, 0);
   4878 
   4879     int32_t max_latency = (facingBack)? ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL:CAM_MAX_SYNC_LATENCY;
   4880     staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
   4881                       &max_latency,
   4882                       1);
   4883 
   4884     uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
   4885                                            ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
   4886     staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
   4887                       available_hot_pixel_modes,
   4888                       2);
   4889 
   4890     uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
   4891                                          ANDROID_SHADING_MODE_FAST,
   4892                                          ANDROID_SHADING_MODE_HIGH_QUALITY};
   4893     staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
   4894                       available_shading_modes,
   4895                       3);
   4896 
   4897     uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
   4898                                                   ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
   4899     staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
   4900                       available_lens_shading_map_modes,
   4901                       2);
   4902 
   4903     uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
   4904                                       ANDROID_EDGE_MODE_FAST,
   4905                                       ANDROID_EDGE_MODE_HIGH_QUALITY};
   4906     staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
   4907                       available_edge_modes,
   4908                       3);
   4909 
   4910     uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
   4911                                            ANDROID_NOISE_REDUCTION_MODE_FAST,
   4912                                            ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
   4913                                            ANDROID_NOISE_REDUCTION_MODE_MINIMAL};
   4914     staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
   4915                       available_noise_red_modes,
   4916                       4);
   4917 
   4918     uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
   4919                                          ANDROID_TONEMAP_MODE_FAST,
   4920                                          ANDROID_TONEMAP_MODE_HIGH_QUALITY};
   4921     staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
   4922                       available_tonemap_modes,
   4923                       3);
   4924 
   4925     uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
   4926     staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
   4927                       available_hot_pixel_map_modes,
   4928                       1);
   4929 
   4930     uint8_t fwkReferenceIlluminant = lookupFwkName(REFERENCE_ILLUMINANT_MAP,
   4931         sizeof(REFERENCE_ILLUMINANT_MAP) / sizeof(REFERENCE_ILLUMINANT_MAP[0]),
   4932         gCamCapability[cameraId]->reference_illuminant1);
   4933     staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1,
   4934                       &fwkReferenceIlluminant, 1);
   4935 
   4936     fwkReferenceIlluminant = lookupFwkName(REFERENCE_ILLUMINANT_MAP,
   4937         sizeof(REFERENCE_ILLUMINANT_MAP) / sizeof(REFERENCE_ILLUMINANT_MAP[0]),
   4938         gCamCapability[cameraId]->reference_illuminant2);
   4939     staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
   4940                       &fwkReferenceIlluminant, 1);
   4941 
   4942     staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1,
   4943                       (camera_metadata_rational_t*)gCamCapability[cameraId]->forward_matrix1,
   4944                       3*3);
   4945 
   4946     staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2,
   4947                       (camera_metadata_rational_t*)gCamCapability[cameraId]->forward_matrix2,
   4948                       3*3);
   4949 
   4950     staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1,
   4951                    (camera_metadata_rational_t*) gCamCapability[cameraId]->color_transform1,
   4952                       3*3);
   4953 
   4954     staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2,
   4955                    (camera_metadata_rational_t*) gCamCapability[cameraId]->color_transform2,
   4956                       3*3);
   4957 
   4958     staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
   4959                    (camera_metadata_rational_t*) gCamCapability[cameraId]->calibration_transform1,
   4960                       3*3);
   4961 
   4962     staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2,
   4963                    (camera_metadata_rational_t*) gCamCapability[cameraId]->calibration_transform2,
   4964                       3*3);
   4965 
   4966     int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
   4967        ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
   4968        ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
   4969        ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
   4970        ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
   4971        ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
   4972        ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
   4973        ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
   4974        ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
   4975        ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
   4976        ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
   4977        ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE, ANDROID_EDGE_STRENGTH,
   4978        ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
   4979        ANDROID_JPEG_GPS_COORDINATES,
   4980        ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
   4981        ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
   4982        ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
   4983        ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
   4984        ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
   4985        ANDROID_NOISE_REDUCTION_STRENGTH, ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
   4986        ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
   4987        ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
   4988        ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
   4989        ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
   4990        ANDROID_SHADING_STRENGTH, ANDROID_STATISTICS_FACE_DETECT_MODE,
   4991        ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
   4992        ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
   4993        ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
   4994        ANDROID_BLACK_LEVEL_LOCK };
   4995 
   4996     size_t request_keys_cnt =
   4997             sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
   4998     //NOTE: Please increase available_request_keys array size before
   4999     //adding any new entries.
   5000     int32_t available_request_keys[request_keys_cnt+1];
   5001     memcpy(available_request_keys, request_keys_basic,
   5002             sizeof(request_keys_basic));
   5003     if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
   5004         available_request_keys[request_keys_cnt++] =
   5005                 ANDROID_CONTROL_AF_REGIONS;
   5006     }
   5007     //NOTE: Please increase available_request_keys array size before
   5008     //adding any new entries.
   5009     staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
   5010                       available_request_keys, request_keys_cnt);
   5011 
   5012     int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
   5013        ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
   5014        ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
   5015        ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
   5016        ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
   5017        ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
   5018        ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
   5019        ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
   5020        ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
   5021        ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
   5022        ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
   5023        ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
   5024        ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
   5025        ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
   5026        ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
   5027        ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
   5028        ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
   5029        ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
   5030        ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
   5031        ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
   5032        ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_IDS,
   5033        ANDROID_STATISTICS_FACE_LANDMARKS, ANDROID_STATISTICS_FACE_RECTANGLES,
   5034        ANDROID_STATISTICS_FACE_SCORES};
   5035     size_t result_keys_cnt =
   5036             sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
   5037     //NOTE: Please increase available_result_keys array size before
   5038     //adding any new entries.
   5039     int32_t available_result_keys[result_keys_cnt+3];
   5040     memcpy(available_result_keys, result_keys_basic,
   5041             sizeof(result_keys_basic));
   5042     if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
   5043         available_result_keys[result_keys_cnt++] =
   5044                 ANDROID_CONTROL_AF_REGIONS;
   5045     }
   5046     if (facingBack) {
   5047        available_result_keys[result_keys_cnt++] = ANDROID_SENSOR_NOISE_PROFILE;
   5048        available_result_keys[result_keys_cnt++] = ANDROID_SENSOR_GREEN_SPLIT;
   5049     }
   5050     //NOTE: Please increase available_result_keys array size before
   5051     //adding any new entries.
   5052 
   5053     staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
   5054                       available_result_keys, result_keys_cnt);
   5055 
   5056     int32_t available_characteristics_keys[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
   5057        ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
   5058        ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
   5059        ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
   5060        ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
   5061        ANDROID_SCALER_CROPPING_TYPE,
   5062        ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
   5063        ANDROID_SYNC_MAX_LATENCY,
   5064        ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
   5065        ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
   5066        ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
   5067        ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
   5068        ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
   5069        ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
   5070        ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
   5071        ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
   5072        ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
   5073        ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
   5074        ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
   5075        ANDROID_LENS_FACING,
   5076        ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
   5077        ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
   5078        ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
   5079        ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
   5080        ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
   5081        ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
   5082        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
   5083        /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
   5084        ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
   5085        ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
   5086        ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
   5087        ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
   5088        ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
   5089        ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
   5090        ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
   5091        ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
   5092        ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
   5093        ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
   5094        ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
   5095        ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
   5096        ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
   5097        ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
   5098        ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
   5099        ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
   5100        ANDROID_EDGE_AVAILABLE_EDGE_MODES,
   5101        ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
   5102        ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
   5103        ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
   5104        ANDROID_TONEMAP_MAX_CURVE_POINTS,
   5105        ANDROID_CONTROL_AVAILABLE_MODES,
   5106        ANDROID_CONTROL_AE_LOCK_AVAILABLE,
   5107        ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
   5108        ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
   5109        ANDROID_SHADING_AVAILABLE_MODES,
   5110        ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL };
   5111     staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
   5112                       available_characteristics_keys,
   5113                       sizeof(available_characteristics_keys)/sizeof(int32_t));
   5114 
   5115     /*available stall durations depend on the hw + sw and will be different for different devices */
   5116     /*have to add for raw after implementation*/
   5117     int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
   5118     size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
   5119 
   5120     size_t available_stall_size = gCamCapability[cameraId]->picture_sizes_tbl_cnt * 4;
   5121     int64_t available_stall_durations[available_stall_size];
   5122     idx = 0;
   5123     for (uint32_t j = 0; j < stall_formats_count; j++) {
   5124        if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
   5125           for (uint32_t i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
   5126              available_stall_durations[idx]   = stall_formats[j];
   5127              available_stall_durations[idx+1] = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
   5128              available_stall_durations[idx+2] = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
   5129              available_stall_durations[idx+3] = gCamCapability[cameraId]->jpeg_stall_durations[i];
   5130              idx+=4;
   5131           }
   5132        } else {
   5133           for (uint32_t i = 0; i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
   5134              available_stall_durations[idx]   = stall_formats[j];
   5135              available_stall_durations[idx+1] = gCamCapability[cameraId]->raw_dim[i].width;
   5136              available_stall_durations[idx+2] = gCamCapability[cameraId]->raw_dim[i].height;
   5137              available_stall_durations[idx+3] = gCamCapability[cameraId]->raw16_stall_durations[i];
   5138              idx+=4;
   5139           }
   5140        }
   5141     }
   5142     staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
   5143                       available_stall_durations,
   5144                       idx);
   5145     //QCAMERA3_OPAQUE_RAW
   5146     uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
   5147     cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
   5148     switch (gCamCapability[cameraId]->opaque_raw_fmt) {
   5149     case LEGACY_RAW:
   5150         if (gCamCapability[cameraId]->white_level == (1<<8)-1)
   5151             fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
   5152         else if (gCamCapability[cameraId]->white_level == (1<<10)-1)
   5153             fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
   5154         else if (gCamCapability[cameraId]->white_level == (1<<12)-1)
   5155             fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
   5156         raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
   5157         break;
   5158     case MIPI_RAW:
   5159         if (gCamCapability[cameraId]->white_level == (1<<8)-1)
   5160             fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
   5161         else if (gCamCapability[cameraId]->white_level == (1<<10)-1)
   5162             fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
   5163         else if (gCamCapability[cameraId]->white_level == (1<<12)-1)
   5164             fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
   5165         raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
   5166         break;
   5167     default:
   5168         ALOGE("%s: unknown opaque_raw_format %d", __func__,
   5169                 gCamCapability[cameraId]->opaque_raw_fmt);
   5170         break;
   5171     }
   5172     staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
   5173 
   5174     if (gCamCapability[cameraId]->supported_raw_dim_cnt) {
   5175         int32_t strides[3*gCamCapability[cameraId]->supported_raw_dim_cnt];
   5176         for (size_t i = 0; i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
   5177             cam_stream_buf_plane_info_t buf_planes;
   5178             strides[i*3] = gCamCapability[cameraId]->raw_dim[i].width;
   5179             strides[i*3+1] = gCamCapability[cameraId]->raw_dim[i].height;
   5180             mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
   5181                 &gCamCapability[cameraId]->padding_info, &buf_planes);
   5182             strides[i*3+2] = buf_planes.plane_info.mp[0].stride;
   5183         }
   5184         staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides,
   5185                 3*gCamCapability[cameraId]->supported_raw_dim_cnt);
   5186     }
   5187     gStaticMetadata[cameraId] = staticInfo.release();
   5188     return rc;
   5189 }
   5190 
   5191 /*===========================================================================
   5192  * FUNCTION   : makeTable
   5193  *
   5194  * DESCRIPTION: make a table of sizes
   5195  *
   5196  * PARAMETERS :
   5197  *
   5198  *
   5199  *==========================================================================*/
   5200 void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
   5201                                           int32_t* sizeTable)
   5202 {
   5203     int j = 0;
   5204     for (int i = 0; i < size; i++) {
   5205         sizeTable[j] = dimTable[i].width;
   5206         sizeTable[j+1] = dimTable[i].height;
   5207         j+=2;
   5208     }
   5209 }
   5210 
   5211 /*===========================================================================
   5212  * FUNCTION   : makeFPSTable
   5213  *
   5214  * DESCRIPTION: make a table of fps ranges
   5215  *
   5216  * PARAMETERS :
   5217  *
   5218  *==========================================================================*/
   5219 void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
   5220                                           int32_t* fpsRangesTable)
   5221 {
   5222     int j = 0;
   5223     for (int i = 0; i < size; i++) {
   5224         fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
   5225         fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
   5226         j+=2;
   5227     }
   5228 }
   5229 
   5230 /*===========================================================================
   5231  * FUNCTION   : makeOverridesList
   5232  *
   5233  * DESCRIPTION: make a list of scene mode overrides
   5234  *
   5235  * PARAMETERS :
   5236  *
   5237  *
   5238  *==========================================================================*/
   5239 void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
   5240                                                   uint8_t size, uint8_t* overridesList,
   5241                                                   uint8_t* supported_indexes,
   5242                                                   int camera_id)
   5243 {
   5244     /*daemon will give a list of overrides for all scene modes.
   5245       However we should send the fwk only the overrides for the scene modes
   5246       supported by the framework*/
   5247     int j = 0, index = 0, supt = 0;
   5248     uint8_t focus_override;
   5249     for (int i = 0; i < size; i++) {
   5250         supt = 0;
   5251         index = supported_indexes[i];
   5252         overridesList[j] = gCamCapability[camera_id]->flash_available ? ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:ANDROID_CONTROL_AE_MODE_ON;
   5253         overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
   5254                                  sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
   5255                                                     overridesTable[index].awb_mode);
   5256         focus_override = (uint8_t)overridesTable[index].af_mode;
   5257         for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) {
   5258            if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
   5259               supt = 1;
   5260               break;
   5261            }
   5262         }
   5263         if (supt) {
   5264            overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
   5265                                               sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
   5266                                               focus_override);
   5267         } else {
   5268            overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
   5269         }
   5270         j+=3;
   5271     }
   5272 }
   5273 
   5274 /*===========================================================================
   5275  * FUNCTION   : filterJpegSizes
   5276  *
   5277  * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
   5278  *              could be downscaled to
   5279  *
   5280  * PARAMETERS :
   5281  *
   5282  * RETURN     : length of jpegSizes array
   5283  *==========================================================================*/
   5284 
   5285 uint8_t QCamera3HardwareInterface::filterJpegSizes(int32_t* jpegSizes, int32_t* processedSizes,
   5286                                                    uint8_t processedSizesCnt,
   5287                                                    uint8_t maxCount,
   5288                                                    cam_rect_t active_array_size,
   5289                                                    uint8_t downscale_factor)
   5290 {
   5291    if (downscale_factor == 0) {
   5292       downscale_factor = 1;
   5293    }
   5294     int32_t min_width = active_array_size.width / downscale_factor;
   5295     int32_t min_height = active_array_size.height / downscale_factor;
   5296     uint8_t jpegSizesCnt = 0;
   5297     if (processedSizesCnt > maxCount) {
   5298         processedSizesCnt = maxCount;
   5299     }
   5300     for (int i = 0; i < processedSizesCnt; i+=2) {
   5301         if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
   5302             jpegSizes[jpegSizesCnt] = processedSizes[i];
   5303             jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
   5304             jpegSizesCnt += 2;
   5305         }
   5306     }
   5307     return jpegSizesCnt;
   5308 }
   5309 
   5310 /*===========================================================================
   5311  * FUNCTION   : getPreviewHalPixelFormat
   5312  *
   5313  * DESCRIPTION: convert the format to type recognized by framework
   5314  *
   5315  * PARAMETERS : format : the format from backend
   5316  *
   5317  ** RETURN    : format recognized by framework
   5318  *
   5319  *==========================================================================*/
   5320 int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
   5321 {
   5322     int32_t halPixelFormat;
   5323 
   5324     switch (format) {
   5325     case CAM_FORMAT_YUV_420_NV12:
   5326         halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
   5327         break;
   5328     case CAM_FORMAT_YUV_420_NV21:
   5329         halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
   5330         break;
   5331     case CAM_FORMAT_YUV_420_NV21_ADRENO:
   5332         halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
   5333         break;
   5334     case CAM_FORMAT_YUV_420_YV12:
   5335         halPixelFormat = HAL_PIXEL_FORMAT_YV12;
   5336         break;
   5337     case CAM_FORMAT_YUV_422_NV16:
   5338     case CAM_FORMAT_YUV_422_NV61:
   5339     default:
   5340         halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
   5341         break;
   5342     }
   5343     return halPixelFormat;
   5344 }
   5345 /*===========================================================================
   5346  * FUNCTION   : computeNoiseModelEntryS
   5347  *
   5348  * DESCRIPTION: function to map a given sensitivity to the S noise
   5349  *              model parameters in the DNG noise model.
   5350  *
   5351  * PARAMETERS : sens : the sensor sensitivity
   5352  *
   5353  ** RETURN    : S (sensor amplification) noise
   5354  *
   5355  *==========================================================================*/
   5356 
   5357 double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
   5358     double s = 4.290559e-06 * sens + 4.370087e-05;
   5359     return s < 0.0 ? 0.0 : s;
   5360 }
   5361 
   5362 /*===========================================================================
   5363  * FUNCTION   : computeNoiseModelEntryO
   5364  *
   5365  * DESCRIPTION: function to map a given sensitivity to the O noise
   5366  *              model parameters in the DNG noise model.
   5367  *
   5368  * PARAMETERS : sens : the sensor sensitivity
   5369  *
   5370  ** RETURN    : O (sensor readout) noise
   5371  *
   5372  *==========================================================================*/
   5373 
   5374 double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
   5375     double digital_gain = sens / 320.0;
   5376     digital_gain = digital_gain < 1.0 ? 1.0 : digital_gain;
   5377     double o = 6.011498e-11 * sens * sens + 2.173219e-06 * digital_gain * digital_gain;
   5378     return o < 0.0 ? 0.0 : o;
   5379 }
   5380 
   5381 /*===========================================================================
   5382  * FUNCTION   : getSensorSensitivity
   5383  *
   5384  * DESCRIPTION: convert iso_mode to an integer value
   5385  *
   5386  * PARAMETERS : iso_mode : the iso_mode supported by sensor
   5387  *
   5388  ** RETURN    : sensitivity supported by sensor
   5389  *
   5390  *==========================================================================*/
   5391 int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
   5392 {
   5393     int32_t sensitivity;
   5394 
   5395     switch (iso_mode) {
   5396     case CAM_ISO_MODE_100:
   5397         sensitivity = 100;
   5398         break;
   5399     case CAM_ISO_MODE_200:
   5400         sensitivity = 200;
   5401         break;
   5402     case CAM_ISO_MODE_400:
   5403         sensitivity = 400;
   5404         break;
   5405     case CAM_ISO_MODE_800:
   5406         sensitivity = 800;
   5407         break;
   5408     case CAM_ISO_MODE_1600:
   5409         sensitivity = 1600;
   5410         break;
   5411     default:
   5412         sensitivity = -1;
   5413         break;
   5414     }
   5415     return sensitivity;
   5416 }
   5417 
   5418 /*===========================================================================
   5419  * FUNCTION   : AddSetParmEntryToBatch
   5420  *
   5421  * DESCRIPTION: add set parameter entry into batch
   5422  *
   5423  * PARAMETERS :
   5424  *   @p_table     : ptr to parameter buffer
   5425  *   @paramType   : parameter type
   5426  *   @paramLength : length of parameter value
   5427  *   @paramValue  : ptr to parameter value
   5428  *
   5429  * RETURN     : int32_t type of status
   5430  *              NO_ERROR  -- success
   5431  *              none-zero failure code
   5432  *==========================================================================*/
   5433 int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table,
   5434                                                           cam_intf_parm_type_t paramType,
   5435                                                           uint32_t paramLength,
   5436                                                           void *paramValue)
   5437 {
   5438     void* dst;
   5439     if ((NULL == p_table) || (NULL == paramValue) ||
   5440         (paramType >= CAM_INTF_PARM_MAX)) {
   5441         ALOGE("%s: Invalid p_table: %p, paramValue: %p, param type: %d",
   5442             __func__, p_table, paramValue, paramType);
   5443         return BAD_VALUE;
   5444     }
   5445     /*************************************************************************
   5446     *                   Copy contents into entry                             *
   5447     *************************************************************************/
   5448     if (paramLength > get_size_of(paramType)) {
   5449         ALOGE("%s: input larger than max entry size, type=%d, length =%d",
   5450                 __func__, paramType, paramLength);
   5451         return BAD_VALUE;
   5452     }
   5453     dst = get_pointer_of(paramType, p_table);
   5454     if(NULL != dst){
   5455         memcpy(dst, paramValue, paramLength);
   5456         p_table->is_valid[paramType] = 1;
   5457     }
   5458     return NO_ERROR;
   5459 }
   5460 
   5461 /*===========================================================================
   5462  * FUNCTION   : lookupFwkName
   5463  *
   5464  * DESCRIPTION: In case the enum is not same in fwk and backend
   5465  *              make sure the parameter is correctly propogated
   5466  *
   5467  * PARAMETERS  :
   5468  *   @arr      : map between the two enums
   5469  *   @len      : len of the map
   5470  *   @hal_name : name of the hal_parm to map
   5471  *
   5472  * RETURN     : int type of status
   5473  *              fwk_name  -- success
   5474  *              none-zero failure code
   5475  *==========================================================================*/
   5476 int32_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
   5477                                              int len, int hal_name)
   5478 {
   5479 
   5480     for (int i = 0; i < len; i++) {
   5481         if (arr[i].hal_name == hal_name)
   5482             return arr[i].fwk_name;
   5483     }
   5484 
   5485     /* Not able to find matching framework type is not necessarily
   5486      * an error case. This happens when mm-camera supports more attributes
   5487      * than the frameworks do */
   5488     CDBG_HIGH("%s: Cannot find matching framework type", __func__);
   5489     return NAME_NOT_FOUND;
   5490 }
   5491 
   5492 /*===========================================================================
   5493  * FUNCTION   : lookupHalName
   5494  *
   5495  * DESCRIPTION: In case the enum is not same in fwk and backend
   5496  *              make sure the parameter is correctly propogated
   5497  *
   5498  * PARAMETERS  :
   5499  *   @arr      : map between the two enums
   5500  *   @len      : len of the map
   5501  *   @fwk_name : name of the hal_parm to map
   5502  *
   5503  * RETURN     : int32_t type of status
   5504  *              hal_name  -- success
   5505  *              none-zero failure code
   5506  *==========================================================================*/
   5507 int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
   5508                                              int len, unsigned int fwk_name)
   5509 {
   5510     for (int i = 0; i < len; i++) {
   5511        if (arr[i].fwk_name == fwk_name)
   5512            return arr[i].hal_name;
   5513     }
   5514     ALOGE("%s: Cannot find matching hal type", __func__);
   5515     return NAME_NOT_FOUND;
   5516 }
   5517 
   5518 /*===========================================================================
   5519  * FUNCTION   : lookupProp
   5520  *
   5521  * DESCRIPTION: lookup a value by its name
   5522  *
   5523  * PARAMETERS :
   5524  *   @attr    : map contains <name, value>
   5525  *   @len     : size of the map
   5526  *   @name    : name to be looked up
   5527  *
   5528  * RETURN     : Value if found
   5529  *              CAM_CDS_MODE_MAX if not found
   5530  *==========================================================================*/
   5531 cam_cds_mode_type_t QCamera3HardwareInterface::lookupProp(const QCameraPropMap arr[],
   5532         int len, const char *name)
   5533 {
   5534     if (name) {
   5535         for (int i = 0; i < len; i++) {
   5536             if (!strcmp(arr[i].desc, name)) {
   5537                 return arr[i].val;
   5538             }
   5539         }
   5540     }
   5541     return CAM_CDS_MODE_MAX;
   5542 }
   5543 
   5544 /*===========================================================================
   5545  * FUNCTION   : getCapabilities
   5546  *
   5547  * DESCRIPTION: query camera capabilities
   5548  *
   5549  * PARAMETERS :
   5550  *   @cameraId  : camera Id
   5551  *   @info      : camera info struct to be filled in with camera capabilities
   5552  *
   5553  * RETURN     : int32_t type of status
   5554  *              NO_ERROR  -- success
   5555  *              none-zero failure code
   5556  *==========================================================================*/
   5557 int QCamera3HardwareInterface::getCamInfo(int cameraId,
   5558                                     struct camera_info *info)
   5559 {
   5560     ATRACE_CALL();
   5561     int rc = 0;
   5562 
   5563     if (NULL == gCamCapability[cameraId]) {
   5564         rc = initCapabilities(cameraId);
   5565         if (rc < 0) {
   5566             //pthread_mutex_unlock(&g_camlock);
   5567             return rc;
   5568         }
   5569     }
   5570 
   5571     if (NULL == gStaticMetadata[cameraId]) {
   5572         rc = initStaticMetadata(cameraId);
   5573         if (rc < 0) {
   5574             return rc;
   5575         }
   5576     }
   5577 
   5578     switch(gCamCapability[cameraId]->position) {
   5579     case CAM_POSITION_BACK:
   5580         info->facing = CAMERA_FACING_BACK;
   5581         break;
   5582 
   5583     case CAM_POSITION_FRONT:
   5584         info->facing = CAMERA_FACING_FRONT;
   5585         break;
   5586 
   5587     default:
   5588         ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
   5589         rc = -1;
   5590         break;
   5591     }
   5592 
   5593     info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
   5594     info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
   5595     info->static_camera_characteristics = gStaticMetadata[cameraId];
   5596 
   5597     //For now assume both cameras can operate independently.
   5598     info->conflicting_devices = NULL;
   5599     info->conflicting_devices_length = 0;
   5600 
   5601     //resource cost is 100 * MIN(1.0, m/M),
   5602     //where m is throughput requirement with maximum stream configuration
   5603     //and M is CPP maximum throughput.
   5604     float max_fps = 0.0;
   5605     for (uint32_t i = 0;
   5606             i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
   5607         if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
   5608             max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
   5609     }
   5610     float ratio = 1.0 * MAX_PROCESSED_STREAMS *
   5611             gCamCapability[cameraId]->active_array_size.width *
   5612             gCamCapability[cameraId]->active_array_size.height * max_fps /
   5613             gCamCapability[cameraId]->max_pixel_bandwidth;
   5614     info->resource_cost = 100 * MIN(1.0, ratio);
   5615     ALOGI("%s: camera %d resource cost is %d", __func__, cameraId,
   5616             info->resource_cost);
   5617 
   5618     return rc;
   5619 }
   5620 
   5621 /*===========================================================================
   5622  * FUNCTION   : translateCapabilityToMetadata
   5623  *
   5624  * DESCRIPTION: translate the capability into camera_metadata_t
   5625  *
   5626  * PARAMETERS : type of the request
   5627  *
   5628  *
   5629  * RETURN     : success: camera_metadata_t*
   5630  *              failure: NULL
   5631  *
   5632  *==========================================================================*/
   5633 camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
   5634 {
   5635     pthread_mutex_lock(&mMutex);
   5636 
   5637     if (mDefaultMetadata[type] != NULL) {
   5638         pthread_mutex_unlock(&mMutex);
   5639         return mDefaultMetadata[type];
   5640     }
   5641     //first time we are handling this request
   5642     //fill up the metadata structure using the wrapper class
   5643     CameraMetadata settings;
   5644     //translate from cam_capability_t to camera_metadata_tag_t
   5645     static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
   5646     settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
   5647     int32_t defaultRequestID = 0;
   5648     settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
   5649 
   5650     /* OIS disable */
   5651     char ois_prop[PROPERTY_VALUE_MAX];
   5652     memset(ois_prop, 0, sizeof(ois_prop));
   5653     property_get("persist.camera.ois.disable", ois_prop, "0");
   5654     uint8_t ois_disable = atoi(ois_prop);
   5655 
   5656     /* OIS/EIS disable */
   5657     char eis_prop[PROPERTY_VALUE_MAX];
   5658     memset(eis_prop, 0, sizeof(eis_prop));
   5659     property_get("camera.eis.enable", eis_prop, "0");
   5660     mEisEnable = atoi(eis_prop);
   5661 
   5662     /* Force video to use OIS */
   5663     char videoOisProp[PROPERTY_VALUE_MAX];
   5664     memset(videoOisProp, 0, sizeof(videoOisProp));
   5665     property_get("persist.camera.ois.video", videoOisProp, "1");
   5666     uint8_t forceVideoOis = atoi(videoOisProp);
   5667 
   5668     uint8_t controlIntent = 0;
   5669     uint8_t focusMode;
   5670     uint8_t vsMode;
   5671     uint8_t optStabMode;
   5672     uint8_t cacMode;
   5673     uint8_t edge_mode;
   5674     uint8_t noise_red_mode;
   5675     uint8_t tonemap_mode;
   5676     vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
   5677     switch (type) {
   5678       case CAMERA3_TEMPLATE_PREVIEW:
   5679         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
   5680         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
   5681         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
   5682         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
   5683         edge_mode = ANDROID_EDGE_MODE_FAST;
   5684         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
   5685         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
   5686         break;
   5687       case CAMERA3_TEMPLATE_STILL_CAPTURE:
   5688         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
   5689         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
   5690         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
   5691         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
   5692         edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
   5693         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
   5694         tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
   5695         break;
   5696       case CAMERA3_TEMPLATE_VIDEO_RECORD:
   5697         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
   5698         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
   5699         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
   5700         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
   5701         edge_mode = ANDROID_EDGE_MODE_FAST;
   5702         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
   5703         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
   5704         if (forceVideoOis)
   5705             optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
   5706         break;
   5707       case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
   5708         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
   5709         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
   5710         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
   5711         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
   5712         edge_mode = ANDROID_EDGE_MODE_FAST;
   5713         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
   5714         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
   5715         if (forceVideoOis)
   5716             optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
   5717         break;
   5718       case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
   5719         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
   5720         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
   5721         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
   5722         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
   5723         edge_mode = ANDROID_EDGE_MODE_FAST;
   5724         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
   5725         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
   5726         break;
   5727       case CAMERA3_TEMPLATE_MANUAL:
   5728         edge_mode = ANDROID_EDGE_MODE_FAST;
   5729         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
   5730         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
   5731         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
   5732         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
   5733         focusMode = ANDROID_CONTROL_AF_MODE_OFF;
   5734         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
   5735         break;
   5736       default:
   5737         edge_mode = ANDROID_EDGE_MODE_FAST;
   5738         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
   5739         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
   5740         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
   5741         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
   5742         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
   5743         break;
   5744     }
   5745     settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
   5746     settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
   5747     settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
   5748     if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
   5749         focusMode = ANDROID_CONTROL_AF_MODE_OFF;
   5750     }
   5751     settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
   5752 
   5753     if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
   5754             gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
   5755         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
   5756     else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
   5757             gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
   5758             || ois_disable)
   5759         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
   5760     settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
   5761 
   5762     settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
   5763             &gCamCapability[mCameraId]->exposure_compensation_default, 1);
   5764 
   5765     static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
   5766     settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
   5767 
   5768     static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
   5769     settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
   5770 
   5771     static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
   5772     settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
   5773 
   5774     static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
   5775     settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
   5776 
   5777     static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
   5778     settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
   5779 
   5780     static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
   5781     settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
   5782 
   5783     static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
   5784     settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
   5785 
   5786     /*flash*/
   5787     static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
   5788     settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
   5789 
   5790     static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
   5791     settings.update(ANDROID_FLASH_FIRING_POWER,
   5792             &flashFiringLevel, 1);
   5793 
   5794     /* lens */
   5795     float default_aperture = gCamCapability[mCameraId]->apertures[0];
   5796     settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
   5797 
   5798     if (gCamCapability[mCameraId]->filter_densities_count) {
   5799         float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
   5800         settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
   5801                         gCamCapability[mCameraId]->filter_densities_count);
   5802     }
   5803 
   5804     float default_focal_length = gCamCapability[mCameraId]->focal_length;
   5805     settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
   5806 
   5807     float default_focus_distance = 0;
   5808     settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
   5809 
   5810     static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
   5811     settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
   5812 
   5813     static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
   5814     settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
   5815 
   5816     static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
   5817     settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
   5818 
   5819     static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_FULL;
   5820     settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
   5821 
   5822     static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
   5823     settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
   5824 
   5825     static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
   5826     settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
   5827 
   5828     static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
   5829     settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
   5830 
   5831     static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
   5832     settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
   5833 
   5834     /* Exposure time(Update the Min Exposure Time)*/
   5835     int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
   5836     settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
   5837 
   5838     /* frame duration */
   5839     static const int64_t default_frame_duration = NSEC_PER_33MSEC;
   5840     settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
   5841 
   5842     /* sensitivity */
   5843     static const int32_t default_sensitivity = 100;
   5844     settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
   5845 
   5846     /*edge mode*/
   5847     settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
   5848 
   5849     /*noise reduction mode*/
   5850     settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
   5851 
   5852     /*color correction mode*/
   5853     static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
   5854     settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
   5855 
   5856     /*transform matrix mode*/
   5857     settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
   5858 
   5859     uint8_t edge_strength = (uint8_t)gCamCapability[mCameraId]->sharpness_ctrl.def_value;
   5860     settings.update(ANDROID_EDGE_STRENGTH, &edge_strength, 1);
   5861 
   5862     int32_t scaler_crop_region[4];
   5863     scaler_crop_region[0] = 0;
   5864     scaler_crop_region[1] = 0;
   5865     scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
   5866     scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
   5867     settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
   5868 
   5869     static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
   5870     settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
   5871 
   5872     /*focus distance*/
   5873     float focus_distance = 0.0;
   5874     settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
   5875 
   5876     /*target fps range: use maximum range for picture, and maximum fixed range for video*/
   5877     float max_range = 0.0;
   5878     float max_fixed_fps = 0.0;
   5879     int32_t fps_range[2] = {0, 0};
   5880     for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
   5881             i++) {
   5882         float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
   5883             gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
   5884         if (type == CAMERA3_TEMPLATE_PREVIEW ||
   5885                 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
   5886                 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
   5887             if (range > max_range) {
   5888                 fps_range[0] =
   5889                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
   5890                 fps_range[1] =
   5891                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
   5892                 max_range = range;
   5893             }
   5894         } else {
   5895             if (range < 0.01 && max_fixed_fps <
   5896                     gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
   5897                 fps_range[0] =
   5898                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
   5899                 fps_range[1] =
   5900                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
   5901                 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
   5902             }
   5903         }
   5904     }
   5905     settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
   5906 
   5907     /*precapture trigger*/
   5908     uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
   5909     settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
   5910 
   5911     /*af trigger*/
   5912     uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
   5913     settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
   5914 
   5915     /* ae & af regions */
   5916     int32_t active_region[] = {
   5917             gCamCapability[mCameraId]->active_array_size.left,
   5918             gCamCapability[mCameraId]->active_array_size.top,
   5919             gCamCapability[mCameraId]->active_array_size.left +
   5920                     gCamCapability[mCameraId]->active_array_size.width,
   5921             gCamCapability[mCameraId]->active_array_size.top +
   5922                     gCamCapability[mCameraId]->active_array_size.height,
   5923             0};
   5924     settings.update(ANDROID_CONTROL_AE_REGIONS, active_region, 5);
   5925     settings.update(ANDROID_CONTROL_AF_REGIONS, active_region, 5);
   5926 
   5927     /* black level lock */
   5928     uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
   5929     settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
   5930 
   5931     /* face detect mode */
   5932     uint8_t facedetect_mode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
   5933     settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &facedetect_mode, 1);
   5934 
   5935     /* lens shading map mode */
   5936     uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
   5937     if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type &&
   5938         gCamCapability[mCameraId]->supported_raw_dim_cnt > 0) {
   5939         shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
   5940     }
   5941     settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
   5942 
   5943     //special defaults for manual template
   5944     if (type == CAMERA3_TEMPLATE_MANUAL) {
   5945         static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
   5946         settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
   5947 
   5948         static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
   5949         settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
   5950 
   5951         static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
   5952         settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
   5953 
   5954         static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
   5955         settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
   5956 
   5957         static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
   5958         settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
   5959 
   5960         static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
   5961         settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
   5962     }
   5963 
   5964     /* CDS default */
   5965     char prop[PROPERTY_VALUE_MAX];
   5966     memset(prop, 0, sizeof(prop));
   5967     property_get("persist.camera.CDS", prop, "Auto");
   5968     cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
   5969     cds_mode = lookupProp(CDS_MAP, sizeof(CDS_MAP)/sizeof(QCameraPropMap), prop);
   5970     if (CAM_CDS_MODE_MAX == cds_mode) {
   5971         cds_mode = CAM_CDS_MODE_AUTO;
   5972     }
   5973     int32_t mode = cds_mode;
   5974     settings.update(QCAMERA3_CDS_MODE, &mode, 1);
   5975 
   5976     mDefaultMetadata[type] = settings.release();
   5977 
   5978     pthread_mutex_unlock(&mMutex);
   5979     return mDefaultMetadata[type];
   5980 }
   5981 
   5982 /*===========================================================================
   5983  * FUNCTION   : setFrameParameters
   5984  *
   5985  * DESCRIPTION: set parameters per frame as requested in the metadata from
   5986  *              framework
   5987  *
   5988  * PARAMETERS :
   5989  *   @request   : request that needs to be serviced
   5990  *   @streamID : Stream ID of all the requested streams
   5991  *   @blob_request: Whether this request is a blob request or not
   5992  *
   5993  * RETURN     : success: NO_ERROR
   5994  *              failure:
   5995  *==========================================================================*/
   5996 int QCamera3HardwareInterface::setFrameParameters(
   5997                     camera3_capture_request_t *request,
   5998                     cam_stream_ID_t streamID,
   5999                     int blob_request,
   6000                     uint32_t snapshotStreamId)
   6001 {
   6002     /*translate from camera_metadata_t type to parm_type_t*/
   6003     int rc = 0;
   6004     int32_t hal_version = CAM_HAL_V3;
   6005 
   6006     memset(mParameters, 0, sizeof(parm_buffer_t));
   6007     rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
   6008                 sizeof(hal_version), &hal_version);
   6009     if (rc < 0) {
   6010         ALOGE("%s: Failed to set hal version in the parameters", __func__);
   6011         return BAD_VALUE;
   6012     }
   6013 
   6014     /*we need to update the frame number in the parameters*/
   6015     rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
   6016                                 sizeof(request->frame_number), &(request->frame_number));
   6017     if (rc < 0) {
   6018         ALOGE("%s: Failed to set the frame number in the parameters", __func__);
   6019         return BAD_VALUE;
   6020     }
   6021 
   6022     /* Update stream id of all the requested buffers */
   6023     rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_ID,
   6024                                 sizeof(cam_stream_ID_t), &streamID);
   6025 
   6026     if (rc < 0) {
   6027         ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
   6028         return BAD_VALUE;
   6029     }
   6030 
   6031     if(request->settings != NULL){
   6032         rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
   6033         if (blob_request)
   6034                 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
   6035     }
   6036 
   6037     return rc;
   6038 }
   6039 
   6040 /*===========================================================================
   6041  * FUNCTION   : setReprocParameters
   6042  *
   6043  * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
   6044  *              return it.
   6045  *
   6046  * PARAMETERS :
   6047  *   @request   : request that needs to be serviced
   6048  *
   6049  * RETURN     : success: NO_ERROR
   6050  *              failure:
   6051  *==========================================================================*/
   6052 int32_t QCamera3HardwareInterface::setReprocParameters(
   6053         camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
   6054         uint32_t snapshotStreamId)
   6055 {
   6056     /*translate from camera_metadata_t type to parm_type_t*/
   6057     int rc = 0;
   6058 
   6059     if (NULL == request->settings){
   6060         ALOGE("%s: Reprocess settings cannot be NULL", __func__);
   6061         return BAD_VALUE;
   6062     }
   6063 
   6064     if (NULL == reprocParam) {
   6065         ALOGE("%s: Invalid reprocessing metadata buffer", __func__);
   6066         return BAD_VALUE;
   6067     }
   6068     memset(reprocParam, 0, sizeof(metadata_buffer_t));
   6069 
   6070     /*we need to update the frame number in the parameters*/
   6071     rc = AddSetParmEntryToBatch(reprocParam, CAM_INTF_META_FRAME_NUMBER,
   6072                                 sizeof(request->frame_number), &(request->frame_number));
   6073     if (rc < 0) {
   6074         ALOGE("%s: Failed to set the frame number in the parameters", __func__);
   6075         return rc;
   6076     }
   6077 
   6078     rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
   6079     if (rc < 0) {
   6080         ALOGE("%s: Failed to translate reproc request", __func__);
   6081         return rc;
   6082     }
   6083 
   6084     CameraMetadata frame_settings;
   6085     frame_settings = request->settings;
   6086     if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
   6087             frame_settings.exists(QCAMERA3_CROP_REPROCESS) &&
   6088             frame_settings.exists(QCAMERA3_CROP_STREAM_ID_REPROCESS)) {
   6089         int32_t *crop_count =
   6090                 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
   6091         int32_t *crop_data =
   6092                 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
   6093         int32_t *crop_stream_ids =
   6094                 frame_settings.find(QCAMERA3_CROP_STREAM_ID_REPROCESS).data.i32;
   6095         if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
   6096             bool found = false;
   6097             int32_t i;
   6098             for (i = 0; i < *crop_count; i++) {
   6099                 if (crop_stream_ids[i] == (int32_t) request->input_buffer->stream) {
   6100                     found = true;
   6101                     break;
   6102                 }
   6103             }
   6104 
   6105             if (found) {
   6106                 cam_crop_data_t crop_meta;
   6107                 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
   6108                 crop_meta.num_of_streams = 1;
   6109                 crop_meta.crop_info[0].crop.left   = crop_data[i*4];
   6110                 crop_meta.crop_info[0].crop.top    = crop_data[i*4 + 1];
   6111                 crop_meta.crop_info[0].crop.width  = crop_data[i*4 + 2];
   6112                 crop_meta.crop_info[0].crop.height = crop_data[i*4 + 3];
   6113                 rc = AddSetParmEntryToBatch(reprocParam,
   6114                         CAM_INTF_META_CROP_DATA,
   6115                         sizeof(cam_crop_data_t), &crop_meta);
   6116                 CDBG("%s: Found reprocess crop data for stream %p %dx%d, %dx%d",
   6117                         __func__,
   6118                         request->input_buffer->stream,
   6119                         crop_meta.crop_info[0].crop.left,
   6120                         crop_meta.crop_info[0].crop.top,
   6121                         crop_meta.crop_info[0].crop.width,
   6122                         crop_meta.crop_info[0].crop.height);
   6123             } else {
   6124                 ALOGE("%s: No matching reprocess input stream found!", __func__);
   6125             }
   6126         } else {
   6127             ALOGE("%s: Invalid reprocess crop count %d!", __func__, *crop_count);
   6128         }
   6129     }
   6130 
   6131     return rc;
   6132 }
   6133 
   6134 /*===========================================================================
   6135  * FUNCTION   : translateToHalMetadata
   6136  *
   6137  * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
   6138  *
   6139  *
   6140  * PARAMETERS :
   6141  *   @request  : request sent from framework
   6142  *
   6143  *
   6144  * RETURN     : success: NO_ERROR
   6145  *              failure:
   6146  *==========================================================================*/
   6147 int QCamera3HardwareInterface::translateToHalMetadata
   6148                                   (const camera3_capture_request_t *request,
   6149                                    metadata_buffer_t *hal_metadata,
   6150                                    uint32_t snapshotStreamId)
   6151 {
   6152     int rc = 0;
   6153     CameraMetadata frame_settings;
   6154     frame_settings = request->settings;
   6155 
   6156     /* Do not change the order of the following list unless you know what you are
   6157      * doing.
   6158      * The order is laid out in such a way that parameters in the front of the table
   6159      * may be used to override the parameters later in the table. Examples are:
   6160      * 1. META_MODE should precede AEC/AWB/AF MODE
   6161      * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
   6162      * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
   6163      * 4. Any mode should precede it's corresponding settings
   6164      */
   6165     if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
   6166         uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
   6167         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_MODE,
   6168                 sizeof(metaMode), &metaMode);
   6169         if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
   6170            camera_metadata_entry entry = frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
   6171            if (0 < entry.count) {
   6172                uint8_t fwk_sceneMode = entry.data.u8[0];
   6173                uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP,
   6174                                                  sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
   6175                                                  fwk_sceneMode);
   6176                rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_BESTSHOT_MODE,
   6177                     sizeof(sceneMode), &sceneMode);
   6178            }
   6179         } else if (metaMode == ANDROID_CONTROL_MODE_OFF) {
   6180            uint8_t sceneMode = CAM_SCENE_MODE_OFF;
   6181            rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_BESTSHOT_MODE,
   6182                 sizeof(sceneMode), &sceneMode);
   6183         } else if (metaMode == ANDROID_CONTROL_MODE_AUTO) {
   6184            uint8_t sceneMode = CAM_SCENE_MODE_OFF;
   6185            rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_BESTSHOT_MODE,
   6186                 sizeof(sceneMode), &sceneMode);
   6187         }
   6188     }
   6189 
   6190     if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
   6191         uint8_t fwk_aeMode =
   6192             frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
   6193         uint8_t aeMode;
   6194         int32_t redeye;
   6195 
   6196         if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
   6197             aeMode = CAM_AE_MODE_OFF;
   6198         } else {
   6199             aeMode = CAM_AE_MODE_ON;
   6200         }
   6201         if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
   6202             redeye = 1;
   6203         } else {
   6204             redeye = 0;
   6205         }
   6206 
   6207         int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
   6208                                           sizeof(AE_FLASH_MODE_MAP),
   6209                                           fwk_aeMode);
   6210         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_AEC_MODE,
   6211                 sizeof(aeMode), &aeMode);
   6212         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_LED_MODE,
   6213                 sizeof(flashMode), &flashMode);
   6214         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION,
   6215                 sizeof(redeye), &redeye);
   6216     }
   6217 
   6218     if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
   6219         uint8_t fwk_whiteLevel =
   6220             frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
   6221         uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
   6222                 sizeof(WHITE_BALANCE_MODES_MAP),
   6223                 fwk_whiteLevel);
   6224         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE,
   6225                 sizeof(whiteLevel), &whiteLevel);
   6226     }
   6227 
   6228     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
   6229         uint8_t fwk_cacMode =
   6230                 frame_settings.find(
   6231                         ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
   6232         int8_t val = lookupHalName(COLOR_ABERRATION_MAP,
   6233                 sizeof(COLOR_ABERRATION_MAP)/sizeof(COLOR_ABERRATION_MAP[0]),
   6234                 fwk_cacMode);
   6235         if (NAME_NOT_FOUND != val) {
   6236             cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
   6237             rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_CAC,
   6238                     sizeof(cacMode), &cacMode);
   6239         } else {
   6240             ALOGE("%s: Invalid framework CAC mode: %d", __func__, fwk_cacMode);
   6241         }
   6242     }
   6243 
   6244     if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
   6245         uint8_t fwk_focusMode =
   6246             frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
   6247         uint8_t focusMode;
   6248         focusMode = lookupHalName(FOCUS_MODES_MAP,
   6249                                    sizeof(FOCUS_MODES_MAP),
   6250                                    fwk_focusMode);
   6251         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_FOCUS_MODE,
   6252                 sizeof(focusMode), &focusMode);
   6253     }
   6254 
   6255     if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
   6256         float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
   6257         rc = AddSetParmEntryToBatch(hal_metadata,
   6258                 CAM_INTF_META_LENS_FOCUS_DISTANCE,
   6259                 sizeof(focalDistance), &focalDistance);
   6260     }
   6261 
   6262     if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
   6263         uint8_t fwk_antibandingMode =
   6264             frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
   6265         int32_t hal_antibandingMode = lookupHalName(ANTIBANDING_MODES_MAP,
   6266                      sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
   6267                      fwk_antibandingMode);
   6268         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
   6269                 sizeof(hal_antibandingMode), &hal_antibandingMode);
   6270     }
   6271 
   6272     if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
   6273         int32_t expCompensation = frame_settings.find(
   6274             ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
   6275         if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
   6276             expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
   6277         if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
   6278             expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
   6279         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
   6280           sizeof(expCompensation), &expCompensation);
   6281     }
   6282 
   6283     if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
   6284         uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
   6285         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_AEC_LOCK,
   6286                 sizeof(aeLock), &aeLock);
   6287     }
   6288     if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
   6289         cam_fps_range_t fps_range;
   6290         fps_range.min_fps =
   6291             frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
   6292         fps_range.max_fps =
   6293             frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
   6294         fps_range.video_min_fps = fps_range.min_fps;
   6295         fps_range.video_max_fps = fps_range.max_fps;
   6296         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_FPS_RANGE,
   6297                 sizeof(fps_range), &fps_range);
   6298     }
   6299 
   6300     if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
   6301         uint8_t awbLock =
   6302             frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
   6303         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_AWB_LOCK,
   6304                 sizeof(awbLock), &awbLock);
   6305     }
   6306 
   6307     if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
   6308         uint8_t fwk_effectMode =
   6309             frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
   6310         uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
   6311                 sizeof(EFFECT_MODES_MAP),
   6312                 fwk_effectMode);
   6313         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_EFFECT,
   6314                 sizeof(effectMode), &effectMode);
   6315     }
   6316 
   6317     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
   6318         uint8_t colorCorrectMode =
   6319             frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
   6320         rc =
   6321             AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
   6322                     sizeof(colorCorrectMode), &colorCorrectMode);
   6323     }
   6324 
   6325     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
   6326         cam_color_correct_gains_t colorCorrectGains;
   6327         for (int i = 0; i < 4; i++) {
   6328             colorCorrectGains.gains[i] =
   6329                 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
   6330         }
   6331         rc =
   6332             AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
   6333                     sizeof(colorCorrectGains), &colorCorrectGains);
   6334     }
   6335 
   6336     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
   6337         cam_color_correct_matrix_t colorCorrectTransform;
   6338         cam_rational_type_t transform_elem;
   6339         int num = 0;
   6340         for (int i = 0; i < 3; i++) {
   6341            for (int j = 0; j < 3; j++) {
   6342               transform_elem.numerator =
   6343                  frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
   6344               transform_elem.denominator =
   6345                  frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
   6346               colorCorrectTransform.transform_matrix[i][j] = transform_elem;
   6347               num++;
   6348            }
   6349         }
   6350         rc =
   6351             AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
   6352                     sizeof(colorCorrectTransform), &colorCorrectTransform);
   6353     }
   6354 
   6355     cam_trigger_t aecTrigger;
   6356     aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
   6357     aecTrigger.trigger_id = -1;
   6358     if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
   6359         frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
   6360         aecTrigger.trigger =
   6361             frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
   6362         aecTrigger.trigger_id =
   6363             frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
   6364         rc = AddSetParmEntryToBatch(hal_metadata,
   6365                 CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
   6366                 sizeof(aecTrigger), &aecTrigger);
   6367     }
   6368     /*af_trigger must come with a trigger id*/
   6369     if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
   6370         frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
   6371         cam_trigger_t af_trigger;
   6372         af_trigger.trigger =
   6373             frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
   6374         af_trigger.trigger_id =
   6375             frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
   6376         rc = AddSetParmEntryToBatch(hal_metadata,
   6377                 CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
   6378     }
   6379 
   6380     if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
   6381         int32_t demosaic =
   6382             frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
   6383         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_DEMOSAIC,
   6384                 sizeof(demosaic), &demosaic);
   6385     }
   6386 
   6387     if (frame_settings.exists(ANDROID_EDGE_MODE)) {
   6388         cam_edge_application_t edge_application;
   6389         edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
   6390         if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
   6391             edge_application.sharpness = 0;
   6392         } else {
   6393             if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
   6394                 uint8_t edgeStrength =
   6395                     frame_settings.find(ANDROID_EDGE_STRENGTH).data.u8[0];
   6396                 edge_application.sharpness = (int32_t)edgeStrength;
   6397             } else {
   6398                 edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
   6399             }
   6400         }
   6401         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_EDGE_MODE,
   6402                 sizeof(edge_application), &edge_application);
   6403     }
   6404 
   6405     if (frame_settings.exists(ANDROID_FLASH_MODE)) {
   6406         int32_t respectFlashMode = 1;
   6407         if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
   6408             uint8_t fwk_aeMode =
   6409                 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
   6410             if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
   6411                 respectFlashMode = 0;
   6412                 CDBG_HIGH("%s: AE Mode controls flash, ignore android.flash.mode",
   6413                     __func__);
   6414             }
   6415         }
   6416         if (respectFlashMode) {
   6417             uint8_t flashMode =
   6418                 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
   6419             flashMode = (int32_t)lookupHalName(FLASH_MODES_MAP,
   6420                                           sizeof(FLASH_MODES_MAP),
   6421                                           flashMode);
   6422             CDBG_HIGH("%s: flash mode after mapping %d", __func__, flashMode);
   6423             // To check: CAM_INTF_META_FLASH_MODE usage
   6424             rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_LED_MODE,
   6425                           sizeof(flashMode), &flashMode);
   6426         }
   6427     }
   6428 
   6429     if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
   6430         uint8_t flashPower =
   6431             frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
   6432         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_FLASH_POWER,
   6433                 sizeof(flashPower), &flashPower);
   6434     }
   6435 
   6436     if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
   6437         int64_t flashFiringTime =
   6438             frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
   6439         rc = AddSetParmEntryToBatch(hal_metadata,
   6440                 CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
   6441     }
   6442 
   6443     if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
   6444         uint8_t hotPixelMode =
   6445             frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
   6446         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
   6447                 sizeof(hotPixelMode), &hotPixelMode);
   6448     }
   6449 
   6450     if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
   6451         float lensAperture =
   6452             frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
   6453         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_LENS_APERTURE,
   6454                 sizeof(lensAperture), &lensAperture);
   6455     }
   6456 
   6457     if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
   6458         float filterDensity =
   6459             frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
   6460         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
   6461                 sizeof(filterDensity), &filterDensity);
   6462     }
   6463 
   6464     if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
   6465         float focalLength =
   6466             frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
   6467         rc = AddSetParmEntryToBatch(hal_metadata,
   6468                 CAM_INTF_META_LENS_FOCAL_LENGTH,
   6469                 sizeof(focalLength), &focalLength);
   6470     }
   6471 
   6472     if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
   6473         uint8_t optStabMode =
   6474             frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
   6475         rc = AddSetParmEntryToBatch(hal_metadata,
   6476                 CAM_INTF_META_LENS_OPT_STAB_MODE,
   6477                 sizeof(optStabMode), &optStabMode);
   6478     }
   6479 
   6480     if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
   6481         uint8_t noiseRedMode =
   6482             frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
   6483         rc = AddSetParmEntryToBatch(hal_metadata,
   6484                 CAM_INTF_META_NOISE_REDUCTION_MODE,
   6485                 sizeof(noiseRedMode), &noiseRedMode);
   6486     }
   6487 
   6488     if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
   6489         uint8_t noiseRedStrength =
   6490             frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
   6491         rc = AddSetParmEntryToBatch(hal_metadata,
   6492                 CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
   6493                 sizeof(noiseRedStrength), &noiseRedStrength);
   6494     }
   6495 
   6496     if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
   6497         float reprocessEffectiveExposureFactor =
   6498             frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
   6499         rc = AddSetParmEntryToBatch(hal_metadata,
   6500                 CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
   6501                 sizeof(reprocessEffectiveExposureFactor), &reprocessEffectiveExposureFactor);
   6502     }
   6503 
   6504     cam_crop_region_t scalerCropRegion;
   6505     bool scalerCropSet = false;
   6506     if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
   6507         scalerCropRegion.left =
   6508             frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
   6509         scalerCropRegion.top =
   6510             frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
   6511         scalerCropRegion.width =
   6512             frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
   6513         scalerCropRegion.height =
   6514             frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
   6515         rc = AddSetParmEntryToBatch(hal_metadata,
   6516                 CAM_INTF_META_SCALER_CROP_REGION,
   6517                 sizeof(scalerCropRegion), &scalerCropRegion);
   6518         scalerCropSet = true;
   6519     }
   6520 
   6521     if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
   6522         int64_t sensorExpTime =
   6523             frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
   6524         CDBG("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
   6525         rc = AddSetParmEntryToBatch(hal_metadata,
   6526                 CAM_INTF_META_SENSOR_EXPOSURE_TIME,
   6527                 sizeof(sensorExpTime), &sensorExpTime);
   6528     }
   6529 
   6530     if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
   6531         int64_t sensorFrameDuration =
   6532             frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
   6533         int64_t minFrameDuration = getMinFrameDuration(request);
   6534         sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
   6535         if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
   6536             sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
   6537         CDBG("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
   6538         rc = AddSetParmEntryToBatch(hal_metadata,
   6539                 CAM_INTF_META_SENSOR_FRAME_DURATION,
   6540                 sizeof(sensorFrameDuration), &sensorFrameDuration);
   6541     }
   6542 
   6543     if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
   6544         int32_t sensorSensitivity =
   6545             frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
   6546         if (sensorSensitivity <
   6547                 gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
   6548             sensorSensitivity =
   6549                 gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
   6550         if (sensorSensitivity >
   6551                 gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
   6552             sensorSensitivity =
   6553                 gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
   6554         CDBG("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
   6555         rc = AddSetParmEntryToBatch(hal_metadata,
   6556                 CAM_INTF_META_SENSOR_SENSITIVITY,
   6557                 sizeof(sensorSensitivity), &sensorSensitivity);
   6558     }
   6559 
   6560     if (frame_settings.exists(ANDROID_SHADING_MODE)) {
   6561         uint8_t shadingMode =
   6562             frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
   6563         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_SHADING_MODE,
   6564                 sizeof(shadingMode), &shadingMode);
   6565     }
   6566 
   6567     if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
   6568         uint8_t shadingStrength =
   6569             frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
   6570         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_SHADING_STRENGTH,
   6571                 sizeof(shadingStrength), &shadingStrength);
   6572     }
   6573 
   6574     if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
   6575         uint8_t fwk_facedetectMode =
   6576             frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
   6577         uint8_t facedetectMode =
   6578             lookupHalName(FACEDETECT_MODES_MAP,
   6579                 sizeof(FACEDETECT_MODES_MAP), fwk_facedetectMode);
   6580         rc = AddSetParmEntryToBatch(hal_metadata,
   6581                 CAM_INTF_META_STATS_FACEDETECT_MODE,
   6582                 sizeof(facedetectMode), &facedetectMode);
   6583     }
   6584 
   6585     if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
   6586         uint8_t histogramMode =
   6587             frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
   6588         rc = AddSetParmEntryToBatch(hal_metadata,
   6589                 CAM_INTF_META_STATS_HISTOGRAM_MODE,
   6590                 sizeof(histogramMode), &histogramMode);
   6591     }
   6592 
   6593     if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
   6594         uint8_t sharpnessMapMode =
   6595             frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
   6596         rc = AddSetParmEntryToBatch(hal_metadata,
   6597                 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
   6598                 sizeof(sharpnessMapMode), &sharpnessMapMode);
   6599     }
   6600 
   6601     if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
   6602         uint8_t tonemapMode =
   6603             frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
   6604         rc = AddSetParmEntryToBatch(hal_metadata,
   6605                 CAM_INTF_META_TONEMAP_MODE,
   6606                 sizeof(tonemapMode), &tonemapMode);
   6607     }
   6608     /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
   6609     /*All tonemap channels will have the same number of points*/
   6610     if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
   6611         frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
   6612         frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
   6613         cam_rgb_tonemap_curves tonemapCurves;
   6614         tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
   6615         if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
   6616             ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d",
   6617                     __func__, tonemapCurves.tonemap_points_cnt,
   6618                     CAM_MAX_TONEMAP_CURVE_SIZE);
   6619             tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
   6620         }
   6621 
   6622         /* ch0 = G*/
   6623         int point = 0;
   6624         cam_tonemap_curve_t tonemapCurveGreen;
   6625         for (int i = 0; i < tonemapCurves.tonemap_points_cnt ; i++) {
   6626             for (int j = 0; j < 2; j++) {
   6627                tonemapCurveGreen.tonemap_points[i][j] =
   6628                   frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
   6629                point++;
   6630             }
   6631         }
   6632         tonemapCurves.curves[0] = tonemapCurveGreen;
   6633 
   6634         /* ch 1 = B */
   6635         point = 0;
   6636         cam_tonemap_curve_t tonemapCurveBlue;
   6637         for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
   6638             for (int j = 0; j < 2; j++) {
   6639                tonemapCurveBlue.tonemap_points[i][j] =
   6640                   frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
   6641                point++;
   6642             }
   6643         }
   6644         tonemapCurves.curves[1] = tonemapCurveBlue;
   6645 
   6646         /* ch 2 = R */
   6647         point = 0;
   6648         cam_tonemap_curve_t tonemapCurveRed;
   6649         for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
   6650             for (int j = 0; j < 2; j++) {
   6651                tonemapCurveRed.tonemap_points[i][j] =
   6652                   frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
   6653                point++;
   6654             }
   6655         }
   6656         tonemapCurves.curves[2] = tonemapCurveRed;
   6657 
   6658         rc = AddSetParmEntryToBatch(hal_metadata,
   6659                 CAM_INTF_META_TONEMAP_CURVES,
   6660                 sizeof(tonemapCurves), &tonemapCurves);
   6661     }
   6662 
   6663     if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
   6664         uint8_t captureIntent =
   6665             frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
   6666         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
   6667                 sizeof(captureIntent), &captureIntent);
   6668     }
   6669 
   6670     if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
   6671         uint8_t blackLevelLock =
   6672             frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
   6673         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
   6674                 sizeof(blackLevelLock), &blackLevelLock);
   6675     }
   6676 
   6677     if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
   6678         uint8_t lensShadingMapMode =
   6679             frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
   6680         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
   6681                 sizeof(lensShadingMapMode), &lensShadingMapMode);
   6682     }
   6683 
   6684     if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
   6685         cam_area_t roi;
   6686         bool reset = true;
   6687         convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
   6688         if (scalerCropSet) {
   6689             reset = resetIfNeededROI(&roi, &scalerCropRegion);
   6690         }
   6691         if (reset) {
   6692             rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_AEC_ROI,
   6693                     sizeof(roi), &roi);
   6694         }
   6695     }
   6696 
   6697     if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
   6698         cam_area_t roi;
   6699         bool reset = true;
   6700         convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
   6701         if (scalerCropSet) {
   6702             reset = resetIfNeededROI(&roi, &scalerCropRegion);
   6703         }
   6704         if (reset) {
   6705             rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_AF_ROI,
   6706                     sizeof(roi), &roi);
   6707         }
   6708     }
   6709 
   6710     // CDS
   6711     if (frame_settings.exists(QCAMERA3_CDS_MODE)) {
   6712         int32_t* cds =
   6713             frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
   6714         if ((CAM_CDS_MODE_MAX <= (*cds)) || (0 > (*cds))) {
   6715             ALOGE("%s: Invalid CDS mode %d!", __func__, *cds);
   6716         } else {
   6717             cam_cds_mode_type_t mode = (cam_cds_mode_type_t) *cds;
   6718             rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_CDS_MODE,
   6719                 sizeof(mode), &mode);
   6720         }
   6721     }
   6722 
   6723     if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
   6724         cam_test_pattern_data_t testPatternData;
   6725         uint32_t fwk_testPatternMode = frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
   6726         uint8_t testPatternMode = lookupHalName(TEST_PATTERN_MAP,
   6727                sizeof(TEST_PATTERN_MAP), fwk_testPatternMode);
   6728 
   6729         memset(&testPatternData, 0, sizeof(testPatternData));
   6730         testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
   6731         if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
   6732                 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
   6733             int32_t* fwk_testPatternData = frame_settings.find(
   6734                     ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
   6735             testPatternData.r = fwk_testPatternData[0];
   6736             testPatternData.b = fwk_testPatternData[3];
   6737             switch (gCamCapability[mCameraId]->color_arrangement) {
   6738             case CAM_FILTER_ARRANGEMENT_RGGB:
   6739             case CAM_FILTER_ARRANGEMENT_GRBG:
   6740                 testPatternData.gr = fwk_testPatternData[1];
   6741                 testPatternData.gb = fwk_testPatternData[2];
   6742                 break;
   6743             case CAM_FILTER_ARRANGEMENT_GBRG:
   6744             case CAM_FILTER_ARRANGEMENT_BGGR:
   6745                 testPatternData.gr = fwk_testPatternData[2];
   6746                 testPatternData.gb = fwk_testPatternData[1];
   6747                 break;
   6748             default:
   6749                 ALOGE("%s: color arrangement %d is not supported", __func__,
   6750                     gCamCapability[mCameraId]->color_arrangement);
   6751                 break;
   6752             }
   6753         }
   6754         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
   6755             sizeof(testPatternData), &testPatternData);
   6756     }
   6757 
   6758     if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
   6759         double *gps_coords =
   6760             frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d;
   6761         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES, sizeof(double)*3, gps_coords);
   6762     }
   6763 
   6764     if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
   6765         char gps_methods[GPS_PROCESSING_METHOD_SIZE];
   6766         const char *gps_methods_src = (const char *)
   6767                 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
   6768         memset(gps_methods, '\0', sizeof(gps_methods));
   6769         strncpy(gps_methods, gps_methods_src, sizeof(gps_methods)-1);
   6770         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS, sizeof(gps_methods), gps_methods);
   6771     }
   6772 
   6773     if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
   6774         int64_t gps_timestamp =
   6775             frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
   6776         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP, sizeof(int64_t), &gps_timestamp);
   6777     }
   6778 
   6779     if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
   6780         int32_t orientation =
   6781             frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
   6782         cam_rotation_info_t rotation_info;
   6783         if (orientation == 0) {
   6784            rotation_info.rotation = ROTATE_0;
   6785         } else if (orientation == 90) {
   6786            rotation_info.rotation = ROTATE_90;
   6787         } else if (orientation == 180) {
   6788            rotation_info.rotation = ROTATE_180;
   6789         } else if (orientation == 270) {
   6790            rotation_info.rotation = ROTATE_270;
   6791         }
   6792         rotation_info.streamId = snapshotStreamId;
   6793         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, sizeof(orientation), &orientation);
   6794         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_ROTATION, sizeof(rotation_info), &rotation_info);
   6795     }
   6796 
   6797     if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
   6798         int8_t quality =
   6799             frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
   6800         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_QUALITY, sizeof(quality), &quality);
   6801     }
   6802 
   6803     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
   6804         int8_t thumb_quality =
   6805             frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
   6806         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY, sizeof(thumb_quality), &thumb_quality);
   6807     }
   6808 
   6809     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
   6810         cam_dimension_t dim;
   6811         dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
   6812         dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
   6813         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, sizeof(dim), &dim);
   6814     }
   6815 
   6816     // Internal metadata
   6817     if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
   6818         int32_t* privatedata =
   6819                 frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS).data.i32;
   6820         rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
   6821                 sizeof(int32_t) * MAX_METADATA_PRIVATE_PAYLOAD_SIZE, privatedata);
   6822     }
   6823 
   6824     if (frame_settings.exists(QCAMERA3_USE_AV_TIMER)) {
   6825        uint8_t* use_av_timer =
   6826           frame_settings.find(QCAMERA3_USE_AV_TIMER).data.u8;
   6827        rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_META_USE_AV_TIMER,
   6828             sizeof(uint8_t), use_av_timer);
   6829     }
   6830 
   6831     // EV step
   6832     rc = AddSetParmEntryToBatch(hal_metadata, CAM_INTF_PARM_EV_STEP,
   6833             sizeof(cam_rational_type_t), &(gCamCapability[mCameraId]->exp_compensation_step));
   6834 
   6835     return rc;
   6836 }
   6837 
   6838 /*===========================================================================
   6839  * FUNCTION   : captureResultCb
   6840  *
   6841  * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
   6842  *
   6843  * PARAMETERS :
   6844  *   @frame  : frame information from mm-camera-interface
   6845  *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
   6846  *   @userdata: userdata
   6847  *
   6848  * RETURN     : NONE
   6849  *==========================================================================*/
   6850 void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
   6851                 camera3_stream_buffer_t *buffer,
   6852                 uint32_t frame_number, void *userdata)
   6853 {
   6854     QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
   6855     if (hw == NULL) {
   6856         ALOGE("%s: Invalid hw %p", __func__, hw);
   6857         return;
   6858     }
   6859 
   6860     hw->captureResultCb(metadata, buffer, frame_number);
   6861     return;
   6862 }
   6863 
   6864 
   6865 /*===========================================================================
   6866  * FUNCTION   : initialize
   6867  *
   6868  * DESCRIPTION: Pass framework callback pointers to HAL
   6869  *
   6870  * PARAMETERS :
   6871  *
   6872  *
   6873  * RETURN     : Success : 0
   6874  *              Failure: -ENODEV
   6875  *==========================================================================*/
   6876 
   6877 int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
   6878                                   const camera3_callback_ops_t *callback_ops)
   6879 {
   6880     CDBG("%s: E", __func__);
   6881     QCamera3HardwareInterface *hw =
   6882         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   6883     if (!hw) {
   6884         ALOGE("%s: NULL camera device", __func__);
   6885         return -ENODEV;
   6886     }
   6887 
   6888     int rc = hw->initialize(callback_ops);
   6889     CDBG("%s: X", __func__);
   6890     return rc;
   6891 }
   6892 
   6893 /*===========================================================================
   6894  * FUNCTION   : configure_streams
   6895  *
   6896  * DESCRIPTION:
   6897  *
   6898  * PARAMETERS :
   6899  *
   6900  *
   6901  * RETURN     : Success: 0
   6902  *              Failure: -EINVAL (if stream configuration is invalid)
   6903  *                       -ENODEV (fatal error)
   6904  *==========================================================================*/
   6905 
   6906 int QCamera3HardwareInterface::configure_streams(
   6907         const struct camera3_device *device,
   6908         camera3_stream_configuration_t *stream_list)
   6909 {
   6910     CDBG("%s: E", __func__);
   6911     QCamera3HardwareInterface *hw =
   6912         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   6913     if (!hw) {
   6914         ALOGE("%s: NULL camera device", __func__);
   6915         return -ENODEV;
   6916     }
   6917     int rc = hw->configureStreams(stream_list);
   6918     CDBG("%s: X", __func__);
   6919     return rc;
   6920 }
   6921 
   6922 /*===========================================================================
   6923  * FUNCTION   : construct_default_request_settings
   6924  *
   6925  * DESCRIPTION: Configure a settings buffer to meet the required use case
   6926  *
   6927  * PARAMETERS :
   6928  *
   6929  *
   6930  * RETURN     : Success: Return valid metadata
   6931  *              Failure: Return NULL
   6932  *==========================================================================*/
   6933 const camera_metadata_t* QCamera3HardwareInterface::
   6934     construct_default_request_settings(const struct camera3_device *device,
   6935                                         int type)
   6936 {
   6937 
   6938     CDBG("%s: E", __func__);
   6939     camera_metadata_t* fwk_metadata = NULL;
   6940     QCamera3HardwareInterface *hw =
   6941         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   6942     if (!hw) {
   6943         ALOGE("%s: NULL camera device", __func__);
   6944         return NULL;
   6945     }
   6946 
   6947     fwk_metadata = hw->translateCapabilityToMetadata(type);
   6948 
   6949     CDBG("%s: X", __func__);
   6950     return fwk_metadata;
   6951 }
   6952 
   6953 /*===========================================================================
   6954  * FUNCTION   : process_capture_request
   6955  *
   6956  * DESCRIPTION:
   6957  *
   6958  * PARAMETERS :
   6959  *
   6960  *
   6961  * RETURN     :
   6962  *==========================================================================*/
   6963 int QCamera3HardwareInterface::process_capture_request(
   6964                     const struct camera3_device *device,
   6965                     camera3_capture_request_t *request)
   6966 {
   6967     CDBG("%s: E", __func__);
   6968     QCamera3HardwareInterface *hw =
   6969         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   6970     if (!hw) {
   6971         ALOGE("%s: NULL camera device", __func__);
   6972         return -EINVAL;
   6973     }
   6974 
   6975     int rc = hw->processCaptureRequest(request);
   6976     CDBG("%s: X", __func__);
   6977     return rc;
   6978 }
   6979 
   6980 /*===========================================================================
   6981  * FUNCTION   : dump
   6982  *
   6983  * DESCRIPTION:
   6984  *
   6985  * PARAMETERS :
   6986  *
   6987  *
   6988  * RETURN     :
   6989  *==========================================================================*/
   6990 
   6991 void QCamera3HardwareInterface::dump(
   6992                 const struct camera3_device *device, int fd)
   6993 {
   6994     /* Log level property is read when "adb shell dumpsys media.camera" is
   6995        called so that the log level can be controlled without restarting
   6996        the media server */
   6997     getLogLevel();
   6998 
   6999     CDBG("%s: E", __func__);
   7000     QCamera3HardwareInterface *hw =
   7001         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   7002     if (!hw) {
   7003         ALOGE("%s: NULL camera device", __func__);
   7004         return;
   7005     }
   7006 
   7007     hw->dump(fd);
   7008     CDBG("%s: X", __func__);
   7009     return;
   7010 }
   7011 
   7012 /*===========================================================================
   7013  * FUNCTION   : flush
   7014  *
   7015  * DESCRIPTION:
   7016  *
   7017  * PARAMETERS :
   7018  *
   7019  *
   7020  * RETURN     :
   7021  *==========================================================================*/
   7022 
   7023 int QCamera3HardwareInterface::flush(
   7024                 const struct camera3_device *device)
   7025 {
   7026     int rc;
   7027     CDBG("%s: E", __func__);
   7028     QCamera3HardwareInterface *hw =
   7029         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   7030     if (!hw) {
   7031         ALOGE("%s: NULL camera device", __func__);
   7032         return -EINVAL;
   7033     }
   7034 
   7035     rc = hw->flush();
   7036     CDBG("%s: X", __func__);
   7037     return rc;
   7038 }
   7039 
   7040 /*===========================================================================
   7041  * FUNCTION   : close_camera_device
   7042  *
   7043  * DESCRIPTION:
   7044  *
   7045  * PARAMETERS :
   7046  *
   7047  *
   7048  * RETURN     :
   7049  *==========================================================================*/
   7050 int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
   7051 {
   7052     CDBG("%s: E", __func__);
   7053     int ret = NO_ERROR;
   7054     QCamera3HardwareInterface *hw =
   7055         reinterpret_cast<QCamera3HardwareInterface *>(
   7056             reinterpret_cast<camera3_device_t *>(device)->priv);
   7057     if (!hw) {
   7058         ALOGE("NULL camera device");
   7059         return BAD_VALUE;
   7060     }
   7061     delete hw;
   7062 
   7063     CDBG("%s: X", __func__);
   7064     return ret;
   7065 }
   7066 
   7067 /*===========================================================================
   7068  * FUNCTION   : getWaveletDenoiseProcessPlate
   7069  *
   7070  * DESCRIPTION: query wavelet denoise process plate
   7071  *
   7072  * PARAMETERS : None
   7073  *
   7074  * RETURN     : WNR prcocess plate vlaue
   7075  *==========================================================================*/
   7076 cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
   7077 {
   7078     char prop[PROPERTY_VALUE_MAX];
   7079     memset(prop, 0, sizeof(prop));
   7080     property_get("persist.denoise.process.plates", prop, "0");
   7081     int processPlate = atoi(prop);
   7082     switch(processPlate) {
   7083     case 0:
   7084         return CAM_WAVELET_DENOISE_YCBCR_PLANE;
   7085     case 1:
   7086         return CAM_WAVELET_DENOISE_CBCR_ONLY;
   7087     case 2:
   7088         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
   7089     case 3:
   7090         return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
   7091     default:
   7092         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
   7093     }
   7094 }
   7095 
   7096 /*===========================================================================
   7097  * FUNCTION   : needRotationReprocess
   7098  *
   7099  * DESCRIPTION: if rotation needs to be done by reprocess in pp
   7100  *
   7101  * PARAMETERS : none
   7102  *
   7103  * RETURN     : true: needed
   7104  *              false: no need
   7105  *==========================================================================*/
   7106 bool QCamera3HardwareInterface::needRotationReprocess()
   7107 {
   7108     if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
   7109         // current rotation is not zero, and pp has the capability to process rotation
   7110         CDBG_HIGH("%s: need do reprocess for rotation", __func__);
   7111         return true;
   7112     }
   7113 
   7114     return false;
   7115 }
   7116 
   7117 /*===========================================================================
   7118  * FUNCTION   : needReprocess
   7119  *
   7120  * DESCRIPTION: if reprocess in needed
   7121  *
   7122  * PARAMETERS : none
   7123  *
   7124  * RETURN     : true: needed
   7125  *              false: no need
   7126  *==========================================================================*/
   7127 bool QCamera3HardwareInterface::needReprocess(uint32_t postprocess_mask)
   7128 {
   7129     if (gCamCapability[mCameraId]->min_required_pp_mask > 0) {
   7130         // TODO: add for ZSL HDR later
   7131         // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
   7132         if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
   7133             CDBG_HIGH("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
   7134             return true;
   7135         } else {
   7136             CDBG_HIGH("%s: already post processed frame", __func__);
   7137             return false;
   7138         }
   7139     }
   7140     return needRotationReprocess();
   7141 }
   7142 
   7143 /*===========================================================================
   7144  * FUNCTION   : needJpegRotation
   7145  *
   7146  * DESCRIPTION: if rotation from jpeg is needed
   7147  *
   7148  * PARAMETERS : none
   7149  *
   7150  * RETURN     : true: needed
   7151  *              false: no need
   7152  *==========================================================================*/
   7153 bool QCamera3HardwareInterface::needJpegRotation()
   7154 {
   7155    /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
   7156     if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
   7157        CDBG("%s: Need Jpeg to do the rotation", __func__);
   7158        return true;
   7159     }
   7160     return false;
   7161 }
   7162 
   7163 /*===========================================================================
   7164  * FUNCTION   : addOfflineReprocChannel
   7165  *
   7166  * DESCRIPTION: add a reprocess channel that will do reprocess on frames
   7167  *              coming from input channel
   7168  *
   7169  * PARAMETERS :
   7170  *   @config  : reprocess configuration
   7171  *
   7172  *
   7173  * RETURN     : Ptr to the newly created channel obj. NULL if failed.
   7174  *==========================================================================*/
   7175 QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
   7176         const reprocess_config_t &config, QCamera3PicChannel *picChHandle,
   7177         metadata_buffer_t *metadata)
   7178 {
   7179     int32_t rc = NO_ERROR;
   7180     QCamera3ReprocessChannel *pChannel = NULL;
   7181 
   7182     pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
   7183             mCameraHandle->ops, NULL, config.padding, CAM_QCOM_FEATURE_NONE, this, picChHandle);
   7184     if (NULL == pChannel) {
   7185         ALOGE("%s: no mem for reprocess channel", __func__);
   7186         return NULL;
   7187     }
   7188 
   7189     rc = pChannel->initialize(IS_TYPE_NONE);
   7190     if (rc != NO_ERROR) {
   7191         ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
   7192         delete pChannel;
   7193         return NULL;
   7194     }
   7195 
   7196     // pp feature config
   7197     cam_pp_feature_config_t pp_config;
   7198     memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
   7199 
   7200     pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET;
   7201 
   7202     rc = pChannel->addReprocStreamsFromSource(pp_config,
   7203             config,
   7204             IS_TYPE_NONE,
   7205             mMetadataChannel);
   7206 
   7207     if (rc != NO_ERROR) {
   7208         delete pChannel;
   7209         return NULL;
   7210     }
   7211     return pChannel;
   7212 }
   7213 
   7214 
   7215 bool  QCamera3HardwareInterface::isCACEnabled() {
   7216     char prop[PROPERTY_VALUE_MAX];
   7217     memset(prop, 0, sizeof(prop));
   7218     property_get("persist.camera.feature.cac", prop, "0");
   7219     int enableCAC = atoi(prop);
   7220     return enableCAC;
   7221 }
   7222 /*===========================================================================
   7223 * FUNCTION   : getLogLevel
   7224 *
   7225 * DESCRIPTION: Reads the log level property into a variable
   7226 *
   7227 * PARAMETERS :
   7228 *   None
   7229 *
   7230 * RETURN     :
   7231 *   None
   7232 *==========================================================================*/
   7233 void QCamera3HardwareInterface::getLogLevel()
   7234 {
   7235     char prop[PROPERTY_VALUE_MAX];
   7236 
   7237     property_get("persist.camera.logs", prop, "0");
   7238     gCamHal3LogLevel = atoi(prop);
   7239 
   7240     return;
   7241 }
   7242 
   7243 /*===========================================================================
   7244 * FUNCTION   : getFlashInfo
   7245 *
   7246 * DESCRIPTION: Retrieve information about whether the device has a flash.
   7247 *
   7248 * PARAMETERS :
   7249 *   @cameraId  : Camera id to query
   7250 *   @hasFlash  : Boolean indicating whether there is a flash device
   7251 *                associated with given camera
   7252 *   @flashNode : If a flash device exists, this will be its device node.
   7253 *
   7254 * RETURN     :
   7255 *   None
   7256 *==========================================================================*/
   7257 void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
   7258         bool& hasFlash,
   7259         char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
   7260 {
   7261     cam_capability_t* camCapability = gCamCapability[cameraId];
   7262     if (NULL == camCapability) {
   7263         hasFlash = false;
   7264         flashNode[0] = '\0';
   7265     } else {
   7266         hasFlash = camCapability->flash_available;
   7267         strlcpy(flashNode,
   7268                 (char*)camCapability->flash_dev_name,
   7269                 QCAMERA_MAX_FILEPATH_LENGTH);
   7270     }
   7271 }
   7272 
   7273 /*===========================================================================
   7274  * FUNCTION   : validateStreamRotations
   7275  *
   7276  * DESCRIPTION: Check if the rotations requested are supported
   7277  *
   7278  * PARAMETERS :
   7279  *   @stream_list : streams to be configured
   7280  *
   7281  * RETURN     : NO_ERROR on success
   7282  *              -EINVAL on failure
   7283  *
   7284  *==========================================================================*/
   7285 int QCamera3HardwareInterface::validateStreamRotations(
   7286         camera3_stream_configuration_t *streamList)
   7287 {
   7288     int rc = NO_ERROR;
   7289 
   7290     /*
   7291     * Loop through all streams requested in configuration
   7292     * Check if unsupported rotations have been requested on any of them
   7293     */
   7294     for (size_t j = 0; j < streamList->num_streams; j++){
   7295         camera3_stream_t *newStream = streamList->streams[j];
   7296 
   7297         bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
   7298         bool isImplDef = (newStream->format ==
   7299                 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
   7300         bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
   7301                 isImplDef);
   7302 
   7303         if (isRotated && (!isImplDef || isZsl)) {
   7304             ALOGE("%s: Error: Unsupported rotation of %d requested for stream"
   7305                     "type:%d and stream format:%d", __func__,
   7306                     newStream->rotation, newStream->stream_type,
   7307                     newStream->format);
   7308             rc = -EINVAL;
   7309             break;
   7310         }
   7311     }
   7312 
   7313     return rc;
   7314 }
   7315 
   7316 }; //end namespace qcamera
   7317