Home | History | Annotate | Download | only in HAL3
      1 /* Copyright (c) 2012-2015, The Linux Foundation. All rights reserved.
      2 *
      3 * Redistribution and use in source and binary forms, with or without
      4 * modification, are permitted provided that the following conditions are
      5 * met:
      6 *     * Redistributions of source code must retain the above copyright
      7 *       notice, this list of conditions and the following disclaimer.
      8 *     * Redistributions in binary form must reproduce the above
      9 *       copyright notice, this list of conditions and the following
     10 *       disclaimer in the documentation and/or other materials provided
     11 *       with the distribution.
     12 *     * Neither the name of The Linux Foundation nor the names of its
     13 *       contributors may be used to endorse or promote products derived
     14 *       from this software without specific prior written permission.
     15 *
     16 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
     17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
     18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
     19 * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
     20 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
     21 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
     22 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
     23 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
     24 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
     25 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
     26 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
     27 *
     28 */
     29 
     30 #define ATRACE_TAG ATRACE_TAG_CAMERA
     31 #define LOG_TAG "QCamera3HWI"
     32 //#define LOG_NDEBUG 0
     33 
     34 #define __STDC_LIMIT_MACROS
     35 #include <cutils/properties.h>
     36 #include <hardware/camera3.h>
     37 #include <camera/CameraMetadata.h>
     38 #include <stdio.h>
     39 #include <stdlib.h>
     40 #include <fcntl.h>
     41 #include <stdint.h>
     42 #include <utils/Log.h>
     43 #include <utils/Errors.h>
     44 #include <utils/Trace.h>
     45 #include <sync/sync.h>
     46 #include <gralloc_priv.h>
     47 #include "util/QCameraFlash.h"
     48 #include "QCamera3HWI.h"
     49 #include "QCamera3Mem.h"
     50 #include "QCamera3Channel.h"
     51 #include "QCamera3PostProc.h"
     52 #include "QCamera3VendorTags.h"
     53 
     54 using namespace android;
     55 
     56 namespace qcamera {
     57 
// Shorthand for fetching the INDEX-th mapped buffer pointer from a memory object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

// Result-pipeline tuning constants (frame delays / counts reported upward).
#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 3
#define FRAME_SKIP_DELAY     0
#define CAM_MAX_SYNC_LATENCY 4

// Maximum representable pixel values for common sensor bit depths.
#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

// 4K UHD video dimensions.
#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

// Largest stream dimensions (1080p) for which EIS is considered, per the name.
#define MAX_EIS_WIDTH 1920
#define MAX_EIS_HEIGHT 1080

// Per-type stream count limits.
#define MAX_RAW_STREAMS        1
#define MAX_STALLING_STREAMS   1
#define MAX_PROCESSED_STREAMS  3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR    (30)
#define DEFAULT_VIDEO_FPS      (30.0)
#define MAX_HFR_BATCH_SIZE     (8)
// int32 count per metering-region tuple: xmin, ymin, xmax, ymax, weight.
#define REGIONS_TUPLE_COUNT    5
#define HDR_PLUS_PERF_TIME_OUT  (7000) // milliseconds

// Element count of a statically sized mapping table.
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

// Superset of post-processing features applied to HAL3 processed streams.
#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3   ( CAM_QCOM_FEATURE_DENOISE2D |\
                                              CAM_QCOM_FEATURE_CROP |\
                                              CAM_QCOM_FEATURE_ROTATION |\
                                              CAM_QCOM_FEATURE_SHARPNESS |\
                                              CAM_QCOM_FEATURE_SCALE |\
                                              CAM_QCOM_FEATURE_CAC |\
                                              CAM_QCOM_FEATURE_CDS )

// Sentinel timeout meaning "wait forever".
#define TIMEOUT_NEVER -1
     97 
// Per-sensor capability records; entries must be populated before a
// QCamera3HardwareInterface is constructed (the constructor dereferences them).
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
// Cached static metadata blobs, one per sensor.
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
// Guards HAL-wide global state shared across camera instances.
static pthread_mutex_t gCamLock = PTHREAD_MUTEX_INITIALIZER;
// Global HAL3 log verbosity (default 1); see getLogLevel().
volatile uint32_t gCamHal3LogLevel = 1;

// Property-string to backend CDS mode lookup table.
const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
    108 
// Android color-effect enum <-> backend effect mode translation table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

// Android AWB mode enum <-> backend white-balance mode translation table.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

// Android scene mode enum <-> backend scene mode translation table.
// Note STEADYPHOTO maps to the backend's ANTISHAKE mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};
    157 
// Android AF mode <-> backend focus mode. ANDROID_CONTROL_AF_MODE_OFF appears
// twice on purpose: both CAM_FOCUS_MODE_OFF and CAM_FOCUS_MODE_FIXED translate
// back to it when mapping HAL -> Android (lookup takes the first match).
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

// Android chromatic-aberration correction mode <-> backend CAC mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

// Android AE antibanding mode <-> backend antibanding mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

// Android AE mode -> backend flash mode: AE OFF/ON both imply flash off,
// AUTO_FLASH and AUTO_FLASH_REDEYE both imply auto flash.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

// Android flash mode <-> backend flash mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

// Android face-detect mode <-> backend face-detect mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE  },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};

// Android focus-distance calibration quality <-> backend calibration level.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
      CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
      CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
      CAM_FOCUS_CALIBRATED }
};

// Android lens state <-> backend AF lens state.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY,    CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,        CAM_AF_LENS_STATE_MOVING}
};
    233 
// Supported JPEG thumbnail sizes as flat (width, height) pairs; the leading
// (0, 0) entry means "no thumbnail", as required by the Android metadata spec.
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             320, 240,
                                             432, 288,
                                             480, 288,
                                             512, 288,
                                             512, 384};

// Android sensor test-pattern mode <-> backend test-pattern mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,          CAM_TEST_PATTERN_OFF   },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,  CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,   CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,          CAM_TEST_PATTERN_PN9 },
};

/* Since there is no mapping for all the options, some Android enums are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index, which means that for HAL values that map to different
 * Android values, the traverse logic will select the first one found.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};

// Requested FPS -> backend HFR (high frame rate) mode.
const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};
    289 
/* camera3_device_ops vtable handed to the framework (installed on
 * mCameraDevice.ops in the constructor). Hooks left NULL are not implemented
 * through this table. */
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize =                         QCamera3HardwareInterface::initialize,
    .configure_streams =                  QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers =            NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request =            QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops =        NULL,
    .dump =                               QCamera3HardwareInterface::dump,
    .flush =                              QCamera3HardwareInterface::flush,
    .reserved =                           {0},
};
    301 
/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface. Initializes every
 *              member to an inert value, fills in the hw_device_t header the
 *              framework uses, and reads tuning properties. Does NOT open the
 *              camera backend (see openCamera()).
 *
 * PARAMETERS :
 *   @cameraId  : camera ID; gCamCapability[cameraId] must already be populated
 *   @callbacks : camera module callbacks supplied by the framework
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraOpened(false),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mDummyBatchChannel(NULL),
      mChannelHandle(0),
      mFirstRequest(false),
      mFirstConfiguration(true),
      mFlush(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mHybridAeEnable(0),
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mLdafCalibExist(false),
      mPowerHintEnabled(false),
      mLastCustIntentFrmNum(-1)
{
    getLogLevel();
    m_perfLock.lock_init();
    // Fill in the hw_device_t header the framework dispatches through.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;
    pthread_cond_init(&mRequestCond, NULL);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    // No default request templates built yet; they are created on demand.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    if (mEnableRawDump)
        CDBG("%s: Raw dump from Camera HAL enabled", __func__);

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    // TNR (temporal noise reduction) toggles; both default to enabled.
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "1");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);

    mPendingBuffersMap.num_buffers = 0;
    mPendingBuffersMap.last_frame_number = -1;
}
    397 
/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface. Teardown order is
 *              deliberate: stop ALL streams first, then delete channels, then
 *              send the final unconfigure to the backend, then delete the
 *              channel handle and close the camera. If the framework never
 *              returned some buffers, the daemon is asked to restart and this
 *              process exits at the end.
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    CDBG("%s: E", __func__);
    // Snapshot now; the pending list is consulted (and cleared) further down.
    bool hasPendingBuffers = (mPendingBuffersMap.num_buffers > 0);

    /* Turn off current power hint before acquiring perfLock in case they
     * conflict with each other */
    disablePowerHint();

    m_perfLock.lock_acq();

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        ALOGI("%s: stopping channel %d", __func__, mChannelHandle);
    }

    // Everything is stopped; now it is safe to delete the channel objects.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }
    // mPictureChannel is owned via mStreamInfo above; just drop the alias.
    mPictureChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            clear_metadata_buffer(mParameters);

            // Check if there is still pending buffer not yet returned.
            if (hasPendingBuffers) {
                for (auto& pendingBuffer : mPendingBuffersMap.mPendingBufferList) {
                    ALOGE("%s: Buffer not yet returned for stream. Frame number %d, format 0x%x, width %d, height %d",
                        __func__, pendingBuffer.frame_number, pendingBuffer.stream->format, pendingBuffer.stream->width,
                        pendingBuffer.stream->height);
                }
                ALOGE("%s: Last requested frame number is %d", __func__, mPendingBuffersMap.last_frame_number);
                // Ask the daemon to restart so leaked buffers are reclaimed.
                uint8_t restart = TRUE;
                ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_DAEMON_RESTART,
                        restart);
            }

            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);

            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                ALOGE("%s: set_parms failed for unconfigure", __func__);
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        ALOGE("%s: deleting channel %d", __func__, mChannelHandle);
        mChannelHandle = 0;
    }

    if (mCameraOpened)
        closeCamera();

    // Release bookkeeping lists and any per-request allocations.
    mPendingBuffersMap.mPendingBufferList.clear();
    mPendingReprocessResultList.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    m_perfLock.lock_rel();
    m_perfLock.lock_deinit();

    pthread_cond_destroy(&mRequestCond);

    pthread_mutex_destroy(&mMutex);

    // Deliberate process abort: with buffers leaked to us there is no safe
    // way to continue, so die after the daemon-restart request above.
    if (hasPendingBuffers) {
        ALOGE("%s: Not all buffers were returned. Notified the camera daemon process to restart."
                " Exiting here...", __func__);
        exit(EXIT_FAILURE);
    }
    CDBG("%s: X", __func__);
}
    547 
    548 /*===========================================================================
    549  * FUNCTION   : erasePendingRequest
    550  *
    551  * DESCRIPTION: function to erase a desired pending request after freeing any
    552  *              allocated memory
    553  *
    554  * PARAMETERS :
    555  *   @i       : iterator pointing to pending request to be erased
    556  *
    557  * RETURN     : iterator pointing to the next request
    558  *==========================================================================*/
    559 QCamera3HardwareInterface::pendingRequestIterator
    560         QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
    561 {
    562     if (i->input_buffer != NULL) {
    563         free(i->input_buffer);
    564         i->input_buffer = NULL;
    565     }
    566     if (i->settings != NULL)
    567         free_camera_metadata((camera_metadata_t*)i->settings);
    568     return mPendingRequestsList.erase(i);
    569 }
    570 
    571 /*===========================================================================
    572  * FUNCTION   : camEvtHandle
    573  *
    574  * DESCRIPTION: Function registered to mm-camera-interface to handle events
    575  *
    576  * PARAMETERS :
    577  *   @camera_handle : interface layer camera handle
    578  *   @evt           : ptr to event
    579  *   @user_data     : user data ptr
    580  *
    581  * RETURN     : none
    582  *==========================================================================*/
    583 void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
    584                                           mm_camera_event_t *evt,
    585                                           void *user_data)
    586 {
    587     QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    588     if (obj && evt) {
    589         switch(evt->server_event_type) {
    590             case CAM_EVENT_TYPE_DAEMON_DIED:
    591                 ALOGE("%s: Fatal, camera daemon died", __func__);
    592                 //close the camera backend
    593                 if (obj->mCameraHandle && obj->mCameraHandle->camera_handle
    594                         && obj->mCameraHandle->ops) {
    595                     obj->mCameraHandle->ops->error_close_camera(obj->mCameraHandle->camera_handle);
    596                 } else {
    597                     ALOGE("%s: Could not close camera on error because the handle or ops is NULL",
    598                             __func__);
    599                 }
    600                 camera3_notify_msg_t notify_msg;
    601                 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
    602                 notify_msg.type = CAMERA3_MSG_ERROR;
    603                 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
    604                 notify_msg.message.error.error_stream = NULL;
    605                 notify_msg.message.error.frame_number = 0;
    606                 obj->mCallbackOps->notify(obj->mCallbackOps, &notify_msg);
    607                 break;
    608 
    609             case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
    610                 CDBG("%s: HAL got request pull from Daemon", __func__);
    611                 pthread_mutex_lock(&obj->mMutex);
    612                 obj->mWokenUpByDaemon = true;
    613                 obj->unblockRequestIfNecessary();
    614                 pthread_mutex_unlock(&obj->mMutex);
    615                 break;
    616 
    617             default:
    618                 CDBG_HIGH("%s: Warning: Unhandled event %d", __func__,
    619                         evt->server_event_type);
    620                 break;
    621         }
    622     } else {
    623         ALOGE("%s: NULL user_data/evt", __func__);
    624     }
    625 }
    626 
    627 /*===========================================================================
    628  * FUNCTION   : openCamera
    629  *
    630  * DESCRIPTION: open camera
    631  *
    632  * PARAMETERS :
    633  *   @hw_device  : double ptr for camera device struct
    634  *
    635  * RETURN     : int32_t type of status
    636  *              NO_ERROR  -- success
    637  *              none-zero failure code
    638  *==========================================================================*/
    639 int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
    640 {
    641     int rc = 0;
    642     if (mCameraOpened) {
    643         *hw_device = NULL;
    644         return PERMISSION_DENIED;
    645     }
    646     m_perfLock.lock_acq();
    647     rc = openCamera();
    648     if (rc == 0) {
    649         *hw_device = &mCameraDevice.common;
    650     } else
    651         *hw_device = NULL;
    652 
    653     m_perfLock.lock_rel();
    654     return rc;
    655 }
    656 
    657 /*===========================================================================
    658  * FUNCTION   : openCamera
    659  *
    660  * DESCRIPTION: open camera
    661  *
    662  * PARAMETERS : none
    663  *
    664  * RETURN     : int32_t type of status
    665  *              NO_ERROR  -- success
 *              non-zero failure code
    667  *==========================================================================*/
    668 int QCamera3HardwareInterface::openCamera()
    669 {
    670     int rc = 0;
    671 
    672     ATRACE_CALL();
    673     if (mCameraHandle) {
    674         ALOGE("Failure: Camera already opened");
    675         return ALREADY_EXISTS;
    676     }
    677 
    678     rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    679     if (rc < 0) {
    680         ALOGE("%s: Failed to reserve flash for camera id: %d",
    681                 __func__,
    682                 mCameraId);
    683         return UNKNOWN_ERROR;
    684     }
    685 
    686     rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    687     if (rc) {
    688         ALOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
    689         return rc;
    690     }
    691 
    692     mCameraOpened = true;
    693 
    694     rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
    695             camEvtHandle, (void *)this);
    696 
    697     if (rc < 0) {
    698         ALOGE("%s: Error, failed to register event callback", __func__);
    699         /* Not closing camera here since it is already handled in destructor */
    700         return FAILED_TRANSACTION;
    701     }
    702     mFirstConfiguration = true;
    703     return NO_ERROR;
    704 }
    705 
    706 /*===========================================================================
    707  * FUNCTION   : closeCamera
    708  *
    709  * DESCRIPTION: close camera
    710  *
    711  * PARAMETERS : none
    712  *
    713  * RETURN     : int32_t type of status
    714  *              NO_ERROR  -- success
 *              non-zero failure code
    716  *==========================================================================*/
    717 int QCamera3HardwareInterface::closeCamera()
    718 {
    719     ATRACE_CALL();
    720     int rc = NO_ERROR;
    721 
    722     rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    723     mCameraHandle = NULL;
    724     mCameraOpened = false;
    725 
    726     if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
    727         CDBG("%s: Failed to release flash for camera id: %d",
    728                 __func__,
    729                 mCameraId);
    730     }
    731 
    732     return rc;
    733 }
    734 
    735 /*===========================================================================
    736  * FUNCTION   : initialize
    737  *
    738  * DESCRIPTION: Initialize frameworks callback functions
    739  *
    740  * PARAMETERS :
    741  *   @callback_ops : callback function to frameworks
    742  *
    743  * RETURN     :
    744  *
    745  *==========================================================================*/
    746 int QCamera3HardwareInterface::initialize(
    747         const struct camera3_callback_ops *callback_ops)
    748 {
    749     ATRACE_CALL();
    750     int rc;
    751 
    752     pthread_mutex_lock(&mMutex);
    753 
    754     rc = initParameters();
    755     if (rc < 0) {
    756         ALOGE("%s: initParamters failed %d", __func__, rc);
    757        goto err1;
    758     }
    759     mCallbackOps = callback_ops;
    760 
    761     mChannelHandle = mCameraHandle->ops->add_channel(
    762             mCameraHandle->camera_handle, NULL, NULL, this);
    763     if (mChannelHandle == 0) {
    764         ALOGE("%s: add_channel failed", __func__);
    765         rc = -ENOMEM;
    766         pthread_mutex_unlock(&mMutex);
    767         return rc;
    768     }
    769 
    770     pthread_mutex_unlock(&mMutex);
    771     mCameraInitialized = true;
    772     return 0;
    773 
    774 err1:
    775     pthread_mutex_unlock(&mMutex);
    776     return rc;
    777 }
    778 
    779 /*===========================================================================
    780  * FUNCTION   : validateStreamDimensions
    781  *
    782  * DESCRIPTION: Check if the configuration requested are those advertised
    783  *
    784  * PARAMETERS :
    785  *   @stream_list : streams to be configured
    786  *
    787  * RETURN     :
    788  *
    789  *==========================================================================*/
    790 int QCamera3HardwareInterface::validateStreamDimensions(
    791         camera3_stream_configuration_t *streamList)
    792 {
    793     int rc = NO_ERROR;
    794     int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
    795     int32_t available_jpeg_sizes[MAX_SIZES_CNT * 2];
    796     size_t count = 0;
    797 
    798     camera3_stream_t *inputStream = NULL;
    799     /*
    800     * Loop through all streams to find input stream if it exists*
    801     */
    802     for (size_t i = 0; i< streamList->num_streams; i++) {
    803         if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
    804             if (inputStream != NULL) {
    805                 ALOGE("%s: Error, Multiple input streams requested");
    806                 return -EINVAL;
    807             }
    808             inputStream = streamList->streams[i];
    809         }
    810     }
    811     /*
    812     * Loop through all streams requested in configuration
    813     * Check if unsupported sizes have been requested on any of them
    814     */
    815     for (size_t j = 0; j < streamList->num_streams; j++) {
    816         bool sizeFound = false;
    817         size_t jpeg_sizes_cnt = 0;
    818         camera3_stream_t *newStream = streamList->streams[j];
    819 
    820         uint32_t rotatedHeight = newStream->height;
    821         uint32_t rotatedWidth = newStream->width;
    822         if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
    823                 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
    824             rotatedHeight = newStream->width;
    825             rotatedWidth = newStream->height;
    826         }
    827 
    828         /*
    829         * Sizes are different for each type of stream format check against
    830         * appropriate table.
    831         */
    832         switch (newStream->format) {
    833         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
    834         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
    835         case HAL_PIXEL_FORMAT_RAW10:
    836             count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
    837             for (size_t i = 0; i < count; i++) {
    838                 if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
    839                         (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
    840                     sizeFound = true;
    841                     break;
    842                 }
    843             }
    844             break;
    845         case HAL_PIXEL_FORMAT_BLOB:
    846             count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
    847             /* Generate JPEG sizes table */
    848             makeTable(gCamCapability[mCameraId]->picture_sizes_tbl,
    849                     count,
    850                     MAX_SIZES_CNT,
    851                     available_processed_sizes);
    852             jpeg_sizes_cnt = filterJpegSizes(
    853                     available_jpeg_sizes,
    854                     available_processed_sizes,
    855                     count * 2,
    856                     MAX_SIZES_CNT * 2,
    857                     gCamCapability[mCameraId]->active_array_size,
    858                     gCamCapability[mCameraId]->max_downscale_factor);
    859 
    860             /* Verify set size against generated sizes table */
    861             for (size_t i = 0; i < (jpeg_sizes_cnt / 2); i++) {
    862                 if (((int32_t)rotatedWidth == available_jpeg_sizes[i*2]) &&
    863                         ((int32_t)rotatedHeight == available_jpeg_sizes[i*2+1])) {
    864                     sizeFound = true;
    865                     break;
    866                 }
    867             }
    868             break;
    869         case HAL_PIXEL_FORMAT_YCbCr_420_888:
    870         case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
    871         default:
    872             if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
    873                     || newStream->stream_type == CAMERA3_STREAM_INPUT
    874                     || IS_USAGE_ZSL(newStream->usage)) {
    875                 if (((int32_t)rotatedWidth ==
    876                                 gCamCapability[mCameraId]->active_array_size.width) &&
    877                                 ((int32_t)rotatedHeight ==
    878                                 gCamCapability[mCameraId]->active_array_size.height)) {
    879                     sizeFound = true;
    880                     break;
    881                 }
    882                 /* We could potentially break here to enforce ZSL stream
    883                  * set from frameworks always is full active array size
    884                  * but it is not clear from the spc if framework will always
    885                  * follow that, also we have logic to override to full array
    886                  * size, so keeping the logic lenient at the moment
    887                  */
    888             }
    889             count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
    890                     MAX_SIZES_CNT);
    891             for (size_t i = 0; i < count; i++) {
    892                 if (((int32_t)rotatedWidth ==
    893                             gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
    894                             ((int32_t)rotatedHeight ==
    895                             gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
    896                     sizeFound = true;
    897                     break;
    898                 }
    899             }
    900             break;
    901         } /* End of switch(newStream->format) */
    902 
    903         /* We error out even if a single stream has unsupported size set */
    904         if (!sizeFound) {
    905             ALOGE("%s: Error: Unsupported size of  %d x %d requested for stream"
    906                   "type:%d", __func__, rotatedWidth, rotatedHeight,
    907                   newStream->format);
    908             ALOGE("%s: Active array size is  %d x %d", __func__,
    909                     gCamCapability[mCameraId]->active_array_size.width,
    910                     gCamCapability[mCameraId]->active_array_size.height);
    911             rc = -EINVAL;
    912             break;
    913         }
    914     } /* End of for each stream */
    915     return rc;
    916 }
    917 
    918 /*==============================================================================
    919  * FUNCTION   : isSupportChannelNeeded
    920  *
    921  * DESCRIPTION: Simple heuristic func to determine if support channels is needed
    922  *
    923  * PARAMETERS :
    924  *   @stream_list : streams to be configured
    925  *   @stream_config_info : the config info for streams to be configured
    926  *
 * RETURN     : Boolean true/false decision
    928  *
    929  *==========================================================================*/
    930 bool QCamera3HardwareInterface::isSupportChannelNeeded(
    931         camera3_stream_configuration_t *streamList,
    932         cam_stream_size_info_t stream_config_info)
    933 {
    934     uint32_t i;
    935     bool pprocRequested = false;
    936     /* Check for conditions where PProc pipeline does not have any streams*/
    937     for (i = 0; i < stream_config_info.num_streams; i++) {
    938         if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
    939                 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
    940             pprocRequested = true;
    941             break;
    942         }
    943     }
    944 
    945     if (pprocRequested == false )
    946         return true;
    947 
    948     /* Dummy stream needed if only raw or jpeg streams present */
    949     for (i = 0; i < streamList->num_streams; i++) {
    950         switch(streamList->streams[i]->format) {
    951             case HAL_PIXEL_FORMAT_RAW_OPAQUE:
    952             case HAL_PIXEL_FORMAT_RAW10:
    953             case HAL_PIXEL_FORMAT_RAW16:
    954             case HAL_PIXEL_FORMAT_BLOB:
    955                 break;
    956             default:
    957                 return false;
    958         }
    959     }
    960     return true;
    961 }
    962 
    963 /*==============================================================================
    964  * FUNCTION   : getSensorOutputSize
    965  *
    966  * DESCRIPTION: Get sensor output size based on current stream configuratoin
    967  *
    968  * PARAMETERS :
    969  *   @sensor_dim : sensor output dimension (output)
    970  *
    971  * RETURN     : int32_t type of status
    972  *              NO_ERROR  -- success
 *              non-zero failure code
    974  *
    975  *==========================================================================*/
int32_t QCamera3HardwareInterface::getSensorOutputSize(cam_dimension_t &sensor_dim)
{
    int32_t rc = NO_ERROR;

    // Compute the per-axis maximum over all configured streams; note the
    // resulting width and height may come from two different streams.
    cam_dimension_t max_dim = {0, 0};
    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
        if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
            max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
        if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
            max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
    }

    // Reuse the shared parameter batch buffer; clear before each exchange.
    clear_metadata_buffer(mParameters);

    // Tell the backend the max dimension it must be able to produce.
    rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
            max_dim);
    if (rc != NO_ERROR) {
        ALOGE("%s:Failed to update table for CAM_INTF_PARM_MAX_DIMENSION", __func__);
        return rc;
    }

    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
    if (rc != NO_ERROR) {
        ALOGE("%s: Failed to set CAM_INTF_PARM_MAX_DIMENSION", __func__);
        return rc;
    }

    // Now query the raw (sensor) dimension the backend selected for that
    // max dimension; the set_parms above must happen first.
    clear_metadata_buffer(mParameters);
    ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_RAW_DIMENSION);

    rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
            mParameters);
    if (rc != NO_ERROR) {
        ALOGE("%s: Failed to get CAM_INTF_PARM_RAW_DIMENSION", __func__);
        return rc;
    }

    // Copy the queried dimension into the caller-provided output parameter.
    READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_RAW_DIMENSION, sensor_dim);
    ALOGI("%s: sensor output dimension = %d x %d", __func__, sensor_dim.width, sensor_dim.height);

    return rc;
}
   1018 
   1019 /*==============================================================================
   1020  * FUNCTION   : enablePowerHint
   1021  *
   1022  * DESCRIPTION: enable single powerhint for preview and different video modes.
   1023  *
   1024  * PARAMETERS :
   1025  *
   1026  * RETURN     : NULL
   1027  *
   1028  *==========================================================================*/
   1029 void QCamera3HardwareInterface::enablePowerHint()
   1030 {
   1031     if (!mPowerHintEnabled) {
   1032         m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, 1);
   1033         mPowerHintEnabled = true;
   1034     }
   1035 }
   1036 
   1037 /*==============================================================================
   1038  * FUNCTION   : disablePowerHint
   1039  *
   1040  * DESCRIPTION: disable current powerhint.
   1041  *
   1042  * PARAMETERS :
   1043  *
   1044  * RETURN     : NULL
   1045  *
   1046  *==========================================================================*/
   1047 void QCamera3HardwareInterface::disablePowerHint()
   1048 {
   1049     if (mPowerHintEnabled) {
   1050         m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, 0);
   1051         mPowerHintEnabled = false;
   1052     }
   1053 }
   1054 
   1055 /*===========================================================================
   1056  * FUNCTION   : configureStreams
   1057  *
   1058  * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
   1059  *              and output streams.
   1060  *
   1061  * PARAMETERS :
   1062  *   @stream_list : streams to be configured
   1063  *
   1064  * RETURN     :
   1065  *
   1066  *==========================================================================*/
   1067 int QCamera3HardwareInterface::configureStreams(
   1068         camera3_stream_configuration_t *streamList)
   1069 {
   1070     ATRACE_CALL();
   1071     int rc = 0;
   1072 
   1073     // Acquire perfLock before configure streams
   1074     m_perfLock.lock_acq();
   1075     rc = configureStreamsPerfLocked(streamList);
   1076     m_perfLock.lock_rel();
   1077 
   1078     return rc;
   1079 }
   1080 
   1081 /*===========================================================================
   1082  * FUNCTION   : configureStreamsPerfLocked
   1083  *
   1084  * DESCRIPTION: configureStreams while perfLock is held.
   1085  *
   1086  * PARAMETERS :
   1087  *   @stream_list : streams to be configured
   1088  *
   1089  * RETURN     : int32_t type of status
   1090  *              NO_ERROR  -- success
 *              non-zero failure code
   1092  *==========================================================================*/
   1093 int QCamera3HardwareInterface::configureStreamsPerfLocked(
   1094         camera3_stream_configuration_t *streamList)
   1095 {
   1096     ATRACE_CALL();
   1097     int rc = 0;
   1098 
   1099     // Sanity check stream_list
   1100     if (streamList == NULL) {
   1101         ALOGE("%s: NULL stream configuration", __func__);
   1102         return BAD_VALUE;
   1103     }
   1104     if (streamList->streams == NULL) {
   1105         ALOGE("%s: NULL stream list", __func__);
   1106         return BAD_VALUE;
   1107     }
   1108 
   1109     if (streamList->num_streams < 1) {
   1110         ALOGE("%s: Bad number of streams requested: %d", __func__,
   1111                 streamList->num_streams);
   1112         return BAD_VALUE;
   1113     }
   1114 
   1115     if (streamList->num_streams >= MAX_NUM_STREAMS) {
   1116         ALOGE("%s: Maximum number of streams %d exceeded: %d", __func__,
   1117                 MAX_NUM_STREAMS, streamList->num_streams);
   1118         return BAD_VALUE;
   1119     }
   1120 
   1121     mOpMode = streamList->operation_mode;
   1122     CDBG("%s: mOpMode: %d", __func__, mOpMode);
   1123 
   1124     /* first invalidate all the steams in the mStreamList
   1125      * if they appear again, they will be validated */
   1126     for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
   1127             it != mStreamInfo.end(); it++) {
   1128         QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
   1129         if (channel) {
   1130           channel->stop();
   1131         }
   1132         (*it)->status = INVALID;
   1133     }
   1134 
   1135     if (mRawDumpChannel) {
   1136         mRawDumpChannel->stop();
   1137         delete mRawDumpChannel;
   1138         mRawDumpChannel = NULL;
   1139     }
   1140 
   1141     if (mSupportChannel)
   1142         mSupportChannel->stop();
   1143 
   1144     if (mAnalysisChannel) {
   1145         mAnalysisChannel->stop();
   1146     }
   1147     if (mMetadataChannel) {
   1148         /* If content of mStreamInfo is not 0, there is metadata stream */
   1149         mMetadataChannel->stop();
   1150     }
   1151     if (mChannelHandle) {
   1152         mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
   1153                 mChannelHandle);
   1154         ALOGI("%s: stopping channel %d", __func__, mChannelHandle);
   1155     }
   1156 
   1157     pthread_mutex_lock(&mMutex);
   1158 
   1159     /* Check whether we have video stream */
   1160     m_bIs4KVideo = false;
   1161     m_bIsVideo = false;
   1162     m_bEisSupportedSize = false;
   1163     m_bTnrEnabled = false;
   1164     bool isZsl = false;
   1165     uint32_t videoWidth = 0U;
   1166     uint32_t videoHeight = 0U;
   1167     size_t rawStreamCnt = 0;
   1168     size_t stallStreamCnt = 0;
   1169     size_t processedStreamCnt = 0;
   1170     // Number of streams on ISP encoder path
   1171     size_t numStreamsOnEncoder = 0;
   1172     size_t numYuv888OnEncoder = 0;
   1173     bool bYuv888OverrideJpeg = false;
   1174     cam_dimension_t largeYuv888Size = {0, 0};
   1175     cam_dimension_t maxViewfinderSize = {0, 0};
   1176     bool bJpegExceeds4K = false;
   1177     bool bJpegOnEncoder = false;
   1178     bool bUseCommonFeatureMask = false;
   1179     uint32_t commonFeatureMask = 0;
   1180     maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
   1181     camera3_stream_t *inputStream = NULL;
   1182     bool isJpeg = false;
   1183     cam_dimension_t jpegSize = {0, 0};
   1184 
   1185     /*EIS configuration*/
   1186     bool eisSupported = false;
   1187     bool oisSupported = false;
   1188     int32_t margin_index = -1;
   1189     uint8_t eis_prop_set;
   1190     uint32_t maxEisWidth = 0;
   1191     uint32_t maxEisHeight = 0;
   1192     int32_t hal_version = CAM_HAL_V3;
   1193 
   1194     memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
   1195 
   1196     size_t count = IS_TYPE_MAX;
   1197     count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
   1198     for (size_t i = 0; i < count; i++) {
   1199         if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) {
   1200             eisSupported = true;
   1201             margin_index = (int32_t)i;
   1202             break;
   1203         }
   1204     }
   1205 
   1206     count = CAM_OPT_STAB_MAX;
   1207     count = MIN(gCamCapability[mCameraId]->optical_stab_modes_count, count);
   1208     for (size_t i = 0; i < count; i++) {
   1209         if (gCamCapability[mCameraId]->optical_stab_modes[i] ==  CAM_OPT_STAB_ON) {
   1210             oisSupported = true;
   1211             break;
   1212         }
   1213     }
   1214 
   1215     if (eisSupported) {
   1216         maxEisWidth = MAX_EIS_WIDTH;
   1217         maxEisHeight = MAX_EIS_HEIGHT;
   1218     }
   1219 
   1220     /* EIS setprop control */
   1221     char eis_prop[PROPERTY_VALUE_MAX];
   1222     memset(eis_prop, 0, sizeof(eis_prop));
   1223     property_get("persist.camera.eis.enable", eis_prop, "0");
   1224     eis_prop_set = (uint8_t)atoi(eis_prop);
   1225 
   1226     m_bEisEnable = eis_prop_set && (!oisSupported && eisSupported) &&
   1227             (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
   1228 
   1229     /* stream configurations */
   1230     for (size_t i = 0; i < streamList->num_streams; i++) {
   1231         camera3_stream_t *newStream = streamList->streams[i];
   1232         ALOGI("%s: stream[%d] type = %d, format = %d, width = %d, "
   1233                 "height = %d, rotation = %d, usage = 0x%x",
   1234                 __func__, i, newStream->stream_type, newStream->format,
   1235                 newStream->width, newStream->height, newStream->rotation,
   1236                 newStream->usage);
   1237         if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
   1238                 newStream->stream_type == CAMERA3_STREAM_INPUT){
   1239             isZsl = true;
   1240         }
   1241         if (newStream->stream_type == CAMERA3_STREAM_INPUT){
   1242             inputStream = newStream;
   1243         }
   1244 
   1245         if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
   1246             isJpeg = true;
   1247             jpegSize.width = newStream->width;
   1248             jpegSize.height = newStream->height;
   1249             if (newStream->width > VIDEO_4K_WIDTH ||
   1250                     newStream->height > VIDEO_4K_HEIGHT)
   1251                 bJpegExceeds4K = true;
   1252         }
   1253 
   1254         if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
   1255                 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
   1256             m_bIsVideo = true;
   1257             videoWidth = newStream->width;
   1258             videoHeight = newStream->height;
   1259             if ((VIDEO_4K_WIDTH <= newStream->width) &&
   1260                     (VIDEO_4K_HEIGHT <= newStream->height)) {
   1261                 m_bIs4KVideo = true;
   1262             }
   1263             m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
   1264                                   (newStream->height <= maxEisHeight);
   1265         }
   1266         if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
   1267                 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
   1268             switch (newStream->format) {
   1269             case HAL_PIXEL_FORMAT_BLOB:
   1270                 stallStreamCnt++;
   1271                 if (isOnEncoder(maxViewfinderSize, newStream->width,
   1272                         newStream->height)) {
   1273                     commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
   1274                     numStreamsOnEncoder++;
   1275                     bJpegOnEncoder = true;
   1276                 }
   1277                 break;
   1278             case HAL_PIXEL_FORMAT_RAW10:
   1279             case HAL_PIXEL_FORMAT_RAW_OPAQUE:
   1280             case HAL_PIXEL_FORMAT_RAW16:
   1281                 rawStreamCnt++;
   1282                 break;
   1283             case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
   1284                 processedStreamCnt++;
   1285                 if (isOnEncoder(maxViewfinderSize, newStream->width,
   1286                         newStream->height)) {
   1287                     if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
   1288                             IS_USAGE_ZSL(newStream->usage)) {
   1289                         commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
   1290                     } else {
   1291                         commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
   1292                     }
   1293                     numStreamsOnEncoder++;
   1294                 }
   1295                 break;
   1296             case HAL_PIXEL_FORMAT_YCbCr_420_888:
   1297                 processedStreamCnt++;
   1298                 if (isOnEncoder(maxViewfinderSize, newStream->width,
   1299                         newStream->height)) {
   1300                     // If Yuv888 size is not greater than 4K, set feature mask
   1301                     // to SUPERSET so that it support concurrent request on
   1302                     // YUV and JPEG.
   1303                     if (newStream->width <= VIDEO_4K_WIDTH &&
   1304                             newStream->height <= VIDEO_4K_HEIGHT) {
   1305                         commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
   1306                     } else {
   1307                         commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
   1308                     }
   1309                     numStreamsOnEncoder++;
   1310                     numYuv888OnEncoder++;
   1311                     largeYuv888Size.width = newStream->width;
   1312                     largeYuv888Size.height = newStream->height;
   1313                 }
   1314                 break;
   1315             default:
   1316                 processedStreamCnt++;
   1317                 if (isOnEncoder(maxViewfinderSize, newStream->width,
   1318                         newStream->height)) {
   1319                     commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
   1320                     numStreamsOnEncoder++;
   1321                 }
   1322                 break;
   1323             }
   1324 
   1325         }
   1326     }
   1327 
   1328     if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
   1329         !m_bIsVideo) {
   1330         m_bEisEnable = false;
   1331     }
   1332 
   1333     /* Logic to enable/disable TNR based on specific config size/etc.*/
   1334     if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
   1335             ((videoWidth == 1920 && videoHeight == 1080) ||
   1336             (videoWidth == 1280 && videoHeight == 720)) &&
   1337             (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
   1338         m_bTnrEnabled = true;
   1339 
   1340     /* Check if num_streams is sane */
    // --- Sanity checks on the requested stream combination ---
    // Reject configurations exceeding per-category stream limits.
    // NOTE(review): log message has a typo ("configu" -> "config"); string is
    // runtime output, left unchanged here.
    if (stallStreamCnt > MAX_STALLING_STREAMS ||
            rawStreamCnt > MAX_RAW_STREAMS ||
            processedStreamCnt > MAX_PROCESSED_STREAMS) {
        ALOGE("%s: Invalid stream configu: stall: %d, raw: %d, processed %d",
                __func__, stallStreamCnt, rawStreamCnt, processedStreamCnt);
        pthread_mutex_unlock(&mMutex);
        return -EINVAL;
    }
    /* Check whether we have zsl stream or 4k video case */
    if (isZsl && m_bIsVideo) {
        ALOGE("%s: Currently invalid configuration ZSL&Video!", __func__);
        pthread_mutex_unlock(&mMutex);
        return -EINVAL;
    }
    /* Check if stream sizes are sane */
    // At most 2 streams may sit on the ISP encoder path; with exactly 2, a
    // common postprocess feature mask is applied to both (they share output).
    if (numStreamsOnEncoder > 2) {
        ALOGE("%s: Number of streams on ISP encoder path exceeds limits of 2",
                __func__);
        pthread_mutex_unlock(&mMutex);
        return -EINVAL;
    } else if (1 < numStreamsOnEncoder){
        bUseCommonFeatureMask = true;
        CDBG_HIGH("%s: Multiple streams above max viewfinder size, common mask needed",
                __func__);
    }

    /* Check if BLOB size is greater than 4k in 4k recording case */
    if (m_bIs4KVideo && bJpegExceeds4K) {
        ALOGE("%s: HAL doesn't support Blob size greater than 4k in 4k recording",
                __func__);
        pthread_mutex_unlock(&mMutex);
        return -EINVAL;
    }

    // When JPEG and preview streams share VFE output, CPP will not apply CAC2
    // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
    // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
    // is not true. Otherwise testMandatoryOutputCombinations will fail with following
    // configurations:
    //    {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
    //    {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
    //    (These two configurations will not have CAC2 enabled even in HQ modes.)
    if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
        ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
                __func__);
        pthread_mutex_unlock(&mMutex);
        return -EINVAL;
    }

    // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
    // the YUV stream's size is greater or equal to the JPEG size, set common
    // postprocess mask to NONE, so that we can take advantage of postproc bypass.
    // NOTE(review): the comparison below is strict '>' although the comment
    // above says "greater or equal" — confirm which is intended.
    if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
            jpegSize.width, jpegSize.height) &&
            largeYuv888Size.width > jpegSize.width &&
            largeYuv888Size.height > jpegSize.height) {
        bYuv888OverrideJpeg = true;
    } else if (!isJpeg && numStreamsOnEncoder > 1) {
        commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
    }

    // Validate per-stream dimensions, then rotations; bail out on either failure.
    rc = validateStreamDimensions(streamList);
    if (rc == NO_ERROR) {
        rc = validateStreamRotations(streamList);
    }
    if (rc != NO_ERROR) {
        ALOGE("%s: Invalid stream configuration requested!", __func__);
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    // --- Identify ZSL and JPEG streams; reconcile with previously known streams ---
    camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
    camera3_stream_t *jpegStream = NULL;
    for (size_t i = 0; i < streamList->num_streams; i++) {
        camera3_stream_t *newStream = streamList->streams[i];
        CDBG_HIGH("%s: newStream type = %d, stream format = %d "
                "stream size : %d x %d, stream rotation = %d",
                __func__, newStream->stream_type, newStream->format,
                newStream->width, newStream->height, newStream->rotation);
        //if the stream is in the mStreamList validate it
        bool stream_exists = false;
        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
                it != mStreamInfo.end(); it++) {
            if ((*it)->stream == newStream) {
                // Stream carried over from a previous configuration: tear down
                // its old channel (stored in stream->priv) so a fresh one can
                // be constructed below.
                QCamera3ProcessingChannel *channel =
                    (QCamera3ProcessingChannel*)(*it)->stream->priv;
                stream_exists = true;
                if (channel)
                    delete channel;
                (*it)->status = VALID;
                (*it)->stream->priv = NULL;
                (*it)->channel = NULL;
            }
        }
        if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
            //new stream
            // Allocated with malloc; ownership passes to mStreamInfo
            // (presumably freed by cleanAndSortStreamInfo()/teardown — confirm).
            stream_info_t* stream_info;
            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
            if (!stream_info) {
               ALOGE("%s: Could not allocate stream info", __func__);
               rc = -ENOMEM;
               pthread_mutex_unlock(&mMutex);
               return rc;
            }
            stream_info->stream = newStream;
            stream_info->status = VALID;
            stream_info->channel = NULL;
            mStreamInfo.push_back(stream_info);
        }
        /* Covers Opaque ZSL and API1 F/W ZSL */
        if (IS_USAGE_ZSL(newStream->usage)
                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
            if (zslStream != NULL) {
                ALOGE("%s: Multiple input/reprocess streams requested!", __func__);
                pthread_mutex_unlock(&mMutex);
                return BAD_VALUE;
            }
            zslStream = newStream;
        }
        /* Covers YUV reprocess */
        // A YUV output stream matching the input stream's format and exact
        // dimensions is treated as the (size-only) ZSL stream.
        if (inputStream != NULL) {
            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
                    && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
                    && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
                    && inputStream->width == newStream->width
                    && inputStream->height == newStream->height) {
                if (zslStream != NULL) {
                    /* This scenario indicates multiple YUV streams with same size
                     * as input stream have been requested, since zsl stream handle
                     * is solely use for the purpose of overriding the size of streams
                     * which share h/w streams we will just make a guess here as to
                     * which of the stream is a ZSL stream, this will be refactored
                     * once we make generic logic for streams sharing encoder output
                     */
                    CDBG_HIGH("%s: Warning, Multiple ip/reprocess streams requested!", __func__);
                }
                zslStream = newStream;
            }
        }
        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
            jpegStream = newStream;
        }
    }

    /* If a zsl stream is set, we know that we have configured at least one input or
       bidirectional stream */
    if (NULL != zslStream) {
        mInputStreamInfo.dim.width = (int32_t)zslStream->width;
        mInputStreamInfo.dim.height = (int32_t)zslStream->height;
        mInputStreamInfo.format = zslStream->format;
        mInputStreamInfo.usage = zslStream->usage;
        CDBG("%s: Input stream configured! %d x %d, format %d, usage %d",
                __func__, mInputStreamInfo.dim.width,
                mInputStreamInfo.dim.height,
                mInputStreamInfo.format, mInputStreamInfo.usage);
    }

    // --- Tear down internal channels from any previous configuration ---
    cleanAndSortStreamInfo();
    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }

    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    //Create metadata channel and initialize it
    // NOTE(review): NULL checks after 'new' are only meaningful if this HAL is
    // built with -fno-exceptions (or an overloaded nothrow new); otherwise
    // allocation failure throws std::bad_alloc — confirm build flags.
    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
                    mChannelHandle, mCameraHandle->ops, captureResultCb,
                    &gCamCapability[mCameraId]->padding_info, CAM_QCOM_FEATURE_NONE, this);
    if (mMetadataChannel == NULL) {
        ALOGE("%s: failed to allocate metadata channel", __func__);
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }
    rc = mMetadataChannel->initialize(IS_TYPE_NONE);
    if (rc < 0) {
        ALOGE("%s: metadata channel initialization failed", __func__);
        delete mMetadataChannel;
        mMetadataChannel = NULL;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    // Create analysis stream all the time, even when h/w support is not available
    {
        mAnalysisChannel = new QCamera3SupportChannel(
                mCameraHandle->camera_handle,
                mChannelHandle,
                mCameraHandle->ops,
                &gCamCapability[mCameraId]->padding_info,
                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3,
                CAM_STREAM_TYPE_ANALYSIS,
                &gCamCapability[mCameraId]->analysis_recommended_res,
                gCamCapability[mCameraId]->analysis_recommended_format,
                this,
                0); // force buffer count to 0
        if (!mAnalysisChannel) {
            ALOGE("%s: H/W Analysis channel cannot be created", __func__);
            pthread_mutex_unlock(&mMutex);
            return -ENOMEM;
        }
    }

    // --- Populate mStreamConfigInfo and construct a channel per framework stream ---
    bool isRawStreamRequested = false;
    memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
    /* Allocate channel objects for the requested streams */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        camera3_stream_t *newStream = streamList->streams[i];
        uint32_t stream_usage = newStream->usage;
        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
        // First, classify the stream type and postprocess mask.
        if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                || IS_USAGE_ZSL(newStream->usage)) &&
            newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
            mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
            if (bUseCommonFeatureMask) {
                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
                        commonFeatureMask;
            } else {
                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
                        CAM_QCOM_FEATURE_NONE;
            }

        } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
                CDBG_HIGH("%s: Input stream configured, reprocess config", __func__);
        } else {
            //for non zsl streams find out the format
            switch (newStream->format) {
            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
              {
                 // Video-encoder usage flag distinguishes VIDEO from PREVIEW;
                 // TNR is OR'ed in per-type when enabled.
                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
                         = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;

                 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {

                     mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_VIDEO;
                     if (m_bTnrEnabled && m_bTnrVideo) {
                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
                             CAM_QCOM_FEATURE_CPP_TNR;
                     }

                 } else {

                     mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_PREVIEW;
                     if (m_bTnrEnabled && m_bTnrPreview) {
                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
                             CAM_QCOM_FEATURE_CPP_TNR;
                     }
                 }

                 // For 90/270 rotation, swap width/height reported to the backend.
                 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                         (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
                             newStream->height;
                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
                             newStream->width;
                 }
              }
              break;
           case HAL_PIXEL_FORMAT_YCbCr_420_888:
              mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
              if (isOnEncoder(maxViewfinderSize, newStream->width,
                      newStream->height)) {
                  if (bUseCommonFeatureMask)
                      mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
                              commonFeatureMask;
                  else
                      mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
                              CAM_QCOM_FEATURE_NONE;
              } else {
                  mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
                          CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
              }
              break;
           case HAL_PIXEL_FORMAT_BLOB:
              mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
              if (m_bIs4KVideo && !isZsl) {
                  mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
                          = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
              } else {
                  if (bUseCommonFeatureMask &&
                          isOnEncoder(maxViewfinderSize, newStream->width,
                                  newStream->height)) {
                      mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
                  } else {
                      mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
                  }
              }
              // Override the BLOB stream's backend size for ZSL, 4K video
              // snapshot, or YUV888-override cases.
              if (isZsl) {
                  if (zslStream) {
                      mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
                              (int32_t)zslStream->width;
                      mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
                              (int32_t)zslStream->height;
                  } else {
                      ALOGE("%s: Error, No ZSL stream identified",__func__);
                      pthread_mutex_unlock(&mMutex);
                      return -EINVAL;
                  }
              } else if (m_bIs4KVideo) {
                  mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
                          (int32_t)videoWidth;
                  mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
                          (int32_t)videoHeight;
              } else if (bYuv888OverrideJpeg) {
                  mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
                          (int32_t)largeYuv888Size.width;
                  mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
                          (int32_t)largeYuv888Size.height;
              }
              break;
           case HAL_PIXEL_FORMAT_RAW_OPAQUE:
           case HAL_PIXEL_FORMAT_RAW16:
           case HAL_PIXEL_FORMAT_RAW10:
              mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
              isRawStreamRequested = true;
              break;
           default:
              mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
              mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
              break;
            }

        }

        // Second, construct the channel object if this stream has none yet
        // (priv is NULL for new streams; carried-over streams were reset above).
        if (newStream->priv == NULL) {
            //New stream, construct channel
            // Augment gralloc usage flags according to stream direction/role.
            switch (newStream->stream_type) {
            case CAMERA3_STREAM_INPUT:
                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
                break;
            case CAMERA3_STREAM_BIDIRECTIONAL:
                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
                    GRALLOC_USAGE_HW_CAMERA_WRITE;
                break;
            case CAMERA3_STREAM_OUTPUT:
                /* For video encoding stream, set read/write rarely
                 * flag so that they may be set to un-cached */
                if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
                    newStream->usage |=
                         (GRALLOC_USAGE_SW_READ_RARELY |
                         GRALLOC_USAGE_SW_WRITE_RARELY |
                         GRALLOC_USAGE_HW_CAMERA_WRITE);
                else if (IS_USAGE_ZSL(newStream->usage))
                    CDBG("%s: ZSL usage flag skipping", __func__);
                else if (newStream == zslStream
                        || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
                    newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
                } else
                    newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
                break;
            default:
                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
                break;
            }

            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
                QCamera3ProcessingChannel *channel = NULL;
                switch (newStream->format) {
                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
                    // HFR (constrained high-speed) video gets a channel with 0
                    // heap buffers; normal case uses MAX_INFLIGHT_REQUESTS.
                    if ((newStream->usage &
                            private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
                            (streamList->operation_mode ==
                            CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
                    ) {
                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
                                mChannelHandle, mCameraHandle->ops, captureResultCb,
                                &gCamCapability[mCameraId]->padding_info,
                                this,
                                newStream,
                                (cam_stream_type_t)
                                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
                                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
                                mMetadataChannel,
                                0); //heap buffers are not required for HFR video channel
                        if (channel == NULL) {
                            ALOGE("%s: allocation of channel failed", __func__);
                            pthread_mutex_unlock(&mMutex);
                            return -ENOMEM;
                        }
                        //channel->getNumBuffers() will return 0 here so use
                        //MAX_INFLIGH_HFR_REQUESTS
                        newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
                        newStream->priv = channel;
                        ALOGI("%s: num video buffers in HFR mode: %d",
                                __func__, MAX_INFLIGHT_HFR_REQUESTS);
                    } else {
                        /* Copy stream contents in HFR preview only case to create
                         * dummy batch channel so that sensor streaming is in
                         * HFR mode */
                        if (!m_bIsVideo && (streamList->operation_mode ==
                                CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
                            mDummyBatchStream = *newStream;
                        }
                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
                                mChannelHandle, mCameraHandle->ops, captureResultCb,
                                &gCamCapability[mCameraId]->padding_info,
                                this,
                                newStream,
                                (cam_stream_type_t)
                                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
                                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
                                mMetadataChannel,
                                MAX_INFLIGHT_REQUESTS);
                        if (channel == NULL) {
                            ALOGE("%s: allocation of channel failed", __func__);
                            pthread_mutex_unlock(&mMutex);
                            return -ENOMEM;
                        }
                        newStream->max_buffers = channel->getNumBuffers();
                        newStream->priv = channel;
                    }
                    break;
                case HAL_PIXEL_FORMAT_YCbCr_420_888: {
                    channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
                            mChannelHandle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info,
                            this,
                            newStream,
                            (cam_stream_type_t)
                                    mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
                            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
                            mMetadataChannel);
                    if (channel == NULL) {
                        ALOGE("%s: allocation of YUV channel failed", __func__);
                        pthread_mutex_unlock(&mMutex);
                        return -ENOMEM;
                    }
                    newStream->max_buffers = channel->getNumBuffers();
                    newStream->priv = channel;
                    break;
                }
                case HAL_PIXEL_FORMAT_RAW_OPAQUE:
                case HAL_PIXEL_FORMAT_RAW16:
                case HAL_PIXEL_FORMAT_RAW10:
                    mRawChannel = new QCamera3RawChannel(
                            mCameraHandle->camera_handle, mChannelHandle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info,
                            this, newStream, CAM_QCOM_FEATURE_NONE,
                            mMetadataChannel,
                            (newStream->format == HAL_PIXEL_FORMAT_RAW16));
                    if (mRawChannel == NULL) {
                        ALOGE("%s: allocation of raw channel failed", __func__);
                        pthread_mutex_unlock(&mMutex);
                        return -ENOMEM;
                    }
                    newStream->max_buffers = mRawChannel->getNumBuffers();
                    newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
                    break;
                case HAL_PIXEL_FORMAT_BLOB:
                    // Max live snapshot inflight buffer is 1. This is to mitigate
                    // frame drop issues for video snapshot. The more buffers being
                    // allocated, the more frame drops there are.
                    mPictureChannel = new QCamera3PicChannel(
                            mCameraHandle->camera_handle, mChannelHandle,
                            mCameraHandle->ops, captureResultCb,
                            &gCamCapability[mCameraId]->padding_info, this, newStream,
                            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
                            m_bIs4KVideo, isZsl, mMetadataChannel,
                            (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
                    if (mPictureChannel == NULL) {
                        ALOGE("%s: allocation of channel failed", __func__);
                        pthread_mutex_unlock(&mMutex);
                        return -ENOMEM;
                    }
                    newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
                    newStream->max_buffers = mPictureChannel->getNumBuffers();
                    // Apply the (possibly overridden) backend size computed above.
                    mPictureChannel->overrideYuvSize(
                            mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
                            mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
                    break;

                default:
                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
                    break;
                }
            } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
                newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
            } else {
                // BUG(review): this error path returns without
                // pthread_mutex_unlock(&mMutex), unlike every other early
                // return in this function — mMutex stays locked (deadlock risk).
                ALOGE("%s: Error, Unknown stream type", __func__);
                return -EINVAL;
            }

            // Record the constructed channel in the matching stream_info entry.
            for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
                    it != mStreamInfo.end(); it++) {
                if ((*it)->stream == newStream) {
                    (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
                    break;
                }
            }
        } else {
            // Channel already exists for this stream
            // Do nothing for now
        }

    /* Do not add entries for input stream in metastream info
         * since there is no real stream associated with it
         */
        if (newStream->stream_type != CAMERA3_STREAM_INPUT)
            mStreamConfigInfo.num_streams++;
    }

    //RAW DUMP channel
    // Only created when raw dumping is enabled and no app RAW stream exists.
    if (mEnableRawDump && isRawStreamRequested == false){
        cam_dimension_t rawDumpSize;
        rawDumpSize = getMaxRawSize(mCameraId);
        mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
                                  mChannelHandle,
                                  mCameraHandle->ops,
                                  rawDumpSize,
                                  &gCamCapability[mCameraId]->padding_info,
                                  this, CAM_QCOM_FEATURE_NONE);
        if (!mRawDumpChannel) {
            ALOGE("%s: Raw Dump channel cannot be created", __func__);
            pthread_mutex_unlock(&mMutex);
            return -ENOMEM;
        }
    }


    // --- Append internal (non-framework) streams to mStreamConfigInfo ---
    if (mAnalysisChannel) {
        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
                gCamCapability[mCameraId]->analysis_recommended_res;
        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
                CAM_STREAM_TYPE_ANALYSIS;
        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
        mStreamConfigInfo.num_streams++;
    }

    if (isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
        mSupportChannel = new QCamera3SupportChannel(
                mCameraHandle->camera_handle,
                mChannelHandle,
                mCameraHandle->ops,
                &gCamCapability[mCameraId]->padding_info,
                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3,
                CAM_STREAM_TYPE_CALLBACK,
                &QCamera3SupportChannel::kDim,
                CAM_FORMAT_YUV_420_NV21,
                this);
        if (!mSupportChannel) {
            ALOGE("%s: dummy channel cannot be created", __func__);
            pthread_mutex_unlock(&mMutex);
            return -ENOMEM;
        }
    }

    if (mSupportChannel) {
        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
                QCamera3SupportChannel::kDim;
        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
                CAM_STREAM_TYPE_CALLBACK;
        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
        mStreamConfigInfo.num_streams++;
    }

    if (mRawDumpChannel) {
        cam_dimension_t rawSize;
        rawSize = getMaxRawSize(mCameraId);
        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
                rawSize;
        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
                CAM_STREAM_TYPE_RAW;
        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
                CAM_QCOM_FEATURE_NONE;
        mStreamConfigInfo.num_streams++;
    }
    /* In HFR mode, if video stream is not added, create a dummy channel so that
     * ISP can create a batch mode even for preview only case. This channel is
     * never 'start'ed (no stream-on), it is only 'initialized'  */
    if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
            !m_bIsVideo) {
        mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
                mChannelHandle,
                mCameraHandle->ops, captureResultCb,
                &gCamCapability[mCameraId]->padding_info,
                this,
                &mDummyBatchStream,
                CAM_STREAM_TYPE_VIDEO,
                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3,
                mMetadataChannel);
        if (NULL == mDummyBatchChannel) {
            // Non-fatal: preview falls back to a non-HFR sensor mode.
            ALOGE("%s: creation of mDummyBatchChannel failed."
                    "Preview will use non-hfr sensor mode ", __func__);
        }
    }
    if (mDummyBatchChannel) {
        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
                mDummyBatchStream.width;
        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
                mDummyBatchStream.height;
        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
                CAM_STREAM_TYPE_VIDEO;
        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
        mStreamConfigInfo.num_streams++;
    }

    mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
    mStreamConfigInfo.buffer_info.max_buffers =
            m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;

    /* Initialize mPendingRequestInfo and mPendnigBuffersMap */
    // Reset all per-session pending-request bookkeeping for the new config.
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    mPendingFrameDropList.clear();
    // Initialize/Reset the pending buffers list
    mPendingBuffersMap.num_buffers = 0;
    mPendingBuffersMap.mPendingBufferList.clear();
    mPendingReprocessResultList.clear();

    mFirstRequest = true;
    mCurJpegMeta.clear();
    //Get min frame duration for this streams configuration
    deriveMinFrameDuration();

    /* Turn on video hint only if video stream is configured */

    pthread_mutex_unlock(&mMutex);

    return rc;
}
   1984 
   1985 /*===========================================================================
   1986  * FUNCTION   : validateCaptureRequest
   1987  *
   1988  * DESCRIPTION: validate a capture request from camera service
   1989  *
   1990  * PARAMETERS :
   1991  *   @request : request from framework to process
   1992  *
   1993  * RETURN     :
   1994  *
   1995  *==========================================================================*/
   1996 int QCamera3HardwareInterface::validateCaptureRequest(
   1997                     camera3_capture_request_t *request)
   1998 {
   1999     ssize_t idx = 0;
   2000     const camera3_stream_buffer_t *b;
   2001     CameraMetadata meta;
   2002 
   2003     /* Sanity check the request */
   2004     if (request == NULL) {
   2005         ALOGE("%s: NULL capture request", __func__);
   2006         return BAD_VALUE;
   2007     }
   2008 
   2009     if (request->settings == NULL && mFirstRequest) {
   2010         /*settings cannot be null for the first request*/
   2011         return BAD_VALUE;
   2012     }
   2013 
   2014     uint32_t frameNumber = request->frame_number;
   2015     if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
   2016         ALOGE("%s: Request %d: No output buffers provided!",
   2017                 __FUNCTION__, frameNumber);
   2018         return BAD_VALUE;
   2019     }
   2020     if (request->num_output_buffers >= MAX_NUM_STREAMS) {
   2021         ALOGE("%s: Number of buffers %d equals or is greater than maximum number of streams!",
   2022                 __func__, request->num_output_buffers, MAX_NUM_STREAMS);
   2023         return BAD_VALUE;
   2024     }
   2025     if (request->input_buffer != NULL) {
   2026         b = request->input_buffer;
   2027         if (b->status != CAMERA3_BUFFER_STATUS_OK) {
   2028             ALOGE("%s: Request %d: Buffer %ld: Status not OK!",
   2029                     __func__, frameNumber, (long)idx);
   2030             return BAD_VALUE;
   2031         }
   2032         if (b->release_fence != -1) {
   2033             ALOGE("%s: Request %d: Buffer %ld: Has a release fence!",
   2034                     __func__, frameNumber, (long)idx);
   2035             return BAD_VALUE;
   2036         }
   2037         if (b->buffer == NULL) {
   2038             ALOGE("%s: Request %d: Buffer %ld: NULL buffer handle!",
   2039                     __func__, frameNumber, (long)idx);
   2040             return BAD_VALUE;
   2041         }
   2042     }
   2043 
   2044     // Validate all buffers
   2045     b = request->output_buffers;
   2046     do {
   2047         QCamera3ProcessingChannel *channel =
   2048                 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
   2049         if (channel == NULL) {
   2050             ALOGE("%s: Request %d: Buffer %ld: Unconfigured stream!",
   2051                     __func__, frameNumber, (long)idx);
   2052             return BAD_VALUE;
   2053         }
   2054         if (b->status != CAMERA3_BUFFER_STATUS_OK) {
   2055             ALOGE("%s: Request %d: Buffer %ld: Status not OK!",
   2056                     __func__, frameNumber, (long)idx);
   2057             return BAD_VALUE;
   2058         }
   2059         if (b->release_fence != -1) {
   2060             ALOGE("%s: Request %d: Buffer %ld: Has a release fence!",
   2061                     __func__, frameNumber, (long)idx);
   2062             return BAD_VALUE;
   2063         }
   2064         if (b->buffer == NULL) {
   2065             ALOGE("%s: Request %d: Buffer %ld: NULL buffer handle!",
   2066                     __func__, frameNumber, (long)idx);
   2067             return BAD_VALUE;
   2068         }
   2069         if (*(b->buffer) == NULL) {
   2070             ALOGE("%s: Request %d: Buffer %ld: NULL private handle!",
   2071                     __func__, frameNumber, (long)idx);
   2072             return BAD_VALUE;
   2073         }
   2074         idx++;
   2075         b = request->output_buffers + idx;
   2076     } while (idx < (ssize_t)request->num_output_buffers);
   2077 
   2078     return NO_ERROR;
   2079 }
   2080 
   2081 /*===========================================================================
   2082  * FUNCTION   : deriveMinFrameDuration
   2083  *
   2084  * DESCRIPTION: derive mininum processed, jpeg, and raw frame durations based
   2085  *              on currently configured streams.
   2086  *
   2087  * PARAMETERS : NONE
   2088  *
   2089  * RETURN     : NONE
   2090  *
   2091  *==========================================================================*/
   2092 void QCamera3HardwareInterface::deriveMinFrameDuration()
   2093 {
   2094     int32_t maxJpegDim, maxProcessedDim, maxRawDim;
   2095 
   2096     maxJpegDim = 0;
   2097     maxProcessedDim = 0;
   2098     maxRawDim = 0;
   2099 
   2100     // Figure out maximum jpeg, processed, and raw dimensions
   2101     for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
   2102         it != mStreamInfo.end(); it++) {
   2103 
   2104         // Input stream doesn't have valid stream_type
   2105         if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
   2106             continue;
   2107 
   2108         int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
   2109         if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
   2110             if (dimension > maxJpegDim)
   2111                 maxJpegDim = dimension;
   2112         } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
   2113                 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
   2114                 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
   2115             if (dimension > maxRawDim)
   2116                 maxRawDim = dimension;
   2117         } else {
   2118             if (dimension > maxProcessedDim)
   2119                 maxProcessedDim = dimension;
   2120         }
   2121     }
   2122 
   2123     size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
   2124             MAX_SIZES_CNT);
   2125 
   2126     //Assume all jpeg dimensions are in processed dimensions.
   2127     if (maxJpegDim > maxProcessedDim)
   2128         maxProcessedDim = maxJpegDim;
   2129     //Find the smallest raw dimension that is greater or equal to jpeg dimension
   2130     if (maxProcessedDim > maxRawDim) {
   2131         maxRawDim = INT32_MAX;
   2132 
   2133         for (size_t i = 0; i < count; i++) {
   2134             int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
   2135                     gCamCapability[mCameraId]->raw_dim[i].height;
   2136             if (dimension >= maxProcessedDim && dimension < maxRawDim)
   2137                 maxRawDim = dimension;
   2138         }
   2139     }
   2140 
   2141     //Find minimum durations for processed, jpeg, and raw
   2142     for (size_t i = 0; i < count; i++) {
   2143         if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
   2144                 gCamCapability[mCameraId]->raw_dim[i].height) {
   2145             mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
   2146             break;
   2147         }
   2148     }
   2149     count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
   2150     for (size_t i = 0; i < count; i++) {
   2151         if (maxProcessedDim ==
   2152                 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
   2153                 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
   2154             mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
   2155             mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
   2156             break;
   2157         }
   2158     }
   2159 }
   2160 
   2161 /*===========================================================================
   2162  * FUNCTION   : getMinFrameDuration
   2163  *
   2164  * DESCRIPTION: get minimum frame draution based on the current maximum frame durations
   2165  *              and current request configuration.
   2166  *
   2167  * PARAMETERS : @request: requset sent by the frameworks
   2168  *
   2169  * RETURN     : min farme duration for a particular request
   2170  *
   2171  *==========================================================================*/
   2172 int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
   2173 {
   2174     bool hasJpegStream = false;
   2175     bool hasRawStream = false;
   2176     for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
   2177         const camera3_stream_t *stream = request->output_buffers[i].stream;
   2178         if (stream->format == HAL_PIXEL_FORMAT_BLOB)
   2179             hasJpegStream = true;
   2180         else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
   2181                 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
   2182                 stream->format == HAL_PIXEL_FORMAT_RAW16)
   2183             hasRawStream = true;
   2184     }
   2185 
   2186     if (!hasJpegStream)
   2187         return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
   2188     else
   2189         return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
   2190 }
   2191 
   2192 /*===========================================================================
   2193  * FUNCTION   : handlePendingReprocResults
   2194  *
   2195  * DESCRIPTION: check and notify on any pending reprocess results
   2196  *
   2197  * PARAMETERS :
   2198  *   @frame_number   : Pending request frame number
   2199  *
   2200  * RETURN     : int32_t type of status
   2201  *              NO_ERROR  -- success
   2202  *              none-zero failure code
   2203  *==========================================================================*/
int32_t QCamera3HardwareInterface::handlePendingReprocResults(uint32_t frame_number)
{
    // Look for a deferred reprocess result matching this frame number. If
    // found, deliver its notify message and capture result to the framework,
    // then remove both the pending result entry and the matching pending
    // request entry.
    for (List<PendingReprocessResult>::iterator j = mPendingReprocessResultList.begin();
            j != mPendingReprocessResultList.end(); j++) {
        if (j->frame_number == frame_number) {
            // Shutter/notify must reach the framework before the result.
            mCallbackOps->notify(mCallbackOps, &j->notify_msg);

            CDBG("%s: Delayed reprocess notify %d", __func__,
                    frame_number);

            // Find the matching pending request so the result can carry its
            // input buffer and settings.
            for (pendingRequestIterator k = mPendingRequestsList.begin();
                    k != mPendingRequestsList.end(); k++) {

                if (k->frame_number == j->frame_number) {
                    CDBG("%s: Found reprocess frame number %d in pending reprocess List "
                            "Take it out!!", __func__,
                            k->frame_number);

                    camera3_capture_result result;
                    memset(&result, 0, sizeof(camera3_capture_result));
                    result.frame_number = frame_number;
                    result.num_output_buffers = 1;
                    // Points into the list node; j must stay valid (is only
                    // erased below, after the callback returns).
                    result.output_buffers =  &j->buffer;
                    result.input_buffer = k->input_buffer;
                    result.result = k->settings;
                    result.partial_result = PARTIAL_RESULT_COUNT;
                    mCallbackOps->process_capture_result(mCallbackOps, &result);

                    erasePendingRequest(k);
                    break;
                }
            }
            // Safe to drop the pending-result entry now that the callback
            // no longer references j->buffer.
            mPendingReprocessResultList.erase(j);
            break;
        }
    }
    // Always reports success, even when no matching entry was found.
    return NO_ERROR;
}
   2242 
   2243 /*===========================================================================
   2244  * FUNCTION   : handleBatchMetadata
   2245  *
   2246  * DESCRIPTION: Handles metadata buffer callback in batch mode
   2247  *
   2248  * PARAMETERS : @metadata_buf: metadata buffer
   2249  *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
   2250  *                 the meta buf in this method
   2251  *
   2252  * RETURN     :
   2253  *
   2254  *==========================================================================*/
   2255 void QCamera3HardwareInterface::handleBatchMetadata(
   2256         mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
   2257 {
   2258     ATRACE_CALL();
   2259 
   2260     if (NULL == metadata_buf) {
   2261         ALOGE("%s: metadata_buf is NULL", __func__);
   2262         return;
   2263     }
   2264     /* In batch mode, the metdata will contain the frame number and timestamp of
   2265      * the last frame in the batch. Eg: a batch containing buffers from request
   2266      * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
   2267      * multiple process_capture_requests => 1 set_param => 1 handleBatchMetata =>
   2268      * multiple process_capture_results */
   2269     metadata_buffer_t *metadata =
   2270             (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
   2271     int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
   2272     uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
   2273     uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
   2274     uint32_t frame_number = 0, urgent_frame_number = 0;
   2275     int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
   2276     bool invalid_metadata = false;
   2277     size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
   2278     size_t loopCount = 1;
   2279 
   2280     int32_t *p_frame_number_valid =
   2281             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
   2282     uint32_t *p_frame_number =
   2283             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
   2284     int64_t *p_capture_time =
   2285             POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
   2286     int32_t *p_urgent_frame_number_valid =
   2287             POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
   2288     uint32_t *p_urgent_frame_number =
   2289             POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
   2290 
   2291     if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
   2292             (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
   2293             (NULL == p_urgent_frame_number)) {
   2294         ALOGE("%s: Invalid metadata", __func__);
   2295         invalid_metadata = true;
   2296     } else {
   2297         frame_number_valid = *p_frame_number_valid;
   2298         last_frame_number = *p_frame_number;
   2299         last_frame_capture_time = *p_capture_time;
   2300         urgent_frame_number_valid = *p_urgent_frame_number_valid;
   2301         last_urgent_frame_number = *p_urgent_frame_number;
   2302     }
   2303 
   2304     /* In batchmode, when no video buffers are requested, set_parms are sent
   2305      * for every capture_request. The difference between consecutive urgent
   2306      * frame numbers and frame numbers should be used to interpolate the
   2307      * corresponding frame numbers and time stamps */
   2308     pthread_mutex_lock(&mMutex);
   2309     if (urgent_frame_number_valid) {
   2310         first_urgent_frame_number =
   2311                 mPendingBatchMap.valueFor(last_urgent_frame_number);
   2312         urgentFrameNumDiff = last_urgent_frame_number + 1 -
   2313                 first_urgent_frame_number;
   2314 
   2315         CDBG_HIGH("%s: urgent_frm: valid: %d frm_num: %d - %d",
   2316                 __func__, urgent_frame_number_valid,
   2317                 first_urgent_frame_number, last_urgent_frame_number);
   2318     }
   2319 
   2320     if (frame_number_valid) {
   2321         first_frame_number = mPendingBatchMap.valueFor(last_frame_number);
   2322         frameNumDiff = last_frame_number + 1 -
   2323                 first_frame_number;
   2324         mPendingBatchMap.removeItem(last_frame_number);
   2325 
   2326         CDBG_HIGH("%s:        frm: valid: %d frm_num: %d - %d",
   2327                 __func__, frame_number_valid,
   2328                 first_frame_number, last_frame_number);
   2329 
   2330     }
   2331     pthread_mutex_unlock(&mMutex);
   2332 
   2333     if (urgent_frame_number_valid || frame_number_valid) {
   2334         loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
   2335         if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
   2336             ALOGE("%s: urgentFrameNumDiff: %d urgentFrameNum: %d",
   2337                     __func__, urgentFrameNumDiff, last_urgent_frame_number);
   2338         if (frameNumDiff > MAX_HFR_BATCH_SIZE)
   2339             ALOGE("%s: frameNumDiff: %d frameNum: %d",
   2340                     __func__, frameNumDiff, last_frame_number);
   2341     }
   2342 
   2343     for (size_t i = 0; i < loopCount; i++) {
   2344         /* handleMetadataWithLock is called even for invalid_metadata for
   2345          * pipeline depth calculation */
   2346         if (!invalid_metadata) {
   2347             /* Infer frame number. Batch metadata contains frame number of the
   2348              * last frame */
   2349             if (urgent_frame_number_valid) {
   2350                 if (i < urgentFrameNumDiff) {
   2351                     urgent_frame_number =
   2352                             first_urgent_frame_number + i;
   2353                     CDBG("%s: inferred urgent frame_number: %d",
   2354                             __func__, urgent_frame_number);
   2355                     ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
   2356                             CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
   2357                 } else {
   2358                     /* This is to handle when urgentFrameNumDiff < frameNumDiff */
   2359                     ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
   2360                             CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
   2361                 }
   2362             }
   2363 
   2364             /* Infer frame number. Batch metadata contains frame number of the
   2365              * last frame */
   2366             if (frame_number_valid) {
   2367                 if (i < frameNumDiff) {
   2368                     frame_number = first_frame_number + i;
   2369                     CDBG("%s: inferred frame_number: %d", __func__, frame_number);
   2370                     ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
   2371                             CAM_INTF_META_FRAME_NUMBER, frame_number);
   2372                 } else {
   2373                     /* This is to handle when urgentFrameNumDiff > frameNumDiff */
   2374                     ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
   2375                              CAM_INTF_META_FRAME_NUMBER_VALID, 0);
   2376                 }
   2377             }
   2378 
   2379             if (last_frame_capture_time) {
   2380                 //Infer timestamp
   2381                 first_frame_capture_time = last_frame_capture_time -
   2382                         (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
   2383                 capture_time =
   2384                         first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
   2385                 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
   2386                         CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
   2387                 CDBG_HIGH("%s: batch capture_time: %lld, capture_time: %lld",
   2388                         __func__, last_frame_capture_time, capture_time);
   2389             }
   2390         }
   2391         pthread_mutex_lock(&mMutex);
   2392         handleMetadataWithLock(metadata_buf,
   2393                 false /* free_and_bufdone_meta_buf */,
   2394                 (i == 0) /* first metadata in the batch metadata */);
   2395         pthread_mutex_unlock(&mMutex);
   2396     }
   2397 
   2398 done_batch_metadata:
   2399     /* BufDone metadata buffer */
   2400     if (free_and_bufdone_meta_buf) {
   2401         mMetadataChannel->bufDone(metadata_buf);
   2402         free(metadata_buf);
   2403     }
   2404 }
   2405 
   2406 /*===========================================================================
   2407  * FUNCTION   : handleMetadataWithLock
   2408  *
   2409  * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
   2410  *
   2411  * PARAMETERS : @metadata_buf: metadata buffer
   2412  *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
   2413  *                 the meta buf in this method
   2414  *              @firstMetadataInBatch: Boolean to indicate whether this is the
   2415  *                  first metadata in a batch. Valid only for batch mode
   2416  *
   2417  * RETURN     :
   2418  *
   2419  *==========================================================================*/
   2420 void QCamera3HardwareInterface::handleMetadataWithLock(
   2421     mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
   2422     bool firstMetadataInBatch)
   2423 {
   2424     ATRACE_CALL();
   2425 
   2426     metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
   2427     int32_t frame_number_valid, urgent_frame_number_valid;
   2428     uint32_t frame_number, urgent_frame_number;
   2429     int64_t capture_time;
   2430     bool unfinished_raw_request = false;
   2431 
   2432     int32_t *p_frame_number_valid =
   2433             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
   2434     uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
   2435     int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
   2436     int32_t *p_urgent_frame_number_valid =
   2437             POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
   2438     uint32_t *p_urgent_frame_number =
   2439             POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
   2440     IF_META_AVAILABLE(cam_frame_dropped_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
   2441             metadata) {
   2442         CDBG("%s: Dropped frame info for frame_number_valid %d, frame_number %d",
   2443                 __func__, *p_frame_number_valid, *p_frame_number);
   2444     }
   2445 
   2446     if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
   2447             (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
   2448         ALOGE("%s: Invalid metadata", __func__);
   2449         if (free_and_bufdone_meta_buf) {
   2450             mMetadataChannel->bufDone(metadata_buf);
   2451             free(metadata_buf);
   2452         }
   2453         goto done_metadata;
   2454     } else {
   2455         frame_number_valid = *p_frame_number_valid;
   2456         frame_number = *p_frame_number;
   2457         capture_time = *p_capture_time;
   2458         urgent_frame_number_valid = *p_urgent_frame_number_valid;
   2459         urgent_frame_number = *p_urgent_frame_number;
   2460     }
   2461     //Partial result on process_capture_result for timestamp
   2462     if (urgent_frame_number_valid) {
   2463         CDBG("%s: valid urgent frame_number = %u, capture_time = %lld",
   2464           __func__, urgent_frame_number, capture_time);
   2465 
   2466         //Recieved an urgent Frame Number, handle it
   2467         //using partial results
   2468         for (pendingRequestIterator i =
   2469                 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
   2470             CDBG("%s: Iterator Frame = %d urgent frame = %d",
   2471                 __func__, i->frame_number, urgent_frame_number);
   2472 
   2473             if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
   2474                 (i->partial_result_cnt == 0)) {
   2475                 ALOGE("%s: Error: HAL missed urgent metadata for frame number %d",
   2476                     __func__, i->frame_number);
   2477             }
   2478 
   2479             if (i->frame_number == urgent_frame_number &&
   2480                      i->bUrgentReceived == 0) {
   2481 
   2482                 camera3_capture_result_t result;
   2483                 memset(&result, 0, sizeof(camera3_capture_result_t));
   2484 
   2485                 i->partial_result_cnt++;
   2486                 i->bUrgentReceived = 1;
   2487                 // Extract 3A metadata
   2488                 result.result =
   2489                     translateCbUrgentMetadataToResultMetadata(metadata);
   2490                 // Populate metadata result
   2491                 result.frame_number = urgent_frame_number;
   2492                 result.num_output_buffers = 0;
   2493                 result.output_buffers = NULL;
   2494                 result.partial_result = i->partial_result_cnt;
   2495 
   2496                 mCallbackOps->process_capture_result(mCallbackOps, &result);
   2497                 CDBG("%s: urgent frame_number = %u, capture_time = %lld",
   2498                      __func__, result.frame_number, capture_time);
   2499                 free_camera_metadata((camera_metadata_t *)result.result);
   2500                 break;
   2501             }
   2502         }
   2503     }
   2504 
   2505     if (!frame_number_valid) {
   2506         CDBG("%s: Not a valid normal frame number, used as SOF only", __func__);
   2507         if (free_and_bufdone_meta_buf) {
   2508             mMetadataChannel->bufDone(metadata_buf);
   2509             free(metadata_buf);
   2510         }
   2511         goto done_metadata;
   2512     }
   2513     CDBG_HIGH("%s: valid frame_number = %u, capture_time = %lld", __func__,
   2514             frame_number, capture_time);
   2515 
   2516     for (pendingRequestIterator i = mPendingRequestsList.begin();
   2517             i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
   2518         // Flush out all entries with less or equal frame numbers.
   2519 
   2520         camera3_capture_result_t result;
   2521         memset(&result, 0, sizeof(camera3_capture_result_t));
   2522 
   2523         CDBG("%s: frame_number in the list is %u", __func__, i->frame_number);
   2524 
   2525         // Check whether any stream buffer corresponding to this is dropped or not
   2526         // If dropped, then send the ERROR_BUFFER for the corresponding stream
   2527         // The API does not expect a blob buffer to be dropped
   2528         if (p_cam_frame_drop && p_cam_frame_drop->frame_dropped) {
   2529             /* Clear notify_msg structure */
   2530             camera3_notify_msg_t notify_msg;
   2531             memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
   2532             for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
   2533                     j != i->buffers.end(); j++) {
   2534                 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel *)j->stream->priv;
   2535                 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
   2536                 for (uint32_t k = 0; k < p_cam_frame_drop->cam_stream_ID.num_streams; k++) {
   2537                     if (streamID == p_cam_frame_drop->cam_stream_ID.streamID[k]) {
   2538                         // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
   2539                         ALOGW("%s: Start of reporting error frame#=%u, streamID=%u streamFormat=%d",
   2540                                 __func__, i->frame_number, streamID, j->stream->format);
   2541                         notify_msg.type = CAMERA3_MSG_ERROR;
   2542                         notify_msg.message.error.frame_number = i->frame_number;
   2543                         notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
   2544                         notify_msg.message.error.error_stream = j->stream;
   2545                         mCallbackOps->notify(mCallbackOps, &notify_msg);
   2546                         ALOGW("%s: End of reporting error frame#=%u, streamID=%u streamFormat=%d",
   2547                                 __func__, i->frame_number, streamID, j->stream->format);
   2548                         PendingFrameDropInfo PendingFrameDrop;
   2549                         PendingFrameDrop.frame_number=i->frame_number;
   2550                         PendingFrameDrop.stream_ID = streamID;
   2551                         // Add the Frame drop info to mPendingFrameDropList
   2552                         mPendingFrameDropList.push_back(PendingFrameDrop);
   2553                    }
   2554                }
   2555             }
   2556         }
   2557 
   2558         // Send empty metadata with already filled buffers for dropped metadata
   2559         // and send valid metadata with already filled buffers for current metadata
   2560         /* we could hit this case when we either
   2561          * 1. have a pending reprocess request or
   2562          * 2. miss a metadata buffer callback */
   2563         if (i->frame_number < frame_number) {
   2564             if (i->input_buffer) {
   2565                 /* this will be handled in handleInputBufferWithLock */
   2566                 i++;
   2567                 continue;
   2568             } else if (i->need_dynamic_blklvl) {
   2569                 unfinished_raw_request = true;
   2570                 // i->partial_result_cnt--;
   2571                 CDBG("%s, frame number:%d, partial_result:%d, unfinished raw request..",
   2572                         __func__, i->frame_number, i->partial_result_cnt);
   2573                 i++;
   2574                 continue;
   2575             } else if (i->pending_extra_result) {
   2576                 CDBG("%s, frame_number:%d, partial_result:%d, need_dynamic_blklvl:%d",
   2577                         __func__, i->frame_number, i->partial_result_cnt,
   2578                         i->need_dynamic_blklvl);
   2579                 // i->partial_result_cnt--;
   2580                 i++;
   2581                 continue;
   2582             } else {
   2583                 ALOGE("%s: Missing metadata buffer for frame number %d, reporting CAMERA3_MSG_ERROR_RESULT",
   2584                      __func__, i->frame_number);
   2585 
   2586                 mPendingLiveRequest--;
   2587 
   2588                 CameraMetadata dummyMetadata;
   2589                 dummyMetadata.update(ANDROID_REQUEST_ID, &(i->request_id), 1);
   2590                 result.result = dummyMetadata.release();
   2591 
   2592                 camera3_notify_msg_t notify_msg;
   2593                 memset(&notify_msg, 0, sizeof(notify_msg));
   2594                 notify_msg.type = CAMERA3_MSG_ERROR;
   2595                 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
   2596                 notify_msg.message.error.error_stream = NULL;
   2597                 notify_msg.message.error.frame_number = i->frame_number;
   2598                 mCallbackOps->notify(mCallbackOps, &notify_msg);
   2599             }
   2600         } else {
   2601             i->partial_result_cnt++;
   2602             CDBG("%s, frame_number:%d, need_dynamic_blklvl:%d, partial cnt:%d\n",
   2603                     __func__, i->frame_number, i->need_dynamic_blklvl,
   2604                     i->partial_result_cnt);
   2605             if (!i->need_dynamic_blklvl) {
   2606                 CDBG("%s, meta for request without raw, frame number: %d\n",
   2607                         __func__, i->frame_number);
   2608                 if (!unfinished_raw_request) {
   2609                     i->partial_result_cnt++;
   2610                     CDBG("%s, no raw request pending, send the final (cnt:%d) partial result",
   2611                             __func__, i->partial_result_cnt);
   2612                 }
   2613             }
   2614 
   2615             result.partial_result = i->partial_result_cnt;
   2616 
   2617             /* Clear notify_msg structure */
   2618             camera3_notify_msg_t notify_msg;
   2619             memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
   2620 
   2621             // Send shutter notify to frameworks
   2622             notify_msg.type = CAMERA3_MSG_SHUTTER;
   2623             notify_msg.message.shutter.frame_number = i->frame_number;
   2624             notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
   2625             mCallbackOps->notify(mCallbackOps, &notify_msg);
   2626 
   2627             i->timestamp = capture_time;
   2628 
   2629             // Find channel requiring metadata, meaning internal offline postprocess
   2630             // is needed.
   2631             //TODO: for now, we don't support two streams requiring metadata at the same time.
   2632             // (because we are not making copies, and metadata buffer is not reference counted.
   2633             bool internalPproc = false;
   2634             for (pendingBufferIterator iter = i->buffers.begin();
   2635                     iter != i->buffers.end(); iter++) {
   2636                 if (iter->need_metadata) {
   2637                     internalPproc = true;
   2638                     QCamera3ProcessingChannel *channel =
   2639                             (QCamera3ProcessingChannel *)iter->stream->priv;
   2640                     channel->queueReprocMetadata(metadata_buf);
   2641                     break;
   2642                 }
   2643             }
   2644 
   2645             result.result = translateFromHalMetadata(metadata,
   2646                     i->timestamp, i->request_id, i->jpegMetadata, i->pipeline_depth,
   2647                     i->capture_intent, i->hybrid_ae_enable, internalPproc, i->need_dynamic_blklvl,
   2648                     firstMetadataInBatch);
   2649 
   2650             saveExifParams(metadata);
   2651 
   2652             if (i->blob_request) {
   2653                 {
   2654                     //Dump tuning metadata if enabled and available
   2655                     char prop[PROPERTY_VALUE_MAX];
   2656                     memset(prop, 0, sizeof(prop));
   2657                     property_get("persist.camera.dumpmetadata", prop, "0");
   2658                     int32_t enabled = atoi(prop);
   2659                     if (enabled && metadata->is_tuning_params_valid) {
   2660                         dumpMetadataToFile(metadata->tuning_params,
   2661                                mMetaFrameCount,
   2662                                enabled,
   2663                                "Snapshot",
   2664                                frame_number);
   2665                     }
   2666                 }
   2667             }
   2668 
   2669             if (!internalPproc) {
   2670                 CDBG("%s: couldn't find need_metadata for this metadata", __func__);
   2671                 // Return metadata buffer
   2672                 if (free_and_bufdone_meta_buf) {
   2673                     mMetadataChannel->bufDone(metadata_buf);
   2674                     free(metadata_buf);
   2675                 }
   2676             }
   2677         }
   2678         if (!result.result) {
   2679             ALOGE("%s: metadata is NULL", __func__);
   2680         }
   2681         result.frame_number = i->frame_number;
   2682         result.input_buffer = i->input_buffer;
   2683         result.num_output_buffers = 0;
   2684         result.output_buffers = NULL;
   2685         for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
   2686                     j != i->buffers.end(); j++) {
   2687             if (j->buffer) {
   2688                 result.num_output_buffers++;
   2689             }
   2690         }
   2691 
   2692         if (result.num_output_buffers > 0) {
   2693             camera3_stream_buffer_t *result_buffers =
   2694                 new camera3_stream_buffer_t[result.num_output_buffers];
   2695             if (!result_buffers) {
   2696                 ALOGE("%s: Fatal error: out of memory", __func__);
   2697             }
   2698             size_t result_buffers_idx = 0;
   2699             for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
   2700                     j != i->buffers.end(); j++) {
   2701                 if (j->buffer) {
   2702                     for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
   2703                             m != mPendingFrameDropList.end(); m++) {
   2704                         QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
   2705                         uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
   2706                         if((m->stream_ID == streamID) && (m->frame_number==frame_number)) {
   2707                             j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
   2708                             ALOGW("%s: Stream STATUS_ERROR frame_number=%u, streamID=%u",
   2709                                   __func__, frame_number, streamID);
   2710                             m = mPendingFrameDropList.erase(m);
   2711                             break;
   2712                         }
   2713                     }
   2714 
   2715                     for (List<PendingBufferInfo>::iterator k =
   2716                       mPendingBuffersMap.mPendingBufferList.begin();
   2717                       k != mPendingBuffersMap.mPendingBufferList.end(); k++) {
   2718                       if (k->buffer == j->buffer->buffer) {
   2719                         CDBG("%s: Found buffer %p in pending buffer List "
   2720                               "for frame %u, Take it out!!", __func__,
   2721                                k->buffer, k->frame_number);
   2722                         mPendingBuffersMap.num_buffers--;
   2723                         k = mPendingBuffersMap.mPendingBufferList.erase(k);
   2724                         break;
   2725                       }
   2726                     }
   2727 
   2728                     result_buffers[result_buffers_idx++] = *(j->buffer);
   2729                     free(j->buffer);
   2730                     j->buffer = NULL;
   2731                 }
   2732             }
   2733             result.output_buffers = result_buffers;
   2734             mCallbackOps->process_capture_result(mCallbackOps, &result);
   2735             CDBG("%s %d: meta frame_number = %u, capture_time = %lld, partial:%d",
   2736                     __func__, __LINE__, result.frame_number, i->timestamp, result.partial_result);
   2737             free_camera_metadata((camera_metadata_t *)result.result);
   2738             delete[] result_buffers;
   2739         } else {
   2740             mCallbackOps->process_capture_result(mCallbackOps, &result);
   2741             CDBG("%s %d: meta frame_number = %u, capture_time = %lld, partial:%d",
   2742                         __func__, __LINE__, result.frame_number, i->timestamp, result.partial_result);
   2743             free_camera_metadata((camera_metadata_t *)result.result);
   2744         }
   2745 
   2746         if (i->partial_result_cnt == PARTIAL_RESULT_COUNT) {
   2747             mPendingLiveRequest--;
   2748             i = erasePendingRequest(i);
   2749         } else {
   2750             CDBG("%s, keep in list, frame number:%d, partial result:%d",
   2751                     __func__, i->frame_number, i->partial_result_cnt);
   2752             i->pending_extra_result = true;
   2753             i++;
   2754         }
   2755 
   2756         if (!mPendingReprocessResultList.empty()) {
   2757             handlePendingReprocResults(frame_number + 1);
   2758         }
   2759 
   2760     }
   2761 
   2762 done_metadata:
   2763     for (pendingRequestIterator i = mPendingRequestsList.begin();
   2764             i != mPendingRequestsList.end() ;i++) {
   2765         i->pipeline_depth++;
   2766     }
   2767     CDBG("%s: mPendingLiveRequest = %d", __func__, mPendingLiveRequest);
   2768     unblockRequestIfNecessary();
   2769 
   2770 }
   2771 
   2772 /*===========================================================================
   2773  * FUNCTION   : hdrPlusPerfLock
   2774  *
   2775  * DESCRIPTION: perf lock for HDR+ using custom intent
   2776  *
   2777  * PARAMETERS : @metadata_buf: Metadata super_buf pointer
   2778  *
   2779  * RETURN     : None
   2780  *
   2781  *==========================================================================*/
   2782 void QCamera3HardwareInterface::hdrPlusPerfLock(
   2783         mm_camera_super_buf_t *metadata_buf)
   2784 {
   2785     if (NULL == metadata_buf) {
   2786         ALOGE("%s: metadata_buf is NULL", __func__);
   2787         return;
   2788     }
   2789     metadata_buffer_t *metadata =
   2790             (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
   2791     int32_t *p_frame_number_valid =
   2792             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
   2793     uint32_t *p_frame_number =
   2794             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
   2795 
   2796     //acquire perf lock for 5 sec after the last HDR frame is captured
   2797     if (*p_frame_number_valid) {
   2798         if (mLastCustIntentFrmNum == (int32_t)*p_frame_number) {
   2799             m_perfLock.lock_acq_timed(HDR_PLUS_PERF_TIME_OUT);
   2800         }
   2801     }
   2802 
   2803     //release lock after perf lock timer is expired. If lock is already released,
   2804     //isTimerReset returns false
   2805     if (m_perfLock.isTimerReset()) {
   2806         mLastCustIntentFrmNum = -1;
   2807         m_perfLock.lock_rel_timed();
   2808     }
   2809 }
   2810 
   2811 /*===========================================================================
   2812  * FUNCTION   : handleInputBufferWithLock
   2813  *
   2814  * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
   2815  *
   2816  * PARAMETERS : @frame_number: frame number of the input buffer
   2817  *
 * RETURN     : None
   2819  *
   2820  *==========================================================================*/
void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
{
    ATRACE_CALL();
    // Locate the pending request that matches this frame number. Caller holds
    // mMutex (see function header).
    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i != mPendingRequestsList.end() && i->input_buffer) {
        //found the right request
        // Send the shutter notification at most once per request.
        if (!i->shutter_notified) {
            CameraMetadata settings;
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            // Default to "now"; prefer ANDROID_SENSOR_TIMESTAMP from the
            // request settings when it is available.
            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
            if(i->settings) {
                settings = i->settings;
                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
                } else {
                    ALOGE("%s: No timestamp in input settings! Using current one.",
                            __func__);
                }
            } else {
                ALOGE("%s: Input settings missing!", __func__);
            }

            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
            mCallbackOps->notify(mCallbackOps, &notify_msg);
            i->shutter_notified = true;
            CDBG("%s: Input request metadata notify frame_number = %u, capture_time = %llu",
                       __func__, i->frame_number, notify_msg.message.shutter.timestamp);
        }

        // Wait on and close the input buffer's release fence before returning
        // the buffer to the framework.
        if (i->input_buffer->release_fence != -1) {
           int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
           close(i->input_buffer->release_fence);
           if (rc != OK) {
               ALOGE("%s: input buffer sync wait failed %d", __func__, rc);
           }
        }

        // Deliver the final result for this reprocess request: settings plus
        // the input buffer, no output buffers, all partial results accounted.
        camera3_capture_result result;
        memset(&result, 0, sizeof(camera3_capture_result));
        result.frame_number = frame_number;
        result.result = i->settings;
        result.input_buffer = i->input_buffer;
        result.partial_result = PARTIAL_RESULT_COUNT;

        mCallbackOps->process_capture_result(mCallbackOps, &result);
        CDBG("%s: Input request metadata and input buffer frame_number = %u",
                       __func__, i->frame_number);
        // Request fully reported — drop it from the pending list.
        i = erasePendingRequest(i);
    } else {
        ALOGE("%s: Could not find input request for frame number %d", __func__, frame_number);
    }
}
   2879 
   2880 bool QCamera3HardwareInterface::getBlackLevelRegion(int (&opticalBlackRegions)[4])
   2881 {
   2882     if (gCamCapability[mCameraId]->optical_black_region_count > 0) {
   2883         /*just calculate one region black level and send to fwk*/
   2884         for (size_t i = 0; i <  4; i++) {
   2885             opticalBlackRegions[i] = gCamCapability[mCameraId]->optical_black_regions[i];
   2886         }
   2887         return TRUE;
   2888     }
   2889 
   2890     return FALSE;
   2891 }
   2892 
// Thread-safe entry point: acquires mMutex and delegates to
// sendDynamicBlackLevelWithLock, which expects the lock to be held.
void QCamera3HardwareInterface::sendDynamicBlackLevel(float blacklevel[4], uint32_t frame_number)
{
    CDBG("%s, E.\n", __func__);
    pthread_mutex_lock(&mMutex);
    sendDynamicBlackLevelWithLock(blacklevel, frame_number);
    pthread_mutex_unlock(&mMutex);
    CDBG("%s, X.\n", __func__);
}
   2901 
// Sends the dynamic black level for `frame_number` to the framework as an
// extra partial result. Caller must hold mMutex (the "WithLock" convention
// used throughout this file).
void QCamera3HardwareInterface::sendDynamicBlackLevelWithLock(float blacklevel[4], uint32_t frame_number)
{
    CDBG("%s, E. frame_number:%d\n", __func__, frame_number);

    // The frame must still be pending and must have been flagged as needing a
    // dynamic black level; otherwise there is nothing to report.
    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if ((i == mPendingRequestsList.end()) || !i->need_dynamic_blklvl) {
        ALOGE("%s, error: invalid frame number.", __func__);
        return;
    }

    // This callback counts as one more partial result for the request.
    i->partial_result_cnt++;

    CameraMetadata camMetadata;
    int64_t fwk_frame_number = (int64_t)frame_number;
    camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);

    // update dynamic black level here
    camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, blacklevel, 4);

    // Metadata-only partial result: no output buffers attached.
    camera3_capture_result_t result;
    memset(&result, 0, sizeof(camera3_capture_result_t));
    result.frame_number = frame_number;
    result.num_output_buffers = 0;
    result.result = camMetadata.release();
    result.partial_result = i->partial_result_cnt;

    CDBG("%s, partial result:%d, frame_number:%d, pending extra result:%d\n",
            __func__, result.partial_result, frame_number, i->pending_extra_result);
    mCallbackOps->process_capture_result(mCallbackOps, &result);
    free_camera_metadata((camera_metadata_t *)result.result);

    if (i->partial_result_cnt == PARTIAL_RESULT_COUNT) {
        CDBG("%s, remove cur request from pending list.", __func__);
        mPendingLiveRequest--;
        i = erasePendingRequest(i);

        // traverse the remaining pending list to see whether need to send cached ones..
        // Requests queued behind a raw request may have been held back one
        // partial result; flush them now that the raw request completed.
        while (i != mPendingRequestsList.end()) {
            CDBG("%s, frame number:%d, partial_result:%d, pending extra result:%d",
                    __func__, i->frame_number, i->partial_result_cnt,
                    i->pending_extra_result);

            if ((i->partial_result_cnt == PARTIAL_RESULT_COUNT - 1)
                    && (i->need_dynamic_blklvl == false) /* in case two consecutive raw requests */) {
                // send out final result, and remove it from pending list.
                CameraMetadata camMetadata;
                int64_t fwk_frame_number = (int64_t)i->frame_number;
                camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);

                memset(&result, 0, sizeof(camera3_capture_result_t));
                result.frame_number = i->frame_number;
                result.num_output_buffers = 0;
                result.result = camMetadata.release();
                result.partial_result = i->partial_result_cnt + 1;

                mCallbackOps->process_capture_result(mCallbackOps, &result);
                free_camera_metadata((camera_metadata_t *)result.result);

                mPendingLiveRequest--;
                i = erasePendingRequest(i);
                CDBG("%s, mPendingLiveRequest:%d, pending list size:%d",
                        __func__, mPendingLiveRequest, mPendingRequestsList.size());
            } else {
                // Stop at the first request that is not ready to finalize:
                // the list is ordered, so later entries cannot be ready either.
                break;
            }
        }
    }

    // A request slot may have freed up — wake a blocked process_capture_request.
    unblockRequestIfNecessary();
    CDBG("%s, X.mPendingLiveRequest = %d\n", __func__, mPendingLiveRequest);
}
   2976 
   2977 
   2978 /*===========================================================================
   2979  * FUNCTION   : handleBufferWithLock
   2980  *
   2981  * DESCRIPTION: Handles image buffer callback with mMutex lock held.
   2982  *
   2983  * PARAMETERS : @buffer: image buffer for the callback
   2984  *              @frame_number: frame number of the image buffer
   2985  *
 * RETURN     : None
   2987  *
   2988  *==========================================================================*/
void QCamera3HardwareInterface::handleBufferWithLock(
    camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    ATRACE_CALL();
    // If the frame number doesn't exist in the pending request list,
    // directly send the buffer to the frameworks, and update pending buffers map
    // Otherwise, book-keep the buffer.
    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i == mPendingRequestsList.end() || i->pending_extra_result == true) {
        if (i != mPendingRequestsList.end()) {
            // though the pendingRequestInfo is still in the list,
            // still send the buffer directly, as the pending_extra_result is true,
            // and we've already received meta for this frame number.
            CDBG("%s, send the buffer directly, frame number:%d",
                    __func__, i->frame_number);
        }
        // Verify all pending requests frame_numbers are greater
        for (pendingRequestIterator j = mPendingRequestsList.begin();
                j != mPendingRequestsList.end(); j++) {
            if ((j->frame_number < frame_number) && !(j->input_buffer)) {
                ALOGE("%s: Error: pending live frame number %d is smaller than %d",
                        __func__, j->frame_number, frame_number);
            }
        }
        // Buffer-only result: no metadata, no partial result.
        camera3_capture_result_t result;
        memset(&result, 0, sizeof(camera3_capture_result_t));
        result.result = NULL;
        result.frame_number = frame_number;
        result.num_output_buffers = 1;
        result.partial_result = 0;
        // If this (stream, frame) pair was flagged as dropped, mark the buffer
        // in error before returning it, and consume the drop record.
        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                m != mPendingFrameDropList.end(); m++) {
            QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
            if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
                buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                CDBG("%s: Stream STATUS_ERROR frame_number=%d, streamID=%d",
                        __func__, frame_number, streamID);
                m = mPendingFrameDropList.erase(m);
                break;
            }
        }
        result.output_buffers = buffer;
        CDBG_HIGH("%s: result frame_number = %d, buffer = %p",
                __func__, frame_number, buffer->buffer);

        // Remove the buffer from the pending-buffers bookkeeping map.
        for (List<PendingBufferInfo>::iterator k =
                mPendingBuffersMap.mPendingBufferList.begin();
                k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
            if (k->buffer == buffer->buffer) {
                CDBG("%s: Found Frame buffer, take it out from list",
                        __func__);

                mPendingBuffersMap.num_buffers--;
                k = mPendingBuffersMap.mPendingBufferList.erase(k);
                break;
            }
        }
        CDBG("%s: mPendingBuffersMap.num_buffers = %d",
            __func__, mPendingBuffersMap.num_buffers);

        mCallbackOps->process_capture_result(mCallbackOps, &result);
    } else {
        if (i->input_buffer) {
            // Reprocess request: build the shutter notification from the
            // request settings' timestamp when present, else from "now".
            CameraMetadata settings;
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
            if(i->settings) {
                settings = i->settings;
                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
                } else {
                    ALOGE("%s: No timestamp in input settings! Using current one.",
                            __func__);
                }
            } else {
                ALOGE("%s: Input settings missing!", __func__);
            }

            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;

            // Wait on and close the input buffer's release fence before use.
            if (i->input_buffer->release_fence != -1) {
               int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
               close(i->input_buffer->release_fence);
               if (rc != OK) {
               ALOGE("%s: input buffer sync wait failed %d", __func__, rc);
               }
            }

            // Remove the output buffer from the pending-buffers bookkeeping map.
            for (List<PendingBufferInfo>::iterator k =
                    mPendingBuffersMap.mPendingBufferList.begin();
                    k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
                if (k->buffer == buffer->buffer) {
                    CDBG("%s: Found Frame buffer, take it out from list",
                            __func__);

                    mPendingBuffersMap.num_buffers--;
                    k = mPendingBuffersMap.mPendingBufferList.erase(k);
                    break;
                }
            }
            CDBG("%s: mPendingBuffersMap.num_buffers = %d",
                __func__, mPendingBuffersMap.num_buffers);

            // Only notify immediately if no older live request is still
            // pending; otherwise results would go out of order.
            bool notifyNow = true;
            for (pendingRequestIterator j = mPendingRequestsList.begin();
                    j != mPendingRequestsList.end(); j++) {
                if (j->frame_number < frame_number) {
                    notifyNow = false;
                    break;
                }
            }

            if (notifyNow) {
                camera3_capture_result result;
                memset(&result, 0, sizeof(camera3_capture_result));
                result.frame_number = frame_number;
                result.result = i->settings;
                result.input_buffer = i->input_buffer;
                result.num_output_buffers = 1;
                result.output_buffers = buffer;
                result.partial_result = PARTIAL_RESULT_COUNT;

                // Shutter notify must precede the capture result.
                mCallbackOps->notify(mCallbackOps, &notify_msg);
                mCallbackOps->process_capture_result(mCallbackOps, &result);
                CDBG("%s: Notify reprocess now %d!", __func__, frame_number);
                i = erasePendingRequest(i);
            } else {
                // Cache reprocess result for later
                // (delivered by handlePendingReprocResults once older frames
                // have been reported).
                PendingReprocessResult pendingResult;
                memset(&pendingResult, 0, sizeof(PendingReprocessResult));
                pendingResult.notify_msg = notify_msg;
                pendingResult.buffer = *buffer;
                pendingResult.frame_number = frame_number;
                mPendingReprocessResultList.push_back(pendingResult);
                CDBG("%s: Cache reprocess result %d!", __func__, frame_number);
            }
        } else {
            // Metadata for this frame hasn't arrived yet: cache a copy of the
            // buffer on the matching stream slot of the pending request so it
            // can be sent together with the metadata later.
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                j != i->buffers.end(); j++) {
                if (j->stream == buffer->stream) {
                    if (j->buffer != NULL) {
                        ALOGE("%s: Error: buffer is already set", __func__);
                    } else {
                        j->buffer = (camera3_stream_buffer_t *)malloc(
                            sizeof(camera3_stream_buffer_t));
                        *(j->buffer) = *buffer;
                        CDBG_HIGH("%s: cache buffer %p at result frame_number %d",
                            __func__, buffer, frame_number);
                    }
                }
            }
        }
    }
}
   3150 
   3151 /*===========================================================================
   3152  * FUNCTION   : unblockRequestIfNecessary
   3153  *
   3154  * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
   3155  *              that mMutex is held when this function is called.
   3156  *
   3157  * PARAMETERS :
   3158  *
 * RETURN     : None
   3160  *
   3161  *==========================================================================*/
void QCamera3HardwareInterface::unblockRequestIfNecessary()
{
   // Unblock process_capture_request
   // Caller holds mMutex (see function header). The waiter re-checks its
   // predicate after waking, so signaling when no one waits is harmless.
   pthread_cond_signal(&mRequestCond);
}
   3167 
   3168 
   3169 /*===========================================================================
   3170  * FUNCTION   : processCaptureRequest
   3171  *
   3172  * DESCRIPTION: process a capture request from camera service
   3173  *
   3174  * PARAMETERS :
   3175  *   @request : request from framework to process
   3176  *
   3177  * RETURN     :
   3178  *
   3179  *==========================================================================*/
   3180 int QCamera3HardwareInterface::processCaptureRequest(
   3181                     camera3_capture_request_t *request)
   3182 {
   3183     ATRACE_CALL();
   3184     int rc = NO_ERROR;
   3185     int32_t request_id;
   3186     CameraMetadata meta;
   3187     uint32_t minInFlightRequests = MIN_INFLIGHT_REQUESTS;
   3188     uint32_t maxInFlightRequests = MAX_INFLIGHT_REQUESTS;
   3189     bool isVidBufRequested = false;
   3190     camera3_stream_buffer_t *pInputBuffer = NULL;
   3191 
   3192     pthread_mutex_lock(&mMutex);
   3193 
   3194     rc = validateCaptureRequest(request);
   3195     if (rc != NO_ERROR) {
   3196         ALOGE("%s: incoming request is not valid", __func__);
   3197         pthread_mutex_unlock(&mMutex);
   3198         return rc;
   3199     }
   3200 
   3201     meta = request->settings;
   3202 
   3203     // For first capture request, send capture intent, and
   3204     // stream on all streams
   3205     if (mFirstRequest) {
   3206         // send an unconfigure to the backend so that the isp
   3207         // resources are deallocated
   3208         if (!mFirstConfiguration) {
   3209             cam_stream_size_info_t stream_config_info;
   3210             int32_t hal_version = CAM_HAL_V3;
   3211             memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
   3212             stream_config_info.buffer_info.min_buffers =
   3213                     MIN_INFLIGHT_REQUESTS;
   3214             stream_config_info.buffer_info.max_buffers =
   3215                     m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
   3216             clear_metadata_buffer(mParameters);
   3217             ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
   3218                     CAM_INTF_PARM_HAL_VERSION, hal_version);
   3219             ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
   3220                     CAM_INTF_META_STREAM_INFO, stream_config_info);
   3221             rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
   3222                     mParameters);
   3223             if (rc < 0) {
   3224                 ALOGE("%s: set_parms for unconfigure failed", __func__);
   3225                 pthread_mutex_unlock(&mMutex);
   3226                 return rc;
   3227             }
   3228         }
   3229         m_perfLock.lock_acq();
   3230         /* get eis information for stream configuration */
   3231         cam_is_type_t is_type;
   3232         char is_type_value[PROPERTY_VALUE_MAX];
   3233         property_get("persist.camera.is_type", is_type_value, "0");
   3234         is_type = static_cast<cam_is_type_t>(atoi(is_type_value));
   3235 
   3236         if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
   3237             int32_t hal_version = CAM_HAL_V3;
   3238             uint8_t captureIntent =
   3239                 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
   3240             mCaptureIntent = captureIntent;
   3241             clear_metadata_buffer(mParameters);
   3242             ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
   3243             ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
   3244         }
   3245 
   3246         //If EIS is enabled, turn it on for video
   3247         bool setEis = m_bEisEnable && m_bEisSupportedSize;
   3248         int32_t vsMode;
   3249         vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
   3250         if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
   3251             rc = BAD_VALUE;
   3252         }
   3253 
   3254         //IS type will be 0 unless EIS is supported. If EIS is supported
   3255         //it could either be 1 or 4 depending on the stream and video size
   3256         if (setEis) {
   3257             if (!m_bEisSupportedSize) {
   3258                 is_type = IS_TYPE_DIS;
   3259             } else {
   3260                 is_type = IS_TYPE_EIS_2_0;
   3261             }
   3262             mStreamConfigInfo.is_type = is_type;
   3263         } else {
   3264             mStreamConfigInfo.is_type = IS_TYPE_NONE;
   3265         }
   3266 
   3267         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
   3268                 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
   3269         int32_t tintless_value = 1;
   3270         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
   3271                 CAM_INTF_PARM_TINTLESS, tintless_value);
   3272         //Disable CDS for HFR mode and if mPprocBypass = true.
   3273         //CDS is a session parameter in the backend/ISP, so need to be set/reset
   3274         //after every configure_stream
   3275         if((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
   3276                 (m_bIsVideo)) {
   3277             int32_t cds = CAM_CDS_MODE_OFF;
   3278             if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
   3279                     CAM_INTF_PARM_CDS_MODE, cds))
   3280                 ALOGE("%s: Failed to disable CDS for HFR mode", __func__);
   3281 
   3282         }
   3283         setMobicat();
   3284 
   3285         /* Set fps and hfr mode while sending meta stream info so that sensor
   3286          * can configure appropriate streaming mode */
   3287         mHFRVideoFps = DEFAULT_VIDEO_FPS;
   3288         if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
   3289             rc = setHalFpsRange(meta, mParameters);
   3290             if (rc != NO_ERROR) {
   3291                 ALOGE("%s: setHalFpsRange failed", __func__);
   3292             }
   3293         }
   3294         if (meta.exists(ANDROID_CONTROL_MODE)) {
   3295             uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
   3296             rc = extractSceneMode(meta, metaMode, mParameters);
   3297             if (rc != NO_ERROR) {
   3298                 ALOGE("%s: extractSceneMode failed", __func__);
   3299             }
   3300         }
   3301 
   3302         //TODO: validate the arguments, HSV scenemode should have only the
   3303         //advertised fps ranges
   3304 
   3305         /*set the capture intent, hal version, tintless, stream info,
   3306          *and disenable parameters to the backend*/
   3307         CDBG("%s: set_parms META_STREAM_INFO ", __func__ );
   3308         mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
   3309                     mParameters);
   3310 
   3311         cam_dimension_t sensor_dim;
   3312         memset(&sensor_dim, 0, sizeof(sensor_dim));
   3313         rc = getSensorOutputSize(sensor_dim);
   3314         if (rc != NO_ERROR) {
   3315             ALOGE("%s: Failed to get sensor output size", __func__);
   3316             pthread_mutex_unlock(&mMutex);
   3317             goto error_exit;
   3318         }
   3319 
   3320         mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
   3321                 gCamCapability[mCameraId]->active_array_size.height,
   3322                 sensor_dim.width, sensor_dim.height);
   3323 
   3324         /* Set batchmode before initializing channel. Since registerBuffer
   3325          * internally initializes some of the channels, better set batchmode
   3326          * even before first register buffer */
   3327         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   3328             it != mStreamInfo.end(); it++) {
   3329             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
   3330             if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
   3331                     && mBatchSize) {
   3332                 rc = channel->setBatchSize(mBatchSize);
   3333                 //Disable per frame map unmap for HFR/batchmode case
   3334                 rc |= channel->setPerFrameMapUnmap(false);
   3335                 if (NO_ERROR != rc) {
   3336                     ALOGE("%s : Channel init failed %d", __func__, rc);
   3337                     pthread_mutex_unlock(&mMutex);
   3338                     goto error_exit;
   3339                 }
   3340             }
   3341         }
   3342 
   3343         //First initialize all streams
   3344         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   3345             it != mStreamInfo.end(); it++) {
   3346             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
   3347             if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
   3348                ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
   3349                setEis)
   3350                 rc = channel->initialize(is_type);
   3351             else {
   3352                 rc = channel->initialize(IS_TYPE_NONE);
   3353             }
   3354             if (NO_ERROR != rc) {
   3355                 ALOGE("%s : Channel initialization failed %d", __func__, rc);
   3356                 pthread_mutex_unlock(&mMutex);
   3357                 goto error_exit;
   3358             }
   3359         }
   3360 
   3361         if (mRawDumpChannel) {
   3362             rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
   3363             if (rc != NO_ERROR) {
   3364                 ALOGE("%s: Error: Raw Dump Channel init failed", __func__);
   3365                 pthread_mutex_unlock(&mMutex);
   3366                 goto error_exit;
   3367             }
   3368         }
   3369         if (mSupportChannel) {
   3370             rc = mSupportChannel->initialize(IS_TYPE_NONE);
   3371             if (rc < 0) {
   3372                 ALOGE("%s: Support channel initialization failed", __func__);
   3373                 pthread_mutex_unlock(&mMutex);
   3374                 goto error_exit;
   3375             }
   3376         }
   3377         if (mAnalysisChannel) {
   3378             rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
   3379             if (rc < 0) {
   3380                 ALOGE("%s: Analysis channel initialization failed", __func__);
   3381                 pthread_mutex_unlock(&mMutex);
   3382                 goto error_exit;
   3383             }
   3384         }
   3385         if (mDummyBatchChannel) {
   3386             rc = mDummyBatchChannel->setBatchSize(mBatchSize);
   3387             if (rc < 0) {
   3388                 ALOGE("%s: mDummyBatchChannel setBatchSize failed", __func__);
   3389                 pthread_mutex_unlock(&mMutex);
   3390                 goto error_exit;
   3391             }
   3392             rc = mDummyBatchChannel->initialize(is_type);
   3393             if (rc < 0) {
   3394                 ALOGE("%s: mDummyBatchChannel initialization failed", __func__);
   3395                 pthread_mutex_unlock(&mMutex);
   3396                 goto error_exit;
   3397             }
   3398         }
   3399 
   3400         // Set bundle info
   3401         rc = setBundleInfo();
   3402         if (rc < 0) {
   3403             ALOGE("%s: setBundleInfo failed %d", __func__, rc);
   3404             pthread_mutex_unlock(&mMutex);
   3405             goto error_exit;
   3406         }
   3407 
   3408         //Then start them.
   3409         CDBG_HIGH("%s: Start META Channel", __func__);
   3410         rc = mMetadataChannel->start();
   3411         if (rc < 0) {
   3412             ALOGE("%s: META channel start failed", __func__);
   3413             pthread_mutex_unlock(&mMutex);
   3414             goto error_exit;
   3415         }
   3416 
   3417         if (mAnalysisChannel) {
   3418             rc = mAnalysisChannel->start();
   3419             if (rc < 0) {
   3420                 ALOGE("%s: Analysis channel start failed", __func__);
   3421                 mMetadataChannel->stop();
   3422                 pthread_mutex_unlock(&mMutex);
   3423                 goto error_exit;
   3424             }
   3425         }
   3426 
   3427         if (mSupportChannel) {
   3428             rc = mSupportChannel->start();
   3429             if (rc < 0) {
   3430                 ALOGE("%s: Support channel start failed", __func__);
   3431                 mMetadataChannel->stop();
   3432                 /* Although support and analysis are mutually exclusive today
   3433                    adding it in anycase for future proofing */
   3434                 if (mAnalysisChannel) {
   3435                     mAnalysisChannel->stop();
   3436                 }
   3437                 pthread_mutex_unlock(&mMutex);
   3438                 goto error_exit;
   3439             }
   3440         }
   3441         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   3442             it != mStreamInfo.end(); it++) {
   3443             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
   3444             CDBG_HIGH("%s: Start Processing Channel mask=%d",
   3445                     __func__, channel->getStreamTypeMask());
   3446             rc = channel->start();
   3447             if (rc < 0) {
   3448                 ALOGE("%s: channel start failed", __func__);
   3449                 pthread_mutex_unlock(&mMutex);
   3450                 goto error_exit;
   3451             }
   3452         }
   3453 
   3454         if (mRawDumpChannel) {
   3455             CDBG("%s: Starting raw dump stream",__func__);
   3456             rc = mRawDumpChannel->start();
   3457             if (rc != NO_ERROR) {
   3458                 ALOGE("%s: Error Starting Raw Dump Channel", __func__);
   3459                 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   3460                       it != mStreamInfo.end(); it++) {
   3461                     QCamera3Channel *channel =
   3462                         (QCamera3Channel *)(*it)->stream->priv;
   3463                     ALOGE("%s: Stopping Processing Channel mask=%d", __func__,
   3464                         channel->getStreamTypeMask());
   3465                     channel->stop();
   3466                 }
   3467                 if (mSupportChannel)
   3468                     mSupportChannel->stop();
   3469                 if (mAnalysisChannel) {
   3470                     mAnalysisChannel->stop();
   3471                 }
   3472                 mMetadataChannel->stop();
   3473                 pthread_mutex_unlock(&mMutex);
   3474                 goto error_exit;
   3475             }
   3476         }
   3477 
   3478         if (mChannelHandle) {
   3479 
   3480             rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
   3481                     mChannelHandle);
   3482             if (rc != NO_ERROR) {
   3483                 ALOGE("%s: start_channel failed %d", __func__, rc);
   3484                 pthread_mutex_unlock(&mMutex);
   3485                 goto error_exit;
   3486             }
   3487         }
   3488 
   3489 
   3490         goto no_error;
   3491 error_exit:
   3492         m_perfLock.lock_rel();
   3493         return rc;
   3494 no_error:
   3495         m_perfLock.lock_rel();
   3496 
   3497         mWokenUpByDaemon = false;
   3498         mPendingLiveRequest = 0;
   3499         mFirstConfiguration = false;
   3500         enablePowerHint();
   3501     }
   3502 
   3503     uint32_t frameNumber = request->frame_number;
   3504     cam_stream_ID_t streamID;
   3505 
   3506     if (meta.exists(ANDROID_REQUEST_ID)) {
   3507         request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
   3508         mCurrentRequestId = request_id;
   3509         CDBG("%s: Received request with id: %d",__func__, request_id);
   3510     } else if (mFirstRequest || mCurrentRequestId == -1){
   3511         ALOGE("%s: Unable to find request id field, \
   3512                 & no previous id available", __func__);
   3513         pthread_mutex_unlock(&mMutex);
   3514         return NAME_NOT_FOUND;
   3515     } else {
   3516         CDBG("%s: Re-using old request id", __func__);
   3517         request_id = mCurrentRequestId;
   3518     }
   3519 
   3520     CDBG_HIGH("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
   3521                                     __func__, __LINE__,
   3522                                     request->num_output_buffers,
   3523                                     request->input_buffer,
   3524                                     frameNumber);
   3525     // Acquire all request buffers first
   3526     streamID.num_streams = 0;
   3527     int blob_request = 0;
   3528     uint32_t snapshotStreamId = 0;
   3529     for (size_t i = 0; i < request->num_output_buffers; i++) {
   3530         const camera3_stream_buffer_t& output = request->output_buffers[i];
   3531         QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
   3532 
   3533         if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
   3534             //Call function to store local copy of jpeg data for encode params.
   3535             blob_request = 1;
   3536             snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
   3537         }
   3538 
   3539         if (output.acquire_fence != -1) {
   3540            rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
   3541            close(output.acquire_fence);
   3542            if (rc != OK) {
   3543               ALOGE("%s: sync wait failed %d", __func__, rc);
   3544               pthread_mutex_unlock(&mMutex);
   3545               return rc;
   3546            }
   3547         }
   3548 
   3549         streamID.streamID[streamID.num_streams] =
   3550             channel->getStreamID(channel->getStreamTypeMask());
   3551         streamID.num_streams++;
   3552 
   3553         if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
   3554             isVidBufRequested = true;
   3555         }
   3556     }
   3557 
   3558     if (blob_request && mRawDumpChannel) {
   3559         CDBG("%s: Trigger Raw based on blob request if Raw dump is enabled", __func__);
   3560         streamID.streamID[streamID.num_streams] =
   3561             mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
   3562         streamID.num_streams++;
   3563     }
   3564 
   3565     if(request->input_buffer == NULL) {
   3566         /* Parse the settings:
   3567          * - For every request in NORMAL MODE
   3568          * - For every request in HFR mode during preview only case
   3569          * - For first request of every batch in HFR mode during video
   3570          * recording. In batchmode the same settings except frame number is
   3571          * repeated in each request of the batch.
   3572          */
   3573         if (!mBatchSize ||
   3574            (mBatchSize && !isVidBufRequested) ||
   3575            (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
   3576             rc = setFrameParameters(request, streamID, blob_request, snapshotStreamId);
   3577             if (rc < 0) {
   3578                 ALOGE("%s: fail to set frame parameters", __func__);
   3579                 pthread_mutex_unlock(&mMutex);
   3580                 return rc;
   3581             }
   3582         }
   3583         /* For batchMode HFR, setFrameParameters is not called for every
   3584          * request. But only frame number of the latest request is parsed.
   3585          * Keep track of first and last frame numbers in a batch so that
   3586          * metadata for the frame numbers of batch can be duplicated in
   3587          * handleBatchMetadta */
   3588         if (mBatchSize) {
   3589             if (!mToBeQueuedVidBufs) {
   3590                 //start of the batch
   3591                 mFirstFrameNumberInBatch = request->frame_number;
   3592             }
   3593             if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
   3594                 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
   3595                 ALOGE("%s: Failed to set the frame number in the parameters", __func__);
   3596                 return BAD_VALUE;
   3597             }
   3598         }
   3599         if (mNeedSensorRestart) {
   3600             /* Unlock the mutex as restartSensor waits on the channels to be
   3601              * stopped, which in turn calls stream callback functions -
   3602              * handleBufferWithLock and handleMetadataWithLock */
   3603             pthread_mutex_unlock(&mMutex);
   3604             rc = dynamicUpdateMetaStreamInfo();
   3605             if (rc != NO_ERROR) {
   3606                 ALOGE("%s: Restarting the sensor failed", __func__);
   3607                 return BAD_VALUE;
   3608             }
   3609             mNeedSensorRestart = false;
   3610             pthread_mutex_lock(&mMutex);
   3611         }
   3612     } else {
   3613 
   3614         if (request->input_buffer->acquire_fence != -1) {
   3615            rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
   3616            close(request->input_buffer->acquire_fence);
   3617            if (rc != OK) {
   3618               ALOGE("%s: input buffer sync wait failed %d", __func__, rc);
   3619               pthread_mutex_unlock(&mMutex);
   3620               return rc;
   3621            }
   3622         }
   3623     }
   3624 
   3625     if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
   3626         mLastCustIntentFrmNum = frameNumber;
   3627     }
   3628     /* Update pending request list and pending buffers map */
   3629     PendingRequestInfo pendingRequest;
   3630     pendingRequestIterator latestRequest;
   3631     pendingRequest.frame_number = frameNumber;
   3632     pendingRequest.num_buffers = request->num_output_buffers;
   3633     pendingRequest.request_id = request_id;
   3634     pendingRequest.blob_request = blob_request;
   3635     pendingRequest.timestamp = 0;
   3636     pendingRequest.bUrgentReceived = 0;
   3637     if (request->input_buffer) {
   3638         pendingRequest.input_buffer =
   3639                 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
   3640         *(pendingRequest.input_buffer) = *(request->input_buffer);
   3641         pInputBuffer = pendingRequest.input_buffer;
   3642     } else {
   3643        pendingRequest.input_buffer = NULL;
   3644        pInputBuffer = NULL;
   3645     }
   3646 
   3647     pendingRequest.pipeline_depth = 0;
   3648     pendingRequest.partial_result_cnt = 0;
   3649     extractJpegMetadata(mCurJpegMeta, request);
   3650     pendingRequest.jpegMetadata = mCurJpegMeta;
   3651     pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
   3652     pendingRequest.shutter_notified = false;
   3653     pendingRequest.need_dynamic_blklvl = false;
   3654     pendingRequest.pending_extra_result = false;
   3655 
   3656     //extract capture intent
   3657     if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
   3658         mCaptureIntent =
   3659                 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
   3660     }
   3661     if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
   3662         mHybridAeEnable =
   3663                 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
   3664     }
   3665     pendingRequest.capture_intent = mCaptureIntent;
   3666     pendingRequest.hybrid_ae_enable = mHybridAeEnable;
   3667 
   3668     for (size_t i = 0; i < request->num_output_buffers; i++) {
   3669         RequestedBufferInfo requestedBuf;
   3670         memset(&requestedBuf, 0, sizeof(requestedBuf));
   3671         requestedBuf.stream = request->output_buffers[i].stream;
   3672         requestedBuf.buffer = NULL;
   3673         pendingRequest.buffers.push_back(requestedBuf);
   3674 
   3675         // Add to buffer handle the pending buffers list
   3676         PendingBufferInfo bufferInfo;
   3677         bufferInfo.frame_number = frameNumber;
   3678         bufferInfo.buffer = request->output_buffers[i].buffer;
   3679         bufferInfo.stream = request->output_buffers[i].stream;
   3680         mPendingBuffersMap.mPendingBufferList.push_back(bufferInfo);
   3681         mPendingBuffersMap.num_buffers++;
   3682         QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
   3683         CDBG("%s: frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
   3684                 __func__, frameNumber, bufferInfo.buffer,
   3685                 channel->getStreamTypeMask(), bufferInfo.stream->format);
   3686 
   3687         if (bufferInfo.stream->format == HAL_PIXEL_FORMAT_RAW16) {
   3688             if (gCamCapability[mCameraId]->optical_black_region_count > 0) {
   3689                 CDBG("%s, frame_number:%d, need dynamic blacklevel", __func__, frameNumber);
   3690                 pendingRequest.need_dynamic_blklvl = true;
   3691             }
   3692         }
   3693     }
   3694     mPendingBuffersMap.last_frame_number = frameNumber;
   3695     latestRequest = mPendingRequestsList.insert(
   3696             mPendingRequestsList.end(), pendingRequest);
   3697     if(mFlush) {
   3698         pthread_mutex_unlock(&mMutex);
   3699         return NO_ERROR;
   3700     }
   3701 
   3702     // Notify metadata channel we receive a request
   3703     mMetadataChannel->request(NULL, frameNumber);
   3704 
   3705     if(request->input_buffer != NULL){
   3706         CDBG("%s: Input request, frame_number %d", __func__, frameNumber);
   3707         rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
   3708         if (NO_ERROR != rc) {
   3709             ALOGE("%s: fail to set reproc parameters", __func__);
   3710             pthread_mutex_unlock(&mMutex);
   3711             return rc;
   3712         }
   3713     }
   3714 
   3715     // Call request on other streams
   3716     uint32_t streams_need_metadata = 0;
   3717     pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
   3718     for (size_t i = 0; i < request->num_output_buffers; i++) {
   3719         const camera3_stream_buffer_t& output = request->output_buffers[i];
   3720         QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
   3721 
   3722         if (channel == NULL) {
   3723             ALOGE("%s: invalid channel pointer for stream", __func__);
   3724             continue;
   3725         }
   3726 
   3727         if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
   3728             if(request->input_buffer != NULL){
   3729                 rc = channel->request(output.buffer, frameNumber,
   3730                         pInputBuffer, &mReprocMeta);
   3731                 if (rc < 0) {
   3732                     ALOGE("%s: Fail to request on picture channel", __func__);
   3733                     pthread_mutex_unlock(&mMutex);
   3734                     return rc;
   3735                 }
   3736             } else {
   3737                 CDBG("%s: %d, snapshot request with buffer %p, frame_number %d", __func__,
   3738                         __LINE__, output.buffer, frameNumber);
   3739                 if (!request->settings) {
   3740                     rc = channel->request(output.buffer, frameNumber,
   3741                             NULL, mPrevParameters);
   3742                 } else {
   3743                     rc = channel->request(output.buffer, frameNumber,
   3744                             NULL, mParameters);
   3745                 }
   3746                 if (rc < 0) {
   3747                     ALOGE("%s: Fail to request on picture channel", __func__);
   3748                     pthread_mutex_unlock(&mMutex);
   3749                     return rc;
   3750                 }
   3751                 pendingBufferIter->need_metadata = true;
   3752                 streams_need_metadata++;
   3753             }
   3754         } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
   3755             bool needMetadata = false;
   3756             QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
   3757             rc = yuvChannel->request(output.buffer, frameNumber,
   3758                     pInputBuffer,
   3759                     (pInputBuffer ? &mReprocMeta : mParameters), needMetadata);
   3760             if (rc < 0) {
   3761                 ALOGE("%s: Fail to request on YUV channel", __func__);
   3762                 pthread_mutex_unlock(&mMutex);
   3763                 return rc;
   3764             }
   3765             pendingBufferIter->need_metadata = needMetadata;
   3766             if (needMetadata)
   3767                 streams_need_metadata += 1;
   3768             CDBG("%s: calling YUV channel request, need_metadata is %d",
   3769                     __func__, needMetadata);
   3770         } else {
   3771             CDBG("%s: %d, request with buffer %p, frame_number %d", __func__,
   3772                 __LINE__, output.buffer, frameNumber);
   3773             rc = channel->request(output.buffer, frameNumber);
   3774             if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
   3775                     && mBatchSize) {
   3776                 mToBeQueuedVidBufs++;
   3777                 if (mToBeQueuedVidBufs == mBatchSize) {
   3778                     channel->queueBatchBuf();
   3779                 }
   3780             }
   3781             if (rc < 0) {
   3782                 ALOGE("%s: request failed", __func__);
   3783                 pthread_mutex_unlock(&mMutex);
   3784                 return rc;
   3785             }
   3786         }
   3787         pendingBufferIter++;
   3788     }
   3789 
   3790     //If 2 streams have need_metadata set to true, fail the request, unless
   3791     //we copy/reference count the metadata buffer
   3792     if (streams_need_metadata > 1) {
   3793         ALOGE("%s: not supporting request in which two streams requires"
   3794                 " 2 HAL metadata for reprocessing", __func__);
   3795         pthread_mutex_unlock(&mMutex);
   3796         return -EINVAL;
   3797     }
   3798 
   3799     if(request->input_buffer == NULL) {
   3800         /* Set the parameters to backend:
   3801          * - For every request in NORMAL MODE
   3802          * - For every request in HFR mode during preview only case
   3803          * - Once every batch in HFR mode during video recording
   3804          */
   3805         if (!mBatchSize ||
   3806            (mBatchSize && !isVidBufRequested) ||
   3807            (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
   3808             CDBG("%s: set_parms  batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
   3809                     __func__, mBatchSize, isVidBufRequested,
   3810                     mToBeQueuedVidBufs);
   3811             rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
   3812                     mParameters);
   3813             if (rc < 0) {
   3814                 ALOGE("%s: set_parms failed", __func__);
   3815             }
   3816             /* reset to zero coz, the batch is queued */
   3817             mToBeQueuedVidBufs = 0;
   3818             mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
   3819         }
   3820         mPendingLiveRequest++;
   3821     }
   3822 
   3823     CDBG("%s: mPendingLiveRequest = %d", __func__, mPendingLiveRequest);
   3824 
   3825     mFirstRequest = false;
   3826     // Added a timed condition wait
   3827     struct timespec ts;
   3828     uint8_t isValidTimeout = 1;
   3829     rc = clock_gettime(CLOCK_REALTIME, &ts);
   3830     if (rc < 0) {
   3831       isValidTimeout = 0;
   3832       ALOGE("%s: Error reading the real time clock!!", __func__);
   3833     }
   3834     else {
   3835       // Make timeout as 5 sec for request to be honored
   3836       ts.tv_sec += 5;
   3837     }
   3838     //Block on conditional variable
   3839     if (mBatchSize) {
   3840         /* For HFR, more buffers are dequeued upfront to improve the performance */
   3841         minInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
   3842         maxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
   3843     }
   3844     while ((mPendingLiveRequest >= minInFlightRequests) && !pInputBuffer) {
   3845         if (!isValidTimeout) {
   3846             CDBG("%s: Blocking on conditional wait", __func__);
   3847             pthread_cond_wait(&mRequestCond, &mMutex);
   3848         }
   3849         else {
   3850             CDBG("%s: Blocking on timed conditional wait", __func__);
   3851             rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
   3852             if (rc == ETIMEDOUT) {
   3853                 rc = -ENODEV;
   3854                 ALOGE("%s: Unblocked on timeout!!!!", __func__);
   3855                 break;
   3856             }
   3857         }
   3858         CDBG("%s: Unblocked", __func__);
   3859         if (mWokenUpByDaemon) {
   3860             mWokenUpByDaemon = false;
   3861             if (mPendingLiveRequest < maxInFlightRequests)
   3862                 break;
   3863         }
   3864     }
   3865     pthread_mutex_unlock(&mMutex);
   3866 
   3867     return rc;
   3868 }
   3869 
   3870 /*===========================================================================
   3871  * FUNCTION   : dump
   3872  *
    3873  * DESCRIPTION: Dumps HAL3 debug state (pending request list, pending
    3874  *              buffer map, and pending frame-drop list) as formatted
    3875  *              tables to the supplied file descriptor.
    3876  * PARAMETERS :
    3877  *   @fd : file descriptor to write the formatted state into
    3878  * RETURN     : none
   3879  *==========================================================================*/
   3880 void QCamera3HardwareInterface::dump(int fd)
   3881 {
   3882     pthread_mutex_lock(&mMutex);
   3883     dprintf(fd, "\n Camera HAL3 information Begin \n");
   3884 
   3885     dprintf(fd, "\nNumber of pending requests: %zu \n",
   3886         mPendingRequestsList.size());
   3887     dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
   3888     dprintf(fd, " Frame | Number of Buffers |   Req Id:   | Blob Req | Input buffer present\n");
   3889     dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
   3890     for(pendingRequestIterator i = mPendingRequestsList.begin();
   3891             i != mPendingRequestsList.end(); i++) {
   3892         dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
   3893         i->frame_number, i->num_buffers, i->request_id, i->blob_request,
   3894         i->input_buffer);
   3895     }
   3896     dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
   3897                 mPendingBuffersMap.num_buffers);
   3898     dprintf(fd, "-------+------------------\n");
   3899     dprintf(fd, " Frame | Stream type mask \n");
   3900     dprintf(fd, "-------+------------------\n");
   3901     for(List<PendingBufferInfo>::iterator i =
   3902         mPendingBuffersMap.mPendingBufferList.begin();
   3903         i != mPendingBuffersMap.mPendingBufferList.end(); i++) {
   3904         QCamera3Channel *channel = (QCamera3Channel *)(i->stream->priv);
   3905         dprintf(fd, " %5d | %11d \n",
   3906                 i->frame_number, channel->getStreamTypeMask());
   3907     }
   3908     dprintf(fd, "-------+------------------\n");
   3909 
   3910     dprintf(fd, "\nPending frame drop list: %zu\n",
   3911         mPendingFrameDropList.size());
   3912     dprintf(fd, "-------+-----------\n");
   3913     dprintf(fd, " Frame | Stream ID \n");
   3914     dprintf(fd, "-------+-----------\n");
   3915     for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
   3916         i != mPendingFrameDropList.end(); i++) {
   3917         dprintf(fd, " %5d | %9d \n",
   3918             i->frame_number, i->stream_ID);
   3919     }
   3920     dprintf(fd, "-------+-----------\n");
   3921 
   3922     dprintf(fd, "\n Camera HAL3 information End \n");
   3923 
   3924     /* use dumpsys media.camera as trigger to send update debug level event */
   3925     mUpdateDebugLevel = true;
   3926     pthread_mutex_unlock(&mMutex);
   3927     return;
   3928 }
   3929 
   3930 /*===========================================================================
   3931  * FUNCTION   : flush
   3932  *
   3933  * DESCRIPTION:
   3934  *
   3935  * PARAMETERS :
   3936  *
   3937  *
   3938  * RETURN     :
   3939  *==========================================================================*/
   3940 int QCamera3HardwareInterface::flush()
   3941 {
   3942     ATRACE_CALL();
   3943     int32_t rc = NO_ERROR;
   3944 
   3945     CDBG("%s: Unblocking Process Capture Request", __func__);
   3946     pthread_mutex_lock(&mMutex);
   3947 
   3948     if (mFirstRequest) {
   3949         pthread_mutex_unlock(&mMutex);
   3950         return NO_ERROR;
   3951     }
   3952 
   3953     mFlush = true;
   3954     pthread_mutex_unlock(&mMutex);
   3955 
   3956     rc = stopAllChannels();
   3957     if (rc < 0) {
   3958         ALOGE("%s: stopAllChannels failed", __func__);
   3959         return rc;
   3960     }
   3961     if (mChannelHandle) {
   3962         mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
   3963                 mChannelHandle);
   3964     }
   3965 
   3966     // Reset bundle info
   3967     rc = setBundleInfo();
   3968     if (rc < 0) {
   3969         ALOGE("%s: setBundleInfo failed %d", __func__, rc);
   3970         return rc;
   3971     }
   3972 
   3973     // Mutex Lock
   3974     pthread_mutex_lock(&mMutex);
   3975 
   3976     // Unblock process_capture_request
   3977     mPendingLiveRequest = 0;
   3978     pthread_cond_signal(&mRequestCond);
   3979 
   3980     rc = notifyErrorForPendingRequests();
   3981     if (rc < 0) {
   3982         ALOGE("%s: notifyErrorForPendingRequests failed", __func__);
   3983         pthread_mutex_unlock(&mMutex);
   3984         return rc;
   3985     }
   3986 
   3987     mFlush = false;
   3988 
   3989     // Start the Streams/Channels
   3990     rc = startAllChannels();
   3991     if (rc < 0) {
   3992         ALOGE("%s: startAllChannels failed", __func__);
   3993         pthread_mutex_unlock(&mMutex);
   3994         return rc;
   3995     }
   3996 
   3997     if (mChannelHandle) {
   3998         mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
   3999                     mChannelHandle);
   4000         if (rc < 0) {
   4001             ALOGE("%s: start_channel failed", __func__);
   4002             pthread_mutex_unlock(&mMutex);
   4003             return rc;
   4004         }
   4005     }
   4006 
   4007     pthread_mutex_unlock(&mMutex);
   4008 
   4009     return 0;
   4010 }
   4011 
   4012 /*===========================================================================
   4013  * FUNCTION   : captureResultCb
   4014  *
   4015  * DESCRIPTION: Callback handler for all capture result
   4016  *              (streams, as well as metadata)
   4017  *
   4018  * PARAMETERS :
   4019  *   @metadata : metadata information
   4020  *   @buffer   : actual gralloc buffer to be returned to frameworks.
   4021  *               NULL if metadata.
   4022  *
   4023  * RETURN     : NONE
   4024  *==========================================================================*/
   4025 void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
   4026                 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
   4027 {
   4028     if (metadata_buf) {
   4029         if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
   4030             handleBatchMetadata(metadata_buf,
   4031                     true /* free_and_bufdone_meta_buf */);
   4032         } else { /* mBatchSize = 0 */
   4033             hdrPlusPerfLock(metadata_buf);
   4034             pthread_mutex_lock(&mMutex);
   4035             handleMetadataWithLock(metadata_buf,
   4036                     true /* free_and_bufdone_meta_buf */,
   4037                     false /* first frame of batch metadata */ );
   4038             pthread_mutex_unlock(&mMutex);
   4039         }
   4040     } else if (isInputBuffer) {
   4041         pthread_mutex_lock(&mMutex);
   4042         handleInputBufferWithLock(frame_number);
   4043         pthread_mutex_unlock(&mMutex);
   4044     } else {
   4045         pthread_mutex_lock(&mMutex);
   4046         handleBufferWithLock(buffer, frame_number);
   4047         pthread_mutex_unlock(&mMutex);
   4048     }
   4049     return;
   4050 }
   4051 
   4052 /*===========================================================================
   4053  * FUNCTION   : getReprocessibleOutputStreamId
   4054  *
   4055  * DESCRIPTION: Get source output stream id for the input reprocess stream
   4056  *              based on size and format, which would be the largest
   4057  *              output stream if an input stream exists.
   4058  *
   4059  * PARAMETERS :
   4060  *   @id      : return the stream id if found
   4061  *
   4062  * RETURN     : int32_t type of status
   4063  *              NO_ERROR  -- success
   4064  *              none-zero failure code
   4065  *==========================================================================*/
   4066 int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
   4067 {
   4068     stream_info_t* stream = NULL;
   4069 
   4070     /* check if any output or bidirectional stream with the same size and format
   4071        and return that stream */
   4072     if ((mInputStreamInfo.dim.width > 0) &&
   4073             (mInputStreamInfo.dim.height > 0)) {
   4074         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   4075                 it != mStreamInfo.end(); it++) {
   4076 
   4077             camera3_stream_t *stream = (*it)->stream;
   4078             if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
   4079                     (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
   4080                     (stream->format == mInputStreamInfo.format)) {
   4081                 // Usage flag for an input stream and the source output stream
   4082                 // may be different.
   4083                 CDBG("%s: Found reprocessible output stream! %p", __func__, *it);
   4084                 CDBG("%s: input stream usage 0x%x, current stream usage 0x%x",
   4085                         __func__, stream->usage, mInputStreamInfo.usage);
   4086 
   4087                 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
   4088                 if (channel != NULL && channel->mStreams[0]) {
   4089                     id = channel->mStreams[0]->getMyServerID();
   4090                     return NO_ERROR;
   4091                 }
   4092             }
   4093         }
   4094     } else {
   4095         CDBG("%s: No input stream, so no reprocessible output stream", __func__);
   4096     }
   4097     return NAME_NOT_FOUND;
   4098 }
   4099 
   4100 /*===========================================================================
   4101  * FUNCTION   : lookupFwkName
   4102  *
   4103  * DESCRIPTION: In case the enum is not same in fwk and backend
   4104  *              make sure the parameter is correctly propogated
   4105  *
   4106  * PARAMETERS  :
   4107  *   @arr      : map between the two enums
   4108  *   @len      : len of the map
   4109  *   @hal_name : name of the hal_parm to map
   4110  *
   4111  * RETURN     : int type of status
   4112  *              fwk_name  -- success
   4113  *              none-zero failure code
   4114  *==========================================================================*/
   4115 template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
   4116         size_t len, halType hal_name)
   4117 {
   4118 
   4119     for (size_t i = 0; i < len; i++) {
   4120         if (arr[i].hal_name == hal_name) {
   4121             return arr[i].fwk_name;
   4122         }
   4123     }
   4124 
   4125     /* Not able to find matching framework type is not necessarily
   4126      * an error case. This happens when mm-camera supports more attributes
   4127      * than the frameworks do */
   4128     CDBG_HIGH("%s: Cannot find matching framework type", __func__);
   4129     return NAME_NOT_FOUND;
   4130 }
   4131 
   4132 /*===========================================================================
   4133  * FUNCTION   : lookupHalName
   4134  *
   4135  * DESCRIPTION: In case the enum is not same in fwk and backend
   4136  *              make sure the parameter is correctly propogated
   4137  *
   4138  * PARAMETERS  :
   4139  *   @arr      : map between the two enums
   4140  *   @len      : len of the map
   4141  *   @fwk_name : name of the hal_parm to map
   4142  *
   4143  * RETURN     : int32_t type of status
   4144  *              hal_name  -- success
   4145  *              none-zero failure code
   4146  *==========================================================================*/
   4147 template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
   4148         size_t len, fwkType fwk_name)
   4149 {
   4150     for (size_t i = 0; i < len; i++) {
   4151         if (arr[i].fwk_name == fwk_name) {
   4152             return arr[i].hal_name;
   4153         }
   4154     }
   4155 
   4156     ALOGE("%s: Cannot find matching hal type fwk_name=%d", __func__, fwk_name);
   4157     return NAME_NOT_FOUND;
   4158 }
   4159 
   4160 /*===========================================================================
   4161  * FUNCTION   : lookupProp
   4162  *
   4163  * DESCRIPTION: lookup a value by its name
   4164  *
   4165  * PARAMETERS :
   4166  *   @arr     : map between the two enums
   4167  *   @len     : size of the map
   4168  *   @name    : name to be looked up
   4169  *
   4170  * RETURN     : Value if found
   4171  *              CAM_CDS_MODE_MAX if not found
   4172  *==========================================================================*/
   4173 template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
   4174         size_t len, const char *name)
   4175 {
   4176     if (name) {
   4177         for (size_t i = 0; i < len; i++) {
   4178             if (!strcmp(arr[i].desc, name)) {
   4179                 return arr[i].val;
   4180             }
   4181         }
   4182     }
   4183     return CAM_CDS_MODE_MAX;
   4184 }
   4185 
   4186 /*===========================================================================
   4187  *
   4188  * DESCRIPTION:
   4189  *
   4190  * PARAMETERS :
   4191  *   @metadata : metadata information from callback
   4192  *   @timestamp: metadata buffer timestamp
   4193  *   @request_id: request id
   4194  *   @hybrid_ae_enable: whether hybrid ae is enabled
   4195  *   @jpegMetadata: additional jpeg metadata
   4196  *   @pprocDone: whether internal offline postprocsesing is done
   4197  *
   4198  * RETURN     : camera_metadata_t*
   4199  *              metadata in a format specified by fwk
   4200  *==========================================================================*/
   4201 camera_metadata_t*
   4202 QCamera3HardwareInterface::translateFromHalMetadata(
   4203                                  metadata_buffer_t *metadata,
   4204                                  nsecs_t timestamp,
   4205                                  int32_t request_id,
   4206                                  const CameraMetadata& jpegMetadata,
   4207                                  uint8_t pipeline_depth,
   4208                                  uint8_t capture_intent,
   4209                                  uint8_t hybrid_ae_enable,
   4210                                  bool pprocDone,
   4211                                  bool dynamic_blklvl,
   4212                                  bool firstMetadataInBatch)
   4213 {
   4214     CameraMetadata camMetadata;
   4215     camera_metadata_t *resultMetadata;
   4216 
   4217     if (mBatchSize && !firstMetadataInBatch) {
   4218         /* In batch mode, use cached metadata from the first metadata
   4219             in the batch */
   4220         camMetadata.clear();
   4221         camMetadata = mCachedMetadata;
   4222     }
   4223 
   4224     if (jpegMetadata.entryCount())
   4225         camMetadata.append(jpegMetadata);
   4226 
   4227     camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
   4228     camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
   4229     camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
   4230     camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
   4231     camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
   4232 
   4233     if (mBatchSize && !firstMetadataInBatch) {
   4234         /* In batch mode, use cached metadata instead of parsing metadata buffer again */
   4235         resultMetadata = camMetadata.release();
   4236         return resultMetadata;
   4237     }
   4238 
   4239     IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
   4240         int64_t fwk_frame_number = *frame_number;
   4241         camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
   4242     }
   4243 
   4244     IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
   4245         int32_t fps_range[2];
   4246         fps_range[0] = (int32_t)float_range->min_fps;
   4247         fps_range[1] = (int32_t)float_range->max_fps;
   4248         camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
   4249                                       fps_range, 2);
   4250         CDBG("%s: urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
   4251             __func__, fps_range[0], fps_range[1]);
   4252     }
   4253 
   4254     IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
   4255         camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
   4256     }
   4257 
   4258     IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
   4259         int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
   4260                 METADATA_MAP_SIZE(SCENE_MODES_MAP),
   4261                 *sceneMode);
   4262         if (NAME_NOT_FOUND != val) {
   4263             uint8_t fwkSceneMode = (uint8_t)val;
   4264             camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
   4265             CDBG("%s: urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
   4266                     __func__, fwkSceneMode);
   4267         }
   4268     }
   4269 
   4270     IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
   4271         uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
   4272         camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
   4273     }
   4274 
   4275     IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
   4276         uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
   4277         camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
   4278     }
   4279 
   4280     IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
   4281         uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
   4282         camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
   4283     }
   4284 
   4285     IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
   4286             CAM_INTF_META_EDGE_MODE, metadata) {
   4287         uint8_t edgeStrength = (uint8_t) edgeApplication->sharpness;
   4288         camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
   4289     }
   4290 
   4291     IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
   4292         uint8_t fwk_flashPower = (uint8_t) *flashPower;
   4293         camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
   4294     }
   4295 
   4296     IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
   4297         camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
   4298     }
   4299 
   4300     IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
   4301         if (0 <= *flashState) {
   4302             uint8_t fwk_flashState = (uint8_t) *flashState;
   4303             if (!gCamCapability[mCameraId]->flash_available) {
   4304                 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
   4305             }
   4306             camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
   4307         }
   4308     }
   4309 
   4310     IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
   4311         int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
   4312         if (NAME_NOT_FOUND != val) {
   4313             uint8_t fwk_flashMode = (uint8_t)val;
   4314             camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
   4315         }
   4316     }
   4317 
   4318     IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
   4319         uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
   4320         camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
   4321     }
   4322 
   4323     IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
   4324         camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
   4325     }
   4326 
   4327     IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
   4328         camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
   4329     }
   4330 
   4331     IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
   4332         camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
   4333     }
   4334 
   4335     IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
   4336         uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
   4337         camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
   4338     }
   4339 
   4340     IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
   4341         uint8_t fwk_videoStab = (uint8_t) *videoStab;
   4342         camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
   4343     }
   4344 
   4345     IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
   4346         uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
   4347         camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
   4348     }
   4349 
   4350     IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
   4351         camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
   4352     }
   4353 
   4354     IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelSourcePattern,
   4355         CAM_INTF_META_BLACK_LEVEL_SOURCE_PATTERN, metadata) {
   4356 
   4357         CDBG("%s: dynamicblackLevel = %f %f %f %f", __func__,
   4358           blackLevelSourcePattern->cam_black_level[0],
   4359           blackLevelSourcePattern->cam_black_level[1],
   4360           blackLevelSourcePattern->cam_black_level[2],
   4361           blackLevelSourcePattern->cam_black_level[3]);
   4362     }
   4363 
   4364     IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
   4365         CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
   4366         float fwk_blackLevelInd[4];
   4367 
   4368         fwk_blackLevelInd[0] = blackLevelAppliedPattern->cam_black_level[0];
   4369         fwk_blackLevelInd[1] = blackLevelAppliedPattern->cam_black_level[1];
   4370         fwk_blackLevelInd[2] = blackLevelAppliedPattern->cam_black_level[2];
   4371         fwk_blackLevelInd[3] = blackLevelAppliedPattern->cam_black_level[3];
   4372 
   4373         CDBG("%s: applied dynamicblackLevel = %f %f %f %f", __func__,
   4374           blackLevelAppliedPattern->cam_black_level[0],
   4375           blackLevelAppliedPattern->cam_black_level[1],
   4376           blackLevelAppliedPattern->cam_black_level[2],
   4377           blackLevelAppliedPattern->cam_black_level[3]);
   4378         camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd, 4);
   4379         camMetadata.update(NEXUS_EXPERIMENTAL_2015_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd, 4);
   4380 
   4381         // if dynmaic_blklvl is true, we calculate blklvl from raw callback
   4382         // otherwise, use the value from linearization LUT.
   4383         if (dynamic_blklvl == false) {
   4384             // Need convert the internal 16 bit depth to sensor 10 bit sensor raw
   4385             // depth space.
   4386             fwk_blackLevelInd[0] /= 64.0;
   4387             fwk_blackLevelInd[1] /= 64.0;
   4388             fwk_blackLevelInd[2] /= 64.0;
   4389             fwk_blackLevelInd[3] /= 64.0;
   4390             camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd, 4);
   4391         }
   4392     }
   4393 
   4394     // Fixed whitelevel is used by ISP/Sensor
   4395     camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
   4396             &gCamCapability[mCameraId]->white_level, 1);
   4397 
   4398     if (gCamCapability[mCameraId]->optical_black_region_count != 0 &&
   4399         gCamCapability[mCameraId]->optical_black_region_count <= MAX_OPTICAL_BLACK_REGIONS) {
   4400         int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
   4401         for (size_t i = 0; i < gCamCapability[mCameraId]->optical_black_region_count * 4; i++) {
   4402             opticalBlackRegions[i] = gCamCapability[mCameraId]->optical_black_regions[i];
   4403         }
   4404         camMetadata.update(NEXUS_EXPERIMENTAL_2015_SENSOR_INFO_OPTICALLY_SHIELDED_REGIONS,
   4405                 opticalBlackRegions, gCamCapability[mCameraId]->optical_black_region_count * 4);
   4406     }
   4407 
   4408     IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
   4409             CAM_INTF_META_SCALER_CROP_REGION, metadata) {
   4410         int32_t scalerCropRegion[4];
   4411         scalerCropRegion[0] = hScalerCropRegion->left;
   4412         scalerCropRegion[1] = hScalerCropRegion->top;
   4413         scalerCropRegion[2] = hScalerCropRegion->width;
   4414         scalerCropRegion[3] = hScalerCropRegion->height;
   4415 
   4416         // Adjust crop region from sensor output coordinate system to active
   4417         // array coordinate system.
   4418         mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
   4419                 scalerCropRegion[2], scalerCropRegion[3]);
   4420 
   4421         camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
   4422     }
   4423 
   4424     IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
   4425         CDBG("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
   4426         camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
   4427     }
   4428 
   4429     IF_META_AVAILABLE(int64_t, sensorFameDuration,
   4430             CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
   4431         CDBG("%s: sensorFameDuration = %lld", __func__, *sensorFameDuration);
   4432         camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
   4433     }
   4434 
   4435     IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
   4436             CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
   4437         CDBG("%s: sensorRollingShutterSkew = %lld", __func__, *sensorRollingShutterSkew);
   4438         camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
   4439                 sensorRollingShutterSkew, 1);
   4440     }
   4441 
   4442     IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
   4443         CDBG("%s: sensorSensitivity = %d", __func__, *sensorSensitivity);
   4444         camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
   4445 
   4446         //calculate the noise profile based on sensitivity
   4447         double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
   4448         double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
   4449         double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
   4450         for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
   4451             noise_profile[i]   = noise_profile_S;
   4452             noise_profile[i+1] = noise_profile_O;
   4453         }
   4454         CDBG("%s: noise model entry (S, O) is (%f, %f)", __func__,
   4455                 noise_profile_S, noise_profile_O);
   4456         camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
   4457                 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
   4458     }
   4459 
   4460     IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
   4461         uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
   4462         camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
   4463     }
   4464 
   4465     IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
   4466         int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
   4467                 *faceDetectMode);
   4468         if (NAME_NOT_FOUND != val) {
   4469             uint8_t fwk_faceDetectMode = (uint8_t)val;
   4470             camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
   4471 
   4472             if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
   4473                 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
   4474                         CAM_INTF_META_FACE_DETECTION, metadata) {
   4475                     uint8_t numFaces = MIN(
   4476                             faceDetectionInfo->num_faces_detected, MAX_ROI);
   4477                     int32_t faceIds[MAX_ROI];
   4478                     uint8_t faceScores[MAX_ROI];
   4479                     int32_t faceRectangles[MAX_ROI * 4];
   4480                     int32_t faceLandmarks[MAX_ROI * 6];
   4481                     size_t j = 0, k = 0;
   4482 
   4483                     for (size_t i = 0; i < numFaces; i++) {
   4484                         faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
   4485                         // Adjust crop region from sensor output coordinate system to active
   4486                         // array coordinate system.
   4487                         cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
   4488                         mCropRegionMapper.toActiveArray(rect.left, rect.top,
   4489                                 rect.width, rect.height);
   4490 
   4491                         convertToRegions(faceDetectionInfo->faces[i].face_boundary,
   4492                                 faceRectangles+j, -1);
   4493 
   4494                         // Map the co-ordinate sensor output coordinate system to active
   4495                         // array coordinate system.
   4496                         cam_face_detection_info_t& face = faceDetectionInfo->faces[i];
   4497                         mCropRegionMapper.toActiveArray(face.left_eye_center.x,
   4498                                 face.left_eye_center.y);
   4499                         mCropRegionMapper.toActiveArray(face.right_eye_center.x,
   4500                                 face.right_eye_center.y);
   4501                         mCropRegionMapper.toActiveArray(face.mouth_center.x,
   4502                                 face.mouth_center.y);
   4503 
   4504                         convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
   4505                         j+= 4;
   4506                         k+= 6;
   4507                     }
   4508                     if (numFaces <= 0) {
   4509                         memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
   4510                         memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
   4511                         memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
   4512                         memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
   4513                     }
   4514 
   4515                     camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
   4516                             numFaces);
   4517                     camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
   4518                             faceRectangles, numFaces * 4U);
   4519                     if (fwk_faceDetectMode ==
   4520                             ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
   4521                         camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
   4522                         camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
   4523                                 faceLandmarks, numFaces * 6U);
   4524                    }
   4525                 }
   4526             }
   4527         }
   4528     }
   4529 
   4530     IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
   4531         uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
   4532         camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &fwk_histogramMode, 1);
   4533     }
   4534 
   4535     IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
   4536             CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
   4537         uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
   4538         camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
   4539     }
   4540 
   4541     IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
   4542             CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
   4543         camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
   4544                 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
   4545     }
   4546 
   4547     IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
   4548             CAM_INTF_META_LENS_SHADING_MAP, metadata) {
   4549         size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
   4550                 CAM_MAX_SHADING_MAP_HEIGHT);
   4551         size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
   4552                 CAM_MAX_SHADING_MAP_WIDTH);
   4553         camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
   4554                 lensShadingMap->lens_shading, 4U * map_width * map_height);
   4555     }
   4556 
   4557     IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
   4558         uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
   4559         camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
   4560     }
   4561 
   4562     IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
   4563         //Populate CAM_INTF_META_TONEMAP_CURVES
   4564         /* ch0 = G, ch 1 = B, ch 2 = R*/
   4565         if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
   4566             ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d",
   4567                     __func__, tonemap->tonemap_points_cnt,
   4568                     CAM_MAX_TONEMAP_CURVE_SIZE);
   4569             tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
   4570         }
   4571 
   4572         camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
   4573                         &tonemap->curves[0].tonemap_points[0][0],
   4574                         tonemap->tonemap_points_cnt * 2);
   4575 
   4576         camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
   4577                         &tonemap->curves[1].tonemap_points[0][0],
   4578                         tonemap->tonemap_points_cnt * 2);
   4579 
   4580         camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
   4581                         &tonemap->curves[2].tonemap_points[0][0],
   4582                         tonemap->tonemap_points_cnt * 2);
   4583     }
   4584 
   4585     IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
   4586             CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
   4587         camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
   4588                 CC_GAINS_COUNT);
   4589     }
   4590 
   4591     IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
   4592             CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
   4593         camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
   4594                 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
   4595                 CC_MATRIX_COLS * CC_MATRIX_ROWS);
   4596     }
   4597 
   4598     IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
   4599             CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
   4600         if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
   4601             ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d",
   4602                     __func__, toneCurve->tonemap_points_cnt,
   4603                     CAM_MAX_TONEMAP_CURVE_SIZE);
   4604             toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
   4605         }
   4606         camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
   4607                 (float*)toneCurve->curve.tonemap_points,
   4608                 toneCurve->tonemap_points_cnt * 2);
   4609     }
   4610 
   4611     IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
   4612             CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
   4613         camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
   4614                 predColorCorrectionGains->gains, 4);
   4615     }
   4616 
   4617     IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
   4618             CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
   4619         camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
   4620                 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
   4621                 CC_MATRIX_ROWS * CC_MATRIX_COLS);
   4622     }
   4623 
   4624     IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
   4625         camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
   4626     }
   4627 
   4628     IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
   4629         uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
   4630         camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
   4631     }
   4632 
   4633     IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
   4634         uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
   4635         camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
   4636     }
   4637 
   4638     IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
   4639         int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
   4640                 *effectMode);
   4641         if (NAME_NOT_FOUND != val) {
   4642             uint8_t fwk_effectMode = (uint8_t)val;
   4643             camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
   4644         }
   4645     }
   4646 
   4647     IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
   4648             CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
   4649         int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
   4650                 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
   4651         if (NAME_NOT_FOUND != fwk_testPatternMode) {
   4652             camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
   4653         }
   4654         int32_t fwk_testPatternData[4];
   4655         fwk_testPatternData[0] = testPatternData->r;
   4656         fwk_testPatternData[3] = testPatternData->b;
   4657         switch (gCamCapability[mCameraId]->color_arrangement) {
   4658         case CAM_FILTER_ARRANGEMENT_RGGB:
   4659         case CAM_FILTER_ARRANGEMENT_GRBG:
   4660             fwk_testPatternData[1] = testPatternData->gr;
   4661             fwk_testPatternData[2] = testPatternData->gb;
   4662             break;
   4663         case CAM_FILTER_ARRANGEMENT_GBRG:
   4664         case CAM_FILTER_ARRANGEMENT_BGGR:
   4665             fwk_testPatternData[2] = testPatternData->gr;
   4666             fwk_testPatternData[1] = testPatternData->gb;
   4667             break;
   4668         default:
   4669             ALOGE("%s: color arrangement %d is not supported", __func__,
   4670                 gCamCapability[mCameraId]->color_arrangement);
   4671             break;
   4672         }
   4673         camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
   4674     }
   4675 
   4676     IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
   4677         camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
   4678     }
   4679 
   4680     IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
   4681         String8 str((const char *)gps_methods);
   4682         camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
   4683     }
   4684 
   4685     IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
   4686         camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
   4687     }
   4688 
   4689     IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
   4690         camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
   4691     }
   4692 
   4693     IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
   4694         uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
   4695         camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
   4696     }
   4697 
   4698     IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
   4699         uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
   4700         camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
   4701     }
   4702 
   4703     IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
   4704         int32_t fwk_thumb_size[2];
   4705         fwk_thumb_size[0] = thumb_size->width;
   4706         fwk_thumb_size[1] = thumb_size->height;
   4707         camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
   4708     }
   4709 
   4710     IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
   4711         camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
   4712                 privateData,
   4713                 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
   4714     }
   4715 
   4716     if (metadata->is_tuning_params_valid) {
   4717         uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
   4718         uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
   4719         metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
   4720 
   4721 
   4722         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
   4723                 sizeof(uint32_t));
   4724         data += sizeof(uint32_t);
   4725 
   4726         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
   4727                 sizeof(uint32_t));
   4728         CDBG("tuning_sensor_data_size %d",(int)(*(int *)data));
   4729         data += sizeof(uint32_t);
   4730 
   4731         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
   4732                 sizeof(uint32_t));
   4733         CDBG("tuning_vfe_data_size %d",(int)(*(int *)data));
   4734         data += sizeof(uint32_t);
   4735 
   4736         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
   4737                 sizeof(uint32_t));
   4738         CDBG("tuning_cpp_data_size %d",(int)(*(int *)data));
   4739         data += sizeof(uint32_t);
   4740 
   4741         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
   4742                 sizeof(uint32_t));
   4743         CDBG("tuning_cac_data_size %d",(int)(*(int *)data));
   4744         data += sizeof(uint32_t);
   4745 
   4746         metadata->tuning_params.tuning_mod3_data_size = 0;
   4747         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
   4748                 sizeof(uint32_t));
   4749         CDBG("tuning_mod3_data_size %d",(int)(*(int *)data));
   4750         data += sizeof(uint32_t);
   4751 
   4752         size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
   4753                 TUNING_SENSOR_DATA_MAX);
   4754         memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
   4755                 count);
   4756         data += count;
   4757 
   4758         count = MIN(metadata->tuning_params.tuning_vfe_data_size,
   4759                 TUNING_VFE_DATA_MAX);
   4760         memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
   4761                 count);
   4762         data += count;
   4763 
   4764         count = MIN(metadata->tuning_params.tuning_cpp_data_size,
   4765                 TUNING_CPP_DATA_MAX);
   4766         memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
   4767                 count);
   4768         data += count;
   4769 
   4770         count = MIN(metadata->tuning_params.tuning_cac_data_size,
   4771                 TUNING_CAC_DATA_MAX);
   4772         memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
   4773                 count);
   4774         data += count;
   4775 
   4776         camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
   4777                 (int32_t *)(void *)tuning_meta_data_blob,
   4778                 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
   4779     }
   4780 
   4781     IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
   4782             CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
   4783         camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
   4784                 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
   4785                 NEUTRAL_COL_POINTS);
   4786     }
   4787 
   4788     IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
   4789         uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
   4790         camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
   4791     }
   4792 
   4793     IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
   4794         int32_t aeRegions[REGIONS_TUPLE_COUNT];
   4795         // Adjust crop region from sensor output coordinate system to active
   4796         // array coordinate system.
   4797         mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
   4798                 hAeRegions->rect.width, hAeRegions->rect.height);
   4799 
   4800         convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
   4801         camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
   4802                 REGIONS_TUPLE_COUNT);
   4803         CDBG("%s: Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
   4804                 __func__, aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
   4805                 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
   4806                 hAeRegions->rect.height);
   4807     }
   4808 
   4809     IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
   4810         int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
   4811         if (NAME_NOT_FOUND != val) {
   4812             uint8_t fwkAfMode = (uint8_t)val;
   4813             camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
   4814             CDBG("%s: Metadata : ANDROID_CONTROL_AF_MODE %d", __func__, val);
   4815         } else {
   4816             CDBG_HIGH("%s: Metadata not found : ANDROID_CONTROL_AF_MODE %d",
   4817                     __func__, val);
   4818         }
   4819     }
   4820 
   4821     IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
   4822         uint8_t fwk_afState = (uint8_t) *afState;
   4823         camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
   4824         CDBG("%s: Metadata : ANDROID_CONTROL_AF_STATE %u", __func__, *afState);
   4825     }
   4826 
   4827     IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
   4828         camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
   4829     }
   4830 
   4831     IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
   4832         camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
   4833     }
   4834 
   4835     IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
   4836         uint8_t fwk_lensState = *lensState;
   4837         camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
   4838     }
   4839 
   4840     IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
   4841         /*af regions*/
   4842         int32_t afRegions[REGIONS_TUPLE_COUNT];
   4843         // Adjust crop region from sensor output coordinate system to active
   4844         // array coordinate system.
   4845         mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
   4846                 hAfRegions->rect.width, hAfRegions->rect.height);
   4847 
   4848         convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
   4849         camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
   4850                 REGIONS_TUPLE_COUNT);
   4851         CDBG("%s: Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
   4852                 __func__, afRegions[0], afRegions[1], afRegions[2], afRegions[3],
   4853                 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
   4854                 hAfRegions->rect.height);
   4855     }
   4856 
   4857     IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
   4858         int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
   4859                 *hal_ab_mode);
   4860         if (NAME_NOT_FOUND != val) {
   4861             uint8_t fwk_ab_mode = (uint8_t)val;
   4862             camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
   4863         }
   4864     }
   4865 
   4866     IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
   4867         int val = lookupFwkName(SCENE_MODES_MAP,
   4868                 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
   4869         if (NAME_NOT_FOUND != val) {
   4870             uint8_t fwkBestshotMode = (uint8_t)val;
   4871             camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
   4872             CDBG("%s: Metadata : ANDROID_CONTROL_SCENE_MODE", __func__);
   4873         } else {
   4874             CDBG_HIGH("%s: Metadata not found : ANDROID_CONTROL_SCENE_MODE", __func__);
   4875         }
   4876     }
   4877 
   4878     IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
   4879          uint8_t fwk_mode = (uint8_t) *mode;
   4880          camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
   4881     }
   4882 
   4883     /* Constant metadata values to be update*/
   4884     uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
   4885     camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
   4886 
   4887     uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
   4888     camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
   4889 
   4890     int32_t hotPixelMap[2];
   4891     camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
   4892 
   4893     // CDS
   4894     IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
   4895         camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
   4896     }
   4897 
   4898     // TNR
   4899     IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
   4900         uint8_t tnr_enable       = tnr->denoise_enable;
   4901         int32_t tnr_process_type = (int32_t)tnr->process_plates;
   4902 
   4903         camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
   4904         camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
   4905     }
   4906 
   4907     // Reprocess crop data
   4908     IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
   4909         uint8_t cnt = crop_data->num_of_streams;
   4910         if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
   4911             // mm-qcamera-daemon only posts crop_data for streams
   4912             // not linked to pproc. So no valid crop metadata is not
   4913             // necessarily an error case.
   4914             CDBG("%s: No valid crop metadata entries", __func__);
   4915         } else {
   4916             uint32_t reproc_stream_id;
   4917             if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
   4918                 CDBG("%s: No reprocessible stream found, ignore crop data", __func__);
   4919             } else {
   4920                 int rc = NO_ERROR;
   4921                 Vector<int32_t> roi_map;
   4922                 int32_t *crop = new int32_t[cnt*4];
   4923                 if (NULL == crop) {
   4924                    rc = NO_MEMORY;
   4925                 }
   4926                 if (NO_ERROR == rc) {
   4927                     int32_t streams_found = 0;
   4928                     for (size_t i = 0; i < cnt; i++) {
   4929                         if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
   4930                             if (pprocDone) {
   4931                                 // HAL already does internal reprocessing,
   4932                                 // either via reprocessing before JPEG encoding,
   4933                                 // or offline postprocessing for pproc bypass case.
   4934                                 crop[0] = 0;
   4935                                 crop[1] = 0;
   4936                                 crop[2] = mInputStreamInfo.dim.width;
   4937                                 crop[3] = mInputStreamInfo.dim.height;
   4938                             } else {
   4939                                 crop[0] = crop_data->crop_info[i].crop.left;
   4940                                 crop[1] = crop_data->crop_info[i].crop.top;
   4941                                 crop[2] = crop_data->crop_info[i].crop.width;
   4942                                 crop[3] = crop_data->crop_info[i].crop.height;
   4943                             }
   4944                             roi_map.add(crop_data->crop_info[i].roi_map.left);
   4945                             roi_map.add(crop_data->crop_info[i].roi_map.top);
   4946                             roi_map.add(crop_data->crop_info[i].roi_map.width);
   4947                             roi_map.add(crop_data->crop_info[i].roi_map.height);
   4948                             streams_found++;
   4949                             CDBG("%s: Adding reprocess crop data for stream %dx%d, %dx%d",
   4950                                     __func__,
   4951                                     crop[0], crop[1], crop[2], crop[3]);
   4952                             CDBG("%s: Adding reprocess crop roi map for stream %dx%d, %dx%d",
   4953                                     __func__,
   4954                                     crop_data->crop_info[i].roi_map.left,
   4955                                     crop_data->crop_info[i].roi_map.top,
   4956                                     crop_data->crop_info[i].roi_map.width,
   4957                                     crop_data->crop_info[i].roi_map.height);
   4958                             break;
   4959 
   4960                        }
   4961                     }
   4962                     camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
   4963                             &streams_found, 1);
   4964                     camMetadata.update(QCAMERA3_CROP_REPROCESS,
   4965                             crop, (size_t)(streams_found * 4));
   4966                     if (roi_map.array()) {
   4967                         camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
   4968                                 roi_map.array(), roi_map.size());
   4969                     }
   4970                }
   4971                if (crop) {
   4972                    delete [] crop;
   4973                }
   4974             }
   4975         }
   4976     }
   4977 
   4978     IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
   4979         int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
   4980                 *cacMode);
   4981         if (NAME_NOT_FOUND != val) {
   4982             uint8_t fwkCacMode = (uint8_t)val;
   4983             camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
   4984         } else {
   4985             ALOGE("%s: Invalid CAC camera parameter: %d", __func__, *cacMode);
   4986         }
   4987     }
   4988 
   4989     // Post blob of cam_cds_data through vendor tag.
   4990     IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
   4991         uint8_t cnt = cdsInfo->num_of_streams;
   4992         cam_cds_data_t cdsDataOverride;
   4993         memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
   4994         cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
   4995         cdsDataOverride.num_of_streams = 1;
   4996         if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
   4997             uint32_t reproc_stream_id;
   4998             if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
   4999                 CDBG("%s: No reprocessible stream found, ignore cds data", __func__);
   5000             } else {
   5001                 for (size_t i = 0; i < cnt; i++) {
   5002                     if (cdsInfo->cds_info[i].stream_id ==
   5003                             reproc_stream_id) {
   5004                         cdsDataOverride.cds_info[0].cds_enable =
   5005                                 cdsInfo->cds_info[i].cds_enable;
   5006                         break;
   5007                     }
   5008                 }
   5009             }
   5010         } else {
   5011             CDBG("%s: Invalid stream count %d in CDS_DATA", __func__, cnt);
   5012         }
   5013         camMetadata.update(QCAMERA3_CDS_INFO,
   5014                 (uint8_t *)&cdsDataOverride,
   5015                 sizeof(cam_cds_data_t));
   5016     }
   5017 
   5018     // Ldaf calibration data
   5019     if (!mLdafCalibExist) {
   5020         IF_META_AVAILABLE(uint32_t, ldafCalib,
   5021                 CAM_INTF_META_LDAF_EXIF, metadata) {
   5022             mLdafCalibExist = true;
   5023             mLdafCalib[0] = ldafCalib[0];
   5024             mLdafCalib[1] = ldafCalib[1];
   5025             CDBG("%s: ldafCalib[0] is %d, ldafCalib[1] is %d", __func__,
   5026                     ldafCalib[0], ldafCalib[1]);
   5027         }
   5028     }
   5029 
   5030     // Post Raw Sensitivity Boost = ISP digital gain
   5031     IF_META_AVAILABLE(float, ispDigitalGain, CAM_INTF_META_ISP_DIGITAL_GAIN, metadata) {
   5032         int32_t postRawSensitivity = static_cast<int32_t>(*ispDigitalGain * 100);
   5033         camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &postRawSensitivity, 1);
   5034     }
   5035 
   5036     /* In batch mode, cache the first metadata in the batch */
   5037     if (mBatchSize && firstMetadataInBatch) {
   5038         mCachedMetadata.clear();
   5039         mCachedMetadata = camMetadata;
   5040     }
   5041 
   5042     resultMetadata = camMetadata.release();
   5043     return resultMetadata;
   5044 }
   5045 
   5046 /*===========================================================================
   5047  * FUNCTION   : saveExifParams
   5048  *
   5049  * DESCRIPTION:
   5050  *
   5051  * PARAMETERS :
   5052  *   @metadata : metadata information from callback
   5053  *
   5054  * RETURN     : none
   5055  *
   5056  *==========================================================================*/
   5057 void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
   5058 {
   5059     IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
   5060             CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
   5061         mExifParams.ae_debug_params = *ae_exif_debug_params;
   5062         mExifParams.ae_debug_params_valid = TRUE;
   5063     }
   5064     IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
   5065             CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
   5066         mExifParams.awb_debug_params = *awb_exif_debug_params;
   5067         mExifParams.awb_debug_params_valid = TRUE;
   5068     }
   5069     IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
   5070             CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
   5071         mExifParams.af_debug_params = *af_exif_debug_params;
   5072         mExifParams.af_debug_params_valid = TRUE;
   5073     }
   5074     IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
   5075             CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
   5076         mExifParams.asd_debug_params = *asd_exif_debug_params;
   5077         mExifParams.asd_debug_params_valid = TRUE;
   5078     }
   5079     IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
   5080             CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
   5081         mExifParams.stats_debug_params = *stats_exif_debug_params;
   5082         mExifParams.stats_debug_params_valid = TRUE;
   5083     }
   5084 }
   5085 
   5086 /*===========================================================================
   5087  * FUNCTION   : get3AExifParams
   5088  *
   5089  * DESCRIPTION:
   5090  *
   5091  * PARAMETERS : none
   5092  *
   5093  *
   5094  * RETURN     : mm_jpeg_exif_params_t
   5095  *
   5096  *==========================================================================*/
   5097 mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
   5098 {
   5099     return mExifParams;
   5100 }
   5101 
   5102 /*===========================================================================
   5103  * FUNCTION   : translateCbUrgentMetadataToResultMetadata
   5104  *
   5105  * DESCRIPTION:
   5106  *
   5107  * PARAMETERS :
   5108  *   @metadata : metadata information from callback
   5109  *
   5110  * RETURN     : camera_metadata_t*
   5111  *              metadata in a format specified by fwk
   5112  *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
                                (metadata_buffer_t *metadata)
{
    // Translate the "urgent" (partial, low-latency) subset of the backend
    // metadata into framework-format result metadata: 3A states, 3A
    // triggers, AWB mode and the deduced AE mode.
    // Ownership of the returned camera_metadata_t passes to the caller
    // via CameraMetadata::release().
    CameraMetadata camMetadata;
    camera_metadata_t *resultMetadata;


    // AWB state (converged/searching/locked...) as reported by the backend.
    IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
        uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
        camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", __func__, *whiteBalanceState);
    }

    // AE precapture trigger echo: both the trigger value and its id are
    // reported back so the framework can match request and result.
    IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
                &aecTrigger->trigger, 1);
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
                &aecTrigger->trigger_id, 1);
        CDBG("%s: urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
                __func__, aecTrigger->trigger);
        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d", __func__,
                aecTrigger->trigger_id);
    }

    // AE state (searching/converged/flash-required...).
    IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
        uint8_t fwk_ae_state = (uint8_t) *ae_state;
        camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AE_STATE %u", __func__, *ae_state);
    }

    // AF trigger echo (trigger value plus matching id).
    IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
                &af_trigger->trigger, 1);
        CDBG("%s: urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
                __func__, af_trigger->trigger);
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d", __func__,
                af_trigger->trigger_id);
    }

    // AWB mode: map the HAL white-balance enum to the framework enum.
    IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkWhiteBalanceMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
            CDBG("%s: urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", __func__, val);
        } else {
            CDBG_HIGH("%s: urgent Metadata not found : ANDROID_CONTROL_AWB_MODE", __func__);
        }
    }

    // ANDROID_CONTROL_AE_MODE has no single backend equivalent; it is
    // deduced from three backend fields, checked in priority order:
    // redeye reduction > flash (LED) mode > plain AE on/off.
    uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
    uint32_t aeMode = CAM_AE_MODE_MAX;
    int32_t flashMode = CAM_FLASH_MODE_MAX;
    int32_t redeye = -1;
    IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
        aeMode = *pAeMode;
    }
    IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
        flashMode = *pFlashMode;
    }
    IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
        redeye = *pRedeye;
    }

    if (1 == redeye) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
        int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
                flashMode);
        if (NAME_NOT_FOUND != val) {
            fwk_aeMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
        } else {
            ALOGE("%s: Unsupported flash mode %d", __func__, flashMode);
        }
    } else if (aeMode == CAM_AE_MODE_ON) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (aeMode == CAM_AE_MODE_OFF) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else {
        // None of the backend fields were usable; the tag is simply not
        // published for this result.
        ALOGE("%s: Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
              "flashMode:%d, aeMode:%u!!!",
                __func__, redeye, flashMode, aeMode);
    }

    // Hand the raw buffer (and its ownership) to the caller.
    resultMetadata = camMetadata.release();
    return resultMetadata;
}
   5207 
   5208 /*===========================================================================
   5209  * FUNCTION   : dumpMetadataToFile
   5210  *
   5211  * DESCRIPTION: Dumps tuning metadata to file system
   5212  *
   5213  * PARAMETERS :
   5214  *   @meta           : tuning metadata
   5215  *   @dumpFrameCount : current dump frame count
   5216  *   @enabled        : Enable mask
   5217  *
   5218  *==========================================================================*/
   5219 void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
   5220                                                    uint32_t &dumpFrameCount,
   5221                                                    bool enabled,
   5222                                                    const char *type,
   5223                                                    uint32_t frameNumber)
   5224 {
   5225     uint32_t frm_num = 0;
   5226 
   5227     //Some sanity checks
   5228     if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
   5229         ALOGE("%s : Tuning sensor data size bigger than expected %d: %d",
   5230               __func__,
   5231               meta.tuning_sensor_data_size,
   5232               TUNING_SENSOR_DATA_MAX);
   5233         return;
   5234     }
   5235 
   5236     if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
   5237         ALOGE("%s : Tuning VFE data size bigger than expected %d: %d",
   5238               __func__,
   5239               meta.tuning_vfe_data_size,
   5240               TUNING_VFE_DATA_MAX);
   5241         return;
   5242     }
   5243 
   5244     if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
   5245         ALOGE("%s : Tuning CPP data size bigger than expected %d: %d",
   5246               __func__,
   5247               meta.tuning_cpp_data_size,
   5248               TUNING_CPP_DATA_MAX);
   5249         return;
   5250     }
   5251 
   5252     if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
   5253         ALOGE("%s : Tuning CAC data size bigger than expected %d: %d",
   5254               __func__,
   5255               meta.tuning_cac_data_size,
   5256               TUNING_CAC_DATA_MAX);
   5257         return;
   5258     }
   5259     //
   5260 
   5261     if(enabled){
   5262         char timeBuf[FILENAME_MAX];
   5263         char buf[FILENAME_MAX];
   5264         memset(buf, 0, sizeof(buf));
   5265         memset(timeBuf, 0, sizeof(timeBuf));
   5266         time_t current_time;
   5267         struct tm * timeinfo;
   5268         time (&current_time);
   5269         timeinfo = localtime (&current_time);
   5270         if (timeinfo != NULL) {
   5271             strftime (timeBuf, sizeof(timeBuf),
   5272                     QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
   5273         }
   5274         String8 filePath(timeBuf);
   5275         snprintf(buf,
   5276                 sizeof(buf),
   5277                 "%dm_%s_%d.bin",
   5278                 dumpFrameCount,
   5279                 type,
   5280                 frameNumber);
   5281         filePath.append(buf);
   5282         int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
   5283         if (file_fd >= 0) {
   5284             ssize_t written_len = 0;
   5285             meta.tuning_data_version = TUNING_DATA_VERSION;
   5286             void *data = (void *)((uint8_t *)&meta.tuning_data_version);
   5287             written_len += write(file_fd, data, sizeof(uint32_t));
   5288             data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
   5289             CDBG("tuning_sensor_data_size %d",(int)(*(int *)data));
   5290             written_len += write(file_fd, data, sizeof(uint32_t));
   5291             data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
   5292             CDBG("tuning_vfe_data_size %d",(int)(*(int *)data));
   5293             written_len += write(file_fd, data, sizeof(uint32_t));
   5294             data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
   5295             CDBG("tuning_cpp_data_size %d",(int)(*(int *)data));
   5296             written_len += write(file_fd, data, sizeof(uint32_t));
   5297             data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
   5298             CDBG("tuning_cac_data_size %d",(int)(*(int *)data));
   5299             written_len += write(file_fd, data, sizeof(uint32_t));
   5300             meta.tuning_mod3_data_size = 0;
   5301             data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
   5302             CDBG("tuning_mod3_data_size %d",(int)(*(int *)data));
   5303             written_len += write(file_fd, data, sizeof(uint32_t));
   5304             size_t total_size = meta.tuning_sensor_data_size;
   5305             data = (void *)((uint8_t *)&meta.data);
   5306             written_len += write(file_fd, data, total_size);
   5307             total_size = meta.tuning_vfe_data_size;
   5308             data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
   5309             written_len += write(file_fd, data, total_size);
   5310             total_size = meta.tuning_cpp_data_size;
   5311             data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
   5312             written_len += write(file_fd, data, total_size);
   5313             total_size = meta.tuning_cac_data_size;
   5314             data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
   5315             written_len += write(file_fd, data, total_size);
   5316             close(file_fd);
   5317         }else {
   5318             ALOGE("%s: fail to open file for metadata dumping", __func__);
   5319         }
   5320     }
   5321 }
   5322 
   5323 /*===========================================================================
   5324  * FUNCTION   : cleanAndSortStreamInfo
   5325  *
   5326  * DESCRIPTION: helper method to clean up invalid streams in stream_info,
   5327  *              and sort them such that raw stream is at the end of the list
   5328  *              This is a workaround for camera daemon constraint.
   5329  *
   5330  * PARAMETERS : None
   5331  *
   5332  *==========================================================================*/
   5333 void QCamera3HardwareInterface::cleanAndSortStreamInfo()
   5334 {
   5335     List<stream_info_t *> newStreamInfo;
   5336 
   5337     /*clean up invalid streams*/
   5338     for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
   5339             it != mStreamInfo.end();) {
   5340         if(((*it)->status) == INVALID){
   5341             QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
   5342             delete channel;
   5343             free(*it);
   5344             it = mStreamInfo.erase(it);
   5345         } else {
   5346             it++;
   5347         }
   5348     }
   5349 
   5350     // Move preview/video/callback/snapshot streams into newList
   5351     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   5352             it != mStreamInfo.end();) {
   5353         if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
   5354                 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
   5355                 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
   5356             newStreamInfo.push_back(*it);
   5357             it = mStreamInfo.erase(it);
   5358         } else
   5359             it++;
   5360     }
   5361     // Move raw streams into newList
   5362     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   5363             it != mStreamInfo.end();) {
   5364         newStreamInfo.push_back(*it);
   5365         it = mStreamInfo.erase(it);
   5366     }
   5367 
   5368     mStreamInfo = newStreamInfo;
   5369 }
   5370 
   5371 /*===========================================================================
   5372  * FUNCTION   : extractJpegMetadata
   5373  *
   5374  * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
   5375  *              JPEG metadata is cached in HAL, and return as part of capture
   5376  *              result when metadata is returned from camera daemon.
   5377  *
   5378  * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
   5379  *              @request:      capture request
   5380  *
   5381  *==========================================================================*/
   5382 void QCamera3HardwareInterface::extractJpegMetadata(
   5383         CameraMetadata& jpegMetadata,
   5384         const camera3_capture_request_t *request)
   5385 {
   5386     CameraMetadata frame_settings;
   5387     frame_settings = request->settings;
   5388 
   5389     if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
   5390         jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
   5391                 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
   5392                 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
   5393 
   5394     if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
   5395         jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
   5396                 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
   5397                 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
   5398 
   5399     if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
   5400         jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
   5401                 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
   5402                 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
   5403 
   5404     if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
   5405         jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
   5406                 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
   5407                 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
   5408 
   5409     if (frame_settings.exists(ANDROID_JPEG_QUALITY))
   5410         jpegMetadata.update(ANDROID_JPEG_QUALITY,
   5411                 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
   5412                 frame_settings.find(ANDROID_JPEG_QUALITY).count);
   5413 
   5414     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
   5415         jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
   5416                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
   5417                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
   5418 
   5419     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
   5420         int32_t thumbnail_size[2];
   5421         thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
   5422         thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
   5423         if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
   5424             int32_t orientation =
   5425                   frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
   5426             if ((orientation == 90) || (orientation == 270)) {
   5427                //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
   5428                int32_t temp;
   5429                temp = thumbnail_size[0];
   5430                thumbnail_size[0] = thumbnail_size[1];
   5431                thumbnail_size[1] = temp;
   5432             }
   5433          }
   5434          jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
   5435                 thumbnail_size,
   5436                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
   5437     }
   5438 
   5439 }
   5440 
   5441 /*===========================================================================
   5442  * FUNCTION   : convertToRegions
   5443  *
   5444  * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
   5445  *
   5446  * PARAMETERS :
   5447  *   @rect   : cam_rect_t struct to convert
   5448  *   @region : int32_t destination array
   5449  *   @weight : if we are converting from cam_area_t, weight is valid
   5450  *             else weight = -1
   5451  *
   5452  *==========================================================================*/
   5453 void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
   5454         int32_t *region, int weight)
   5455 {
   5456     region[0] = rect.left;
   5457     region[1] = rect.top;
   5458     region[2] = rect.left + rect.width;
   5459     region[3] = rect.top + rect.height;
   5460     if (weight > -1) {
   5461         region[4] = weight;
   5462     }
   5463 }
   5464 
   5465 /*===========================================================================
   5466  * FUNCTION   : convertFromRegions
   5467  *
   5468  * DESCRIPTION: helper method to convert from array to cam_rect_t
   5469  *
   5470  * PARAMETERS :
   5471  *   @rect   : cam_rect_t struct to convert
   5472  *   @region : int32_t destination array
   5473  *   @weight : if we are converting from cam_area_t, weight is valid
   5474  *             else weight = -1
   5475  *
   5476  *==========================================================================*/
   5477 void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
   5478         const camera_metadata_t *settings, uint32_t tag)
   5479 {
   5480     CameraMetadata frame_settings;
   5481     frame_settings = settings;
   5482     int32_t x_min = frame_settings.find(tag).data.i32[0];
   5483     int32_t y_min = frame_settings.find(tag).data.i32[1];
   5484     int32_t x_max = frame_settings.find(tag).data.i32[2];
   5485     int32_t y_max = frame_settings.find(tag).data.i32[3];
   5486     roi.weight = frame_settings.find(tag).data.i32[4];
   5487     roi.rect.left = x_min;
   5488     roi.rect.top = y_min;
   5489     roi.rect.width = x_max - x_min;
   5490     roi.rect.height = y_max - y_min;
   5491 }
   5492 
   5493 /*===========================================================================
   5494  * FUNCTION   : resetIfNeededROI
   5495  *
   5496  * DESCRIPTION: helper method to reset the roi if it is greater than scaler
   5497  *              crop region
   5498  *
   5499  * PARAMETERS :
   5500  *   @roi       : cam_area_t struct to resize
   5501  *   @scalerCropRegion : cam_crop_region_t region to compare against
   5502  *
   5503  *
   5504  *==========================================================================*/
   5505 bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
   5506                                                  const cam_crop_region_t* scalerCropRegion)
   5507 {
   5508     int32_t roi_x_max = roi->rect.width + roi->rect.left;
   5509     int32_t roi_y_max = roi->rect.height + roi->rect.top;
   5510     int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
   5511     int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
   5512 
   5513     /* According to spec weight = 0 is used to indicate roi needs to be disabled
   5514      * without having this check the calculations below to validate if the roi
   5515      * is inside scalar crop region will fail resulting in the roi not being
   5516      * reset causing algorithm to continue to use stale roi window
   5517      */
   5518     if (roi->weight == 0) {
   5519         return true;
   5520     }
   5521 
   5522     if ((roi_x_max < scalerCropRegion->left) ||
   5523         // right edge of roi window is left of scalar crop's left edge
   5524         (roi_y_max < scalerCropRegion->top)  ||
   5525         // bottom edge of roi window is above scalar crop's top edge
   5526         (roi->rect.left > crop_x_max) ||
   5527         // left edge of roi window is beyond(right) of scalar crop's right edge
   5528         (roi->rect.top > crop_y_max)){
   5529         // top edge of roi windo is above scalar crop's top edge
   5530         return false;
   5531     }
   5532     if (roi->rect.left < scalerCropRegion->left) {
   5533         roi->rect.left = scalerCropRegion->left;
   5534     }
   5535     if (roi->rect.top < scalerCropRegion->top) {
   5536         roi->rect.top = scalerCropRegion->top;
   5537     }
   5538     if (roi_x_max > crop_x_max) {
   5539         roi_x_max = crop_x_max;
   5540     }
   5541     if (roi_y_max > crop_y_max) {
   5542         roi_y_max = crop_y_max;
   5543     }
   5544     roi->rect.width = roi_x_max - roi->rect.left;
   5545     roi->rect.height = roi_y_max - roi->rect.top;
   5546     return true;
   5547 }
   5548 
   5549 /*===========================================================================
   5550  * FUNCTION   : convertLandmarks
   5551  *
   5552  * DESCRIPTION: helper method to extract the landmarks from face detection info
   5553  *
   5554  * PARAMETERS :
   5555  *   @face   : cam_rect_t struct to convert
   5556  *   @landmarks : int32_t destination array
   5557  *
   5558  *
   5559  *==========================================================================*/
   5560 void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t *landmarks)
   5561 {
   5562     landmarks[0] = (int32_t)face.left_eye_center.x;
   5563     landmarks[1] = (int32_t)face.left_eye_center.y;
   5564     landmarks[2] = (int32_t)face.right_eye_center.x;
   5565     landmarks[3] = (int32_t)face.right_eye_center.y;
   5566     landmarks[4] = (int32_t)face.mouth_center.x;
   5567     landmarks[5] = (int32_t)face.mouth_center.y;
   5568 }
   5569 
   5570 #define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
   5571 /*===========================================================================
   5572  * FUNCTION   : initCapabilities
   5573  *
   5574  * DESCRIPTION: initialize camera capabilities in static data struct
   5575  *
   5576  * PARAMETERS :
   5577  *   @cameraId  : camera Id
   5578  *
   5579  * RETURN     : int32_t type of status
   5580  *              NO_ERROR  -- success
   5581  *              none-zero failure code
   5582  *==========================================================================*/
int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
{
    // Open the camera once, query its capability blob through a shared
    // heap buffer, and cache a copy in gCamCapability[cameraId].
    // Cleanup uses a goto ladder: each label undoes the steps that
    // succeeded before the failure, in reverse order.
    int rc = 0;
    mm_camera_vtbl_t *cameraHandle = NULL;
    QCamera3HeapMemory *capabilityHeap = NULL;

    rc = camera_open((uint8_t)cameraId, &cameraHandle);
    if (rc || !cameraHandle) {
        ALOGE("%s: camera_open failed. rc = %d, cameraHandle = %p", __func__, rc, cameraHandle);
        goto open_failed;
    }

    capabilityHeap = new QCamera3HeapMemory(1);
    if (capabilityHeap == NULL) {
        ALOGE("%s: creation of capabilityHeap failed", __func__);
        goto heap_creation_failed;
    }
    /* Allocate memory for capability buffer */
    rc = capabilityHeap->allocate(sizeof(cam_capability_t));
    if(rc != OK) {
        ALOGE("%s: No memory for cappability", __func__);
        goto allocate_failed;
    }

    /* Map memory for capability buffer */
    // The daemon fills this mapped buffer in query_capability() below.
    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
                                capabilityHeap->getFd(0),
                                sizeof(cam_capability_t));
    if(rc < 0) {
        ALOGE("%s: failed to map capability buffer", __func__);
        goto map_failed;
    }

    /* Query Capability */
    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
    if(rc < 0) {
        ALOGE("%s: failed to query capability",__func__);
        goto query_failed;
    }
    // Copy out of the shared buffer into a process-lifetime cache; the
    // heap/mapping below are torn down regardless of success.
    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
    if (!gCamCapability[cameraId]) {
        ALOGE("%s: out of memory", __func__);
        goto query_failed;
    }
    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
                                        sizeof(cam_capability_t));
    rc = 0;

    // Fall-through cleanup: success path also unmaps, frees the heap and
    // closes the camera — only the cached copy survives.
query_failed:
    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
map_failed:
    capabilityHeap->deallocate();
allocate_failed:
    delete capabilityHeap;
heap_creation_failed:
    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
    cameraHandle = NULL;
open_failed:
    return rc;
}
   5646 
   5647 /*==========================================================================
   5648  * FUNCTION   : get3Aversion
   5649  *
   5650  * DESCRIPTION: get the Q3A S/W version
   5651  *
   5652  * PARAMETERS :
   5653  *  @sw_version: Reference of Q3A structure which will hold version info upon
   5654  *               return
   5655  *
   5656  * RETURN     : None
   5657  *
   5658  *==========================================================================*/
   5659 void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
   5660 {
   5661     if(gCamCapability[mCameraId])
   5662         sw_version = gCamCapability[mCameraId]->q3a_version;
   5663     else
   5664         ALOGE("%s:Capability structure NULL!", __func__);
   5665 }
   5666 
   5667 
   5668 /*===========================================================================
   5669  * FUNCTION   : initParameters
   5670  *
   5671  * DESCRIPTION: initialize camera parameters
   5672  *
   5673  * PARAMETERS :
   5674  *
   5675  * RETURN     : int32_t type of status
   5676  *              NO_ERROR  -- success
   5677  *              none-zero failure code
   5678  *==========================================================================*/
   5679 int QCamera3HardwareInterface::initParameters()
   5680 {
   5681     int rc = 0;
   5682 
   5683     //Allocate Set Param Buffer
   5684     mParamHeap = new QCamera3HeapMemory(1);
   5685     rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
   5686     if(rc != OK) {
   5687         rc = NO_MEMORY;
   5688         ALOGE("Failed to allocate SETPARM Heap memory");
   5689         delete mParamHeap;
   5690         mParamHeap = NULL;
   5691         return rc;
   5692     }
   5693 
   5694     //Map memory for parameters buffer
   5695     rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
   5696             CAM_MAPPING_BUF_TYPE_PARM_BUF,
   5697             mParamHeap->getFd(0),
   5698             sizeof(metadata_buffer_t));
   5699     if(rc < 0) {
   5700         ALOGE("%s:failed to map SETPARM buffer",__func__);
   5701         rc = FAILED_TRANSACTION;
   5702         mParamHeap->deallocate();
   5703         delete mParamHeap;
   5704         mParamHeap = NULL;
   5705         return rc;
   5706     }
   5707 
   5708     mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
   5709 
   5710     mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
   5711     return rc;
   5712 }
   5713 
   5714 /*===========================================================================
   5715  * FUNCTION   : deinitParameters
   5716  *
   5717  * DESCRIPTION: de-initialize camera parameters
   5718  *
   5719  * PARAMETERS :
   5720  *
   5721  * RETURN     : NONE
   5722  *==========================================================================*/
void QCamera3HardwareInterface::deinitParameters()
{
    // Tear down in the reverse order of initParameters(): unmap the
    // parameter buffer from the daemon before freeing its backing heap.
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_PARM_BUF);

    mParamHeap->deallocate();
    delete mParamHeap;
    mParamHeap = NULL;

    // mParameters aliased memory inside mParamHeap; just clear the pointer.
    mParameters = NULL;

    free(mPrevParameters);
    mPrevParameters = NULL;
}
   5737 
   5738 /*===========================================================================
   5739  * FUNCTION   : calcMaxJpegSize
   5740  *
   5741  * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
   5742  *
   5743  * PARAMETERS :
   5744  *
   5745  * RETURN     : max_jpeg_size
   5746  *==========================================================================*/
   5747 size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
   5748 {
   5749     size_t max_jpeg_size = 0;
   5750     size_t temp_width, temp_height;
   5751     size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
   5752             MAX_SIZES_CNT);
   5753     for (size_t i = 0; i < count; i++) {
   5754         temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
   5755         temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
   5756         if (temp_width * temp_height > max_jpeg_size ) {
   5757             max_jpeg_size = temp_width * temp_height;
   5758         }
   5759     }
   5760     max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
   5761     return max_jpeg_size;
   5762 }
   5763 
   5764 /*===========================================================================
   5765  * FUNCTION   : getMaxRawSize
   5766  *
   5767  * DESCRIPTION: Fetches maximum raw size supported by the cameraId
   5768  *
   5769  * PARAMETERS :
   5770  *
   5771  * RETURN     : Largest supported Raw Dimension
   5772  *==========================================================================*/
   5773 cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
   5774 {
   5775     int max_width = 0;
   5776     cam_dimension_t maxRawSize;
   5777 
   5778     memset(&maxRawSize, 0, sizeof(cam_dimension_t));
   5779     for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
   5780         if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
   5781             max_width = gCamCapability[camera_id]->raw_dim[i].width;
   5782             maxRawSize = gCamCapability[camera_id]->raw_dim[i];
   5783         }
   5784     }
   5785     return maxRawSize;
   5786 }
   5787 
   5788 
   5789 /*===========================================================================
   5790  * FUNCTION   : calcMaxJpegDim
   5791  *
   5792  * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
   5793  *
   5794  * PARAMETERS :
   5795  *
   5796  * RETURN     : max_jpeg_dim
   5797  *==========================================================================*/
   5798 cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
   5799 {
   5800     cam_dimension_t max_jpeg_dim;
   5801     cam_dimension_t curr_jpeg_dim;
   5802     max_jpeg_dim.width = 0;
   5803     max_jpeg_dim.height = 0;
   5804     curr_jpeg_dim.width = 0;
   5805     curr_jpeg_dim.height = 0;
   5806     for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
   5807         curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
   5808         curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
   5809         if (curr_jpeg_dim.width * curr_jpeg_dim.height >
   5810             max_jpeg_dim.width * max_jpeg_dim.height ) {
   5811             max_jpeg_dim.width = curr_jpeg_dim.width;
   5812             max_jpeg_dim.height = curr_jpeg_dim.height;
   5813         }
   5814     }
   5815     return max_jpeg_dim;
   5816 }
   5817 
   5818 /*===========================================================================
   5819  * FUNCTION   : addStreamConfig
   5820  *
   5821  * DESCRIPTION: adds the stream configuration to the array
   5822  *
   5823  * PARAMETERS :
   5824  * @available_stream_configs : pointer to stream configuration array
   5825  * @scalar_format            : scalar format
   5826  * @dim                      : configuration dimension
   5827  * @config_type              : input or output configuration type
   5828  *
   5829  * RETURN     : NONE
   5830  *==========================================================================*/
   5831 void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
   5832         int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
   5833 {
   5834     available_stream_configs.add(scalar_format);
   5835     available_stream_configs.add(dim.width);
   5836     available_stream_configs.add(dim.height);
   5837     available_stream_configs.add(config_type);
   5838 }
   5839 
   5840 
   5841 /*===========================================================================
   5842  * FUNCTION   : initStaticMetadata
   5843  *
   5844  * DESCRIPTION: initialize the static metadata
   5845  *
   5846  * PARAMETERS :
   5847  *   @cameraId  : camera Id
   5848  *
   5849  * RETURN     : int32_t type of status
   5850  *              0  -- success
   5851  *              non-zero failure code
   5852  *==========================================================================*/
   5853 int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
   5854 {
   5855     int rc = 0;
   5856     CameraMetadata staticInfo;
   5857     size_t count = 0;
   5858     bool limitedDevice = false;
   5859     char prop[PROPERTY_VALUE_MAX];
   5860 
   5861     /* If sensor is YUV sensor (no raw support) or if per-frame control is not
   5862      * guaranteed, its advertised as limited device */
   5863     limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
   5864             (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type);
   5865 
   5866     uint8_t supportedHwLvl = limitedDevice ?
   5867             ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
   5868             // No capability check done here to distinguish LEVEL_FULL from
   5869             // LEVEL_3 - assuming this HAL will not run on devices that only
   5870             // meet FULL spec
   5871             ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
   5872 
   5873     staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
   5874             &supportedHwLvl, 1);
   5875 
   5876     bool facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
   5877     /*HAL 3 only*/
   5878     staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
   5879                     &gCamCapability[cameraId]->min_focus_distance, 1);
   5880 
   5881     staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
   5882                     &gCamCapability[cameraId]->hyper_focal_distance, 1);
   5883 
   5884     /*should be using focal lengths but sensor doesn't provide that info now*/
   5885     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
   5886                       &gCamCapability[cameraId]->focal_length,
   5887                       1);
   5888 
   5889     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
   5890                       gCamCapability[cameraId]->apertures,
   5891                       gCamCapability[cameraId]->apertures_count);
   5892 
   5893     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
   5894                 gCamCapability[cameraId]->filter_densities,
   5895                 gCamCapability[cameraId]->filter_densities_count);
   5896 
   5897 
   5898     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
   5899                       (uint8_t *)gCamCapability[cameraId]->optical_stab_modes,
   5900                       gCamCapability[cameraId]->optical_stab_modes_count);
   5901 
   5902     int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
   5903             gCamCapability[cameraId]->lens_shading_map_size.height};
   5904     staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
   5905                       lens_shading_map_size,
   5906                       sizeof(lens_shading_map_size)/sizeof(int32_t));
   5907 
   5908     staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
   5909             gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
   5910 
   5911     staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
   5912             gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
   5913 
   5914     staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
   5915             &gCamCapability[cameraId]->max_frame_duration, 1);
   5916 
   5917     camera_metadata_rational baseGainFactor = {
   5918             gCamCapability[cameraId]->base_gain_factor.numerator,
   5919             gCamCapability[cameraId]->base_gain_factor.denominator};
   5920     staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
   5921                       &baseGainFactor, 1);
   5922 
   5923     staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
   5924                      (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
   5925 
   5926     int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
   5927             gCamCapability[cameraId]->pixel_array_size.height};
   5928     staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
   5929                       pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
   5930 
   5931     int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
   5932                                                 gCamCapability[cameraId]->active_array_size.top,
   5933                                                 gCamCapability[cameraId]->active_array_size.width,
   5934                                                 gCamCapability[cameraId]->active_array_size.height};
   5935     staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
   5936                       active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
   5937 
   5938     staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
   5939             &gCamCapability[cameraId]->white_level, 1);
   5940 
   5941     staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
   5942             gCamCapability[cameraId]->black_level_pattern, BLACK_LEVEL_PATTERN_CNT);
   5943 
   5944     bool hasBlackRegions = false;
   5945     if (gCamCapability[cameraId]->optical_black_region_count != 0 &&
   5946             gCamCapability[cameraId]->optical_black_region_count <= MAX_OPTICAL_BLACK_REGIONS) {
   5947         int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
   5948         for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i+=4) {
   5949             // Left
   5950             opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
   5951             //Top
   5952             opticalBlackRegions[i + 1] = gCamCapability[cameraId]->optical_black_regions[i + 1];
   5953             // Width
   5954             opticalBlackRegions[i + 2] = gCamCapability[cameraId]->optical_black_regions[i + 2] -
   5955                     gCamCapability[cameraId]->optical_black_regions[i];
   5956             // Height
   5957             opticalBlackRegions[i + 3] = gCamCapability[cameraId]->optical_black_regions[i + 3] -
   5958                     gCamCapability[cameraId]->optical_black_regions[i + 1];
   5959         }
   5960         staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
   5961                 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
   5962         hasBlackRegions = true;
   5963     }
   5964 
   5965     staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
   5966                       &gCamCapability[cameraId]->flash_charge_duration, 1);
   5967 
   5968     staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
   5969                       &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
   5970 
   5971     uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
   5972             ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
   5973             ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
   5974     staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
   5975             &timestampSource, 1);
   5976 
   5977     staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
   5978                       &gCamCapability[cameraId]->histogram_size, 1);
   5979 
   5980     staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
   5981             &gCamCapability[cameraId]->max_histogram_count, 1);
   5982 
   5983     int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
   5984             gCamCapability[cameraId]->sharpness_map_size.height};
   5985 
   5986     staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
   5987             sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
   5988 
   5989     staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
   5990             &gCamCapability[cameraId]->max_sharpness_map_value, 1);
   5991 
   5992     int32_t scalar_formats[] = {
   5993             ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
   5994             ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
   5995             ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
   5996             ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
   5997             HAL_PIXEL_FORMAT_RAW10,
   5998             HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
   5999     size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(int32_t);
   6000     staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
   6001                       scalar_formats,
   6002                       scalar_formats_count);
   6003 
   6004     int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
   6005     count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
   6006     makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
   6007             count, MAX_SIZES_CNT, available_processed_sizes);
   6008     staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
   6009             available_processed_sizes, count * 2);
   6010 
   6011     int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
   6012     count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
   6013     makeTable(gCamCapability[cameraId]->raw_dim,
   6014             count, MAX_SIZES_CNT, available_raw_sizes);
   6015     staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
   6016             available_raw_sizes, count * 2);
   6017 
   6018     int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
   6019     count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
   6020     makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
   6021             count, MAX_SIZES_CNT, available_fps_ranges);
   6022     staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
   6023             available_fps_ranges, count * 2);
   6024 
   6025     camera_metadata_rational exposureCompensationStep = {
   6026             gCamCapability[cameraId]->exp_compensation_step.numerator,
   6027             gCamCapability[cameraId]->exp_compensation_step.denominator};
   6028     staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
   6029                       &exposureCompensationStep, 1);
   6030 
   6031     Vector<uint8_t> availableVstabModes;
   6032     availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
   6033     char eis_prop[PROPERTY_VALUE_MAX];
   6034     memset(eis_prop, 0, sizeof(eis_prop));
   6035     property_get("persist.camera.eis.enable", eis_prop, "0");
   6036     uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
   6037     if (facingBack && eis_prop_set) {
   6038         availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
   6039     }
   6040     staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
   6041                       availableVstabModes.array(), availableVstabModes.size());
   6042 
   6043     /*HAL 1 and HAL 3 common*/
   6044     float maxZoom = 4;
   6045     staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
   6046             &maxZoom, 1);
   6047 
   6048     uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_FREEFORM;
   6049     staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
   6050 
   6051     int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
   6052     if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
   6053         max3aRegions[2] = 0; /* AF not supported */
   6054     staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
   6055             max3aRegions, 3);
   6056 
   6057     /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
   6058     memset(prop, 0, sizeof(prop));
   6059     property_get("persist.camera.facedetect", prop, "1");
   6060     uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
   6061     CDBG("%s: Support face detection mode: %d",
   6062             __func__, supportedFaceDetectMode);
   6063 
   6064     int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
   6065     Vector<uint8_t> availableFaceDetectModes;
   6066     availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
   6067     if (supportedFaceDetectMode == 1) {
   6068         availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
   6069     } else if (supportedFaceDetectMode == 2) {
   6070         availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
   6071     } else if (supportedFaceDetectMode == 3) {
   6072         availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
   6073         availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
   6074     } else {
   6075         maxFaces = 0;
   6076     }
   6077     staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
   6078             availableFaceDetectModes.array(),
   6079             availableFaceDetectModes.size());
   6080     staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
   6081             (int32_t *)&maxFaces, 1);
   6082 
   6083     int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
   6084                                            gCamCapability[cameraId]->exposure_compensation_max};
   6085     staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
   6086             exposureCompensationRange,
   6087             sizeof(exposureCompensationRange)/sizeof(int32_t));
   6088 
   6089     uint8_t lensFacing = (facingBack) ?
   6090             ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
   6091     staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
   6092 
   6093     staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
   6094                       available_thumbnail_sizes,
   6095                       sizeof(available_thumbnail_sizes)/sizeof(int32_t));
   6096 
   6097     /*all sizes will be clubbed into this tag*/
   6098     int32_t available_jpeg_sizes[MAX_SIZES_CNT * 2];
   6099     count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
   6100     size_t jpeg_sizes_cnt = filterJpegSizes(available_jpeg_sizes, available_processed_sizes,
   6101             count * 2, MAX_SIZES_CNT * 2, gCamCapability[cameraId]->active_array_size,
   6102             gCamCapability[cameraId]->max_downscale_factor);
   6103     /*android.scaler.availableStreamConfigurations*/
   6104     size_t max_stream_configs_size = count * scalar_formats_count * 4;
   6105     Vector<int32_t> available_stream_configs;
   6106     cam_dimension_t active_array_dim;
   6107     active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
   6108     active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
   6109     /* Add input/output stream configurations for each scalar formats*/
   6110     for (size_t j = 0; j < scalar_formats_count; j++) {
   6111         switch (scalar_formats[j]) {
   6112         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
   6113         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
   6114         case HAL_PIXEL_FORMAT_RAW10:
   6115             for (size_t i = 0; i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
   6116                 addStreamConfig(available_stream_configs, scalar_formats[j],
   6117                         gCamCapability[cameraId]->raw_dim[i],
   6118                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
   6119             }
   6120             break;
   6121         case HAL_PIXEL_FORMAT_BLOB:
   6122             cam_dimension_t jpeg_size;
   6123             for (size_t i = 0; i < jpeg_sizes_cnt/2; i++) {
   6124                 jpeg_size.width  = available_jpeg_sizes[i*2];
   6125                 jpeg_size.height = available_jpeg_sizes[i*2+1];
   6126                 addStreamConfig(available_stream_configs, scalar_formats[j],
   6127                         jpeg_size,
   6128                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
   6129             }
   6130             break;
   6131         case HAL_PIXEL_FORMAT_YCbCr_420_888:
   6132         case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
   6133         default:
   6134             cam_dimension_t largest_picture_size;
   6135             memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
   6136             for (size_t i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
   6137                 addStreamConfig(available_stream_configs, scalar_formats[j],
   6138                         gCamCapability[cameraId]->picture_sizes_tbl[i],
   6139                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
   6140                 /* Book keep largest */
   6141                 if (gCamCapability[cameraId]->picture_sizes_tbl[i].width
   6142                         >= largest_picture_size.width &&
   6143                         gCamCapability[cameraId]->picture_sizes_tbl[i].height
   6144                         >= largest_picture_size.height)
   6145                     largest_picture_size = gCamCapability[cameraId]->picture_sizes_tbl[i];
   6146             }
   6147             /*For below 2 formats we also support i/p streams for reprocessing advertise those*/
   6148             if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
   6149                     scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
   6150                  addStreamConfig(available_stream_configs, scalar_formats[j],
   6151                          largest_picture_size,
   6152                          ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
   6153             }
   6154             break;
   6155         }
   6156     }
   6157 
   6158     staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
   6159                       available_stream_configs.array(), available_stream_configs.size());
   6160     static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
   6161     staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
   6162 
   6163     static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
   6164     staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
   6165 
   6166     /* android.scaler.availableMinFrameDurations */
   6167     int64_t available_min_durations[max_stream_configs_size];
   6168     size_t idx = 0;
   6169     for (size_t j = 0; j < scalar_formats_count; j++) {
   6170         switch (scalar_formats[j]) {
   6171         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
   6172         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
   6173         case HAL_PIXEL_FORMAT_RAW10:
   6174             for (size_t i = 0; i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
   6175                 available_min_durations[idx] = scalar_formats[j];
   6176                 available_min_durations[idx+1] =
   6177                     gCamCapability[cameraId]->raw_dim[i].width;
   6178                 available_min_durations[idx+2] =
   6179                     gCamCapability[cameraId]->raw_dim[i].height;
   6180                 available_min_durations[idx+3] =
   6181                     gCamCapability[cameraId]->raw_min_duration[i];
   6182                 idx+=4;
   6183             }
   6184             break;
   6185         default:
   6186             for (size_t i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
   6187                 available_min_durations[idx] = scalar_formats[j];
   6188                 available_min_durations[idx+1] =
   6189                     gCamCapability[cameraId]->picture_sizes_tbl[i].width;
   6190                 available_min_durations[idx+2] =
   6191                     gCamCapability[cameraId]->picture_sizes_tbl[i].height;
   6192                 available_min_durations[idx+3] =
   6193                     gCamCapability[cameraId]->picture_min_duration[i];
   6194                 idx+=4;
   6195             }
   6196             break;
   6197         }
   6198     }
   6199     staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
   6200                       &available_min_durations[0], idx);
   6201 
   6202     Vector<int32_t> available_hfr_configs;
   6203     for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
   6204         int32_t fps = 0;
   6205         switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
   6206         case CAM_HFR_MODE_60FPS:
   6207             fps = 60;
   6208             break;
   6209         case CAM_HFR_MODE_90FPS:
   6210             fps = 90;
   6211             break;
   6212         case CAM_HFR_MODE_120FPS:
   6213             fps = 120;
   6214             break;
   6215         case CAM_HFR_MODE_150FPS:
   6216             fps = 150;
   6217             break;
   6218         case CAM_HFR_MODE_180FPS:
   6219             fps = 180;
   6220             break;
   6221         case CAM_HFR_MODE_210FPS:
   6222             fps = 210;
   6223             break;
   6224         case CAM_HFR_MODE_240FPS:
   6225             fps = 240;
   6226             break;
   6227         case CAM_HFR_MODE_480FPS:
   6228             fps = 480;
   6229             break;
   6230         case CAM_HFR_MODE_OFF:
   6231         case CAM_HFR_MODE_MAX:
   6232         default:
   6233             break;
   6234         }
   6235 
   6236         /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
   6237         if (fps >= MIN_FPS_FOR_BATCH_MODE) {
   6238             /* For each HFR frame rate, need to advertise one variable fps range
   6239              * and one fixed fps range. Eg: for 120 FPS, advertise [30, 120] and
   6240              * [120, 120]. While camcorder preview alone is running [30, 120] is
   6241              * set by the app. When video recording is started, [120, 120] is
   6242              * set. This way sensor configuration does not change when recording
   6243              * is started */
   6244 
   6245             /* (width, height, fps_min, fps_max, batch_size_max) */
   6246             available_hfr_configs.add(
   6247                     gCamCapability[cameraId]->hfr_tbl[i].dim.width);
   6248             available_hfr_configs.add(
   6249                     gCamCapability[cameraId]->hfr_tbl[i].dim.height);
   6250             available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
   6251             available_hfr_configs.add(fps);
   6252             available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
   6253 
   6254             /* (width, height, fps_min, fps_max, batch_size_max) */
   6255             available_hfr_configs.add(
   6256                     gCamCapability[cameraId]->hfr_tbl[i].dim.width);
   6257             available_hfr_configs.add(
   6258                     gCamCapability[cameraId]->hfr_tbl[i].dim.height);
   6259             available_hfr_configs.add(fps);
   6260             available_hfr_configs.add(fps);
   6261             available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
   6262        }
   6263     }
   6264     //Advertise HFR capability only if the property is set
   6265     memset(prop, 0, sizeof(prop));
   6266     property_get("persist.camera.hal3hfr.enable", prop, "1");
   6267     uint8_t hfrEnable = (uint8_t)atoi(prop);
   6268 
   6269     if(hfrEnable && available_hfr_configs.array()) {
   6270         staticInfo.update(
   6271                 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
   6272                 available_hfr_configs.array(), available_hfr_configs.size());
   6273     }
   6274 
   6275     int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
   6276     staticInfo.update(ANDROID_JPEG_MAX_SIZE,
   6277                       &max_jpeg_size, 1);
   6278 
   6279     uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
   6280     size_t size = 0;
   6281     count = CAM_EFFECT_MODE_MAX;
   6282     count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
   6283     for (size_t i = 0; i < count; i++) {
   6284         int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
   6285                 gCamCapability[cameraId]->supported_effects[i]);
   6286         if (NAME_NOT_FOUND != val) {
   6287             avail_effects[size] = (uint8_t)val;
   6288             size++;
   6289         }
   6290     }
   6291     staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
   6292                       avail_effects,
   6293                       size);
   6294 
   6295     uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
   6296     uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
   6297     size_t supported_scene_modes_cnt = 0;
   6298     count = CAM_SCENE_MODE_MAX;
   6299     count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
   6300     for (size_t i = 0; i < count; i++) {
   6301         if (gCamCapability[cameraId]->supported_scene_modes[i] !=
   6302                 CAM_SCENE_MODE_OFF) {
   6303             int val = lookupFwkName(SCENE_MODES_MAP,
   6304                     METADATA_MAP_SIZE(SCENE_MODES_MAP),
   6305                     gCamCapability[cameraId]->supported_scene_modes[i]);
   6306             if (NAME_NOT_FOUND != val) {
   6307                 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
   6308                 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
   6309                 supported_scene_modes_cnt++;
   6310             }
   6311         }
   6312     }
   6313     staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
   6314                       avail_scene_modes,
   6315                       supported_scene_modes_cnt);
   6316 
   6317     uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX  * 3];
   6318     makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
   6319                       supported_scene_modes_cnt,
   6320                       CAM_SCENE_MODE_MAX,
   6321                       scene_mode_overrides,
   6322                       supported_indexes,
   6323                       cameraId);
   6324 
   6325     if (supported_scene_modes_cnt == 0) {
   6326         supported_scene_modes_cnt = 1;
   6327         avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
   6328     }
   6329 
   6330     staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
   6331             scene_mode_overrides, supported_scene_modes_cnt * 3);
   6332 
   6333     uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
   6334                                          ANDROID_CONTROL_MODE_AUTO,
   6335                                          ANDROID_CONTROL_MODE_USE_SCENE_MODE};
   6336     staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
   6337             available_control_modes,
   6338             3);
   6339 
   6340     uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
   6341     size = 0;
   6342     count = CAM_ANTIBANDING_MODE_MAX;
   6343     count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
   6344     for (size_t i = 0; i < count; i++) {
   6345         int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
   6346                 gCamCapability[cameraId]->supported_antibandings[i]);
   6347         if (NAME_NOT_FOUND != val) {
   6348             avail_antibanding_modes[size] = (uint8_t)val;
   6349             size++;
   6350         }
   6351 
   6352     }
   6353     staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
   6354                       avail_antibanding_modes,
   6355                       size);
   6356 
   6357     uint8_t avail_abberation_modes[CAM_COLOR_CORRECTION_ABERRATION_MAX];
   6358     size = 0;
   6359     count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
   6360     count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
   6361     if (0 == count) {
   6362         avail_abberation_modes[0] =
   6363                 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
   6364         size++;
   6365     } else {
   6366         for (size_t i = 0; i < count; i++) {
   6367             int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
   6368                     gCamCapability[cameraId]->aberration_modes[i]);
   6369             if (NAME_NOT_FOUND != val) {
   6370                 avail_abberation_modes[size] = (uint8_t)val;
   6371                 size++;
   6372             } else {
   6373                 ALOGE("%s: Invalid CAC mode %d", __func__,
   6374                         gCamCapability[cameraId]->aberration_modes[i]);
   6375                 break;
   6376             }
   6377         }
   6378 
   6379     }
   6380     staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
   6381             avail_abberation_modes,
   6382             size);
   6383 
   6384     uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
   6385     size = 0;
   6386     count = CAM_FOCUS_MODE_MAX;
   6387     count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
   6388     for (size_t i = 0; i < count; i++) {
   6389         int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
   6390                 gCamCapability[cameraId]->supported_focus_modes[i]);
   6391         if (NAME_NOT_FOUND != val) {
   6392             avail_af_modes[size] = (uint8_t)val;
   6393             size++;
   6394         }
   6395     }
   6396     staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
   6397                       avail_af_modes,
   6398                       size);
   6399 
   6400     uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
   6401     size = 0;
   6402     count = CAM_WB_MODE_MAX;
   6403     count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
   6404     for (size_t i = 0; i < count; i++) {
   6405         int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
   6406                 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
   6407                 gCamCapability[cameraId]->supported_white_balances[i]);
   6408         if (NAME_NOT_FOUND != val) {
   6409             avail_awb_modes[size] = (uint8_t)val;
   6410             size++;
   6411         }
   6412     }
   6413     staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
   6414                       avail_awb_modes,
   6415                       size);
   6416 
   6417     uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
   6418     count = CAM_FLASH_FIRING_LEVEL_MAX;
   6419     count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
   6420             count);
   6421     for (size_t i = 0; i < count; i++) {
   6422         available_flash_levels[i] =
   6423                 gCamCapability[cameraId]->supported_firing_levels[i];
   6424     }
   6425     staticInfo.update(ANDROID_FLASH_FIRING_POWER,
   6426             available_flash_levels, count);
   6427 
   6428     uint8_t flashAvailable;
   6429     if (gCamCapability[cameraId]->flash_available)
   6430         flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
   6431     else
   6432         flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
   6433     staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
   6434             &flashAvailable, 1);
   6435 
   6436     Vector<uint8_t> avail_ae_modes;
   6437     count = CAM_AE_MODE_MAX;
   6438     count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
   6439     for (size_t i = 0; i < count; i++) {
   6440         avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
   6441     }
   6442     if (flashAvailable) {
   6443         avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
   6444         avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
   6445     }
   6446     staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
   6447                       avail_ae_modes.array(),
   6448                       avail_ae_modes.size());
   6449 
   6450     int32_t sensitivity_range[2];
   6451     sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
   6452     sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
   6453     staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
   6454                       sensitivity_range,
   6455                       sizeof(sensitivity_range) / sizeof(int32_t));
   6456 
   6457     staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
   6458                       &gCamCapability[cameraId]->max_analog_sensitivity,
   6459                       1);
   6460 
   6461     int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
   6462     staticInfo.update(ANDROID_SENSOR_ORIENTATION,
   6463                       &sensor_orientation,
   6464                       1);
   6465 
   6466     int32_t max_output_streams[] = {
   6467             MAX_STALLING_STREAMS,
   6468             MAX_PROCESSED_STREAMS,
   6469             MAX_RAW_STREAMS};
   6470     staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
   6471             max_output_streams,
   6472             sizeof(max_output_streams)/sizeof(max_output_streams[0]));
   6473 
   6474     uint8_t avail_leds = 0;
   6475     staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
   6476                       &avail_leds, 0);
   6477 
   6478     uint8_t focus_dist_calibrated;
   6479     int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
   6480             gCamCapability[cameraId]->focus_dist_calibrated);
   6481     if (NAME_NOT_FOUND != val) {
   6482         focus_dist_calibrated = (uint8_t)val;
   6483         staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
   6484                      &focus_dist_calibrated, 1);
   6485     }
   6486 
   6487     int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
   6488     size = 0;
   6489     count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
   6490             MAX_TEST_PATTERN_CNT);
   6491     for (size_t i = 0; i < count; i++) {
   6492         int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
   6493                 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
   6494         if (NAME_NOT_FOUND != testpatternMode) {
   6495             avail_testpattern_modes[size] = testpatternMode;
   6496             size++;
   6497         }
   6498     }
   6499     staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
   6500                       avail_testpattern_modes,
   6501                       size);
   6502 
   6503     uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
   6504     staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
   6505                       &max_pipeline_depth,
   6506                       1);
   6507 
   6508     int32_t partial_result_count = PARTIAL_RESULT_COUNT;
   6509     staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
   6510                       &partial_result_count,
   6511                        1);
   6512 
   6513     int32_t max_stall_duration = MAX_REPROCESS_STALL;
   6514     staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
   6515 
   6516     Vector<uint8_t> available_capabilities;
   6517     available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
   6518     available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
   6519     available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
   6520     available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
   6521     available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
   6522     available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
   6523     available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
   6524     if (hfrEnable && available_hfr_configs.array()) {
   6525         available_capabilities.add(
   6526                 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
   6527     }
   6528 
   6529     if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
   6530         available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
   6531     }
   6532     staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
   6533             available_capabilities.array(),
   6534             available_capabilities.size());
   6535 
   6536     //aeLockAvailable to be set to true if capabilities has MANUAL_SENSOR and/or
   6537     //BURST_CAPTURE.
   6538     uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
   6539             ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
   6540 
   6541     staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
   6542             &aeLockAvailable, 1);
   6543 
   6544     //awbLockAvailable to be set to true if capabilities has
   6545     //MANUAL_POST_PROCESSING and/or BURST_CAPTURE.
   6546     uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
   6547             ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
   6548 
   6549     staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
   6550             &awbLockAvailable, 1);
   6551 
   6552     int32_t max_input_streams = 1;
   6553     staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
   6554                       &max_input_streams,
   6555                       1);
   6556 
   6557     /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
   6558     int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
   6559             HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
   6560             HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
   6561             HAL_PIXEL_FORMAT_YCbCr_420_888};
   6562     staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
   6563                       io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
   6564 
   6565     int32_t max_latency = (limitedDevice) ?
   6566             CAM_MAX_SYNC_LATENCY : ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
   6567     staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
   6568                       &max_latency,
   6569                       1);
   6570 
   6571     uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
   6572                                            ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
   6573     staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
   6574             available_hot_pixel_modes,
   6575             sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
   6576 
   6577     uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
   6578                                          ANDROID_SHADING_MODE_FAST,
   6579                                          ANDROID_SHADING_MODE_HIGH_QUALITY};
   6580     staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
   6581                       available_shading_modes,
   6582                       3);
   6583 
   6584     uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
   6585                                                   ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
   6586     staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
   6587                       available_lens_shading_map_modes,
   6588                       2);
   6589 
   6590     uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
   6591                                       ANDROID_EDGE_MODE_FAST,
   6592                                       ANDROID_EDGE_MODE_HIGH_QUALITY,
   6593                                       ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
   6594     staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
   6595             available_edge_modes,
   6596             sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
   6597 
   6598     uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
   6599                                            ANDROID_NOISE_REDUCTION_MODE_FAST,
   6600                                            ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
   6601                                            ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
   6602                                            ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
   6603     staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
   6604             available_noise_red_modes,
   6605             sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
   6606 
   6607     uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
   6608                                          ANDROID_TONEMAP_MODE_FAST,
   6609                                          ANDROID_TONEMAP_MODE_HIGH_QUALITY};
   6610     staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
   6611             available_tonemap_modes,
   6612             sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
   6613 
   6614     uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
   6615     staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
   6616             available_hot_pixel_map_modes,
   6617             sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
   6618 
   6619     val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
   6620             gCamCapability[cameraId]->reference_illuminant1);
   6621     if (NAME_NOT_FOUND != val) {
   6622         uint8_t fwkReferenceIlluminant = (uint8_t)val;
   6623         staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
   6624     }
   6625 
   6626     val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
   6627             gCamCapability[cameraId]->reference_illuminant2);
   6628     if (NAME_NOT_FOUND != val) {
   6629         uint8_t fwkReferenceIlluminant = (uint8_t)val;
   6630         staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
   6631     }
   6632 
   6633     staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
   6634             (void *)gCamCapability[cameraId]->forward_matrix1,
   6635             FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
   6636 
   6637     staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
   6638             (void *)gCamCapability[cameraId]->forward_matrix2,
   6639             FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
   6640 
   6641     staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
   6642             (void *)gCamCapability[cameraId]->color_transform1,
   6643             COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
   6644 
   6645     staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
   6646             (void *)gCamCapability[cameraId]->color_transform2,
   6647             COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
   6648 
   6649     staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
   6650             (void *)gCamCapability[cameraId]->calibration_transform1,
   6651             CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
   6652 
   6653     staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
   6654             (void *)gCamCapability[cameraId]->calibration_transform2,
   6655             CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
   6656 
   6657     int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
   6658        ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
   6659        ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
   6660        ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
   6661        ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
   6662        ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
   6663        ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
   6664        ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
   6665        ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
   6666        ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
   6667        ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
   6668        ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
   6669        ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
   6670        ANDROID_JPEG_GPS_COORDINATES,
   6671        ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
   6672        ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
   6673        ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
   6674        ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
   6675        ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
   6676        ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
   6677        ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
   6678        ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
   6679        ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
   6680        ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
   6681        ANDROID_STATISTICS_FACE_DETECT_MODE,
   6682        ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
   6683        ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
   6684        ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
   6685        ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE};
   6686 
   6687     size_t request_keys_cnt =
   6688             sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
   6689     Vector<int32_t> available_request_keys;
   6690     available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
   6691     if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
   6692         available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
   6693     }
   6694 
   6695     staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
   6696             available_request_keys.array(), available_request_keys.size());
   6697 
   6698     int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
   6699        ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
   6700        ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
   6701        ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
   6702        ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
   6703        ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
   6704        ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
   6705        ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
   6706        ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
   6707        ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
   6708        ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
   6709        ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
   6710        ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
   6711        ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
   6712        ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
   6713        ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
   6714        ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
   6715        ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
   6716        ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
   6717        ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
   6718        ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
   6719        ANDROID_STATISTICS_FACE_SCORES,
   6720        ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL,
   6721        ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
   6722        ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST };
   6723     size_t result_keys_cnt =
   6724             sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
   6725 
   6726     Vector<int32_t> available_result_keys;
   6727     available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
   6728     if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
   6729         available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
   6730     }
   6731     if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
   6732        available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
   6733        available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
   6734     }
   6735     if (supportedFaceDetectMode == 1) {
   6736         available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
   6737         available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
   6738     } else if ((supportedFaceDetectMode == 2) ||
   6739             (supportedFaceDetectMode == 3)) {
   6740         available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
   6741         available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
   6742     }
   6743     staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
   6744             available_result_keys.array(), available_result_keys.size());
   6745 
   6746     int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
   6747        ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
   6748        ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
   6749        ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
   6750        ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
   6751        ANDROID_SCALER_CROPPING_TYPE,
   6752        ANDROID_SYNC_MAX_LATENCY,
   6753        ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
   6754        ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
   6755        ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
   6756        ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
   6757        ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
   6758        ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
   6759        ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
   6760        ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
   6761        ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
   6762        ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
   6763        ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
   6764        ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
   6765        ANDROID_LENS_FACING,
   6766        ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
   6767        ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
   6768        ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
   6769        ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
   6770        ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
   6771        ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
   6772        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
   6773        /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
   6774        ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
   6775        ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
   6776        ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
   6777        ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
   6778        ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
   6779        ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
   6780        ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
   6781        ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
   6782        ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
   6783        ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
   6784        ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
   6785        ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
   6786        ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
   6787        ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
   6788        ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
   6789        ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
   6790        ANDROID_EDGE_AVAILABLE_EDGE_MODES,
   6791        ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
   6792        ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
   6793        ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
   6794        ANDROID_TONEMAP_MAX_CURVE_POINTS,
   6795        ANDROID_CONTROL_AVAILABLE_MODES,
   6796        ANDROID_CONTROL_AE_LOCK_AVAILABLE,
   6797        ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
   6798        ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
   6799        ANDROID_SHADING_AVAILABLE_MODES,
   6800        ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL };
   6801 
   6802     Vector<int32_t> available_characteristics_keys;
   6803     available_characteristics_keys.appendArray(characteristics_keys_basic,
   6804             sizeof(characteristics_keys_basic)/sizeof(int32_t));
   6805     if (hasBlackRegions) {
   6806         available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
   6807     }
   6808     staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
   6809                       available_characteristics_keys.array(),
   6810                       available_characteristics_keys.size());
   6811 
   6812     /*available stall durations depend on the hw + sw and will be different for different devices */
   6813     /*have to add for raw after implementation*/
   6814     int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
   6815     size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
   6816 
   6817     count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
   6818     size_t raw_count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt,
   6819             MAX_SIZES_CNT);
   6820     size_t available_stall_size = count * 4;
   6821     int64_t available_stall_durations[available_stall_size];
   6822     idx = 0;
   6823     for (uint32_t j = 0; j < stall_formats_count; j++) {
   6824        if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
   6825           for (uint32_t i = 0; i < count; i++) {
   6826              available_stall_durations[idx]   = stall_formats[j];
   6827              available_stall_durations[idx+1] = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
   6828              available_stall_durations[idx+2] = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
   6829              available_stall_durations[idx+3] = gCamCapability[cameraId]->jpeg_stall_durations[i];
   6830              idx+=4;
   6831           }
   6832        } else {
   6833           for (uint32_t i = 0; i < raw_count; i++) {
   6834              available_stall_durations[idx]   = stall_formats[j];
   6835              available_stall_durations[idx+1] = gCamCapability[cameraId]->raw_dim[i].width;
   6836              available_stall_durations[idx+2] = gCamCapability[cameraId]->raw_dim[i].height;
   6837              available_stall_durations[idx+3] = gCamCapability[cameraId]->raw16_stall_durations[i];
   6838              idx+=4;
   6839           }
   6840        }
   6841     }
   6842     staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
   6843                       available_stall_durations,
   6844                       idx);
   6845     //QCAMERA3_OPAQUE_RAW
   6846     uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
   6847     cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
   6848     switch (gCamCapability[cameraId]->opaque_raw_fmt) {
   6849     case LEGACY_RAW:
   6850         if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
   6851             fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
   6852         else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
   6853             fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
   6854         else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
   6855             fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
   6856         raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
   6857         break;
   6858     case MIPI_RAW:
   6859         if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
   6860             fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
   6861         else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
   6862             fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
   6863         else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
   6864             fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
   6865         raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
   6866         break;
   6867     default:
   6868         ALOGE("%s: unknown opaque_raw_format %d", __func__,
   6869                 gCamCapability[cameraId]->opaque_raw_fmt);
   6870         break;
   6871     }
   6872     staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
   6873 
   6874     int32_t strides[3*raw_count];
   6875     for (size_t i = 0; i < raw_count; i++) {
   6876         cam_stream_buf_plane_info_t buf_planes;
   6877         strides[i*3] = gCamCapability[cameraId]->raw_dim[i].width;
   6878         strides[i*3+1] = gCamCapability[cameraId]->raw_dim[i].height;
   6879         mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
   6880             &gCamCapability[cameraId]->padding_info, &buf_planes);
   6881         strides[i*3+2] = buf_planes.plane_info.mp[0].stride;
   6882     }
   6883     staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides,
   6884             3*raw_count);
   6885 
   6886     gStaticMetadata[cameraId] = staticInfo.release();
   6887     return rc;
   6888 }
   6889 
   6890 /*===========================================================================
   6891  * FUNCTION   : makeTable
   6892  *
   6893  * DESCRIPTION: make a table of sizes
   6894  *
   6895  * PARAMETERS :
   6896  *
   6897  *
   6898  *==========================================================================*/
   6899 void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
   6900         size_t max_size, int32_t *sizeTable)
   6901 {
   6902     size_t j = 0;
   6903     if (size > max_size) {
   6904        size = max_size;
   6905     }
   6906     for (size_t i = 0; i < size; i++) {
   6907         sizeTable[j] = dimTable[i].width;
   6908         sizeTable[j+1] = dimTable[i].height;
   6909         j+=2;
   6910     }
   6911 }
   6912 
   6913 /*===========================================================================
   6914  * FUNCTION   : makeFPSTable
   6915  *
   6916  * DESCRIPTION: make a table of fps ranges
   6917  *
   6918  * PARAMETERS :
   6919  *
   6920  *==========================================================================*/
   6921 void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
   6922         size_t max_size, int32_t *fpsRangesTable)
   6923 {
   6924     size_t j = 0;
   6925     if (size > max_size) {
   6926        size = max_size;
   6927     }
   6928     for (size_t i = 0; i < size; i++) {
   6929         fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
   6930         fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
   6931         j+=2;
   6932     }
   6933 }
   6934 
   6935 /*===========================================================================
   6936  * FUNCTION   : makeOverridesList
   6937  *
   6938  * DESCRIPTION: make a list of scene mode overrides
   6939  *
   6940  * PARAMETERS :
   6941  *
   6942  *
   6943  *==========================================================================*/
   6944 void QCamera3HardwareInterface::makeOverridesList(
   6945         cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
   6946         uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
   6947 {
   6948     /*daemon will give a list of overrides for all scene modes.
   6949       However we should send the fwk only the overrides for the scene modes
   6950       supported by the framework*/
   6951     size_t j = 0;
   6952     if (size > max_size) {
   6953        size = max_size;
   6954     }
   6955     size_t focus_count = CAM_FOCUS_MODE_MAX;
   6956     focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
   6957             focus_count);
   6958     for (size_t i = 0; i < size; i++) {
   6959         bool supt = false;
   6960         size_t index = supported_indexes[i];
   6961         overridesList[j] = gCamCapability[camera_id]->flash_available ?
   6962                 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
   6963         int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
   6964                 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
   6965                 overridesTable[index].awb_mode);
   6966         if (NAME_NOT_FOUND != val) {
   6967             overridesList[j+1] = (uint8_t)val;
   6968         }
   6969         uint8_t focus_override = overridesTable[index].af_mode;
   6970         for (size_t k = 0; k < focus_count; k++) {
   6971            if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
   6972               supt = true;
   6973               break;
   6974            }
   6975         }
   6976         if (supt) {
   6977             val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
   6978                     focus_override);
   6979             if (NAME_NOT_FOUND != val) {
   6980                 overridesList[j+2] = (uint8_t)val;
   6981             }
   6982         } else {
   6983            overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
   6984         }
   6985         j+=3;
   6986     }
   6987 }
   6988 
   6989 /*===========================================================================
   6990  * FUNCTION   : filterJpegSizes
   6991  *
   6992  * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
   6993  *              could be downscaled to
   6994  *
   6995  * PARAMETERS :
   6996  *
   6997  * RETURN     : length of jpegSizes array
   6998  *==========================================================================*/
   6999 
   7000 size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
   7001         size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
   7002         uint8_t downscale_factor)
   7003 {
   7004     if (0 == downscale_factor) {
   7005         downscale_factor = 1;
   7006     }
   7007 
   7008     int32_t min_width = active_array_size.width / downscale_factor;
   7009     int32_t min_height = active_array_size.height / downscale_factor;
   7010     size_t jpegSizesCnt = 0;
   7011     if (processedSizesCnt > maxCount) {
   7012         processedSizesCnt = maxCount;
   7013     }
   7014     for (size_t i = 0; i < processedSizesCnt; i+=2) {
   7015         if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
   7016             jpegSizes[jpegSizesCnt] = processedSizes[i];
   7017             jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
   7018             jpegSizesCnt += 2;
   7019         }
   7020     }
   7021     return jpegSizesCnt;
   7022 }
   7023 
   7024 /*===========================================================================
   7025  * FUNCTION   : getPreviewHalPixelFormat
   7026  *
   7027  * DESCRIPTION: convert the format to type recognized by framework
   7028  *
   7029  * PARAMETERS : format : the format from backend
   7030  *
   7031  ** RETURN    : format recognized by framework
   7032  *
   7033  *==========================================================================*/
   7034 int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
   7035 {
   7036     int32_t halPixelFormat;
   7037 
   7038     switch (format) {
   7039     case CAM_FORMAT_YUV_420_NV12:
   7040         halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
   7041         break;
   7042     case CAM_FORMAT_YUV_420_NV21:
   7043         halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
   7044         break;
   7045     case CAM_FORMAT_YUV_420_NV21_ADRENO:
   7046         halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
   7047         break;
   7048     case CAM_FORMAT_YUV_420_YV12:
   7049         halPixelFormat = HAL_PIXEL_FORMAT_YV12;
   7050         break;
   7051     case CAM_FORMAT_YUV_422_NV16:
   7052     case CAM_FORMAT_YUV_422_NV61:
   7053     default:
   7054         halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
   7055         break;
   7056     }
   7057     return halPixelFormat;
   7058 }
   7059 
   7060 /*===========================================================================
   7061  * FUNCTION   : computeNoiseModelEntryS
   7062  *
   7063  * DESCRIPTION: function to map a given sensitivity to the S noise
   7064  *              model parameters in the DNG noise model.
   7065  *
   7066  * PARAMETERS : sens : the sensor sensitivity
   7067  *
 * RETURN     : S (sensor amplification) noise
   7069  *
   7070  *==========================================================================*/
   7071 double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
   7072     double s = gCamCapability[mCameraId]->gradient_S * sens +
   7073             gCamCapability[mCameraId]->offset_S;
   7074     return ((s < 0.0) ? 0.0 : s);
   7075 }
   7076 
   7077 /*===========================================================================
   7078  * FUNCTION   : computeNoiseModelEntryO
   7079  *
   7080  * DESCRIPTION: function to map a given sensitivity to the O noise
   7081  *              model parameters in the DNG noise model.
   7082  *
   7083  * PARAMETERS : sens : the sensor sensitivity
   7084  *
 * RETURN     : O (sensor readout) noise
   7086  *
   7087  *==========================================================================*/
   7088 double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
   7089     int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
   7090     double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
   7091             1.0 : (1.0 * sens / max_analog_sens);
   7092     double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
   7093             gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
   7094     return ((o < 0.0) ? 0.0 : o);
   7095 }
   7096 
   7097 /*===========================================================================
   7098  * FUNCTION   : getSensorSensitivity
   7099  *
   7100  * DESCRIPTION: convert iso_mode to an integer value
   7101  *
   7102  * PARAMETERS : iso_mode : the iso_mode supported by sensor
   7103  *
 * RETURN     : sensitivity supported by sensor
   7105  *
   7106  *==========================================================================*/
   7107 int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
   7108 {
   7109     int32_t sensitivity;
   7110 
   7111     switch (iso_mode) {
   7112     case CAM_ISO_MODE_100:
   7113         sensitivity = 100;
   7114         break;
   7115     case CAM_ISO_MODE_200:
   7116         sensitivity = 200;
   7117         break;
   7118     case CAM_ISO_MODE_400:
   7119         sensitivity = 400;
   7120         break;
   7121     case CAM_ISO_MODE_800:
   7122         sensitivity = 800;
   7123         break;
   7124     case CAM_ISO_MODE_1600:
   7125         sensitivity = 1600;
   7126         break;
   7127     default:
   7128         sensitivity = -1;
   7129         break;
   7130     }
   7131     return sensitivity;
   7132 }
   7133 
   7134 /*===========================================================================
   7135  * FUNCTION   : getCamInfo
   7136  *
   7137  * DESCRIPTION: query camera capabilities
   7138  *
   7139  * PARAMETERS :
   7140  *   @cameraId  : camera Id
   7141  *   @info      : camera info struct to be filled in with camera capabilities
   7142  *
   7143  * RETURN     : int type of status
   7144  *              NO_ERROR  -- success
   7145  *              none-zero failure code
   7146  *==========================================================================*/
int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
        struct camera_info *info)
{
    ATRACE_CALL();
    int rc = 0;

    // gCamLock serializes the lazy initialization of the process-wide
    // capability and static-metadata tables shared across instances.
    pthread_mutex_lock(&gCamLock);
    if (NULL == gCamCapability[cameraId]) {
        rc = initCapabilities(cameraId);
        if (rc < 0) {
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    if (NULL == gStaticMetadata[cameraId]) {
        rc = initStaticMetadata(cameraId);
        if (rc < 0) {
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    // Map the backend sensor position onto the framework facing value.
    // On an unknown position rc is set to -1 but the remaining info fields
    // are still populated; the error is returned at the end.
    switch(gCamCapability[cameraId]->position) {
    case CAM_POSITION_BACK:
        info->facing = CAMERA_FACING_BACK;
        break;

    case CAM_POSITION_FRONT:
        info->facing = CAMERA_FACING_FRONT;
        break;

    default:
        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
        rc = -1;
        break;
    }


    info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
    info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
    info->static_camera_characteristics = gStaticMetadata[cameraId];

    //For now assume both cameras can operate independently.
    info->conflicting_devices = NULL;
    info->conflicting_devices_length = 0;

    //resource cost is 100 * MIN(1.0, m/M),
    //where m is throughput requirement with maximum stream configuration
    //and M is CPP maximum throughput.
    // m is approximated as (#processed streams) * active-array pixels * max fps.
    float max_fps = 0.0;
    for (uint32_t i = 0;
            i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
        if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
            max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
    }
    float ratio = 1.0 * MAX_PROCESSED_STREAMS *
            gCamCapability[cameraId]->active_array_size.width *
            gCamCapability[cameraId]->active_array_size.height * max_fps /
            gCamCapability[cameraId]->max_pixel_bandwidth;
    info->resource_cost = 100 * MIN(1.0, ratio);
    ALOGI("%s: camera %d resource cost is %d", __func__, cameraId,
            info->resource_cost);

    pthread_mutex_unlock(&gCamLock);
    return rc;
}
   7214 
   7215 /*===========================================================================
   7216  * FUNCTION   : translateCapabilityToMetadata
   7217  *
   7218  * DESCRIPTION: translate the capability into camera_metadata_t
   7219  *
   7220  * PARAMETERS : type of the request
   7221  *
   7222  *
   7223  * RETURN     : success: camera_metadata_t*
   7224  *              failure: NULL
   7225  *
   7226  *==========================================================================*/
camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
{
    // Default request templates are built once per type and cached.
    if (mDefaultMetadata[type] != NULL) {
        return mDefaultMetadata[type];
    }
    //first time we are handling this request
    //fill up the metadata structure using the wrapper class
    CameraMetadata settings;
    //translate from cam_capability_t to camera_metadata_tag_t
    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
    int32_t defaultRequestID = 0;
    settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);

    /* OIS disable */
    char ois_prop[PROPERTY_VALUE_MAX];
    memset(ois_prop, 0, sizeof(ois_prop));
    property_get("persist.camera.ois.disable", ois_prop, "0");
    uint8_t ois_disable = (uint8_t)atoi(ois_prop);

    /* Force video to use OIS */
    char videoOisProp[PROPERTY_VALUE_MAX];
    memset(videoOisProp, 0, sizeof(videoOisProp));
    property_get("persist.camera.ois.video", videoOisProp, "1");
    uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);

    // EIS enable/disable
    char eis_prop[PROPERTY_VALUE_MAX];
    memset(eis_prop, 0, sizeof(eis_prop));
    property_get("persist.camera.eis.enable", eis_prop, "0");
    const uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);

    // Hybrid AE enable/disable
    char hybrid_ae_prop[PROPERTY_VALUE_MAX];
    memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
    property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
    const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);

    const bool facingBack = gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
    // This is a bit hacky. EIS is enabled only when the above setprop
    // is set to non-zero value and on back camera (for 2015 Nexus).
    // Ideally, we should rely on m_bEisEnable, but we cannot guarantee
    // configureStream is called before this function. In other words,
    // we cannot guarantee the app will call configureStream before
    // calling createDefaultRequest.
    const bool eisEnabled = facingBack && eis_prop_set;

    // Per-template defaults; the switch below tailors capture intent,
    // AF mode, OIS/EIS, CAC, edge, noise reduction and tonemap per type.
    uint8_t controlIntent = 0;
    uint8_t focusMode;
    uint8_t vsMode;
    uint8_t optStabMode;
    uint8_t cacMode;
    uint8_t edge_mode;
    uint8_t noise_red_mode;
    uint8_t tonemap_mode;
    vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
    optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
    switch (type) {
      case CAMERA3_TEMPLATE_PREVIEW:
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
        edge_mode = ANDROID_EDGE_MODE_FAST;
        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
        break;
      case CAMERA3_TEMPLATE_STILL_CAPTURE:
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
        edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
        tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
        break;
      case CAMERA3_TEMPLATE_VIDEO_RECORD:
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
        if (eisEnabled) {
            vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON;
        }
        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
        edge_mode = ANDROID_EDGE_MODE_FAST;
        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
        if (forceVideoOis)
            optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
        break;
      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
        if (eisEnabled) {
            vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON;
        }
        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
        edge_mode = ANDROID_EDGE_MODE_FAST;
        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
        if (forceVideoOis)
            optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
        break;
      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
        edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
        break;
      case CAMERA3_TEMPLATE_MANUAL:
        edge_mode = ANDROID_EDGE_MODE_FAST;
        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
        break;
      default:
        edge_mode = ANDROID_EDGE_MODE_FAST;
        noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
        tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
        cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
        break;
    }
    settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
    settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
    // A single supported focus mode implies a fixed-focus lens.
    if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
    }
    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);

    // Sensor capability (single supported OIS mode) or the ois_disable
    // property overrides the template's OIS choice above.
    if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
            gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
    else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
            gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
            || ois_disable)
        optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
    settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);

    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
            &gCamCapability[mCameraId]->exposure_compensation_default, 1);

    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);

    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);

    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);

    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);

    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);

    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);

    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);

    /*flash*/
    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);

    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
    settings.update(ANDROID_FLASH_FIRING_POWER,
            &flashFiringLevel, 1);

    /* lens */
    float default_aperture = gCamCapability[mCameraId]->apertures[0];
    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);

    if (gCamCapability[mCameraId]->filter_densities_count) {
        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
                        gCamCapability[mCameraId]->filter_densities_count);
    }

    float default_focal_length = gCamCapability[mCameraId]->focal_length;
    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);

    // NOTE(review): ANDROID_LENS_FOCUS_DISTANCE is updated again further
    // below with the same value (0); the duplication appears harmless.
    float default_focus_distance = 0;
    settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);

    static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
    settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);

    static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
    settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);

    static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
    settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);

    /* face detection (default to OFF) */
    static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
    settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);

    static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
    settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);

    static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
    settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);

    static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
    settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);

    // NOTE(review): LENS_SHADING_MAP_MODE and BLACK_LEVEL_LOCK are both
    // updated again later in this function; the later updates take effect.
    static const uint8_t lensShadingMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMode, 1);

    static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);

    /* Exposure time(Update the Min Exposure Time)*/
    int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
    settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);

    /* frame duration */
    static const int64_t default_frame_duration = NSEC_PER_33MSEC;
    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);

    /* sensitivity */
    static const int32_t default_sensitivity = 100;
    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);

    /*edge mode*/
    settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);

    /*noise reduction mode*/
    settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);

    /*color correction mode*/
    static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
    settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);

    /*transform matrix mode*/
    settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);

    // Default crop region covers the whole active pixel array (no zoom).
    int32_t scaler_crop_region[4];
    scaler_crop_region[0] = 0;
    scaler_crop_region[1] = 0;
    scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
    scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
    settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);

    static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
    settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);

    /*focus distance*/
    float focus_distance = 0.0;
    settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);

    /*target fps range: use maximum range for picture, and maximum fixed range for video*/
    float max_range = 0.0;
    float max_fixed_fps = 0.0;
    int32_t fps_range[2] = {0, 0};
    for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
            i++) {
        float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
            gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
        if (type == CAMERA3_TEMPLATE_PREVIEW ||
                type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
                type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
            // Widest available range for non-video templates.
            if (range > max_range) {
                fps_range[0] =
                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
                fps_range[1] =
                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
                max_range = range;
            }
        } else {
            // Video templates: highest fixed (min==max) fps range.
            if (range < 0.01 && max_fixed_fps <
                    gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
                fps_range[0] =
                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
                fps_range[1] =
                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
                max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
            }
        }
    }
    settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);

    /*precapture trigger*/
    uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
    settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);

    /*af trigger*/
    uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
    settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);

    /* ae & af regions */
    // Region covers the full active array; the trailing 0 is the weight.
    int32_t active_region[] = {
            gCamCapability[mCameraId]->active_array_size.left,
            gCamCapability[mCameraId]->active_array_size.top,
            gCamCapability[mCameraId]->active_array_size.left +
                    gCamCapability[mCameraId]->active_array_size.width,
            gCamCapability[mCameraId]->active_array_size.top +
                    gCamCapability[mCameraId]->active_array_size.height,
            0};
    settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
            sizeof(active_region) / sizeof(active_region[0]));
    settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
            sizeof(active_region) / sizeof(active_region[0]));

    /* black level lock */
    uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);

    /* lens shading map mode */
    // RAW-capable sensors report the shading map by default, overriding
    // the OFF value set earlier.
    uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
    if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
        shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
    }
    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);

    //special defaults for manual template
    if (type == CAMERA3_TEMPLATE_MANUAL) {
        static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
        settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);

        static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
        settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);

        static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
        settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);

        static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
        settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);

        static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
        settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);

        static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
        settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
    }


    /* TNR
     * We'll use this location to determine which modes TNR will be set.
     * We will enable TNR to be on if either of the Preview/Video stream requires TNR
     * This is not to be confused with linking on a per stream basis that decision
     * is still on per-session basis and will be handled as part of config stream
     */
    uint8_t tnr_enable = 0;

    if (m_bTnrPreview || m_bTnrVideo) {

        switch (type) {
            case CAMERA3_TEMPLATE_VIDEO_RECORD:
            case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
                    tnr_enable = 1;
                    break;

            default:
                    tnr_enable = 0;
                    break;
        }

        int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
        settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
        settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);

        CDBG("%s: TNR:%d with process plate %d for template:%d",
                            __func__, tnr_enable, tnr_process_type, type);
    }

    /* CDS default */
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.CDS", prop, "Auto");
    cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
    cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
    if (CAM_CDS_MODE_MAX == cds_mode) {
        cds_mode = CAM_CDS_MODE_AUTO;
    }
    m_CdsPreference = cds_mode;

    /* Disabling CDS in templates which have TNR enabled*/
    if (tnr_enable)
        cds_mode = CAM_CDS_MODE_OFF;

    int32_t mode = cds_mode;
    settings.update(QCAMERA3_CDS_MODE, &mode, 1);

    /* hybrid ae */
    settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);

    // Cache the built template; ownership of the released buffer stays
    // with mDefaultMetadata.
    mDefaultMetadata[type] = settings.release();

    return mDefaultMetadata[type];
}
   7630 
   7631 /*===========================================================================
   7632  * FUNCTION   : setFrameParameters
   7633  *
   7634  * DESCRIPTION: set parameters per frame as requested in the metadata from
   7635  *              framework
   7636  *
   7637  * PARAMETERS :
   7638  *   @request   : request that needs to be serviced
   7639  *   @streamID : Stream ID of all the requested streams
   7640  *   @blob_request: Whether this request is a blob request or not
   7641  *
   7642  * RETURN     : success: NO_ERROR
   7643  *              failure:
   7644  *==========================================================================*/
   7645 int QCamera3HardwareInterface::setFrameParameters(
   7646                     camera3_capture_request_t *request,
   7647                     cam_stream_ID_t streamID,
   7648                     int blob_request,
   7649                     uint32_t snapshotStreamId)
   7650 {
   7651     /*translate from camera_metadata_t type to parm_type_t*/
   7652     int rc = 0;
   7653     int32_t hal_version = CAM_HAL_V3;
   7654 
   7655     clear_metadata_buffer(mParameters);
   7656     if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
   7657         ALOGE("%s: Failed to set hal version in the parameters", __func__);
   7658         return BAD_VALUE;
   7659     }
   7660 
   7661     /*we need to update the frame number in the parameters*/
   7662     if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
   7663             request->frame_number)) {
   7664         ALOGE("%s: Failed to set the frame number in the parameters", __func__);
   7665         return BAD_VALUE;
   7666     }
   7667 
   7668     /* Update stream id of all the requested buffers */
   7669     if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamID)) {
   7670         ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
   7671         return BAD_VALUE;
   7672     }
   7673 
   7674     if (mUpdateDebugLevel) {
   7675         uint32_t dummyDebugLevel = 0;
   7676         /* The value of dummyDebugLevel is irrelavent. On
   7677          * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
   7678         if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
   7679                 dummyDebugLevel)) {
   7680             ALOGE("%s: Failed to set UPDATE_DEBUG_LEVEL", __func__);
   7681             return BAD_VALUE;
   7682         }
   7683         mUpdateDebugLevel = false;
   7684     }
   7685 
   7686     if(request->settings != NULL){
   7687         rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
   7688         if (blob_request)
   7689             memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
   7690     }
   7691 
   7692     return rc;
   7693 }
   7694 
   7695 /*===========================================================================
   7696  * FUNCTION   : setReprocParameters
   7697  *
   7698  * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
   7699  *              return it.
   7700  *
   7701  * PARAMETERS :
   7702  *   @request   : request that needs to be serviced
   7703  *
   7704  * RETURN     : success: NO_ERROR
   7705  *              failure:
   7706  *==========================================================================*/
   7707 int32_t QCamera3HardwareInterface::setReprocParameters(
   7708         camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
   7709         uint32_t snapshotStreamId)
   7710 {
   7711     /*translate from camera_metadata_t type to parm_type_t*/
   7712     int rc = 0;
   7713 
   7714     if (NULL == request->settings){
   7715         ALOGE("%s: Reprocess settings cannot be NULL", __func__);
   7716         return BAD_VALUE;
   7717     }
   7718 
   7719     if (NULL == reprocParam) {
   7720         ALOGE("%s: Invalid reprocessing metadata buffer", __func__);
   7721         return BAD_VALUE;
   7722     }
   7723     clear_metadata_buffer(reprocParam);
   7724 
   7725     /*we need to update the frame number in the parameters*/
   7726     if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
   7727             request->frame_number)) {
   7728         ALOGE("%s: Failed to set the frame number in the parameters", __func__);
   7729         return BAD_VALUE;
   7730     }
   7731 
   7732     rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
   7733     if (rc < 0) {
   7734         ALOGE("%s: Failed to translate reproc request", __func__);
   7735         return rc;
   7736     }
   7737 
   7738     CameraMetadata frame_settings;
   7739     frame_settings = request->settings;
   7740     if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
   7741             frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
   7742         int32_t *crop_count =
   7743                 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
   7744         int32_t *crop_data =
   7745                 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
   7746         int32_t *roi_map =
   7747                 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
   7748         if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
   7749             cam_crop_data_t crop_meta;
   7750             memset(&crop_meta, 0, sizeof(cam_crop_data_t));
   7751             crop_meta.num_of_streams = 1;
   7752             crop_meta.crop_info[0].crop.left   = crop_data[0];
   7753             crop_meta.crop_info[0].crop.top    = crop_data[1];
   7754             crop_meta.crop_info[0].crop.width  = crop_data[2];
   7755             crop_meta.crop_info[0].crop.height = crop_data[3];
   7756 
   7757             crop_meta.crop_info[0].roi_map.left =
   7758                     roi_map[0];
   7759             crop_meta.crop_info[0].roi_map.top =
   7760                     roi_map[1];
   7761             crop_meta.crop_info[0].roi_map.width =
   7762                     roi_map[2];
   7763             crop_meta.crop_info[0].roi_map.height =
   7764                     roi_map[3];
   7765 
   7766             if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
   7767                 rc = BAD_VALUE;
   7768             }
   7769             CDBG("%s: Found reprocess crop data for stream %p %dx%d, %dx%d",
   7770                     __func__,
   7771                     request->input_buffer->stream,
   7772                     crop_meta.crop_info[0].crop.left,
   7773                     crop_meta.crop_info[0].crop.top,
   7774                     crop_meta.crop_info[0].crop.width,
   7775                     crop_meta.crop_info[0].crop.height);
   7776             CDBG("%s: Found reprocess roi map data for stream %p %dx%d, %dx%d",
   7777                     __func__,
   7778                     request->input_buffer->stream,
   7779                     crop_meta.crop_info[0].roi_map.left,
   7780                     crop_meta.crop_info[0].roi_map.top,
   7781                     crop_meta.crop_info[0].roi_map.width,
   7782                     crop_meta.crop_info[0].roi_map.height);
   7783             } else {
   7784                 ALOGE("%s: Invalid reprocess crop count %d!", __func__, *crop_count);
   7785             }
   7786     } else {
   7787         ALOGE("%s: No crop data from matching output stream", __func__);
   7788     }
   7789 
   7790     /* These settings are not needed for regular requests so handle them specially for
   7791        reprocess requests; information needed for EXIF tags */
   7792     if (frame_settings.exists(ANDROID_FLASH_MODE)) {
   7793         int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
   7794                     (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
   7795         if (NAME_NOT_FOUND != val) {
   7796             uint32_t flashMode = (uint32_t)val;
   7797             if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
   7798                 rc = BAD_VALUE;
   7799             }
   7800         } else {
   7801             ALOGE("%s: Could not map fwk flash mode %d to correct hal flash mode", __func__,
   7802                     frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
   7803         }
   7804     } else {
   7805         CDBG_HIGH("%s: No flash mode in reprocess settings", __func__);
   7806     }
   7807 
   7808     if (frame_settings.exists(ANDROID_FLASH_STATE)) {
   7809         int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
   7810         if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
   7811             rc = BAD_VALUE;
   7812         }
   7813     } else {
   7814         CDBG_HIGH("%s: No flash state in reprocess settings", __func__);
   7815     }
   7816 
   7817     return rc;
   7818 }
   7819 
   7820 /*===========================================================================
   7821  * FUNCTION   : saveRequestSettings
   7822  *
   7823  * DESCRIPTION: Add any settings that might have changed to the request settings
   7824  *              and save the settings to be applied on the frame
   7825  *
   7826  * PARAMETERS :
   7827  *   @jpegMetadata : the extracted and/or modified jpeg metadata
   7828  *   @request      : request with initial settings
   7829  *
   7830  * RETURN     :
   7831  * camera_metadata_t* : pointer to the saved request settings
   7832  *==========================================================================*/
   7833 camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
   7834         const CameraMetadata &jpegMetadata,
   7835         camera3_capture_request_t *request)
   7836 {
   7837     camera_metadata_t *resultMetadata;
   7838     CameraMetadata camMetadata;
   7839     camMetadata = request->settings;
   7840 
   7841     if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
   7842         int32_t thumbnail_size[2];
   7843         thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
   7844         thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
   7845         camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
   7846                 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
   7847     }
   7848 
   7849     resultMetadata = camMetadata.release();
   7850     return resultMetadata;
   7851 }
   7852 
   7853 /*===========================================================================
   7854  * FUNCTION   : setHalFpsRange
   7855  *
   7856  * DESCRIPTION: set FPS range parameter
   7857  *
   7858  *
   7859  * PARAMETERS :
   7860  *   @settings    : Metadata from framework
   7861  *   @hal_metadata: Metadata buffer
   7862  *
   7863  *
   7864  * RETURN     : success: NO_ERROR
   7865  *              failure:
   7866  *==========================================================================*/
int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
        metadata_buffer_t *hal_metadata)
{
    int32_t rc = NO_ERROR;
    cam_fps_range_t fps_range;
    // Caller (translateToHalMetadata) verifies that
    // ANDROID_CONTROL_AE_TARGET_FPS_RANGE exists before invoking this
    // function, so the find() results below are valid.
    fps_range.min_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
    fps_range.max_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
    // Video fps defaults to the AE target range; it is overridden below when
    // the session is in constrained high-speed (HFR) mode.
    fps_range.video_min_fps = fps_range.min_fps;
    fps_range.video_max_fps = fps_range.max_fps;

    CDBG("%s: aeTargetFpsRange fps: [%f %f]", __func__,
            fps_range.min_fps, fps_range.max_fps);
    /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
     * follows:
     * ---------------------------------------------------------------|
     *      Video stream is absent in configure_streams               |
     *    (Camcorder preview before the first video record            |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     *     Video stream is present in configure_streams               |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     * (camcorder prev   |-------------|-------------|----------------|
     *  after video rec  |  [240, 240] |     240     |  [240, 240]    |
     *  is stopped)      |             |             |                |
     * ---------------------------------------------------------------|
     *       YES         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     * When Video stream is absent in configure_streams,
     * preview fps = sensor_fps / batchsize
     * Eg: for 240fps at batchSize 4, preview = 60fps
     *     for 120fps at batchSize 4, preview = 30fps
     *
     * When video stream is present in configure_streams, preview fps is as per
     * the ratio of preview buffers to video buffers requested in process
     * capture request
     */
    // Reset batching; it is recomputed below only for HFR sessions.
    mBatchSize = 0;
    if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
        // Constrained HFR: lock the entire range to the max (HFR) rate, as
        // laid out in the table above.
        fps_range.min_fps = fps_range.video_max_fps;
        fps_range.video_min_fps = fps_range.video_max_fps;
        // NOTE(review): max_fps (a float) is used as the lookup key here;
        // presumably the framework only requests integral HFR rates --
        // confirm against HFR_MODE_MAP.
        int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
                fps_range.max_fps);
        if (NAME_NOT_FOUND != val) {
            cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
                return BAD_VALUE;
            }

            if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
                /* If batchmode is currently in progress and the fps changes,
                 * set the flag to restart the sensor */
                if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
                        (mHFRVideoFps != fps_range.max_fps)) {
                    mNeedSensorRestart = true;
                }
                mHFRVideoFps = fps_range.max_fps;
                // Batch size scales with the HFR rate (preview fps =
                // sensor_fps / batch size, per the table above), capped at
                // MAX_HFR_BATCH_SIZE.
                mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
                if (mBatchSize > MAX_HFR_BATCH_SIZE) {
                    mBatchSize = MAX_HFR_BATCH_SIZE;
                }
             }
            CDBG("%s: hfrMode: %d batchSize: %d", __func__, hfrMode, mBatchSize);

         }
    } else {
        /* HFR mode is session param in backend/ISP. This should be reset when
         * in non-HFR mode  */
        cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
            return BAD_VALUE;
        }
    }
    // Finally push the (possibly HFR-adjusted) fps range into the batch.
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
        return BAD_VALUE;
    }
    CDBG("%s: fps: [%f %f] vid_fps: [%f %f]", __func__, fps_range.min_fps,
            fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
    return rc;
}
   7960 
   7961 /*===========================================================================
   7962  * FUNCTION   : translateToHalMetadata
   7963  *
   7964  * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
   7965  *
   7966  *
   7967  * PARAMETERS :
   7968  *   @request  : request sent from framework
   7969  *
   7970  *
   7971  * RETURN     : success: NO_ERROR
   7972  *              failure:
   7973  *==========================================================================*/
   7974 int QCamera3HardwareInterface::translateToHalMetadata
   7975                                   (const camera3_capture_request_t *request,
   7976                                    metadata_buffer_t *hal_metadata,
   7977                                    uint32_t snapshotStreamId)
   7978 {
   7979     int rc = 0;
   7980     CameraMetadata frame_settings;
   7981     frame_settings = request->settings;
   7982 
   7983     /* Do not change the order of the following list unless you know what you are
   7984      * doing.
   7985      * The order is laid out in such a way that parameters in the front of the table
   7986      * may be used to override the parameters later in the table. Examples are:
   7987      * 1. META_MODE should precede AEC/AWB/AF MODE
     * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
     * 4. Any mode should precede its corresponding settings
   7991      */
   7992     if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
   7993         uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
   7994         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
   7995             rc = BAD_VALUE;
   7996         }
   7997         rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
   7998         if (rc != NO_ERROR) {
   7999             ALOGE("%s: extractSceneMode failed", __func__);
   8000         }
   8001     }
   8002 
   8003     if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
   8004         uint8_t fwk_aeMode =
   8005             frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
   8006         uint8_t aeMode;
   8007         int32_t redeye;
   8008 
   8009         if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
   8010             aeMode = CAM_AE_MODE_OFF;
   8011         } else {
   8012             aeMode = CAM_AE_MODE_ON;
   8013         }
   8014         if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
   8015             redeye = 1;
   8016         } else {
   8017             redeye = 0;
   8018         }
   8019 
   8020         int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
   8021                 fwk_aeMode);
   8022         if (NAME_NOT_FOUND != val) {
   8023             int32_t flashMode = (int32_t)val;
   8024             ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
   8025         }
   8026 
   8027         ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
   8028         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
   8029             rc = BAD_VALUE;
   8030         }
   8031     }
   8032 
   8033     if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
   8034         uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
   8035         int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
   8036                 fwk_whiteLevel);
   8037         if (NAME_NOT_FOUND != val) {
   8038             uint8_t whiteLevel = (uint8_t)val;
   8039             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
   8040                 rc = BAD_VALUE;
   8041             }
   8042         }
   8043     }
   8044 
   8045     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
   8046         uint8_t fwk_cacMode =
   8047                 frame_settings.find(
   8048                         ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
   8049         int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
   8050                 fwk_cacMode);
   8051         if (NAME_NOT_FOUND != val) {
   8052             cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
   8053             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
   8054                 rc = BAD_VALUE;
   8055             }
   8056         } else {
   8057             ALOGE("%s: Invalid framework CAC mode: %d", __func__, fwk_cacMode);
   8058         }
   8059     }
   8060 
   8061     if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
   8062         uint8_t fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
   8063         int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
   8064                 fwk_focusMode);
   8065         if (NAME_NOT_FOUND != val) {
   8066             uint8_t focusMode = (uint8_t)val;
   8067             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
   8068                 rc = BAD_VALUE;
   8069             }
   8070         }
   8071     }
   8072 
   8073     if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
   8074         float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
   8075         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
   8076                 focalDistance)) {
   8077             rc = BAD_VALUE;
   8078         }
   8079     }
   8080 
   8081     if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
   8082         uint8_t fwk_antibandingMode =
   8083                 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
   8084         int val = lookupHalName(ANTIBANDING_MODES_MAP,
   8085                 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
   8086         if (NAME_NOT_FOUND != val) {
   8087             uint32_t hal_antibandingMode = (uint32_t)val;
   8088             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
   8089                     hal_antibandingMode)) {
   8090                 rc = BAD_VALUE;
   8091             }
   8092         }
   8093     }
   8094 
   8095     if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
   8096         int32_t expCompensation = frame_settings.find(
   8097                 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
   8098         if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
   8099             expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
   8100         if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
   8101             expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
   8102         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
   8103                 expCompensation)) {
   8104             rc = BAD_VALUE;
   8105         }
   8106     }
   8107 
   8108     if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
   8109         uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
   8110         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
   8111             rc = BAD_VALUE;
   8112         }
   8113     }
   8114     if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
   8115         rc = setHalFpsRange(frame_settings, hal_metadata);
   8116         if (rc != NO_ERROR) {
   8117             ALOGE("%s: setHalFpsRange failed", __func__);
   8118         }
   8119     }
   8120 
   8121     if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
   8122         uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
   8123         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
   8124             rc = BAD_VALUE;
   8125         }
   8126     }
   8127 
   8128     if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
   8129         uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
   8130         int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
   8131                 fwk_effectMode);
   8132         if (NAME_NOT_FOUND != val) {
   8133             uint8_t effectMode = (uint8_t)val;
   8134             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
   8135                 rc = BAD_VALUE;
   8136             }
   8137         }
   8138     }
   8139 
   8140     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
   8141         uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
   8142         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
   8143                 colorCorrectMode)) {
   8144             rc = BAD_VALUE;
   8145         }
   8146     }
   8147 
   8148     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
   8149         cam_color_correct_gains_t colorCorrectGains;
   8150         for (size_t i = 0; i < CC_GAINS_COUNT; i++) {
   8151             colorCorrectGains.gains[i] =
   8152                     frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
   8153         }
   8154         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
   8155                 colorCorrectGains)) {
   8156             rc = BAD_VALUE;
   8157         }
   8158     }
   8159 
   8160     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
   8161         cam_color_correct_matrix_t colorCorrectTransform;
   8162         cam_rational_type_t transform_elem;
   8163         size_t num = 0;
   8164         for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
   8165            for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
   8166               transform_elem.numerator =
   8167                  frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
   8168               transform_elem.denominator =
   8169                  frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
   8170               colorCorrectTransform.transform_matrix[i][j] = transform_elem;
   8171               num++;
   8172            }
   8173         }
   8174         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
   8175                 colorCorrectTransform)) {
   8176             rc = BAD_VALUE;
   8177         }
   8178     }
   8179 
   8180     cam_trigger_t aecTrigger;
   8181     aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
   8182     aecTrigger.trigger_id = -1;
   8183     if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
   8184         frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
   8185         aecTrigger.trigger =
   8186             frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
   8187         aecTrigger.trigger_id =
   8188             frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
   8189         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
   8190                 aecTrigger)) {
   8191             rc = BAD_VALUE;
   8192         }
   8193         CDBG("%s: precaptureTrigger: %d precaptureTriggerID: %d", __func__,
   8194                 aecTrigger.trigger, aecTrigger.trigger_id);
   8195     }
   8196 
   8197     /*af_trigger must come with a trigger id*/
   8198     if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
   8199         frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
   8200         cam_trigger_t af_trigger;
   8201         af_trigger.trigger =
   8202             frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
   8203         af_trigger.trigger_id =
   8204             frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
   8205         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
   8206             rc = BAD_VALUE;
   8207         }
   8208         CDBG("%s: AfTrigger: %d AfTriggerID: %d", __func__,
   8209                 af_trigger.trigger, af_trigger.trigger_id);
   8210     }
   8211 
   8212     if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
   8213         int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
   8214         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
   8215             rc = BAD_VALUE;
   8216         }
   8217     }
   8218     if (frame_settings.exists(ANDROID_EDGE_MODE)) {
   8219         cam_edge_application_t edge_application;
   8220         edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
   8221         if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
   8222             edge_application.sharpness = 0;
   8223         } else {
   8224             edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
   8225         }
   8226         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
   8227             rc = BAD_VALUE;
   8228         }
   8229     }
   8230 
   8231     if (frame_settings.exists(ANDROID_FLASH_MODE)) {
   8232         int32_t respectFlashMode = 1;
   8233         if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
   8234             uint8_t fwk_aeMode =
   8235                 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
   8236             if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
   8237                 respectFlashMode = 0;
   8238                 CDBG_HIGH("%s: AE Mode controls flash, ignore android.flash.mode",
   8239                     __func__);
   8240             }
   8241         }
   8242         if (respectFlashMode) {
   8243             int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
   8244                     (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
   8245             CDBG_HIGH("%s: flash mode after mapping %d", __func__, val);
   8246             // To check: CAM_INTF_META_FLASH_MODE usage
   8247             if (NAME_NOT_FOUND != val) {
   8248                 uint8_t flashMode = (uint8_t)val;
   8249                 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
   8250                     rc = BAD_VALUE;
   8251                 }
   8252             }
   8253         }
   8254     }
   8255 
   8256     if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
   8257         uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
   8258         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
   8259             rc = BAD_VALUE;
   8260         }
   8261     }
   8262 
   8263     if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
   8264         int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
   8265         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
   8266                 flashFiringTime)) {
   8267             rc = BAD_VALUE;
   8268         }
   8269     }
   8270 
   8271     if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
   8272         uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
   8273         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
   8274                 hotPixelMode)) {
   8275             rc = BAD_VALUE;
   8276         }
   8277     }
   8278 
   8279     if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
   8280         float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
   8281         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
   8282                 lensAperture)) {
   8283             rc = BAD_VALUE;
   8284         }
   8285     }
   8286 
   8287     if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
   8288         float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
   8289         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
   8290                 filterDensity)) {
   8291             rc = BAD_VALUE;
   8292         }
   8293     }
   8294 
   8295     if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
   8296         float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
   8297         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
   8298                 focalLength)) {
   8299             rc = BAD_VALUE;
   8300         }
   8301     }
   8302 
   8303     if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
   8304         uint8_t optStabMode =
   8305                 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
   8306         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
   8307                 optStabMode)) {
   8308             rc = BAD_VALUE;
   8309         }
   8310     }
   8311 
   8312     if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
   8313         uint8_t videoStabMode =
   8314                 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
   8315         if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
   8316                 videoStabMode)) {
   8317             rc = BAD_VALUE;
   8318         }
   8319     }
   8320 
   8321 
   8322     if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
   8323         uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
   8324         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
   8325                 noiseRedMode)) {
   8326             rc = BAD_VALUE;
   8327         }
   8328     }
   8329 
   8330     if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
   8331         float reprocessEffectiveExposureFactor =
   8332             frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
   8333         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
   8334                 reprocessEffectiveExposureFactor)) {
   8335             rc = BAD_VALUE;
   8336         }
   8337     }
   8338 
   8339     cam_crop_region_t scalerCropRegion;
   8340     bool scalerCropSet = false;
   8341     if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
   8342         scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
   8343         scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
   8344         scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
   8345         scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
   8346 
   8347         // Map coordinate system from active array to sensor output.
   8348         mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
   8349                 scalerCropRegion.width, scalerCropRegion.height);
   8350 
   8351         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
   8352                 scalerCropRegion)) {
   8353             rc = BAD_VALUE;
   8354         }
   8355         scalerCropSet = true;
   8356     }
   8357 
   8358     if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
   8359         int64_t sensorExpTime =
   8360                 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
   8361         CDBG("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
   8362         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
   8363                 sensorExpTime)) {
   8364             rc = BAD_VALUE;
   8365         }
   8366     }
   8367 
   8368     if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
   8369         int64_t sensorFrameDuration =
   8370                 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
   8371         int64_t minFrameDuration = getMinFrameDuration(request);
   8372         sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
   8373         if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
   8374             sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
   8375         CDBG("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
   8376         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
   8377                 sensorFrameDuration)) {
   8378             rc = BAD_VALUE;
   8379         }
   8380     }
   8381 
   8382     if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
   8383         int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
   8384         if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
   8385                 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
   8386         if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
   8387                 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
   8388         CDBG("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
   8389         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
   8390                 sensorSensitivity)) {
   8391             rc = BAD_VALUE;
   8392         }
   8393     }
   8394 
   8395     if (frame_settings.exists(ANDROID_SHADING_MODE)) {
   8396         uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
   8397         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
   8398             rc = BAD_VALUE;
   8399         }
   8400     }
   8401 
   8402     if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
   8403         uint8_t fwk_facedetectMode =
   8404                 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
   8405 
   8406         int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
   8407                 fwk_facedetectMode);
   8408 
   8409         if (NAME_NOT_FOUND != val) {
   8410             uint8_t facedetectMode = (uint8_t)val;
   8411             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
   8412                     facedetectMode)) {
   8413                 rc = BAD_VALUE;
   8414             }
   8415         }
   8416     }
   8417 
   8418     if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
   8419         uint8_t histogramMode =
   8420                 frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
   8421         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
   8422                 histogramMode)) {
   8423             rc = BAD_VALUE;
   8424         }
   8425     }
   8426 
   8427     if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
   8428         uint8_t sharpnessMapMode =
   8429                 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
   8430         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
   8431                 sharpnessMapMode)) {
   8432             rc = BAD_VALUE;
   8433         }
   8434     }
   8435 
   8436     if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
   8437         uint8_t tonemapMode =
   8438                 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
   8439         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
   8440             rc = BAD_VALUE;
   8441         }
   8442     }
   8443     /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
   8444     /*All tonemap channels will have the same number of points*/
   8445     if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
   8446         frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
   8447         frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
   8448         cam_rgb_tonemap_curves tonemapCurves;
   8449         tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
   8450         if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
   8451             ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d",
   8452                     __func__, tonemapCurves.tonemap_points_cnt,
   8453                     CAM_MAX_TONEMAP_CURVE_SIZE);
   8454             tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
   8455         }
   8456 
   8457         /* ch0 = G*/
   8458         size_t point = 0;
   8459         cam_tonemap_curve_t tonemapCurveGreen;
   8460         for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
   8461             for (size_t j = 0; j < 2; j++) {
   8462                tonemapCurveGreen.tonemap_points[i][j] =
   8463                   frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
   8464                point++;
   8465             }
   8466         }
   8467         tonemapCurves.curves[0] = tonemapCurveGreen;
   8468 
   8469         /* ch 1 = B */
   8470         point = 0;
   8471         cam_tonemap_curve_t tonemapCurveBlue;
   8472         for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
   8473             for (size_t j = 0; j < 2; j++) {
   8474                tonemapCurveBlue.tonemap_points[i][j] =
   8475                   frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
   8476                point++;
   8477             }
   8478         }
   8479         tonemapCurves.curves[1] = tonemapCurveBlue;
   8480 
   8481         /* ch 2 = R */
   8482         point = 0;
   8483         cam_tonemap_curve_t tonemapCurveRed;
   8484         for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
   8485             for (size_t j = 0; j < 2; j++) {
   8486                tonemapCurveRed.tonemap_points[i][j] =
   8487                   frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
   8488                point++;
   8489             }
   8490         }
   8491         tonemapCurves.curves[2] = tonemapCurveRed;
   8492 
   8493         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
   8494                 tonemapCurves)) {
   8495             rc = BAD_VALUE;
   8496         }
   8497     }
   8498 
   8499     if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
   8500         uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
   8501         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
   8502                 captureIntent)) {
   8503             rc = BAD_VALUE;
   8504         }
   8505     }
   8506 
   8507     if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
   8508         uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
   8509         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
   8510                 blackLevelLock)) {
   8511             rc = BAD_VALUE;
   8512         }
   8513     }
   8514 
   8515     if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
   8516         uint8_t lensShadingMapMode =
   8517                 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
   8518         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
   8519                 lensShadingMapMode)) {
   8520             rc = BAD_VALUE;
   8521         }
   8522     }
   8523 
   8524     if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
   8525         cam_area_t roi;
   8526         bool reset = true;
   8527         convertFromRegions(roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
   8528 
   8529         // Map coordinate system from active array to sensor output.
   8530         mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
   8531                 roi.rect.height);
   8532 
   8533         if (scalerCropSet) {
   8534             reset = resetIfNeededROI(&roi, &scalerCropRegion);
   8535         }
   8536         if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
   8537             rc = BAD_VALUE;
   8538         }
   8539     }
   8540 
   8541     if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
   8542         cam_area_t roi;
   8543         bool reset = true;
   8544         convertFromRegions(roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
   8545 
   8546         // Map coordinate system from active array to sensor output.
   8547         mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
   8548                 roi.rect.height);
   8549 
   8550         if (scalerCropSet) {
   8551             reset = resetIfNeededROI(&roi, &scalerCropRegion);
   8552         }
   8553         if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
   8554             rc = BAD_VALUE;
   8555         }
   8556     }
   8557 
   8558     if (m_bIs4KVideo) {
   8559         /* Override needed for Video template in case of 4K video */
   8560         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
   8561                 CAM_INTF_PARM_CDS_MODE, m_CdsPreference)) {
   8562             rc = BAD_VALUE;
   8563         }
   8564     } else if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
   8565             frame_settings.exists(QCAMERA3_CDS_MODE)) {
   8566         int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
   8567         if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
   8568             ALOGE("%s: Invalid CDS mode %d!", __func__, *fwk_cds);
   8569         } else {
   8570             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
   8571                     CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
   8572                 rc = BAD_VALUE;
   8573             }
   8574         }
   8575     }
   8576 
   8577     // TNR
   8578     if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
   8579         frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
   8580         uint8_t b_TnrRequested = 0;
   8581         cam_denoise_param_t tnr;
   8582         tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
   8583         tnr.process_plates =
   8584             (cam_denoise_process_type_t)frame_settings.find(
   8585             QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
   8586         b_TnrRequested = tnr.denoise_enable;
   8587         if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
   8588             rc = BAD_VALUE;
   8589         }
   8590     }
   8591 
   8592     if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
   8593         int32_t fwk_testPatternMode =
   8594                 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
   8595         int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
   8596                 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
   8597 
   8598         if (NAME_NOT_FOUND != testPatternMode) {
   8599             cam_test_pattern_data_t testPatternData;
   8600             memset(&testPatternData, 0, sizeof(testPatternData));
   8601             testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
   8602             if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
   8603                     frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
   8604                 int32_t *fwk_testPatternData =
   8605                         frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
   8606                 testPatternData.r = fwk_testPatternData[0];
   8607                 testPatternData.b = fwk_testPatternData[3];
   8608                 switch (gCamCapability[mCameraId]->color_arrangement) {
   8609                     case CAM_FILTER_ARRANGEMENT_RGGB:
   8610                     case CAM_FILTER_ARRANGEMENT_GRBG:
   8611                         testPatternData.gr = fwk_testPatternData[1];
   8612                         testPatternData.gb = fwk_testPatternData[2];
   8613                         break;
   8614                     case CAM_FILTER_ARRANGEMENT_GBRG:
   8615                     case CAM_FILTER_ARRANGEMENT_BGGR:
   8616                         testPatternData.gr = fwk_testPatternData[2];
   8617                         testPatternData.gb = fwk_testPatternData[1];
   8618                         break;
   8619                     default:
   8620                         ALOGE("%s: color arrangement %d is not supported", __func__,
   8621                                 gCamCapability[mCameraId]->color_arrangement);
   8622                         break;
   8623                 }
   8624             }
   8625             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
   8626                     testPatternData)) {
   8627                 rc = BAD_VALUE;
   8628             }
   8629         } else {
   8630             ALOGE("%s: Invalid framework sensor test pattern mode %d", __func__,
   8631                     fwk_testPatternMode);
   8632         }
   8633     }
   8634 
   8635     if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
   8636         size_t count = 0;
   8637         camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
   8638         ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
   8639                 gps_coords.data.d, gps_coords.count, count);
   8640         if (gps_coords.count != count) {
   8641             rc = BAD_VALUE;
   8642         }
   8643     }
   8644 
   8645     if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
   8646         char gps_methods[GPS_PROCESSING_METHOD_SIZE];
   8647         size_t count = 0;
   8648         const char *gps_methods_src = (const char *)
   8649                 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
   8650         memset(gps_methods, '\0', sizeof(gps_methods));
   8651         strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
   8652         ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
   8653                 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
   8654         if (GPS_PROCESSING_METHOD_SIZE != count) {
   8655             rc = BAD_VALUE;
   8656         }
   8657     }
   8658 
   8659     if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
   8660         int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
   8661         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
   8662                 gps_timestamp)) {
   8663             rc = BAD_VALUE;
   8664         }
   8665     }
   8666 
   8667     if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
   8668         int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
   8669         cam_rotation_info_t rotation_info;
   8670         if (orientation == 0) {
   8671            rotation_info.rotation = ROTATE_0;
   8672         } else if (orientation == 90) {
   8673            rotation_info.rotation = ROTATE_90;
   8674         } else if (orientation == 180) {
   8675            rotation_info.rotation = ROTATE_180;
   8676         } else if (orientation == 270) {
   8677            rotation_info.rotation = ROTATE_270;
   8678         }
   8679         rotation_info.streamId = snapshotStreamId;
   8680         ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
   8681         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
   8682             rc = BAD_VALUE;
   8683         }
   8684     }
   8685 
   8686     if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
   8687         uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
   8688         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
   8689             rc = BAD_VALUE;
   8690         }
   8691     }
   8692 
   8693     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
   8694         uint32_t thumb_quality = (uint32_t)
   8695                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
   8696         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
   8697                 thumb_quality)) {
   8698             rc = BAD_VALUE;
   8699         }
   8700     }
   8701 
   8702     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
   8703         cam_dimension_t dim;
   8704         dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
   8705         dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
   8706         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
   8707             rc = BAD_VALUE;
   8708         }
   8709     }
   8710 
   8711     // Internal metadata
   8712     if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
   8713         size_t count = 0;
   8714         camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
   8715         ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
   8716                 privatedata.data.i32, privatedata.count, count);
   8717         if (privatedata.count != count) {
   8718             rc = BAD_VALUE;
   8719         }
   8720     }
   8721 
   8722     if (frame_settings.exists(QCAMERA3_USE_AV_TIMER)) {
   8723         uint8_t* use_av_timer =
   8724                 frame_settings.find(QCAMERA3_USE_AV_TIMER).data.u8;
   8725         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
   8726             rc = BAD_VALUE;
   8727         }
   8728     }
   8729 
   8730     // EV step
   8731     if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
   8732             gCamCapability[mCameraId]->exp_compensation_step)) {
   8733         rc = BAD_VALUE;
   8734     }
   8735 
   8736     // CDS info
   8737     if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
   8738         cam_cds_data_t *cdsData = (cam_cds_data_t *)
   8739                 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
   8740 
   8741         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
   8742                 CAM_INTF_META_CDS_DATA, *cdsData)) {
   8743             rc = BAD_VALUE;
   8744         }
   8745     }
   8746 
   8747     // Hybrid AE
   8748     if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
   8749         uint8_t *hybrid_ae = (uint8_t *)
   8750                 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
   8751 
   8752         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
   8753                 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
   8754             rc = BAD_VALUE;
   8755         }
   8756     }
   8757 
   8758     return rc;
   8759 }
   8760 
   8761 /*===========================================================================
   8762  * FUNCTION   : captureResultCb
   8763  *
   8764  * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
   8765  *
   8766  * PARAMETERS :
   8767  *   @frame  : frame information from mm-camera-interface
   8768  *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
   8769  *   @userdata: userdata
   8770  *
   8771  * RETURN     : NONE
   8772  *==========================================================================*/
   8773 void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
   8774                 camera3_stream_buffer_t *buffer,
   8775                 uint32_t frame_number, bool isInputBuffer, void *userdata)
   8776 {
   8777     QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
   8778     if (hw == NULL) {
   8779         ALOGE("%s: Invalid hw %p", __func__, hw);
   8780         return;
   8781     }
   8782 
   8783     hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
   8784     return;
   8785 }
   8786 
   8787 
   8788 /*===========================================================================
   8789  * FUNCTION   : initialize
   8790  *
   8791  * DESCRIPTION: Pass framework callback pointers to HAL
   8792  *
   8793  * PARAMETERS :
   8794  *
   8795  *
   8796  * RETURN     : Success : 0
   8797  *              Failure: -ENODEV
   8798  *==========================================================================*/
   8799 
   8800 int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
   8801                                   const camera3_callback_ops_t *callback_ops)
   8802 {
   8803     CDBG("%s: E", __func__);
   8804     QCamera3HardwareInterface *hw =
   8805         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   8806     if (!hw) {
   8807         ALOGE("%s: NULL camera device", __func__);
   8808         return -ENODEV;
   8809     }
   8810 
   8811     int rc = hw->initialize(callback_ops);
   8812     CDBG("%s: X", __func__);
   8813     return rc;
   8814 }
   8815 
   8816 /*===========================================================================
   8817  * FUNCTION   : configure_streams
   8818  *
   8819  * DESCRIPTION:
   8820  *
   8821  * PARAMETERS :
   8822  *
   8823  *
   8824  * RETURN     : Success: 0
   8825  *              Failure: -EINVAL (if stream configuration is invalid)
   8826  *                       -ENODEV (fatal error)
   8827  *==========================================================================*/
   8828 
   8829 int QCamera3HardwareInterface::configure_streams(
   8830         const struct camera3_device *device,
   8831         camera3_stream_configuration_t *stream_list)
   8832 {
   8833     CDBG("%s: E", __func__);
   8834     QCamera3HardwareInterface *hw =
   8835         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   8836     if (!hw) {
   8837         ALOGE("%s: NULL camera device", __func__);
   8838         return -ENODEV;
   8839     }
   8840     int rc = hw->configureStreams(stream_list);
   8841     CDBG("%s: X", __func__);
   8842     return rc;
   8843 }
   8844 
   8845 /*===========================================================================
   8846  * FUNCTION   : construct_default_request_settings
   8847  *
   8848  * DESCRIPTION: Configure a settings buffer to meet the required use case
   8849  *
   8850  * PARAMETERS :
   8851  *
   8852  *
   8853  * RETURN     : Success: Return valid metadata
   8854  *              Failure: Return NULL
   8855  *==========================================================================*/
   8856 const camera_metadata_t* QCamera3HardwareInterface::
   8857     construct_default_request_settings(const struct camera3_device *device,
   8858                                         int type)
   8859 {
   8860 
   8861     CDBG("%s: E", __func__);
   8862     camera_metadata_t* fwk_metadata = NULL;
   8863     QCamera3HardwareInterface *hw =
   8864         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   8865     if (!hw) {
   8866         ALOGE("%s: NULL camera device", __func__);
   8867         return NULL;
   8868     }
   8869 
   8870     fwk_metadata = hw->translateCapabilityToMetadata(type);
   8871 
   8872     CDBG("%s: X", __func__);
   8873     return fwk_metadata;
   8874 }
   8875 
   8876 /*===========================================================================
   8877  * FUNCTION   : process_capture_request
   8878  *
   8879  * DESCRIPTION:
   8880  *
   8881  * PARAMETERS :
   8882  *
   8883  *
   8884  * RETURN     :
   8885  *==========================================================================*/
   8886 int QCamera3HardwareInterface::process_capture_request(
   8887                     const struct camera3_device *device,
   8888                     camera3_capture_request_t *request)
   8889 {
   8890     CDBG("%s: E", __func__);
   8891     QCamera3HardwareInterface *hw =
   8892         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   8893     if (!hw) {
   8894         ALOGE("%s: NULL camera device", __func__);
   8895         return -EINVAL;
   8896     }
   8897 
   8898     int rc = hw->processCaptureRequest(request);
   8899     CDBG("%s: X", __func__);
   8900     return rc;
   8901 }
   8902 
   8903 /*===========================================================================
   8904  * FUNCTION   : dump
   8905  *
   8906  * DESCRIPTION:
   8907  *
   8908  * PARAMETERS :
   8909  *
   8910  *
   8911  * RETURN     :
   8912  *==========================================================================*/
   8913 
   8914 void QCamera3HardwareInterface::dump(
   8915                 const struct camera3_device *device, int fd)
   8916 {
   8917     /* Log level property is read when "adb shell dumpsys media.camera" is
   8918        called so that the log level can be controlled without restarting
   8919        the media server */
   8920     getLogLevel();
   8921 
   8922     CDBG("%s: E", __func__);
   8923     QCamera3HardwareInterface *hw =
   8924         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   8925     if (!hw) {
   8926         ALOGE("%s: NULL camera device", __func__);
   8927         return;
   8928     }
   8929 
   8930     hw->dump(fd);
   8931     CDBG("%s: X", __func__);
   8932     return;
   8933 }
   8934 
   8935 /*===========================================================================
   8936  * FUNCTION   : flush
   8937  *
   8938  * DESCRIPTION:
   8939  *
   8940  * PARAMETERS :
   8941  *
   8942  *
   8943  * RETURN     :
   8944  *==========================================================================*/
   8945 
   8946 int QCamera3HardwareInterface::flush(
   8947                 const struct camera3_device *device)
   8948 {
   8949     int rc;
   8950     CDBG("%s: E", __func__);
   8951     QCamera3HardwareInterface *hw =
   8952         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   8953     if (!hw) {
   8954         ALOGE("%s: NULL camera device", __func__);
   8955         return -EINVAL;
   8956     }
   8957 
   8958     rc = hw->flush();
   8959     CDBG("%s: X", __func__);
   8960     return rc;
   8961 }
   8962 
   8963 /*===========================================================================
   8964  * FUNCTION   : close_camera_device
   8965  *
   8966  * DESCRIPTION:
   8967  *
   8968  * PARAMETERS :
   8969  *
   8970  *
   8971  * RETURN     :
   8972  *==========================================================================*/
   8973 int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
   8974 {
   8975     CDBG("%s: E", __func__);
   8976     int ret = NO_ERROR;
   8977     QCamera3HardwareInterface *hw =
   8978         reinterpret_cast<QCamera3HardwareInterface *>(
   8979             reinterpret_cast<camera3_device_t *>(device)->priv);
   8980     if (!hw) {
   8981         ALOGE("NULL camera device");
   8982         return BAD_VALUE;
   8983     }
   8984     delete hw;
   8985 
   8986     CDBG("%s: X", __func__);
   8987     return ret;
   8988 }
   8989 
   8990 /*===========================================================================
   8991  * FUNCTION   : getWaveletDenoiseProcessPlate
   8992  *
   8993  * DESCRIPTION: query wavelet denoise process plate
   8994  *
   8995  * PARAMETERS : None
   8996  *
 * RETURN     : WNR process plate value
   8998  *==========================================================================*/
   8999 cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
   9000 {
   9001     char prop[PROPERTY_VALUE_MAX];
   9002     memset(prop, 0, sizeof(prop));
   9003     property_get("persist.denoise.process.plates", prop, "0");
   9004     int processPlate = atoi(prop);
   9005     switch(processPlate) {
   9006     case 0:
   9007         return CAM_WAVELET_DENOISE_YCBCR_PLANE;
   9008     case 1:
   9009         return CAM_WAVELET_DENOISE_CBCR_ONLY;
   9010     case 2:
   9011         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
   9012     case 3:
   9013         return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
   9014     default:
   9015         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
   9016     }
   9017 }
   9018 
   9019 
   9020 /*===========================================================================
   9021  * FUNCTION   : getTemporalDenoiseProcessPlate
   9022  *
   9023  * DESCRIPTION: query temporal denoise process plate
   9024  *
   9025  * PARAMETERS : None
   9026  *
 * RETURN     : TNR process plate value
   9028  *==========================================================================*/
   9029 cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
   9030 {
   9031     char prop[PROPERTY_VALUE_MAX];
   9032     memset(prop, 0, sizeof(prop));
   9033     property_get("persist.tnr.process.plates", prop, "0");
   9034     int processPlate = atoi(prop);
   9035     switch(processPlate) {
   9036     case 0:
   9037         return CAM_WAVELET_DENOISE_YCBCR_PLANE;
   9038     case 1:
   9039         return CAM_WAVELET_DENOISE_CBCR_ONLY;
   9040     case 2:
   9041         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
   9042     case 3:
   9043         return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
   9044     default:
   9045         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
   9046     }
   9047 }
   9048 
   9049 
   9050 /*===========================================================================
   9051  * FUNCTION   : extractSceneMode
   9052  *
   9053  * DESCRIPTION: Extract scene mode from frameworks set metadata
   9054  *
   9055  * PARAMETERS :
   9056  *      @frame_settings: CameraMetadata reference
 *      @metaMode: ANDROID_CONTROL_MODE
   9058  *      @hal_metadata: hal metadata structure
   9059  *
   9060  * RETURN     : None
   9061  *==========================================================================*/
   9062 int32_t QCamera3HardwareInterface::extractSceneMode(
   9063         const CameraMetadata &frame_settings, uint8_t metaMode,
   9064         metadata_buffer_t *hal_metadata)
   9065 {
   9066     int32_t rc = NO_ERROR;
   9067 
   9068     if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
   9069         camera_metadata_ro_entry entry =
   9070                 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
   9071         if (0 == entry.count)
   9072             return rc;
   9073 
   9074         uint8_t fwk_sceneMode = entry.data.u8[0];
   9075 
   9076         int val = lookupHalName(SCENE_MODES_MAP,
   9077                 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
   9078                 fwk_sceneMode);
   9079         if (NAME_NOT_FOUND != val) {
   9080             uint8_t sceneMode = (uint8_t)val;
   9081             CDBG("%s: sceneMode: %d", __func__, sceneMode);
   9082             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
   9083                     CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
   9084                 rc = BAD_VALUE;
   9085             }
   9086         }
   9087     } else if ((ANDROID_CONTROL_MODE_OFF == metaMode) ||
   9088             (ANDROID_CONTROL_MODE_AUTO == metaMode)) {
   9089         uint8_t sceneMode = CAM_SCENE_MODE_OFF;
   9090         CDBG("%s: sceneMode: %d", __func__, sceneMode);
   9091         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
   9092                 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
   9093             rc = BAD_VALUE;
   9094         }
   9095     }
   9096     return rc;
   9097 }
   9098 
   9099 /*===========================================================================
   9100  * FUNCTION   : needRotationReprocess
   9101  *
   9102  * DESCRIPTION: if rotation needs to be done by reprocess in pp
   9103  *
   9104  * PARAMETERS : none
   9105  *
   9106  * RETURN     : true: needed
   9107  *              false: no need
   9108  *==========================================================================*/
   9109 bool QCamera3HardwareInterface::needRotationReprocess()
   9110 {
   9111     if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
   9112         // current rotation is not zero, and pp has the capability to process rotation
   9113         CDBG_HIGH("%s: need do reprocess for rotation", __func__);
   9114         return true;
   9115     }
   9116 
   9117     return false;
   9118 }
   9119 
   9120 /*===========================================================================
   9121  * FUNCTION   : needReprocess
   9122  *
   9123  * DESCRIPTION: if reprocess in needed
   9124  *
   9125  * PARAMETERS : none
   9126  *
   9127  * RETURN     : true: needed
   9128  *              false: no need
   9129  *==========================================================================*/
   9130 bool QCamera3HardwareInterface::needReprocess(uint32_t postprocess_mask)
   9131 {
   9132     if (gCamCapability[mCameraId]->min_required_pp_mask > 0) {
   9133         // TODO: add for ZSL HDR later
   9134         // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
   9135         if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
   9136             CDBG_HIGH("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
   9137             return true;
   9138         } else {
   9139             CDBG_HIGH("%s: already post processed frame", __func__);
   9140             return false;
   9141         }
   9142     }
   9143     return needRotationReprocess();
   9144 }
   9145 
   9146 /*===========================================================================
   9147  * FUNCTION   : needJpegRotation
   9148  *
   9149  * DESCRIPTION: if rotation from jpeg is needed
   9150  *
   9151  * PARAMETERS : none
   9152  *
   9153  * RETURN     : true: needed
   9154  *              false: no need
   9155  *==========================================================================*/
   9156 bool QCamera3HardwareInterface::needJpegRotation()
   9157 {
   9158    /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
   9159     if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
   9160        CDBG("%s: Need Jpeg to do the rotation", __func__);
   9161        return true;
   9162     }
   9163     return false;
   9164 }
   9165 
   9166 /*===========================================================================
   9167  * FUNCTION   : addOfflineReprocChannel
   9168  *
   9169  * DESCRIPTION: add a reprocess channel that will do reprocess on frames
   9170  *              coming from input channel
   9171  *
   9172  * PARAMETERS :
   9173  *   @config  : reprocess configuration
   9174  *   @inputChHandle : pointer to the input (source) channel
   9175  *
   9176  *
   9177  * RETURN     : Ptr to the newly created channel obj. NULL if failed.
   9178  *==========================================================================*/
   9179 QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
   9180         const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
   9181 {
   9182     int32_t rc = NO_ERROR;
   9183     QCamera3ReprocessChannel *pChannel = NULL;
   9184 
   9185     pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
   9186             mChannelHandle, mCameraHandle->ops, captureResultCb, config.padding,
   9187             CAM_QCOM_FEATURE_NONE, this, inputChHandle);
   9188     if (NULL == pChannel) {
   9189         ALOGE("%s: no mem for reprocess channel", __func__);
   9190         return NULL;
   9191     }
   9192 
   9193     rc = pChannel->initialize(IS_TYPE_NONE);
   9194     if (rc != NO_ERROR) {
   9195         ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
   9196         delete pChannel;
   9197         return NULL;
   9198     }
   9199 
   9200     // pp feature config
   9201     cam_pp_feature_config_t pp_config;
   9202     memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
   9203 
   9204     pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
   9205 
   9206     rc = pChannel->addReprocStreamsFromSource(pp_config,
   9207             config,
   9208             IS_TYPE_NONE,
   9209             mMetadataChannel);
   9210 
   9211     if (rc != NO_ERROR) {
   9212         delete pChannel;
   9213         return NULL;
   9214     }
   9215     return pChannel;
   9216 }
   9217 
   9218 /*===========================================================================
   9219  * FUNCTION   : getMobicatMask
   9220  *
   9221  * DESCRIPTION: returns mobicat mask
   9222  *
   9223  * PARAMETERS : none
   9224  *
   9225  * RETURN     : mobicat mask
   9226  *
   9227  *==========================================================================*/
uint8_t QCamera3HardwareInterface::getMobicatMask()
{
    // Simple accessor. m_MobicatMask is populated by setMobicat() from the
    // persist.camera.mobicat property.
    return m_MobicatMask;
}
   9232 
   9233 /*===========================================================================
   9234  * FUNCTION   : setMobicat
   9235  *
   9236  * DESCRIPTION: set Mobicat on/off.
   9237  *
   9238  * PARAMETERS :
   9239  *   @params  : none
   9240  *
   9241  * RETURN     : int32_t type of status
   9242  *              NO_ERROR  -- success
   9243  *              none-zero failure code
   9244  *==========================================================================*/
   9245 int32_t QCamera3HardwareInterface::setMobicat()
   9246 {
   9247     char value [PROPERTY_VALUE_MAX];
   9248     property_get("persist.camera.mobicat", value, "0");
   9249     int32_t ret = NO_ERROR;
   9250     uint8_t enableMobi = (uint8_t)atoi(value);
   9251 
   9252     if (enableMobi) {
   9253         tune_cmd_t tune_cmd;
   9254         tune_cmd.type = SET_RELOAD_CHROMATIX;
   9255         tune_cmd.module = MODULE_ALL;
   9256         tune_cmd.value = TRUE;
   9257         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
   9258                 CAM_INTF_PARM_SET_VFE_COMMAND,
   9259                 tune_cmd);
   9260 
   9261         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
   9262                 CAM_INTF_PARM_SET_PP_COMMAND,
   9263                 tune_cmd);
   9264     }
   9265     m_MobicatMask = enableMobi;
   9266 
   9267     return ret;
   9268 }
   9269 
   9270 /*===========================================================================
   9271 * FUNCTION   : getLogLevel
   9272 *
   9273 * DESCRIPTION: Reads the log level property into a variable
   9274 *
   9275 * PARAMETERS :
   9276 *   None
   9277 *
   9278 * RETURN     :
   9279 *   None
   9280 *==========================================================================*/
   9281 void QCamera3HardwareInterface::getLogLevel()
   9282 {
   9283     char prop[PROPERTY_VALUE_MAX];
   9284     uint32_t globalLogLevel = 0;
   9285 
   9286     property_get("persist.camera.hal.debug", prop, "0");
   9287     int val = atoi(prop);
   9288     if (0 <= val) {
   9289         gCamHal3LogLevel = (uint32_t)val;
   9290     }
   9291     property_get("persist.camera.global.debug", prop, "0");
   9292     val = atoi(prop);
   9293     if (0 <= val) {
   9294         globalLogLevel = (uint32_t)val;
   9295     }
   9296 
   9297     /* Highest log level among hal.logs and global.logs is selected */
   9298     if (gCamHal3LogLevel < globalLogLevel)
   9299         gCamHal3LogLevel = globalLogLevel;
   9300 
   9301     return;
   9302 }
   9303 
   9304 /*===========================================================================
   9305  * FUNCTION   : validateStreamRotations
   9306  *
   9307  * DESCRIPTION: Check if the rotations requested are supported
   9308  *
   9309  * PARAMETERS :
   9310  *   @stream_list : streams to be configured
   9311  *
   9312  * RETURN     : NO_ERROR on success
   9313  *              -EINVAL on failure
   9314  *
   9315  *==========================================================================*/
   9316 int QCamera3HardwareInterface::validateStreamRotations(
   9317         camera3_stream_configuration_t *streamList)
   9318 {
   9319     int rc = NO_ERROR;
   9320 
   9321     /*
   9322     * Loop through all streams requested in configuration
   9323     * Check if unsupported rotations have been requested on any of them
   9324     */
   9325     for (size_t j = 0; j < streamList->num_streams; j++){
   9326         camera3_stream_t *newStream = streamList->streams[j];
   9327 
   9328         bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
   9329         bool isImplDef = (newStream->format ==
   9330                 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
   9331         bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
   9332                 isImplDef);
   9333 
   9334         if (isRotated && (!isImplDef || isZsl)) {
   9335             ALOGE("%s: Error: Unsupported rotation of %d requested for stream"
   9336                     "type:%d and stream format:%d", __func__,
   9337                     newStream->rotation, newStream->stream_type,
   9338                     newStream->format);
   9339             rc = -EINVAL;
   9340             break;
   9341         }
   9342     }
   9343     return rc;
   9344 }
   9345 
   9346 /*===========================================================================
   9347 * FUNCTION   : getFlashInfo
   9348 *
   9349 * DESCRIPTION: Retrieve information about whether the device has a flash.
   9350 *
   9351 * PARAMETERS :
   9352 *   @cameraId  : Camera id to query
   9353 *   @hasFlash  : Boolean indicating whether there is a flash device
   9354 *                associated with given camera
   9355 *   @flashNode : If a flash device exists, this will be its device node.
   9356 *
   9357 * RETURN     :
   9358 *   None
   9359 *==========================================================================*/
   9360 void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
   9361         bool& hasFlash,
   9362         char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
   9363 {
   9364     cam_capability_t* camCapability = gCamCapability[cameraId];
   9365     if (NULL == camCapability) {
   9366         hasFlash = false;
   9367         flashNode[0] = '\0';
   9368     } else {
   9369         hasFlash = camCapability->flash_available;
   9370         strlcpy(flashNode,
   9371                 (char*)camCapability->flash_dev_name,
   9372                 QCAMERA_MAX_FILEPATH_LENGTH);
   9373     }
   9374 }
   9375 
   9376 /*===========================================================================
   9377 * FUNCTION   : getEepromVersionInfo
   9378 *
   9379 * DESCRIPTION: Retrieve version info of the sensor EEPROM data
   9380 *
   9381 * PARAMETERS : None
   9382 *
   9383 * RETURN     : string describing EEPROM version
   9384 *              "\0" if no such info available
   9385 *==========================================================================*/
const char *QCamera3HardwareInterface::getEepromVersionInfo()
{
    // Returns a pointer into gCamCapability's eeprom_version_info array;
    // the caller must not free or modify it.
    return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
}
   9390 
   9391 /*===========================================================================
   9392 * FUNCTION   : getLdafCalib
   9393 *
   9394 * DESCRIPTION: Retrieve Laser AF calibration data
   9395 *
   9396 * PARAMETERS : None
   9397 *
   9398 * RETURN     : Two uint32_t describing laser AF calibration data
   9399 *              NULL if none is available.
   9400 *==========================================================================*/
   9401 const uint32_t *QCamera3HardwareInterface::getLdafCalib()
   9402 {
   9403     if (mLdafCalibExist) {
   9404         return &mLdafCalib[0];
   9405     } else {
   9406         return NULL;
   9407     }
   9408 }
   9409 
   9410 /*===========================================================================
   9411  * FUNCTION   : dynamicUpdateMetaStreamInfo
   9412  *
   9413  * DESCRIPTION: This function:
   9414  *             (1) stops all the channels
   9415  *             (2) returns error on pending requests and buffers
   9416  *             (3) sends metastream_info in setparams
   9417  *             (4) starts all channels
   9418  *             This is useful when sensor has to be restarted to apply any
   9419  *             settings such as frame rate from a different sensor mode
   9420  *
   9421  * PARAMETERS : None
   9422  *
   9423  * RETURN     : NO_ERROR on success
   9424  *              Error codes on failure
   9425  *
   9426  *==========================================================================*/
int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
{
    ATRACE_CALL();
    int rc = NO_ERROR;

    CDBG("%s: E", __func__);

    // Step 1: stream-off all channels before reconfiguring the sensor mode.
    rc = stopAllChannels();
    if (rc < 0) {
        ALOGE("%s: stopAllChannels failed", __func__);
        return rc;
    }

    // Step 2: fail any in-flight requests/buffers; they cannot survive the
    // restart.
    rc = notifyErrorForPendingRequests();
    if (rc < 0) {
        ALOGE("%s: notifyErrorForPendingRequests failed", __func__);
        return rc;
    }

    /* Send meta stream info once again so that ISP can start */
    ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
            CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
    CDBG("%s: set_parms META_STREAM_INFO with new settings ", __func__ );
    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
            mParameters);
    if (rc < 0) {
        // Not fatal: channels are still restarted, with the old sensor mode.
        ALOGE("%s: set Metastreaminfo failed. Sensor mode does not change",
                __func__);
    }

    // Step 3: stream everything back on.
    rc = startAllChannels();
    if (rc < 0) {
        ALOGE("%s: startAllChannels failed", __func__);
        return rc;
    }

    CDBG("%s:%d X", __func__, __LINE__);
    return rc;
}
   9466 
   9467 /*===========================================================================
   9468  * FUNCTION   : stopAllChannels
   9469  *
   9470  * DESCRIPTION: This function stops (equivalent to stream-off) all channels
   9471  *
   9472  * PARAMETERS : None
   9473  *
   9474  * RETURN     : NO_ERROR on success
   9475  *              Error codes on failure
   9476  *
   9477  *==========================================================================*/
   9478 int32_t QCamera3HardwareInterface::stopAllChannels()
   9479 {
   9480     int32_t rc = NO_ERROR;
   9481 
   9482     // Stop the Streams/Channels
   9483     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   9484         it != mStreamInfo.end(); it++) {
   9485         QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
   9486         channel->stop();
   9487         (*it)->status = INVALID;
   9488     }
   9489 
   9490     if (mSupportChannel) {
   9491         mSupportChannel->stop();
   9492     }
   9493     if (mAnalysisChannel) {
   9494         mAnalysisChannel->stop();
   9495     }
   9496     if (mRawDumpChannel) {
   9497         mRawDumpChannel->stop();
   9498     }
   9499     if (mMetadataChannel) {
   9500         /* If content of mStreamInfo is not 0, there is metadata stream */
   9501         mMetadataChannel->stop();
   9502     }
   9503 
   9504     CDBG("%s:%d All channels stopped", __func__, __LINE__);
   9505     return rc;
   9506 }
   9507 
   9508 /*===========================================================================
   9509  * FUNCTION   : startAllChannels
   9510  *
   9511  * DESCRIPTION: This function starts (equivalent to stream-on) all channels
   9512  *
   9513  * PARAMETERS : None
   9514  *
   9515  * RETURN     : NO_ERROR on success
   9516  *              Error codes on failure
   9517  *
   9518  *==========================================================================*/
   9519 int32_t QCamera3HardwareInterface::startAllChannels()
   9520 {
   9521     int32_t rc = NO_ERROR;
   9522 
   9523     CDBG("%s: Start all channels ", __func__);
   9524     // Start the Streams/Channels
   9525     if (mMetadataChannel) {
   9526         /* If content of mStreamInfo is not 0, there is metadata stream */
   9527         rc = mMetadataChannel->start();
   9528         if (rc < 0) {
   9529             ALOGE("%s: META channel start failed", __func__);
   9530             return rc;
   9531         }
   9532     }
   9533     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   9534         it != mStreamInfo.end(); it++) {
   9535         QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
   9536         rc = channel->start();
   9537         if (rc < 0) {
   9538             ALOGE("%s: channel start failed", __func__);
   9539             return rc;
   9540         }
   9541     }
   9542     if (mAnalysisChannel) {
   9543         mAnalysisChannel->start();
   9544     }
   9545     if (mSupportChannel) {
   9546         rc = mSupportChannel->start();
   9547         if (rc < 0) {
   9548             ALOGE("%s: Support channel start failed", __func__);
   9549             return rc;
   9550         }
   9551     }
   9552     if (mRawDumpChannel) {
   9553         rc = mRawDumpChannel->start();
   9554         if (rc < 0) {
   9555             ALOGE("%s: RAW dump channel start failed", __func__);
   9556             return rc;
   9557         }
   9558     }
   9559 
   9560     CDBG("%s:%d All channels started", __func__, __LINE__);
   9561     return rc;
   9562 }
   9563 
   9564 /*===========================================================================
   9565  * FUNCTION   : notifyErrorForPendingRequests
   9566  *
   9567  * DESCRIPTION: This function sends error for all the pending requests/buffers
   9568  *
   9569  * PARAMETERS : None
   9570  *
   9571  * RETURN     : Error codes
   9572  *              NO_ERROR on success
   9573  *
   9574  *==========================================================================*/
int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
{
    int32_t rc = NO_ERROR;
    unsigned int frameNum = 0;
    camera3_capture_result_t result;
    camera3_stream_buffer_t *pStream_Buf = NULL;
    // Groups pending buffers by frame number so one error result can be
    // delivered per frame.
    FlushMap flushMap;

    memset(&result, 0, sizeof(camera3_capture_result_t));

    // Buffers with frame numbers below the oldest pending request belong to
    // frames whose request entry is already gone; they get ERROR_BUFFER.
    if (mPendingRequestsList.size() > 0) {
        pendingRequestIterator i = mPendingRequestsList.begin();
        frameNum = i->frame_number;
    } else {
        /* There might still be pending buffers even though there are
         no pending requests. Setting the frameNum to MAX so that
         all the buffers with smaller frame numbers are returned */
        frameNum = UINT_MAX;
    }

    CDBG_HIGH("%s: Oldest frame num on  mPendingRequestsList = %d",
      __func__, frameNum);

    // Go through the pending buffers and group them depending
    // on frame number
    for (List<PendingBufferInfo>::iterator k =
            mPendingBuffersMap.mPendingBufferList.begin();
            k != mPendingBuffersMap.mPendingBufferList.end();) {

        if (k->frame_number < frameNum) {
            ssize_t idx = flushMap.indexOfKey(k->frame_number);
            if (idx == NAME_NOT_FOUND) {
                Vector<PendingBufferInfo> pending;
                pending.add(*k);
                flushMap.add(k->frame_number, pending);
            } else {
                Vector<PendingBufferInfo> &pending =
                        flushMap.editValueFor(k->frame_number);
                pending.add(*k);
            }

            // Buffer is accounted for in flushMap; remove it from the
            // pending map while iterating (erase returns the next node).
            mPendingBuffersMap.num_buffers--;
            k = mPendingBuffersMap.mPendingBufferList.erase(k);
        } else {
            k++;
        }
    }

    // Phase 1: per-frame CAMERA3_MSG_ERROR_BUFFER notifications plus a
    // buffers-only capture result (result.result stays NULL).
    for (size_t iFlush = 0; iFlush < flushMap.size(); iFlush++) {
        uint32_t frame_number = flushMap.keyAt(iFlush);
        const Vector<PendingBufferInfo> &pending = flushMap.valueAt(iFlush);

        // Send Error notify to frameworks for each buffer for which
        // metadata buffer is already sent
        CDBG_HIGH("%s: Sending ERROR BUFFER for frame %d number of buffer %d",
          __func__, frame_number, pending.size());

        pStream_Buf = new camera3_stream_buffer_t[pending.size()];
        if (NULL == pStream_Buf) {
            ALOGE("%s: No memory for pending buffers array", __func__);
            return NO_MEMORY;
        }
        memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*pending.size());

        for (size_t j = 0; j < pending.size(); j++) {
            const PendingBufferInfo &info = pending.itemAt(j);
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
            notify_msg.message.error.error_stream = info.stream;
            notify_msg.message.error.frame_number = frame_number;
            // -1 fences: buffers are returned without pending sync work.
            pStream_Buf[j].acquire_fence = -1;
            pStream_Buf[j].release_fence = -1;
            pStream_Buf[j].buffer = info.buffer;
            pStream_Buf[j].status = CAMERA3_BUFFER_STATUS_ERROR;
            pStream_Buf[j].stream = info.stream;
            mCallbackOps->notify(mCallbackOps, &notify_msg);
            CDBG_HIGH("%s: notify frame_number = %d stream %p", __func__,
                    frame_number, info.stream);
        }

        result.result = NULL;
        result.frame_number = frame_number;
        result.num_output_buffers = (uint32_t)pending.size();
        result.output_buffers = pStream_Buf;
        mCallbackOps->process_capture_result(mCallbackOps, &result);

        delete [] pStream_Buf;
    }

    CDBG_HIGH("%s:Sending ERROR REQUEST for all pending requests", __func__);

    // Phase 2: regroup the remaining buffers — those belonging to frames
    // that still have a pending request entry.
    flushMap.clear();
    for (List<PendingBufferInfo>::iterator k =
            mPendingBuffersMap.mPendingBufferList.begin();
            k != mPendingBuffersMap.mPendingBufferList.end();) {
        ssize_t idx = flushMap.indexOfKey(k->frame_number);
        if (idx == NAME_NOT_FOUND) {
            Vector<PendingBufferInfo> pending;
            pending.add(*k);
            flushMap.add(k->frame_number, pending);
        } else {
            Vector<PendingBufferInfo> &pending =
                    flushMap.editValueFor(k->frame_number);
            pending.add(*k);
        }

        mPendingBuffersMap.num_buffers--;
        k = mPendingBuffersMap.mPendingBufferList.erase(k);
    }

    pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning

    // Go through the pending requests info and send error request to framework
    for (size_t iFlush = 0; iFlush < flushMap.size(); iFlush++) {
        uint32_t frame_number = flushMap.keyAt(iFlush);
        const Vector<PendingBufferInfo> &pending = flushMap.valueAt(iFlush);
        CDBG_HIGH("%s:Sending ERROR REQUEST for frame %d",
              __func__, frame_number);

        // Send shutter notify to frameworks
        camera3_notify_msg_t notify_msg;
        memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
        notify_msg.type = CAMERA3_MSG_ERROR;
        notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
        notify_msg.message.error.error_stream = NULL;
        notify_msg.message.error.frame_number = frame_number;
        mCallbackOps->notify(mCallbackOps, &notify_msg);

        pStream_Buf = new camera3_stream_buffer_t[pending.size()];
        if (NULL == pStream_Buf) {
            ALOGE("%s: No memory for pending buffers array", __func__);
            return NO_MEMORY;
        }
        memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*pending.size());

        for (size_t j = 0; j < pending.size(); j++) {
            const PendingBufferInfo &info = pending.itemAt(j);
            pStream_Buf[j].acquire_fence = -1;
            pStream_Buf[j].release_fence = -1;
            pStream_Buf[j].buffer = info.buffer;
            pStream_Buf[j].status = CAMERA3_BUFFER_STATUS_ERROR;
            pStream_Buf[j].stream = info.stream;
        }

        // NOTE(review): this assumes mPendingRequestsList and flushMap walk
        // the frames in the same order, so that `i` matches frame_number —
        // confirm before changing either container's ordering.
        result.input_buffer = i->input_buffer;
        result.num_output_buffers = (uint32_t)pending.size();
        result.output_buffers = pStream_Buf;
        result.result = NULL;
        result.frame_number = frame_number;
        mCallbackOps->process_capture_result(mCallbackOps, &result);
        delete [] pStream_Buf;
        i = erasePendingRequest(i);
    }

    /* Reset pending frame Drop list and requests list */
    mPendingFrameDropList.clear();

    flushMap.clear();
    mPendingBuffersMap.num_buffers = 0;
    mPendingBuffersMap.mPendingBufferList.clear();
    mPendingReprocessResultList.clear();
    CDBG_HIGH("%s: Cleared all the pending buffers ", __func__);

    return rc;
}
   9742 
   9743 bool QCamera3HardwareInterface::isOnEncoder(
   9744         const cam_dimension_t max_viewfinder_size,
   9745         uint32_t width, uint32_t height)
   9746 {
   9747     return (width > (uint32_t)max_viewfinder_size.width ||
   9748             height > (uint32_t)max_viewfinder_size.height);
   9749 }
   9750 
   9751 /*===========================================================================
   9752  * FUNCTION   : setBundleInfo
   9753  *
   9754  * DESCRIPTION: Set bundle info for all streams that are bundle.
   9755  *
   9756  * PARAMETERS : None
   9757  *
   9758  * RETURN     : NO_ERROR on success
   9759  *              Error codes on failure
   9760  *==========================================================================*/
   9761 int32_t QCamera3HardwareInterface::setBundleInfo()
   9762 {
   9763     int32_t rc = NO_ERROR;
   9764 
   9765     if (mChannelHandle) {
   9766         cam_bundle_config_t bundleInfo;
   9767         memset(&bundleInfo, 0, sizeof(bundleInfo));
   9768         rc = mCameraHandle->ops->get_bundle_info(
   9769                 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
   9770         if (rc != NO_ERROR) {
   9771             ALOGE("%s: get_bundle_info failed", __func__);
   9772             return rc;
   9773         }
   9774         if (mAnalysisChannel) {
   9775             mAnalysisChannel->setBundleInfo(bundleInfo);
   9776         }
   9777         if (mSupportChannel) {
   9778             mSupportChannel->setBundleInfo(bundleInfo);
   9779         }
   9780         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   9781                 it != mStreamInfo.end(); it++) {
   9782             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
   9783             channel->setBundleInfo(bundleInfo);
   9784         }
   9785         if (mRawDumpChannel) {
   9786             mRawDumpChannel->setBundleInfo(bundleInfo);
   9787         }
   9788     }
   9789 
   9790     return rc;
   9791 }
   9792 
   9793 }; //end namespace qcamera
   9794