// QCamera3HWI.cpp — Qualcomm Camera HAL3 hardware interface implementation (HAL3)
      1 /* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
      2 *
      3 * Redistribution and use in source and binary forms, with or without
      4 * modification, are permitted provided that the following conditions are
      5 * met:
      6 *     * Redistributions of source code must retain the above copyright
      7 *       notice, this list of conditions and the following disclaimer.
      8 *     * Redistributions in binary form must reproduce the above
      9 *       copyright notice, this list of conditions and the following
     10 *       disclaimer in the documentation and/or other materials provided
     11 *       with the distribution.
     12 *     * Neither the name of The Linux Foundation nor the names of its
     13 *       contributors may be used to endorse or promote products derived
     14 *       from this software without specific prior written permission.
     15 *
     16 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
     17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
     18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
     19 * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
     20 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
     21 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
     22 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
     23 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
     24 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
     25 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
     26 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
     27 *
     28 */
     29 
     30 #define LOG_TAG "QCamera3HWI"
     31 //#define LOG_NDEBUG 0
     32 
     33 #define __STDC_LIMIT_MACROS
     34 
     35 // To remove
     36 #include <cutils/properties.h>
     37 
     38 // System dependencies
     39 #include <dlfcn.h>
     40 #include <fcntl.h>
     41 #include <stdio.h>
     42 #include <stdlib.h>
     43 #include "utils/Timers.h"
     44 #include "sys/ioctl.h"
     45 #include <sync/sync.h>
     46 #include "gralloc_priv.h"
     47 
     48 // Display dependencies
     49 #include "qdMetaData.h"
     50 
     51 // Camera dependencies
     52 #include "android/QCamera3External.h"
     53 #include "util/QCameraFlash.h"
     54 #include "QCamera3HWI.h"
     55 #include "QCamera3VendorTags.h"
     56 #include "QCameraTrace.h"
     57 
     58 extern "C" {
     59 #include "mm_camera_dbg.h"
     60 }
     61 
     62 using namespace android;
     63 
     64 namespace qcamera {
     65 
// Convenience accessor for a buffer pointer inside a memory object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

// Pipeline characteristics reported to / used by the framework.
#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY     0

// Maximum sample values for the supported raw bit depths.
#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

// 4K UHD video dimensions.
#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

// Largest stream size for EIS — presumably the EIS-supported bound used by
// m_bEisSupportedSize; confirm against the stream-configuration code.
#define MAX_EIS_WIDTH 1920
#define MAX_EIS_HEIGHT 1080

// Per-type stream count limits for a single stream configuration.
#define MAX_RAW_STREAMS        1
#define MAX_STALLING_STREAMS   1
#define MAX_PROCESSED_STREAMS  3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR    (30)
#define DEFAULT_VIDEO_FPS      (30.0)
#define MAX_HFR_BATCH_SIZE     (8)
// int32 entries per region tuple — presumably (xmin, ymin, xmax, ymax,
// weight) per the camera metadata region convention; confirm at usage sites.
#define REGIONS_TUPLE_COUNT    5
#define HDR_PLUS_PERF_TIME_OUT  (7000) // milliseconds
// Set a threshold for detection of missing buffers //seconds
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define FLUSH_TIMEOUT 3
// Element count of a statically-sized mapping table.
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

// Default superset of post-processing features enabled for HAL3 streams.
#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3   ( CAM_QCOM_FEATURE_DENOISE2D |\
                                              CAM_QCOM_FEATURE_CROP |\
                                              CAM_QCOM_FEATURE_ROTATION |\
                                              CAM_QCOM_FEATURE_SHARPNESS |\
                                              CAM_QCOM_FEATURE_SCALE |\
                                              CAM_QCOM_FEATURE_CAC |\
                                              CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length*/
#define PER_CONFIGURATION_SIZE_3 (3)

// Wait indefinitely (no timeout).
#define TIMEOUT_NEVER -1

/* Face landmarks indices */
#define LEFT_EYE_X             0
#define LEFT_EYE_Y             1
#define RIGHT_EYE_X            2
#define RIGHT_EYE_Y            3
#define MOUTH_X                4
#define MOUTH_Y                5
#define TOTAL_LANDMARK_INDICES 6
    117 
// Per-camera capability table, indexed by camera id (see mCommon.init() in
// the constructor). Filled in elsewhere in this file.
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
// Cached static metadata, one entry per camera id.
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
// HAL log verbosity; volatile because it may be updated at runtime
// (see getLogLevel() called from the constructor).
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;
    123 
// Property-string -> CDS (chroma downsampling) mode mapping.
const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
// Vendor video-HDR enum -> HAL video-HDR mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF,  CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,   CAM_VIDEO_HDR_MODE_ON }
};


// Vendor IR enum -> HAL IR mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON, CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};
    144 
// ANDROID_CONTROL_EFFECT_MODE_* -> HAL effect mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

// ANDROID_CONTROL_AWB_MODE_* -> HAL white-balance mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

// ANDROID_CONTROL_SCENE_MODE_* -> HAL scene mode
// (note: STEADYPHOTO intentionally maps to the HAL's ANTISHAKE mode).
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};

// ANDROID_CONTROL_AF_MODE_* -> HAL focus mode. AF_MODE_OFF appears twice
// (OFF and FIXED) so that either HAL mode reverse-maps to the framework's OFF.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};
    205 
// ANDROID_COLOR_CORRECTION_ABERRATION_MODE_* -> HAL CAC mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

// ANDROID_CONTROL_AE_ANTIBANDING_MODE_* -> HAL antibanding mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

// ANDROID_CONTROL_AE_MODE_* -> HAL flash mode. Both plain ON and OFF map to
// FLASH_MODE_OFF (flash is only engaged by the *_FLASH AE modes).
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

// ANDROID_FLASH_MODE_* -> HAL flash mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

// ANDROID_STATISTICS_FACE_DETECT_MODE_* -> HAL face-detect mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE  },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};

// ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_* -> HAL calibration level.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
      CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
      CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
      CAM_FOCUS_CALIBRATED }
};

// ANDROID_LENS_STATE_* -> HAL AF lens state.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY,    CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,        CAM_AF_LENS_STATE_MOVING}
};
    269 
// Advertised JPEG thumbnail sizes as (width, height) pairs; (0, 0) means
// "no thumbnail". NOTE(review): 256x154 looks unusual next to the other
// entries (256x144 etc.) — presumably intentional 16:9-ish size; confirm.
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

// ANDROID_SENSOR_TEST_PATTERN_MODE_* -> HAL test pattern.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,          CAM_TEST_PATTERN_OFF   },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,  CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,   CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,          CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,      CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options some Android enum are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index which means that for HAL values that are map to different
 * Android values, the traverse logic will select the first one found.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};

// Requested fps -> HAL high-frame-rate mode.
const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};
    327 
/* HAL3 entry-point vtable handed to the framework via camera3_device_t::ops
 * (wired up in the constructor). Entries this HAL does not implement
 * (register_stream_buffers, get_metadata_vendor_tag_ops) are left NULL. */
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};

// initialise to some default value
// 0xDEADBEEF serves as an "unassigned" sentinel for each per-camera session id.
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
    342 
/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface. Initializes every
 *              member to its idle default, fills in the camera3_device_t
 *              handed to the framework, reads debug/tuning system properties,
 *              and probes the GPU library for surface stride alignment.
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *   @callbacks : framework callback table, stored in mCallbacks
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mDummyBatchChannel(NULL),
      m_perfLock(),
      mCommon(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mLdafCalibExist(false),
      mPowerHintEnabled(false),
      mLastCustIntentFrmNum(-1),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pRelCamSyncHeap(NULL),
      m_pRelCamSyncBuf(NULL)
{
    getLogLevel();
    m_perfLock.lock_init();
    mCommon.init(gCamCapability[cameraId]);
    // Populate the camera3_device_t that openCamera() returns to the framework.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    pthread_cond_init(&mBuffersCond, NULL);

    pthread_cond_init(&mRequestCond, NULL);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    // TNR (temporal noise reduction) toggles for preview and video streams.
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "0");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);

    //Load and read GPU library.
    // Query the GPU's preferred surface pixel alignment; fall back to the
    // default 32-pixel padding when the library or symbol is unavailable.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_32;
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
         if (LINK_get_surface_pixel_alignment) {
             mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
         }
         dlclose(lib_surface_utils);
    }
}
    470 
/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface. Tears down in strict
 *              order: stop every channel first, then delete channels, then
 *              unconfigure/deinit the backend, close the camera, and finally
 *              release pending-request bookkeeping and sync primitives.
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    /* Turn off current power hint before acquiring perfLock in case they
     * conflict with each other */
    disablePowerHint();

    m_perfLock.lock_acq();

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    // Second pass: everything is stopped, now it is safe to delete.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    // mPictureChannel is owned via mStreamInfo (deleted above); just clear it.
    mPictureChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    // Drop any still-pending buffer / reprocess / request bookkeeping;
    // erasePendingRequest() frees each request's heap allocations.
    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    mPendingReprocessResultList.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    m_perfLock.lock_rel();
    m_perfLock.lock_deinit();

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}
    604 
    605 /*===========================================================================
    606  * FUNCTION   : erasePendingRequest
    607  *
    608  * DESCRIPTION: function to erase a desired pending request after freeing any
    609  *              allocated memory
    610  *
    611  * PARAMETERS :
    612  *   @i       : iterator pointing to pending request to be erased
    613  *
    614  * RETURN     : iterator pointing to the next request
    615  *==========================================================================*/
    616 QCamera3HardwareInterface::pendingRequestIterator
    617         QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
    618 {
    619     if (i->input_buffer != NULL) {
    620         free(i->input_buffer);
    621         i->input_buffer = NULL;
    622     }
    623     if (i->settings != NULL)
    624         free_camera_metadata((camera_metadata_t*)i->settings);
    625     return mPendingRequestsList.erase(i);
    626 }
    627 
    628 /*===========================================================================
    629  * FUNCTION   : camEvtHandle
    630  *
    631  * DESCRIPTION: Function registered to mm-camera-interface to handle events
    632  *
    633  * PARAMETERS :
    634  *   @camera_handle : interface layer camera handle
    635  *   @evt           : ptr to event
    636  *   @user_data     : user data ptr
    637  *
    638  * RETURN     : none
    639  *==========================================================================*/
    640 void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
    641                                           mm_camera_event_t *evt,
    642                                           void *user_data)
    643 {
    644     QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    645     if (obj && evt) {
    646         switch(evt->server_event_type) {
    647             case CAM_EVENT_TYPE_DAEMON_DIED:
    648                 pthread_mutex_lock(&obj->mMutex);
    649                 obj->mState = ERROR;
    650                 pthread_mutex_unlock(&obj->mMutex);
    651                 LOGE("Fatal, camera daemon died");
    652                 break;
    653 
    654             case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
    655                 LOGD("HAL got request pull from Daemon");
    656                 pthread_mutex_lock(&obj->mMutex);
    657                 obj->mWokenUpByDaemon = true;
    658                 obj->unblockRequestIfNecessary();
    659                 pthread_mutex_unlock(&obj->mMutex);
    660                 break;
    661 
    662             default:
    663                 LOGW("Warning: Unhandled event %d",
    664                         evt->server_event_type);
    665                 break;
    666         }
    667     } else {
    668         LOGE("NULL user_data/evt");
    669     }
    670 }
    671 
    672 /*===========================================================================
    673  * FUNCTION   : openCamera
    674  *
    675  * DESCRIPTION: open camera
    676  *
    677  * PARAMETERS :
    678  *   @hw_device  : double ptr for camera device struct
    679  *
    680  * RETURN     : int32_t type of status
    681  *              NO_ERROR  -- success
    682  *              none-zero failure code
    683  *==========================================================================*/
    684 int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
    685 {
    686     int rc = 0;
    687     if (mState != CLOSED) {
    688         *hw_device = NULL;
    689         return PERMISSION_DENIED;
    690     }
    691 
    692     m_perfLock.lock_acq();
    693     LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
    694              mCameraId);
    695 
    696     rc = openCamera();
    697     if (rc == 0) {
    698         *hw_device = &mCameraDevice.common;
    699     } else
    700         *hw_device = NULL;
    701 
    702     m_perfLock.lock_rel();
    703     LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
    704              mCameraId, rc);
    705 
    706     if (rc == NO_ERROR) {
    707         mState = OPENED;
    708     }
    709     return rc;
    710 }
    711 
    712 /*===========================================================================
    713  * FUNCTION   : openCamera
    714  *
    715  * DESCRIPTION: open camera
    716  *
    717  * PARAMETERS : none
    718  *
    719  * RETURN     : int32_t type of status
    720  *              NO_ERROR  -- success
    721  *              none-zero failure code
    722  *==========================================================================*/
    723 int QCamera3HardwareInterface::openCamera()
    724 {
    725     int rc = 0;
    726     char value[PROPERTY_VALUE_MAX];
    727 
    728     KPI_ATRACE_CALL();
    729     if (mCameraHandle) {
    730         LOGE("Failure: Camera already opened");
    731         return ALREADY_EXISTS;
    732     }
    733 
    734     rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    735     if (rc < 0) {
    736         LOGE("Failed to reserve flash for camera id: %d",
    737                 mCameraId);
    738         return UNKNOWN_ERROR;
    739     }
    740 
    741     rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    742     if (rc) {
    743         LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
    744         return rc;
    745     }
    746 
    747     if (!mCameraHandle) {
    748         LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
    749         return -ENODEV;
    750     }
    751 
    752     rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
    753             camEvtHandle, (void *)this);
    754 
    755     if (rc < 0) {
    756         LOGE("Error, failed to register event callback");
    757         /* Not closing camera here since it is already handled in destructor */
    758         return FAILED_TRANSACTION;
    759     }
    760 
    761     mExifParams.debug_params =
    762             (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    763     if (mExifParams.debug_params) {
    764         memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    765     } else {
    766         LOGE("Out of Memory. Allocation failed for 3A debug exif params");
    767         return NO_MEMORY;
    768     }
    769     mFirstConfiguration = true;
    770 
    771     //Notify display HAL that a camera session is active.
    772     //But avoid calling the same during bootup because camera service might open/close
    773     //cameras at boot time during its initialization and display service will also internally
    774     //wait for camera service to initialize first while calling this display API, resulting in a
    775     //deadlock situation. Since boot time camera open/close calls are made only to fetch
    776     //capabilities, no need of this display bw optimization.
    777     //Use "service.bootanim.exit" property to know boot status.
    778     property_get("service.bootanim.exit", value, "0");
    779     if (atoi(value) == 1) {
    780         pthread_mutex_lock(&gCamLock);
    781         if (gNumCameraSessions++ == 0) {
    782             setCameraLaunchStatus(true);
    783         }
    784         pthread_mutex_unlock(&gCamLock);
    785     }
    786 
    787     //fill the session id needed while linking dual cam
    788     pthread_mutex_lock(&gCamLock);
    789     rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
    790         &sessionId[mCameraId]);
    791     pthread_mutex_unlock(&gCamLock);
    792 
    793     if (rc < 0) {
    794         LOGE("Error, failed to get sessiion id");
    795         return UNKNOWN_ERROR;
    796     } else {
    797         //Allocate related cam sync buffer
    798         //this is needed for the payload that goes along with bundling cmd for related
    799         //camera use cases
    800         m_pRelCamSyncHeap = new QCamera3HeapMemory(1);
    801         rc = m_pRelCamSyncHeap->allocate(sizeof(cam_sync_related_sensors_event_info_t));
    802         if(rc != OK) {
    803             rc = NO_MEMORY;
    804             LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
    805             return NO_MEMORY;
    806         }
    807 
    808         //Map memory for related cam sync buffer
    809         rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
    810                 CAM_MAPPING_BUF_TYPE_SYNC_RELATED_SENSORS_BUF,
    811                 m_pRelCamSyncHeap->getFd(0),
    812                 sizeof(cam_sync_related_sensors_event_info_t),
    813                 m_pRelCamSyncHeap->getPtr(0));
    814         if(rc < 0) {
    815             LOGE("Dualcam: failed to map Related cam sync buffer");
    816             rc = FAILED_TRANSACTION;
    817             return NO_MEMORY;
    818         }
    819         m_pRelCamSyncBuf =
    820                 (cam_sync_related_sensors_event_info_t*) DATA_PTR(m_pRelCamSyncHeap,0);
    821     }
    822 
    823     LOGH("mCameraId=%d",mCameraId);
    824 
    825     return NO_ERROR;
    826 }
    827 
    828 /*===========================================================================
    829  * FUNCTION   : closeCamera
    830  *
    831  * DESCRIPTION: close camera
    832  *
    833  * PARAMETERS : none
    834  *
    835  * RETURN     : int32_t type of status
    836  *              NO_ERROR  -- success
    837  *              none-zero failure code
    838  *==========================================================================*/
    839 int QCamera3HardwareInterface::closeCamera()
    840 {
    841     KPI_ATRACE_CALL();
    842     int rc = NO_ERROR;
    843     char value[PROPERTY_VALUE_MAX];
    844 
    845     LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
    846              mCameraId);
    847 
    848     // unmap memory for related cam sync buffer
    849     mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
    850             CAM_MAPPING_BUF_TYPE_SYNC_RELATED_SENSORS_BUF);
    851     if (NULL != m_pRelCamSyncHeap) {
    852         m_pRelCamSyncHeap->deallocate();
    853         delete m_pRelCamSyncHeap;
    854         m_pRelCamSyncHeap = NULL;
    855         m_pRelCamSyncBuf = NULL;
    856     }
    857 
    858     rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    859     mCameraHandle = NULL;
    860 
    861     //reset session id to some invalid id
    862     pthread_mutex_lock(&gCamLock);
    863     sessionId[mCameraId] = 0xDEADBEEF;
    864     pthread_mutex_unlock(&gCamLock);
    865 
    866     //Notify display HAL that there is no active camera session
    867     //but avoid calling the same during bootup. Refer to openCamera
    868     //for more details.
    869     property_get("service.bootanim.exit", value, "0");
    870     if (atoi(value) == 1) {
    871         pthread_mutex_lock(&gCamLock);
    872         if (--gNumCameraSessions == 0) {
    873             setCameraLaunchStatus(false);
    874         }
    875         pthread_mutex_unlock(&gCamLock);
    876     }
    877 
    878     if (mExifParams.debug_params) {
    879         free(mExifParams.debug_params);
    880         mExifParams.debug_params = NULL;
    881     }
    882     if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
    883         LOGW("Failed to release flash for camera id: %d",
    884                 mCameraId);
    885     }
    886     mState = CLOSED;
    887     LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
    888          mCameraId, rc);
    889     return rc;
    890 }
    891 
    892 /*===========================================================================
    893  * FUNCTION   : initialize
    894  *
    895  * DESCRIPTION: Initialize frameworks callback functions
    896  *
    897  * PARAMETERS :
    898  *   @callback_ops : callback function to frameworks
    899  *
    900  * RETURN     :
    901  *
    902  *==========================================================================*/
    903 int QCamera3HardwareInterface::initialize(
    904         const struct camera3_callback_ops *callback_ops)
    905 {
    906     ATRACE_CALL();
    907     int rc;
    908 
    909     LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    910     pthread_mutex_lock(&mMutex);
    911 
    912     // Validate current state
    913     switch (mState) {
    914         case OPENED:
    915             /* valid state */
    916             break;
    917         default:
    918             LOGE("Invalid state %d", mState);
    919             rc = -ENODEV;
    920             goto err1;
    921     }
    922 
    923     rc = initParameters();
    924     if (rc < 0) {
    925         LOGE("initParamters failed %d", rc);
    926         goto err1;
    927     }
    928     mCallbackOps = callback_ops;
    929 
    930     mChannelHandle = mCameraHandle->ops->add_channel(
    931             mCameraHandle->camera_handle, NULL, NULL, this);
    932     if (mChannelHandle == 0) {
    933         LOGE("add_channel failed");
    934         rc = -ENOMEM;
    935         pthread_mutex_unlock(&mMutex);
    936         return rc;
    937     }
    938 
    939     pthread_mutex_unlock(&mMutex);
    940     mCameraInitialized = true;
    941     mState = INITIALIZED;
    942     LOGI("X");
    943     return 0;
    944 
    945 err1:
    946     pthread_mutex_unlock(&mMutex);
    947     return rc;
    948 }
    949 
    950 /*===========================================================================
    951  * FUNCTION   : validateStreamDimensions
    952  *
    953  * DESCRIPTION: Check if the configuration requested are those advertised
    954  *
    955  * PARAMETERS :
    956  *   @stream_list : streams to be configured
    957  *
    958  * RETURN     :
    959  *
    960  *==========================================================================*/
// Validate that every requested stream dimension is one the camera
// advertises for that stream's format. Returns NO_ERROR on success,
// -EINVAL if multiple input streams are requested or any stream has an
// unsupported size.
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;

    camera3_stream_t *inputStream = NULL;
    /*
    * Loop through all streams to find input stream if it exists*
    */
    for (size_t i = 0; i< streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                // At most one input stream is allowed per configuration.
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
    * Loop through all streams requested in configuration
    * Check if unsupported sizes have been requested on any of them
    */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        // For 90/270 degree rotation the produced buffer has swapped
        // dimensions, so validate the pre-rotation (swapped) size.
        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
        * Sizes are different for each type of stream format check against
        * appropriate table.
        */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            // Raw streams: check against the supported raw dimension table.
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            // JPEG (blob) streams: check against the picture size table.
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            // ZSL-capable streams (bidirectional/input/ZSL usage) may match
            // the full active array size directly; note the break below
            // exits the switch, skipping the picture-size-table check.
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                                gCamCapability[mCameraId]->active_array_size.width) &&
                                ((int32_t)rotatedHeight ==
                                gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce ZSL stream
                 * set from frameworks always is full active array size
                 * but it is not clear from the spc if framework will always
                 * follow that, also we have logic to override to full array
                 * size, so keeping the logic lenient at the moment
                 */
            }
            // All other processed streams: check the picture size table.
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                    MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                            ((int32_t)rotatedHeight ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}
   1072 
   1073 /*==============================================================================
   1074  * FUNCTION   : isSupportChannelNeeded
   1075  *
   1076  * DESCRIPTION: Simple heuristic func to determine if support channels is needed
   1077  *
   1078  * PARAMETERS :
   1079  *   @stream_list : streams to be configured
   1080  *   @stream_config_info : the config info for streams to be configured
   1081  *
   1082  * RETURN     : Boolen true/false decision
   1083  *
   1084  *==========================================================================*/
   1085 bool QCamera3HardwareInterface::isSupportChannelNeeded(
   1086         camera3_stream_configuration_t *streamList,
   1087         cam_stream_size_info_t stream_config_info)
   1088 {
   1089     uint32_t i;
   1090     bool pprocRequested = false;
   1091     /* Check for conditions where PProc pipeline does not have any streams*/
   1092     for (i = 0; i < stream_config_info.num_streams; i++) {
   1093         if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
   1094                 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
   1095             pprocRequested = true;
   1096             break;
   1097         }
   1098     }
   1099 
   1100     if (pprocRequested == false )
   1101         return true;
   1102 
   1103     /* Dummy stream needed if only raw or jpeg streams present */
   1104     for (i = 0; i < streamList->num_streams; i++) {
   1105         switch(streamList->streams[i]->format) {
   1106             case HAL_PIXEL_FORMAT_RAW_OPAQUE:
   1107             case HAL_PIXEL_FORMAT_RAW10:
   1108             case HAL_PIXEL_FORMAT_RAW16:
   1109             case HAL_PIXEL_FORMAT_BLOB:
   1110                 break;
   1111             default:
   1112                 return false;
   1113         }
   1114     }
   1115     return true;
   1116 }
   1117 
   1118 /*==============================================================================
   1119  * FUNCTION   : getSensorOutputSize
   1120  *
   1121  * DESCRIPTION: Get sensor output size based on current stream configuratoin
   1122  *
   1123  * PARAMETERS :
   1124  *   @sensor_dim : sensor output dimension (output)
   1125  *
   1126  * RETURN     : int32_t type of status
   1127  *              NO_ERROR  -- success
   1128  *              none-zero failure code
   1129  *
   1130  *==========================================================================*/
   1131 int32_t QCamera3HardwareInterface::getSensorOutputSize(cam_dimension_t &sensor_dim)
   1132 {
   1133     int32_t rc = NO_ERROR;
   1134 
   1135     cam_dimension_t max_dim = {0, 0};
   1136     for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
   1137         if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
   1138             max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
   1139         if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
   1140             max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
   1141     }
   1142 
   1143     clear_metadata_buffer(mParameters);
   1144 
   1145     rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
   1146             max_dim);
   1147     if (rc != NO_ERROR) {
   1148         LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
   1149         return rc;
   1150     }
   1151 
   1152     rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
   1153     if (rc != NO_ERROR) {
   1154         LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
   1155         return rc;
   1156     }
   1157 
   1158     clear_metadata_buffer(mParameters);
   1159     ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_RAW_DIMENSION);
   1160 
   1161     rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
   1162             mParameters);
   1163     if (rc != NO_ERROR) {
   1164         LOGE("Failed to get CAM_INTF_PARM_RAW_DIMENSION");
   1165         return rc;
   1166     }
   1167 
   1168     READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_RAW_DIMENSION, sensor_dim);
   1169     LOGH("sensor output dimension = %d x %d", sensor_dim.width, sensor_dim.height);
   1170 
   1171     return rc;
   1172 }
   1173 
   1174 /*==============================================================================
   1175  * FUNCTION   : enablePowerHint
   1176  *
   1177  * DESCRIPTION: enable single powerhint for preview and different video modes.
   1178  *
   1179  * PARAMETERS :
   1180  *
   1181  * RETURN     : NULL
   1182  *
   1183  *==========================================================================*/
   1184 void QCamera3HardwareInterface::enablePowerHint()
   1185 {
   1186     if (!mPowerHintEnabled) {
   1187         m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, true);
   1188         mPowerHintEnabled = true;
   1189     }
   1190 }
   1191 
   1192 /*==============================================================================
   1193  * FUNCTION   : disablePowerHint
   1194  *
   1195  * DESCRIPTION: disable current powerhint.
   1196  *
   1197  * PARAMETERS :
   1198  *
   1199  * RETURN     : NULL
   1200  *
   1201  *==========================================================================*/
   1202 void QCamera3HardwareInterface::disablePowerHint()
   1203 {
   1204     if (mPowerHintEnabled) {
   1205         m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, false);
   1206         mPowerHintEnabled = false;
   1207     }
   1208 }
   1209 
   1210 /*==============================================================================
   1211  * FUNCTION   : addToPPFeatureMask
   1212  *
   1213  * DESCRIPTION: add additional features to pp feature mask based on
   1214  *              stream type and usecase
   1215  *
   1216  * PARAMETERS :
   1217  *   @stream_format : stream type for feature mask
   1218  *   @stream_idx : stream idx within postprocess_mask list to change
   1219  *
   1220  * RETURN     : NULL
   1221  *
   1222  *==========================================================================*/
   1223 void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
   1224         uint32_t stream_idx)
   1225 {
   1226     char feature_mask_value[PROPERTY_VALUE_MAX];
   1227     cam_feature_mask_t feature_mask;
   1228     int args_converted;
   1229     int property_len;
   1230 
   1231     /* Get feature mask from property */
   1232 #ifdef _LE_CAMERA_
   1233     char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
   1234     snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
   1235     property_len = property_get("persist.camera.hal3.feature",
   1236             feature_mask_value, swtnr_feature_mask_value);
   1237 #else
   1238     property_len = property_get("persist.camera.hal3.feature",
   1239             feature_mask_value, "0");
   1240 #endif
   1241     if ((property_len > 2) && (feature_mask_value[0] == '0') &&
   1242             (feature_mask_value[1] == 'x')) {
   1243         args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
   1244     } else {
   1245         args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
   1246     }
   1247     if (1 != args_converted) {
   1248         feature_mask = 0;
   1249         LOGE("Wrong feature mask %s", feature_mask_value);
   1250         return;
   1251     }
   1252 
   1253     switch (stream_format) {
   1254     case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
   1255         /* Add LLVD to pp feature mask only if video hint is enabled */
   1256         if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
   1257             mStreamConfigInfo.postprocess_mask[stream_idx]
   1258                     |= CAM_QTI_FEATURE_SW_TNR;
   1259             LOGH("Added SW TNR to pp feature mask");
   1260         } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
   1261             mStreamConfigInfo.postprocess_mask[stream_idx]
   1262                     |= CAM_QCOM_FEATURE_LLVD;
   1263             LOGH("Added LLVD SeeMore to pp feature mask");
   1264         }
   1265         break;
   1266     }
   1267     default:
   1268         break;
   1269     }
   1270     LOGD("PP feature mask %llx",
   1271             mStreamConfigInfo.postprocess_mask[stream_idx]);
   1272 }
   1273 
   1274 /*==============================================================================
   1275  * FUNCTION   : updateFpsInPreviewBuffer
   1276  *
   1277  * DESCRIPTION: update FPS information in preview buffer.
   1278  *
   1279  * PARAMETERS :
   1280  *   @metadata    : pointer to metadata buffer
   1281  *   @frame_number: frame_number to look for in pending buffer list
   1282  *
   1283  * RETURN     : None
   1284  *
   1285  *==========================================================================*/
   1286 void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
   1287         uint32_t frame_number)
   1288 {
   1289     // Mark all pending buffers for this particular request
   1290     // with corresponding framerate information
   1291     for (List<PendingBuffersInRequest>::iterator req =
   1292             mPendingBuffersMap.mPendingBuffersInRequest.begin();
   1293             req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
   1294         for(List<PendingBufferInfo>::iterator j =
   1295                 req->mPendingBufferList.begin();
   1296                 j != req->mPendingBufferList.end(); j++) {
   1297             QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
   1298             if ((req->frame_number == frame_number) &&
   1299                 (channel->getStreamTypeMask() &
   1300                 (1U << CAM_STREAM_TYPE_PREVIEW))) {
   1301                 IF_META_AVAILABLE(cam_fps_range_t, float_range,
   1302                     CAM_INTF_PARM_FPS_RANGE, metadata) {
   1303                     typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
   1304                     struct private_handle_t *priv_handle =
   1305                         (struct private_handle_t *)(*(j->buffer));
   1306                     setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
   1307                 }
   1308             }
   1309         }
   1310     }
   1311 }
   1312 
   1313 /*==============================================================================
   1314  * FUNCTION   : updateTimeStampInPendingBuffers
   1315  *
   1316  * DESCRIPTION: update timestamp in display metadata for all pending buffers
   1317  *              of a frame number
   1318  *
   1319  * PARAMETERS :
   1320  *   @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
   1321  *   @timestamp   : timestamp to be set
   1322  *
   1323  * RETURN     : None
   1324  *
   1325  *==========================================================================*/
   1326 void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
   1327         uint32_t frameNumber, nsecs_t timestamp)
   1328 {
   1329     for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
   1330             req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
   1331         if (req->frame_number != frameNumber)
   1332             continue;
   1333 
   1334         for (auto k = req->mPendingBufferList.begin();
   1335                 k != req->mPendingBufferList.end(); k++ ) {
   1336             struct private_handle_t *priv_handle =
   1337                     (struct private_handle_t *) (*(k->buffer));
   1338             setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
   1339         }
   1340     }
   1341     return;
   1342 }
   1343 
   1344 /*===========================================================================
   1345  * FUNCTION   : configureStreams
   1346  *
   1347  * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
   1348  *              and output streams.
   1349  *
   1350  * PARAMETERS :
   1351  *   @stream_list : streams to be configured
   1352  *
   1353  * RETURN     :
   1354  *
   1355  *==========================================================================*/
   1356 int QCamera3HardwareInterface::configureStreams(
   1357         camera3_stream_configuration_t *streamList)
   1358 {
   1359     ATRACE_CALL();
   1360     int rc = 0;
   1361 
   1362     // Acquire perfLock before configure streams
   1363     m_perfLock.lock_acq();
   1364     rc = configureStreamsPerfLocked(streamList);
   1365     m_perfLock.lock_rel();
   1366 
   1367     return rc;
   1368 }
   1369 
   1370 /*===========================================================================
   1371  * FUNCTION   : configureStreamsPerfLocked
   1372  *
   1373  * DESCRIPTION: configureStreams while perfLock is held.
   1374  *
   1375  * PARAMETERS :
   1376  *   @stream_list : streams to be configured
   1377  *
   1378  * RETURN     : int32_t type of status
   1379  *              NO_ERROR  -- success
   1380  *              none-zero failure code
   1381  *==========================================================================*/
   1382 int QCamera3HardwareInterface::configureStreamsPerfLocked(
   1383         camera3_stream_configuration_t *streamList)
   1384 {
   1385     ATRACE_CALL();
   1386     int rc = 0;
   1387 
   1388     // Sanity check stream_list
   1389     if (streamList == NULL) {
   1390         LOGE("NULL stream configuration");
   1391         return BAD_VALUE;
   1392     }
   1393     if (streamList->streams == NULL) {
   1394         LOGE("NULL stream list");
   1395         return BAD_VALUE;
   1396     }
   1397 
   1398     if (streamList->num_streams < 1) {
   1399         LOGE("Bad number of streams requested: %d",
   1400                 streamList->num_streams);
   1401         return BAD_VALUE;
   1402     }
   1403 
   1404     if (streamList->num_streams >= MAX_NUM_STREAMS) {
   1405         LOGE("Maximum number of streams %d exceeded: %d",
   1406                 MAX_NUM_STREAMS, streamList->num_streams);
   1407         return BAD_VALUE;
   1408     }
   1409 
   1410     mOpMode = streamList->operation_mode;
   1411     LOGD("mOpMode: %d", mOpMode);
   1412 
   1413     /* first invalidate all the steams in the mStreamList
   1414      * if they appear again, they will be validated */
   1415     for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
   1416             it != mStreamInfo.end(); it++) {
   1417         QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
   1418         if (channel) {
   1419           channel->stop();
   1420         }
   1421         (*it)->status = INVALID;
   1422     }
   1423 
   1424     if (mRawDumpChannel) {
   1425         mRawDumpChannel->stop();
   1426         delete mRawDumpChannel;
   1427         mRawDumpChannel = NULL;
   1428     }
   1429 
   1430     if (mSupportChannel)
   1431         mSupportChannel->stop();
   1432 
   1433     if (mAnalysisChannel) {
   1434         mAnalysisChannel->stop();
   1435     }
   1436     if (mMetadataChannel) {
   1437         /* If content of mStreamInfo is not 0, there is metadata stream */
   1438         mMetadataChannel->stop();
   1439     }
   1440     if (mChannelHandle) {
   1441         mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
   1442                 mChannelHandle);
   1443         LOGD("stopping channel %d", mChannelHandle);
   1444     }
   1445 
   1446     pthread_mutex_lock(&mMutex);
   1447 
   1448     // Check state
   1449     switch (mState) {
   1450         case INITIALIZED:
   1451         case CONFIGURED:
   1452         case STARTED:
   1453             /* valid state */
   1454             break;
   1455         default:
   1456             LOGE("Invalid state %d", mState);
   1457             pthread_mutex_unlock(&mMutex);
   1458             return -ENODEV;
   1459     }
   1460 
   1461     /* Check whether we have video stream */
   1462     m_bIs4KVideo = false;
   1463     m_bIsVideo = false;
   1464     m_bEisSupportedSize = false;
   1465     m_bTnrEnabled = false;
   1466     bool isZsl = false;
   1467     uint32_t videoWidth = 0U;
   1468     uint32_t videoHeight = 0U;
   1469     size_t rawStreamCnt = 0;
   1470     size_t stallStreamCnt = 0;
   1471     size_t processedStreamCnt = 0;
   1472     // Number of streams on ISP encoder path
   1473     size_t numStreamsOnEncoder = 0;
   1474     size_t numYuv888OnEncoder = 0;
   1475     bool bYuv888OverrideJpeg = false;
   1476     cam_dimension_t largeYuv888Size = {0, 0};
   1477     cam_dimension_t maxViewfinderSize = {0, 0};
   1478     bool bJpegExceeds4K = false;
   1479     bool bJpegOnEncoder = false;
   1480     bool bUseCommonFeatureMask = false;
   1481     cam_feature_mask_t commonFeatureMask = 0;
   1482     bool bSmallJpegSize = false;
   1483     uint32_t width_ratio;
   1484     uint32_t height_ratio;
   1485     maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
   1486     camera3_stream_t *inputStream = NULL;
   1487     bool isJpeg = false;
   1488     cam_dimension_t jpegSize = {0, 0};
   1489 
   1490     cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
   1491 
   1492     /*EIS configuration*/
   1493     bool oisSupported = false;
   1494     uint8_t eis_prop_set;
   1495     uint32_t maxEisWidth = 0;
   1496     uint32_t maxEisHeight = 0;
   1497 
   1498     memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
   1499 
   1500     size_t count = IS_TYPE_MAX;
   1501     count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
   1502     for (size_t i = 0; i < count; i++) {
   1503         if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
   1504             (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
   1505             m_bEisSupported = true;
   1506             break;
   1507         }
   1508     }
   1509     count = CAM_OPT_STAB_MAX;
   1510     count = MIN(gCamCapability[mCameraId]->optical_stab_modes_count, count);
   1511     for (size_t i = 0; i < count; i++) {
   1512         if (gCamCapability[mCameraId]->optical_stab_modes[i] ==  CAM_OPT_STAB_ON) {
   1513             oisSupported = true;
   1514             break;
   1515         }
   1516     }
   1517 
   1518     if (m_bEisSupported) {
   1519         maxEisWidth = MAX_EIS_WIDTH;
   1520         maxEisHeight = MAX_EIS_HEIGHT;
   1521     }
   1522 
   1523     /* EIS setprop control */
   1524     char eis_prop[PROPERTY_VALUE_MAX];
   1525     memset(eis_prop, 0, sizeof(eis_prop));
   1526     property_get("persist.camera.eis.enable", eis_prop, "1");
   1527     eis_prop_set = (uint8_t)atoi(eis_prop);
   1528 
   1529     m_bEisEnable = eis_prop_set && (!oisSupported && m_bEisSupported) &&
   1530             (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
   1531 
   1532     LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d, oisSupported:%d ",
   1533             m_bEisEnable, eis_prop_set, m_bEisSupported, oisSupported);
   1534 
   1535     /* stream configurations */
   1536     for (size_t i = 0; i < streamList->num_streams; i++) {
   1537         camera3_stream_t *newStream = streamList->streams[i];
   1538         LOGI("stream[%d] type = %d, format = %d, width = %d, "
   1539                 "height = %d, rotation = %d, usage = 0x%x",
   1540                  i, newStream->stream_type, newStream->format,
   1541                 newStream->width, newStream->height, newStream->rotation,
   1542                 newStream->usage);
   1543         if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
   1544                 newStream->stream_type == CAMERA3_STREAM_INPUT){
   1545             isZsl = true;
   1546         }
   1547         if (newStream->stream_type == CAMERA3_STREAM_INPUT){
   1548             inputStream = newStream;
   1549         }
   1550 
   1551         if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
   1552             isJpeg = true;
   1553             jpegSize.width = newStream->width;
   1554             jpegSize.height = newStream->height;
   1555             if (newStream->width > VIDEO_4K_WIDTH ||
   1556                     newStream->height > VIDEO_4K_HEIGHT)
   1557                 bJpegExceeds4K = true;
   1558         }
   1559 
   1560         if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
   1561                 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
   1562             m_bIsVideo = true;
   1563             videoWidth = newStream->width;
   1564             videoHeight = newStream->height;
   1565             if ((VIDEO_4K_WIDTH <= newStream->width) &&
   1566                     (VIDEO_4K_HEIGHT <= newStream->height)) {
   1567                 m_bIs4KVideo = true;
   1568             }
   1569             m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
   1570                                   (newStream->height <= maxEisHeight);
   1571         }
   1572         if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
   1573                 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
   1574             switch (newStream->format) {
   1575             case HAL_PIXEL_FORMAT_BLOB:
   1576                 stallStreamCnt++;
   1577                 if (isOnEncoder(maxViewfinderSize, newStream->width,
   1578                         newStream->height)) {
   1579                     numStreamsOnEncoder++;
   1580                     bJpegOnEncoder = true;
   1581                 }
   1582                 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
   1583                         newStream->width);
   1584                 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
   1585                         newStream->height);;
   1586                 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
   1587                         "FATAL: max_downscale_factor cannot be zero and so assert");
   1588                 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
   1589                     (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
   1590                     LOGH("Setting small jpeg size flag to true");
   1591                     bSmallJpegSize = true;
   1592                 }
   1593                 break;
   1594             case HAL_PIXEL_FORMAT_RAW10:
   1595             case HAL_PIXEL_FORMAT_RAW_OPAQUE:
   1596             case HAL_PIXEL_FORMAT_RAW16:
   1597                 rawStreamCnt++;
   1598                 break;
   1599             case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
   1600                 processedStreamCnt++;
   1601                 if (isOnEncoder(maxViewfinderSize, newStream->width,
   1602                         newStream->height)) {
   1603                     if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
   1604                             !IS_USAGE_ZSL(newStream->usage)) {
   1605                         commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
   1606                     }
   1607                     numStreamsOnEncoder++;
   1608                 }
   1609                 break;
   1610             case HAL_PIXEL_FORMAT_YCbCr_420_888:
   1611                 processedStreamCnt++;
   1612                 if (isOnEncoder(maxViewfinderSize, newStream->width,
   1613                         newStream->height)) {
   1614                     // If Yuv888 size is not greater than 4K, set feature mask
   1615                     // to SUPERSET so that it support concurrent request on
   1616                     // YUV and JPEG.
   1617                     if (newStream->width <= VIDEO_4K_WIDTH &&
   1618                             newStream->height <= VIDEO_4K_HEIGHT) {
   1619                         commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
   1620                     }
   1621                     numStreamsOnEncoder++;
   1622                     numYuv888OnEncoder++;
   1623                     largeYuv888Size.width = newStream->width;
   1624                     largeYuv888Size.height = newStream->height;
   1625                 }
   1626                 break;
   1627             default:
   1628                 processedStreamCnt++;
   1629                 if (isOnEncoder(maxViewfinderSize, newStream->width,
   1630                         newStream->height)) {
   1631                     commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
   1632                     numStreamsOnEncoder++;
   1633                 }
   1634                 break;
   1635             }
   1636 
   1637         }
   1638     }
   1639 
   1640     if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
   1641             gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
   1642             !m_bIsVideo) {
   1643         m_bEisEnable = false;
   1644     }
   1645 
   1646     /* Logic to enable/disable TNR based on specific config size/etc.*/
   1647     if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
   1648             ((videoWidth == 1920 && videoHeight == 1080) ||
   1649             (videoWidth == 1280 && videoHeight == 720)) &&
   1650             (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
   1651         m_bTnrEnabled = true;
   1652 
   1653     /* Check if num_streams is sane */
   1654     if (stallStreamCnt > MAX_STALLING_STREAMS ||
   1655             rawStreamCnt > MAX_RAW_STREAMS ||
   1656             processedStreamCnt > MAX_PROCESSED_STREAMS) {
   1657         LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
   1658                  stallStreamCnt, rawStreamCnt, processedStreamCnt);
   1659         pthread_mutex_unlock(&mMutex);
   1660         return -EINVAL;
   1661     }
   1662     /* Check whether we have zsl stream or 4k video case */
   1663     if (isZsl && m_bIsVideo) {
   1664         LOGE("Currently invalid configuration ZSL&Video!");
   1665         pthread_mutex_unlock(&mMutex);
   1666         return -EINVAL;
   1667     }
   1668     /* Check if stream sizes are sane */
   1669     if (numStreamsOnEncoder > 2) {
   1670         LOGE("Number of streams on ISP encoder path exceeds limits of 2");
   1671         pthread_mutex_unlock(&mMutex);
   1672         return -EINVAL;
   1673     } else if (1 < numStreamsOnEncoder){
   1674         bUseCommonFeatureMask = true;
   1675         LOGH("Multiple streams above max viewfinder size, common mask needed");
   1676     }
   1677 
   1678     /* Check if BLOB size is greater than 4k in 4k recording case */
   1679     if (m_bIs4KVideo && bJpegExceeds4K) {
   1680         LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
   1681         pthread_mutex_unlock(&mMutex);
   1682         return -EINVAL;
   1683     }
   1684 
   1685     // When JPEG and preview streams share VFE output, CPP will not apply CAC2
   1686     // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
   1687     // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
   1688     // is not true. Otherwise testMandatoryOutputCombinations will fail with following
   1689     // configurations:
   1690     //    {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
   1691     //    {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
   1692     //    (These two configurations will not have CAC2 enabled even in HQ modes.)
   1693     if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
   1694         ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
   1695                 __func__);
   1696         pthread_mutex_unlock(&mMutex);
   1697         return -EINVAL;
   1698     }
   1699 
   1700     // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
   1701     // the YUV stream's size is greater or equal to the JPEG size, set common
   1702     // postprocess mask to NONE, so that we can take advantage of postproc bypass.
   1703     if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
   1704             jpegSize.width, jpegSize.height) &&
   1705             largeYuv888Size.width > jpegSize.width &&
   1706             largeYuv888Size.height > jpegSize.height) {
   1707         bYuv888OverrideJpeg = true;
   1708     } else if (!isJpeg && numStreamsOnEncoder > 1) {
   1709         commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
   1710     }
   1711 
   1712     LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
   1713             maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
   1714             commonFeatureMask);
   1715     LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
   1716             numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
   1717 
   1718     rc = validateStreamDimensions(streamList);
   1719     if (rc == NO_ERROR) {
   1720         rc = validateStreamRotations(streamList);
   1721     }
   1722     if (rc != NO_ERROR) {
   1723         LOGE("Invalid stream configuration requested!");
   1724         pthread_mutex_unlock(&mMutex);
   1725         return rc;
   1726     }
   1727 
   1728     camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
   1729     for (size_t i = 0; i < streamList->num_streams; i++) {
   1730         camera3_stream_t *newStream = streamList->streams[i];
   1731         LOGH("newStream type = %d, stream format = %d "
   1732                 "stream size : %d x %d, stream rotation = %d",
   1733                  newStream->stream_type, newStream->format,
   1734                 newStream->width, newStream->height, newStream->rotation);
   1735         //if the stream is in the mStreamList validate it
   1736         bool stream_exists = false;
   1737         for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
   1738                 it != mStreamInfo.end(); it++) {
   1739             if ((*it)->stream == newStream) {
   1740                 QCamera3ProcessingChannel *channel =
   1741                     (QCamera3ProcessingChannel*)(*it)->stream->priv;
   1742                 stream_exists = true;
   1743                 if (channel)
   1744                     delete channel;
   1745                 (*it)->status = VALID;
   1746                 (*it)->stream->priv = NULL;
   1747                 (*it)->channel = NULL;
   1748             }
   1749         }
   1750         if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
   1751             //new stream
   1752             stream_info_t* stream_info;
   1753             stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
   1754             if (!stream_info) {
   1755                LOGE("Could not allocate stream info");
   1756                rc = -ENOMEM;
   1757                pthread_mutex_unlock(&mMutex);
   1758                return rc;
   1759             }
   1760             stream_info->stream = newStream;
   1761             stream_info->status = VALID;
   1762             stream_info->channel = NULL;
   1763             mStreamInfo.push_back(stream_info);
   1764         }
   1765         /* Covers Opaque ZSL and API1 F/W ZSL */
   1766         if (IS_USAGE_ZSL(newStream->usage)
   1767                 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
   1768             if (zslStream != NULL) {
   1769                 LOGE("Multiple input/reprocess streams requested!");
   1770                 pthread_mutex_unlock(&mMutex);
   1771                 return BAD_VALUE;
   1772             }
   1773             zslStream = newStream;
   1774         }
   1775         /* Covers YUV reprocess */
   1776         if (inputStream != NULL) {
   1777             if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
   1778                     && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
   1779                     && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
   1780                     && inputStream->width == newStream->width
   1781                     && inputStream->height == newStream->height) {
   1782                 if (zslStream != NULL) {
   1783                     /* This scenario indicates multiple YUV streams with same size
   1784                      * as input stream have been requested, since zsl stream handle
   1785                      * is solely use for the purpose of overriding the size of streams
   1786                      * which share h/w streams we will just make a guess here as to
   1787                      * which of the stream is a ZSL stream, this will be refactored
   1788                      * once we make generic logic for streams sharing encoder output
   1789                      */
   1790                     LOGH("Warning, Multiple ip/reprocess streams requested!");
   1791                 }
   1792                 zslStream = newStream;
   1793             }
   1794         }
   1795     }
   1796 
   1797     /* If a zsl stream is set, we know that we have configured at least one input or
   1798        bidirectional stream */
   1799     if (NULL != zslStream) {
   1800         mInputStreamInfo.dim.width = (int32_t)zslStream->width;
   1801         mInputStreamInfo.dim.height = (int32_t)zslStream->height;
   1802         mInputStreamInfo.format = zslStream->format;
   1803         mInputStreamInfo.usage = zslStream->usage;
   1804         LOGD("Input stream configured! %d x %d, format %d, usage %d",
   1805                  mInputStreamInfo.dim.width,
   1806                 mInputStreamInfo.dim.height,
   1807                 mInputStreamInfo.format, mInputStreamInfo.usage);
   1808     }
   1809 
   1810     cleanAndSortStreamInfo();
   1811     if (mMetadataChannel) {
   1812         delete mMetadataChannel;
   1813         mMetadataChannel = NULL;
   1814     }
   1815     if (mSupportChannel) {
   1816         delete mSupportChannel;
   1817         mSupportChannel = NULL;
   1818     }
   1819 
   1820     if (mAnalysisChannel) {
   1821         delete mAnalysisChannel;
   1822         mAnalysisChannel = NULL;
   1823     }
   1824 
   1825     if (mDummyBatchChannel) {
   1826         delete mDummyBatchChannel;
   1827         mDummyBatchChannel = NULL;
   1828     }
   1829 
   1830     //Create metadata channel and initialize it
   1831     cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
   1832     setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
   1833             gCamCapability[mCameraId]->color_arrangement);
   1834     mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
   1835                     mChannelHandle, mCameraHandle->ops, captureResultCb,
   1836                     &padding_info, metadataFeatureMask, this);
   1837     if (mMetadataChannel == NULL) {
   1838         LOGE("failed to allocate metadata channel");
   1839         rc = -ENOMEM;
   1840         pthread_mutex_unlock(&mMutex);
   1841         return rc;
   1842     }
   1843     rc = mMetadataChannel->initialize(IS_TYPE_NONE);
   1844     if (rc < 0) {
   1845         LOGE("metadata channel initialization failed");
   1846         delete mMetadataChannel;
   1847         mMetadataChannel = NULL;
   1848         pthread_mutex_unlock(&mMutex);
   1849         return rc;
   1850     }
   1851 
   1852     // Create analysis stream all the time, even when h/w support is not available
   1853     {
   1854         cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
   1855         setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
   1856                 gCamCapability[mCameraId]->color_arrangement);
   1857         cam_analysis_info_t analysisInfo;
   1858         int32_t ret = NO_ERROR;
   1859         ret = mCommon.getAnalysisInfo(
   1860                 FALSE,
   1861                 TRUE,
   1862                 analysisFeatureMask,
   1863                 &analysisInfo);
   1864         if (ret == NO_ERROR) {
   1865             mAnalysisChannel = new QCamera3SupportChannel(
   1866                     mCameraHandle->camera_handle,
   1867                     mChannelHandle,
   1868                     mCameraHandle->ops,
   1869                     &analysisInfo.analysis_padding_info,
   1870                     analysisFeatureMask,
   1871                     CAM_STREAM_TYPE_ANALYSIS,
   1872                     &analysisInfo.analysis_max_res,
   1873                     (analysisInfo.analysis_format
   1874                     == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
   1875                     : CAM_FORMAT_YUV_420_NV21),
   1876                     analysisInfo.hw_analysis_supported,
   1877                     gCamCapability[mCameraId]->color_arrangement,
   1878                     this,
   1879                     0); // force buffer count to 0
   1880         } else {
   1881             LOGW("getAnalysisInfo failed, ret = %d", ret);
   1882         }
   1883         if (!mAnalysisChannel) {
   1884             LOGW("Analysis channel cannot be created");
   1885         }
   1886     }
   1887 
   1888     bool isRawStreamRequested = false;
   1889     memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
   1890     /* Allocate channel objects for the requested streams */
   1891     for (size_t i = 0; i < streamList->num_streams; i++) {
   1892         camera3_stream_t *newStream = streamList->streams[i];
   1893         uint32_t stream_usage = newStream->usage;
   1894         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
   1895         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
   1896         struct camera_info *p_info = NULL;
   1897         pthread_mutex_lock(&gCamLock);
   1898         p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
   1899         pthread_mutex_unlock(&gCamLock);
   1900         if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
   1901                 || IS_USAGE_ZSL(newStream->usage)) &&
   1902             newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
   1903             mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
   1904             if (bUseCommonFeatureMask) {
   1905                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
   1906                         commonFeatureMask;
   1907             } else {
   1908                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
   1909                         CAM_QCOM_FEATURE_NONE;
   1910             }
   1911 
   1912         } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
   1913                 LOGH("Input stream configured, reprocess config");
   1914         } else {
   1915             //for non zsl streams find out the format
   1916             switch (newStream->format) {
   1917             case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
   1918             {
   1919                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
   1920                         CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
   1921                 /* add additional features to pp feature mask */
   1922                 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
   1923                         mStreamConfigInfo.num_streams);
   1924 
   1925                 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
   1926                         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
   1927                                 CAM_STREAM_TYPE_VIDEO;
   1928                     if (m_bTnrEnabled && m_bTnrVideo) {
   1929                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
   1930                             CAM_QCOM_FEATURE_CPP_TNR;
   1931                         //TNR and CDS are mutually exclusive. So reset CDS from feature mask
   1932                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
   1933                                 ~CAM_QCOM_FEATURE_CDS;
   1934                     }
   1935                 } else {
   1936                         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
   1937                             CAM_STREAM_TYPE_PREVIEW;
   1938                     if (m_bTnrEnabled && m_bTnrPreview) {
   1939                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
   1940                                 CAM_QCOM_FEATURE_CPP_TNR;
   1941                         //TNR and CDS are mutually exclusive. So reset CDS from feature mask
   1942                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
   1943                                 ~CAM_QCOM_FEATURE_CDS;
   1944                     }
   1945                     padding_info.width_padding = mSurfaceStridePadding;
   1946                     padding_info.height_padding = CAM_PAD_TO_2;
   1947                 }
   1948                 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
   1949                         (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
   1950                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
   1951                             newStream->height;
   1952                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
   1953                             newStream->width;
   1954                 }
   1955             }
   1956             break;
   1957             case HAL_PIXEL_FORMAT_YCbCr_420_888:
   1958                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
   1959                 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
   1960                     if (bUseCommonFeatureMask)
   1961                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
   1962                                 commonFeatureMask;
   1963                     else
   1964                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
   1965                                 CAM_QCOM_FEATURE_NONE;
   1966                 } else {
   1967                     mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
   1968                             CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
   1969                 }
   1970             break;
   1971             case HAL_PIXEL_FORMAT_BLOB:
   1972                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
   1973                 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
   1974                 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
   1975                      mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
   1976                              CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
   1977                 } else {
   1978                     if (bUseCommonFeatureMask &&
   1979                             isOnEncoder(maxViewfinderSize, newStream->width,
   1980                             newStream->height)) {
   1981                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
   1982                     } else {
   1983                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
   1984                     }
   1985                 }
   1986                 if (isZsl) {
   1987                     if (zslStream) {
   1988                         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
   1989                                 (int32_t)zslStream->width;
   1990                         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
   1991                                 (int32_t)zslStream->height;
   1992                     } else {
   1993                         LOGE("Error, No ZSL stream identified");
   1994                         pthread_mutex_unlock(&mMutex);
   1995                         return -EINVAL;
   1996                     }
   1997                 } else if (m_bIs4KVideo) {
   1998                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
   1999                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
   2000                 } else if (bYuv888OverrideJpeg) {
   2001                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
   2002                             (int32_t)largeYuv888Size.width;
   2003                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
   2004                             (int32_t)largeYuv888Size.height;
   2005                 }
   2006                 break;
   2007             case HAL_PIXEL_FORMAT_RAW_OPAQUE:
   2008             case HAL_PIXEL_FORMAT_RAW16:
   2009             case HAL_PIXEL_FORMAT_RAW10:
   2010                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
   2011                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
   2012                 isRawStreamRequested = true;
   2013                 break;
   2014             default:
   2015                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
   2016                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
   2017                 break;
   2018             }
   2019         }
   2020 
   2021         setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
   2022                 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
   2023                 gCamCapability[mCameraId]->color_arrangement);
   2024 
   2025         if (newStream->priv == NULL) {
   2026             //New stream, construct channel
   2027             switch (newStream->stream_type) {
   2028             case CAMERA3_STREAM_INPUT:
   2029                 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
   2030                 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
   2031                 break;
   2032             case CAMERA3_STREAM_BIDIRECTIONAL:
   2033                 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
   2034                     GRALLOC_USAGE_HW_CAMERA_WRITE;
   2035                 break;
   2036             case CAMERA3_STREAM_OUTPUT:
   2037                 /* For video encoding stream, set read/write rarely
   2038                  * flag so that they may be set to un-cached */
   2039                 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
   2040                     newStream->usage |=
   2041                          (GRALLOC_USAGE_SW_READ_RARELY |
   2042                          GRALLOC_USAGE_SW_WRITE_RARELY |
   2043                          GRALLOC_USAGE_HW_CAMERA_WRITE);
   2044                 else if (IS_USAGE_ZSL(newStream->usage))
   2045                 {
   2046                     LOGD("ZSL usage flag skipping");
   2047                 }
   2048                 else if (newStream == zslStream
   2049                         || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
   2050                     newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
   2051                 } else
   2052                     newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
   2053                 break;
   2054             default:
   2055                 LOGE("Invalid stream_type %d", newStream->stream_type);
   2056                 break;
   2057             }
   2058 
   2059             if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
   2060                     newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
   2061                 QCamera3ProcessingChannel *channel = NULL;
   2062                 switch (newStream->format) {
   2063                 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
   2064                     if ((newStream->usage &
   2065                             private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
   2066                             (streamList->operation_mode ==
   2067                             CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
   2068                     ) {
   2069                         channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
   2070                                 mChannelHandle, mCameraHandle->ops, captureResultCb,
   2071                                 &gCamCapability[mCameraId]->padding_info,
   2072                                 this,
   2073                                 newStream,
   2074                                 (cam_stream_type_t)
   2075                                         mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
   2076                                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
   2077                                 mMetadataChannel,
   2078                                 0); //heap buffers are not required for HFR video channel
   2079                         if (channel == NULL) {
   2080                             LOGE("allocation of channel failed");
   2081                             pthread_mutex_unlock(&mMutex);
   2082                             return -ENOMEM;
   2083                         }
   2084                         //channel->getNumBuffers() will return 0 here so use
   2085                         //MAX_INFLIGH_HFR_REQUESTS
   2086                         newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
   2087                         newStream->priv = channel;
   2088                         LOGI("num video buffers in HFR mode: %d",
   2089                                  MAX_INFLIGHT_HFR_REQUESTS);
   2090                     } else {
   2091                         /* Copy stream contents in HFR preview only case to create
   2092                          * dummy batch channel so that sensor streaming is in
   2093                          * HFR mode */
   2094                         if (!m_bIsVideo && (streamList->operation_mode ==
   2095                                 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
   2096                             mDummyBatchStream = *newStream;
   2097                         }
   2098                         channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
   2099                                 mChannelHandle, mCameraHandle->ops, captureResultCb,
   2100                                 &gCamCapability[mCameraId]->padding_info,
   2101                                 this,
   2102                                 newStream,
   2103                                 (cam_stream_type_t)
   2104                                         mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
   2105                                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
   2106                                 mMetadataChannel,
   2107                                 MAX_INFLIGHT_REQUESTS);
   2108                         if (channel == NULL) {
   2109                             LOGE("allocation of channel failed");
   2110                             pthread_mutex_unlock(&mMutex);
   2111                             return -ENOMEM;
   2112                         }
   2113                         newStream->max_buffers = channel->getNumBuffers();
   2114                         newStream->priv = channel;
   2115                     }
   2116                     break;
   2117                 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
   2118                     channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
   2119                             mChannelHandle,
   2120                             mCameraHandle->ops, captureResultCb,
   2121                             &padding_info,
   2122                             this,
   2123                             newStream,
   2124                             (cam_stream_type_t)
   2125                                     mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
   2126                             mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
   2127                             mMetadataChannel);
   2128                     if (channel == NULL) {
   2129                         LOGE("allocation of YUV channel failed");
   2130                         pthread_mutex_unlock(&mMutex);
   2131                         return -ENOMEM;
   2132                     }
   2133                     newStream->max_buffers = channel->getNumBuffers();
   2134                     newStream->priv = channel;
   2135                     break;
   2136                 }
   2137                 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
   2138                 case HAL_PIXEL_FORMAT_RAW16:
   2139                 case HAL_PIXEL_FORMAT_RAW10:
   2140                     mRawChannel = new QCamera3RawChannel(
   2141                             mCameraHandle->camera_handle, mChannelHandle,
   2142                             mCameraHandle->ops, captureResultCb,
   2143                             &padding_info,
   2144                             this, newStream,
   2145                             mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
   2146                             mMetadataChannel,
   2147                             (newStream->format == HAL_PIXEL_FORMAT_RAW16));
   2148                     if (mRawChannel == NULL) {
   2149                         LOGE("allocation of raw channel failed");
   2150                         pthread_mutex_unlock(&mMutex);
   2151                         return -ENOMEM;
   2152                     }
   2153                     newStream->max_buffers = mRawChannel->getNumBuffers();
   2154                     newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
   2155                     break;
   2156                 case HAL_PIXEL_FORMAT_BLOB:
   2157                     // Max live snapshot inflight buffer is 1. This is to mitigate
   2158                     // frame drop issues for video snapshot. The more buffers being
   2159                     // allocated, the more frame drops there are.
   2160                     mPictureChannel = new QCamera3PicChannel(
   2161                             mCameraHandle->camera_handle, mChannelHandle,
   2162                             mCameraHandle->ops, captureResultCb,
   2163                             &padding_info, this, newStream,
   2164                             mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
   2165                             m_bIs4KVideo, isZsl, mMetadataChannel,
   2166                             (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
   2167                     if (mPictureChannel == NULL) {
   2168                         LOGE("allocation of channel failed");
   2169                         pthread_mutex_unlock(&mMutex);
   2170                         return -ENOMEM;
   2171                     }
   2172                     newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
   2173                     newStream->max_buffers = mPictureChannel->getNumBuffers();
   2174                     mPictureChannel->overrideYuvSize(
   2175                             mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
   2176                             mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
   2177                     break;
   2178 
   2179                 default:
   2180                     LOGE("not a supported format 0x%x", newStream->format);
   2181                     break;
   2182                 }
   2183             } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
   2184                 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
   2185             } else {
   2186                 LOGE("Error, Unknown stream type");
   2187                 pthread_mutex_unlock(&mMutex);
   2188                 return -EINVAL;
   2189             }
   2190 
   2191             QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
   2192             if (channel != NULL && channel->isUBWCEnabled()) {
   2193                 cam_format_t fmt = channel->getStreamDefaultFormat(
   2194                         mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
   2195                         newStream->width, newStream->height);
   2196                 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
   2197                     newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
   2198                 }
   2199             }
   2200 
   2201             for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
   2202                     it != mStreamInfo.end(); it++) {
   2203                 if ((*it)->stream == newStream) {
   2204                     (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
   2205                     break;
   2206                 }
   2207             }
   2208         } else {
   2209             // Channel already exists for this stream
   2210             // Do nothing for now
   2211         }
   2212         padding_info = gCamCapability[mCameraId]->padding_info;
   2213 
   2214         /* Do not add entries for input stream in metastream info
   2215          * since there is no real stream associated with it
   2216          */
   2217         if (newStream->stream_type != CAMERA3_STREAM_INPUT)
   2218             mStreamConfigInfo.num_streams++;
   2219     }
   2220 
   2221     //RAW DUMP channel
   2222     if (mEnableRawDump && isRawStreamRequested == false){
   2223         cam_dimension_t rawDumpSize;
   2224         rawDumpSize = getMaxRawSize(mCameraId);
   2225         cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
   2226         setPAAFSupport(rawDumpFeatureMask,
   2227                 CAM_STREAM_TYPE_RAW,
   2228                 gCamCapability[mCameraId]->color_arrangement);
   2229         mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
   2230                                   mChannelHandle,
   2231                                   mCameraHandle->ops,
   2232                                   rawDumpSize,
   2233                                   &padding_info,
   2234                                   this, rawDumpFeatureMask);
   2235         if (!mRawDumpChannel) {
   2236             LOGE("Raw Dump channel cannot be created");
   2237             pthread_mutex_unlock(&mMutex);
   2238             return -ENOMEM;
   2239         }
   2240     }
   2241 
   2242 
   2243     if (mAnalysisChannel) {
   2244         cam_analysis_info_t analysisInfo;
   2245         memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
   2246         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
   2247                 CAM_STREAM_TYPE_ANALYSIS;
   2248         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
   2249                 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
   2250         setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
   2251                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
   2252                 gCamCapability[mCameraId]->color_arrangement);
   2253         rc = mCommon.getAnalysisInfo(FALSE, TRUE,
   2254                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
   2255                 &analysisInfo);
   2256         if (rc != NO_ERROR) {
   2257             LOGE("getAnalysisInfo failed, ret = %d", rc);
   2258             pthread_mutex_unlock(&mMutex);
   2259             return rc;
   2260         }
   2261         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
   2262                 analysisInfo.analysis_max_res;
   2263         mStreamConfigInfo.num_streams++;
   2264     }
   2265 
   2266     if (isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
   2267         cam_analysis_info_t supportInfo;
   2268         memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
   2269         cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
   2270         setPAAFSupport(callbackFeatureMask,
   2271                 CAM_STREAM_TYPE_CALLBACK,
   2272                 gCamCapability[mCameraId]->color_arrangement);
   2273         int32_t ret = NO_ERROR;
   2274         ret = mCommon.getAnalysisInfo(FALSE, TRUE, callbackFeatureMask, &supportInfo);
   2275         if (ret != NO_ERROR) {
   2276             /* Ignore the error for Mono camera
   2277              * because the PAAF bit mask is only set
   2278              * for CAM_STREAM_TYPE_ANALYSIS stream type
   2279              */
   2280             if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
   2281                 LOGW("getAnalysisInfo failed, ret = %d", ret);
   2282             }
   2283         }
   2284         mSupportChannel = new QCamera3SupportChannel(
   2285                 mCameraHandle->camera_handle,
   2286                 mChannelHandle,
   2287                 mCameraHandle->ops,
   2288                 &gCamCapability[mCameraId]->padding_info,
   2289                 callbackFeatureMask,
   2290                 CAM_STREAM_TYPE_CALLBACK,
   2291                 &QCamera3SupportChannel::kDim,
   2292                 CAM_FORMAT_YUV_420_NV21,
   2293                 supportInfo.hw_analysis_supported,
   2294                 gCamCapability[mCameraId]->color_arrangement,
   2295                 this);
   2296         if (!mSupportChannel) {
   2297             LOGE("dummy channel cannot be created");
   2298             pthread_mutex_unlock(&mMutex);
   2299             return -ENOMEM;
   2300         }
   2301     }
   2302 
   2303     if (mSupportChannel) {
   2304         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
   2305                 QCamera3SupportChannel::kDim;
   2306         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
   2307                 CAM_STREAM_TYPE_CALLBACK;
   2308         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
   2309                 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
   2310         setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
   2311                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
   2312                 gCamCapability[mCameraId]->color_arrangement);
   2313         mStreamConfigInfo.num_streams++;
   2314     }
   2315 
   2316     if (mRawDumpChannel) {
   2317         cam_dimension_t rawSize;
   2318         rawSize = getMaxRawSize(mCameraId);
   2319         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
   2320                 rawSize;
   2321         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
   2322                 CAM_STREAM_TYPE_RAW;
   2323         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
   2324                 CAM_QCOM_FEATURE_NONE;
   2325         setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
   2326                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
   2327                 gCamCapability[mCameraId]->color_arrangement);
   2328         mStreamConfigInfo.num_streams++;
   2329     }
   2330     /* In HFR mode, if video stream is not added, create a dummy channel so that
   2331      * ISP can create a batch mode even for preview only case. This channel is
   2332      * never 'start'ed (no stream-on), it is only 'initialized'  */
   2333     if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
   2334             !m_bIsVideo) {
   2335         cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
   2336         setPAAFSupport(dummyFeatureMask,
   2337                 CAM_STREAM_TYPE_VIDEO,
   2338                 gCamCapability[mCameraId]->color_arrangement);
   2339         mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
   2340                 mChannelHandle,
   2341                 mCameraHandle->ops, captureResultCb,
   2342                 &gCamCapability[mCameraId]->padding_info,
   2343                 this,
   2344                 &mDummyBatchStream,
   2345                 CAM_STREAM_TYPE_VIDEO,
   2346                 dummyFeatureMask,
   2347                 mMetadataChannel);
   2348         if (NULL == mDummyBatchChannel) {
   2349             LOGE("creation of mDummyBatchChannel failed."
   2350                     "Preview will use non-hfr sensor mode ");
   2351         }
   2352     }
   2353     if (mDummyBatchChannel) {
   2354         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
   2355                 mDummyBatchStream.width;
   2356         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
   2357                 mDummyBatchStream.height;
   2358         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
   2359                 CAM_STREAM_TYPE_VIDEO;
   2360         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
   2361                 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
   2362         setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
   2363                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
   2364                 gCamCapability[mCameraId]->color_arrangement);
   2365         mStreamConfigInfo.num_streams++;
   2366     }
   2367 
   2368     mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
   2369     mStreamConfigInfo.buffer_info.max_buffers =
   2370             m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
   2371 
   2372     /* Initialize mPendingRequestInfo and mPendingBuffersMap */
   2373     for (pendingRequestIterator i = mPendingRequestsList.begin();
   2374             i != mPendingRequestsList.end();) {
   2375         i = erasePendingRequest(i);
   2376     }
   2377     mPendingFrameDropList.clear();
   2378     // Initialize/Reset the pending buffers list
   2379     for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
   2380         req.mPendingBufferList.clear();
   2381     }
   2382     mPendingBuffersMap.mPendingBuffersInRequest.clear();
   2383 
   2384     mPendingReprocessResultList.clear();
   2385 
   2386     mCurJpegMeta.clear();
   2387     //Get min frame duration for this streams configuration
   2388     deriveMinFrameDuration();
   2389 
   2390     // Update state
   2391     mState = CONFIGURED;
   2392 
   2393     pthread_mutex_unlock(&mMutex);
   2394 
   2395     return rc;
   2396 }
   2397 
   2398 /*===========================================================================
   2399  * FUNCTION   : validateCaptureRequest
   2400  *
   2401  * DESCRIPTION: validate a capture request from camera service
   2402  *
   2403  * PARAMETERS :
   2404  *   @request : request from framework to process
   2405  *
   2406  * RETURN     :
   2407  *
   2408  *==========================================================================*/
   2409 int QCamera3HardwareInterface::validateCaptureRequest(
   2410                     camera3_capture_request_t *request)
   2411 {
   2412     ssize_t idx = 0;
   2413     const camera3_stream_buffer_t *b;
   2414     CameraMetadata meta;
   2415 
   2416     /* Sanity check the request */
   2417     if (request == NULL) {
   2418         LOGE("NULL capture request");
   2419         return BAD_VALUE;
   2420     }
   2421 
   2422     if ((request->settings == NULL) && (mState == CONFIGURED)) {
   2423         /*settings cannot be null for the first request*/
   2424         return BAD_VALUE;
   2425     }
   2426 
   2427     uint32_t frameNumber = request->frame_number;
   2428     if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
   2429         LOGE("Request %d: No output buffers provided!",
   2430                 __FUNCTION__, frameNumber);
   2431         return BAD_VALUE;
   2432     }
   2433     if (request->num_output_buffers >= MAX_NUM_STREAMS) {
   2434         LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
   2435                  request->num_output_buffers, MAX_NUM_STREAMS);
   2436         return BAD_VALUE;
   2437     }
   2438     if (request->input_buffer != NULL) {
   2439         b = request->input_buffer;
   2440         if (b->status != CAMERA3_BUFFER_STATUS_OK) {
   2441             LOGE("Request %d: Buffer %ld: Status not OK!",
   2442                      frameNumber, (long)idx);
   2443             return BAD_VALUE;
   2444         }
   2445         if (b->release_fence != -1) {
   2446             LOGE("Request %d: Buffer %ld: Has a release fence!",
   2447                      frameNumber, (long)idx);
   2448             return BAD_VALUE;
   2449         }
   2450         if (b->buffer == NULL) {
   2451             LOGE("Request %d: Buffer %ld: NULL buffer handle!",
   2452                      frameNumber, (long)idx);
   2453             return BAD_VALUE;
   2454         }
   2455     }
   2456 
   2457     // Validate all buffers
   2458     b = request->output_buffers;
   2459     do {
   2460         QCamera3ProcessingChannel *channel =
   2461                 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
   2462         if (channel == NULL) {
   2463             LOGE("Request %d: Buffer %ld: Unconfigured stream!",
   2464                      frameNumber, (long)idx);
   2465             return BAD_VALUE;
   2466         }
   2467         if (b->status != CAMERA3_BUFFER_STATUS_OK) {
   2468             LOGE("Request %d: Buffer %ld: Status not OK!",
   2469                      frameNumber, (long)idx);
   2470             return BAD_VALUE;
   2471         }
   2472         if (b->release_fence != -1) {
   2473             LOGE("Request %d: Buffer %ld: Has a release fence!",
   2474                      frameNumber, (long)idx);
   2475             return BAD_VALUE;
   2476         }
   2477         if (b->buffer == NULL) {
   2478             LOGE("Request %d: Buffer %ld: NULL buffer handle!",
   2479                      frameNumber, (long)idx);
   2480             return BAD_VALUE;
   2481         }
   2482         if (*(b->buffer) == NULL) {
   2483             LOGE("Request %d: Buffer %ld: NULL private handle!",
   2484                      frameNumber, (long)idx);
   2485             return BAD_VALUE;
   2486         }
   2487         idx++;
   2488         b = request->output_buffers + idx;
   2489     } while (idx < (ssize_t)request->num_output_buffers);
   2490 
   2491     return NO_ERROR;
   2492 }
   2493 
   2494 /*===========================================================================
   2495  * FUNCTION   : deriveMinFrameDuration
   2496  *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
   2498  *              on currently configured streams.
   2499  *
   2500  * PARAMETERS : NONE
   2501  *
   2502  * RETURN     : NONE
   2503  *
   2504  *==========================================================================*/
   2505 void QCamera3HardwareInterface::deriveMinFrameDuration()
   2506 {
   2507     int32_t maxJpegDim, maxProcessedDim, maxRawDim;
   2508 
   2509     maxJpegDim = 0;
   2510     maxProcessedDim = 0;
   2511     maxRawDim = 0;
   2512 
   2513     // Figure out maximum jpeg, processed, and raw dimensions
   2514     for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
   2515         it != mStreamInfo.end(); it++) {
   2516 
   2517         // Input stream doesn't have valid stream_type
   2518         if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
   2519             continue;
   2520 
   2521         int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
   2522         if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
   2523             if (dimension > maxJpegDim)
   2524                 maxJpegDim = dimension;
   2525         } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
   2526                 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
   2527                 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
   2528             if (dimension > maxRawDim)
   2529                 maxRawDim = dimension;
   2530         } else {
   2531             if (dimension > maxProcessedDim)
   2532                 maxProcessedDim = dimension;
   2533         }
   2534     }
   2535 
   2536     size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
   2537             MAX_SIZES_CNT);
   2538 
   2539     //Assume all jpeg dimensions are in processed dimensions.
   2540     if (maxJpegDim > maxProcessedDim)
   2541         maxProcessedDim = maxJpegDim;
   2542     //Find the smallest raw dimension that is greater or equal to jpeg dimension
   2543     if (maxProcessedDim > maxRawDim) {
   2544         maxRawDim = INT32_MAX;
   2545 
   2546         for (size_t i = 0; i < count; i++) {
   2547             int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
   2548                     gCamCapability[mCameraId]->raw_dim[i].height;
   2549             if (dimension >= maxProcessedDim && dimension < maxRawDim)
   2550                 maxRawDim = dimension;
   2551         }
   2552     }
   2553 
   2554     //Find minimum durations for processed, jpeg, and raw
   2555     for (size_t i = 0; i < count; i++) {
   2556         if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
   2557                 gCamCapability[mCameraId]->raw_dim[i].height) {
   2558             mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
   2559             break;
   2560         }
   2561     }
   2562     count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
   2563     for (size_t i = 0; i < count; i++) {
   2564         if (maxProcessedDim ==
   2565                 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
   2566                 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
   2567             mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
   2568             mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
   2569             break;
   2570         }
   2571     }
   2572 }
   2573 
   2574 /*===========================================================================
   2575  * FUNCTION   : getMinFrameDuration
   2576  *
 * DESCRIPTION: get the minimum frame duration based on the currently derived
 *              minimum frame durations and the current request configuration.
 *
 * PARAMETERS : @request: request sent by the frameworks
 *
 * RETURN     : min frame duration for a particular request
   2583  *
   2584  *==========================================================================*/
   2585 int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
   2586 {
   2587     bool hasJpegStream = false;
   2588     bool hasRawStream = false;
   2589     for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
   2590         const camera3_stream_t *stream = request->output_buffers[i].stream;
   2591         if (stream->format == HAL_PIXEL_FORMAT_BLOB)
   2592             hasJpegStream = true;
   2593         else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
   2594                 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
   2595                 stream->format == HAL_PIXEL_FORMAT_RAW16)
   2596             hasRawStream = true;
   2597     }
   2598 
   2599     if (!hasJpegStream)
   2600         return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
   2601     else
   2602         return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
   2603 }
   2604 
   2605 /*===========================================================================
   2606  * FUNCTION   : handleBuffersDuringFlushLock
   2607  *
   2608  * DESCRIPTION: Account for buffers returned from back-end during flush
   2609  *              This function is executed while mMutex is held by the caller.
   2610  *
   2611  * PARAMETERS :
   2612  *   @buffer: image buffer for the callback
   2613  *
   2614  * RETURN     :
   2615  *==========================================================================*/
   2616 void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
   2617 {
   2618     bool buffer_found = false;
   2619     for (List<PendingBuffersInRequest>::iterator req =
   2620             mPendingBuffersMap.mPendingBuffersInRequest.begin();
   2621             req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
   2622         for (List<PendingBufferInfo>::iterator i =
   2623                 req->mPendingBufferList.begin();
   2624                 i != req->mPendingBufferList.end(); i++) {
   2625             if (i->buffer == buffer->buffer) {
   2626                 mPendingBuffersMap.numPendingBufsAtFlush--;
   2627                 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
   2628                     buffer->buffer, req->frame_number,
   2629                     mPendingBuffersMap.numPendingBufsAtFlush);
   2630                 buffer_found = true;
   2631                 break;
   2632             }
   2633         }
   2634         if (buffer_found) {
   2635             break;
   2636         }
   2637     }
   2638     if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
   2639         //signal the flush()
   2640         LOGD("All buffers returned to HAL. Continue flush");
   2641         pthread_cond_signal(&mBuffersCond);
   2642     }
   2643 }
   2644 
   2645 
   2646 /*===========================================================================
   2647  * FUNCTION   : handlePendingReprocResults
   2648  *
   2649  * DESCRIPTION: check and notify on any pending reprocess results
   2650  *
   2651  * PARAMETERS :
   2652  *   @frame_number   : Pending request frame number
   2653  *
   2654  * RETURN     : int32_t type of status
   2655  *              NO_ERROR  -- success
    2656 *              non-zero failure code
   2657  *==========================================================================*/
   2658 int32_t QCamera3HardwareInterface::handlePendingReprocResults(uint32_t frame_number)
   2659 {
   2660     for (List<PendingReprocessResult>::iterator j = mPendingReprocessResultList.begin();
   2661             j != mPendingReprocessResultList.end(); j++) {
   2662         if (j->frame_number == frame_number) {
   2663             mCallbackOps->notify(mCallbackOps, &j->notify_msg);
   2664 
   2665             LOGD("Delayed reprocess notify %d",
   2666                     frame_number);
   2667 
   2668             for (pendingRequestIterator k = mPendingRequestsList.begin();
   2669                     k != mPendingRequestsList.end(); k++) {
   2670 
   2671                 if (k->frame_number == j->frame_number) {
   2672                     LOGD("Found reprocess frame number %d in pending reprocess List "
   2673                             "Take it out!!",
   2674                             k->frame_number);
   2675 
   2676                     camera3_capture_result result;
   2677                     memset(&result, 0, sizeof(camera3_capture_result));
   2678                     result.frame_number = frame_number;
   2679                     result.num_output_buffers = 1;
   2680                     result.output_buffers =  &j->buffer;
   2681                     result.input_buffer = k->input_buffer;
   2682                     result.result = k->settings;
   2683                     result.partial_result = PARTIAL_RESULT_COUNT;
   2684                     mCallbackOps->process_capture_result(mCallbackOps, &result);
   2685 
   2686                     erasePendingRequest(k);
   2687                     break;
   2688                 }
   2689             }
   2690             mPendingReprocessResultList.erase(j);
   2691             break;
   2692         }
   2693     }
   2694     return NO_ERROR;
   2695 }
   2696 
   2697 /*===========================================================================
   2698  * FUNCTION   : handleBatchMetadata
   2699  *
   2700  * DESCRIPTION: Handles metadata buffer callback in batch mode
   2701  *
   2702  * PARAMETERS : @metadata_buf: metadata buffer
   2703  *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
   2704  *                 the meta buf in this method
   2705  *
   2706  * RETURN     :
   2707  *
   2708  *==========================================================================*/
   2709 void QCamera3HardwareInterface::handleBatchMetadata(
   2710         mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
   2711 {
   2712     ATRACE_CALL();
   2713 
   2714     if (NULL == metadata_buf) {
   2715         LOGE("metadata_buf is NULL");
   2716         return;
   2717     }
   2718     /* In batch mode, the metdata will contain the frame number and timestamp of
   2719      * the last frame in the batch. Eg: a batch containing buffers from request
   2720      * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
   2721      * multiple process_capture_requests => 1 set_param => 1 handleBatchMetata =>
   2722      * multiple process_capture_results */
   2723     metadata_buffer_t *metadata =
   2724             (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
   2725     int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
   2726     uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
   2727     uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
   2728     uint32_t frame_number = 0, urgent_frame_number = 0;
   2729     int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
   2730     bool invalid_metadata = false;
   2731     size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
   2732     size_t loopCount = 1;
   2733 
   2734     int32_t *p_frame_number_valid =
   2735             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
   2736     uint32_t *p_frame_number =
   2737             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
   2738     int64_t *p_capture_time =
   2739             POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
   2740     int32_t *p_urgent_frame_number_valid =
   2741             POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
   2742     uint32_t *p_urgent_frame_number =
   2743             POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
   2744 
   2745     if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
   2746             (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
   2747             (NULL == p_urgent_frame_number)) {
   2748         LOGE("Invalid metadata");
   2749         invalid_metadata = true;
   2750     } else {
   2751         frame_number_valid = *p_frame_number_valid;
   2752         last_frame_number = *p_frame_number;
   2753         last_frame_capture_time = *p_capture_time;
   2754         urgent_frame_number_valid = *p_urgent_frame_number_valid;
   2755         last_urgent_frame_number = *p_urgent_frame_number;
   2756     }
   2757 
   2758     /* In batchmode, when no video buffers are requested, set_parms are sent
   2759      * for every capture_request. The difference between consecutive urgent
   2760      * frame numbers and frame numbers should be used to interpolate the
   2761      * corresponding frame numbers and time stamps */
   2762     pthread_mutex_lock(&mMutex);
   2763     if (urgent_frame_number_valid) {
   2764         first_urgent_frame_number =
   2765                 mPendingBatchMap.valueFor(last_urgent_frame_number);
   2766         urgentFrameNumDiff = last_urgent_frame_number + 1 -
   2767                 first_urgent_frame_number;
   2768 
   2769         LOGD("urgent_frm: valid: %d frm_num: %d - %d",
   2770                  urgent_frame_number_valid,
   2771                 first_urgent_frame_number, last_urgent_frame_number);
   2772     }
   2773 
   2774     if (frame_number_valid) {
   2775         first_frame_number = mPendingBatchMap.valueFor(last_frame_number);
   2776         frameNumDiff = last_frame_number + 1 -
   2777                 first_frame_number;
   2778         mPendingBatchMap.removeItem(last_frame_number);
   2779 
   2780         LOGD("frm: valid: %d frm_num: %d - %d",
   2781                  frame_number_valid,
   2782                 first_frame_number, last_frame_number);
   2783 
   2784     }
   2785     pthread_mutex_unlock(&mMutex);
   2786 
   2787     if (urgent_frame_number_valid || frame_number_valid) {
   2788         loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
   2789         if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
   2790             LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
   2791                      urgentFrameNumDiff, last_urgent_frame_number);
   2792         if (frameNumDiff > MAX_HFR_BATCH_SIZE)
   2793             LOGE("frameNumDiff: %d frameNum: %d",
   2794                      frameNumDiff, last_frame_number);
   2795     }
   2796 
   2797     for (size_t i = 0; i < loopCount; i++) {
   2798         /* handleMetadataWithLock is called even for invalid_metadata for
   2799          * pipeline depth calculation */
   2800         if (!invalid_metadata) {
   2801             /* Infer frame number. Batch metadata contains frame number of the
   2802              * last frame */
   2803             if (urgent_frame_number_valid) {
   2804                 if (i < urgentFrameNumDiff) {
   2805                     urgent_frame_number =
   2806                             first_urgent_frame_number + i;
   2807                     LOGD("inferred urgent frame_number: %d",
   2808                              urgent_frame_number);
   2809                     ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
   2810                             CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
   2811                 } else {
   2812                     /* This is to handle when urgentFrameNumDiff < frameNumDiff */
   2813                     ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
   2814                             CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
   2815                 }
   2816             }
   2817 
   2818             /* Infer frame number. Batch metadata contains frame number of the
   2819              * last frame */
   2820             if (frame_number_valid) {
   2821                 if (i < frameNumDiff) {
   2822                     frame_number = first_frame_number + i;
   2823                     LOGD("inferred frame_number: %d", frame_number);
   2824                     ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
   2825                             CAM_INTF_META_FRAME_NUMBER, frame_number);
   2826                 } else {
   2827                     /* This is to handle when urgentFrameNumDiff > frameNumDiff */
   2828                     ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
   2829                              CAM_INTF_META_FRAME_NUMBER_VALID, 0);
   2830                 }
   2831             }
   2832 
   2833             if (last_frame_capture_time) {
   2834                 //Infer timestamp
   2835                 first_frame_capture_time = last_frame_capture_time -
   2836                         (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
   2837                 capture_time =
   2838                         first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
   2839                 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
   2840                         CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
   2841                 LOGD("batch capture_time: %lld, capture_time: %lld",
   2842                          last_frame_capture_time, capture_time);
   2843             }
   2844         }
   2845         pthread_mutex_lock(&mMutex);
   2846         handleMetadataWithLock(metadata_buf,
   2847                 false /* free_and_bufdone_meta_buf */,
   2848                 (i == 0) /* first metadata in the batch metadata */);
   2849         pthread_mutex_unlock(&mMutex);
   2850     }
   2851 
   2852     /* BufDone metadata buffer */
   2853     if (free_and_bufdone_meta_buf) {
   2854         mMetadataChannel->bufDone(metadata_buf);
   2855         free(metadata_buf);
   2856     }
   2857 }
   2858 
   2859 void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
   2860         camera3_error_msg_code_t errorCode)
   2861 {
   2862     camera3_notify_msg_t notify_msg;
   2863     memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
   2864     notify_msg.type = CAMERA3_MSG_ERROR;
   2865     notify_msg.message.error.error_code = errorCode;
   2866     notify_msg.message.error.error_stream = NULL;
   2867     notify_msg.message.error.frame_number = frameNumber;
   2868     mCallbackOps->notify(mCallbackOps, &notify_msg);
   2869 
   2870     return;
   2871 }
   2872 /*===========================================================================
   2873  * FUNCTION   : handleMetadataWithLock
   2874  *
   2875  * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
   2876  *
   2877  * PARAMETERS : @metadata_buf: metadata buffer
   2878  *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
   2879  *                 the meta buf in this method
   2880  *              @firstMetadataInBatch: Boolean to indicate whether this is the
   2881  *                  first metadata in a batch. Valid only for batch mode
   2882  *
   2883  * RETURN     :
   2884  *
   2885  *==========================================================================*/
void QCamera3HardwareInterface::handleMetadataWithLock(
    mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
    bool firstMetadataInBatch)
{
    ATRACE_CALL();
    if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
        //during flush do not send metadata from this thread
        LOGD("not sending metadata during flush or when mState is error");
        if (free_and_bufdone_meta_buf) {
            mMetadataChannel->bufDone(metadata_buf);
            free(metadata_buf);
        }
        return;
    }

    //not in flush
    metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    int32_t frame_number_valid, urgent_frame_number_valid;
    uint32_t frame_number, urgent_frame_number;
    int64_t capture_time;
    nsecs_t currentSysTime;

    int32_t *p_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
    int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    int32_t *p_urgent_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_urgent_frame_number =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
    // p_cam_frame_drop (declared by IF_META_AVAILABLE) stays in scope and is
    // consulted in the request loop below to report per-stream buffer errors.
    IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
            metadata) {
        LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
                 *p_frame_number_valid, *p_frame_number);
    }

    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
            (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
        LOGE("Invalid metadata");
        if (free_and_bufdone_meta_buf) {
            mMetadataChannel->bufDone(metadata_buf);
            free(metadata_buf);
        }
        // Jump to the pipeline-depth accounting so pending requests still age.
        goto done_metadata;
    }
    frame_number_valid =        *p_frame_number_valid;
    frame_number =              *p_frame_number;
    capture_time =              *p_capture_time;
    urgent_frame_number_valid = *p_urgent_frame_number_valid;
    urgent_frame_number =       *p_urgent_frame_number;
    currentSysTime =            systemTime(CLOCK_MONOTONIC);

    // Detect if buffers from any requests are overdue
    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        if ( (currentSysTime - req.timestamp) >
            s2ns(MISSING_REQUEST_BUF_TIMEOUT) ) {
            for (auto &missed : req.mPendingBufferList) {
                LOGE("Current frame: %d. Missing: frame = %d, buffer = %p,"
                    "stream type = %d, stream format = %d",
                    frame_number, req.frame_number, missed.buffer,
                    missed.stream->stream_type, missed.stream->format);
            }
        }
    }
    //Partial result on process_capture_result for timestamp
    if (urgent_frame_number_valid) {
        LOGD("valid urgent frame_number = %u, capture_time = %lld",
           urgent_frame_number, capture_time);

        //Received an urgent Frame Number, handle it
        //using partial results
        for (pendingRequestIterator i =
                mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
            LOGD("Iterator Frame = %d urgent frame = %d",
                 i->frame_number, urgent_frame_number);

            // A non-reprocess request older than the urgent frame with no
            // partial result yet means its urgent metadata was never delivered.
            if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
                (i->partial_result_cnt == 0)) {
                LOGE("Error: HAL missed urgent metadata for frame number %d",
                         i->frame_number);
            }

            if (i->frame_number == urgent_frame_number &&
                     i->bUrgentReceived == 0) {

                camera3_capture_result_t result;
                memset(&result, 0, sizeof(camera3_capture_result_t));

                i->partial_result_cnt++;
                i->bUrgentReceived = 1;
                // Extract 3A metadata
                result.result =
                    translateCbUrgentMetadataToResultMetadata(metadata);
                // Populate metadata result
                result.frame_number = urgent_frame_number;
                result.num_output_buffers = 0;
                result.output_buffers = NULL;
                result.partial_result = i->partial_result_cnt;

                mCallbackOps->process_capture_result(mCallbackOps, &result);
                LOGD("urgent frame_number = %u, capture_time = %lld",
                      result.frame_number, capture_time);
                free_camera_metadata((camera_metadata_t *)result.result);
                break;
            }
        }
    }

    if (!frame_number_valid) {
        LOGD("Not a valid normal frame number, used as SOF only");
        if (free_and_bufdone_meta_buf) {
            mMetadataChannel->bufDone(metadata_buf);
            free(metadata_buf);
        }
        goto done_metadata;
    }
    LOGH("valid frame_number = %u, capture_time = %lld",
            frame_number, capture_time);

    // Main loop: complete every pending request whose frame number is at or
    // below the one carried by this metadata buffer.
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
        // Flush out all entries with less or equal frame numbers.

        camera3_capture_result_t result;
        memset(&result, 0, sizeof(camera3_capture_result_t));

        LOGD("frame_number in the list is %u", i->frame_number);
        i->partial_result_cnt++;
        result.partial_result = i->partial_result_cnt;

        // Check whether any stream buffer corresponding to this is dropped or not
        // If dropped, then send the ERROR_BUFFER for the corresponding stream
        if (p_cam_frame_drop) {
            /* Clear notify_msg structure */
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel *)j->stream->priv;
                uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
                for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
                    if (streamID == p_cam_frame_drop->streamID[k]) {
                        // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
                        LOGE("Start of reporting error frame#=%u, streamID=%u",
                                 i->frame_number, streamID);
                        notify_msg.type = CAMERA3_MSG_ERROR;
                        notify_msg.message.error.frame_number = i->frame_number;
                        notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
                        notify_msg.message.error.error_stream = j->stream;
                        mCallbackOps->notify(mCallbackOps, &notify_msg);
                        LOGE("End of reporting error frame#=%u, streamID=%u",
                                i->frame_number, streamID);
                        PendingFrameDropInfo PendingFrameDrop;
                        PendingFrameDrop.frame_number=i->frame_number;
                        PendingFrameDrop.stream_ID = streamID;
                        // Add the Frame drop info to mPendingFrameDropList
                        mPendingFrameDropList.push_back(PendingFrameDrop);
                   }
                }
            }
        }

        // Send empty metadata with already filled buffers for dropped metadata
        // and send valid metadata with already filled buffers for current metadata
        /* we could hit this case when we either
         * 1. have a pending reprocess request or
         * 2. miss a metadata buffer callback */
        if (i->frame_number < frame_number) {
            if (i->input_buffer) {
                /* this will be handled in handleInputBufferWithLock */
                i++;
                continue;
            } else if (mBatchSize) {

                mPendingLiveRequest--;

                // Batch mode: a skipped frame gets a dummy result carrying only
                // the request id, plus an ERROR_RESULT notification.
                CameraMetadata dummyMetadata;
                dummyMetadata.update(ANDROID_REQUEST_ID, &(i->request_id), 1);
                result.result = dummyMetadata.release();

                notifyError(i->frame_number, CAMERA3_MSG_ERROR_RESULT);
            } else {
                // Non-batch mode: a missing metadata buffer is unrecoverable.
                LOGE("Fatal: Missing metadata buffer for frame number %d", i->frame_number);
                if (free_and_bufdone_meta_buf) {
                    mMetadataChannel->bufDone(metadata_buf);
                    free(metadata_buf);
                }
                mState = ERROR;
                goto done_metadata;
            }
        } else {
            mPendingLiveRequest--;
            /* Clear notify_msg structure */
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));

            // Send shutter notify to frameworks
            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = i->frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
            mCallbackOps->notify(mCallbackOps, &notify_msg);

            i->timestamp = capture_time;

            /* Set the timestamp in display metadata so that clients aware of
               private_handle such as VT can use this un-modified timestamps.
               Camera framework is unaware of this timestamp and cannot change this */
            updateTimeStampInPendingBuffers(i->frame_number, i->timestamp);

            // Find channel requiring metadata, meaning internal offline postprocess
            // is needed.
            //TODO: for now, we don't support two streams requiring metadata at the same time.
            // (because we are not making copies, and metadata buffer is not reference counted.
            bool internalPproc = false;
            for (pendingBufferIterator iter = i->buffers.begin();
                    iter != i->buffers.end(); iter++) {
                if (iter->need_metadata) {
                    internalPproc = true;
                    QCamera3ProcessingChannel *channel =
                            (QCamera3ProcessingChannel *)iter->stream->priv;
                    channel->queueReprocMetadata(metadata_buf);
                    break;
                }
            }

            result.result = translateFromHalMetadata(metadata,
                    i->timestamp, i->request_id, i->jpegMetadata, i->pipeline_depth,
                    i->capture_intent, internalPproc, i->fwkCacMode,
                    firstMetadataInBatch);

            saveExifParams(metadata);

            if (i->blob_request) {
                {
                    //Dump tuning metadata if enabled and available
                    char prop[PROPERTY_VALUE_MAX];
                    memset(prop, 0, sizeof(prop));
                    property_get("persist.camera.dumpmetadata", prop, "0");
                    int32_t enabled = atoi(prop);
                    if (enabled && metadata->is_tuning_params_valid) {
                        dumpMetadataToFile(metadata->tuning_params,
                               mMetaFrameCount,
                               enabled,
                               "Snapshot",
                               frame_number);
                    }
                }
            }

            if (!internalPproc) {
                LOGD("couldn't find need_metadata for this metadata");
                // Return metadata buffer
                if (free_and_bufdone_meta_buf) {
                    mMetadataChannel->bufDone(metadata_buf);
                    free(metadata_buf);
                }
            }
        }
        if (!result.result) {
            LOGE("metadata is NULL");
        }
        result.frame_number = i->frame_number;
        result.input_buffer = i->input_buffer;
        result.num_output_buffers = 0;
        result.output_buffers = NULL;
        // Count the buffers already filled for this request; they are sent
        // together with the metadata in a single capture result below.
        for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
            if (j->buffer) {
                result.num_output_buffers++;
            }
        }

        updateFpsInPreviewBuffer(metadata, i->frame_number);

        if (result.num_output_buffers > 0) {
            camera3_stream_buffer_t *result_buffers =
                new camera3_stream_buffer_t[result.num_output_buffers];
            if (result_buffers != NULL) {
                size_t result_buffers_idx = 0;
                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                        j != i->buffers.end(); j++) {
                    if (j->buffer) {
                        // Flag the buffer with STATUS_ERROR if its frame was
                        // recorded as dropped for this stream earlier.
                        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                                m != mPendingFrameDropList.end(); m++) {
                            QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
                            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
                            if((m->stream_ID == streamID) && (m->frame_number==frame_number)) {
                                j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                                LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u",
                                        frame_number, streamID);
                                m = mPendingFrameDropList.erase(m);
                                break;
                            }
                        }
                        mPendingBuffersMap.removeBuf(j->buffer->buffer);
                        result_buffers[result_buffers_idx++] = *(j->buffer);
                        free(j->buffer);
                        j->buffer = NULL;
                    }
                }

                result.output_buffers = result_buffers;
                mCallbackOps->process_capture_result(mCallbackOps, &result);
                LOGD("meta frame_number = %u, capture_time = %lld",
                        result.frame_number, i->timestamp);
                free_camera_metadata((camera_metadata_t *)result.result);
                delete[] result_buffers;
            }else {
                LOGE("Fatal error: out of memory");
            }
        } else {
            mCallbackOps->process_capture_result(mCallbackOps, &result);
            LOGD("meta frame_number = %u, capture_time = %lld",
                    result.frame_number, i->timestamp);
            free_camera_metadata((camera_metadata_t *)result.result);
        }

        i = erasePendingRequest(i);

        if (!mPendingReprocessResultList.empty()) {
            handlePendingReprocResults(frame_number + 1);
        }
    }

done_metadata:
    // Every request still pending after this metadata ages by one frame;
    // this feeds the pipeline depth reported to the framework.
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end() ;i++) {
        i->pipeline_depth++;
    }
    LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
    unblockRequestIfNecessary();
}
   3218 
   3219 /*===========================================================================
   3220  * FUNCTION   : hdrPlusPerfLock
   3221  *
   3222  * DESCRIPTION: perf lock for HDR+ using custom intent
   3223  *
   3224  * PARAMETERS : @metadata_buf: Metadata super_buf pointer
   3225  *
   3226  * RETURN     : None
   3227  *
   3228  *==========================================================================*/
   3229 void QCamera3HardwareInterface::hdrPlusPerfLock(
   3230         mm_camera_super_buf_t *metadata_buf)
   3231 {
   3232     if (NULL == metadata_buf) {
   3233         LOGE("metadata_buf is NULL");
   3234         return;
   3235     }
   3236     metadata_buffer_t *metadata =
   3237             (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
   3238     int32_t *p_frame_number_valid =
   3239             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
   3240     uint32_t *p_frame_number =
   3241             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
   3242 
   3243     if (p_frame_number_valid == NULL || p_frame_number == NULL) {
   3244         LOGE("%s: Invalid metadata", __func__);
   3245         return;
   3246     }
   3247 
   3248     //acquire perf lock for 5 sec after the last HDR frame is captured
   3249     if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
   3250         if ((p_frame_number != NULL) &&
   3251                 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
   3252             m_perfLock.lock_acq_timed(HDR_PLUS_PERF_TIME_OUT);
   3253         }
   3254     }
   3255 
   3256     //release lock after perf lock timer is expired. If lock is already released,
   3257     //isTimerReset returns false
   3258     if (m_perfLock.isTimerReset()) {
   3259         mLastCustIntentFrmNum = -1;
   3260         m_perfLock.lock_rel_timed();
   3261     }
   3262 }
   3263 
   3264 /*===========================================================================
   3265  * FUNCTION   : handleInputBufferWithLock
   3266  *
   3267  * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
   3268  *
   3269  * PARAMETERS : @frame_number: frame number of the input buffer
   3270  *
   3271  * RETURN     :
   3272  *
   3273  *==========================================================================*/
void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
{
    ATRACE_CALL();
    // Linear scan of the pending-request list for the matching frame number.
    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i != mPendingRequestsList.end() && i->input_buffer) {
        //found the right request
        // Send the shutter notify first (and only once per request); the
        // framework expects notify() before the capture result.
        if (!i->shutter_notified) {
            CameraMetadata settings;
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
            // Prefer the sensor timestamp carried in the request's settings;
            // fall back to the current monotonic time if it is missing.
            if(i->settings) {
                settings = i->settings;
                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
                } else {
                    LOGE("No timestamp in input settings! Using current one.");
                }
            } else {
                LOGE("Input settings missing!");
            }

            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
            mCallbackOps->notify(mCallbackOps, &notify_msg);
            i->shutter_notified = true;
            LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
                        i->frame_number, notify_msg.message.shutter.timestamp);
        }

        // Wait for (then close) the input buffer's release fence before
        // returning the buffer in the result. Note the fence fd is closed
        // even if the wait failed.
        if (i->input_buffer->release_fence != -1) {
           int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
           close(i->input_buffer->release_fence);
           if (rc != OK) {
               LOGE("input buffer sync wait failed %d", rc);
           }
        }

        // Deliver metadata and the input buffer back to the framework in a
        // single, final (PARTIAL_RESULT_COUNT) capture result.
        camera3_capture_result result;
        memset(&result, 0, sizeof(camera3_capture_result));
        result.frame_number = frame_number;
        result.result = i->settings;
        result.input_buffer = i->input_buffer;
        result.partial_result = PARTIAL_RESULT_COUNT;

        mCallbackOps->process_capture_result(mCallbackOps, &result);
        LOGD("Input request metadata and input buffer frame_number = %u",
                        i->frame_number);
        // The request is fully served; drop it from the pending list.
        i = erasePendingRequest(i);
    } else {
        LOGE("Could not find input request for frame number %d", frame_number);
    }
}
   3331 
   3332 /*===========================================================================
   3333  * FUNCTION   : handleBufferWithLock
   3334  *
   3335  * DESCRIPTION: Handles image buffer callback with mMutex lock held.
   3336  *
   3337  * PARAMETERS : @buffer: image buffer for the callback
   3338  *              @frame_number: frame number of the image buffer
   3339  *
   3340  * RETURN     :
   3341  *
   3342  *==========================================================================*/
   3343 void QCamera3HardwareInterface::handleBufferWithLock(
   3344     camera3_stream_buffer_t *buffer, uint32_t frame_number)
   3345 {
   3346     ATRACE_CALL();
   3347     /* Nothing to be done during error state */
   3348     if ((ERROR == mState) || (DEINIT == mState)) {
   3349         return;
   3350     }
   3351     if (mFlushPerf) {
   3352         handleBuffersDuringFlushLock(buffer);
   3353         return;
   3354     }
   3355     //not in flush
   3356     // If the frame number doesn't exist in the pending request list,
   3357     // directly send the buffer to the frameworks, and update pending buffers map
   3358     // Otherwise, book-keep the buffer.
   3359     pendingRequestIterator i = mPendingRequestsList.begin();
   3360     while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
   3361         i++;
   3362     }
   3363     if (i == mPendingRequestsList.end()) {
   3364         // Verify all pending requests frame_numbers are greater
   3365         for (pendingRequestIterator j = mPendingRequestsList.begin();
   3366                 j != mPendingRequestsList.end(); j++) {
   3367             if ((j->frame_number < frame_number) && !(j->input_buffer)) {
   3368                 LOGW("Error: pending live frame number %d is smaller than %d",
   3369                          j->frame_number, frame_number);
   3370             }
   3371         }
   3372         camera3_capture_result_t result;
   3373         memset(&result, 0, sizeof(camera3_capture_result_t));
   3374         result.result = NULL;
   3375         result.frame_number = frame_number;
   3376         result.num_output_buffers = 1;
   3377         result.partial_result = 0;
   3378         for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
   3379                 m != mPendingFrameDropList.end(); m++) {
   3380             QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
   3381             uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
   3382             if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
   3383                 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
   3384                 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
   3385                          frame_number, streamID);
   3386                 m = mPendingFrameDropList.erase(m);
   3387                 break;
   3388             }
   3389         }
   3390         result.output_buffers = buffer;
   3391         LOGH("result frame_number = %d, buffer = %p",
   3392                  frame_number, buffer->buffer);
   3393 
   3394         mPendingBuffersMap.removeBuf(buffer->buffer);
   3395 
   3396         mCallbackOps->process_capture_result(mCallbackOps, &result);
   3397     } else {
   3398         if (i->input_buffer) {
   3399             CameraMetadata settings;
   3400             camera3_notify_msg_t notify_msg;
   3401             memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
   3402             nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
   3403             if(i->settings) {
   3404                 settings = i->settings;
   3405                 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
   3406                     capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
   3407                 } else {
   3408                     LOGW("No timestamp in input settings! Using current one.");
   3409                 }
   3410             } else {
   3411                 LOGE("Input settings missing!");
   3412             }
   3413 
   3414             notify_msg.type = CAMERA3_MSG_SHUTTER;
   3415             notify_msg.message.shutter.frame_number = frame_number;
   3416             notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
   3417 
   3418             if (i->input_buffer->release_fence != -1) {
   3419                int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
   3420                close(i->input_buffer->release_fence);
   3421                if (rc != OK) {
   3422                    LOGE("input buffer sync wait failed %d", rc);
   3423                }
   3424             }
   3425             mPendingBuffersMap.removeBuf(buffer->buffer);
   3426 
   3427             camera3_capture_result result;
   3428             memset(&result, 0, sizeof(camera3_capture_result));
   3429             result.frame_number = frame_number;
   3430             result.result = i->settings;
   3431             result.input_buffer = i->input_buffer;
   3432             result.num_output_buffers = 1;
   3433             result.output_buffers = buffer;
   3434             result.partial_result = PARTIAL_RESULT_COUNT;
   3435 
   3436             mCallbackOps->notify(mCallbackOps, &notify_msg);
   3437             mCallbackOps->process_capture_result(mCallbackOps, &result);
   3438             LOGD("Notify reprocess now %d!", frame_number);
   3439             i = erasePendingRequest(i);
   3440         } else {
   3441             for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
   3442                 j != i->buffers.end(); j++) {
   3443                 if (j->stream == buffer->stream) {
   3444                     if (j->buffer != NULL) {
   3445                         LOGE("Error: buffer is already set");
   3446                     } else {
   3447                         j->buffer = (camera3_stream_buffer_t *)malloc(
   3448                             sizeof(camera3_stream_buffer_t));
   3449                         *(j->buffer) = *buffer;
   3450                         LOGH("cache buffer %p at result frame_number %u",
   3451                              buffer->buffer, frame_number);
   3452                     }
   3453                 }
   3454             }
   3455         }
   3456     }
   3457 }
   3458 
   3459 /*===========================================================================
   3460  * FUNCTION   : unblockRequestIfNecessary
   3461  *
   3462  * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
   3463  *              that mMutex is held when this function is called.
   3464  *
   3465  * PARAMETERS :
   3466  *
   3467  * RETURN     :
   3468  *
   3469  *==========================================================================*/
void QCamera3HardwareInterface::unblockRequestIfNecessary()
{
   // Unblock process_capture_request
   // Signals unconditionally; presumably the waiter re-checks its own
   // predicate (max in-flight buffers) after waking, per the standard
   // condition-variable pattern — the check is not visible here.
   // Per the function header, mMutex is already held by the caller.
   pthread_cond_signal(&mRequestCond);
}
   3475 
   3476 
   3477 /*===========================================================================
   3478  * FUNCTION   : processCaptureRequest
   3479  *
   3480  * DESCRIPTION: process a capture request from camera service
   3481  *
   3482  * PARAMETERS :
   3483  *   @request : request from framework to process
   3484  *
   3485  * RETURN     :
   3486  *
   3487  *==========================================================================*/
   3488 int QCamera3HardwareInterface::processCaptureRequest(
   3489                     camera3_capture_request_t *request)
   3490 {
   3491     ATRACE_CALL();
   3492     int rc = NO_ERROR;
   3493     int32_t request_id;
   3494     CameraMetadata meta;
   3495     bool isVidBufRequested = false;
   3496     camera3_stream_buffer_t *pInputBuffer = NULL;
   3497 
   3498     pthread_mutex_lock(&mMutex);
   3499 
   3500     // Validate current state
   3501     switch (mState) {
   3502         case CONFIGURED:
   3503         case STARTED:
   3504             /* valid state */
   3505             break;
   3506 
   3507         case ERROR:
   3508             pthread_mutex_unlock(&mMutex);
   3509             handleCameraDeviceError();
   3510             return -ENODEV;
   3511 
   3512         default:
   3513             LOGE("Invalid state %d", mState);
   3514             pthread_mutex_unlock(&mMutex);
   3515             return -ENODEV;
   3516     }
   3517 
   3518     rc = validateCaptureRequest(request);
   3519     if (rc != NO_ERROR) {
   3520         LOGE("incoming request is not valid");
   3521         pthread_mutex_unlock(&mMutex);
   3522         return rc;
   3523     }
   3524 
   3525     meta = request->settings;
   3526 
   3527     // For first capture request, send capture intent, and
   3528     // stream on all streams
   3529     if (mState == CONFIGURED) {
   3530         // send an unconfigure to the backend so that the isp
   3531         // resources are deallocated
   3532         if (!mFirstConfiguration) {
   3533             cam_stream_size_info_t stream_config_info;
   3534             int32_t hal_version = CAM_HAL_V3;
   3535             memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
   3536             stream_config_info.buffer_info.min_buffers =
   3537                     MIN_INFLIGHT_REQUESTS;
   3538             stream_config_info.buffer_info.max_buffers =
   3539                     m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
   3540             clear_metadata_buffer(mParameters);
   3541             ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
   3542                     CAM_INTF_PARM_HAL_VERSION, hal_version);
   3543             ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
   3544                     CAM_INTF_META_STREAM_INFO, stream_config_info);
   3545             rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
   3546                     mParameters);
   3547             if (rc < 0) {
   3548                 LOGE("set_parms for unconfigure failed");
   3549                 pthread_mutex_unlock(&mMutex);
   3550                 return rc;
   3551             }
   3552         }
   3553         m_perfLock.lock_acq();
   3554         /* get eis information for stream configuration */
   3555         cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
   3556         char is_type_value[PROPERTY_VALUE_MAX];
   3557         property_get("persist.camera.is_type", is_type_value, "4");
   3558         isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
   3559         // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
   3560         property_get("persist.camera.is_type_preview", is_type_value, "4");
   3561         isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
   3562         LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
   3563 
   3564         if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
   3565             int32_t hal_version = CAM_HAL_V3;
   3566             uint8_t captureIntent =
   3567                 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
   3568             mCaptureIntent = captureIntent;
   3569             clear_metadata_buffer(mParameters);
   3570             ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
   3571             ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
   3572         }
   3573 
   3574         uint8_t fwkVideoStabMode=0;
   3575         if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
   3576             fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
   3577         }
   3578 
   3579         // If EIS setprop is enabled & if first capture setting has EIS enabled then only
   3580         // turn it on for video/preview
   3581         bool setEis = m_bEisEnable && fwkVideoStabMode && m_bEisSupportedSize &&
   3582                 (isTypeVideo >= IS_TYPE_EIS_2_0);
   3583         int32_t vsMode;
   3584         vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
   3585         if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
   3586             rc = BAD_VALUE;
   3587         }
   3588         LOGD("setEis %d", setEis);
   3589         bool eis3Supported = false;
   3590         size_t count = IS_TYPE_MAX;
   3591         count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
   3592         for (size_t i = 0; i < count; i++) {
   3593             if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
   3594                 eis3Supported = true;
   3595                 break;
   3596             }
   3597         }
   3598 
   3599         //IS type will be 0 unless EIS is supported. If EIS is supported
   3600         //it could either be 4 or 5 depending on the stream and video size
   3601         for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
   3602             if (setEis) {
   3603                 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
   3604                     is_type = isTypePreview;
   3605                 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
   3606                     if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
   3607                         LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
   3608                         is_type = IS_TYPE_EIS_2_0;
   3609                     } else {
   3610                         is_type = isTypeVideo;
   3611                     }
   3612                 } else {
   3613                     is_type = IS_TYPE_NONE;
   3614                 }
   3615                  mStreamConfigInfo.is_type[i] = is_type;
   3616             } else {
   3617                  mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
   3618             }
   3619         }
   3620 
   3621         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
   3622                 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
   3623 
   3624         int32_t tintless_value = 1;
   3625         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
   3626                 CAM_INTF_PARM_TINTLESS, tintless_value);
   3627         //Disable CDS for HFR mode or if DIS/EIS is on.
   3628         //CDS is a session parameter in the backend/ISP, so need to be set/reset
   3629         //after every configure_stream
   3630         if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
   3631                 (m_bIsVideo)) {
   3632             int32_t cds = CAM_CDS_MODE_OFF;
   3633             if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
   3634                     CAM_INTF_PARM_CDS_MODE, cds))
   3635                 LOGE("Failed to disable CDS for HFR mode");
   3636 
   3637         }
   3638 
   3639         if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
   3640             uint8_t* use_av_timer = NULL;
   3641 
   3642             if (m_debug_avtimer){
   3643                 use_av_timer = &m_debug_avtimer;
   3644             }
   3645             else{
   3646                 use_av_timer =
   3647                     meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
   3648             }
   3649 
   3650             if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
   3651                 rc = BAD_VALUE;
   3652             }
   3653         }
   3654 
   3655         setMobicat();
   3656 
   3657         /* Set fps and hfr mode while sending meta stream info so that sensor
   3658          * can configure appropriate streaming mode */
   3659         mHFRVideoFps = DEFAULT_VIDEO_FPS;
   3660         mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
   3661         mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
   3662         if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
   3663             rc = setHalFpsRange(meta, mParameters);
   3664             if (rc == NO_ERROR) {
   3665                 int32_t max_fps =
   3666                     (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
   3667                 if (max_fps == 60) {
   3668                     mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
   3669                 }
   3670                 /* For HFR, more buffers are dequeued upfront to improve the performance */
   3671                 if (mBatchSize) {
   3672                     mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
   3673                     mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
   3674                 }
   3675             }
   3676             else {
   3677                 LOGE("setHalFpsRange failed");
   3678             }
   3679         }
   3680         if (meta.exists(ANDROID_CONTROL_MODE)) {
   3681             uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
   3682             rc = extractSceneMode(meta, metaMode, mParameters);
   3683             if (rc != NO_ERROR) {
   3684                 LOGE("extractSceneMode failed");
   3685             }
   3686         }
   3687 
   3688         if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
   3689             cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
   3690                     meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
   3691             rc = setVideoHdrMode(mParameters, vhdr);
   3692             if (rc != NO_ERROR) {
   3693                 LOGE("setVideoHDR is failed");
   3694             }
   3695         }
   3696 
   3697         //TODO: validate the arguments, HSV scenemode should have only the
   3698         //advertised fps ranges
   3699 
   3700         /*set the capture intent, hal version, tintless, stream info,
   3701          *and disenable parameters to the backend*/
   3702         LOGD("set_parms META_STREAM_INFO " );
   3703         for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
   3704             LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x "
   3705                     "Format:%d is_type: %d",
   3706                     mStreamConfigInfo.type[i],
   3707                     mStreamConfigInfo.stream_sizes[i].width,
   3708                     mStreamConfigInfo.stream_sizes[i].height,
   3709                     mStreamConfigInfo.postprocess_mask[i],
   3710                     mStreamConfigInfo.format[i],
   3711                     mStreamConfigInfo.is_type[i]);
   3712         }
   3713 
   3714         rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
   3715                     mParameters);
   3716         if (rc < 0) {
   3717             LOGE("set_parms failed for hal version, stream info");
   3718         }
   3719 
   3720         cam_dimension_t sensor_dim;
   3721         memset(&sensor_dim, 0, sizeof(sensor_dim));
   3722         rc = getSensorOutputSize(sensor_dim);
   3723         if (rc != NO_ERROR) {
   3724             LOGE("Failed to get sensor output size");
   3725             pthread_mutex_unlock(&mMutex);
   3726             goto error_exit;
   3727         }
   3728 
   3729         mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
   3730                 gCamCapability[mCameraId]->active_array_size.height,
   3731                 sensor_dim.width, sensor_dim.height);
   3732 
   3733         /* Set batchmode before initializing channel. Since registerBuffer
   3734          * internally initializes some of the channels, better set batchmode
   3735          * even before first register buffer */
   3736         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   3737             it != mStreamInfo.end(); it++) {
   3738             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
   3739             if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
   3740                     && mBatchSize) {
   3741                 rc = channel->setBatchSize(mBatchSize);
   3742                 //Disable per frame map unmap for HFR/batchmode case
   3743                 rc |= channel->setPerFrameMapUnmap(false);
   3744                 if (NO_ERROR != rc) {
   3745                     LOGE("Channel init failed %d", rc);
   3746                     pthread_mutex_unlock(&mMutex);
   3747                     goto error_exit;
   3748                 }
   3749             }
   3750         }
   3751 
   3752         //First initialize all streams
   3753         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   3754             it != mStreamInfo.end(); it++) {
   3755             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
   3756             if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
   3757                ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
   3758                setEis) {
   3759                 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
   3760                     if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
   3761                         is_type = mStreamConfigInfo.is_type[i];
   3762                         break;
   3763                     }
   3764                 }
   3765                 rc = channel->initialize(is_type);
   3766             } else {
   3767                 rc = channel->initialize(IS_TYPE_NONE);
   3768             }
   3769             if (NO_ERROR != rc) {
   3770                 LOGE("Channel initialization failed %d", rc);
   3771                 pthread_mutex_unlock(&mMutex);
   3772                 goto error_exit;
   3773             }
   3774         }
   3775 
   3776         if (mRawDumpChannel) {
   3777             rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
   3778             if (rc != NO_ERROR) {
   3779                 LOGE("Error: Raw Dump Channel init failed");
   3780                 pthread_mutex_unlock(&mMutex);
   3781                 goto error_exit;
   3782             }
   3783         }
   3784         if (mSupportChannel) {
   3785             rc = mSupportChannel->initialize(IS_TYPE_NONE);
   3786             if (rc < 0) {
   3787                 LOGE("Support channel initialization failed");
   3788                 pthread_mutex_unlock(&mMutex);
   3789                 goto error_exit;
   3790             }
   3791         }
   3792         if (mAnalysisChannel) {
   3793             rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
   3794             if (rc < 0) {
   3795                 LOGE("Analysis channel initialization failed");
   3796                 pthread_mutex_unlock(&mMutex);
   3797                 goto error_exit;
   3798             }
   3799         }
   3800         if (mDummyBatchChannel) {
   3801             rc = mDummyBatchChannel->setBatchSize(mBatchSize);
   3802             if (rc < 0) {
   3803                 LOGE("mDummyBatchChannel setBatchSize failed");
   3804                 pthread_mutex_unlock(&mMutex);
   3805                 goto error_exit;
   3806             }
   3807             rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
   3808             if (rc < 0) {
   3809                 LOGE("mDummyBatchChannel initialization failed");
   3810                 pthread_mutex_unlock(&mMutex);
   3811                 goto error_exit;
   3812             }
   3813         }
   3814 
   3815         // Set bundle info
   3816         rc = setBundleInfo();
   3817         if (rc < 0) {
   3818             LOGE("setBundleInfo failed %d", rc);
   3819             pthread_mutex_unlock(&mMutex);
   3820             goto error_exit;
   3821         }
   3822 
   3823         //update settings from app here
   3824         if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
   3825             mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
   3826             LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
   3827         }
   3828         if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
   3829             mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
   3830             LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
   3831         }
   3832         if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
   3833             mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
   3834             LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
   3835 
   3836             if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
   3837                 (mLinkedCameraId != mCameraId) ) {
   3838                 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
   3839                     mLinkedCameraId, mCameraId);
   3840                 pthread_mutex_unlock(&mMutex);
   3841                 goto error_exit;
   3842             }
   3843         }
   3844 
   3845         // add bundle related cameras
   3846         LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
   3847         if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
   3848             if (mIsDeviceLinked)
   3849                 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
   3850             else
   3851                 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
   3852 
   3853             pthread_mutex_lock(&gCamLock);
   3854 
   3855             if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
   3856                 LOGE("Dualcam: Invalid Session Id ");
   3857                 pthread_mutex_unlock(&gCamLock);
   3858                 pthread_mutex_unlock(&mMutex);
   3859                 goto error_exit;
   3860             }
   3861 
   3862             if (mIsMainCamera == 1) {
   3863                 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
   3864                 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
   3865                 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
   3866                 // related session id should be session id of linked session
   3867                 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
   3868             } else {
   3869                 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
   3870                 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
   3871                 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
   3872                 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
   3873             }
   3874             pthread_mutex_unlock(&gCamLock);
   3875 
   3876             rc = mCameraHandle->ops->sync_related_sensors(
   3877                     mCameraHandle->camera_handle, m_pRelCamSyncBuf);
   3878             if (rc < 0) {
   3879                 LOGE("Dualcam: link failed");
   3880                 pthread_mutex_unlock(&mMutex);
   3881                 goto error_exit;
   3882             }
   3883         }
   3884 
   3885         //Then start them.
   3886         LOGH("Start META Channel");
   3887         rc = mMetadataChannel->start();
   3888         if (rc < 0) {
   3889             LOGE("META channel start failed");
   3890             pthread_mutex_unlock(&mMutex);
   3891             goto error_exit;
   3892         }
   3893 
   3894         if (mAnalysisChannel) {
   3895             rc = mAnalysisChannel->start();
   3896             if (rc < 0) {
   3897                 LOGE("Analysis channel start failed");
   3898                 mMetadataChannel->stop();
   3899                 pthread_mutex_unlock(&mMutex);
   3900                 goto error_exit;
   3901             }
   3902         }
   3903 
   3904         if (mSupportChannel) {
   3905             rc = mSupportChannel->start();
   3906             if (rc < 0) {
   3907                 LOGE("Support channel start failed");
   3908                 mMetadataChannel->stop();
   3909                 /* Although support and analysis are mutually exclusive today
   3910                    adding it in anycase for future proofing */
   3911                 if (mAnalysisChannel) {
   3912                     mAnalysisChannel->stop();
   3913                 }
   3914                 pthread_mutex_unlock(&mMutex);
   3915                 goto error_exit;
   3916             }
   3917         }
   3918         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   3919             it != mStreamInfo.end(); it++) {
   3920             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
   3921             LOGH("Start Processing Channel mask=%d",
   3922                      channel->getStreamTypeMask());
   3923             rc = channel->start();
   3924             if (rc < 0) {
   3925                 LOGE("channel start failed");
   3926                 pthread_mutex_unlock(&mMutex);
   3927                 goto error_exit;
   3928             }
   3929         }
   3930 
   3931         if (mRawDumpChannel) {
   3932             LOGD("Starting raw dump stream");
   3933             rc = mRawDumpChannel->start();
   3934             if (rc != NO_ERROR) {
   3935                 LOGE("Error Starting Raw Dump Channel");
   3936                 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   3937                       it != mStreamInfo.end(); it++) {
   3938                     QCamera3Channel *channel =
   3939                         (QCamera3Channel *)(*it)->stream->priv;
   3940                     LOGH("Stopping Processing Channel mask=%d",
   3941                         channel->getStreamTypeMask());
   3942                     channel->stop();
   3943                 }
   3944                 if (mSupportChannel)
   3945                     mSupportChannel->stop();
   3946                 if (mAnalysisChannel) {
   3947                     mAnalysisChannel->stop();
   3948                 }
   3949                 mMetadataChannel->stop();
   3950                 pthread_mutex_unlock(&mMutex);
   3951                 goto error_exit;
   3952             }
   3953         }
   3954 
   3955         if (mChannelHandle) {
   3956 
   3957             rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
   3958                     mChannelHandle);
   3959             if (rc != NO_ERROR) {
   3960                 LOGE("start_channel failed %d", rc);
   3961                 pthread_mutex_unlock(&mMutex);
   3962                 goto error_exit;
   3963             }
   3964         }
   3965 
   3966         goto no_error;
   3967 error_exit:
   3968         m_perfLock.lock_rel();
   3969         return rc;
   3970 no_error:
   3971         m_perfLock.lock_rel();
   3972 
   3973         mWokenUpByDaemon = false;
   3974         mPendingLiveRequest = 0;
   3975         mFirstConfiguration = false;
   3976         enablePowerHint();
   3977     }
   3978 
   3979     uint32_t frameNumber = request->frame_number;
   3980     cam_stream_ID_t streamID;
   3981 
   3982     if (mFlushPerf) {
   3983         //we cannot accept any requests during flush
   3984         LOGE("process_capture_request cannot proceed during flush");
   3985         pthread_mutex_unlock(&mMutex);
   3986         return NO_ERROR; //should return an error
   3987     }
   3988 
   3989     if (meta.exists(ANDROID_REQUEST_ID)) {
   3990         request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
   3991         mCurrentRequestId = request_id;
   3992         LOGD("Received request with id: %d", request_id);
   3993     } else if (mState == CONFIGURED || mCurrentRequestId == -1){
   3994         LOGE("Unable to find request id field, \
   3995                 & no previous id available");
   3996         pthread_mutex_unlock(&mMutex);
   3997         return NAME_NOT_FOUND;
   3998     } else {
   3999         LOGD("Re-using old request id");
   4000         request_id = mCurrentRequestId;
   4001     }
   4002 
   4003     LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
   4004                                     request->num_output_buffers,
   4005                                     request->input_buffer,
   4006                                     frameNumber);
   4007     // Acquire all request buffers first
   4008     streamID.num_streams = 0;
   4009     int blob_request = 0;
   4010     uint32_t snapshotStreamId = 0;
   4011     for (size_t i = 0; i < request->num_output_buffers; i++) {
   4012         const camera3_stream_buffer_t& output = request->output_buffers[i];
   4013         QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
   4014 
   4015         if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
   4016             //Call function to store local copy of jpeg data for encode params.
   4017             blob_request = 1;
   4018             snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
   4019         }
   4020 
   4021         if (output.acquire_fence != -1) {
   4022            rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
   4023            close(output.acquire_fence);
   4024            if (rc != OK) {
   4025               LOGE("sync wait failed %d", rc);
   4026               pthread_mutex_unlock(&mMutex);
   4027               return rc;
   4028            }
   4029         }
   4030 
   4031         streamID.streamID[streamID.num_streams] =
   4032             channel->getStreamID(channel->getStreamTypeMask());
   4033         streamID.num_streams++;
   4034 
   4035         if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
   4036             isVidBufRequested = true;
   4037         }
   4038     }
   4039 
   4040     if (blob_request) {
   4041         KPI_ATRACE_INT("SNAPSHOT", 1);
   4042     }
   4043     if (blob_request && mRawDumpChannel) {
   4044         LOGD("Trigger Raw based on blob request if Raw dump is enabled");
   4045         streamID.streamID[streamID.num_streams] =
   4046             mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
   4047         streamID.num_streams++;
   4048     }
   4049 
   4050     if(request->input_buffer == NULL) {
   4051         /* Parse the settings:
   4052          * - For every request in NORMAL MODE
   4053          * - For every request in HFR mode during preview only case
   4054          * - For first request of every batch in HFR mode during video
   4055          * recording. In batchmode the same settings except frame number is
   4056          * repeated in each request of the batch.
   4057          */
   4058         if (!mBatchSize ||
   4059            (mBatchSize && !isVidBufRequested) ||
   4060            (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
   4061             rc = setFrameParameters(request, streamID, blob_request, snapshotStreamId);
   4062             if (rc < 0) {
   4063                 LOGE("fail to set frame parameters");
   4064                 pthread_mutex_unlock(&mMutex);
   4065                 return rc;
   4066             }
   4067         }
   4068         /* For batchMode HFR, setFrameParameters is not called for every
   4069          * request. But only frame number of the latest request is parsed.
   4070          * Keep track of first and last frame numbers in a batch so that
   4071          * metadata for the frame numbers of batch can be duplicated in
   4072          * handleBatchMetadta */
   4073         if (mBatchSize) {
   4074             if (!mToBeQueuedVidBufs) {
   4075                 //start of the batch
   4076                 mFirstFrameNumberInBatch = request->frame_number;
   4077             }
   4078             if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
   4079                 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
   4080                 LOGE("Failed to set the frame number in the parameters");
   4081                 pthread_mutex_unlock(&mMutex);
   4082                 return BAD_VALUE;
   4083             }
   4084         }
   4085         if (mNeedSensorRestart) {
   4086             /* Unlock the mutex as restartSensor waits on the channels to be
   4087              * stopped, which in turn calls stream callback functions -
   4088              * handleBufferWithLock and handleMetadataWithLock */
   4089             pthread_mutex_unlock(&mMutex);
   4090             rc = dynamicUpdateMetaStreamInfo();
   4091             if (rc != NO_ERROR) {
   4092                 LOGE("Restarting the sensor failed");
   4093                 return BAD_VALUE;
   4094             }
   4095             mNeedSensorRestart = false;
   4096             pthread_mutex_lock(&mMutex);
   4097         }
   4098     } else {
   4099 
   4100         if (request->input_buffer->acquire_fence != -1) {
   4101            rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
   4102            close(request->input_buffer->acquire_fence);
   4103            if (rc != OK) {
   4104               LOGE("input buffer sync wait failed %d", rc);
   4105               pthread_mutex_unlock(&mMutex);
   4106               return rc;
   4107            }
   4108         }
   4109     }
   4110 
   4111     if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
   4112         mLastCustIntentFrmNum = frameNumber;
   4113     }
   4114     /* Update pending request list and pending buffers map */
   4115     PendingRequestInfo pendingRequest;
   4116     pendingRequestIterator latestRequest;
   4117     pendingRequest.frame_number = frameNumber;
   4118     pendingRequest.num_buffers = request->num_output_buffers;
   4119     pendingRequest.request_id = request_id;
   4120     pendingRequest.blob_request = blob_request;
   4121     pendingRequest.timestamp = 0;
   4122     pendingRequest.bUrgentReceived = 0;
   4123     if (request->input_buffer) {
   4124         pendingRequest.input_buffer =
   4125                 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
   4126         *(pendingRequest.input_buffer) = *(request->input_buffer);
   4127         pInputBuffer = pendingRequest.input_buffer;
   4128     } else {
   4129        pendingRequest.input_buffer = NULL;
   4130        pInputBuffer = NULL;
   4131     }
   4132 
   4133     pendingRequest.pipeline_depth = 0;
   4134     pendingRequest.partial_result_cnt = 0;
   4135     extractJpegMetadata(mCurJpegMeta, request);
   4136     pendingRequest.jpegMetadata = mCurJpegMeta;
   4137     pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
   4138     pendingRequest.shutter_notified = false;
   4139 
   4140     //extract capture intent
   4141     if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
   4142         mCaptureIntent =
   4143                 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
   4144     }
   4145     pendingRequest.capture_intent = mCaptureIntent;
   4146 
   4147     //extract CAC info
   4148     if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
   4149         mCacMode =
   4150                 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
   4151     }
   4152     pendingRequest.fwkCacMode = mCacMode;
   4153 
   4154     PendingBuffersInRequest bufsForCurRequest;
   4155     bufsForCurRequest.frame_number = frameNumber;
   4156     // Mark current timestamp for the new request
   4157     bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
   4158 
   4159     for (size_t i = 0; i < request->num_output_buffers; i++) {
   4160         RequestedBufferInfo requestedBuf;
   4161         memset(&requestedBuf, 0, sizeof(requestedBuf));
   4162         requestedBuf.stream = request->output_buffers[i].stream;
   4163         requestedBuf.buffer = NULL;
   4164         pendingRequest.buffers.push_back(requestedBuf);
   4165 
   4166         // Add to buffer handle the pending buffers list
   4167         PendingBufferInfo bufferInfo;
   4168         bufferInfo.buffer = request->output_buffers[i].buffer;
   4169         bufferInfo.stream = request->output_buffers[i].stream;
   4170         bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
   4171         QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
   4172         LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
   4173             frameNumber, bufferInfo.buffer,
   4174             channel->getStreamTypeMask(), bufferInfo.stream->format);
   4175     }
   4176     // Add this request packet into mPendingBuffersMap
   4177     mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
   4178     LOGD("mPendingBuffersMap.num_overall_buffers = %d",
   4179         mPendingBuffersMap.get_num_overall_buffers());
   4180 
   4181     latestRequest = mPendingRequestsList.insert(
   4182             mPendingRequestsList.end(), pendingRequest);
   4183     if(mFlush) {
   4184         LOGI("mFlush is true");
   4185         pthread_mutex_unlock(&mMutex);
   4186         return NO_ERROR;
   4187     }
   4188 
   4189     // Notify metadata channel we receive a request
   4190     mMetadataChannel->request(NULL, frameNumber);
   4191 
   4192     if(request->input_buffer != NULL){
   4193         LOGD("Input request, frame_number %d", frameNumber);
   4194         rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
   4195         if (NO_ERROR != rc) {
   4196             LOGE("fail to set reproc parameters");
   4197             pthread_mutex_unlock(&mMutex);
   4198             return rc;
   4199         }
   4200     }
   4201 
   4202     // Call request on other streams
   4203     uint32_t streams_need_metadata = 0;
   4204     pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
   4205     for (size_t i = 0; i < request->num_output_buffers; i++) {
   4206         const camera3_stream_buffer_t& output = request->output_buffers[i];
   4207         QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
   4208 
   4209         if (channel == NULL) {
   4210             LOGW("invalid channel pointer for stream");
   4211             continue;
   4212         }
   4213 
   4214         if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
   4215             LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
   4216                       output.buffer, request->input_buffer, frameNumber);
   4217             if(request->input_buffer != NULL){
   4218                 rc = channel->request(output.buffer, frameNumber,
   4219                         pInputBuffer, &mReprocMeta);
   4220                 if (rc < 0) {
   4221                     LOGE("Fail to request on picture channel");
   4222                     pthread_mutex_unlock(&mMutex);
   4223                     return rc;
   4224                 }
   4225             } else {
   4226                 LOGD("snapshot request with buffer %p, frame_number %d",
   4227                          output.buffer, frameNumber);
   4228                 if (!request->settings) {
   4229                     rc = channel->request(output.buffer, frameNumber,
   4230                             NULL, mPrevParameters);
   4231                 } else {
   4232                     rc = channel->request(output.buffer, frameNumber,
   4233                             NULL, mParameters);
   4234                 }
   4235                 if (rc < 0) {
   4236                     LOGE("Fail to request on picture channel");
   4237                     pthread_mutex_unlock(&mMutex);
   4238                     return rc;
   4239                 }
   4240                 pendingBufferIter->need_metadata = true;
   4241                 streams_need_metadata++;
   4242             }
   4243         } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
   4244             bool needMetadata = false;
   4245             QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
   4246             rc = yuvChannel->request(output.buffer, frameNumber,
   4247                     pInputBuffer,
   4248                     (pInputBuffer ? &mReprocMeta : mParameters), needMetadata);
   4249             if (rc < 0) {
   4250                 LOGE("Fail to request on YUV channel");
   4251                 pthread_mutex_unlock(&mMutex);
   4252                 return rc;
   4253             }
   4254             pendingBufferIter->need_metadata = needMetadata;
   4255             if (needMetadata)
   4256                 streams_need_metadata += 1;
   4257             LOGD("calling YUV channel request, need_metadata is %d",
   4258                      needMetadata);
   4259         } else {
   4260             LOGD("request with buffer %p, frame_number %d",
   4261                   output.buffer, frameNumber);
   4262             rc = channel->request(output.buffer, frameNumber);
   4263             if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
   4264                     && mBatchSize) {
   4265                 mToBeQueuedVidBufs++;
   4266                 if (mToBeQueuedVidBufs == mBatchSize) {
   4267                     channel->queueBatchBuf();
   4268                 }
   4269             }
   4270             if (rc < 0) {
   4271                 LOGE("request failed");
   4272                 pthread_mutex_unlock(&mMutex);
   4273                 return rc;
   4274             }
   4275         }
   4276         pendingBufferIter++;
   4277     }
   4278 
   4279     //If 2 streams have need_metadata set to true, fail the request, unless
   4280     //we copy/reference count the metadata buffer
   4281     if (streams_need_metadata > 1) {
   4282         LOGE("not supporting request in which two streams requires"
   4283                 " 2 HAL metadata for reprocessing");
   4284         pthread_mutex_unlock(&mMutex);
   4285         return -EINVAL;
   4286     }
   4287 
   4288     if(request->input_buffer == NULL) {
   4289         /* Set the parameters to backend:
   4290          * - For every request in NORMAL MODE
   4291          * - For every request in HFR mode during preview only case
   4292          * - Once every batch in HFR mode during video recording
   4293          */
   4294         if (!mBatchSize ||
   4295            (mBatchSize && !isVidBufRequested) ||
   4296            (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
   4297             LOGD("set_parms  batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
   4298                      mBatchSize, isVidBufRequested,
   4299                     mToBeQueuedVidBufs);
   4300             rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
   4301                     mParameters);
   4302             if (rc < 0) {
   4303                 LOGE("set_parms failed");
   4304             }
   4305             /* reset to zero coz, the batch is queued */
   4306             mToBeQueuedVidBufs = 0;
   4307             mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
   4308         }
   4309         mPendingLiveRequest++;
   4310     }
   4311 
   4312     LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
   4313 
   4314     mState = STARTED;
   4315     // Added a timed condition wait
   4316     struct timespec ts;
   4317     uint8_t isValidTimeout = 1;
   4318     rc = clock_gettime(CLOCK_REALTIME, &ts);
   4319     if (rc < 0) {
   4320       isValidTimeout = 0;
   4321       LOGE("Error reading the real time clock!!");
   4322     }
   4323     else {
   4324       // Make timeout as 5 sec for request to be honored
   4325       ts.tv_sec += 5;
   4326     }
   4327     //Block on conditional variable
   4328     while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
   4329             (mState != ERROR) && (mState != DEINIT)) {
   4330         if (!isValidTimeout) {
   4331             LOGD("Blocking on conditional wait");
   4332             pthread_cond_wait(&mRequestCond, &mMutex);
   4333         }
   4334         else {
   4335             LOGD("Blocking on timed conditional wait");
   4336             rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
   4337             if (rc == ETIMEDOUT) {
   4338                 rc = -ENODEV;
   4339                 LOGE("Unblocked on timeout!!!!");
   4340                 break;
   4341             }
   4342         }
   4343         LOGD("Unblocked");
   4344         if (mWokenUpByDaemon) {
   4345             mWokenUpByDaemon = false;
   4346             if (mPendingLiveRequest < mMaxInFlightRequests)
   4347                 break;
   4348         }
   4349     }
   4350     pthread_mutex_unlock(&mMutex);
   4351 
   4352     return rc;
   4353 }
   4354 
   4355 /*===========================================================================
   4356  * FUNCTION   : dump
   4357  *
 * DESCRIPTION: Dump pending-request, pending-buffer and pending-frame-drop
 *              state to the supplied file descriptor (dumpsys support).
 *
 * PARAMETERS :
 *   @fd : file descriptor to write the textual dump to
 *
 * RETURN     : None
   4364  *==========================================================================*/
   4365 void QCamera3HardwareInterface::dump(int fd)
   4366 {
   4367     pthread_mutex_lock(&mMutex);
   4368     dprintf(fd, "\n Camera HAL3 information Begin \n");
   4369 
   4370     dprintf(fd, "\nNumber of pending requests: %zu \n",
   4371         mPendingRequestsList.size());
   4372     dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
   4373     dprintf(fd, " Frame | Number of Buffers |   Req Id:   | Blob Req | Input buffer present\n");
   4374     dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
   4375     for(pendingRequestIterator i = mPendingRequestsList.begin();
   4376             i != mPendingRequestsList.end(); i++) {
   4377         dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
   4378         i->frame_number, i->num_buffers, i->request_id, i->blob_request,
   4379         i->input_buffer);
   4380     }
   4381     dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
   4382                 mPendingBuffersMap.get_num_overall_buffers());
   4383     dprintf(fd, "-------+------------------\n");
   4384     dprintf(fd, " Frame | Stream type mask \n");
   4385     dprintf(fd, "-------+------------------\n");
   4386     for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
   4387         for(auto &j : req.mPendingBufferList) {
   4388             QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
   4389             dprintf(fd, " %5d | %11d \n",
   4390                     req.frame_number, channel->getStreamTypeMask());
   4391         }
   4392     }
   4393     dprintf(fd, "-------+------------------\n");
   4394 
   4395     dprintf(fd, "\nPending frame drop list: %zu\n",
   4396         mPendingFrameDropList.size());
   4397     dprintf(fd, "-------+-----------\n");
   4398     dprintf(fd, " Frame | Stream ID \n");
   4399     dprintf(fd, "-------+-----------\n");
   4400     for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
   4401         i != mPendingFrameDropList.end(); i++) {
   4402         dprintf(fd, " %5d | %9d \n",
   4403             i->frame_number, i->stream_ID);
   4404     }
   4405     dprintf(fd, "-------+-----------\n");
   4406 
   4407     dprintf(fd, "\n Camera HAL3 information End \n");
   4408 
   4409     /* use dumpsys media.camera as trigger to send update debug level event */
   4410     mUpdateDebugLevel = true;
   4411     pthread_mutex_unlock(&mMutex);
   4412     return;
   4413 }
   4414 
   4415 /*===========================================================================
   4416  * FUNCTION   : flush
   4417  *
   4418  * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
   4419  *              conditionally restarts channels
   4420  *
   4421  * PARAMETERS :
   4422  *  @ restartChannels: re-start all channels
   4423  *
   4424  *
   4425  * RETURN     :
   4426  *          0 on success
   4427  *          Error code on failure
   4428  *==========================================================================*/
   4429 int QCamera3HardwareInterface::flush(bool restartChannels)
   4430 {
   4431     KPI_ATRACE_CALL();
   4432     int32_t rc = NO_ERROR;
   4433 
   4434     LOGD("Unblocking Process Capture Request");
   4435     pthread_mutex_lock(&mMutex);
   4436     mFlush = true;
   4437     pthread_mutex_unlock(&mMutex);
   4438 
   4439     rc = stopAllChannels();
   4440     // unlink of dualcam
   4441     if (mIsDeviceLinked) {
   4442         m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
   4443         pthread_mutex_lock(&gCamLock);
   4444 
   4445         if (mIsMainCamera == 1) {
   4446             m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
   4447             m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
   4448             m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
   4449             // related session id should be session id of linked session
   4450             m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
   4451         } else {
   4452             m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
   4453             m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
   4454             m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
   4455             m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
   4456         }
   4457         pthread_mutex_unlock(&gCamLock);
   4458 
   4459         rc = mCameraHandle->ops->sync_related_sensors(
   4460                 mCameraHandle->camera_handle, m_pRelCamSyncBuf);
   4461         if (rc < 0) {
   4462             LOGE("Dualcam: Unlink failed, but still proceed to close");
   4463         }
   4464     }
   4465 
   4466     if (rc < 0) {
   4467         LOGE("stopAllChannels failed");
   4468         return rc;
   4469     }
   4470     if (mChannelHandle) {
   4471         mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
   4472                 mChannelHandle);
   4473     }
   4474 
   4475     // Reset bundle info
   4476     rc = setBundleInfo();
   4477     if (rc < 0) {
   4478         LOGE("setBundleInfo failed %d", rc);
   4479         return rc;
   4480     }
   4481 
   4482     // Mutex Lock
   4483     pthread_mutex_lock(&mMutex);
   4484 
   4485     // Unblock process_capture_request
   4486     mPendingLiveRequest = 0;
   4487     pthread_cond_signal(&mRequestCond);
   4488 
   4489     rc = notifyErrorForPendingRequests();
   4490     if (rc < 0) {
   4491         LOGE("notifyErrorForPendingRequests failed");
   4492         pthread_mutex_unlock(&mMutex);
   4493         return rc;
   4494     }
   4495 
   4496     mFlush = false;
   4497 
   4498     // Start the Streams/Channels
   4499     if (restartChannels) {
   4500         rc = startAllChannels();
   4501         if (rc < 0) {
   4502             LOGE("startAllChannels failed");
   4503             pthread_mutex_unlock(&mMutex);
   4504             return rc;
   4505         }
   4506     }
   4507 
   4508     if (mChannelHandle) {
   4509         mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
   4510                     mChannelHandle);
   4511         if (rc < 0) {
   4512             LOGE("start_channel failed");
   4513             pthread_mutex_unlock(&mMutex);
   4514             return rc;
   4515         }
   4516     }
   4517 
   4518     pthread_mutex_unlock(&mMutex);
   4519 
   4520     return 0;
   4521 }
   4522 
   4523 /*===========================================================================
   4524  * FUNCTION   : flushPerf
   4525  *
   4526  * DESCRIPTION: This is the performance optimization version of flush that does
   4527  *              not use stream off, rather flushes the system
   4528  *
 * PARAMETERS : None
 *
   4531  *
   4532  * RETURN     : 0 : success
   4533  *              -EINVAL: input is malformed (device is not valid)
   4534  *              -ENODEV: if the device has encountered a serious error
   4535  *==========================================================================*/
   4536 int QCamera3HardwareInterface::flushPerf()
   4537 {
   4538     ATRACE_CALL();
   4539     int32_t rc = 0;
   4540     struct timespec timeout;
   4541     bool timed_wait = false;
   4542 
   4543     pthread_mutex_lock(&mMutex);
   4544     mFlushPerf = true;
   4545     mPendingBuffersMap.numPendingBufsAtFlush =
   4546         mPendingBuffersMap.get_num_overall_buffers();
   4547     LOGD("Calling flush. Wait for %d buffers to return",
   4548         mPendingBuffersMap.numPendingBufsAtFlush);
   4549 
   4550     /* send the flush event to the backend */
   4551     rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
   4552     if (rc < 0) {
   4553         LOGE("Error in flush: IOCTL failure");
   4554         mFlushPerf = false;
   4555         pthread_mutex_unlock(&mMutex);
   4556         return -ENODEV;
   4557     }
   4558 
   4559     if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
   4560         LOGD("No pending buffers in HAL, return flush");
   4561         mFlushPerf = false;
   4562         pthread_mutex_unlock(&mMutex);
   4563         return rc;
   4564     }
   4565 
   4566     /* wait on a signal that buffers were received */
   4567     rc = clock_gettime(CLOCK_REALTIME, &timeout);
   4568     if (rc < 0) {
   4569         LOGE("Error reading the real time clock, cannot use timed wait");
   4570     } else {
   4571         timeout.tv_sec += FLUSH_TIMEOUT;
   4572         timed_wait = true;
   4573     }
   4574 
   4575     //Block on conditional variable
   4576     while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
   4577         LOGD("Waiting on mBuffersCond");
   4578         if (!timed_wait) {
   4579             rc = pthread_cond_wait(&mBuffersCond, &mMutex);
   4580             if (rc != 0) {
   4581                  LOGE("pthread_cond_wait failed due to rc = %s",
   4582                         strerror(rc));
   4583                  break;
   4584             }
   4585         } else {
   4586             rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
   4587             if (rc != 0) {
   4588                 LOGE("pthread_cond_timedwait failed due to rc = %s",
   4589                             strerror(rc));
   4590                 break;
   4591             }
   4592         }
   4593     }
   4594     if (rc != 0) {
   4595         mFlushPerf = false;
   4596         pthread_mutex_unlock(&mMutex);
   4597         return -ENODEV;
   4598     }
   4599 
   4600     LOGD("Received buffers, now safe to return them");
   4601 
   4602     //make sure the channels handle flush
   4603     //currently only required for the picture channel to release snapshot resources
   4604     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   4605             it != mStreamInfo.end(); it++) {
   4606         QCamera3Channel *channel = (*it)->channel;
   4607         if (channel) {
   4608             rc = channel->flush();
   4609             if (rc) {
   4610                LOGE("Flushing the channels failed with error %d", rc);
   4611                // even though the channel flush failed we need to continue and
   4612                // return the buffers we have to the framework, however the return
   4613                // value will be an error
   4614                rc = -ENODEV;
   4615             }
   4616         }
   4617     }
   4618 
   4619     /* notify the frameworks and send errored results */
   4620     rc = notifyErrorForPendingRequests();
   4621     if (rc < 0) {
   4622         LOGE("notifyErrorForPendingRequests failed");
   4623         pthread_mutex_unlock(&mMutex);
   4624         return rc;
   4625     }
   4626 
   4627     //unblock process_capture_request
   4628     mPendingLiveRequest = 0;
   4629     unblockRequestIfNecessary();
   4630 
   4631     mFlushPerf = false;
   4632     pthread_mutex_unlock(&mMutex);
   4633     LOGD ("Flush Operation complete. rc = %d", rc);
   4634     return rc;
   4635 }
   4636 
   4637 /*===========================================================================
   4638  * FUNCTION   : handleCameraDeviceError
   4639  *
   4640  * DESCRIPTION: This function calls internal flush and notifies the error to
   4641  *              framework and updates the state variable.
   4642  *
   4643  * PARAMETERS : None
   4644  *
   4645  * RETURN     : NO_ERROR on Success
   4646  *              Error code on failure
   4647  *==========================================================================*/
   4648 int32_t QCamera3HardwareInterface::handleCameraDeviceError()
   4649 {
   4650     int32_t rc = NO_ERROR;
   4651 
   4652     pthread_mutex_lock(&mMutex);
   4653     if (mState != ERROR) {
   4654         //if mState != ERROR, nothing to be done
   4655         pthread_mutex_unlock(&mMutex);
   4656         return NO_ERROR;
   4657     }
   4658     pthread_mutex_unlock(&mMutex);
   4659 
   4660     rc = flush(false /* restart channels */);
   4661     if (NO_ERROR != rc) {
   4662         LOGE("internal flush to handle mState = ERROR failed");
   4663     }
   4664 
   4665     pthread_mutex_lock(&mMutex);
   4666     mState = DEINIT;
   4667     pthread_mutex_unlock(&mMutex);
   4668 
   4669     camera3_notify_msg_t notify_msg;
   4670     memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
   4671     notify_msg.type = CAMERA3_MSG_ERROR;
   4672     notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
   4673     notify_msg.message.error.error_stream = NULL;
   4674     notify_msg.message.error.frame_number = 0;
   4675     mCallbackOps->notify(mCallbackOps, &notify_msg);
   4676 
   4677     return rc;
   4678 }
   4679 
   4680 /*===========================================================================
   4681  * FUNCTION   : captureResultCb
   4682  *
   4683  * DESCRIPTION: Callback handler for all capture result
   4684  *              (streams, as well as metadata)
   4685  *
   4686  * PARAMETERS :
   4687  *   @metadata : metadata information
   4688  *   @buffer   : actual gralloc buffer to be returned to frameworks.
   4689  *               NULL if metadata.
   4690  *
   4691  * RETURN     : NONE
   4692  *==========================================================================*/
   4693 void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
   4694                 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
   4695 {
   4696     if (metadata_buf) {
   4697         if (mBatchSize) {
   4698             handleBatchMetadata(metadata_buf,
   4699                     true /* free_and_bufdone_meta_buf */);
   4700         } else { /* mBatchSize = 0 */
   4701             hdrPlusPerfLock(metadata_buf);
   4702             pthread_mutex_lock(&mMutex);
   4703             handleMetadataWithLock(metadata_buf,
   4704                     true /* free_and_bufdone_meta_buf */,
   4705                     false /* first frame of batch metadata */ );
   4706             pthread_mutex_unlock(&mMutex);
   4707         }
   4708     } else if (isInputBuffer) {
   4709         pthread_mutex_lock(&mMutex);
   4710         handleInputBufferWithLock(frame_number);
   4711         pthread_mutex_unlock(&mMutex);
   4712     } else {
   4713         pthread_mutex_lock(&mMutex);
   4714         handleBufferWithLock(buffer, frame_number);
   4715         pthread_mutex_unlock(&mMutex);
   4716     }
   4717     return;
   4718 }
   4719 
   4720 /*===========================================================================
   4721  * FUNCTION   : getReprocessibleOutputStreamId
   4722  *
   4723  * DESCRIPTION: Get source output stream id for the input reprocess stream
   4724  *              based on size and format, which would be the largest
   4725  *              output stream if an input stream exists.
   4726  *
   4727  * PARAMETERS :
   4728  *   @id      : return the stream id if found
   4729  *
   4730  * RETURN     : int32_t type of status
   4731  *              NO_ERROR  -- success
 *              non-zero failure code
   4733  *==========================================================================*/
   4734 int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
   4735 {
   4736     /* check if any output or bidirectional stream with the same size and format
   4737        and return that stream */
   4738     if ((mInputStreamInfo.dim.width > 0) &&
   4739             (mInputStreamInfo.dim.height > 0)) {
   4740         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   4741                 it != mStreamInfo.end(); it++) {
   4742 
   4743             camera3_stream_t *stream = (*it)->stream;
   4744             if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
   4745                     (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
   4746                     (stream->format == mInputStreamInfo.format)) {
   4747                 // Usage flag for an input stream and the source output stream
   4748                 // may be different.
   4749                 LOGD("Found reprocessible output stream! %p", *it);
   4750                 LOGD("input stream usage 0x%x, current stream usage 0x%x",
   4751                          stream->usage, mInputStreamInfo.usage);
   4752 
   4753                 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
   4754                 if (channel != NULL && channel->mStreams[0]) {
   4755                     id = channel->mStreams[0]->getMyServerID();
   4756                     return NO_ERROR;
   4757                 }
   4758             }
   4759         }
   4760     } else {
   4761         LOGD("No input stream, so no reprocessible output stream");
   4762     }
   4763     return NAME_NOT_FOUND;
   4764 }
   4765 
   4766 /*===========================================================================
   4767  * FUNCTION   : lookupFwkName
   4768  *
 * DESCRIPTION: In case the enum is not the same in the fwk and backend,
 *              make sure the parameter is correctly propagated
   4771  *
   4772  * PARAMETERS  :
   4773  *   @arr      : map between the two enums
   4774  *   @len      : len of the map
   4775  *   @hal_name : name of the hal_parm to map
   4776  *
   4777  * RETURN     : int type of status
   4778  *              fwk_name  -- success
 *              non-zero failure code
   4780  *==========================================================================*/
   4781 template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
   4782         size_t len, halType hal_name)
   4783 {
   4784 
   4785     for (size_t i = 0; i < len; i++) {
   4786         if (arr[i].hal_name == hal_name) {
   4787             return arr[i].fwk_name;
   4788         }
   4789     }
   4790 
   4791     /* Not able to find matching framework type is not necessarily
   4792      * an error case. This happens when mm-camera supports more attributes
   4793      * than the frameworks do */
   4794     LOGH("Cannot find matching framework type");
   4795     return NAME_NOT_FOUND;
   4796 }
   4797 
   4798 /*===========================================================================
   4799  * FUNCTION   : lookupHalName
   4800  *
 * DESCRIPTION: In case the enum is not the same in the fwk and backend,
 *              make sure the parameter is correctly propagated
   4803  *
   4804  * PARAMETERS  :
   4805  *   @arr      : map between the two enums
   4806  *   @len      : len of the map
   4807  *   @fwk_name : name of the hal_parm to map
   4808  *
   4809  * RETURN     : int32_t type of status
   4810  *              hal_name  -- success
 *              non-zero failure code
   4812  *==========================================================================*/
   4813 template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
   4814         size_t len, fwkType fwk_name)
   4815 {
   4816     for (size_t i = 0; i < len; i++) {
   4817         if (arr[i].fwk_name == fwk_name) {
   4818             return arr[i].hal_name;
   4819         }
   4820     }
   4821 
   4822     LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
   4823     return NAME_NOT_FOUND;
   4824 }
   4825 
   4826 /*===========================================================================
   4827  * FUNCTION   : lookupProp
   4828  *
   4829  * DESCRIPTION: lookup a value by its name
   4830  *
   4831  * PARAMETERS :
   4832  *   @arr     : map between the two enums
   4833  *   @len     : size of the map
   4834  *   @name    : name to be looked up
   4835  *
   4836  * RETURN     : Value if found
   4837  *              CAM_CDS_MODE_MAX if not found
   4838  *==========================================================================*/
   4839 template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
   4840         size_t len, const char *name)
   4841 {
   4842     if (name) {
   4843         for (size_t i = 0; i < len; i++) {
   4844             if (!strcmp(arr[i].desc, name)) {
   4845                 return arr[i].val;
   4846             }
   4847         }
   4848     }
   4849     return CAM_CDS_MODE_MAX;
   4850 }
   4851 
   4852 /*===========================================================================
 * FUNCTION   : translateFromHalMetadata
 *
 * DESCRIPTION: Translate metadata from HAL format into the format
 *              specified by the framework
   4855  *
   4856  * PARAMETERS :
   4857  *   @metadata : metadata information from callback
   4858  *   @timestamp: metadata buffer timestamp
   4859  *   @request_id: request id
   4860  *   @jpegMetadata: additional jpeg metadata
 *   @pprocDone: whether internal offline postprocessing is done
   4862  *
   4863  * RETURN     : camera_metadata_t*
   4864  *              metadata in a format specified by fwk
   4865  *==========================================================================*/
   4866 camera_metadata_t*
   4867 QCamera3HardwareInterface::translateFromHalMetadata(
   4868                                  metadata_buffer_t *metadata,
   4869                                  nsecs_t timestamp,
   4870                                  int32_t request_id,
   4871                                  const CameraMetadata& jpegMetadata,
   4872                                  uint8_t pipeline_depth,
   4873                                  uint8_t capture_intent,
   4874                                  bool pprocDone,
   4875                                  uint8_t fwk_cacMode,
   4876                                  bool firstMetadataInBatch)
   4877 {
   4878     CameraMetadata camMetadata;
   4879     camera_metadata_t *resultMetadata;
   4880 
   4881     if (mBatchSize && !firstMetadataInBatch) {
   4882         /* In batch mode, use cached metadata from the first metadata
   4883             in the batch */
   4884         camMetadata.clear();
   4885         camMetadata = mCachedMetadata;
   4886     }
   4887 
   4888     if (jpegMetadata.entryCount())
   4889         camMetadata.append(jpegMetadata);
   4890 
   4891     camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
   4892     camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
   4893     camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
   4894     camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
   4895 
   4896     if (mBatchSize && !firstMetadataInBatch) {
   4897         /* In batch mode, use cached metadata instead of parsing metadata buffer again */
   4898         resultMetadata = camMetadata.release();
   4899         return resultMetadata;
   4900     }
   4901 
   4902     IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
   4903         int64_t fwk_frame_number = *frame_number;
   4904         camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
   4905     }
   4906 
   4907     IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
   4908         int32_t fps_range[2];
   4909         fps_range[0] = (int32_t)float_range->min_fps;
   4910         fps_range[1] = (int32_t)float_range->max_fps;
   4911         camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
   4912                                       fps_range, 2);
   4913         LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
   4914              fps_range[0], fps_range[1]);
   4915     }
   4916 
   4917     IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
   4918         camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
   4919     }
   4920 
   4921     IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
   4922         int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
   4923                 METADATA_MAP_SIZE(SCENE_MODES_MAP),
   4924                 *sceneMode);
   4925         if (NAME_NOT_FOUND != val) {
   4926             uint8_t fwkSceneMode = (uint8_t)val;
   4927             camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
   4928             LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
   4929                      fwkSceneMode);
   4930         }
   4931     }
   4932 
   4933     IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
   4934         uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
   4935         camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
   4936     }
   4937 
   4938     IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
   4939         uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
   4940         camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
   4941     }
   4942 
   4943     IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
   4944         uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
   4945         camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
   4946     }
   4947 
   4948     IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
   4949             CAM_INTF_META_EDGE_MODE, metadata) {
   4950         camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
   4951     }
   4952 
   4953     IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
   4954         uint8_t fwk_flashPower = (uint8_t) *flashPower;
   4955         camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
   4956     }
   4957 
   4958     IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
   4959         camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
   4960     }
   4961 
   4962     IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
   4963         if (0 <= *flashState) {
   4964             uint8_t fwk_flashState = (uint8_t) *flashState;
   4965             if (!gCamCapability[mCameraId]->flash_available) {
   4966                 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
   4967             }
   4968             camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
   4969         }
   4970     }
   4971 
   4972     IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
   4973         int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
   4974         if (NAME_NOT_FOUND != val) {
   4975             uint8_t fwk_flashMode = (uint8_t)val;
   4976             camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
   4977         }
   4978     }
   4979 
   4980     IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
   4981         uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
   4982         camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
   4983     }
   4984 
   4985     IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
   4986         camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
   4987     }
   4988 
   4989     IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
   4990         camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
   4991     }
   4992 
   4993     IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
   4994         camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
   4995     }
   4996 
   4997     IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
   4998         uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
   4999         camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
   5000     }
   5001 
   5002     IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
   5003         uint8_t fwk_videoStab = (uint8_t) *videoStab;
   5004         LOGD("fwk_videoStab = %d", fwk_videoStab);
   5005         camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
   5006     } else {
   5007         // Regardless of Video stab supports or not, CTS is expecting the EIS result to be non NULL
   5008         // and so hardcoding the Video Stab result to OFF mode.
   5009         uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
   5010         camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
   5011         LOGD("EIS result default to OFF mode");
   5012     }
   5013 
   5014     IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
   5015         uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
   5016         camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
   5017     }
   5018 
   5019     IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
   5020         camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
   5021     }
   5022 
   5023     IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelSourcePattern,
   5024         CAM_INTF_META_BLACK_LEVEL_SOURCE_PATTERN, metadata) {
   5025 
   5026         LOGD("dynamicblackLevel = %f %f %f %f",
   5027           blackLevelSourcePattern->cam_black_level[0],
   5028           blackLevelSourcePattern->cam_black_level[1],
   5029           blackLevelSourcePattern->cam_black_level[2],
   5030           blackLevelSourcePattern->cam_black_level[3]);
   5031     }
   5032 
   5033     IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
   5034         CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
   5035         float fwk_blackLevelInd[4];
   5036 
   5037         fwk_blackLevelInd[0] = blackLevelAppliedPattern->cam_black_level[0];
   5038         fwk_blackLevelInd[1] = blackLevelAppliedPattern->cam_black_level[1];
   5039         fwk_blackLevelInd[2] = blackLevelAppliedPattern->cam_black_level[2];
   5040         fwk_blackLevelInd[3] = blackLevelAppliedPattern->cam_black_level[3];
   5041 
   5042         LOGD("applied dynamicblackLevel = %f %f %f %f",
   5043           blackLevelAppliedPattern->cam_black_level[0],
   5044           blackLevelAppliedPattern->cam_black_level[1],
   5045           blackLevelAppliedPattern->cam_black_level[2],
   5046           blackLevelAppliedPattern->cam_black_level[3]);
   5047         camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd, 4);
   5048 
   5049 #ifndef USE_HAL_3_3
   5050         // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
   5051         // Need convert the internal 16 bit depth to sensor 10 bit sensor raw
   5052         // depth space.
   5053         fwk_blackLevelInd[0] /= 64.0;
   5054         fwk_blackLevelInd[1] /= 64.0;
   5055         fwk_blackLevelInd[2] /= 64.0;
   5056         fwk_blackLevelInd[3] /= 64.0;
   5057         camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd, 4);
   5058 #endif
   5059     }
   5060 
   5061 #ifndef USE_HAL_3_3
   5062     // Fixed whitelevel is used by ISP/Sensor
   5063     camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
   5064             &gCamCapability[mCameraId]->white_level, 1);
   5065 #endif
   5066 
   5067     IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
   5068             CAM_INTF_META_SCALER_CROP_REGION, metadata) {
   5069         int32_t scalerCropRegion[4];
   5070         scalerCropRegion[0] = hScalerCropRegion->left;
   5071         scalerCropRegion[1] = hScalerCropRegion->top;
   5072         scalerCropRegion[2] = hScalerCropRegion->width;
   5073         scalerCropRegion[3] = hScalerCropRegion->height;
   5074 
   5075         // Adjust crop region from sensor output coordinate system to active
   5076         // array coordinate system.
   5077         mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
   5078                 scalerCropRegion[2], scalerCropRegion[3]);
   5079 
   5080         camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
   5081     }
   5082 
   5083     IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
   5084         LOGD("sensorExpTime = %lld", *sensorExpTime);
   5085         camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
   5086     }
   5087 
   5088     IF_META_AVAILABLE(int64_t, sensorFameDuration,
   5089             CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
   5090         LOGD("sensorFameDuration = %lld", *sensorFameDuration);
   5091         camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
   5092     }
   5093 
   5094     IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
   5095             CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
   5096         LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
   5097         camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
   5098                 sensorRollingShutterSkew, 1);
   5099     }
   5100 
   5101     IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
   5102         LOGD("sensorSensitivity = %d", *sensorSensitivity);
   5103         camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
   5104 
   5105         //calculate the noise profile based on sensitivity
   5106         double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
   5107         double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
   5108         double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
   5109         for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
   5110             noise_profile[i]   = noise_profile_S;
   5111             noise_profile[i+1] = noise_profile_O;
   5112         }
   5113         LOGD("noise model entry (S, O) is (%f, %f)",
   5114                 noise_profile_S, noise_profile_O);
   5115         camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
   5116                 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
   5117     }
   5118 
   5119 #ifndef USE_HAL_3_3
   5120     IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
   5121         int32_t fwk_ispSensitivity = (int32_t) *ispSensitivity;
   5122         camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
   5123     }
   5124 #endif
   5125 
   5126     IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
   5127         uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
   5128         camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
   5129     }
   5130 
   5131     IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
   5132         int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
   5133                 *faceDetectMode);
   5134         if (NAME_NOT_FOUND != val) {
   5135             uint8_t fwk_faceDetectMode = (uint8_t)val;
   5136             camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
   5137 
   5138             if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
   5139                 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
   5140                         CAM_INTF_META_FACE_DETECTION, metadata) {
   5141                     uint8_t numFaces = MIN(
   5142                             faceDetectionInfo->num_faces_detected, MAX_ROI);
   5143                     int32_t faceIds[MAX_ROI];
   5144                     uint8_t faceScores[MAX_ROI];
   5145                     int32_t faceRectangles[MAX_ROI * 4];
   5146                     int32_t faceLandmarks[MAX_ROI * 6];
   5147                     size_t j = 0, k = 0;
   5148 
   5149                     for (size_t i = 0; i < numFaces; i++) {
   5150                         faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
   5151                         // Adjust crop region from sensor output coordinate system to active
   5152                         // array coordinate system.
   5153                         cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
   5154                         mCropRegionMapper.toActiveArray(rect.left, rect.top,
   5155                                 rect.width, rect.height);
   5156 
   5157                         convertToRegions(faceDetectionInfo->faces[i].face_boundary,
   5158                                 faceRectangles+j, -1);
   5159 
   5160                         j+= 4;
   5161                     }
   5162                     if (numFaces <= 0) {
   5163                         memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
   5164                         memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
   5165                         memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
   5166                         memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
   5167                     }
   5168 
   5169                     camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
   5170                             numFaces);
   5171                     camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
   5172                             faceRectangles, numFaces * 4U);
   5173                     if (fwk_faceDetectMode ==
   5174                             ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
   5175                         IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
   5176                                 CAM_INTF_META_FACE_LANDMARK, metadata) {
   5177 
   5178                             for (size_t i = 0; i < numFaces; i++) {
   5179                                 // Map the co-ordinate sensor output coordinate system to active
   5180                                 // array coordinate system.
   5181                                 mCropRegionMapper.toActiveArray(
   5182                                         landmarks->face_landmarks[i].left_eye_center.x,
   5183                                         landmarks->face_landmarks[i].left_eye_center.y);
   5184                                 mCropRegionMapper.toActiveArray(
   5185                                         landmarks->face_landmarks[i].right_eye_center.x,
   5186                                         landmarks->face_landmarks[i].right_eye_center.y);
   5187                                 mCropRegionMapper.toActiveArray(
   5188                                         landmarks->face_landmarks[i].mouth_center.x,
   5189                                         landmarks->face_landmarks[i].mouth_center.y);
   5190 
   5191                                 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
   5192                                 k+= TOTAL_LANDMARK_INDICES;
   5193                             }
   5194                         } else {
   5195                             for (size_t i = 0; i < numFaces; i++) {
   5196                                 setInvalidLandmarks(faceLandmarks+k);
   5197                                 k+= TOTAL_LANDMARK_INDICES;
   5198                             }
   5199                         }
   5200 
   5201                         camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
   5202                         camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
   5203                                 faceLandmarks, numFaces * 6U);
   5204                    }
   5205                 }
   5206             }
   5207         }
   5208     }
   5209 
   5210     IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
   5211         uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
   5212         camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &fwk_histogramMode, 1);
   5213 
   5214         if (fwk_histogramMode == ANDROID_STATISTICS_HISTOGRAM_MODE_ON) {
   5215             IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
   5216                 // process histogram statistics info
   5217                 uint32_t hist_buf[3][CAM_HISTOGRAM_STATS_SIZE];
   5218                 uint32_t hist_size = sizeof(cam_histogram_data_t::hist_buf);
   5219                 cam_histogram_data_t rHistData, gHistData, bHistData;
   5220                 memset(&rHistData, 0, sizeof(rHistData));
   5221                 memset(&gHistData, 0, sizeof(gHistData));
   5222                 memset(&bHistData, 0, sizeof(bHistData));
   5223 
   5224                 switch (stats_data->type) {
   5225                 case CAM_HISTOGRAM_TYPE_BAYER:
   5226                     switch (stats_data->bayer_stats.data_type) {
   5227                         case CAM_STATS_CHANNEL_GR:
   5228                             rHistData = gHistData = bHistData = stats_data->bayer_stats.gr_stats;
   5229                             break;
   5230                         case CAM_STATS_CHANNEL_GB:
   5231                             rHistData = gHistData = bHistData = stats_data->bayer_stats.gb_stats;
   5232                             break;
   5233                         case CAM_STATS_CHANNEL_B:
   5234                             rHistData = gHistData = bHistData = stats_data->bayer_stats.b_stats;
   5235                             break;
   5236                         case CAM_STATS_CHANNEL_ALL:
   5237                             rHistData = stats_data->bayer_stats.r_stats;
   5238                             //Framework expects only 3 channels. So, for now,
   5239                             //use gb stats for G channel.
   5240                             gHistData = stats_data->bayer_stats.gb_stats;
   5241                             bHistData = stats_data->bayer_stats.b_stats;
   5242                             break;
   5243                         case CAM_STATS_CHANNEL_Y:
   5244                         case CAM_STATS_CHANNEL_R:
   5245                         default:
   5246                             rHistData = gHistData = bHistData = stats_data->bayer_stats.r_stats;
   5247                             break;
   5248                     }
   5249                     break;
   5250                 case CAM_HISTOGRAM_TYPE_YUV:
   5251                     rHistData = gHistData = bHistData = stats_data->yuv_stats;
   5252                     break;
   5253                 }
   5254 
   5255                 memcpy(hist_buf, rHistData.hist_buf, hist_size);
   5256                 memcpy(hist_buf[1], gHistData.hist_buf, hist_size);
   5257                 memcpy(hist_buf[2], bHistData.hist_buf, hist_size);
   5258 
   5259                 camMetadata.update(ANDROID_STATISTICS_HISTOGRAM, (int32_t*)hist_buf, hist_size*3);
   5260             }
   5261         }
   5262     }
   5263 
   5264     IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
   5265             CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
   5266         uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
   5267         camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
   5268     }
   5269 
   5270     IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
   5271             CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
   5272         camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
   5273                 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
   5274     }
   5275 
   5276     IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
   5277             CAM_INTF_META_LENS_SHADING_MAP, metadata) {
   5278         size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
   5279                 CAM_MAX_SHADING_MAP_HEIGHT);
   5280         size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
   5281                 CAM_MAX_SHADING_MAP_WIDTH);
   5282         camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
   5283                 lensShadingMap->lens_shading, 4U * map_width * map_height);
   5284     }
   5285 
   5286     IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
   5287         uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
   5288         camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
   5289     }
   5290 
   5291     IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
   5292         //Populate CAM_INTF_META_TONEMAP_CURVES
   5293         /* ch0 = G, ch 1 = B, ch 2 = R*/
   5294         if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
   5295             LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
   5296                      tonemap->tonemap_points_cnt,
   5297                     CAM_MAX_TONEMAP_CURVE_SIZE);
   5298             tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
   5299         }
   5300 
   5301         camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
   5302                         &tonemap->curves[0].tonemap_points[0][0],
   5303                         tonemap->tonemap_points_cnt * 2);
   5304 
   5305         camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
   5306                         &tonemap->curves[1].tonemap_points[0][0],
   5307                         tonemap->tonemap_points_cnt * 2);
   5308 
   5309         camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
   5310                         &tonemap->curves[2].tonemap_points[0][0],
   5311                         tonemap->tonemap_points_cnt * 2);
   5312     }
   5313 
   5314     IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
   5315             CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
   5316         camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
   5317                 CC_GAIN_MAX);
   5318     }
   5319 
   5320     IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
   5321             CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
   5322         camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
   5323                 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
   5324                 CC_MATRIX_COLS * CC_MATRIX_ROWS);
   5325     }
   5326 
   5327     IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
   5328             CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
   5329         if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
   5330             LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
   5331                      toneCurve->tonemap_points_cnt,
   5332                     CAM_MAX_TONEMAP_CURVE_SIZE);
   5333             toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
   5334         }
   5335         camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
   5336                 (float*)toneCurve->curve.tonemap_points,
   5337                 toneCurve->tonemap_points_cnt * 2);
   5338     }
   5339 
   5340     IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
   5341             CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
   5342         camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
   5343                 predColorCorrectionGains->gains, 4);
   5344     }
   5345 
   5346     IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
   5347             CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
   5348         camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
   5349                 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
   5350                 CC_MATRIX_ROWS * CC_MATRIX_COLS);
   5351     }
   5352 
   5353     IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
   5354         camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
   5355     }
   5356 
   5357     IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
   5358         uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
   5359         camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
   5360     }
   5361 
   5362     IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
   5363         uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
   5364         camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
   5365     }
   5366 
   5367     IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
   5368         int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
   5369                 *effectMode);
   5370         if (NAME_NOT_FOUND != val) {
   5371             uint8_t fwk_effectMode = (uint8_t)val;
   5372             camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
   5373         }
   5374     }
   5375 
   5376     IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
   5377             CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
   5378         int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
   5379                 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
   5380         if (NAME_NOT_FOUND != fwk_testPatternMode) {
   5381             camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
   5382         }
   5383         int32_t fwk_testPatternData[4];
   5384         fwk_testPatternData[0] = testPatternData->r;
   5385         fwk_testPatternData[3] = testPatternData->b;
   5386         switch (gCamCapability[mCameraId]->color_arrangement) {
   5387         case CAM_FILTER_ARRANGEMENT_RGGB:
   5388         case CAM_FILTER_ARRANGEMENT_GRBG:
   5389             fwk_testPatternData[1] = testPatternData->gr;
   5390             fwk_testPatternData[2] = testPatternData->gb;
   5391             break;
   5392         case CAM_FILTER_ARRANGEMENT_GBRG:
   5393         case CAM_FILTER_ARRANGEMENT_BGGR:
   5394             fwk_testPatternData[2] = testPatternData->gr;
   5395             fwk_testPatternData[1] = testPatternData->gb;
   5396             break;
   5397         default:
   5398             LOGE("color arrangement %d is not supported",
   5399                 gCamCapability[mCameraId]->color_arrangement);
   5400             break;
   5401         }
   5402         camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
   5403     }
   5404 
   5405     IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
   5406         camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
   5407     }
   5408 
   5409     IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
   5410         String8 str((const char *)gps_methods);
   5411         camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
   5412     }
   5413 
   5414     IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
   5415         camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
   5416     }
   5417 
   5418     IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
   5419         camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
   5420     }
   5421 
   5422     IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
   5423         uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
   5424         camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
   5425     }
   5426 
   5427     IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
   5428         uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
   5429         camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
   5430     }
   5431 
   5432     IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
   5433         int32_t fwk_thumb_size[2];
   5434         fwk_thumb_size[0] = thumb_size->width;
   5435         fwk_thumb_size[1] = thumb_size->height;
   5436         camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
   5437     }
   5438 
   5439     IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
   5440         camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
   5441                 privateData,
   5442                 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
   5443     }
   5444 
   5445     if (metadata->is_tuning_params_valid) {
   5446         uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
   5447         uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
   5448         metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
   5449 
   5450 
   5451         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
   5452                 sizeof(uint32_t));
   5453         data += sizeof(uint32_t);
   5454 
   5455         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
   5456                 sizeof(uint32_t));
   5457         LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
   5458         data += sizeof(uint32_t);
   5459 
   5460         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
   5461                 sizeof(uint32_t));
   5462         LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
   5463         data += sizeof(uint32_t);
   5464 
   5465         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
   5466                 sizeof(uint32_t));
   5467         LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
   5468         data += sizeof(uint32_t);
   5469 
   5470         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
   5471                 sizeof(uint32_t));
   5472         LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
   5473         data += sizeof(uint32_t);
   5474 
   5475         metadata->tuning_params.tuning_mod3_data_size = 0;
   5476         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
   5477                 sizeof(uint32_t));
   5478         LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
   5479         data += sizeof(uint32_t);
   5480 
   5481         size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
   5482                 TUNING_SENSOR_DATA_MAX);
   5483         memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
   5484                 count);
   5485         data += count;
   5486 
   5487         count = MIN(metadata->tuning_params.tuning_vfe_data_size,
   5488                 TUNING_VFE_DATA_MAX);
   5489         memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
   5490                 count);
   5491         data += count;
   5492 
   5493         count = MIN(metadata->tuning_params.tuning_cpp_data_size,
   5494                 TUNING_CPP_DATA_MAX);
   5495         memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
   5496                 count);
   5497         data += count;
   5498 
   5499         count = MIN(metadata->tuning_params.tuning_cac_data_size,
   5500                 TUNING_CAC_DATA_MAX);
   5501         memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
   5502                 count);
   5503         data += count;
   5504 
   5505         camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
   5506                 (int32_t *)(void *)tuning_meta_data_blob,
   5507                 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
   5508     }
   5509 
   5510     IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
   5511             CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
   5512         camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
   5513                 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
   5514                 NEUTRAL_COL_POINTS);
   5515     }
   5516 
   5517     IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
   5518         uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
   5519         camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
   5520     }
   5521 
   5522     IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
   5523         int32_t aeRegions[REGIONS_TUPLE_COUNT];
   5524         // Adjust crop region from sensor output coordinate system to active
   5525         // array coordinate system.
   5526         mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
   5527                 hAeRegions->rect.width, hAeRegions->rect.height);
   5528 
   5529         convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
   5530         camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
   5531                 REGIONS_TUPLE_COUNT);
   5532         LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
   5533                  aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
   5534                 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
   5535                 hAeRegions->rect.height);
   5536     }
   5537 
   5538     IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
   5539         uint8_t fwk_afState = (uint8_t) *afState;
   5540         camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
   5541         LOGD("urgent Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
   5542     }
   5543 
   5544     IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
   5545         camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
   5546     }
   5547 
   5548     IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
   5549         camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
   5550     }
   5551 
   5552     IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
   5553         uint8_t fwk_lensState = *lensState;
   5554         camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
   5555     }
   5556 
   5557     IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
   5558         /*af regions*/
   5559         int32_t afRegions[REGIONS_TUPLE_COUNT];
   5560         // Adjust crop region from sensor output coordinate system to active
   5561         // array coordinate system.
   5562         mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
   5563                 hAfRegions->rect.width, hAfRegions->rect.height);
   5564 
   5565         convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
   5566         camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
   5567                 REGIONS_TUPLE_COUNT);
   5568         LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
   5569                  afRegions[0], afRegions[1], afRegions[2], afRegions[3],
   5570                 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
   5571                 hAfRegions->rect.height);
   5572     }
   5573 
   5574     IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
   5575         int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
   5576                 *hal_ab_mode);
   5577         if (NAME_NOT_FOUND != val) {
   5578             uint8_t fwk_ab_mode = (uint8_t)val;
   5579             camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
   5580         }
   5581     }
   5582 
   5583     IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
   5584         int val = lookupFwkName(SCENE_MODES_MAP,
   5585                 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
   5586         if (NAME_NOT_FOUND != val) {
   5587             uint8_t fwkBestshotMode = (uint8_t)val;
   5588             camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
   5589             LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
   5590         } else {
   5591             LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
   5592         }
   5593     }
   5594 
   5595     IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
   5596          uint8_t fwk_mode = (uint8_t) *mode;
   5597          camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
   5598     }
   5599 
   5600     /* Constant metadata values to be update*/
   5601     uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
   5602     camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
   5603 
   5604     uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
   5605     camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
   5606 
   5607     int32_t hotPixelMap[2];
   5608     camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
   5609 
   5610     // CDS
   5611     IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
   5612         camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
   5613     }
   5614 
   5615     IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
   5616         int32_t fwk_hdr;
   5617         if(*vhdr == CAM_SENSOR_HDR_OFF) {
   5618             fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
   5619         } else {
   5620             fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
   5621         }
   5622         camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
   5623     }
   5624 
   5625     IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
   5626         int32_t fwk_ir = (int32_t) *ir;
   5627         camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
   5628     }
   5629 
   5630     // AEC SPEED
   5631     IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
   5632         camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
   5633     }
   5634 
   5635     // AWB SPEED
   5636     IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
   5637         camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
   5638     }
   5639 
   5640     // TNR
   5641     IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
   5642         uint8_t tnr_enable       = tnr->denoise_enable;
   5643         int32_t tnr_process_type = (int32_t)tnr->process_plates;
   5644 
   5645         camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
   5646         camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
   5647     }
   5648 
   5649     // Reprocess crop data
   5650     IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
   5651         uint8_t cnt = crop_data->num_of_streams;
   5652         if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
   5653             // mm-qcamera-daemon only posts crop_data for streams
   5654             // not linked to pproc. So no valid crop metadata is not
   5655             // necessarily an error case.
   5656             LOGD("No valid crop metadata entries");
   5657         } else {
   5658             uint32_t reproc_stream_id;
   5659             if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
   5660                 LOGD("No reprocessible stream found, ignore crop data");
   5661             } else {
   5662                 int rc = NO_ERROR;
   5663                 Vector<int32_t> roi_map;
   5664                 int32_t *crop = new int32_t[cnt*4];
   5665                 if (NULL == crop) {
   5666                    rc = NO_MEMORY;
   5667                 }
   5668                 if (NO_ERROR == rc) {
   5669                     int32_t streams_found = 0;
   5670                     for (size_t i = 0; i < cnt; i++) {
   5671                         if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
   5672                             if (pprocDone) {
   5673                                 // HAL already does internal reprocessing,
   5674                                 // either via reprocessing before JPEG encoding,
   5675                                 // or offline postprocessing for pproc bypass case.
   5676                                 crop[0] = 0;
   5677                                 crop[1] = 0;
   5678                                 crop[2] = mInputStreamInfo.dim.width;
   5679                                 crop[3] = mInputStreamInfo.dim.height;
   5680                             } else {
   5681                                 crop[0] = crop_data->crop_info[i].crop.left;
   5682                                 crop[1] = crop_data->crop_info[i].crop.top;
   5683                                 crop[2] = crop_data->crop_info[i].crop.width;
   5684                                 crop[3] = crop_data->crop_info[i].crop.height;
   5685                             }
   5686                             roi_map.add(crop_data->crop_info[i].roi_map.left);
   5687                             roi_map.add(crop_data->crop_info[i].roi_map.top);
   5688                             roi_map.add(crop_data->crop_info[i].roi_map.width);
   5689                             roi_map.add(crop_data->crop_info[i].roi_map.height);
   5690                             streams_found++;
   5691                             LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
   5692                                     crop[0], crop[1], crop[2], crop[3]);
   5693                             LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
   5694                                     crop_data->crop_info[i].roi_map.left,
   5695                                     crop_data->crop_info[i].roi_map.top,
   5696                                     crop_data->crop_info[i].roi_map.width,
   5697                                     crop_data->crop_info[i].roi_map.height);
   5698                             break;
   5699 
   5700                        }
   5701                     }
   5702                     camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
   5703                             &streams_found, 1);
   5704                     camMetadata.update(QCAMERA3_CROP_REPROCESS,
   5705                             crop, (size_t)(streams_found * 4));
   5706                     if (roi_map.array()) {
   5707                         camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
   5708                                 roi_map.array(), roi_map.size());
   5709                     }
   5710                }
   5711                if (crop) {
   5712                    delete [] crop;
   5713                }
   5714             }
   5715         }
   5716     }
   5717 
   5718     if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
   5719         // Regardless of CAC supports or not, CTS is expecting the CAC result to be non NULL and
   5720         // so hardcoding the CAC result to OFF mode.
   5721         uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
   5722         camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
   5723     } else {
   5724         IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
   5725             int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
   5726                     *cacMode);
   5727             if (NAME_NOT_FOUND != val) {
   5728                 uint8_t resultCacMode = (uint8_t)val;
   5729                 // check whether CAC result from CB is equal to Framework set CAC mode
   5730                 // If not equal then set the CAC mode came in corresponding request
   5731                 if (fwk_cacMode != resultCacMode) {
   5732                     resultCacMode = fwk_cacMode;
   5733                 }
   5734                 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
   5735                 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
   5736             } else {
   5737                 LOGE("Invalid CAC camera parameter: %d", *cacMode);
   5738             }
   5739         }
   5740     }
   5741 
   5742     // Post blob of cam_cds_data through vendor tag.
   5743     IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
   5744         uint8_t cnt = cdsInfo->num_of_streams;
   5745         cam_cds_data_t cdsDataOverride;
   5746         memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
   5747         cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
   5748         cdsDataOverride.num_of_streams = 1;
   5749         if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
   5750             uint32_t reproc_stream_id;
   5751             if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
   5752                 LOGD("No reprocessible stream found, ignore cds data");
   5753             } else {
   5754                 for (size_t i = 0; i < cnt; i++) {
   5755                     if (cdsInfo->cds_info[i].stream_id ==
   5756                             reproc_stream_id) {
   5757                         cdsDataOverride.cds_info[0].cds_enable =
   5758                                 cdsInfo->cds_info[i].cds_enable;
   5759                         break;
   5760                     }
   5761                 }
   5762             }
   5763         } else {
   5764             LOGD("Invalid stream count %d in CDS_DATA", cnt);
   5765         }
   5766         camMetadata.update(QCAMERA3_CDS_INFO,
   5767                 (uint8_t *)&cdsDataOverride,
   5768                 sizeof(cam_cds_data_t));
   5769     }
   5770 
   5771     // Ldaf calibration data
   5772     if (!mLdafCalibExist) {
   5773         IF_META_AVAILABLE(uint32_t, ldafCalib,
   5774                 CAM_INTF_META_LDAF_EXIF, metadata) {
   5775             mLdafCalibExist = true;
   5776             mLdafCalib[0] = ldafCalib[0];
   5777             mLdafCalib[1] = ldafCalib[1];
   5778             LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
   5779                     ldafCalib[0], ldafCalib[1]);
   5780         }
   5781     }
   5782 
   5783     // Reprocess and DDM debug data through vendor tag
   5784     cam_reprocess_info_t repro_info;
   5785     memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
   5786     IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
   5787             CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
   5788         memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
   5789     }
   5790     IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
   5791             CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
   5792         memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
   5793     }
   5794     IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
   5795             CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
   5796         memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
   5797     }
   5798     IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
   5799             CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
   5800         memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
   5801     }
   5802     IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
   5803             CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
   5804         memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
   5805     }
   5806     IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
   5807         memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
   5808     }
   5809     IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
   5810             CAM_INTF_PARM_ROTATION, metadata) {
   5811         memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
   5812     }
   5813     IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
   5814         memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
   5815     }
   5816     IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
   5817         memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
   5818     }
   5819     camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
   5820         (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
   5821 
   5822     /* In batch mode, cache the first metadata in the batch */
   5823     if (mBatchSize && firstMetadataInBatch) {
   5824         mCachedMetadata.clear();
   5825         mCachedMetadata = camMetadata;
   5826     }
   5827 
   5828     resultMetadata = camMetadata.release();
   5829     return resultMetadata;
   5830 }
   5831 
    5832 /*===========================================================================
    5833  * FUNCTION   : saveExifParams
    5834  *
    5835  * DESCRIPTION: Extracts the 3A EXIF debug parameters (AE, AWB, AF, ASD,
    5836  *              stats, BE stats, bayer histogram and 3A tuning info) from the
    5837  *              incoming metadata buffer and caches them in
    5838  *              mExifParams.debug_params, marking each entry valid, so they
    5839  *              can be embedded later during EXIF composition.
    5840  *
    5841  * PARAMETERS :
    5842  *   @metadata : metadata information from callback
    5843  *
    5844  * RETURN     : none
    5845  *
    5846  *==========================================================================*/
   5843 void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
   5844 {
   5845     IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
   5846             CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
   5847         if (mExifParams.debug_params) {
   5848             mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
   5849             mExifParams.debug_params->ae_debug_params_valid = TRUE;
   5850         }
   5851     }
   5852     IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
   5853             CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
   5854         if (mExifParams.debug_params) {
   5855             mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
   5856             mExifParams.debug_params->awb_debug_params_valid = TRUE;
   5857         }
   5858     }
   5859     IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
   5860             CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
   5861         if (mExifParams.debug_params) {
   5862             mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
   5863             mExifParams.debug_params->af_debug_params_valid = TRUE;
   5864         }
   5865     }
   5866     IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
   5867             CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
   5868         if (mExifParams.debug_params) {
   5869             mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
   5870             mExifParams.debug_params->asd_debug_params_valid = TRUE;
   5871         }
   5872     }
   5873     IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
   5874             CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
   5875         if (mExifParams.debug_params) {
   5876             mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
   5877             mExifParams.debug_params->stats_debug_params_valid = TRUE;
   5878         }
   5879     }
   5880     IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
   5881             CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
   5882         if (mExifParams.debug_params) {
   5883             mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
   5884             mExifParams.debug_params->bestats_debug_params_valid = TRUE;
   5885         }
   5886     }
   5887     IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
   5888             CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
   5889         if (mExifParams.debug_params) {
   5890             mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
   5891             mExifParams.debug_params->bhist_debug_params_valid = TRUE;
   5892         }
   5893     }
   5894     IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
   5895             CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
   5896         if (mExifParams.debug_params) {
   5897             mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
   5898             mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
   5899         }
   5900     }
   5901 }
   5902 
   5903 /*===========================================================================
   5904  * FUNCTION   : get3AExifParams
   5905  *
   5906  * DESCRIPTION:
   5907  *
   5908  * PARAMETERS : none
   5909  *
   5910  *
   5911  * RETURN     : mm_jpeg_exif_params_t
   5912  *
   5913  *==========================================================================*/
   5914 mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
   5915 {
   5916     return mExifParams;
   5917 }
   5918 
   5919 /*===========================================================================
   5920  * FUNCTION   : translateCbUrgentMetadataToResultMetadata
   5921  *
   5922  * DESCRIPTION:
   5923  *
   5924  * PARAMETERS :
   5925  *   @metadata : metadata information from callback
   5926  *
   5927  * RETURN     : camera_metadata_t*
   5928  *              metadata in a format specified by fwk
   5929  *==========================================================================*/
   5930 camera_metadata_t*
   5931 QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
   5932                                 (metadata_buffer_t *metadata)
   5933 {
   5934     CameraMetadata camMetadata;
   5935     camera_metadata_t *resultMetadata;
   5936 
   5937 
   5938     IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
   5939         uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
   5940         camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
   5941         LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
   5942     }
   5943 
   5944     IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
   5945         camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
   5946                 &aecTrigger->trigger, 1);
   5947         camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
   5948                 &aecTrigger->trigger_id, 1);
   5949         LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
   5950                  aecTrigger->trigger);
   5951         LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
   5952                 aecTrigger->trigger_id);
   5953     }
   5954 
   5955     IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
   5956         uint8_t fwk_ae_state = (uint8_t) *ae_state;
   5957         camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
   5958         LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
   5959     }
   5960 
   5961     IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
   5962         int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
   5963         if (NAME_NOT_FOUND != val) {
   5964             uint8_t fwkAfMode = (uint8_t)val;
   5965             camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
   5966             LOGD("urgent Metadata : ANDROID_CONTROL_AF_MODE %d", val);
   5967         } else {
   5968             LOGH("urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d",
   5969                     val);
   5970         }
   5971     }
   5972 
   5973     IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
   5974         camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
   5975                 &af_trigger->trigger, 1);
   5976         LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
   5977                  af_trigger->trigger);
   5978         camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
   5979         LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
   5980                 af_trigger->trigger_id);
   5981     }
   5982 
   5983     IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
   5984         int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
   5985                 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
   5986         if (NAME_NOT_FOUND != val) {
   5987             uint8_t fwkWhiteBalanceMode = (uint8_t)val;
   5988             camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
   5989             LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
   5990         } else {
   5991             LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
   5992         }
   5993     }
   5994 
   5995     uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
   5996     uint32_t aeMode = CAM_AE_MODE_MAX;
   5997     int32_t flashMode = CAM_FLASH_MODE_MAX;
   5998     int32_t redeye = -1;
   5999     IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
   6000         aeMode = *pAeMode;
   6001     }
   6002     IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
   6003         flashMode = *pFlashMode;
   6004     }
   6005     IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
   6006         redeye = *pRedeye;
   6007     }
   6008 
   6009     if (1 == redeye) {
   6010         fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
   6011         camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
   6012     } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
   6013         int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
   6014                 flashMode);
   6015         if (NAME_NOT_FOUND != val) {
   6016             fwk_aeMode = (uint8_t)val;
   6017             camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
   6018         } else {
   6019             LOGE("Unsupported flash mode %d", flashMode);
   6020         }
   6021     } else if (aeMode == CAM_AE_MODE_ON) {
   6022         fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
   6023         camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
   6024     } else if (aeMode == CAM_AE_MODE_OFF) {
   6025         fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
   6026         camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
   6027     } else {
   6028         LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
   6029               "flashMode:%d, aeMode:%u!!!",
   6030                  redeye, flashMode, aeMode);
   6031     }
   6032 
   6033     resultMetadata = camMetadata.release();
   6034     return resultMetadata;
   6035 }
   6036 
   6037 /*===========================================================================
   6038  * FUNCTION   : dumpMetadataToFile
   6039  *
   6040  * DESCRIPTION: Dumps tuning metadata to file system
   6041  *
   6042  * PARAMETERS :
   6043  *   @meta           : tuning metadata
   6044  *   @dumpFrameCount : current dump frame count
   6045  *   @enabled        : Enable mask
   6046  *
   6047  *==========================================================================*/
   6048 void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
   6049                                                    uint32_t &dumpFrameCount,
   6050                                                    bool enabled,
   6051                                                    const char *type,
   6052                                                    uint32_t frameNumber)
   6053 {
   6054     //Some sanity checks
   6055     if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
   6056         LOGE("Tuning sensor data size bigger than expected %d: %d",
   6057               meta.tuning_sensor_data_size,
   6058               TUNING_SENSOR_DATA_MAX);
   6059         return;
   6060     }
   6061 
   6062     if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
   6063         LOGE("Tuning VFE data size bigger than expected %d: %d",
   6064               meta.tuning_vfe_data_size,
   6065               TUNING_VFE_DATA_MAX);
   6066         return;
   6067     }
   6068 
   6069     if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
   6070         LOGE("Tuning CPP data size bigger than expected %d: %d",
   6071               meta.tuning_cpp_data_size,
   6072               TUNING_CPP_DATA_MAX);
   6073         return;
   6074     }
   6075 
   6076     if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
   6077         LOGE("Tuning CAC data size bigger than expected %d: %d",
   6078               meta.tuning_cac_data_size,
   6079               TUNING_CAC_DATA_MAX);
   6080         return;
   6081     }
   6082     //
   6083 
   6084     if(enabled){
   6085         char timeBuf[FILENAME_MAX];
   6086         char buf[FILENAME_MAX];
   6087         memset(buf, 0, sizeof(buf));
   6088         memset(timeBuf, 0, sizeof(timeBuf));
   6089         time_t current_time;
   6090         struct tm * timeinfo;
   6091         time (&current_time);
   6092         timeinfo = localtime (&current_time);
   6093         if (timeinfo != NULL) {
   6094             strftime (timeBuf, sizeof(timeBuf),
   6095                     QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
   6096         }
   6097         String8 filePath(timeBuf);
   6098         snprintf(buf,
   6099                 sizeof(buf),
   6100                 "%dm_%s_%d.bin",
   6101                 dumpFrameCount,
   6102                 type,
   6103                 frameNumber);
   6104         filePath.append(buf);
   6105         int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
   6106         if (file_fd >= 0) {
   6107             ssize_t written_len = 0;
   6108             meta.tuning_data_version = TUNING_DATA_VERSION;
   6109             void *data = (void *)((uint8_t *)&meta.tuning_data_version);
   6110             written_len += write(file_fd, data, sizeof(uint32_t));
   6111             data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
   6112             LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
   6113             written_len += write(file_fd, data, sizeof(uint32_t));
   6114             data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
   6115             LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
   6116             written_len += write(file_fd, data, sizeof(uint32_t));
   6117             data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
   6118             LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
   6119             written_len += write(file_fd, data, sizeof(uint32_t));
   6120             data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
   6121             LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
   6122             written_len += write(file_fd, data, sizeof(uint32_t));
   6123             meta.tuning_mod3_data_size = 0;
   6124             data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
   6125             LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
   6126             written_len += write(file_fd, data, sizeof(uint32_t));
   6127             size_t total_size = meta.tuning_sensor_data_size;
   6128             data = (void *)((uint8_t *)&meta.data);
   6129             written_len += write(file_fd, data, total_size);
   6130             total_size = meta.tuning_vfe_data_size;
   6131             data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
   6132             written_len += write(file_fd, data, total_size);
   6133             total_size = meta.tuning_cpp_data_size;
   6134             data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
   6135             written_len += write(file_fd, data, total_size);
   6136             total_size = meta.tuning_cac_data_size;
   6137             data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
   6138             written_len += write(file_fd, data, total_size);
   6139             close(file_fd);
   6140         }else {
   6141             LOGE("fail to open file for metadata dumping");
   6142         }
   6143     }
   6144 }
   6145 
   6146 /*===========================================================================
   6147  * FUNCTION   : cleanAndSortStreamInfo
   6148  *
   6149  * DESCRIPTION: helper method to clean up invalid streams in stream_info,
   6150  *              and sort them such that raw stream is at the end of the list
   6151  *              This is a workaround for camera daemon constraint.
   6152  *
   6153  * PARAMETERS : None
   6154  *
   6155  *==========================================================================*/
   6156 void QCamera3HardwareInterface::cleanAndSortStreamInfo()
   6157 {
   6158     List<stream_info_t *> newStreamInfo;
   6159 
   6160     /*clean up invalid streams*/
   6161     for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
   6162             it != mStreamInfo.end();) {
   6163         if(((*it)->status) == INVALID){
   6164             QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
   6165             delete channel;
   6166             free(*it);
   6167             it = mStreamInfo.erase(it);
   6168         } else {
   6169             it++;
   6170         }
   6171     }
   6172 
   6173     // Move preview/video/callback/snapshot streams into newList
   6174     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   6175             it != mStreamInfo.end();) {
   6176         if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
   6177                 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
   6178                 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
   6179             newStreamInfo.push_back(*it);
   6180             it = mStreamInfo.erase(it);
   6181         } else
   6182             it++;
   6183     }
   6184     // Move raw streams into newList
   6185     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   6186             it != mStreamInfo.end();) {
   6187         newStreamInfo.push_back(*it);
   6188         it = mStreamInfo.erase(it);
   6189     }
   6190 
   6191     mStreamInfo = newStreamInfo;
   6192 }
   6193 
   6194 /*===========================================================================
   6195  * FUNCTION   : extractJpegMetadata
   6196  *
   6197  * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
   6198  *              JPEG metadata is cached in HAL, and return as part of capture
   6199  *              result when metadata is returned from camera daemon.
   6200  *
   6201  * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
   6202  *              @request:      capture request
   6203  *
   6204  *==========================================================================*/
   6205 void QCamera3HardwareInterface::extractJpegMetadata(
   6206         CameraMetadata& jpegMetadata,
   6207         const camera3_capture_request_t *request)
   6208 {
   6209     CameraMetadata frame_settings;
   6210     frame_settings = request->settings;
   6211 
   6212     if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
   6213         jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
   6214                 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
   6215                 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
   6216 
   6217     if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
   6218         jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
   6219                 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
   6220                 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
   6221 
   6222     if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
   6223         jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
   6224                 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
   6225                 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
   6226 
   6227     if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
   6228         jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
   6229                 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
   6230                 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
   6231 
   6232     if (frame_settings.exists(ANDROID_JPEG_QUALITY))
   6233         jpegMetadata.update(ANDROID_JPEG_QUALITY,
   6234                 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
   6235                 frame_settings.find(ANDROID_JPEG_QUALITY).count);
   6236 
   6237     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
   6238         jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
   6239                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
   6240                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
   6241 
   6242     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
   6243         int32_t thumbnail_size[2];
   6244         thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
   6245         thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
   6246         if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
   6247             int32_t orientation =
   6248                   frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
   6249             if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
   6250                //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
   6251                int32_t temp;
   6252                temp = thumbnail_size[0];
   6253                thumbnail_size[0] = thumbnail_size[1];
   6254                thumbnail_size[1] = temp;
   6255             }
   6256          }
   6257          jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
   6258                 thumbnail_size,
   6259                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
   6260     }
   6261 
   6262 }
   6263 
   6264 /*===========================================================================
   6265  * FUNCTION   : convertToRegions
   6266  *
   6267  * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
   6268  *
   6269  * PARAMETERS :
   6270  *   @rect   : cam_rect_t struct to convert
   6271  *   @region : int32_t destination array
   6272  *   @weight : if we are converting from cam_area_t, weight is valid
   6273  *             else weight = -1
   6274  *
   6275  *==========================================================================*/
   6276 void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
   6277         int32_t *region, int weight)
   6278 {
   6279     region[0] = rect.left;
   6280     region[1] = rect.top;
   6281     region[2] = rect.left + rect.width;
   6282     region[3] = rect.top + rect.height;
   6283     if (weight > -1) {
   6284         region[4] = weight;
   6285     }
   6286 }
   6287 
   6288 /*===========================================================================
   6289  * FUNCTION   : convertFromRegions
   6290  *
   6291  * DESCRIPTION: helper method to convert from array to cam_rect_t
   6292  *
   6293  * PARAMETERS :
   6294  *   @rect   : cam_rect_t struct to convert
   6295  *   @region : int32_t destination array
   6296  *   @weight : if we are converting from cam_area_t, weight is valid
   6297  *             else weight = -1
   6298  *
   6299  *==========================================================================*/
   6300 void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
   6301         const camera_metadata_t *settings, uint32_t tag)
   6302 {
   6303     CameraMetadata frame_settings;
   6304     frame_settings = settings;
   6305     int32_t x_min = frame_settings.find(tag).data.i32[0];
   6306     int32_t y_min = frame_settings.find(tag).data.i32[1];
   6307     int32_t x_max = frame_settings.find(tag).data.i32[2];
   6308     int32_t y_max = frame_settings.find(tag).data.i32[3];
   6309     roi.weight = frame_settings.find(tag).data.i32[4];
   6310     roi.rect.left = x_min;
   6311     roi.rect.top = y_min;
   6312     roi.rect.width = x_max - x_min;
   6313     roi.rect.height = y_max - y_min;
   6314 }
   6315 
   6316 /*===========================================================================
   6317  * FUNCTION   : resetIfNeededROI
   6318  *
   6319  * DESCRIPTION: helper method to reset the roi if it is greater than scaler
   6320  *              crop region
   6321  *
   6322  * PARAMETERS :
   6323  *   @roi       : cam_area_t struct to resize
   6324  *   @scalerCropRegion : cam_crop_region_t region to compare against
   6325  *
   6326  *
   6327  *==========================================================================*/
   6328 bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
   6329                                                  const cam_crop_region_t* scalerCropRegion)
   6330 {
   6331     int32_t roi_x_max = roi->rect.width + roi->rect.left;
   6332     int32_t roi_y_max = roi->rect.height + roi->rect.top;
   6333     int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
   6334     int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
   6335 
   6336     /* According to spec weight = 0 is used to indicate roi needs to be disabled
   6337      * without having this check the calculations below to validate if the roi
   6338      * is inside scalar crop region will fail resulting in the roi not being
   6339      * reset causing algorithm to continue to use stale roi window
   6340      */
   6341     if (roi->weight == 0) {
   6342         return true;
   6343     }
   6344 
   6345     if ((roi_x_max < scalerCropRegion->left) ||
   6346         // right edge of roi window is left of scalar crop's left edge
   6347         (roi_y_max < scalerCropRegion->top)  ||
   6348         // bottom edge of roi window is above scalar crop's top edge
   6349         (roi->rect.left > crop_x_max) ||
   6350         // left edge of roi window is beyond(right) of scalar crop's right edge
   6351         (roi->rect.top > crop_y_max)){
   6352         // top edge of roi windo is above scalar crop's top edge
   6353         return false;
   6354     }
   6355     if (roi->rect.left < scalerCropRegion->left) {
   6356         roi->rect.left = scalerCropRegion->left;
   6357     }
   6358     if (roi->rect.top < scalerCropRegion->top) {
   6359         roi->rect.top = scalerCropRegion->top;
   6360     }
   6361     if (roi_x_max > crop_x_max) {
   6362         roi_x_max = crop_x_max;
   6363     }
   6364     if (roi_y_max > crop_y_max) {
   6365         roi_y_max = crop_y_max;
   6366     }
   6367     roi->rect.width = roi_x_max - roi->rect.left;
   6368     roi->rect.height = roi_y_max - roi->rect.top;
   6369     return true;
   6370 }
   6371 
   6372 /*===========================================================================
   6373  * FUNCTION   : convertLandmarks
   6374  *
   6375  * DESCRIPTION: helper method to extract the landmarks from face detection info
   6376  *
   6377  * PARAMETERS :
   6378  *   @landmark_data : input landmark data to be converted
   6379  *   @landmarks : int32_t destination array
   6380  *
   6381  *
   6382  *==========================================================================*/
   6383 void QCamera3HardwareInterface::convertLandmarks(
   6384         cam_face_landmarks_info_t landmark_data,
   6385         int32_t *landmarks)
   6386 {
   6387     if (landmark_data.is_left_eye_valid) {
   6388         landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
   6389         landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
   6390     } else {
   6391         landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
   6392         landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
   6393     }
   6394 
   6395     if (landmark_data.is_right_eye_valid) {
   6396         landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
   6397         landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
   6398     } else {
   6399         landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
   6400         landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
   6401     }
   6402 
   6403     if (landmark_data.is_mouth_valid) {
   6404         landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
   6405         landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
   6406     } else {
   6407         landmarks[MOUTH_X] = FACE_INVALID_POINT;
   6408         landmarks[MOUTH_Y] = FACE_INVALID_POINT;
   6409     }
   6410 }
   6411 
   6412 /*===========================================================================
   6413  * FUNCTION   : setInvalidLandmarks
   6414  *
   6415  * DESCRIPTION: helper method to set invalid landmarks
   6416  *
   6417  * PARAMETERS :
   6418  *   @landmarks : int32_t destination array
   6419  *
   6420  *
   6421  *==========================================================================*/
   6422 void QCamera3HardwareInterface::setInvalidLandmarks(
   6423         int32_t *landmarks)
   6424 {
   6425     landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
   6426     landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
   6427     landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
   6428     landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
   6429     landmarks[MOUTH_X] = FACE_INVALID_POINT;
   6430     landmarks[MOUTH_Y] = FACE_INVALID_POINT;
   6431 }
   6432 
   6433 #define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
   6434 
   6435 /*===========================================================================
   6436  * FUNCTION   : getCapabilities
   6437  *
   6438  * DESCRIPTION: query camera capability from back-end
   6439  *
   6440  * PARAMETERS :
   6441  *   @ops  : mm-interface ops structure
   6442  *   @cam_handle  : camera handle for which we need capability
   6443  *
   6444  * RETURN     : ptr type of capability structure
   6445  *              capability for success
   6446  *              NULL for failure
   6447  *==========================================================================*/
   6448 cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
   6449         uint32_t cam_handle)
   6450 {
   6451     int rc = NO_ERROR;
   6452     QCamera3HeapMemory *capabilityHeap = NULL;
   6453     cam_capability_t *cap_ptr = NULL;
   6454 
   6455     if (ops == NULL) {
   6456         LOGE("Invalid arguments");
   6457         return NULL;
   6458     }
   6459 
   6460     capabilityHeap = new QCamera3HeapMemory(1);
   6461     if (capabilityHeap == NULL) {
   6462         LOGE("creation of capabilityHeap failed");
   6463         return NULL;
   6464     }
   6465 
   6466     /* Allocate memory for capability buffer */
   6467     rc = capabilityHeap->allocate(sizeof(cam_capability_t));
   6468     if(rc != OK) {
   6469         LOGE("No memory for cappability");
   6470         goto allocate_failed;
   6471     }
   6472 
   6473     /* Map memory for capability buffer */
   6474     memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
   6475 
   6476     rc = ops->map_buf(cam_handle,
   6477             CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
   6478             sizeof(cam_capability_t), capabilityHeap->getPtr(0));
   6479     if(rc < 0) {
   6480         LOGE("failed to map capability buffer");
   6481         rc = FAILED_TRANSACTION;
   6482         goto map_failed;
   6483     }
   6484 
   6485     /* Query Capability */
   6486     rc = ops->query_capability(cam_handle);
   6487     if(rc < 0) {
   6488         LOGE("failed to query capability");
   6489         rc = FAILED_TRANSACTION;
   6490         goto query_failed;
   6491     }
   6492 
   6493     cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
   6494     if (cap_ptr == NULL) {
   6495         LOGE("out of memory");
   6496         rc = NO_MEMORY;
   6497         goto query_failed;
   6498     }
   6499 
   6500     memset(cap_ptr, 0, sizeof(cam_capability_t));
   6501     memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
   6502 
   6503     int index;
   6504     for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
   6505         cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
   6506         p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
   6507         p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
   6508     }
   6509 
   6510 query_failed:
   6511     ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
   6512 map_failed:
   6513     capabilityHeap->deallocate();
   6514 allocate_failed:
   6515     delete capabilityHeap;
   6516 
   6517     if (rc != NO_ERROR) {
   6518         return NULL;
   6519     } else {
   6520         return cap_ptr;
   6521     }
   6522 }
   6523 
   6524 /*===========================================================================
   6525  * FUNCTION   : initCapabilities
   6526  *
   6527  * DESCRIPTION: initialize camera capabilities in static data struct
   6528  *
   6529  * PARAMETERS :
   6530  *   @cameraId  : camera Id
   6531  *
   6532  * RETURN     : int32_t type of status
   6533  *              NO_ERROR  -- success
   6534  *              none-zero failure code
   6535  *==========================================================================*/
   6536 int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
   6537 {
   6538     int rc = 0;
   6539     mm_camera_vtbl_t *cameraHandle = NULL;
   6540     uint32_t handle = 0;
   6541 
   6542     rc = camera_open((uint8_t)cameraId, &cameraHandle);
   6543     if (rc) {
   6544         LOGE("camera_open failed. rc = %d", rc);
   6545         goto open_failed;
   6546     }
   6547     if (!cameraHandle) {
   6548         LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
   6549         goto open_failed;
   6550     }
   6551 
   6552     handle = get_main_camera_handle(cameraHandle->camera_handle);
   6553     gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
   6554     if (gCamCapability[cameraId] == NULL) {
   6555         rc = FAILED_TRANSACTION;
   6556         goto failed_op;
   6557     }
   6558 
   6559     if (is_dual_camera_by_idx(cameraId)) {
   6560         handle = get_aux_camera_handle(cameraHandle->camera_handle);
   6561         gCamCapability[cameraId]->aux_cam_cap =
   6562                 getCapabilities(cameraHandle->ops, handle);
   6563         if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
   6564             rc = FAILED_TRANSACTION;
   6565             free(gCamCapability[cameraId]);
   6566             goto failed_op;
   6567         }
   6568     }
   6569 failed_op:
   6570     cameraHandle->ops->close_camera(cameraHandle->camera_handle);
   6571     cameraHandle = NULL;
   6572 open_failed:
   6573     return rc;
   6574 }
   6575 
   6576 /*==========================================================================
 * FUNCTION   : get3AVersion
   6578  *
   6579  * DESCRIPTION: get the Q3A S/W version
   6580  *
   6581  * PARAMETERS :
   6582  *  @sw_version: Reference of Q3A structure which will hold version info upon
   6583  *               return
   6584  *
   6585  * RETURN     : None
   6586  *
   6587  *==========================================================================*/
   6588 void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
   6589 {
   6590     if(gCamCapability[mCameraId])
   6591         sw_version = gCamCapability[mCameraId]->q3a_version;
   6592     else
   6593         LOGE("Capability structure NULL!");
   6594 }
   6595 
   6596 
   6597 /*===========================================================================
   6598  * FUNCTION   : initParameters
   6599  *
   6600  * DESCRIPTION: initialize camera parameters
   6601  *
   6602  * PARAMETERS :
   6603  *
   6604  * RETURN     : int32_t type of status
   6605  *              NO_ERROR  -- success
 *              non-zero failure code
   6607  *==========================================================================*/
   6608 int QCamera3HardwareInterface::initParameters()
   6609 {
   6610     int rc = 0;
   6611 
   6612     //Allocate Set Param Buffer
   6613     mParamHeap = new QCamera3HeapMemory(1);
   6614     rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
   6615     if(rc != OK) {
   6616         rc = NO_MEMORY;
   6617         LOGE("Failed to allocate SETPARM Heap memory");
   6618         delete mParamHeap;
   6619         mParamHeap = NULL;
   6620         return rc;
   6621     }
   6622 
   6623     //Map memory for parameters buffer
   6624     rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
   6625             CAM_MAPPING_BUF_TYPE_PARM_BUF,
   6626             mParamHeap->getFd(0),
   6627             sizeof(metadata_buffer_t),
   6628             (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
   6629     if(rc < 0) {
   6630         LOGE("failed to map SETPARM buffer");
   6631         rc = FAILED_TRANSACTION;
   6632         mParamHeap->deallocate();
   6633         delete mParamHeap;
   6634         mParamHeap = NULL;
   6635         return rc;
   6636     }
   6637 
   6638     mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
   6639 
   6640     mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
   6641     return rc;
   6642 }
   6643 
   6644 /*===========================================================================
   6645  * FUNCTION   : deinitParameters
   6646  *
   6647  * DESCRIPTION: de-initialize camera parameters
   6648  *
   6649  * PARAMETERS :
   6650  *
   6651  * RETURN     : NONE
   6652  *==========================================================================*/
   6653 void QCamera3HardwareInterface::deinitParameters()
   6654 {
   6655     mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
   6656             CAM_MAPPING_BUF_TYPE_PARM_BUF);
   6657 
   6658     mParamHeap->deallocate();
   6659     delete mParamHeap;
   6660     mParamHeap = NULL;
   6661 
   6662     mParameters = NULL;
   6663 
   6664     free(mPrevParameters);
   6665     mPrevParameters = NULL;
   6666 }
   6667 
   6668 /*===========================================================================
   6669  * FUNCTION   : calcMaxJpegSize
   6670  *
   6671  * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
   6672  *
   6673  * PARAMETERS :
   6674  *
   6675  * RETURN     : max_jpeg_size
   6676  *==========================================================================*/
   6677 size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
   6678 {
   6679     size_t max_jpeg_size = 0;
   6680     size_t temp_width, temp_height;
   6681     size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
   6682             MAX_SIZES_CNT);
   6683     for (size_t i = 0; i < count; i++) {
   6684         temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
   6685         temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
   6686         if (temp_width * temp_height > max_jpeg_size ) {
   6687             max_jpeg_size = temp_width * temp_height;
   6688         }
   6689     }
   6690     max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
   6691     return max_jpeg_size;
   6692 }
   6693 
   6694 /*===========================================================================
   6695  * FUNCTION   : getMaxRawSize
   6696  *
   6697  * DESCRIPTION: Fetches maximum raw size supported by the cameraId
   6698  *
   6699  * PARAMETERS :
   6700  *
   6701  * RETURN     : Largest supported Raw Dimension
   6702  *==========================================================================*/
   6703 cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
   6704 {
   6705     int max_width = 0;
   6706     cam_dimension_t maxRawSize;
   6707 
   6708     memset(&maxRawSize, 0, sizeof(cam_dimension_t));
   6709     for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
   6710         if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
   6711             max_width = gCamCapability[camera_id]->raw_dim[i].width;
   6712             maxRawSize = gCamCapability[camera_id]->raw_dim[i];
   6713         }
   6714     }
   6715     return maxRawSize;
   6716 }
   6717 
   6718 
   6719 /*===========================================================================
   6720  * FUNCTION   : calcMaxJpegDim
   6721  *
   6722  * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
   6723  *
   6724  * PARAMETERS :
   6725  *
   6726  * RETURN     : max_jpeg_dim
   6727  *==========================================================================*/
   6728 cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
   6729 {
   6730     cam_dimension_t max_jpeg_dim;
   6731     cam_dimension_t curr_jpeg_dim;
   6732     max_jpeg_dim.width = 0;
   6733     max_jpeg_dim.height = 0;
   6734     curr_jpeg_dim.width = 0;
   6735     curr_jpeg_dim.height = 0;
   6736     for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
   6737         curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
   6738         curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
   6739         if (curr_jpeg_dim.width * curr_jpeg_dim.height >
   6740             max_jpeg_dim.width * max_jpeg_dim.height ) {
   6741             max_jpeg_dim.width = curr_jpeg_dim.width;
   6742             max_jpeg_dim.height = curr_jpeg_dim.height;
   6743         }
   6744     }
   6745     return max_jpeg_dim;
   6746 }
   6747 
   6748 /*===========================================================================
   6749  * FUNCTION   : addStreamConfig
   6750  *
   6751  * DESCRIPTION: adds the stream configuration to the array
   6752  *
   6753  * PARAMETERS :
   6754  * @available_stream_configs : pointer to stream configuration array
   6755  * @scalar_format            : scalar format
   6756  * @dim                      : configuration dimension
   6757  * @config_type              : input or output configuration type
   6758  *
   6759  * RETURN     : NONE
   6760  *==========================================================================*/
   6761 void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
   6762         int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
   6763 {
   6764     available_stream_configs.add(scalar_format);
   6765     available_stream_configs.add(dim.width);
   6766     available_stream_configs.add(dim.height);
   6767     available_stream_configs.add(config_type);
   6768 }
   6769 
   6770 /*===========================================================================
 * FUNCTION   : supportBurstCapture
   6772  *
   6773  * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
   6774  *
   6775  * PARAMETERS :
   6776  *   @cameraId  : camera Id
   6777  *
   6778  * RETURN     : true if camera supports BURST_CAPTURE
   6779  *              false otherwise
   6780  *==========================================================================*/
   6781 bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
   6782 {
   6783     const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
   6784     const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
   6785     const int32_t highResWidth = 3264;
   6786     const int32_t highResHeight = 2448;
   6787 
   6788     if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
   6789         // Maximum resolution images cannot be captured at >= 10fps
   6790         // -> not supporting BURST_CAPTURE
   6791         return false;
   6792     }
   6793 
   6794     if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
   6795         // Maximum resolution images can be captured at >= 20fps
   6796         // --> supporting BURST_CAPTURE
   6797         return true;
   6798     }
   6799 
   6800     // Find the smallest highRes resolution, or largest resolution if there is none
   6801     size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
   6802             MAX_SIZES_CNT);
   6803     size_t highRes = 0;
   6804     while ((highRes + 1 < totalCnt) &&
   6805             (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
   6806             gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
   6807             highResWidth * highResHeight)) {
   6808         highRes++;
   6809     }
   6810     if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
   6811         return true;
   6812     } else {
   6813         return false;
   6814     }
   6815 }
   6816 
   6817 /*===========================================================================
   6818  * FUNCTION   : initStaticMetadata
   6819  *
   6820  * DESCRIPTION: initialize the static metadata
   6821  *
   6822  * PARAMETERS :
   6823  *   @cameraId  : camera Id
   6824  *
   6825  * RETURN     : int32_t type of status
   6826  *              0  -- success
   6827  *              non-zero failure code
   6828  *==========================================================================*/
   6829 int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
   6830 {
   6831     int rc = 0;
   6832     CameraMetadata staticInfo;
   6833     size_t count = 0;
   6834     bool limitedDevice = false;
   6835     char prop[PROPERTY_VALUE_MAX];
   6836     bool supportBurst = false;
   6837 
   6838     supportBurst = supportBurstCapture(cameraId);
   6839 
   6840     /* If sensor is YUV sensor (no raw support) or if per-frame control is not
   6841      * guaranteed or if min fps of max resolution is less than 20 fps, its
   6842      * advertised as limited device*/
   6843     limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
   6844             (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
   6845             (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
   6846             !supportBurst;
   6847 
   6848     uint8_t supportedHwLvl = limitedDevice ?
   6849             ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
   6850 #ifndef USE_HAL_3_3
   6851             // LEVEL_3 - This device will support level 3.
   6852             ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
   6853 #else
   6854             ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
   6855 #endif
   6856 
   6857     staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
   6858             &supportedHwLvl, 1);
   6859 
   6860     bool facingBack = false;
   6861     if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
   6862             (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
   6863         facingBack = true;
   6864     }
   6865     /*HAL 3 only*/
   6866     staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
   6867                     &gCamCapability[cameraId]->min_focus_distance, 1);
   6868 
   6869     staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
   6870                     &gCamCapability[cameraId]->hyper_focal_distance, 1);
   6871 
   6872     /*should be using focal lengths but sensor doesn't provide that info now*/
   6873     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
   6874                       &gCamCapability[cameraId]->focal_length,
   6875                       1);
   6876 
   6877     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
   6878             gCamCapability[cameraId]->apertures,
   6879             MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
   6880 
   6881     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
   6882             gCamCapability[cameraId]->filter_densities,
   6883             MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
   6884 
   6885 
   6886     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
   6887             (uint8_t *)gCamCapability[cameraId]->optical_stab_modes,
   6888             MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count));
   6889 
   6890     int32_t lens_shading_map_size[] = {
   6891             MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
   6892             MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
   6893     staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
   6894                       lens_shading_map_size,
   6895                       sizeof(lens_shading_map_size)/sizeof(int32_t));
   6896 
   6897     staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
   6898             gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
   6899 
   6900     staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
   6901             gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
   6902 
   6903     staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
   6904             &gCamCapability[cameraId]->max_frame_duration, 1);
   6905 
   6906     camera_metadata_rational baseGainFactor = {
   6907             gCamCapability[cameraId]->base_gain_factor.numerator,
   6908             gCamCapability[cameraId]->base_gain_factor.denominator};
   6909     staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
   6910                       &baseGainFactor, 1);
   6911 
   6912     staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
   6913                      (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
   6914 
   6915     int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
   6916             gCamCapability[cameraId]->pixel_array_size.height};
   6917     staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
   6918                       pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
   6919 
   6920     int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
   6921             gCamCapability[cameraId]->active_array_size.top,
   6922             gCamCapability[cameraId]->active_array_size.width,
   6923             gCamCapability[cameraId]->active_array_size.height};
   6924     staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
   6925             active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
   6926 
   6927     staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
   6928             &gCamCapability[cameraId]->white_level, 1);
   6929 
   6930     staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
   6931             gCamCapability[cameraId]->black_level_pattern, BLACK_LEVEL_PATTERN_CNT);
   6932 
   6933 #ifndef USE_HAL_3_3
   6934     bool hasBlackRegions = false;
   6935     if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
   6936         LOGW("black_region_count: %d is bounded to %d",
   6937             gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
   6938         gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
   6939     }
   6940     if (gCamCapability[cameraId]->optical_black_region_count != 0) {
   6941         int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
   6942         for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
   6943             opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
   6944         }
   6945         staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
   6946                 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
   6947         hasBlackRegions = true;
   6948     }
   6949 #endif
   6950     staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
   6951             &gCamCapability[cameraId]->flash_charge_duration, 1);
   6952 
   6953     staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
   6954             &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
   6955 
   6956     // SOF timestamp is based on monotonic_boottime. So advertize REALTIME timesource
   6957     // REALTIME defined in HAL3 API is same as linux's CLOCK_BOOTTIME
   6958     // Ref: kernel/...../msm_isp_util.c: msm_isp_get_timestamp: get_monotonic_boottime
   6959     uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME;
   6960     staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
   6961             &timestampSource, 1);
   6962 
   6963     staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
   6964             &gCamCapability[cameraId]->histogram_size, 1);
   6965 
   6966     staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
   6967             &gCamCapability[cameraId]->max_histogram_count, 1);
   6968 
   6969     int32_t sharpness_map_size[] = {
   6970             gCamCapability[cameraId]->sharpness_map_size.width,
   6971             gCamCapability[cameraId]->sharpness_map_size.height};
   6972 
   6973     staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
   6974             sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
   6975 
   6976     staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
   6977             &gCamCapability[cameraId]->max_sharpness_map_value, 1);
   6978 
   6979     int32_t scalar_formats[] = {
   6980             ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
   6981             ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
   6982             ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
   6983             ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
   6984             HAL_PIXEL_FORMAT_RAW10,
   6985             HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
   6986     size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(int32_t);
   6987     staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
   6988                       scalar_formats,
   6989                       scalar_formats_count);
   6990 
   6991     int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
   6992     count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
   6993     makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
   6994             count, MAX_SIZES_CNT, available_processed_sizes);
   6995     staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
   6996             available_processed_sizes, count * 2);
   6997 
   6998     int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
   6999     count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
   7000     makeTable(gCamCapability[cameraId]->raw_dim,
   7001             count, MAX_SIZES_CNT, available_raw_sizes);
   7002     staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
   7003             available_raw_sizes, count * 2);
   7004 
   7005     int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
   7006     count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
   7007     makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
   7008             count, MAX_SIZES_CNT, available_fps_ranges);
   7009     staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
   7010             available_fps_ranges, count * 2);
   7011 
   7012     camera_metadata_rational exposureCompensationStep = {
   7013             gCamCapability[cameraId]->exp_compensation_step.numerator,
   7014             gCamCapability[cameraId]->exp_compensation_step.denominator};
   7015     staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
   7016                       &exposureCompensationStep, 1);
   7017 
   7018     Vector<uint8_t> availableVstabModes;
   7019     availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
   7020     char eis_prop[PROPERTY_VALUE_MAX];
   7021     bool eisSupported = false;
   7022     memset(eis_prop, 0, sizeof(eis_prop));
   7023     property_get("persist.camera.eis.enable", eis_prop, "1");
   7024     uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
   7025     count = IS_TYPE_MAX;
   7026     count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
   7027     for (size_t i = 0; i < count; i++) {
   7028         if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
   7029             (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
   7030             eisSupported = true;
   7031             break;
   7032         }
   7033     }
   7034     if (facingBack && eis_prop_set && eisSupported) {
   7035         availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
   7036     }
   7037     staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
   7038                       availableVstabModes.array(), availableVstabModes.size());
   7039 
   7040     /*HAL 1 and HAL 3 common*/
   7041     uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
   7042     uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
   7043     uint32_t minZoomStep = 100; //as per HAL1/API1 spec
   7044     float maxZoom = maxZoomStep/minZoomStep;
   7045     staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
   7046             &maxZoom, 1);
   7047 
   7048     uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
   7049     staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
   7050 
   7051     int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
   7052     if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
   7053         max3aRegions[2] = 0; /* AF not supported */
   7054     staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
   7055             max3aRegions, 3);
   7056 
   7057     /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
   7058     memset(prop, 0, sizeof(prop));
   7059     property_get("persist.camera.facedetect", prop, "1");
   7060     uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
   7061     LOGD("Support face detection mode: %d",
   7062              supportedFaceDetectMode);
   7063 
   7064     int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
   7065     /* support mode should be OFF if max number of face is 0 */
   7066     if (maxFaces <= 0) {
   7067         supportedFaceDetectMode = 0;
   7068     }
   7069     Vector<uint8_t> availableFaceDetectModes;
   7070     availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
   7071     if (supportedFaceDetectMode == 1) {
   7072         availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
   7073     } else if (supportedFaceDetectMode == 2) {
   7074         availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
   7075     } else if (supportedFaceDetectMode == 3) {
   7076         availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
   7077         availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
   7078     } else {
   7079         maxFaces = 0;
   7080     }
   7081     staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
   7082             availableFaceDetectModes.array(),
   7083             availableFaceDetectModes.size());
   7084     staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
   7085             (int32_t *)&maxFaces, 1);
   7086 
   7087     int32_t exposureCompensationRange[] = {
   7088             gCamCapability[cameraId]->exposure_compensation_min,
   7089             gCamCapability[cameraId]->exposure_compensation_max};
   7090     staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
   7091             exposureCompensationRange,
   7092             sizeof(exposureCompensationRange)/sizeof(int32_t));
   7093 
   7094     uint8_t lensFacing = (facingBack) ?
   7095             ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
   7096     staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
   7097 
   7098     staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
   7099                       available_thumbnail_sizes,
   7100                       sizeof(available_thumbnail_sizes)/sizeof(int32_t));
   7101 
   7102     /*all sizes will be clubbed into this tag*/
   7103     count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
   7104     /*android.scaler.availableStreamConfigurations*/
   7105     Vector<int32_t> available_stream_configs;
   7106     cam_dimension_t active_array_dim;
   7107     active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
   7108     active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
   7109     /* Add input/output stream configurations for each scalar formats*/
   7110     for (size_t j = 0; j < scalar_formats_count; j++) {
   7111         switch (scalar_formats[j]) {
   7112         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
   7113         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
   7114         case HAL_PIXEL_FORMAT_RAW10:
   7115             for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
   7116                     gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
   7117                 addStreamConfig(available_stream_configs, scalar_formats[j],
   7118                         gCamCapability[cameraId]->raw_dim[i],
   7119                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
   7120             }
   7121             break;
   7122         case HAL_PIXEL_FORMAT_BLOB:
   7123             for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
   7124                     gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
   7125                 addStreamConfig(available_stream_configs, scalar_formats[j],
   7126                         gCamCapability[cameraId]->picture_sizes_tbl[i],
   7127                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
   7128             }
   7129             break;
   7130         case HAL_PIXEL_FORMAT_YCbCr_420_888:
   7131         case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
   7132         default:
   7133             cam_dimension_t largest_picture_size;
   7134             memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
   7135             for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
   7136                     gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
   7137                 addStreamConfig(available_stream_configs, scalar_formats[j],
   7138                         gCamCapability[cameraId]->picture_sizes_tbl[i],
   7139                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
   7140                 /* Book keep largest */
   7141                 if (gCamCapability[cameraId]->picture_sizes_tbl[i].width
   7142                         >= largest_picture_size.width &&
   7143                         gCamCapability[cameraId]->picture_sizes_tbl[i].height
   7144                         >= largest_picture_size.height)
   7145                     largest_picture_size = gCamCapability[cameraId]->picture_sizes_tbl[i];
   7146             }
   7147             /*For below 2 formats we also support i/p streams for reprocessing advertise those*/
   7148             if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
   7149                     scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
   7150                  addStreamConfig(available_stream_configs, scalar_formats[j],
   7151                          largest_picture_size,
   7152                          ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
   7153             }
   7154             break;
   7155         }
   7156     }
   7157 
    // Publish the (format, width, height, direction) tuples accumulated above
    // as the framework's stream configuration table.
    staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
                      available_stream_configs.array(), available_stream_configs.size());
    // Default hot pixel correction mode advertised to the framework.
    static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
    staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);

    // Hot pixel map reporting is off by default.
    static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
    staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);

    /* android.scaler.availableMinFrameDurations */
    // Flattened entries of (format, width, height, min_duration). RAW formats
    // pull from the RAW dimension/duration capability tables; every other
    // format uses the processed picture-size tables. Both loops are clamped to
    // MAX_SIZES_CNT so a bad capability count cannot overrun the tables.
    Vector<int64_t> available_min_durations;
    for (size_t j = 0; j < scalar_formats_count; j++) {
        switch (scalar_formats[j]) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
                available_min_durations.add(scalar_formats[j]);
                available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
                available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
                available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
            }
            break;
        default:
            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
                available_min_durations.add(scalar_formats[j]);
                available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
                available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
                available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
            }
            break;
        }
    }
    staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
                      available_min_durations.array(), available_min_durations.size());
   7194 
    // Build the HIGH_SPEED_VIDEO_CONFIGURATIONS table from the vendor HFR
    // capability table. Each supported mode is first mapped to a numeric fps.
    Vector<int32_t> available_hfr_configs;
    for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
        int32_t fps = 0;
        // Translate the vendor CAM_HFR_MODE_* enum to its frame rate; modes
        // not listed (OFF/MAX/unknown) leave fps at 0 and are skipped below.
        switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
        case CAM_HFR_MODE_60FPS:
            fps = 60;
            break;
        case CAM_HFR_MODE_90FPS:
            fps = 90;
            break;
        case CAM_HFR_MODE_120FPS:
            fps = 120;
            break;
        case CAM_HFR_MODE_150FPS:
            fps = 150;
            break;
        case CAM_HFR_MODE_180FPS:
            fps = 180;
            break;
        case CAM_HFR_MODE_210FPS:
            fps = 210;
            break;
        case CAM_HFR_MODE_240FPS:
            fps = 240;
            break;
        case CAM_HFR_MODE_480FPS:
            fps = 480;
            break;
        case CAM_HFR_MODE_OFF:
        case CAM_HFR_MODE_MAX:
        default:
            break;
        }

        /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
        if (fps >= MIN_FPS_FOR_BATCH_MODE) {
            /* For each HFR frame rate, need to advertise one variable fps range
             * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
             * and [120, 120]. While camcorder preview alone is running [30, 120] is
             * set by the app. When video recording is started, [120, 120] is
             * set. This way sensor configuration does not change when recording
             * is started */

            /* (width, height, fps_min, fps_max, batch_size_max) */
            // Variable range: [PREVIEW_FPS_FOR_HFR, fps].
            for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
                j < MAX_SIZES_CNT; j++) {
                available_hfr_configs.add(
                        gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
                available_hfr_configs.add(
                        gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
                available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
                available_hfr_configs.add(fps);
                available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);

                /* (width, height, fps_min, fps_max, batch_size_max) */
                // Fixed range: [fps, fps], same batch size.
                available_hfr_configs.add(
                        gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
                available_hfr_configs.add(
                        gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
                available_hfr_configs.add(fps);
                available_hfr_configs.add(fps);
                available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
            }
       }
    }
    //Advertise HFR capability only if the property is set
    // persist.camera.hal3hfr.enable defaults to "1" (enabled).
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.hal3hfr.enable", prop, "1");
    uint8_t hfrEnable = (uint8_t)atoi(prop);

    // Only publish the tag when the property is enabled AND at least one HFR
    // entry was produced (array() is non-null only for a non-empty vector).
    if(hfrEnable && available_hfr_configs.array()) {
        staticInfo.update(
                ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
                available_hfr_configs.array(), available_hfr_configs.size());
    }
   7270 
    // Maximum JPEG blob size for this sensor.
    int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
                      &max_jpeg_size, 1);

    // Translate vendor effect enums to framework values; entries without a
    // framework mapping are dropped.
    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
    size_t size = 0;
    count = CAM_EFFECT_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
                gCamCapability[cameraId]->supported_effects[i]);
        if (NAME_NOT_FOUND != val) {
            avail_effects[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
                      avail_effects,
                      size);

    // Scene modes: CAM_SCENE_MODE_OFF is filtered out; supported_indexes
    // remembers the vendor-table index of each advertised mode so the
    // overrides list below can be built in the same order.
    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
    size_t supported_scene_modes_cnt = 0;
    count = CAM_SCENE_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
    for (size_t i = 0; i < count; i++) {
        if (gCamCapability[cameraId]->supported_scene_modes[i] !=
                CAM_SCENE_MODE_OFF) {
            int val = lookupFwkName(SCENE_MODES_MAP,
                    METADATA_MAP_SIZE(SCENE_MODES_MAP),
                    gCamCapability[cameraId]->supported_scene_modes[i]);
            if (NAME_NOT_FOUND != val) {
                avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
                supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
                supported_scene_modes_cnt++;
            }
        }
    }
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
                      avail_scene_modes,
                      supported_scene_modes_cnt);

    // Three override entries (AE, AWB, AF) per advertised scene mode.
    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX  * 3];
    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
                      supported_scene_modes_cnt,
                      CAM_SCENE_MODE_MAX,
                      scene_mode_overrides,
                      supported_indexes,
                      cameraId);

    // No scene modes at all: advertise a single DISABLED entry.
    // NOTE(review): in this case scene_mode_overrides[0..2] appear to be
    // published without being filled by makeOverridesList (cnt was 0), and
    // the AVAILABLE_SCENE_MODES update above already ran with count 0 —
    // verify against makeOverridesList / framework expectations.
    if (supported_scene_modes_cnt == 0) {
        supported_scene_modes_cnt = 1;
        avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
    }

    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
            scene_mode_overrides, supported_scene_modes_cnt * 3);

    // Top-level 3A control modes supported by this HAL.
    uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
                                         ANDROID_CONTROL_MODE_AUTO,
                                         ANDROID_CONTROL_MODE_USE_SCENE_MODE};
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
            available_control_modes,
            3);
   7335 
    // Antibanding modes: map vendor enums to framework values, dropping any
    // without a mapping.
    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
    size = 0;
    count = CAM_ANTIBANDING_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
                gCamCapability[cameraId]->supported_antibandings[i]);
        if (NAME_NOT_FOUND != val) {
            avail_antibanding_modes[size] = (uint8_t)val;
            size++;
        }

    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
                      avail_antibanding_modes,
                      size);

    // Chromatic aberration correction: the array is fixed; only the number of
    // entries advertised (`size`) depends on the capability count.
    uint8_t avail_abberation_modes[] = {
            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
    count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
    count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
    if (0 == count) {
        //  If no aberration correction modes are available for a device, this advertise OFF mode
        size = 1;
    } else {
        // If count is not zero then atleast one among the FAST or HIGH quality is supported
        // So, advertize all 3 modes if atleast any one mode is supported as per the
        // new M requirement
        size = 3;
    }
    staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
            avail_abberation_modes,
            size);

    // Autofocus modes, vendor -> framework.
    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
    size = 0;
    count = CAM_FOCUS_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
                gCamCapability[cameraId]->supported_focus_modes[i]);
        if (NAME_NOT_FOUND != val) {
            avail_af_modes[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
                      avail_af_modes,
                      size);

    // White balance modes, vendor -> framework.
    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
    size = 0;
    count = CAM_WB_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
                gCamCapability[cameraId]->supported_white_balances[i]);
        if (NAME_NOT_FOUND != val) {
            avail_awb_modes[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
                      avail_awb_modes,
                      size);
   7404 
    // Flash firing power levels, copied straight from the capability table.
    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
    count = CAM_FLASH_FIRING_LEVEL_MAX;
    count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
            count);
    for (size_t i = 0; i < count; i++) {
        available_flash_levels[i] =
                gCamCapability[cameraId]->supported_firing_levels[i];
    }
    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
            available_flash_levels, count);

    uint8_t flashAvailable;
    if (gCamCapability[cameraId]->flash_available)
        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
    else
        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
            &flashAvailable, 1);

    // AE modes from the capability table; the two flash-assisted AE modes are
    // appended only when a flash unit exists (relies on
    // ANDROID_FLASH_INFO_AVAILABLE_TRUE being non-zero).
    Vector<uint8_t> avail_ae_modes;
    count = CAM_AE_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
    for (size_t i = 0; i < count; i++) {
        avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
    }
    if (flashAvailable) {
        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
                      avail_ae_modes.array(),
                      avail_ae_modes.size());

    // Sensor ISO sensitivity range [min, max].
    int32_t sensitivity_range[2];
    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
                      sensitivity_range,
                      sizeof(sensitivity_range) / sizeof(int32_t));

    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
                      &gCamCapability[cameraId]->max_analog_sensitivity,
                      1);

    // Sensor mount angle reported as the framework orientation (degrees).
    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
                      &sensor_orientation,
                      1);
   7453 
    // Maximum concurrent output streams: {stalling (JPEG), processed, RAW}.
    int32_t max_output_streams[] = {
            MAX_STALLING_STREAMS,
            MAX_PROCESSED_STREAMS,
            MAX_RAW_STREAMS};
    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
            max_output_streams,
            sizeof(max_output_streams)/sizeof(max_output_streams[0]));

    // No notification LEDs — published with count 0 on purpose.
    uint8_t avail_leds = 0;
    staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
                      &avail_leds, 0);

    // Focus distance calibration quality; only published when the vendor
    // value maps to a framework enum.
    uint8_t focus_dist_calibrated;
    int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
            gCamCapability[cameraId]->focus_dist_calibrated);
    if (NAME_NOT_FOUND != val) {
        focus_dist_calibrated = (uint8_t)val;
        staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
                     &focus_dist_calibrated, 1);
    }

    // Sensor test pattern modes, vendor -> framework.
    int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
    size = 0;
    count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
            MAX_TEST_PATTERN_CNT);
    for (size_t i = 0; i < count; i++) {
        int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
                gCamCapability[cameraId]->supported_test_pattern_modes[i]);
        if (NAME_NOT_FOUND != testpatternMode) {
            avail_testpattern_modes[size] = testpatternMode;
            size++;
        }
    }
    staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
                      avail_testpattern_modes,
                      size);

    // Worst-case pipeline depth: in-flight requests plus startup/skip delays.
    uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
    staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
                      &max_pipeline_depth,
                      1);

    int32_t partial_result_count = PARTIAL_RESULT_COUNT;
    staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
                      &partial_result_count,
                       1);

    // Maximum extra frame delay a reprocess capture may introduce.
    int32_t max_stall_duration = MAX_REPROCESS_STALL;
    staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
   7503 
    // Device capability flags. Baseline set is unconditional; BURST_CAPTURE,
    // CONSTRAINED_HIGH_SPEED_VIDEO and RAW are gated on runtime conditions.
    Vector<uint8_t> available_capabilities;
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
    if (supportBurst) {
        available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
    }
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
    // Same gate as the HIGH_SPEED_VIDEO_CONFIGURATIONS tag above: property
    // enabled and at least one HFR entry built.
    if (hfrEnable && available_hfr_configs.array()) {
        available_capabilities.add(
                ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
    }

    // RAW is advertised for any non-YUV sensor.
    if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
        available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
    }
    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
            available_capabilities.array(),
            available_capabilities.size());

    //aeLockAvailable to be set to true if capabilities has MANUAL_SENSOR or BURST_CAPTURE
    //Assumption is that all bayer cameras support MANUAL_SENSOR.
    uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
            ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;

    staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
            &aeLockAvailable, 1);

    //awbLockAvailable to be set to true if capabilities has MANUAL_POST_PROCESSING or
    //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
    uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
            ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;

    staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
            &awbLockAvailable, 1);

    // A single reprocess input stream is supported.
    int32_t max_input_streams = 1;
    staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
                      &max_input_streams,
                      1);

    /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
    int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
            HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
            HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
            HAL_PIXEL_FORMAT_YCbCr_420_888};
    staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
                      io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));

    // Settings applied within one frame of being submitted.
    int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
    staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
                      &max_latency,
                      1);
   7559 
#ifndef USE_HAL_3_3
    // Post-RAW sensitivity boost range (ISP digital gain), HAL > 3.3 only.
    int32_t isp_sensitivity_range[2];
    isp_sensitivity_range[0] =
        gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
    isp_sensitivity_range[1] =
        gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
    staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
                      isp_sensitivity_range,
                      sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
#endif

    // Fixed per-tag mode lists advertised by this HAL.
    uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
                                           ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
    staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
            available_hot_pixel_modes,
            sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));

    uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
                                         ANDROID_SHADING_MODE_FAST,
                                         ANDROID_SHADING_MODE_HIGH_QUALITY};
    staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
                      available_shading_modes,
                      3);

    uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
                                                  ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
                      available_lens_shading_map_modes,
                      2);

    uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
                                      ANDROID_EDGE_MODE_FAST,
                                      ANDROID_EDGE_MODE_HIGH_QUALITY,
                                      ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
    staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
            available_edge_modes,
            sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));

    uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
                                           ANDROID_NOISE_REDUCTION_MODE_FAST,
                                           ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
                                           ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
                                           ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
    staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
            available_noise_red_modes,
            sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));

    uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
                                         ANDROID_TONEMAP_MODE_FAST,
                                         ANDROID_TONEMAP_MODE_HIGH_QUALITY};
    staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
            available_tonemap_modes,
            sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));

    // Only OFF is supported for hot pixel map reporting.
    uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
            available_hot_pixel_map_modes,
            sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
   7618 
    // Reference illuminants for the two color calibration sets; each is
    // published only when the vendor value maps to a framework enum.
    val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
            gCamCapability[cameraId]->reference_illuminant1);
    if (NAME_NOT_FOUND != val) {
        uint8_t fwkReferenceIlluminant = (uint8_t)val;
        staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
    }

    val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
            gCamCapability[cameraId]->reference_illuminant2);
    if (NAME_NOT_FOUND != val) {
        uint8_t fwkReferenceIlluminant = (uint8_t)val;
        staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
    }

    // Color pipeline matrices from the capability blob. The (void *) hop
    // avoids a strict cast between the vendor rational layout and
    // camera_metadata_rational_t — presumably layout-compatible; the cast
    // relies on that.
    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->forward_matrix1,
            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);

    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->forward_matrix2,
            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);

    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->color_transform1,
            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);

    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->color_transform2,
            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);

    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->calibration_transform1,
            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);

    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->calibration_transform2,
            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
   7656 
    // Tags an application may set in a capture request. AF_REGIONS is
    // appended only when the device has more than one focus mode (i.e. a
    // movable lens).
    int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
       ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
       ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
       ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
       ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
       ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
       ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
       ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
       ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
       ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
       ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
       ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
       ANDROID_JPEG_GPS_COORDINATES,
       ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
       ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
       ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
       ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
       ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
       ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
       ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
       ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
       ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
       ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
#ifndef USE_HAL_3_3
       ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
#endif
       ANDROID_STATISTICS_FACE_DETECT_MODE,
       ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
       ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
       ANDROID_BLACK_LEVEL_LOCK };

    size_t request_keys_cnt =
            sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
    Vector<int32_t> available_request_keys;
    available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
        available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
    }

    staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
            available_request_keys.array(), available_request_keys.size());
   7700 
    // Tags this HAL reports in capture results. Conditional additions below:
    // AF_REGIONS (movable lens), noise profile / green split (RAW sensors),
    // face data (depends on advertised face-detect mode), and dynamic
    // black/white levels (sensors with optical black regions, HAL > 3.3).
    int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
       ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
       ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
       ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
       ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
       ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
       ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
       ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
       ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
       ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
       ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
       ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
       ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
       ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
       ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
       ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
       ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
       ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
       ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
       ANDROID_STATISTICS_FACE_SCORES,
#ifndef USE_HAL_3_3
       ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
#endif
       };

    size_t result_keys_cnt =
            sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);

    Vector<int32_t> available_result_keys;
    available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
        available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
    }
    if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
        available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
        available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
    }
    // Face detect: mode 1 adds rectangles/scores; modes 2 and 3 add
    // IDs/landmarks. (NOTE(review): rectangles/scores are already in the
    // basic list above, so mode 1 adds duplicates — verify intended.)
    if (supportedFaceDetectMode == 1) {
        available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
        available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
    } else if ((supportedFaceDetectMode == 2) ||
            (supportedFaceDetectMode == 3)) {
        available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
        available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
    }
#ifndef USE_HAL_3_3
    if (hasBlackRegions) {
        available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
        available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
    }
#endif
    staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
            available_result_keys.array(), available_result_keys.size());
   7756 
   7757     int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
   7758        ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
   7759        ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
   7760        ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
   7761        ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
   7762        ANDROID_SCALER_CROPPING_TYPE,
   7763        ANDROID_SYNC_MAX_LATENCY,
   7764        ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
   7765        ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
   7766        ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
   7767        ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
   7768        ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
   7769        ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
   7770        ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
   7771        ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
   7772        ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
   7773        ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
   7774        ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
   7775        ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
   7776        ANDROID_LENS_FACING,
   7777        ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
   7778        ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
   7779        ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
   7780        ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
   7781        ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
   7782        ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
   7783        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
   7784        /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
   7785        ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
   7786        ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
   7787        ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
   7788        ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
   7789        ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
   7790        ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
   7791        ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
   7792        ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
   7793        ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
   7794        ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
   7795        ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
   7796        ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
   7797        ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
   7798        ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
   7799        ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
   7800        ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
   7801        ANDROID_EDGE_AVAILABLE_EDGE_MODES,
   7802        ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
   7803        ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
   7804        ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
   7805        ANDROID_TONEMAP_MAX_CURVE_POINTS,
   7806        ANDROID_CONTROL_AVAILABLE_MODES,
   7807        ANDROID_CONTROL_AE_LOCK_AVAILABLE,
   7808        ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
   7809        ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
   7810        ANDROID_SHADING_AVAILABLE_MODES,
   7811        ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
   7812 #ifndef USE_HAL_3_3
   7813        ANDROID_SENSOR_OPAQUE_RAW_SIZE,
   7814        ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
   7815 #endif
   7816        };
   7817 
   7818     Vector<int32_t> available_characteristics_keys;
   7819     available_characteristics_keys.appendArray(characteristics_keys_basic,
   7820             sizeof(characteristics_keys_basic)/sizeof(int32_t));
   7821 #ifndef USE_HAL_3_3
   7822     if (hasBlackRegions) {
   7823         available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
   7824     }
   7825 #endif
   7826     staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
   7827                       available_characteristics_keys.array(),
   7828                       available_characteristics_keys.size());
   7829 
   7830     /*available stall durations depend on the hw + sw and will be different for different devices */
   7831     /*have to add for raw after implementation*/
   7832     int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
   7833     size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
   7834 
   7835     Vector<int64_t> available_stall_durations;
   7836     for (uint32_t j = 0; j < stall_formats_count; j++) {
   7837         if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
   7838             for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
   7839                     gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
   7840                 available_stall_durations.add(stall_formats[j]);
   7841                 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
   7842                 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
   7843                 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
   7844           }
   7845         } else {
   7846             for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
   7847                     gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
   7848                 available_stall_durations.add(stall_formats[j]);
   7849                 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
   7850                 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
   7851                 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
   7852             }
   7853         }
   7854     }
   7855     staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
   7856                       available_stall_durations.array(),
   7857                       available_stall_durations.size());
   7858 
   7859     //QCAMERA3_OPAQUE_RAW
   7860     uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
   7861     cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
   7862     switch (gCamCapability[cameraId]->opaque_raw_fmt) {
   7863     case LEGACY_RAW:
   7864         if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
   7865             fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
   7866         else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
   7867             fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
   7868         else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
   7869             fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
   7870         raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
   7871         break;
   7872     case MIPI_RAW:
   7873         if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
   7874             fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
   7875         else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
   7876             fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
   7877         else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
   7878             fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
   7879         raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
   7880         break;
   7881     default:
   7882         LOGE("unknown opaque_raw_format %d",
   7883                 gCamCapability[cameraId]->opaque_raw_fmt);
   7884         break;
   7885     }
   7886     staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
   7887 
   7888     Vector<int32_t> strides;
   7889     for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
   7890             gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
   7891         cam_stream_buf_plane_info_t buf_planes;
   7892         strides.add(gCamCapability[cameraId]->raw_dim[i].width);
   7893         strides.add(gCamCapability[cameraId]->raw_dim[i].height);
   7894         mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
   7895             &gCamCapability[cameraId]->padding_info, &buf_planes);
   7896         strides.add(buf_planes.plane_info.mp[0].stride);
   7897     }
   7898     staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
   7899             strides.size());
   7900 
   7901     //Video HDR default
   7902     if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
   7903             (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
   7904             CAM_QCOM_FEATURE_ZIGZAG_VIDEO_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
   7905         int32_t vhdr_mode[] = {
   7906                 QCAMERA3_VIDEO_HDR_MODE_OFF,
   7907                 QCAMERA3_VIDEO_HDR_MODE_ON};
   7908 
   7909         size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
   7910         staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
   7911                     vhdr_mode, vhdr_mode_count);
   7912     }
   7913 
   7914     staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
   7915             (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
   7916             sizeof(gCamCapability[cameraId]->related_cam_calibration));
   7917 
   7918     uint8_t isMonoOnly =
   7919             (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
   7920     staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
   7921             &isMonoOnly, 1);
   7922 
   7923 #ifndef USE_HAL_3_3
   7924     Vector<int32_t> opaque_size;
   7925     for (size_t j = 0; j < scalar_formats_count; j++) {
   7926         if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
   7927             for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
   7928                     gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
   7929                 cam_stream_buf_plane_info_t buf_planes;
   7930 
   7931                 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
   7932                          &gCamCapability[cameraId]->padding_info, &buf_planes);
   7933 
   7934                 if (rc == 0) {
   7935                     opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
   7936                     opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
   7937                     opaque_size.add(buf_planes.plane_info.frame_len);
   7938                 }else {
   7939                     LOGE("raw frame calculation failed!");
   7940                 }
   7941             }
   7942         }
   7943     }
   7944 
   7945     if ((opaque_size.size() > 0) &&
   7946             (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
   7947         staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
   7948     else
   7949         LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
   7950 #endif
   7951 
   7952     if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
   7953         int32_t avail_ir_modes[CAM_IR_MODE_MAX];
   7954         size = 0;
   7955         count = CAM_IR_MODE_MAX;
   7956         count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
   7957         for (size_t i = 0; i < count; i++) {
   7958             int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
   7959                     gCamCapability[cameraId]->supported_ir_modes[i]);
   7960             if (NAME_NOT_FOUND != val) {
   7961                 avail_ir_modes[size] = (int32_t)val;
   7962                 size++;
   7963             }
   7964         }
   7965         staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
   7966                 avail_ir_modes, size);
   7967     }
   7968 
   7969     gStaticMetadata[cameraId] = staticInfo.release();
   7970     return rc;
   7971 }
   7972 
   7973 /*===========================================================================
   7974  * FUNCTION   : makeTable
   7975  *
   7976  * DESCRIPTION: make a table of sizes
   7977  *
   7978  * PARAMETERS :
   7979  *
   7980  *
   7981  *==========================================================================*/
   7982 void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
   7983         size_t max_size, int32_t *sizeTable)
   7984 {
   7985     size_t j = 0;
   7986     if (size > max_size) {
   7987        size = max_size;
   7988     }
   7989     for (size_t i = 0; i < size; i++) {
   7990         sizeTable[j] = dimTable[i].width;
   7991         sizeTable[j+1] = dimTable[i].height;
   7992         j+=2;
   7993     }
   7994 }
   7995 
   7996 /*===========================================================================
   7997  * FUNCTION   : makeFPSTable
   7998  *
   7999  * DESCRIPTION: make a table of fps ranges
   8000  *
   8001  * PARAMETERS :
   8002  *
   8003  *==========================================================================*/
   8004 void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
   8005         size_t max_size, int32_t *fpsRangesTable)
   8006 {
   8007     size_t j = 0;
   8008     if (size > max_size) {
   8009        size = max_size;
   8010     }
   8011     for (size_t i = 0; i < size; i++) {
   8012         fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
   8013         fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
   8014         j+=2;
   8015     }
   8016 }
   8017 
   8018 /*===========================================================================
   8019  * FUNCTION   : makeOverridesList
   8020  *
   8021  * DESCRIPTION: make a list of scene mode overrides
   8022  *
   8023  * PARAMETERS :
   8024  *
   8025  *
   8026  *==========================================================================*/
   8027 void QCamera3HardwareInterface::makeOverridesList(
   8028         cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
   8029         uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
   8030 {
   8031     /*daemon will give a list of overrides for all scene modes.
   8032       However we should send the fwk only the overrides for the scene modes
   8033       supported by the framework*/
   8034     size_t j = 0;
   8035     if (size > max_size) {
   8036        size = max_size;
   8037     }
   8038     size_t focus_count = CAM_FOCUS_MODE_MAX;
   8039     focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
   8040             focus_count);
   8041     for (size_t i = 0; i < size; i++) {
   8042         bool supt = false;
   8043         size_t index = supported_indexes[i];
   8044         overridesList[j] = gCamCapability[camera_id]->flash_available ?
   8045                 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
   8046         int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
   8047                 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
   8048                 overridesTable[index].awb_mode);
   8049         if (NAME_NOT_FOUND != val) {
   8050             overridesList[j+1] = (uint8_t)val;
   8051         }
   8052         uint8_t focus_override = overridesTable[index].af_mode;
   8053         for (size_t k = 0; k < focus_count; k++) {
   8054            if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
   8055               supt = true;
   8056               break;
   8057            }
   8058         }
   8059         if (supt) {
   8060             val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
   8061                     focus_override);
   8062             if (NAME_NOT_FOUND != val) {
   8063                 overridesList[j+2] = (uint8_t)val;
   8064             }
   8065         } else {
   8066            overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
   8067         }
   8068         j+=3;
   8069     }
   8070 }
   8071 
   8072 /*===========================================================================
   8073  * FUNCTION   : filterJpegSizes
   8074  *
   8075  * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
   8076  *              could be downscaled to
   8077  *
   8078  * PARAMETERS :
   8079  *
   8080  * RETURN     : length of jpegSizes array
   8081  *==========================================================================*/
   8082 
   8083 size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
   8084         size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
   8085         uint8_t downscale_factor)
   8086 {
   8087     if (0 == downscale_factor) {
   8088         downscale_factor = 1;
   8089     }
   8090 
   8091     int32_t min_width = active_array_size.width / downscale_factor;
   8092     int32_t min_height = active_array_size.height / downscale_factor;
   8093     size_t jpegSizesCnt = 0;
   8094     if (processedSizesCnt > maxCount) {
   8095         processedSizesCnt = maxCount;
   8096     }
   8097     for (size_t i = 0; i < processedSizesCnt; i+=2) {
   8098         if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
   8099             jpegSizes[jpegSizesCnt] = processedSizes[i];
   8100             jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
   8101             jpegSizesCnt += 2;
   8102         }
   8103     }
   8104     return jpegSizesCnt;
   8105 }
   8106 
   8107 /*===========================================================================
   8108  * FUNCTION   : computeNoiseModelEntryS
   8109  *
   8110  * DESCRIPTION: function to map a given sensitivity to the S noise
   8111  *              model parameters in the DNG noise model.
   8112  *
   8113  * PARAMETERS : sens : the sensor sensitivity
   8114  *
   8115  ** RETURN    : S (sensor amplification) noise
   8116  *
   8117  *==========================================================================*/
   8118 double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
   8119     double s = gCamCapability[mCameraId]->gradient_S * sens +
   8120             gCamCapability[mCameraId]->offset_S;
   8121     return ((s < 0.0) ? 0.0 : s);
   8122 }
   8123 
   8124 /*===========================================================================
   8125  * FUNCTION   : computeNoiseModelEntryO
   8126  *
   8127  * DESCRIPTION: function to map a given sensitivity to the O noise
   8128  *              model parameters in the DNG noise model.
   8129  *
   8130  * PARAMETERS : sens : the sensor sensitivity
   8131  *
   8132  ** RETURN    : O (sensor readout) noise
   8133  *
   8134  *==========================================================================*/
   8135 double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
   8136     int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
   8137     double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
   8138             1.0 : (1.0 * sens / max_analog_sens);
   8139     double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
   8140             gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
   8141     return ((o < 0.0) ? 0.0 : o);
   8142 }
   8143 
   8144 /*===========================================================================
   8145  * FUNCTION   : getSensorSensitivity
   8146  *
   8147  * DESCRIPTION: convert iso_mode to an integer value
   8148  *
   8149  * PARAMETERS : iso_mode : the iso_mode supported by sensor
   8150  *
   8151  ** RETURN    : sensitivity supported by sensor
   8152  *
   8153  *==========================================================================*/
   8154 int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
   8155 {
   8156     int32_t sensitivity;
   8157 
   8158     switch (iso_mode) {
   8159     case CAM_ISO_MODE_100:
   8160         sensitivity = 100;
   8161         break;
   8162     case CAM_ISO_MODE_200:
   8163         sensitivity = 200;
   8164         break;
   8165     case CAM_ISO_MODE_400:
   8166         sensitivity = 400;
   8167         break;
   8168     case CAM_ISO_MODE_800:
   8169         sensitivity = 800;
   8170         break;
   8171     case CAM_ISO_MODE_1600:
   8172         sensitivity = 1600;
   8173         break;
   8174     default:
   8175         sensitivity = -1;
   8176         break;
   8177     }
   8178     return sensitivity;
   8179 }
   8180 
   8181 /*===========================================================================
   8182  * FUNCTION   : getCamInfo
   8183  *
   8184  * DESCRIPTION: query camera capabilities
   8185  *
   8186  * PARAMETERS :
   8187  *   @cameraId  : camera Id
   8188  *   @info      : camera info struct to be filled in with camera capabilities
   8189  *
   8190  * RETURN     : int type of status
   8191  *              NO_ERROR  -- success
   8192  *              none-zero failure code
   8193  *==========================================================================*/
   8194 int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
   8195         struct camera_info *info)
   8196 {
   8197     ATRACE_CALL();
   8198     int rc = 0;
   8199 
   8200     pthread_mutex_lock(&gCamLock);
   8201     if (NULL == gCamCapability[cameraId]) {
   8202         rc = initCapabilities(cameraId);
   8203         if (rc < 0) {
   8204             pthread_mutex_unlock(&gCamLock);
   8205             return rc;
   8206         }
   8207     }
   8208 
   8209     if (NULL == gStaticMetadata[cameraId]) {
   8210         rc = initStaticMetadata(cameraId);
   8211         if (rc < 0) {
   8212             pthread_mutex_unlock(&gCamLock);
   8213             return rc;
   8214         }
   8215     }
   8216 
   8217     switch(gCamCapability[cameraId]->position) {
   8218     case CAM_POSITION_BACK:
   8219     case CAM_POSITION_BACK_AUX:
   8220         info->facing = CAMERA_FACING_BACK;
   8221         break;
   8222 
   8223     case CAM_POSITION_FRONT:
   8224     case CAM_POSITION_FRONT_AUX:
   8225         info->facing = CAMERA_FACING_FRONT;
   8226         break;
   8227 
   8228     default:
   8229         LOGE("Unknown position type %d for camera id:%d",
   8230                 gCamCapability[cameraId]->position, cameraId);
   8231         rc = -1;
   8232         break;
   8233     }
   8234 
   8235 
   8236     info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
   8237 #ifndef USE_HAL_3_3
   8238     info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
   8239 #else
   8240     info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
   8241 #endif
   8242     info->static_camera_characteristics = gStaticMetadata[cameraId];
   8243 
   8244     //For now assume both cameras can operate independently.
   8245     info->conflicting_devices = NULL;
   8246     info->conflicting_devices_length = 0;
   8247 
   8248     //resource cost is 100 * MIN(1.0, m/M),
   8249     //where m is throughput requirement with maximum stream configuration
   8250     //and M is CPP maximum throughput.
   8251     float max_fps = 0.0;
   8252     for (uint32_t i = 0;
   8253             i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
   8254         if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
   8255             max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
   8256     }
   8257     float ratio = 1.0 * MAX_PROCESSED_STREAMS *
   8258             gCamCapability[cameraId]->active_array_size.width *
   8259             gCamCapability[cameraId]->active_array_size.height * max_fps /
   8260             gCamCapability[cameraId]->max_pixel_bandwidth;
   8261     info->resource_cost = 100 * MIN(1.0, ratio);
   8262     LOGI("camera %d resource cost is %d", cameraId,
   8263             info->resource_cost);
   8264 
   8265     pthread_mutex_unlock(&gCamLock);
   8266     return rc;
   8267 }
   8268 
   8269 /*===========================================================================
   8270  * FUNCTION   : translateCapabilityToMetadata
   8271  *
   8272  * DESCRIPTION: translate the capability into camera_metadata_t
   8273  *
   8274  * PARAMETERS : type of the request
   8275  *
   8276  *
   8277  * RETURN     : success: camera_metadata_t*
   8278  *              failure: NULL
   8279  *
   8280  *==========================================================================*/
   8281 camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
   8282 {
   8283     if (mDefaultMetadata[type] != NULL) {
   8284         return mDefaultMetadata[type];
   8285     }
   8286     //first time we are handling this request
   8287     //fill up the metadata structure using the wrapper class
   8288     CameraMetadata settings;
   8289     //translate from cam_capability_t to camera_metadata_tag_t
   8290     static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
   8291     settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
   8292     int32_t defaultRequestID = 0;
   8293     settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
   8294 
   8295     /* OIS disable */
   8296     char ois_prop[PROPERTY_VALUE_MAX];
   8297     memset(ois_prop, 0, sizeof(ois_prop));
   8298     property_get("persist.camera.ois.disable", ois_prop, "0");
   8299     uint8_t ois_disable = (uint8_t)atoi(ois_prop);
   8300 
   8301     /* Force video to use OIS */
   8302     char videoOisProp[PROPERTY_VALUE_MAX];
   8303     memset(videoOisProp, 0, sizeof(videoOisProp));
   8304     property_get("persist.camera.ois.video", videoOisProp, "1");
   8305     uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
   8306     uint8_t controlIntent = 0;
   8307     uint8_t focusMode;
   8308     uint8_t vsMode;
   8309     uint8_t optStabMode;
   8310     uint8_t cacMode;
   8311     uint8_t edge_mode;
   8312     uint8_t noise_red_mode;
   8313     uint8_t tonemap_mode;
   8314     bool highQualityModeEntryAvailable = FALSE;
   8315     bool fastModeEntryAvailable = FALSE;
   8316     vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
   8317     optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
   8318     switch (type) {
   8319       case CAMERA3_TEMPLATE_PREVIEW:
   8320         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
   8321         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
   8322         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
   8323         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
   8324         edge_mode = ANDROID_EDGE_MODE_FAST;
   8325         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
   8326         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
   8327         break;
   8328       case CAMERA3_TEMPLATE_STILL_CAPTURE:
   8329         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
   8330         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
   8331         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
   8332         edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
   8333         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
   8334         tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
   8335         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
   8336         // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
   8337         for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
   8338             if (gCamCapability[mCameraId]->aberration_modes[i] ==
   8339                     CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
   8340                 highQualityModeEntryAvailable = TRUE;
   8341             } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
   8342                     CAM_COLOR_CORRECTION_ABERRATION_FAST) {
   8343                 fastModeEntryAvailable = TRUE;
   8344             }
   8345         }
   8346         if (highQualityModeEntryAvailable) {
   8347             cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
   8348         } else if (fastModeEntryAvailable) {
   8349             cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
   8350         }
   8351         break;
   8352       case CAMERA3_TEMPLATE_VIDEO_RECORD:
   8353         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
   8354         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
   8355         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
   8356         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
   8357         edge_mode = ANDROID_EDGE_MODE_FAST;
   8358         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
   8359         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
   8360         if (forceVideoOis)
   8361             optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
   8362         break;
   8363       case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
   8364         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
   8365         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
   8366         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
   8367         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
   8368         edge_mode = ANDROID_EDGE_MODE_FAST;
   8369         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
   8370         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
   8371         if (forceVideoOis)
   8372             optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
   8373         break;
   8374       case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
   8375         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
   8376         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
   8377         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
   8378         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
   8379         edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
   8380         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
   8381         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
   8382         break;
   8383       case CAMERA3_TEMPLATE_MANUAL:
   8384         edge_mode = ANDROID_EDGE_MODE_FAST;
   8385         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
   8386         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
   8387         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
   8388         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
   8389         focusMode = ANDROID_CONTROL_AF_MODE_OFF;
   8390         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
   8391         break;
   8392       default:
   8393         edge_mode = ANDROID_EDGE_MODE_FAST;
   8394         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
   8395         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
   8396         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
   8397         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
   8398         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
   8399         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
   8400         break;
   8401     }
   8402     // Set CAC to OFF if underlying device doesn't support
   8403     if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
   8404         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
   8405     }
   8406     settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
   8407     settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
   8408     settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
   8409     if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
   8410         focusMode = ANDROID_CONTROL_AF_MODE_OFF;
   8411     }
   8412     settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
   8413 
   8414     if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
   8415             gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
   8416         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
   8417     else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
   8418             gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
   8419             || ois_disable)
   8420         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
   8421     settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
   8422 
   8423     settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
   8424             &gCamCapability[mCameraId]->exposure_compensation_default, 1);
   8425 
   8426     static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
   8427     settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
   8428 
   8429     static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
   8430     settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
   8431 
   8432     static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
   8433     settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
   8434 
   8435     static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
   8436     settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
   8437 
   8438     static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
   8439     settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
   8440 
   8441     static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
   8442     settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
   8443 
   8444     static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
   8445     settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
   8446 
   8447     /*flash*/
   8448     static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
   8449     settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
   8450 
   8451     static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
   8452     settings.update(ANDROID_FLASH_FIRING_POWER,
   8453             &flashFiringLevel, 1);
   8454 
   8455     /* lens */
   8456     float default_aperture = gCamCapability[mCameraId]->apertures[0];
   8457     settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
   8458 
   8459     if (gCamCapability[mCameraId]->filter_densities_count) {
   8460         float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
   8461         settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
   8462                         gCamCapability[mCameraId]->filter_densities_count);
   8463     }
   8464 
   8465     float default_focal_length = gCamCapability[mCameraId]->focal_length;
   8466     settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
   8467 
   8468     if (focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
   8469         float default_focus_distance = 0;
   8470         settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
   8471     }
   8472 
   8473     static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
   8474     settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
   8475 
   8476     static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
   8477     settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
   8478 
   8479     static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
   8480     settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
   8481 
   8482     /* face detection (default to OFF) */
   8483     static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
   8484     settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
   8485 
   8486     static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
   8487     settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
   8488 
   8489     static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
   8490     settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
   8491 
   8492     static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
   8493     settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
   8494 
   8495     static const uint8_t lensShadingMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
   8496     settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMode, 1);
   8497 
   8498     static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
   8499     settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
   8500 
   8501     /* Exposure time(Update the Min Exposure Time)*/
   8502     int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
   8503     settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
   8504 
   8505     /* frame duration */
   8506     static const int64_t default_frame_duration = NSEC_PER_33MSEC;
   8507     settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
   8508 
   8509     /* sensitivity */
   8510     static const int32_t default_sensitivity = 100;
   8511     settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
   8512 #ifndef USE_HAL_3_3
   8513     static const int32_t default_isp_sensitivity =
   8514             gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
   8515     settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
   8516 #endif
   8517 
   8518     /*edge mode*/
   8519     settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
   8520 
   8521     /*noise reduction mode*/
   8522     settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
   8523 
   8524     /*color correction mode*/
   8525     static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
   8526     settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
   8527 
   8528     /*transform matrix mode*/
   8529     settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
   8530 
   8531     int32_t scaler_crop_region[4];
   8532     scaler_crop_region[0] = 0;
   8533     scaler_crop_region[1] = 0;
   8534     scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
   8535     scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
   8536     settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
   8537 
   8538     static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
   8539     settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
   8540 
   8541     /*focus distance*/
   8542     float focus_distance = 0.0;
   8543     settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
   8544 
   8545     /*target fps range: use maximum range for picture, and maximum fixed range for video*/
   8546     float max_range = 0.0;
   8547     float max_fixed_fps = 0.0;
   8548     int32_t fps_range[2] = {0, 0};
   8549     for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
   8550             i++) {
   8551         float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
   8552             gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
   8553         if (type == CAMERA3_TEMPLATE_PREVIEW ||
   8554                 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
   8555                 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
   8556             if (range > max_range) {
   8557                 fps_range[0] =
   8558                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
   8559                 fps_range[1] =
   8560                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
   8561                 max_range = range;
   8562             }
   8563         } else {
   8564             if (range < 0.01 && max_fixed_fps <
   8565                     gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
   8566                 fps_range[0] =
   8567                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
   8568                 fps_range[1] =
   8569                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
   8570                 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
   8571             }
   8572         }
   8573     }
   8574     settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
   8575 
   8576     /*precapture trigger*/
   8577     uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
   8578     settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
   8579 
   8580     /*af trigger*/
   8581     uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
   8582     settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
   8583 
   8584     /* ae & af regions */
   8585     int32_t active_region[] = {
   8586             gCamCapability[mCameraId]->active_array_size.left,
   8587             gCamCapability[mCameraId]->active_array_size.top,
   8588             gCamCapability[mCameraId]->active_array_size.left +
   8589                     gCamCapability[mCameraId]->active_array_size.width,
   8590             gCamCapability[mCameraId]->active_array_size.top +
   8591                     gCamCapability[mCameraId]->active_array_size.height,
   8592             0};
   8593     settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
   8594             sizeof(active_region) / sizeof(active_region[0]));
   8595     settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
   8596             sizeof(active_region) / sizeof(active_region[0]));
   8597 
   8598     /* black level lock */
   8599     uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
   8600     settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
   8601 
   8602     /* lens shading map mode */
   8603     uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
   8604     if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
   8605         shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
   8606     }
   8607     settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
   8608 
   8609     //special defaults for manual template
   8610     if (type == CAMERA3_TEMPLATE_MANUAL) {
   8611         static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
   8612         settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
   8613 
   8614         static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
   8615         settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
   8616 
   8617         static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
   8618         settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
   8619 
   8620         static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
   8621         settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
   8622 
   8623         static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
   8624         settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
   8625 
   8626         static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
   8627         settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
   8628     }
   8629 
   8630 
   8631     /* TNR
   8632      * We'll use this location to determine which modes TNR will be set.
   8633      * We will enable TNR to be on if either of the Preview/Video stream requires TNR
   8634      * This is not to be confused with linking on a per stream basis that decision
   8635      * is still on per-session basis and will be handled as part of config stream
   8636      */
   8637     uint8_t tnr_enable = 0;
   8638 
   8639     if (m_bTnrPreview || m_bTnrVideo) {
   8640 
   8641         switch (type) {
   8642             case CAMERA3_TEMPLATE_VIDEO_RECORD:
   8643                     tnr_enable = 1;
   8644                     break;
   8645 
   8646             default:
   8647                     tnr_enable = 0;
   8648                     break;
   8649         }
   8650 
   8651         int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
   8652         settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
   8653         settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
   8654 
   8655         LOGD("TNR:%d with process plate %d for template:%d",
   8656                              tnr_enable, tnr_process_type, type);
   8657     }
   8658 
   8659     //Update Link tags to default
   8660     int32_t sync_type = CAM_TYPE_STANDALONE;
   8661     settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
   8662 
   8663     int32_t is_main = 0; //this doesn't matter as app should overwrite
   8664     settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
   8665 
   8666     settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &is_main, 1);
   8667 
   8668     /* CDS default */
   8669     char prop[PROPERTY_VALUE_MAX];
   8670     memset(prop, 0, sizeof(prop));
   8671     property_get("persist.camera.CDS", prop, "Auto");
   8672     cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
   8673     cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
   8674     if (CAM_CDS_MODE_MAX == cds_mode) {
   8675         cds_mode = CAM_CDS_MODE_AUTO;
   8676     }
   8677 
   8678     /* Disabling CDS in templates which have TNR enabled*/
   8679     if (tnr_enable)
   8680         cds_mode = CAM_CDS_MODE_OFF;
   8681 
   8682     int32_t mode = cds_mode;
   8683     settings.update(QCAMERA3_CDS_MODE, &mode, 1);
   8684 
   8685     int32_t hdr_mode = (int32_t)QCAMERA3_VIDEO_HDR_MODE_OFF;
   8686     settings.update(QCAMERA3_VIDEO_HDR_MODE, &hdr_mode, 1);
   8687 
   8688     /* IR Mode Default Off */
   8689     int32_t ir_mode = (int32_t)QCAMERA3_IR_MODE_OFF;
   8690     settings.update(QCAMERA3_IR_MODE, &ir_mode, 1);
   8691 
   8692     /* Manual Convergence AEC Speed is disabled by default*/
   8693     float default_aec_speed = 0;
   8694     settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
   8695 
   8696     /* Manual Convergence AWB Speed is disabled by default*/
   8697     float default_awb_speed = 0;
   8698     settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
   8699 
   8700     mDefaultMetadata[type] = settings.release();
   8701 
   8702     return mDefaultMetadata[type];
   8703 }
   8704 
   8705 /*===========================================================================
   8706  * FUNCTION   : setFrameParameters
   8707  *
   8708  * DESCRIPTION: set parameters per frame as requested in the metadata from
   8709  *              framework
   8710  *
   8711  * PARAMETERS :
   8712  *   @request   : request that needs to be serviced
   8713  *   @streamID : Stream ID of all the requested streams
   8714  *   @blob_request: Whether this request is a blob request or not
   8715  *
   8716  * RETURN     : success: NO_ERROR
 *              failure: BAD_VALUE if any parameter entry could not be set
   8718  *==========================================================================*/
   8719 int QCamera3HardwareInterface::setFrameParameters(
   8720                     camera3_capture_request_t *request,
   8721                     cam_stream_ID_t streamID,
   8722                     int blob_request,
   8723                     uint32_t snapshotStreamId)
   8724 {
   8725     /*translate from camera_metadata_t type to parm_type_t*/
   8726     int rc = 0;
   8727     int32_t hal_version = CAM_HAL_V3;
   8728 
   8729     clear_metadata_buffer(mParameters);
   8730     if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
   8731         LOGE("Failed to set hal version in the parameters");
   8732         return BAD_VALUE;
   8733     }
   8734 
   8735     /*we need to update the frame number in the parameters*/
   8736     if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
   8737             request->frame_number)) {
   8738         LOGE("Failed to set the frame number in the parameters");
   8739         return BAD_VALUE;
   8740     }
   8741 
   8742     /* Update stream id of all the requested buffers */
   8743     if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamID)) {
   8744         LOGE("Failed to set stream type mask in the parameters");
   8745         return BAD_VALUE;
   8746     }
   8747 
   8748     if (mUpdateDebugLevel) {
   8749         uint32_t dummyDebugLevel = 0;
   8750         /* The value of dummyDebugLevel is irrelavent. On
   8751          * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
   8752         if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
   8753                 dummyDebugLevel)) {
   8754             LOGE("Failed to set UPDATE_DEBUG_LEVEL");
   8755             return BAD_VALUE;
   8756         }
   8757         mUpdateDebugLevel = false;
   8758     }
   8759 
   8760     if(request->settings != NULL){
   8761         rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
   8762         if (blob_request)
   8763             memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
   8764     }
   8765 
   8766     return rc;
   8767 }
   8768 
   8769 /*===========================================================================
   8770  * FUNCTION   : setReprocParameters
   8771  *
 * DESCRIPTION: Translate framework metadata into the HAL metadata structure,
 *              and return it.
   8774  *
   8775  * PARAMETERS :
   8776  *   @request   : request that needs to be serviced
   8777  *
   8778  * RETURN     : success: NO_ERROR
 *              failure: BAD_VALUE on invalid/missing settings, or the error
 *                       code returned by translateToHalMetadata
   8780  *==========================================================================*/
   8781 int32_t QCamera3HardwareInterface::setReprocParameters(
   8782         camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
   8783         uint32_t snapshotStreamId)
   8784 {
   8785     /*translate from camera_metadata_t type to parm_type_t*/
   8786     int rc = 0;
   8787 
   8788     if (NULL == request->settings){
   8789         LOGE("Reprocess settings cannot be NULL");
   8790         return BAD_VALUE;
   8791     }
   8792 
   8793     if (NULL == reprocParam) {
   8794         LOGE("Invalid reprocessing metadata buffer");
   8795         return BAD_VALUE;
   8796     }
   8797     clear_metadata_buffer(reprocParam);
   8798 
   8799     /*we need to update the frame number in the parameters*/
   8800     if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
   8801             request->frame_number)) {
   8802         LOGE("Failed to set the frame number in the parameters");
   8803         return BAD_VALUE;
   8804     }
   8805 
   8806     rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
   8807     if (rc < 0) {
   8808         LOGE("Failed to translate reproc request");
   8809         return rc;
   8810     }
   8811 
   8812     CameraMetadata frame_settings;
   8813     frame_settings = request->settings;
   8814     if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
   8815             frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
   8816         int32_t *crop_count =
   8817                 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
   8818         int32_t *crop_data =
   8819                 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
   8820         int32_t *roi_map =
   8821                 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
   8822         if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
   8823             cam_crop_data_t crop_meta;
   8824             memset(&crop_meta, 0, sizeof(cam_crop_data_t));
   8825             crop_meta.num_of_streams = 1;
   8826             crop_meta.crop_info[0].crop.left   = crop_data[0];
   8827             crop_meta.crop_info[0].crop.top    = crop_data[1];
   8828             crop_meta.crop_info[0].crop.width  = crop_data[2];
   8829             crop_meta.crop_info[0].crop.height = crop_data[3];
   8830 
   8831             crop_meta.crop_info[0].roi_map.left =
   8832                     roi_map[0];
   8833             crop_meta.crop_info[0].roi_map.top =
   8834                     roi_map[1];
   8835             crop_meta.crop_info[0].roi_map.width =
   8836                     roi_map[2];
   8837             crop_meta.crop_info[0].roi_map.height =
   8838                     roi_map[3];
   8839 
   8840             if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
   8841                 rc = BAD_VALUE;
   8842             }
   8843             LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
   8844                     request->input_buffer->stream,
   8845                     crop_meta.crop_info[0].crop.left,
   8846                     crop_meta.crop_info[0].crop.top,
   8847                     crop_meta.crop_info[0].crop.width,
   8848                     crop_meta.crop_info[0].crop.height);
   8849             LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
   8850                     request->input_buffer->stream,
   8851                     crop_meta.crop_info[0].roi_map.left,
   8852                     crop_meta.crop_info[0].roi_map.top,
   8853                     crop_meta.crop_info[0].roi_map.width,
   8854                     crop_meta.crop_info[0].roi_map.height);
   8855             } else {
   8856                 LOGE("Invalid reprocess crop count %d!", *crop_count);
   8857             }
   8858     } else {
   8859         LOGE("No crop data from matching output stream");
   8860     }
   8861 
   8862     /* These settings are not needed for regular requests so handle them specially for
   8863        reprocess requests; information needed for EXIF tags */
   8864     if (frame_settings.exists(ANDROID_FLASH_MODE)) {
   8865         int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
   8866                     (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
   8867         if (NAME_NOT_FOUND != val) {
   8868             uint32_t flashMode = (uint32_t)val;
   8869             if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
   8870                 rc = BAD_VALUE;
   8871             }
   8872         } else {
   8873             LOGE("Could not map fwk flash mode %d to correct hal flash mode",
   8874                     frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
   8875         }
   8876     } else {
   8877         LOGH("No flash mode in reprocess settings");
   8878     }
   8879 
   8880     if (frame_settings.exists(ANDROID_FLASH_STATE)) {
   8881         int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
   8882         if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
   8883             rc = BAD_VALUE;
   8884         }
   8885     } else {
   8886         LOGH("No flash state in reprocess settings");
   8887     }
   8888 
   8889     if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
   8890         uint8_t *reprocessFlags =
   8891             frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
   8892         if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
   8893                 *reprocessFlags)) {
   8894                 rc = BAD_VALUE;
   8895         }
   8896     }
   8897 
   8898     // Add metadata which reprocess needs
   8899     if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
   8900         cam_reprocess_info_t *repro_info =
   8901                 (cam_reprocess_info_t *)frame_settings.find
   8902                 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
   8903         ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
   8904                 repro_info->sensor_crop_info);
   8905         ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
   8906                 repro_info->camif_crop_info);
   8907         ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
   8908                 repro_info->isp_crop_info);
   8909         ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
   8910                 repro_info->cpp_crop_info);
   8911         ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
   8912                 repro_info->af_focal_length_ratio);
   8913         ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
   8914                 repro_info->pipeline_flip);
   8915         ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
   8916                 repro_info->af_roi);
   8917         ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
   8918                 repro_info->dyn_mask);
   8919         /* If there is ANDROID_JPEG_ORIENTATION in frame setting,
   8920            CAM_INTF_PARM_ROTATION metadata then has been added in
   8921            translateToHalMetadata. HAL need to keep this new rotation
   8922            metadata. Otherwise, the old rotation info saved in the vendor tag
   8923            would be used */
   8924         IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
   8925                 CAM_INTF_PARM_ROTATION, reprocParam) {
   8926             LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
   8927         } else {
   8928             ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
   8929                     repro_info->rotation_info);
   8930         }
   8931     }
   8932 
   8933     /* Add additional JPEG cropping information. App add QCAMERA3_JPEG_ENCODE_CROP_RECT
   8934        to ask for cropping and use ROI for downscale/upscale during HW JPEG encoding.
   8935        roi.width and roi.height would be the final JPEG size.
   8936        For now, HAL only checks this for reprocess request */
   8937     if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
   8938             frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
   8939         uint8_t *enable =
   8940             frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
   8941         if (*enable == TRUE) {
   8942             int32_t *crop_data =
   8943                     frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
   8944             cam_stream_crop_info_t crop_meta;
   8945             memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
   8946             crop_meta.stream_id = 0;
   8947             crop_meta.crop.left   = crop_data[0];
   8948             crop_meta.crop.top    = crop_data[1];
   8949             crop_meta.crop.width  = crop_data[2];
   8950             crop_meta.crop.height = crop_data[3];
   8951             // The JPEG crop roi should match cpp output size
   8952             IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
   8953                     CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
   8954                 crop_meta.roi_map.left = 0;
   8955                 crop_meta.roi_map.top = 0;
   8956                 crop_meta.roi_map.width = cpp_crop->crop.width;
   8957                 crop_meta.roi_map.height = cpp_crop->crop.height;
   8958             }
   8959             ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
   8960                     crop_meta);
   8961             LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
   8962                     crop_meta.crop.left, crop_meta.crop.top,
   8963                     crop_meta.crop.width, crop_meta.crop.height, mCameraId);
   8964             LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
   8965                     crop_meta.roi_map.left, crop_meta.roi_map.top,
   8966                     crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
   8967 
   8968             // Add JPEG scale information
   8969             cam_dimension_t scale_dim;
   8970             memset(&scale_dim, 0, sizeof(cam_dimension_t));
   8971             if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
   8972                 int32_t *roi =
   8973                     frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
   8974                 scale_dim.width = roi[2];
   8975                 scale_dim.height = roi[3];
   8976                 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
   8977                     scale_dim);
   8978                 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
   8979                     scale_dim.width, scale_dim.height, mCameraId);
   8980             }
   8981         }
   8982     }
   8983 
   8984     return rc;
   8985 }
   8986 
   8987 /*===========================================================================
   8988  * FUNCTION   : saveRequestSettings
   8989  *
   8990  * DESCRIPTION: Add any settings that might have changed to the request settings
   8991  *              and save the settings to be applied on the frame
   8992  *
   8993  * PARAMETERS :
   8994  *   @jpegMetadata : the extracted and/or modified jpeg metadata
   8995  *   @request      : request with initial settings
   8996  *
   8997  * RETURN     :
   8998  * camera_metadata_t* : pointer to the saved request settings
   8999  *==========================================================================*/
   9000 camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
   9001         const CameraMetadata &jpegMetadata,
   9002         camera3_capture_request_t *request)
   9003 {
   9004     camera_metadata_t *resultMetadata;
   9005     CameraMetadata camMetadata;
   9006     camMetadata = request->settings;
   9007 
   9008     if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
   9009         int32_t thumbnail_size[2];
   9010         thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
   9011         thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
   9012         camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
   9013                 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
   9014     }
   9015 
   9016     if (request->input_buffer != NULL) {
   9017         uint8_t reprocessFlags = 1;
   9018         camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
   9019                 (uint8_t*)&reprocessFlags,
   9020                 sizeof(reprocessFlags));
   9021     }
   9022 
   9023     resultMetadata = camMetadata.release();
   9024     return resultMetadata;
   9025 }
   9026 
   9027 /*===========================================================================
   9028  * FUNCTION   : setHalFpsRange
   9029  *
   9030  * DESCRIPTION: set FPS range parameter
   9031  *
   9032  *
   9033  * PARAMETERS :
   9034  *   @settings    : Metadata from framework
   9035  *   @hal_metadata: Metadata buffer
   9036  *
   9037  *
 * RETURN     : success: NO_ERROR
 *              failure: BAD_VALUE
   9040  *==========================================================================*/
int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
        metadata_buffer_t *hal_metadata)
{
    int32_t rc = NO_ERROR;
    cam_fps_range_t fps_range;
    // NOTE: caller must guarantee ANDROID_CONTROL_AE_TARGET_FPS_RANGE exists
    // in settings before calling (the call site checks exists() first).
    fps_range.min_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
    fps_range.max_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
    // Default the video fps range to the AE target range; overridden below
    // for constrained high-speed (HFR) sessions.
    fps_range.video_min_fps = fps_range.min_fps;
    fps_range.video_max_fps = fps_range.max_fps;

    LOGD("aeTargetFpsRange fps: [%f %f]",
            fps_range.min_fps, fps_range.max_fps);
    /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
     * follows:
     * ---------------------------------------------------------------|
     *      Video stream is absent in configure_streams               |
     *    (Camcorder preview before the first video record            |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     *     Video stream is present in configure_streams               |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     * (camcorder prev   |-------------|-------------|----------------|
     *  after video rec  |  [240, 240] |     240     |  [240, 240]    |
     *  is stopped)      |             |             |                |
     * ---------------------------------------------------------------|
     *       YES         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     * When Video stream is absent in configure_streams,
     * preview fps = sensor_fps / batchsize
     * Eg: for 240fps at batchSize 4, preview = 60fps
     *     for 120fps at batchSize 4, preview = 30fps
     *
     * When video stream is present in configure_streams, preview fps is as per
     * the ratio of preview buffers to video buffers requested in process
     * capture request
     */
    // Batch size is recomputed on every call; it stays 0 unless a valid HFR
    // mode at/above MIN_FPS_FOR_BATCH_MODE is selected below.
    mBatchSize = 0;
    if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
        // HFR sessions run the sensor at a fixed rate: pin both min fps and
        // video min fps to the max so the range is [max, max].
        fps_range.min_fps = fps_range.video_max_fps;
        fps_range.video_min_fps = fps_range.video_max_fps;
        // Map the requested max fps to a HAL HFR mode enum (implicit
        // float -> int conversion of max_fps for the lookup).
        int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
                fps_range.max_fps);
        if (NAME_NOT_FOUND != val) {
            cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
                return BAD_VALUE;
            }

            if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
                /* If batchmode is currently in progress and the fps changes,
                 * set the flag to restart the sensor */
                if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
                        (mHFRVideoFps != fps_range.max_fps)) {
                    mNeedSensorRestart = true;
                }
                mHFRVideoFps = fps_range.max_fps;
                // Batch enough frames per request to keep preview at
                // PREVIEW_FPS_FOR_HFR, capped at MAX_HFR_BATCH_SIZE.
                mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
                if (mBatchSize > MAX_HFR_BATCH_SIZE) {
                    mBatchSize = MAX_HFR_BATCH_SIZE;
                }
             }
            LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);

         }
    } else {
        /* HFR mode is session param in backend/ISP. This should be reset when
         * in non-HFR mode  */
        cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
            return BAD_VALUE;
        }
    }
    // Always push the (possibly HFR-adjusted) fps range to the backend.
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
        return BAD_VALUE;
    }
    LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
            fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
    return rc;
}
   9134 
   9135 /*===========================================================================
   9136  * FUNCTION   : translateToHalMetadata
   9137  *
   9138  * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
   9139  *
   9140  *
   9141  * PARAMETERS :
   9142  *   @request  : request sent from framework
   9143  *
   9144  *
 * RETURN     : success: NO_ERROR
 *              failure: error code (e.g. BAD_VALUE)
   9147  *==========================================================================*/
   9148 int QCamera3HardwareInterface::translateToHalMetadata
   9149                                   (const camera3_capture_request_t *request,
   9150                                    metadata_buffer_t *hal_metadata,
   9151                                    uint32_t snapshotStreamId)
   9152 {
   9153     int rc = 0;
   9154     CameraMetadata frame_settings;
   9155     frame_settings = request->settings;
   9156 
   9157     /* Do not change the order of the following list unless you know what you are
   9158      * doing.
   9159      * The order is laid out in such a way that parameters in the front of the table
   9160      * may be used to override the parameters later in the table. Examples are:
   9161      * 1. META_MODE should precede AEC/AWB/AF MODE
   9162      * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
   9163      * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
   9164      * 4. Any mode should precede it's corresponding settings
   9165      */
   9166     if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
   9167         uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
   9168         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
   9169             rc = BAD_VALUE;
   9170         }
   9171         rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
   9172         if (rc != NO_ERROR) {
   9173             LOGE("extractSceneMode failed");
   9174         }
   9175     }
   9176 
   9177     if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
   9178         uint8_t fwk_aeMode =
   9179             frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
   9180         uint8_t aeMode;
   9181         int32_t redeye;
   9182 
   9183         if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
   9184             aeMode = CAM_AE_MODE_OFF;
   9185         } else {
   9186             aeMode = CAM_AE_MODE_ON;
   9187         }
   9188         if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
   9189             redeye = 1;
   9190         } else {
   9191             redeye = 0;
   9192         }
   9193 
   9194         int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
   9195                 fwk_aeMode);
   9196         if (NAME_NOT_FOUND != val) {
   9197             int32_t flashMode = (int32_t)val;
   9198             ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
   9199         }
   9200 
   9201         ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
   9202         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
   9203             rc = BAD_VALUE;
   9204         }
   9205     }
   9206 
   9207     if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
   9208         uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
   9209         int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
   9210                 fwk_whiteLevel);
   9211         if (NAME_NOT_FOUND != val) {
   9212             uint8_t whiteLevel = (uint8_t)val;
   9213             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
   9214                 rc = BAD_VALUE;
   9215             }
   9216         }
   9217     }
   9218 
   9219     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
   9220         uint8_t fwk_cacMode =
   9221                 frame_settings.find(
   9222                         ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
   9223         int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
   9224                 fwk_cacMode);
   9225         if (NAME_NOT_FOUND != val) {
   9226             cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
   9227             bool entryAvailable = FALSE;
   9228             // Check whether Frameworks set CAC mode is supported in device or not
   9229             for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
   9230                 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
   9231                     entryAvailable = TRUE;
   9232                     break;
   9233                 }
   9234             }
   9235             LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
   9236             // If entry not found then set the device supported mode instead of frameworks mode i.e,
   9237             // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
   9238             // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
   9239             if (entryAvailable == FALSE) {
   9240                 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
   9241                     cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
   9242                 } else {
   9243                     if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
   9244                         // High is not supported and so set the FAST as spec say's underlying
   9245                         // device implementation can be the same for both modes.
   9246                         cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
   9247                     } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
   9248                         // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
   9249                         // in order to avoid the fps drop due to high quality
   9250                         cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
   9251                     } else {
   9252                         cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
   9253                     }
   9254                 }
   9255             }
   9256             LOGD("Final cacMode is %d", cacMode);
   9257             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
   9258                 rc = BAD_VALUE;
   9259             }
   9260         } else {
   9261             LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
   9262         }
   9263     }
   9264 
   9265     if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
   9266         uint8_t fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
   9267         int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
   9268                 fwk_focusMode);
   9269         if (NAME_NOT_FOUND != val) {
   9270             uint8_t focusMode = (uint8_t)val;
   9271             LOGD("set focus mode %d", focusMode);
   9272             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
   9273                 rc = BAD_VALUE;
   9274             }
   9275         }
   9276     }
   9277 
   9278     if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
   9279         float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
   9280         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
   9281                 focalDistance)) {
   9282             rc = BAD_VALUE;
   9283         }
   9284     }
   9285 
   9286     if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
   9287         uint8_t fwk_antibandingMode =
   9288                 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
   9289         int val = lookupHalName(ANTIBANDING_MODES_MAP,
   9290                 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
   9291         if (NAME_NOT_FOUND != val) {
   9292             uint32_t hal_antibandingMode = (uint32_t)val;
   9293             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
   9294                     hal_antibandingMode)) {
   9295                 rc = BAD_VALUE;
   9296             }
   9297         }
   9298     }
   9299 
   9300     if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
   9301         int32_t expCompensation = frame_settings.find(
   9302                 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
   9303         if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
   9304             expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
   9305         if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
   9306             expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
   9307         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
   9308                 expCompensation)) {
   9309             rc = BAD_VALUE;
   9310         }
   9311     }
   9312 
   9313     if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
   9314         uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
   9315         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
   9316             rc = BAD_VALUE;
   9317         }
   9318     }
   9319     if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
   9320         rc = setHalFpsRange(frame_settings, hal_metadata);
   9321         if (rc != NO_ERROR) {
   9322             LOGE("setHalFpsRange failed");
   9323         }
   9324     }
   9325 
   9326     if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
   9327         uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
   9328         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
   9329             rc = BAD_VALUE;
   9330         }
   9331     }
   9332 
   9333     if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
   9334         uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
   9335         int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
   9336                 fwk_effectMode);
   9337         if (NAME_NOT_FOUND != val) {
   9338             uint8_t effectMode = (uint8_t)val;
   9339             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
   9340                 rc = BAD_VALUE;
   9341             }
   9342         }
   9343     }
   9344 
   9345     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
   9346         uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
   9347         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
   9348                 colorCorrectMode)) {
   9349             rc = BAD_VALUE;
   9350         }
   9351     }
   9352 
   9353     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
   9354         cam_color_correct_gains_t colorCorrectGains;
   9355         for (size_t i = 0; i < CC_GAIN_MAX; i++) {
   9356             colorCorrectGains.gains[i] =
   9357                     frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
   9358         }
   9359         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
   9360                 colorCorrectGains)) {
   9361             rc = BAD_VALUE;
   9362         }
   9363     }
   9364 
   9365     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
   9366         cam_color_correct_matrix_t colorCorrectTransform;
   9367         cam_rational_type_t transform_elem;
   9368         size_t num = 0;
   9369         for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
   9370            for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
   9371               transform_elem.numerator =
   9372                  frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
   9373               transform_elem.denominator =
   9374                  frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
   9375               colorCorrectTransform.transform_matrix[i][j] = transform_elem;
   9376               num++;
   9377            }
   9378         }
   9379         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
   9380                 colorCorrectTransform)) {
   9381             rc = BAD_VALUE;
   9382         }
   9383     }
   9384 
   9385     cam_trigger_t aecTrigger;
   9386     aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
   9387     aecTrigger.trigger_id = -1;
   9388     if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
   9389         frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
   9390         aecTrigger.trigger =
   9391             frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
   9392         aecTrigger.trigger_id =
   9393             frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
   9394         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
   9395                 aecTrigger)) {
   9396             rc = BAD_VALUE;
   9397         }
   9398         LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
   9399                 aecTrigger.trigger, aecTrigger.trigger_id);
   9400     }
   9401 
   9402     /*af_trigger must come with a trigger id*/
   9403     if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
   9404         frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
   9405         cam_trigger_t af_trigger;
   9406         af_trigger.trigger =
   9407             frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
   9408         af_trigger.trigger_id =
   9409             frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
   9410         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
   9411             rc = BAD_VALUE;
   9412         }
   9413         LOGD("AfTrigger: %d AfTriggerID: %d",
   9414                 af_trigger.trigger, af_trigger.trigger_id);
   9415     }
   9416 
   9417     if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
   9418         int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
   9419         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
   9420             rc = BAD_VALUE;
   9421         }
   9422     }
   9423     if (frame_settings.exists(ANDROID_EDGE_MODE)) {
   9424         cam_edge_application_t edge_application;
   9425         edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
   9426         if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
   9427             edge_application.sharpness = 0;
   9428         } else {
   9429             edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
   9430         }
   9431         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
   9432             rc = BAD_VALUE;
   9433         }
   9434     }
   9435 
   9436     if (frame_settings.exists(ANDROID_FLASH_MODE)) {
   9437         int32_t respectFlashMode = 1;
   9438         if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
   9439             uint8_t fwk_aeMode =
   9440                 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
   9441             if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
   9442                 respectFlashMode = 0;
   9443                 LOGH("AE Mode controls flash, ignore android.flash.mode");
   9444             }
   9445         }
   9446         if (respectFlashMode) {
   9447             int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
   9448                     (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
   9449             LOGH("flash mode after mapping %d", val);
   9450             // To check: CAM_INTF_META_FLASH_MODE usage
   9451             if (NAME_NOT_FOUND != val) {
   9452                 uint8_t flashMode = (uint8_t)val;
   9453                 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
   9454                     rc = BAD_VALUE;
   9455                 }
   9456             }
   9457         }
   9458     }
   9459 
   9460     if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
   9461         uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
   9462         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
   9463             rc = BAD_VALUE;
   9464         }
   9465     }
   9466 
   9467     if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
   9468         int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
   9469         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
   9470                 flashFiringTime)) {
   9471             rc = BAD_VALUE;
   9472         }
   9473     }
   9474 
   9475     if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
   9476         uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
   9477         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
   9478                 hotPixelMode)) {
   9479             rc = BAD_VALUE;
   9480         }
   9481     }
   9482 
   9483     if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
   9484         float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
   9485         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
   9486                 lensAperture)) {
   9487             rc = BAD_VALUE;
   9488         }
   9489     }
   9490 
   9491     if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
   9492         float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
   9493         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
   9494                 filterDensity)) {
   9495             rc = BAD_VALUE;
   9496         }
   9497     }
   9498 
   9499     if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
   9500         float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
   9501         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
   9502                 focalLength)) {
   9503             rc = BAD_VALUE;
   9504         }
   9505     }
   9506 
   9507     if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
   9508         uint8_t optStabMode =
   9509                 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
   9510         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
   9511                 optStabMode)) {
   9512             rc = BAD_VALUE;
   9513         }
   9514     }
   9515 
   9516     if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
   9517         uint8_t videoStabMode =
   9518                 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
   9519         LOGD("videoStabMode from APP = %d", videoStabMode);
   9520         if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
   9521                 videoStabMode)) {
   9522             rc = BAD_VALUE;
   9523         }
   9524     }
   9525 
   9526 
   9527     if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
   9528         uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
   9529         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
   9530                 noiseRedMode)) {
   9531             rc = BAD_VALUE;
   9532         }
   9533     }
   9534 
   9535     if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
   9536         float reprocessEffectiveExposureFactor =
   9537             frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
   9538         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
   9539                 reprocessEffectiveExposureFactor)) {
   9540             rc = BAD_VALUE;
   9541         }
   9542     }
   9543 
   9544     cam_crop_region_t scalerCropRegion;
   9545     bool scalerCropSet = false;
   9546     if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
   9547         scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
   9548         scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
   9549         scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
   9550         scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
   9551 
   9552         // Map coordinate system from active array to sensor output.
   9553         mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
   9554                 scalerCropRegion.width, scalerCropRegion.height);
   9555 
   9556         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
   9557                 scalerCropRegion)) {
   9558             rc = BAD_VALUE;
   9559         }
   9560         scalerCropSet = true;
   9561     }
   9562 
   9563     if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
   9564         int64_t sensorExpTime =
   9565                 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
   9566         LOGD("setting sensorExpTime %lld", sensorExpTime);
   9567         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
   9568                 sensorExpTime)) {
   9569             rc = BAD_VALUE;
   9570         }
   9571     }
   9572 
   9573     if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
   9574         int64_t sensorFrameDuration =
   9575                 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
   9576         int64_t minFrameDuration = getMinFrameDuration(request);
   9577         sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
   9578         if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
   9579             sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
   9580         LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
   9581         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
   9582                 sensorFrameDuration)) {
   9583             rc = BAD_VALUE;
   9584         }
   9585     }
   9586 
   9587     if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
   9588         int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
   9589         if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
   9590                 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
   9591         if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
   9592                 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
   9593         LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
   9594         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
   9595                 sensorSensitivity)) {
   9596             rc = BAD_VALUE;
   9597         }
   9598     }
   9599 
   9600 #ifndef USE_HAL_3_3
   9601     if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
   9602         int32_t ispSensitivity =
   9603             frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
   9604         if (ispSensitivity <
   9605             gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
   9606                 ispSensitivity =
   9607                     gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
   9608                 LOGD("clamp ispSensitivity to %d", ispSensitivity);
   9609         }
   9610         if (ispSensitivity >
   9611             gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
   9612                 ispSensitivity =
   9613                     gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
   9614                 LOGD("clamp ispSensitivity to %d", ispSensitivity);
   9615         }
   9616         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
   9617                 ispSensitivity)) {
   9618             rc = BAD_VALUE;
   9619         }
   9620     }
   9621 #endif
   9622 
   9623     if (frame_settings.exists(ANDROID_SHADING_MODE)) {
   9624         uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
   9625         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
   9626             rc = BAD_VALUE;
   9627         }
   9628     }
   9629 
   9630     if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
   9631         uint8_t fwk_facedetectMode =
   9632                 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
   9633 
   9634         int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
   9635                 fwk_facedetectMode);
   9636 
   9637         if (NAME_NOT_FOUND != val) {
   9638             uint8_t facedetectMode = (uint8_t)val;
   9639             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
   9640                     facedetectMode)) {
   9641                 rc = BAD_VALUE;
   9642             }
   9643         }
   9644     }
   9645 
   9646     if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
   9647         uint8_t histogramMode =
   9648                 frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
   9649         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
   9650                 histogramMode)) {
   9651             rc = BAD_VALUE;
   9652         }
   9653     }
   9654 
   9655     if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
   9656         uint8_t sharpnessMapMode =
   9657                 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
   9658         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
   9659                 sharpnessMapMode)) {
   9660             rc = BAD_VALUE;
   9661         }
   9662     }
   9663 
   9664     if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
   9665         uint8_t tonemapMode =
   9666                 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
   9667         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
   9668             rc = BAD_VALUE;
   9669         }
   9670     }
   9671     /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
   9672     /*All tonemap channels will have the same number of points*/
   9673     if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
   9674         frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
   9675         frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
   9676         cam_rgb_tonemap_curves tonemapCurves;
   9677         tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
   9678         if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
   9679             LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
   9680                      tonemapCurves.tonemap_points_cnt,
   9681                     CAM_MAX_TONEMAP_CURVE_SIZE);
   9682             tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
   9683         }
   9684 
   9685         /* ch0 = G*/
   9686         size_t point = 0;
   9687         cam_tonemap_curve_t tonemapCurveGreen;
   9688         for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
   9689             for (size_t j = 0; j < 2; j++) {
   9690                tonemapCurveGreen.tonemap_points[i][j] =
   9691                   frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
   9692                point++;
   9693             }
   9694         }
   9695         tonemapCurves.curves[0] = tonemapCurveGreen;
   9696 
   9697         /* ch 1 = B */
   9698         point = 0;
   9699         cam_tonemap_curve_t tonemapCurveBlue;
   9700         for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
   9701             for (size_t j = 0; j < 2; j++) {
   9702                tonemapCurveBlue.tonemap_points[i][j] =
   9703                   frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
   9704                point++;
   9705             }
   9706         }
   9707         tonemapCurves.curves[1] = tonemapCurveBlue;
   9708 
   9709         /* ch 2 = R */
   9710         point = 0;
   9711         cam_tonemap_curve_t tonemapCurveRed;
   9712         for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
   9713             for (size_t j = 0; j < 2; j++) {
   9714                tonemapCurveRed.tonemap_points[i][j] =
   9715                   frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
   9716                point++;
   9717             }
   9718         }
   9719         tonemapCurves.curves[2] = tonemapCurveRed;
   9720 
   9721         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
   9722                 tonemapCurves)) {
   9723             rc = BAD_VALUE;
   9724         }
   9725     }
   9726 
   9727     if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
   9728         uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
   9729         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
   9730                 captureIntent)) {
   9731             rc = BAD_VALUE;
   9732         }
   9733     }
   9734 
   9735     if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
   9736         uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
   9737         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
   9738                 blackLevelLock)) {
   9739             rc = BAD_VALUE;
   9740         }
   9741     }
   9742 
   9743     if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
   9744         uint8_t lensShadingMapMode =
   9745                 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
   9746         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
   9747                 lensShadingMapMode)) {
   9748             rc = BAD_VALUE;
   9749         }
   9750     }
   9751 
   9752     if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
   9753         cam_area_t roi;
   9754         bool reset = true;
   9755         convertFromRegions(roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
   9756 
   9757         // Map coordinate system from active array to sensor output.
   9758         mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
   9759                 roi.rect.height);
   9760 
   9761         if (scalerCropSet) {
   9762             reset = resetIfNeededROI(&roi, &scalerCropRegion);
   9763         }
   9764         if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
   9765             rc = BAD_VALUE;
   9766         }
   9767     }
   9768 
   9769     if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
   9770         cam_area_t roi;
   9771         bool reset = true;
   9772         convertFromRegions(roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
   9773 
   9774         // Map coordinate system from active array to sensor output.
   9775         mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
   9776                 roi.rect.height);
   9777 
   9778         if (scalerCropSet) {
   9779             reset = resetIfNeededROI(&roi, &scalerCropRegion);
   9780         }
   9781         if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
   9782             rc = BAD_VALUE;
   9783         }
   9784     }
   9785 
   9786     // CDS for non-HFR non-video mode
   9787     if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
   9788             !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
   9789         int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
   9790         if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
   9791             LOGE("Invalid CDS mode %d!", *fwk_cds);
   9792         } else {
   9793             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
   9794                     CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
   9795                 rc = BAD_VALUE;
   9796             }
   9797         }
   9798     }
   9799 
   9800     // Video HDR
   9801     if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
   9802         cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
   9803                 frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
   9804         rc = setVideoHdrMode(mParameters, vhdr);
   9805         if (rc != NO_ERROR) {
   9806             LOGE("setVideoHDR is failed");
   9807         }
   9808     }
   9809 
   9810     //IR
   9811     if(frame_settings.exists(QCAMERA3_IR_MODE)) {
   9812         cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
   9813                 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
   9814         if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
   9815             LOGE("Invalid IR mode %d!", fwk_ir);
   9816         } else {
   9817             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
   9818                     CAM_INTF_META_IR_MODE, fwk_ir)) {
   9819                 rc = BAD_VALUE;
   9820             }
   9821         }
   9822     }
   9823 
   9824     if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
   9825         float aec_speed;
   9826         aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
   9827         LOGD("AEC Speed :%f", aec_speed);
   9828         if ( aec_speed < 0 ) {
   9829             LOGE("Invalid AEC mode %f!", aec_speed);
   9830         } else {
   9831             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
   9832                     aec_speed)) {
   9833                 rc = BAD_VALUE;
   9834             }
   9835         }
   9836     }
   9837 
   9838     if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
   9839         float awb_speed;
   9840         awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
   9841         LOGD("AWB Speed :%f", awb_speed);
   9842         if ( awb_speed < 0 ) {
   9843             LOGE("Invalid AWB mode %f!", awb_speed);
   9844         } else {
   9845             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
   9846                     awb_speed)) {
   9847                 rc = BAD_VALUE;
   9848             }
   9849         }
   9850     }
   9851 
   9852     // TNR
   9853     if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
   9854         frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
   9855         uint8_t b_TnrRequested = 0;
   9856         cam_denoise_param_t tnr;
   9857         tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
   9858         tnr.process_plates =
   9859             (cam_denoise_process_type_t)frame_settings.find(
   9860             QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
   9861         b_TnrRequested = tnr.denoise_enable;
   9862         if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
   9863             rc = BAD_VALUE;
   9864         }
   9865     }
   9866 
   9867     if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
   9868         int32_t fwk_testPatternMode =
   9869                 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
   9870         int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
   9871                 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
   9872 
   9873         if (NAME_NOT_FOUND != testPatternMode) {
   9874             cam_test_pattern_data_t testPatternData;
   9875             memset(&testPatternData, 0, sizeof(testPatternData));
   9876             testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
   9877             if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
   9878                     frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
   9879                 int32_t *fwk_testPatternData =
   9880                         frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
   9881                 testPatternData.r = fwk_testPatternData[0];
   9882                 testPatternData.b = fwk_testPatternData[3];
   9883                 switch (gCamCapability[mCameraId]->color_arrangement) {
   9884                     case CAM_FILTER_ARRANGEMENT_RGGB:
   9885                     case CAM_FILTER_ARRANGEMENT_GRBG:
   9886                         testPatternData.gr = fwk_testPatternData[1];
   9887                         testPatternData.gb = fwk_testPatternData[2];
   9888                         break;
   9889                     case CAM_FILTER_ARRANGEMENT_GBRG:
   9890                     case CAM_FILTER_ARRANGEMENT_BGGR:
   9891                         testPatternData.gr = fwk_testPatternData[2];
   9892                         testPatternData.gb = fwk_testPatternData[1];
   9893                         break;
   9894                     default:
   9895                         LOGE("color arrangement %d is not supported",
   9896                                 gCamCapability[mCameraId]->color_arrangement);
   9897                         break;
   9898                 }
   9899             }
   9900             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
   9901                     testPatternData)) {
   9902                 rc = BAD_VALUE;
   9903             }
   9904         } else {
   9905             LOGE("Invalid framework sensor test pattern mode %d",
   9906                     fwk_testPatternMode);
   9907         }
   9908     }
   9909 
   9910     if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
   9911         size_t count = 0;
   9912         camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
   9913         ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
   9914                 gps_coords.data.d, gps_coords.count, count);
   9915         if (gps_coords.count != count) {
   9916             rc = BAD_VALUE;
   9917         }
   9918     }
   9919 
   9920     if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
   9921         char gps_methods[GPS_PROCESSING_METHOD_SIZE];
   9922         size_t count = 0;
   9923         const char *gps_methods_src = (const char *)
   9924                 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
   9925         memset(gps_methods, '\0', sizeof(gps_methods));
   9926         strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
   9927         ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
   9928                 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
   9929         if (GPS_PROCESSING_METHOD_SIZE != count) {
   9930             rc = BAD_VALUE;
   9931         }
   9932     }
   9933 
   9934     if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
   9935         int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
   9936         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
   9937                 gps_timestamp)) {
   9938             rc = BAD_VALUE;
   9939         }
   9940     }
   9941 
   9942     if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
   9943         int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
   9944         cam_rotation_info_t rotation_info;
   9945         if (orientation == 0) {
   9946            rotation_info.rotation = ROTATE_0;
   9947         } else if (orientation == 90) {
   9948            rotation_info.rotation = ROTATE_90;
   9949         } else if (orientation == 180) {
   9950            rotation_info.rotation = ROTATE_180;
   9951         } else if (orientation == 270) {
   9952            rotation_info.rotation = ROTATE_270;
   9953         }
   9954         rotation_info.streamId = snapshotStreamId;
   9955         ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
   9956         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
   9957             rc = BAD_VALUE;
   9958         }
   9959     }
   9960 
   9961     if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
   9962         uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
   9963         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
   9964             rc = BAD_VALUE;
   9965         }
   9966     }
   9967 
   9968     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
   9969         uint32_t thumb_quality = (uint32_t)
   9970                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
   9971         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
   9972                 thumb_quality)) {
   9973             rc = BAD_VALUE;
   9974         }
   9975     }
   9976 
   9977     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
   9978         cam_dimension_t dim;
   9979         dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
   9980         dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
   9981         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
   9982             rc = BAD_VALUE;
   9983         }
   9984     }
   9985 
   9986     // Internal metadata
   9987     if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
   9988         size_t count = 0;
   9989         camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
   9990         ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
   9991                 privatedata.data.i32, privatedata.count, count);
   9992         if (privatedata.count != count) {
   9993             rc = BAD_VALUE;
   9994         }
   9995     }
   9996 
   9997     // EV step
   9998     if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
   9999             gCamCapability[mCameraId]->exp_compensation_step)) {
   10000         rc = BAD_VALUE;
   10001     }
   10002 
   10003     // CDS info
   10004     if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
   10005         cam_cds_data_t *cdsData = (cam_cds_data_t *)
   10006                 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
   10007 
   10008         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
   10009                 CAM_INTF_META_CDS_DATA, *cdsData)) {
   10010             rc = BAD_VALUE;
   10011         }
   10012     }
   10013 
   10014     return rc;
   10015 }
   10016 
   10017 /*===========================================================================
   10018  * FUNCTION   : captureResultCb
   10019  *
   10020  * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
   10021  *
   10022  * PARAMETERS :
   10023  *   @frame  : frame information from mm-camera-interface
   10024  *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
   10025  *   @userdata: userdata
   10026  *
   10027  * RETURN     : NONE
   10028  *==========================================================================*/
   10029 void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
   10030                 camera3_stream_buffer_t *buffer,
   10031                 uint32_t frame_number, bool isInputBuffer, void *userdata)
   10032 {
   10033     QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
   10034     if (hw == NULL) {
   10035         LOGE("Invalid hw %p", hw);
   10036         return;
   10037     }
   10038 
   10039     hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
   10040     return;
   10041 }
   10042 
   10043 
   10044 /*===========================================================================
   10045  * FUNCTION   : initialize
   10046  *
   10047  * DESCRIPTION: Pass framework callback pointers to HAL
   10048  *
   10049  * PARAMETERS :
   10050  *
   10051  *
   10052  * RETURN     : Success : 0
   10053  *              Failure: -ENODEV
   10054  *==========================================================================*/
   10055 
   10056 int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
   10057                                   const camera3_callback_ops_t *callback_ops)
   10058 {
   10059     LOGD("E");
   10060     QCamera3HardwareInterface *hw =
   10061         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   10062     if (!hw) {
   10063         LOGE("NULL camera device");
   10064         return -ENODEV;
   10065     }
   10066 
   10067     int rc = hw->initialize(callback_ops);
   10068     LOGD("X");
   10069     return rc;
   10070 }
   10071 
   10072 /*===========================================================================
   10073  * FUNCTION   : configure_streams
   10074  *
   10075  * DESCRIPTION:
   10076  *
   10077  * PARAMETERS :
   10078  *
   10079  *
   10080  * RETURN     : Success: 0
   10081  *              Failure: -EINVAL (if stream configuration is invalid)
   10082  *                       -ENODEV (fatal error)
   10083  *==========================================================================*/
   10084 
   10085 int QCamera3HardwareInterface::configure_streams(
   10086         const struct camera3_device *device,
   10087         camera3_stream_configuration_t *stream_list)
   10088 {
   10089     LOGD("E");
   10090     QCamera3HardwareInterface *hw =
   10091         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   10092     if (!hw) {
   10093         LOGE("NULL camera device");
   10094         return -ENODEV;
   10095     }
   10096     int rc = hw->configureStreams(stream_list);
   10097     LOGD("X");
   10098     return rc;
   10099 }
   10100 
   10101 /*===========================================================================
   10102  * FUNCTION   : construct_default_request_settings
   10103  *
   10104  * DESCRIPTION: Configure a settings buffer to meet the required use case
   10105  *
   10106  * PARAMETERS :
   10107  *
   10108  *
   10109  * RETURN     : Success: Return valid metadata
   10110  *              Failure: Return NULL
   10111  *==========================================================================*/
   10112 const camera_metadata_t* QCamera3HardwareInterface::
   10113     construct_default_request_settings(const struct camera3_device *device,
   10114                                         int type)
   10115 {
   10116 
   10117     LOGD("E");
   10118     camera_metadata_t* fwk_metadata = NULL;
   10119     QCamera3HardwareInterface *hw =
   10120         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   10121     if (!hw) {
   10122         LOGE("NULL camera device");
   10123         return NULL;
   10124     }
   10125 
   10126     fwk_metadata = hw->translateCapabilityToMetadata(type);
   10127 
   10128     LOGD("X");
   10129     return fwk_metadata;
   10130 }
   10131 
   10132 /*===========================================================================
   10133  * FUNCTION   : process_capture_request
   10134  *
   10135  * DESCRIPTION:
   10136  *
   10137  * PARAMETERS :
   10138  *
   10139  *
   10140  * RETURN     :
   10141  *==========================================================================*/
   10142 int QCamera3HardwareInterface::process_capture_request(
   10143                     const struct camera3_device *device,
   10144                     camera3_capture_request_t *request)
   10145 {
   10146     LOGD("E");
   10147     QCamera3HardwareInterface *hw =
   10148         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   10149     if (!hw) {
   10150         LOGE("NULL camera device");
   10151         return -EINVAL;
   10152     }
   10153 
   10154     int rc = hw->processCaptureRequest(request);
   10155     LOGD("X");
   10156     return rc;
   10157 }
   10158 
   10159 /*===========================================================================
   10160  * FUNCTION   : dump
   10161  *
   10162  * DESCRIPTION:
   10163  *
   10164  * PARAMETERS :
   10165  *
   10166  *
   10167  * RETURN     :
   10168  *==========================================================================*/
   10169 
   10170 void QCamera3HardwareInterface::dump(
   10171                 const struct camera3_device *device, int fd)
   10172 {
   10173     /* Log level property is read when "adb shell dumpsys media.camera" is
   10174        called so that the log level can be controlled without restarting
   10175        the media server */
   10176     getLogLevel();
   10177 
   10178     LOGD("E");
   10179     QCamera3HardwareInterface *hw =
   10180         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   10181     if (!hw) {
   10182         LOGE("NULL camera device");
   10183         return;
   10184     }
   10185 
   10186     hw->dump(fd);
   10187     LOGD("X");
   10188     return;
   10189 }
   10190 
   10191 /*===========================================================================
   10192  * FUNCTION   : flush
   10193  *
   10194  * DESCRIPTION:
   10195  *
   10196  * PARAMETERS :
   10197  *
   10198  *
   10199  * RETURN     :
   10200  *==========================================================================*/
   10201 
   10202 int QCamera3HardwareInterface::flush(
   10203                 const struct camera3_device *device)
   10204 {
   10205     int rc;
   10206     LOGD("E");
   10207     QCamera3HardwareInterface *hw =
   10208         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   10209     if (!hw) {
   10210         LOGE("NULL camera device");
   10211         return -EINVAL;
   10212     }
   10213 
   10214     pthread_mutex_lock(&hw->mMutex);
   10215     // Validate current state
   10216     switch (hw->mState) {
   10217         case STARTED:
   10218             /* valid state */
   10219             break;
   10220 
   10221         case ERROR:
   10222             pthread_mutex_unlock(&hw->mMutex);
   10223             hw->handleCameraDeviceError();
   10224             return -ENODEV;
   10225 
   10226         default:
   10227             LOGI("Flush returned during state %d", hw->mState);
   10228             pthread_mutex_unlock(&hw->mMutex);
   10229             return 0;
   10230     }
   10231     pthread_mutex_unlock(&hw->mMutex);
   10232 
   10233     rc = hw->flush(true /* restart channels */ );
   10234     LOGD("X");
   10235     return rc;
   10236 }
   10237 
   10238 /*===========================================================================
   10239  * FUNCTION   : close_camera_device
   10240  *
   10241  * DESCRIPTION:
   10242  *
   10243  * PARAMETERS :
   10244  *
   10245  *
   10246  * RETURN     :
   10247  *==========================================================================*/
   10248 int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
   10249 {
   10250     int ret = NO_ERROR;
   10251     QCamera3HardwareInterface *hw =
   10252         reinterpret_cast<QCamera3HardwareInterface *>(
   10253             reinterpret_cast<camera3_device_t *>(device)->priv);
   10254     if (!hw) {
   10255         LOGE("NULL camera device");
   10256         return BAD_VALUE;
   10257     }
   10258 
   10259     LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
   10260     delete hw;
   10261     LOGI("[KPI Perf]: X");
   10262     return ret;
   10263 }
   10264 
   10265 /*===========================================================================
   10266  * FUNCTION   : getWaveletDenoiseProcessPlate
   10267  *
   10268  * DESCRIPTION: query wavelet denoise process plate
   10269  *
   10270  * PARAMETERS : None
   10271  *
 * RETURN     : WNR process plate value
   10273  *==========================================================================*/
   10274 cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
   10275 {
   10276     char prop[PROPERTY_VALUE_MAX];
   10277     memset(prop, 0, sizeof(prop));
   10278     property_get("persist.denoise.process.plates", prop, "0");
   10279     int processPlate = atoi(prop);
   10280     switch(processPlate) {
   10281     case 0:
   10282         return CAM_WAVELET_DENOISE_YCBCR_PLANE;
   10283     case 1:
   10284         return CAM_WAVELET_DENOISE_CBCR_ONLY;
   10285     case 2:
   10286         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
   10287     case 3:
   10288         return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
   10289     default:
   10290         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
   10291     }
   10292 }
   10293 
   10294 
   10295 /*===========================================================================
   10296  * FUNCTION   : getTemporalDenoiseProcessPlate
   10297  *
   10298  * DESCRIPTION: query temporal denoise process plate
   10299  *
   10300  * PARAMETERS : None
   10301  *
 * RETURN     : TNR process plate value
   10303  *==========================================================================*/
   10304 cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
   10305 {
   10306     char prop[PROPERTY_VALUE_MAX];
   10307     memset(prop, 0, sizeof(prop));
   10308     property_get("persist.tnr.process.plates", prop, "0");
   10309     int processPlate = atoi(prop);
   10310     switch(processPlate) {
   10311     case 0:
   10312         return CAM_WAVELET_DENOISE_YCBCR_PLANE;
   10313     case 1:
   10314         return CAM_WAVELET_DENOISE_CBCR_ONLY;
   10315     case 2:
   10316         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
   10317     case 3:
   10318         return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
   10319     default:
   10320         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
   10321     }
   10322 }
   10323 
   10324 
   10325 /*===========================================================================
   10326  * FUNCTION   : extractSceneMode
   10327  *
   10328  * DESCRIPTION: Extract scene mode from frameworks set metadata
   10329  *
   10330  * PARAMETERS :
   10331  *      @frame_settings: CameraMetadata reference
 *      @metaMode: ANDROID_CONTROL_MODE
   10333  *      @hal_metadata: hal metadata structure
   10334  *
   10335  * RETURN     : None
   10336  *==========================================================================*/
   10337 int32_t QCamera3HardwareInterface::extractSceneMode(
   10338         const CameraMetadata &frame_settings, uint8_t metaMode,
   10339         metadata_buffer_t *hal_metadata)
   10340 {
   10341     int32_t rc = NO_ERROR;
   10342 
   10343     if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
   10344         camera_metadata_ro_entry entry =
   10345                 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
   10346         if (0 == entry.count)
   10347             return rc;
   10348 
   10349         uint8_t fwk_sceneMode = entry.data.u8[0];
   10350 
   10351         int val = lookupHalName(SCENE_MODES_MAP,
   10352                 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
   10353                 fwk_sceneMode);
   10354         if (NAME_NOT_FOUND != val) {
   10355             uint8_t sceneMode = (uint8_t)val;
   10356             LOGD("sceneMode: %d", sceneMode);
   10357             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
   10358                     CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
   10359                 rc = BAD_VALUE;
   10360             }
   10361         }
   10362     } else if ((ANDROID_CONTROL_MODE_OFF == metaMode) ||
   10363             (ANDROID_CONTROL_MODE_AUTO == metaMode)) {
   10364         uint8_t sceneMode = CAM_SCENE_MODE_OFF;
   10365         LOGD("sceneMode: %d", sceneMode);
   10366         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
   10367                 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
   10368             rc = BAD_VALUE;
   10369         }
   10370     }
   10371     return rc;
   10372 }
   10373 
   10374 /*===========================================================================
   10375  * FUNCTION   : setVideoHdrMode
   10376  *
   10377  * DESCRIPTION: Set Video HDR mode from frameworks set metadata
   10378  *
   10379  * PARAMETERS :
 *      @hal_metadata: hal metadata structure
 *      @vhdr: requested QCAMERA3_VIDEO_HDR_MODE value
 *
 * RETURN     : int32_t status (NO_ERROR on success, BAD_VALUE on failure)
   10384  *==========================================================================*/
   10385 int32_t QCamera3HardwareInterface::setVideoHdrMode(
   10386         metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
   10387 {
   10388     int32_t rc = NO_ERROR;
   10389     if ((CAM_VIDEO_HDR_MODE_MAX <= (vhdr)) || (0 > (vhdr))) {
   10390         LOGE("%s: Invalid Video HDR mode %d!", __func__, vhdr);
   10391         rc = BAD_VALUE;
   10392     } else {
   10393         cam_sensor_hdr_type_t vhdr_type = CAM_SENSOR_HDR_MAX;
   10394         if(vhdr == QCAMERA3_VIDEO_HDR_MODE_OFF) {
   10395             LOGD("Setting HDR mode Off");
   10396             vhdr_type = CAM_SENSOR_HDR_OFF;
   10397         } else {
   10398             char video_hdr_prop[PROPERTY_VALUE_MAX];
   10399             memset(video_hdr_prop, 0, sizeof(video_hdr_prop));
   10400             property_get("persist.camera.hdr.video", video_hdr_prop, "3");
   10401             uint8_t use_hdr_video = (uint8_t)atoi(video_hdr_prop);
   10402             if ((gCamCapability[mCameraId]->qcom_supported_feature_mask &
   10403                     CAM_QCOM_FEATURE_SENSOR_HDR) &&
   10404                     (use_hdr_video == CAM_SENSOR_HDR_IN_SENSOR)) {
   10405                 LOGD("Setting HDR mode In Sensor");
   10406                 vhdr_type = CAM_SENSOR_HDR_IN_SENSOR;
   10407             }
   10408             if ((gCamCapability[mCameraId]->qcom_supported_feature_mask &
   10409                     CAM_QCOM_FEATURE_ZIGZAG_VIDEO_HDR) &&
   10410                     (use_hdr_video == CAM_SENSOR_HDR_ZIGZAG)) {
   10411                 LOGD("Setting HDR mode Zigzag");
   10412                 vhdr_type = CAM_SENSOR_HDR_ZIGZAG;
   10413             }
   10414             if ((gCamCapability[mCameraId]->qcom_supported_feature_mask &
   10415                     CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) &&
   10416                     (use_hdr_video == CAM_SENSOR_HDR_STAGGERED)) {
   10417                 LOGD("Setting HDR mode Staggered");
   10418                 vhdr_type = CAM_SENSOR_HDR_STAGGERED;
   10419             }
   10420             if(vhdr_type == CAM_SENSOR_HDR_MAX) {
   10421                 LOGD("HDR mode not supported");
   10422                 rc = BAD_VALUE;
   10423             }
   10424         }
   10425         if(rc == NO_ERROR) {
   10426             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
   10427                     CAM_INTF_PARM_SENSOR_HDR, vhdr_type)) {
   10428                 rc = BAD_VALUE;
   10429             }
   10430         }
   10431     }
   10432     return rc;
   10433 }
   10434 
   10435 /*===========================================================================
   10436  * FUNCTION   : needRotationReprocess
   10437  *
   10438  * DESCRIPTION: if rotation needs to be done by reprocess in pp
   10439  *
   10440  * PARAMETERS : none
   10441  *
   10442  * RETURN     : true: needed
   10443  *              false: no need
   10444  *==========================================================================*/
   10445 bool QCamera3HardwareInterface::needRotationReprocess()
   10446 {
   10447     if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
   10448         // current rotation is not zero, and pp has the capability to process rotation
   10449         LOGH("need do reprocess for rotation");
   10450         return true;
   10451     }
   10452 
   10453     return false;
   10454 }
   10455 
   10456 /*===========================================================================
   10457  * FUNCTION   : needReprocess
   10458  *
 * DESCRIPTION: if reprocess is needed
   10460  *
   10461  * PARAMETERS : none
   10462  *
   10463  * RETURN     : true: needed
   10464  *              false: no need
   10465  *==========================================================================*/
   10466 bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
   10467 {
   10468     if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
   10469         // TODO: add for ZSL HDR later
   10470         // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
   10471         if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
   10472             LOGH("need do reprocess for ZSL WNR or min PP reprocess");
   10473             return true;
   10474         } else {
   10475             LOGH("already post processed frame");
   10476             return false;
   10477         }
   10478     }
   10479     return needRotationReprocess();
   10480 }
   10481 
   10482 /*===========================================================================
   10483  * FUNCTION   : needJpegExifRotation
   10484  *
   10485  * DESCRIPTION: if rotation from jpeg is needed
   10486  *
   10487  * PARAMETERS : none
   10488  *
   10489  * RETURN     : true: needed
   10490  *              false: no need
   10491  *==========================================================================*/
   10492 bool QCamera3HardwareInterface::needJpegExifRotation()
   10493 {
   10494    /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
   10495     if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
   10496        LOGD("Need use Jpeg EXIF Rotation");
   10497        return true;
   10498     }
   10499     return false;
   10500 }
   10501 
   10502 /*===========================================================================
   10503  * FUNCTION   : addOfflineReprocChannel
   10504  *
   10505  * DESCRIPTION: add a reprocess channel that will do reprocess on frames
   10506  *              coming from input channel
   10507  *
   10508  * PARAMETERS :
   10509  *   @config  : reprocess configuration
   10510  *   @inputChHandle : pointer to the input (source) channel
   10511  *
   10512  *
   10513  * RETURN     : Ptr to the newly created channel obj. NULL if failed.
   10514  *==========================================================================*/
   10515 QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
   10516         const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
   10517 {
   10518     int32_t rc = NO_ERROR;
   10519     QCamera3ReprocessChannel *pChannel = NULL;
   10520 
   10521     pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
   10522             mChannelHandle, mCameraHandle->ops, captureResultCb, config.padding,
   10523             CAM_QCOM_FEATURE_NONE, this, inputChHandle);
   10524     if (NULL == pChannel) {
   10525         LOGE("no mem for reprocess channel");
   10526         return NULL;
   10527     }
   10528 
   10529     rc = pChannel->initialize(IS_TYPE_NONE);
   10530     if (rc != NO_ERROR) {
   10531         LOGE("init reprocess channel failed, ret = %d", rc);
   10532         delete pChannel;
   10533         return NULL;
   10534     }
   10535 
   10536     // pp feature config
   10537     cam_pp_feature_config_t pp_config;
   10538     memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
   10539 
   10540     pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
   10541     if (gCamCapability[mCameraId]->qcom_supported_feature_mask
   10542             & CAM_QCOM_FEATURE_DSDN) {
   10543         //Use CPP CDS incase h/w supports it.
   10544         pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
   10545         pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
   10546     }
   10547     if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
   10548         pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
   10549     }
   10550 
   10551     rc = pChannel->addReprocStreamsFromSource(pp_config,
   10552             config,
   10553             IS_TYPE_NONE,
   10554             mMetadataChannel);
   10555 
   10556     if (rc != NO_ERROR) {
   10557         delete pChannel;
   10558         return NULL;
   10559     }
   10560     return pChannel;
   10561 }
   10562 
   10563 /*===========================================================================
   10564  * FUNCTION   : getMobicatMask
   10565  *
   10566  * DESCRIPTION: returns mobicat mask
   10567  *
   10568  * PARAMETERS : none
   10569  *
   10570  * RETURN     : mobicat mask
   10571  *
   10572  *==========================================================================*/
uint8_t QCamera3HardwareInterface::getMobicatMask()
{
    // Simple accessor for the Mobicat enable mask set in setMobicat().
    return m_MobicatMask;
}
   10577 
   10578 /*===========================================================================
   10579  * FUNCTION   : setMobicat
   10580  *
   10581  * DESCRIPTION: set Mobicat on/off.
   10582  *
   10583  * PARAMETERS :
   10584  *   @params  : none
   10585  *
   10586  * RETURN     : int32_t type of status
   10587  *              NO_ERROR  -- success
   10588  *              none-zero failure code
   10589  *==========================================================================*/
   10590 int32_t QCamera3HardwareInterface::setMobicat()
   10591 {
   10592     char value [PROPERTY_VALUE_MAX];
   10593     property_get("persist.camera.mobicat", value, "0");
   10594     int32_t ret = NO_ERROR;
   10595     uint8_t enableMobi = (uint8_t)atoi(value);
   10596 
   10597     if (enableMobi) {
   10598         tune_cmd_t tune_cmd;
   10599         tune_cmd.type = SET_RELOAD_CHROMATIX;
   10600         tune_cmd.module = MODULE_ALL;
   10601         tune_cmd.value = TRUE;
   10602         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
   10603                 CAM_INTF_PARM_SET_VFE_COMMAND,
   10604                 tune_cmd);
   10605 
   10606         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
   10607                 CAM_INTF_PARM_SET_PP_COMMAND,
   10608                 tune_cmd);
   10609     }
   10610     m_MobicatMask = enableMobi;
   10611 
   10612     return ret;
   10613 }
   10614 
   10615 /*===========================================================================
   10616 * FUNCTION   : getLogLevel
   10617 *
   10618 * DESCRIPTION: Reads the log level property into a variable
   10619 *
   10620 * PARAMETERS :
   10621 *   None
   10622 *
   10623 * RETURN     :
   10624 *   None
   10625 *==========================================================================*/
   10626 void QCamera3HardwareInterface::getLogLevel()
   10627 {
   10628     char prop[PROPERTY_VALUE_MAX];
   10629     uint32_t globalLogLevel = 0;
   10630 
   10631     property_get("persist.camera.hal.debug", prop, "0");
   10632     int val = atoi(prop);
   10633     if (0 <= val) {
   10634         gCamHal3LogLevel = (uint32_t)val;
   10635     }
   10636 
   10637     property_get("persist.camera.kpi.debug", prop, "1");
   10638     gKpiDebugLevel = atoi(prop);
   10639 
   10640     property_get("persist.camera.global.debug", prop, "0");
   10641     val = atoi(prop);
   10642     if (0 <= val) {
   10643         globalLogLevel = (uint32_t)val;
   10644     }
   10645 
   10646     /* Highest log level among hal.logs and global.logs is selected */
   10647     if (gCamHal3LogLevel < globalLogLevel)
   10648         gCamHal3LogLevel = globalLogLevel;
   10649 
   10650     return;
   10651 }
   10652 
   10653 /*===========================================================================
   10654  * FUNCTION   : validateStreamRotations
   10655  *
   10656  * DESCRIPTION: Check if the rotations requested are supported
   10657  *
   10658  * PARAMETERS :
   10659  *   @stream_list : streams to be configured
   10660  *
   10661  * RETURN     : NO_ERROR on success
   10662  *              -EINVAL on failure
   10663  *
   10664  *==========================================================================*/
   10665 int QCamera3HardwareInterface::validateStreamRotations(
   10666         camera3_stream_configuration_t *streamList)
   10667 {
   10668     int rc = NO_ERROR;
   10669 
   10670     /*
   10671     * Loop through all streams requested in configuration
   10672     * Check if unsupported rotations have been requested on any of them
   10673     */
   10674     for (size_t j = 0; j < streamList->num_streams; j++){
   10675         camera3_stream_t *newStream = streamList->streams[j];
   10676 
   10677         bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
   10678         bool isImplDef = (newStream->format ==
   10679                 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
   10680         bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
   10681                 isImplDef);
   10682 
   10683         if (isRotated && (!isImplDef || isZsl)) {
   10684             LOGE("Error: Unsupported rotation of %d requested for stream"
   10685                     "type:%d and stream format:%d",
   10686                     newStream->rotation, newStream->stream_type,
   10687                     newStream->format);
   10688             rc = -EINVAL;
   10689             break;
   10690         }
   10691     }
   10692 
   10693     return rc;
   10694 }
   10695 
   10696 /*===========================================================================
   10697 * FUNCTION   : getFlashInfo
   10698 *
   10699 * DESCRIPTION: Retrieve information about whether the device has a flash.
   10700 *
   10701 * PARAMETERS :
   10702 *   @cameraId  : Camera id to query
   10703 *   @hasFlash  : Boolean indicating whether there is a flash device
   10704 *                associated with given camera
   10705 *   @flashNode : If a flash device exists, this will be its device node.
   10706 *
   10707 * RETURN     :
   10708 *   None
   10709 *==========================================================================*/
   10710 void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
   10711         bool& hasFlash,
   10712         char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
   10713 {
   10714     cam_capability_t* camCapability = gCamCapability[cameraId];
   10715     if (NULL == camCapability) {
   10716         hasFlash = false;
   10717         flashNode[0] = '\0';
   10718     } else {
   10719         hasFlash = camCapability->flash_available;
   10720         strlcpy(flashNode,
   10721                 (char*)camCapability->flash_dev_name,
   10722                 QCAMERA_MAX_FILEPATH_LENGTH);
   10723     }
   10724 }
   10725 
   10726 /*===========================================================================
   10727 * FUNCTION   : getEepromVersionInfo
   10728 *
   10729 * DESCRIPTION: Retrieve version info of the sensor EEPROM data
   10730 *
   10731 * PARAMETERS : None
   10732 *
   10733 * RETURN     : string describing EEPROM version
   10734 *              "\0" if no such info available
   10735 *==========================================================================*/
const char *QCamera3HardwareInterface::getEepromVersionInfo()
{
    // Points into the capability table; valid for the lifetime of
    // gCamCapability[mCameraId].
    return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
}
   10740 
   10741 /*===========================================================================
   10742 * FUNCTION   : getLdafCalib
   10743 *
   10744 * DESCRIPTION: Retrieve Laser AF calibration data
   10745 *
   10746 * PARAMETERS : None
   10747 *
   10748 * RETURN     : Two uint32_t describing laser AF calibration data
   10749 *              NULL if none is available.
   10750 *==========================================================================*/
   10751 const uint32_t *QCamera3HardwareInterface::getLdafCalib()
   10752 {
   10753     if (mLdafCalibExist) {
   10754         return &mLdafCalib[0];
   10755     } else {
   10756         return NULL;
   10757     }
   10758 }
   10759 
   10760 /*===========================================================================
   10761  * FUNCTION   : dynamicUpdateMetaStreamInfo
   10762  *
   10763  * DESCRIPTION: This function:
   10764  *             (1) stops all the channels
   10765  *             (2) returns error on pending requests and buffers
   10766  *             (3) sends metastream_info in setparams
   10767  *             (4) starts all channels
   10768  *             This is useful when sensor has to be restarted to apply any
   10769  *             settings such as frame rate from a different sensor mode
   10770  *
   10771  * PARAMETERS : None
   10772  *
   10773  * RETURN     : NO_ERROR on success
   10774  *              Error codes on failure
   10775  *
   10776  *==========================================================================*/
int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
{
    ATRACE_CALL();
    int rc = NO_ERROR;

    LOGD("E");

    // Step 1: stream-off every channel so the sensor mode can change safely.
    rc = stopAllChannels();
    if (rc < 0) {
        LOGE("stopAllChannels failed");
        return rc;
    }

    // Step 2: flush in-flight work back to the framework as errors; nothing
    // can complete across the restart.
    rc = notifyErrorForPendingRequests();
    if (rc < 0) {
        LOGE("notifyErrorForPendingRequests failed");
        return rc;
    }

    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
        // NOTE(review): postprocess_mask is printed with 0x%x; if
        // cam_feature_mask_t is 64-bit this truncates — confirm format width.
        LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
                "Format:%d",
                mStreamConfigInfo.type[i],
                mStreamConfigInfo.stream_sizes[i].width,
                mStreamConfigInfo.stream_sizes[i].height,
                mStreamConfigInfo.postprocess_mask[i],
                mStreamConfigInfo.format[i]);
    }

    /* Send meta stream info once again so that ISP can start */
    // Step 3: re-send stream config; failure is logged but not fatal — the
    // channels are still restarted below with the previous sensor mode.
    ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
            CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
            mParameters);
    if (rc < 0) {
        LOGE("set Metastreaminfo failed. Sensor mode does not change");
    }

    // Step 4: stream-on everything again.
    rc = startAllChannels();
    if (rc < 0) {
        LOGE("startAllChannels failed");
        return rc;
    }

    LOGD("X");
    return rc;
}
   10824 
   10825 /*===========================================================================
   10826  * FUNCTION   : stopAllChannels
   10827  *
   10828  * DESCRIPTION: This function stops (equivalent to stream-off) all channels
   10829  *
   10830  * PARAMETERS : None
   10831  *
   10832  * RETURN     : NO_ERROR on success
   10833  *              Error codes on failure
   10834  *
   10835  *==========================================================================*/
   10836 int32_t QCamera3HardwareInterface::stopAllChannels()
   10837 {
   10838     int32_t rc = NO_ERROR;
   10839 
   10840     LOGD("Stopping all channels");
   10841     // Stop the Streams/Channels
   10842     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   10843         it != mStreamInfo.end(); it++) {
   10844         QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
   10845         if (channel) {
   10846             channel->stop();
   10847         }
   10848         (*it)->status = INVALID;
   10849     }
   10850 
   10851     if (mSupportChannel) {
   10852         mSupportChannel->stop();
   10853     }
   10854     if (mAnalysisChannel) {
   10855         mAnalysisChannel->stop();
   10856     }
   10857     if (mRawDumpChannel) {
   10858         mRawDumpChannel->stop();
   10859     }
   10860     if (mMetadataChannel) {
   10861         /* If content of mStreamInfo is not 0, there is metadata stream */
   10862         mMetadataChannel->stop();
   10863     }
   10864 
   10865     LOGD("All channels stopped");
   10866     return rc;
   10867 }
   10868 
   10869 /*===========================================================================
   10870  * FUNCTION   : startAllChannels
   10871  *
   10872  * DESCRIPTION: This function starts (equivalent to stream-on) all channels
   10873  *
   10874  * PARAMETERS : None
   10875  *
   10876  * RETURN     : NO_ERROR on success
   10877  *              Error codes on failure
   10878  *
   10879  *==========================================================================*/
   10880 int32_t QCamera3HardwareInterface::startAllChannels()
   10881 {
   10882     int32_t rc = NO_ERROR;
   10883 
   10884     LOGD("Start all channels ");
   10885     // Start the Streams/Channels
   10886     if (mMetadataChannel) {
   10887         /* If content of mStreamInfo is not 0, there is metadata stream */
   10888         rc = mMetadataChannel->start();
   10889         if (rc < 0) {
   10890             LOGE("META channel start failed");
   10891             return rc;
   10892         }
   10893     }
   10894     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   10895         it != mStreamInfo.end(); it++) {
   10896         QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
   10897         if (channel) {
   10898             rc = channel->start();
   10899             if (rc < 0) {
   10900                 LOGE("channel start failed");
   10901                 return rc;
   10902             }
   10903         }
   10904     }
   10905     if (mAnalysisChannel) {
   10906         mAnalysisChannel->start();
   10907     }
   10908     if (mSupportChannel) {
   10909         rc = mSupportChannel->start();
   10910         if (rc < 0) {
   10911             LOGE("Support channel start failed");
   10912             return rc;
   10913         }
   10914     }
   10915     if (mRawDumpChannel) {
   10916         rc = mRawDumpChannel->start();
   10917         if (rc < 0) {
   10918             LOGE("RAW dump channel start failed");
   10919             return rc;
   10920         }
   10921     }
   10922 
   10923     LOGD("All channels started");
   10924     return rc;
   10925 }
   10926 
   10927 /*===========================================================================
   10928  * FUNCTION   : notifyErrorForPendingRequests
   10929  *
   10930  * DESCRIPTION: This function sends error for all the pending requests/buffers
   10931  *
   10932  * PARAMETERS : None
   10933  *
   10934  * RETURN     : Error codes
   10935  *              NO_ERROR on success
   10936  *
   10937  *==========================================================================*/
int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
{
    int32_t rc = NO_ERROR;
    unsigned int frameNum = 0;
    camera3_capture_result_t result;
    camera3_stream_buffer_t *pStream_Buf = NULL;

    memset(&result, 0, sizeof(camera3_capture_result_t));

    // Find the oldest pending request. Buffers belonging to frames OLDER than
    // it already had their result metadata delivered, so they only need
    // ERROR_BUFFER; frames at/after it need a full ERROR_REQUEST.
    if (mPendingRequestsList.size() > 0) {
        pendingRequestIterator i = mPendingRequestsList.begin();
        frameNum = i->frame_number;
    } else {
        /* There might still be pending buffers even though there are
         no pending requests. Setting the frameNum to MAX so that
         all the buffers with smaller frame numbers are returned */
        frameNum = UINT_MAX;
    }

    LOGH("Oldest frame num on mPendingRequestsList = %u",
       frameNum);

    // Walk the per-request pending-buffer map; entries are erased as they are
    // flushed, so iterators advance via erase(), not the loop header.
    for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {

        if (req->frame_number < frameNum) {
            // Send Error notify to frameworks for each buffer for which
            // metadata buffer is already sent
            // NOTE(review): size() is size_t but logged with %d — confirm the
            // logging macro tolerates this on 64-bit builds.
            LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
                req->frame_number, req->mPendingBufferList.size());

            // NOTE(review): NULL check is only meaningful if this is built
            // with -fno-exceptions; otherwise operator new[] throws.
            pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
            if (NULL == pStream_Buf) {
                LOGE("No memory for pending buffers array");
                return NO_MEMORY;
            }
            memset(pStream_Buf, 0,
                sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
            result.result = NULL;
            result.frame_number = req->frame_number;
            result.num_output_buffers = req->mPendingBufferList.size();
            result.output_buffers = pStream_Buf;

            // One ERROR_BUFFER notify per buffer, and collect each buffer
            // (marked STATUS_ERROR) into the capture result array.
            size_t index = 0;
            for (auto info = req->mPendingBufferList.begin();
                info != req->mPendingBufferList.end(); ) {

                camera3_notify_msg_t notify_msg;
                memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
                notify_msg.type = CAMERA3_MSG_ERROR;
                notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
                notify_msg.message.error.error_stream = info->stream;
                notify_msg.message.error.frame_number = req->frame_number;
                pStream_Buf[index].acquire_fence = -1;
                pStream_Buf[index].release_fence = -1;
                pStream_Buf[index].buffer = info->buffer;
                pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
                pStream_Buf[index].stream = info->stream;
                mCallbackOps->notify(mCallbackOps, &notify_msg);
                index++;
                // Remove buffer from list
                info = req->mPendingBufferList.erase(info);
            }

            // Remove this request from Map
            LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
                req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
            req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);

            mCallbackOps->process_capture_result(mCallbackOps, &result);

            delete [] pStream_Buf;
        } else {

            // Go through the pending requests info and send error request to framework
            pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning

            LOGH("Sending ERROR REQUEST for frame %d", req->frame_number);

            // Send error notify to frameworks
            // A single ERROR_REQUEST notify covers the whole frame.
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
            notify_msg.message.error.error_stream = NULL;
            notify_msg.message.error.frame_number = req->frame_number;
            mCallbackOps->notify(mCallbackOps, &notify_msg);

            pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
            if (NULL == pStream_Buf) {
                LOGE("No memory for pending buffers array");
                return NO_MEMORY;
            }
            memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());

            result.result = NULL;
            result.frame_number = req->frame_number;
            // The input buffer (if any) comes from the oldest pending request.
            result.input_buffer = i->input_buffer;
            result.num_output_buffers = req->mPendingBufferList.size();
            result.output_buffers = pStream_Buf;

            // Collect each pending buffer (marked STATUS_ERROR) into the
            // capture result; no per-buffer notify in this branch.
            size_t index = 0;
            for (auto info = req->mPendingBufferList.begin();
                info != req->mPendingBufferList.end(); ) {
                pStream_Buf[index].acquire_fence = -1;
                pStream_Buf[index].release_fence = -1;
                pStream_Buf[index].buffer = info->buffer;
                pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
                pStream_Buf[index].stream = info->stream;
                index++;
                // Remove buffer from list
                info = req->mPendingBufferList.erase(info);
            }

            // Remove this request from Map
            LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
                req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
            req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);

            mCallbackOps->process_capture_result(mCallbackOps, &result);
            delete [] pStream_Buf;
            // Retire the corresponding entry from the pending-request list.
            i = erasePendingRequest(i);
        }
    }

    /* Reset pending frame Drop list and requests list */
    mPendingFrameDropList.clear();

    // Defensive sweep: everything should already be erased above.
    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    mPendingReprocessResultList.clear();
    LOGH("Cleared all the pending buffers ");

    return rc;
}
   11075 
   11076 bool QCamera3HardwareInterface::isOnEncoder(
   11077         const cam_dimension_t max_viewfinder_size,
   11078         uint32_t width, uint32_t height)
   11079 {
   11080     return (width > (uint32_t)max_viewfinder_size.width ||
   11081             height > (uint32_t)max_viewfinder_size.height);
   11082 }
   11083 
   11084 /*===========================================================================
   11085  * FUNCTION   : setBundleInfo
   11086  *
   11087  * DESCRIPTION: Set bundle info for all streams that are bundle.
   11088  *
   11089  * PARAMETERS : None
   11090  *
   11091  * RETURN     : NO_ERROR on success
   11092  *              Error codes on failure
   11093  *==========================================================================*/
   11094 int32_t QCamera3HardwareInterface::setBundleInfo()
   11095 {
   11096     int32_t rc = NO_ERROR;
   11097 
   11098     if (mChannelHandle) {
   11099         cam_bundle_config_t bundleInfo;
   11100         memset(&bundleInfo, 0, sizeof(bundleInfo));
   11101         rc = mCameraHandle->ops->get_bundle_info(
   11102                 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
   11103         if (rc != NO_ERROR) {
   11104             LOGE("get_bundle_info failed");
   11105             return rc;
   11106         }
   11107         if (mAnalysisChannel) {
   11108             mAnalysisChannel->setBundleInfo(bundleInfo);
   11109         }
   11110         if (mSupportChannel) {
   11111             mSupportChannel->setBundleInfo(bundleInfo);
   11112         }
   11113         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   11114                 it != mStreamInfo.end(); it++) {
   11115             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
   11116             channel->setBundleInfo(bundleInfo);
   11117         }
   11118         if (mRawDumpChannel) {
   11119             mRawDumpChannel->setBundleInfo(bundleInfo);
   11120         }
   11121     }
   11122 
   11123     return rc;
   11124 }
   11125 
   11126 /*===========================================================================
   11127  * FUNCTION   : get_num_overall_buffers
   11128  *
   11129  * DESCRIPTION: Estimate number of pending buffers across all requests.
   11130  *
   11131  * PARAMETERS : None
   11132  *
   11133  * RETURN     : Number of overall pending buffers
   11134  *
   11135  *==========================================================================*/
   11136 uint32_t PendingBuffersMap::get_num_overall_buffers()
   11137 {
   11138     uint32_t sum_buffers = 0;
   11139     for (auto &req : mPendingBuffersInRequest) {
   11140         sum_buffers += req.mPendingBufferList.size();
   11141     }
   11142     return sum_buffers;
   11143 }
   11144 
/*===========================================================================
 * FUNCTION   : removeBuf
 *
 * DESCRIPTION: Remove a matching buffer from tracker. Searches every pending
 *              request for the first entry whose handle matches @buffer,
 *              erases that entry, and removes the request itself when its
 *              buffer list becomes empty. At most one entry is removed.
 *
 * PARAMETERS : @buffer: image buffer for the callback
 *
 * RETURN     : None
 *
 *==========================================================================*/
void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
{
    bool buffer_found = false;
    // NOTE: erase() invalidates the erased iterator, so after a removal
    // both loops break out immediately rather than continuing to iterate.
    for (auto req = mPendingBuffersInRequest.begin();
            req != mPendingBuffersInRequest.end(); req++) {
        for (auto k = req->mPendingBufferList.begin();
                k != req->mPendingBufferList.end(); k++ ) {
            if (k->buffer == buffer) {
                LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
                        req->frame_number, buffer);
                k = req->mPendingBufferList.erase(k);
                // Drop the whole request once its last buffer is gone.
                if (req->mPendingBufferList.empty()) {
                    // Remove this request from Map
                    req = mPendingBuffersInRequest.erase(req);
                }
                buffer_found = true;
                break;
            }
        }
        if (buffer_found) {
            break;
        }
    }
    LOGD("mPendingBuffersMap.num_overall_buffers = %d",
            get_num_overall_buffers());
}
   11181 
   11182 /*===========================================================================
   11183  * FUNCTION   : setPAAFSupport
   11184  *
   11185  * DESCRIPTION: Set the preview-assisted auto focus support bit in
   11186  *              feature mask according to stream type and filter
   11187  *              arrangement
   11188  *
   11189  * PARAMETERS : @feature_mask: current feature mask, which may be modified
   11190  *              @stream_type: stream type
   11191  *              @filter_arrangement: filter arrangement
   11192  *
   11193  * RETURN     : None
   11194  *==========================================================================*/
   11195 void QCamera3HardwareInterface::setPAAFSupport(
   11196         cam_feature_mask_t& feature_mask,
   11197         cam_stream_type_t stream_type,
   11198         cam_color_filter_arrangement_t filter_arrangement)
   11199 {
   11200     LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
   11201             feature_mask, stream_type, filter_arrangement);
   11202 
   11203     switch (filter_arrangement) {
   11204     case CAM_FILTER_ARRANGEMENT_RGGB:
   11205     case CAM_FILTER_ARRANGEMENT_GRBG:
   11206     case CAM_FILTER_ARRANGEMENT_GBRG:
   11207     case CAM_FILTER_ARRANGEMENT_BGGR:
   11208         if ((stream_type == CAM_STREAM_TYPE_CALLBACK) ||
   11209                 (stream_type == CAM_STREAM_TYPE_PREVIEW) ||
   11210                 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
   11211             feature_mask |= CAM_QCOM_FEATURE_PAAF;
   11212         }
   11213         break;
   11214     case CAM_FILTER_ARRANGEMENT_Y:
   11215         if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
   11216             feature_mask |= CAM_QCOM_FEATURE_PAAF;
   11217         }
   11218         break;
   11219     default:
   11220         break;
   11221     }
   11222 }
   11223 
   11224 /*===========================================================================
   11225 * FUNCTION   : getSensorMountAngle
   11226 *
   11227 * DESCRIPTION: Retrieve sensor mount angle
   11228 *
   11229 * PARAMETERS : None
   11230 *
   11231 * RETURN     : sensor mount angle in uint32_t
   11232 *==========================================================================*/
   11233 uint32_t QCamera3HardwareInterface::getSensorMountAngle()
   11234 {
   11235     return gCamCapability[mCameraId]->sensor_mount_angle;
   11236 }
   11237 
   11238 /*===========================================================================
   11239 * FUNCTION   : getRelatedCalibrationData
   11240 *
   11241 * DESCRIPTION: Retrieve related system calibration data
   11242 *
   11243 * PARAMETERS : None
   11244 *
   11245 * RETURN     : Pointer of related system calibration data
   11246 *==========================================================================*/
   11247 const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
   11248 {
   11249     return (const cam_related_system_calibration_data_t *)
   11250             &(gCamCapability[mCameraId]->related_cam_calibration);
   11251 }
   11252 }; //end namespace qcamera
   11253