Home | History | Annotate | Download | only in HAL3
      1 /* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
      2 *
      3 * Redistribution and use in source and binary forms, with or without
      4 * modification, are permitted provided that the following conditions are
      5 * met:
      6 *     * Redistributions of source code must retain the above copyright
      7 *       notice, this list of conditions and the following disclaimer.
      8 *     * Redistributions in binary form must reproduce the above
      9 *       copyright notice, this list of conditions and the following
     10 *       disclaimer in the documentation and/or other materials provided
     11 *       with the distribution.
     12 *     * Neither the name of The Linux Foundation nor the names of its
     13 *       contributors may be used to endorse or promote products derived
     14 *       from this software without specific prior written permission.
     15 *
     16 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
     17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
     18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
     19 * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
     20 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
     21 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
     22 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
     23 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
     24 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
     25 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
     26 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
     27 *
     28 */
     29 
     30 #define LOG_TAG "QCamera3HWI"
     31 //#define LOG_NDEBUG 0
     32 
     33 #define __STDC_LIMIT_MACROS
     34 
// To remove: legacy cutils/properties dependency (still needed by the property_get() reads below)
     36 #include <cutils/properties.h>
     37 
     38 // System dependencies
     39 #include <dlfcn.h>
     40 #include <fcntl.h>
     41 #include <stdio.h>
     42 #include <stdlib.h>
     43 #include <time.h>
     44 #include <sync/sync.h>
     45 #include "gralloc_priv.h"
     46 
     47 // Display dependencies
     48 #include "qdMetaData.h"
     49 
     50 // Camera dependencies
     51 #include "android/QCamera3External.h"
     52 #include "util/QCameraFlash.h"
     53 #include "QCamera3HWI.h"
     54 #include "QCamera3VendorTags.h"
     55 #include "QCameraTrace.h"
     56 
     57 extern "C" {
     58 #include "mm_camera_dbg.h"
     59 }
     60 #include "cam_cond.h"
     61 
     62 using namespace android;
     63 
     64 namespace qcamera {
     65 
// Accessor for the INDEX-th data pointer held by a camera memory object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

// Pipeline priming / frame-skip tuning, in frames.
#define EMPTY_PIPELINE_DELAY 2
// Number of partial metadata results delivered per capture.
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY     0

// Saturation (maximum pixel) values for the supported sensor bit depths.
#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

// UHD (4K) dimensions used to classify a video stream as 4K.
#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

// Largest stream dimensions for which EIS is supported.
#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

// Per-configuration stream count limits by stream category.
#define MAX_RAW_STREAMS        1
#define MAX_STALLING_STREAMS   1
#define MAX_PROCESSED_STREAMS  3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR    (30)
#define DEFAULT_VIDEO_FPS      (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE     (8)
// Elements per metering region tuple: (xmin, ymin, xmax, ymax, weight).
#define REGIONS_TUPLE_COUNT    5
#define HDR_PLUS_PERF_TIME_OUT  (7000) // milliseconds
// Threshold for detecting missing buffers, in seconds.
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define FLUSH_TIMEOUT 3
// Element count of a statically sized mapping table.
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

// Superset of post-processing feature bits advertised for HAL3 streams.
#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3   ( CAM_QCOM_FEATURE_DENOISE2D |\
                                              CAM_QCOM_FEATURE_CROP |\
                                              CAM_QCOM_FEATURE_ROTATION |\
                                              CAM_QCOM_FEATURE_SHARPNESS |\
                                              CAM_QCOM_FEATURE_SCALE |\
                                              CAM_QCOM_FEATURE_CAC |\
                                              CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length*/
#define PER_CONFIGURATION_SIZE_3 (3)

// Sentinel meaning "wait forever" for timed waits.
#define TIMEOUT_NEVER -1

// Whether to check for the GPU stride padding, or use the default
//#define CHECK_GPU_PIXEL_ALIGNMENT
    112 
// Per-sensor capability and static-metadata tables shared by all sessions,
// indexed by camera id.
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
// Guards global camera state; defined elsewhere in the HAL.
extern pthread_mutex_t gCamLock;
// HAL log verbosity; presumably refreshed by getLogLevel() from system
// properties — confirm against its definition.
volatile uint32_t gCamHal3LogLevel = 1;
// Count of currently open camera sessions; defined elsewhere.
extern uint8_t gNumCameraSessions;
    118 
// Property-string to cam_cds_mode lookup table (CDS configuration).
const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
    124 
// Android effect-mode enum <-> HAL effect-mode enum mapping.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};
    138 
// Android AWB-mode enum <-> HAL white-balance enum mapping.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};
    152 
// Android scene-mode enum <-> HAL scene-mode enum mapping.
// Note: STEADYPHOTO intentionally maps to the HAL's ANTISHAKE mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};
    173 
// Android AF-mode enum <-> HAL focus-mode enum mapping.
// AF_MODE_OFF appears twice on purpose: both CAM_FOCUS_MODE_OFF and
// CAM_FOCUS_MODE_FIXED report back to the framework as AF off.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};
    185 
// Android chromatic-aberration-correction mode <-> HAL CAC mode mapping.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};
    196 
// Android AE antibanding mode <-> HAL antibanding mode mapping.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};
    205 
// Android AE mode -> HAL flash mode mapping. AE_MODE_OFF and AE_MODE_ON both
// imply flash off; the REDEYE variant reuses AUTO on the HAL side.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};
    215 
// Android flash mode <-> HAL flash mode mapping.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};
    223 
// Android face-detect mode <-> HAL face-detect mode mapping.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE  },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};
    231 
// Android focus-distance calibration enum <-> HAL calibration enum mapping.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
      CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
      CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
      CAM_FOCUS_CALIBRATED }
};
    242 
// Android lens-state enum <-> HAL AF lens-state enum mapping.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY,    CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,        CAM_AF_LENS_STATE_MOVING}
};
    249 
// Supported JPEG thumbnail sizes as flat (width, height) pairs.
// (0, 0) is the standard Android entry meaning "no thumbnail".
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};
    258 
// Android sensor test-pattern mode <-> HAL test-pattern mode mapping.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,          CAM_TEST_PATTERN_OFF   },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,  CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,   CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,          CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,      CAM_TEST_PATTERN_CUSTOM1},
};
    269 
/* Since there is no mapping for all the options, some Android enums are not
 * listed. The order in this list is important: when mapping from HAL to
 * Android, the lookup traverses from lower to higher index, so for HAL values
 * that map to multiple Android values the first entry found wins.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};
    295 
// Requested frame rate (fps) -> HAL HFR (high frame rate) mode mapping.
const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};
    307 
// camera3_device_ops vtable handed to the camera framework. The NULL entries
// (register_stream_buffers, get_metadata_vendor_tag_ops) are legacy hooks
// that this HAL3 implementation intentionally does not provide.
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};
    319 
// Per-camera session ids, initialised to a recognisable sentinel value
// (0xDEADBEEF) until a real session id is assigned.
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
    322 
/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface. Initializes all
 *              channel/parameter members to their idle values, fills in the
 *              camera3_device_t handed back to the framework, creates the
 *              synchronization primitives, and reads debug/tuning system
 *              properties. No hardware is opened here (see openCamera()).
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *   @callbacks : framework module callbacks, stored for later notification
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mDummyBatchChannel(NULL),
      m_perfLock(),
      mCommon(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal control */
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mLdafCalibExist(false),
      mPowerHintEnabled(false),
      mLastCustIntentFrmNum(-1),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pRelCamSyncHeap(NULL),
      m_pRelCamSyncBuf(NULL),
      mAfTrigger()
{
    getLogLevel();
    m_perfLock.lock_init();
    mCommon.init(gCamCapability[cameraId]);
    // Fill in the camera3_device_t that openCamera() hands to the framework.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_5;
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    // Default request templates are built lazily; start with none cached.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    // Temporal noise reduction toggles for preview and video streams.
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "0");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);

    m_MobicatMask = (uint8_t)property_get_int32("persist.camera.mobicat", 0);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    // Default stride padding; overridden below if the GPU library reports one.
    mSurfaceStridePadding = CAM_PAD_TO_64;
#ifdef CHECK_GPU_PIXEL_ALIGNMENT
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
         if (LINK_get_surface_pixel_alignment) {
             mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
         }
         dlclose(lib_surface_utils);
    }
#endif
    m60HzZone = is60HzZone();
}
    456 
/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface. Tears down in a
 *              strict order: stop every stream/channel first, then delete
 *              channels, then unconfigure and deinit parameters, delete the
 *              channel handle, close the camera, and finally release pending
 *              request bookkeeping and synchronization primitives.
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    /* Turn off current power hint before acquiring perfLock in case they
     * conflict with each other */
    disablePowerHint();

    m_perfLock.lock_acq();

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    // All channels are stopped; now it is safe to delete them and the
    // stream_info records that own them.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }
    // mPictureChannel is owned via mStreamInfo and was deleted above;
    // just clear the alias.
    mPictureChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    // Drop any buffers/requests still tracked for in-flight captures.
    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    mPendingReprocessResultList.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    // Free any cached default request templates.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    m_perfLock.lock_rel();
    m_perfLock.lock_deinit();

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}
    588 
    589 /*===========================================================================
    590  * FUNCTION   : erasePendingRequest
    591  *
    592  * DESCRIPTION: function to erase a desired pending request after freeing any
    593  *              allocated memory
    594  *
    595  * PARAMETERS :
    596  *   @i       : iterator pointing to pending request to be erased
    597  *
    598  * RETURN     : iterator pointing to the next request
    599  *==========================================================================*/
    600 QCamera3HardwareInterface::pendingRequestIterator
    601         QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
    602 {
    603     if (i->input_buffer != NULL) {
    604         free(i->input_buffer);
    605         i->input_buffer = NULL;
    606     }
    607     if (i->settings != NULL)
    608         free_camera_metadata((camera_metadata_t*)i->settings);
    609     return mPendingRequestsList.erase(i);
    610 }
    611 
    612 /*===========================================================================
    613  * FUNCTION   : camEvtHandle
    614  *
    615  * DESCRIPTION: Function registered to mm-camera-interface to handle events
    616  *
    617  * PARAMETERS :
    618  *   @camera_handle : interface layer camera handle
    619  *   @evt           : ptr to event
    620  *   @user_data     : user data ptr
    621  *
    622  * RETURN     : none
    623  *==========================================================================*/
    624 void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
    625                                           mm_camera_event_t *evt,
    626                                           void *user_data)
    627 {
    628     QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    629     if (obj && evt) {
    630         switch(evt->server_event_type) {
    631             case CAM_EVENT_TYPE_DAEMON_DIED:
    632                 pthread_mutex_lock(&obj->mMutex);
    633                 obj->mState = ERROR;
    634                 pthread_mutex_unlock(&obj->mMutex);
    635                 LOGE("Fatal, camera daemon died");
    636                 break;
    637 
    638             case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
    639                 LOGD("HAL got request pull from Daemon");
    640                 pthread_mutex_lock(&obj->mMutex);
    641                 obj->mWokenUpByDaemon = true;
    642                 obj->unblockRequestIfNecessary();
    643                 pthread_mutex_unlock(&obj->mMutex);
    644                 break;
    645 
    646             default:
    647                 LOGW("Warning: Unhandled event %d",
    648                         evt->server_event_type);
    649                 break;
    650         }
    651     } else {
    652         LOGE("NULL user_data/evt");
    653     }
    654 }
    655 
    656 /*===========================================================================
    657  * FUNCTION   : openCamera
    658  *
    659  * DESCRIPTION: open camera
    660  *
    661  * PARAMETERS :
    662  *   @hw_device  : double ptr for camera device struct
    663  *
    664  * RETURN     : int32_t type of status
    665  *              NO_ERROR  -- success
 *              non-zero failure code
    667  *==========================================================================*/
    668 int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
    669 {
    670     int rc = 0;
    671     if (mState != CLOSED) {
    672         *hw_device = NULL;
    673         return PERMISSION_DENIED;
    674     }
    675 
    676     m_perfLock.lock_acq();
    677     LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
    678              mCameraId);
    679 
    680     rc = openCamera();
    681     if (rc == 0) {
    682         *hw_device = &mCameraDevice.common;
    683     } else
    684         *hw_device = NULL;
    685 
    686     m_perfLock.lock_rel();
    687     LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
    688              mCameraId, rc);
    689 
    690     if (rc == NO_ERROR) {
    691         mState = OPENED;
    692     }
    693     return rc;
    694 }
    695 
    696 /*===========================================================================
    697  * FUNCTION   : openCamera
    698  *
    699  * DESCRIPTION: open camera
    700  *
    701  * PARAMETERS : none
    702  *
    703  * RETURN     : int32_t type of status
    704  *              NO_ERROR  -- success
 *              non-zero failure code
    706  *==========================================================================*/
    707 int QCamera3HardwareInterface::openCamera()
    708 {
    709     int rc = 0;
    710     char value[PROPERTY_VALUE_MAX];
    711 
    712     KPI_ATRACE_CALL();
    713     if (mCameraHandle) {
    714         LOGE("Failure: Camera already opened");
    715         return ALREADY_EXISTS;
    716     }
    717 
    718     rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    719     if (rc < 0) {
    720         LOGE("Failed to reserve flash for camera id: %d",
    721                 mCameraId);
    722         return UNKNOWN_ERROR;
    723     }
    724 
    725     rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    726     if (rc) {
    727         LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
    728         return rc;
    729     }
    730 
    731     if (!mCameraHandle) {
    732         LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
    733         return -ENODEV;
    734     }
    735 
    736     rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
    737             camEvtHandle, (void *)this);
    738 
    739     if (rc < 0) {
    740         LOGE("Error, failed to register event callback");
    741         /* Not closing camera here since it is already handled in destructor */
    742         return FAILED_TRANSACTION;
    743     }
    744 
    745     mExifParams.debug_params =
    746             (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    747     if (mExifParams.debug_params) {
    748         memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    749     } else {
    750         LOGE("Out of Memory. Allocation failed for 3A debug exif params");
    751         return NO_MEMORY;
    752     }
    753     mFirstConfiguration = true;
    754 
    755     //Notify display HAL that a camera session is active.
    756     //But avoid calling the same during bootup because camera service might open/close
    757     //cameras at boot time during its initialization and display service will also internally
    758     //wait for camera service to initialize first while calling this display API, resulting in a
    759     //deadlock situation. Since boot time camera open/close calls are made only to fetch
    760     //capabilities, no need of this display bw optimization.
    761     //Use "service.bootanim.exit" property to know boot status.
    762     property_get("service.bootanim.exit", value, "0");
    763     if (atoi(value) == 1) {
    764         pthread_mutex_lock(&gCamLock);
    765         if (gNumCameraSessions++ == 0) {
    766             setCameraLaunchStatus(true);
    767         }
    768         pthread_mutex_unlock(&gCamLock);
    769     }
    770 
    771     //fill the session id needed while linking dual cam
    772     pthread_mutex_lock(&gCamLock);
    773     rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
    774         &sessionId[mCameraId]);
    775     pthread_mutex_unlock(&gCamLock);
    776 
    777     if (rc < 0) {
    778         LOGE("Error, failed to get sessiion id");
    779         return UNKNOWN_ERROR;
    780     } else {
    781         //Allocate related cam sync buffer
    782         //this is needed for the payload that goes along with bundling cmd for related
    783         //camera use cases
    784         m_pRelCamSyncHeap = new QCamera3HeapMemory(1);
    785         rc = m_pRelCamSyncHeap->allocate(sizeof(cam_sync_related_sensors_event_info_t));
    786         if(rc != OK) {
    787             rc = NO_MEMORY;
    788             LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
    789             return NO_MEMORY;
    790         }
    791 
    792         //Map memory for related cam sync buffer
    793         rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
    794                 CAM_MAPPING_BUF_TYPE_SYNC_RELATED_SENSORS_BUF,
    795                 m_pRelCamSyncHeap->getFd(0),
    796                 sizeof(cam_sync_related_sensors_event_info_t),
    797                 m_pRelCamSyncHeap->getPtr(0));
    798         if(rc < 0) {
    799             LOGE("Dualcam: failed to map Related cam sync buffer");
    800             rc = FAILED_TRANSACTION;
    801             return NO_MEMORY;
    802         }
    803         m_pRelCamSyncBuf =
    804                 (cam_sync_related_sensors_event_info_t*) DATA_PTR(m_pRelCamSyncHeap,0);
    805     }
    806 
    807     LOGH("mCameraId=%d",mCameraId);
    808 
    809     return NO_ERROR;
    810 }
    811 
    812 /*===========================================================================
    813  * FUNCTION   : closeCamera
    814  *
    815  * DESCRIPTION: close camera
    816  *
    817  * PARAMETERS : none
    818  *
    819  * RETURN     : int32_t type of status
    820  *              NO_ERROR  -- success
 *              non-zero failure code
    822  *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    // Tear down the active camera session: close the backend handle, release
    // the dual-cam sync heap and 3A debug exif params allocated in
    // openCamera(), drop the flash reservation, and mark the device CLOSED.
    // NOTE(review): mCameraHandle is dereferenced without a NULL check —
    // callers must only invoke this after a successful openCamera().
    KPI_ATRACE_CALL();
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
             mCameraId);
    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        // Last session out turns the global camera-launch status off.
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    // Free the related-cam sync buffer allocated in openCamera().
    if (NULL != m_pRelCamSyncHeap) {
        m_pRelCamSyncHeap->deallocate();
        delete m_pRelCamSyncHeap;
        m_pRelCamSyncHeap = NULL;
        m_pRelCamSyncBuf = NULL;
    }

    // Free the 3A debug exif params allocated in openCamera().
    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    // Best-effort: failure to release the flash is logged, not fatal.
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
         mCameraId, rc);
    return rc;
}
    871 
    872 /*===========================================================================
    873  * FUNCTION   : initialize
    874  *
    875  * DESCRIPTION: Initialize frameworks callback functions
    876  *
    877  * PARAMETERS :
    878  *   @callback_ops : callback function to frameworks
    879  *
    880  * RETURN     :
    881  *
    882  *==========================================================================*/
    883 int QCamera3HardwareInterface::initialize(
    884         const struct camera3_callback_ops *callback_ops)
    885 {
    886     ATRACE_CALL();
    887     int rc;
    888 
    889     LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    890     pthread_mutex_lock(&mMutex);
    891 
    892     // Validate current state
    893     switch (mState) {
    894         case OPENED:
    895             /* valid state */
    896             break;
    897         default:
    898             LOGE("Invalid state %d", mState);
    899             rc = -ENODEV;
    900             goto err1;
    901     }
    902 
    903     rc = initParameters();
    904     if (rc < 0) {
    905         LOGE("initParamters failed %d", rc);
    906         goto err1;
    907     }
    908     mCallbackOps = callback_ops;
    909 
    910     mChannelHandle = mCameraHandle->ops->add_channel(
    911             mCameraHandle->camera_handle, NULL, NULL, this);
    912     if (mChannelHandle == 0) {
    913         LOGE("add_channel failed");
    914         rc = -ENOMEM;
    915         pthread_mutex_unlock(&mMutex);
    916         return rc;
    917     }
    918 
    919     pthread_mutex_unlock(&mMutex);
    920     mCameraInitialized = true;
    921     mState = INITIALIZED;
    922     LOGI("X");
    923     return 0;
    924 
    925 err1:
    926     pthread_mutex_unlock(&mMutex);
    927     return rc;
    928 }
    929 
    930 /*===========================================================================
    931  * FUNCTION   : validateStreamDimensions
    932  *
    933  * DESCRIPTION: Check if the configuration requested are those advertised
    934  *
    935  * PARAMETERS :
    936  *   @stream_list : streams to be configured
    937  *
    938  * RETURN     :
    939  *
    940  *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    // Validate every requested stream size against the sizes this camera
    // advertises (raw table, picture table, or active array for ZSL/input).
    // Returns NO_ERROR if all sizes are supported, -EINVAL otherwise.
    int rc = NO_ERROR;
    size_t count = 0;

    camera3_stream_t *inputStream = NULL;
    /*
    * Loop through all streams to find the input stream, if it exists.
    * More than one input stream is a configuration error.
    */
    for (size_t i = 0; i< streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
    * Loop through all streams requested in configuration
    * Check if unsupported sizes have been requested on any of them
    */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        // A 90/270-degree rotation swaps the effective width and height, so
        // compare the swapped dimensions against the capability tables.
        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
        * Sizes are different for each type of stream format check against
        * appropriate table.
        */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            // Raw streams must exactly match an advertised raw dimension.
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            // JPEG (blob) streams are checked against the picture size table.
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            // ZSL/bidirectional/input streams may match the full active array
            // size directly; the inner break exits this switch case with the
            // size accepted.
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                                gCamCapability[mCameraId]->active_array_size.width) &&
                                ((int32_t)rotatedHeight ==
                                gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce ZSL stream
                 * set from frameworks always is full active array size
                 * but it is not clear from the spec if framework will always
                 * follow that, also we have logic to override to full array
                 * size, so keeping the logic lenient at the moment
                 */
            }
            // All other processed streams must match a picture table entry.
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                    MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                            ((int32_t)rotatedHeight ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}
   1052 
   1053 /*===========================================================================
   1054  * FUNCTION   : validateUsageFlags
   1055  *
   1056  * DESCRIPTION: Check if the configuration usage flags map to same internal format.
   1057  *
   1058  * PARAMETERS :
   1059  *   @stream_list : streams to be configured
   1060  *
   1061  * RETURN     :
   1062  *   NO_ERROR if the usage flags are supported
   1063  *   error code if usage flags are not supported
   1064  *
   1065  *==========================================================================*/
   1066 int QCamera3HardwareInterface::validateUsageFlags(
   1067         const camera3_stream_configuration_t* streamList)
   1068 {
   1069     for (size_t j = 0; j < streamList->num_streams; j++) {
   1070         const camera3_stream_t *newStream = streamList->streams[j];
   1071 
   1072         if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
   1073             (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
   1074              newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
   1075             continue;
   1076         }
   1077 
   1078         bool isVideo = IS_USAGE_VIDEO(newStream->usage);
   1079         bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
   1080         bool isZSL = IS_USAGE_ZSL(newStream->usage);
   1081         cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
   1082                 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height);
   1083         cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
   1084                 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height);
   1085         cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
   1086                 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height);
   1087 
   1088         // Color space for this camera device is guaranteed to be ITU_R_601_FR.
   1089         // So color spaces will always match.
   1090 
   1091         // Check whether underlying formats of shared streams match.
   1092         if (isVideo && isPreview && videoFormat != previewFormat) {
   1093             LOGE("Combined video and preview usage flag is not supported");
   1094             return -EINVAL;
   1095         }
   1096         if (isPreview && isZSL && previewFormat != zslFormat) {
   1097             LOGE("Combined preview and zsl usage flag is not supported");
   1098             return -EINVAL;
   1099         }
   1100         if (isVideo && isZSL && videoFormat != zslFormat) {
   1101             LOGE("Combined video and zsl usage flag is not supported");
   1102             return -EINVAL;
   1103         }
   1104     }
   1105     return NO_ERROR;
   1106 }
   1107 
   1108 /*===========================================================================
   1109  * FUNCTION   : validateUsageFlagsForEis
   1110  *
   1111  * DESCRIPTION: Check if the configuration usage flags conflict with Eis
   1112  *
   1113  * PARAMETERS :
   1114  *   @stream_list : streams to be configured
   1115  *
   1116  * RETURN     :
   1117  *   NO_ERROR if the usage flags are supported
   1118  *   error code if usage flags are not supported
   1119  *
   1120  *==========================================================================*/
   1121 int QCamera3HardwareInterface::validateUsageFlagsForEis(
   1122         const camera3_stream_configuration_t* streamList)
   1123 {
   1124     for (size_t j = 0; j < streamList->num_streams; j++) {
   1125         const camera3_stream_t *newStream = streamList->streams[j];
   1126 
   1127         bool isVideo = IS_USAGE_VIDEO(newStream->usage);
   1128         bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
   1129 
   1130         // Because EIS is "hard-coded" for certain use case, and current
   1131         // implementation doesn't support shared preview and video on the same
   1132         // stream, return failure if EIS is forced on.
   1133         if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
   1134             LOGE("Combined video and preview usage flag is not supported due to EIS");
   1135             return -EINVAL;
   1136         }
   1137     }
   1138     return NO_ERROR;
   1139 }
   1140 
   1141 
   1142 /*==============================================================================
   1143  * FUNCTION   : isSupportChannelNeeded
   1144  *
   1145  * DESCRIPTION: Simple heuristic func to determine if support channels is needed
   1146  *
   1147  * PARAMETERS :
   1148  *   @stream_list : streams to be configured
   1149  *   @stream_config_info : the config info for streams to be configured
   1150  *
 * RETURN     : Boolean true/false decision
   1152  *
   1153  *==========================================================================*/
   1154 bool QCamera3HardwareInterface::isSupportChannelNeeded(
   1155         camera3_stream_configuration_t *streamList,
   1156         cam_stream_size_info_t stream_config_info)
   1157 {
   1158     uint32_t i;
   1159     bool pprocRequested = false;
   1160     /* Check for conditions where PProc pipeline does not have any streams*/
   1161     for (i = 0; i < stream_config_info.num_streams; i++) {
   1162         if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
   1163                 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
   1164             pprocRequested = true;
   1165             break;
   1166         }
   1167     }
   1168 
   1169     if (pprocRequested == false )
   1170         return true;
   1171 
   1172     /* Dummy stream needed if only raw or jpeg streams present */
   1173     for (i = 0; i < streamList->num_streams; i++) {
   1174         switch(streamList->streams[i]->format) {
   1175             case HAL_PIXEL_FORMAT_RAW_OPAQUE:
   1176             case HAL_PIXEL_FORMAT_RAW10:
   1177             case HAL_PIXEL_FORMAT_RAW16:
   1178             case HAL_PIXEL_FORMAT_BLOB:
   1179                 break;
   1180             default:
   1181                 return false;
   1182         }
   1183     }
   1184     return true;
   1185 }
   1186 
   1187 /*==============================================================================
   1188  * FUNCTION   : getSensorOutputSize
   1189  *
 * DESCRIPTION: Get sensor output size based on current stream configuration
   1191  *
   1192  * PARAMETERS :
   1193  *   @sensor_dim : sensor output dimension (output)
   1194  *
   1195  * RETURN     : int32_t type of status
   1196  *              NO_ERROR  -- success
   1197  *              none-zero failure code
   1198  *
   1199  *==========================================================================*/
   1200 int32_t QCamera3HardwareInterface::getSensorOutputSize(cam_dimension_t &sensor_dim)
   1201 {
   1202     int32_t rc = NO_ERROR;
   1203 
   1204     cam_dimension_t max_dim = {0, 0};
   1205     for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
   1206         if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
   1207             max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
   1208         if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
   1209             max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
   1210     }
   1211 
   1212     clear_metadata_buffer(mParameters);
   1213 
   1214     rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
   1215             max_dim);
   1216     if (rc != NO_ERROR) {
   1217         LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
   1218         return rc;
   1219     }
   1220 
   1221     rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
   1222     if (rc != NO_ERROR) {
   1223         LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
   1224         return rc;
   1225     }
   1226 
   1227     clear_metadata_buffer(mParameters);
   1228     ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_RAW_DIMENSION);
   1229 
   1230     rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
   1231             mParameters);
   1232     if (rc != NO_ERROR) {
   1233         LOGE("Failed to get CAM_INTF_PARM_RAW_DIMENSION");
   1234         return rc;
   1235     }
   1236 
   1237     READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_RAW_DIMENSION, sensor_dim);
   1238     LOGH("sensor output dimension = %d x %d", sensor_dim.width, sensor_dim.height);
   1239 
   1240     return rc;
   1241 }
   1242 
   1243 /*==============================================================================
   1244  * FUNCTION   : enablePowerHint
   1245  *
   1246  * DESCRIPTION: enable single powerhint for preview and different video modes.
   1247  *
   1248  * PARAMETERS :
   1249  *
   1250  * RETURN     : NULL
   1251  *
   1252  *==========================================================================*/
   1253 void QCamera3HardwareInterface::enablePowerHint()
   1254 {
   1255     if (!mPowerHintEnabled) {
   1256         m_perfLock.powerHint(PowerHint::VIDEO_ENCODE, true);
   1257         mPowerHintEnabled = true;
   1258     }
   1259 }
   1260 
   1261 /*==============================================================================
   1262  * FUNCTION   : disablePowerHint
   1263  *
   1264  * DESCRIPTION: disable current powerhint.
   1265  *
   1266  * PARAMETERS :
   1267  *
   1268  * RETURN     : NULL
   1269  *
   1270  *==========================================================================*/
   1271 void QCamera3HardwareInterface::disablePowerHint()
   1272 {
   1273     if (mPowerHintEnabled) {
   1274         m_perfLock.powerHint(PowerHint::VIDEO_ENCODE, false);
   1275         mPowerHintEnabled = false;
   1276     }
   1277 }
   1278 
   1279 /*==============================================================================
   1280  * FUNCTION   : addToPPFeatureMask
   1281  *
   1282  * DESCRIPTION: add additional features to pp feature mask based on
   1283  *              stream type and usecase
   1284  *
   1285  * PARAMETERS :
   1286  *   @stream_format : stream type for feature mask
   1287  *   @stream_idx : stream idx within postprocess_mask list to change
   1288  *
   1289  * RETURN     : NULL
   1290  *
   1291  *==========================================================================*/
void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
        uint32_t stream_idx)
{
    // Reads the persist.camera.hal3.feature property (hex "0x..." or decimal)
    // and, based on stream_format, ORs extra post-processing feature bits
    // into mStreamConfigInfo.postprocess_mask[stream_idx].
    char feature_mask_value[PROPERTY_VALUE_MAX];
    cam_feature_mask_t feature_mask;
    int args_converted;
    int property_len;

    /* Get feature mask from property */
    property_len = property_get("persist.camera.hal3.feature",
            feature_mask_value, "0");
    // Values longer than two characters starting with "0x" parse as hex,
    // everything else as decimal.
    // NOTE(review): "%llx"/"%lld" assume cam_feature_mask_t is exactly
    // long-long sized, and "%lld" reads it as signed — confirm the typedef.
    if ((property_len > 2) && (feature_mask_value[0] == '0') &&
            (feature_mask_value[1] == 'x')) {
        args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
    } else {
        args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
    }
    // Bail out, leaving the stream's mask untouched, if the property did
    // not parse as a single number.
    if (1 != args_converted) {
        feature_mask = 0;
        LOGE("Wrong feature mask %s", feature_mask_value);
        return;
    }

    switch (stream_format) {
    case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
        /* Add LLVD to pp feature mask only if video hint is enabled */
        // SW TNR takes precedence over LLVD (SeeMore) when both bits are set.
        if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
            mStreamConfigInfo.postprocess_mask[stream_idx]
                    |= CAM_QTI_FEATURE_SW_TNR;
            LOGH("Added SW TNR to pp feature mask");
        } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
            mStreamConfigInfo.postprocess_mask[stream_idx]
                    |= CAM_QCOM_FEATURE_LLVD;
            LOGH("Added LLVD SeeMore to pp feature mask");
        }
        break;
    }
    default:
        break;
    }
    LOGD("PP feature mask %llx",
            mStreamConfigInfo.postprocess_mask[stream_idx]);
}
   1335 
   1336 /*==============================================================================
   1337  * FUNCTION   : updateFpsInPreviewBuffer
   1338  *
   1339  * DESCRIPTION: update FPS information in preview buffer.
   1340  *
   1341  * PARAMETERS :
   1342  *   @metadata    : pointer to metadata buffer
   1343  *   @frame_number: frame_number to look for in pending buffer list
   1344  *
   1345  * RETURN     : None
   1346  *
   1347  *==========================================================================*/
   1348 void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
   1349         uint32_t frame_number)
   1350 {
   1351     // Mark all pending buffers for this particular request
   1352     // with corresponding framerate information
   1353     for (List<PendingBuffersInRequest>::iterator req =
   1354             mPendingBuffersMap.mPendingBuffersInRequest.begin();
   1355             req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
   1356         for(List<PendingBufferInfo>::iterator j =
   1357                 req->mPendingBufferList.begin();
   1358                 j != req->mPendingBufferList.end(); j++) {
   1359             QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
   1360             if ((req->frame_number == frame_number) &&
   1361                 (channel->getStreamTypeMask() &
   1362                 (1U << CAM_STREAM_TYPE_PREVIEW))) {
   1363                 IF_META_AVAILABLE(cam_fps_range_t, float_range,
   1364                     CAM_INTF_PARM_FPS_RANGE, metadata) {
   1365                     int32_t cameraFps = float_range->max_fps;
   1366                     struct private_handle_t *priv_handle =
   1367                         (struct private_handle_t *)(*(j->buffer));
   1368                     setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
   1369                 }
   1370             }
   1371         }
   1372     }
   1373 }
   1374 
   1375 /*==============================================================================
   1376  * FUNCTION   : updateTimeStampInPendingBuffers
   1377  *
   1378  * DESCRIPTION: update timestamp in display metadata for all pending buffers
   1379  *              of a frame number
   1380  *
   1381  * PARAMETERS :
   1382  *   @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
   1383  *   @timestamp   : timestamp to be set
   1384  *
   1385  * RETURN     : None
   1386  *
   1387  *==========================================================================*/
   1388 void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
   1389         uint32_t frameNumber, nsecs_t timestamp)
   1390 {
   1391     for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
   1392             req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
   1393         if (req->frame_number != frameNumber)
   1394             continue;
   1395 
   1396         for (auto k = req->mPendingBufferList.begin();
   1397                 k != req->mPendingBufferList.end(); k++ ) {
   1398             struct private_handle_t *priv_handle =
   1399                     (struct private_handle_t *) (*(k->buffer));
   1400             setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
   1401         }
   1402     }
   1403     return;
   1404 }
   1405 
   1406 /*===========================================================================
   1407  * FUNCTION   : configureStreams
   1408  *
   1409  * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
   1410  *              and output streams.
   1411  *
   1412  * PARAMETERS :
   1413  *   @stream_list : streams to be configured
   1414  *
   1415  * RETURN     :
   1416  *
   1417  *==========================================================================*/
   1418 int QCamera3HardwareInterface::configureStreams(
   1419         camera3_stream_configuration_t *streamList)
   1420 {
   1421     ATRACE_CALL();
   1422     int rc = 0;
   1423 
   1424     // Acquire perfLock before configure streams
   1425     m_perfLock.lock_acq();
   1426     rc = configureStreamsPerfLocked(streamList);
   1427     m_perfLock.lock_rel();
   1428 
   1429     return rc;
   1430 }
   1431 
   1432 /*===========================================================================
   1433  * FUNCTION   : configureStreamsPerfLocked
   1434  *
   1435  * DESCRIPTION: configureStreams while perfLock is held.
   1436  *
   1437  * PARAMETERS :
   1438  *   @stream_list : streams to be configured
   1439  *
   1440  * RETURN     : int32_t type of status
   1441  *              NO_ERROR  -- success
   1442  *              none-zero failure code
   1443  *==========================================================================*/
   1444 int QCamera3HardwareInterface::configureStreamsPerfLocked(
   1445         camera3_stream_configuration_t *streamList)
   1446 {
   1447     ATRACE_CALL();
   1448     int rc = 0;
   1449 
   1450     // Sanity check stream_list
   1451     if (streamList == NULL) {
   1452         LOGE("NULL stream configuration");
   1453         return BAD_VALUE;
   1454     }
   1455     if (streamList->streams == NULL) {
   1456         LOGE("NULL stream list");
   1457         return BAD_VALUE;
   1458     }
   1459 
   1460     if (streamList->num_streams < 1) {
   1461         LOGE("Bad number of streams requested: %d",
   1462                 streamList->num_streams);
   1463         return BAD_VALUE;
   1464     }
   1465 
   1466     if (streamList->num_streams >= MAX_NUM_STREAMS) {
   1467         LOGE("Maximum number of streams %d exceeded: %d",
   1468                 MAX_NUM_STREAMS, streamList->num_streams);
   1469         return BAD_VALUE;
   1470     }
   1471 
   1472     rc = validateUsageFlags(streamList);
   1473     if (rc != NO_ERROR) {
   1474         return rc;
   1475     }
   1476 
   1477     mOpMode = streamList->operation_mode;
   1478     LOGD("mOpMode: %d", mOpMode);
   1479 
   1480     /* first invalidate all the steams in the mStreamList
   1481      * if they appear again, they will be validated */
   1482     for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
   1483             it != mStreamInfo.end(); it++) {
   1484         QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
   1485         if (channel) {
   1486           channel->stop();
   1487         }
   1488         (*it)->status = INVALID;
   1489     }
   1490 
   1491     if (mRawDumpChannel) {
   1492         mRawDumpChannel->stop();
   1493         delete mRawDumpChannel;
   1494         mRawDumpChannel = NULL;
   1495     }
   1496 
   1497     if (mSupportChannel)
   1498         mSupportChannel->stop();
   1499 
   1500     if (mAnalysisChannel) {
   1501         mAnalysisChannel->stop();
   1502     }
   1503     if (mMetadataChannel) {
   1504         /* If content of mStreamInfo is not 0, there is metadata stream */
   1505         mMetadataChannel->stop();
   1506     }
   1507     if (mChannelHandle) {
   1508         mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
   1509                 mChannelHandle);
   1510         LOGD("stopping channel %d", mChannelHandle);
   1511     }
   1512 
   1513     pthread_mutex_lock(&mMutex);
   1514 
   1515     // Check state
   1516     switch (mState) {
   1517         case INITIALIZED:
   1518         case CONFIGURED:
   1519         case STARTED:
   1520             /* valid state */
   1521             break;
   1522         default:
   1523             LOGE("Invalid state %d", mState);
   1524             pthread_mutex_unlock(&mMutex);
   1525             return -ENODEV;
   1526     }
   1527 
   1528     /* Check whether we have video stream */
   1529     m_bIs4KVideo = false;
   1530     m_bIsVideo = false;
   1531     m_bEisSupportedSize = true;
   1532     m_bTnrEnabled = false;
   1533     bool isZsl = false;
   1534     bool isPreview = false;
   1535     uint32_t videoWidth = 0U;
   1536     uint32_t videoHeight = 0U;
   1537     size_t rawStreamCnt = 0;
   1538     size_t stallStreamCnt = 0;
   1539     size_t processedStreamCnt = 0;
   1540     // Number of streams on ISP encoder path
   1541     size_t numStreamsOnEncoder = 0;
   1542     size_t numYuv888OnEncoder = 0;
   1543     bool bYuv888OverrideJpeg = false;
   1544     cam_dimension_t largeYuv888Size = {0, 0};
   1545     cam_dimension_t maxViewfinderSize = {0, 0};
   1546     bool bJpegExceeds4K = false;
   1547     bool bJpegOnEncoder = false;
   1548     bool bUseCommonFeatureMask = false;
   1549     cam_feature_mask_t commonFeatureMask = 0;
   1550     bool bSmallJpegSize = false;
   1551     uint32_t width_ratio;
   1552     uint32_t height_ratio;
   1553     maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
   1554     camera3_stream_t *inputStream = NULL;
   1555     bool isJpeg = false;
   1556     cam_dimension_t jpegSize = {0, 0};
   1557 
   1558     cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
   1559 
   1560     /*EIS configuration*/
   1561     bool eisSupported = false;
   1562     bool oisSupported = false;
   1563     int32_t margin_index = -1;
   1564     uint8_t eis_prop_set;
   1565     uint32_t maxEisWidth = 0;
   1566     uint32_t maxEisHeight = 0;
   1567 
   1568     memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
   1569 
   1570     size_t count = IS_TYPE_MAX;
   1571     count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
   1572     for (size_t i = 0; i < count; i++) {
   1573         if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) {
   1574             eisSupported = true;
   1575             margin_index = (int32_t)i;
   1576             break;
   1577         }
   1578     }
   1579 
   1580     count = CAM_OPT_STAB_MAX;
   1581     count = MIN(gCamCapability[mCameraId]->optical_stab_modes_count, count);
   1582     for (size_t i = 0; i < count; i++) {
   1583         if (gCamCapability[mCameraId]->optical_stab_modes[i] ==  CAM_OPT_STAB_ON) {
   1584             oisSupported = true;
   1585             break;
   1586         }
   1587     }
   1588 
   1589     if (eisSupported) {
   1590         maxEisWidth = MAX_EIS_WIDTH;
   1591         maxEisHeight = MAX_EIS_HEIGHT;
   1592     }
   1593 
   1594     /* EIS setprop control */
   1595     char eis_prop[PROPERTY_VALUE_MAX];
   1596     memset(eis_prop, 0, sizeof(eis_prop));
   1597     property_get("persist.camera.eis.enable", eis_prop, "0");
   1598     eis_prop_set = (uint8_t)atoi(eis_prop);
   1599 
   1600     m_bEisEnable = eis_prop_set && (!oisSupported && eisSupported) &&
   1601             (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
   1602             (gCamCapability[mCameraId]->position != CAM_POSITION_FRONT);
   1603 
   1604     /* stream configurations */
   1605     for (size_t i = 0; i < streamList->num_streams; i++) {
   1606         camera3_stream_t *newStream = streamList->streams[i];
   1607         LOGI("stream[%d] type = %d, format = %d, width = %d, "
   1608                 "height = %d, rotation = %d, usage = 0x%x",
   1609                  i, newStream->stream_type, newStream->format,
   1610                 newStream->width, newStream->height, newStream->rotation,
   1611                 newStream->usage);
   1612         if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
   1613                 newStream->stream_type == CAMERA3_STREAM_INPUT){
   1614             isZsl = true;
   1615         }
   1616         if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
   1617                 IS_USAGE_PREVIEW(newStream->usage)) {
   1618             isPreview = true;
   1619         }
   1620 
   1621         if (newStream->stream_type == CAMERA3_STREAM_INPUT){
   1622             inputStream = newStream;
   1623         }
   1624 
   1625         if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
   1626             isJpeg = true;
   1627             jpegSize.width = newStream->width;
   1628             jpegSize.height = newStream->height;
   1629             if (newStream->width > VIDEO_4K_WIDTH ||
   1630                     newStream->height > VIDEO_4K_HEIGHT)
   1631                 bJpegExceeds4K = true;
   1632         }
   1633 
   1634         if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
   1635                 (IS_USAGE_PREVIEW(newStream->usage) || IS_USAGE_VIDEO(newStream->usage))) {
   1636             if (IS_USAGE_VIDEO(newStream->usage)) {
   1637                 videoWidth = newStream->width;
   1638                 videoHeight = newStream->height;
   1639                 m_bIsVideo = true;
   1640                 if ((VIDEO_4K_WIDTH <= newStream->width) &&
   1641                         (VIDEO_4K_HEIGHT <= newStream->height)) {
   1642                     m_bIs4KVideo = true;
   1643                 }
   1644             }
   1645             m_bEisSupportedSize &= (newStream->width <= maxEisWidth) &&
   1646                                   (newStream->height <= maxEisHeight);
   1647         }
   1648         if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
   1649                 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
   1650             switch (newStream->format) {
   1651             case HAL_PIXEL_FORMAT_BLOB:
   1652                 stallStreamCnt++;
   1653                 if (isOnEncoder(maxViewfinderSize, newStream->width,
   1654                         newStream->height)) {
   1655                     numStreamsOnEncoder++;
   1656                     bJpegOnEncoder = true;
   1657                 }
   1658                 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
   1659                         newStream->width);
   1660                 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
   1661                         newStream->height);;
   1662                 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
   1663                         "FATAL: max_downscale_factor cannot be zero and so assert");
   1664                 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
   1665                     (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
   1666                     LOGH("Setting small jpeg size flag to true");
   1667                     bSmallJpegSize = true;
   1668                 }
   1669                 break;
   1670             case HAL_PIXEL_FORMAT_RAW10:
   1671             case HAL_PIXEL_FORMAT_RAW_OPAQUE:
   1672             case HAL_PIXEL_FORMAT_RAW16:
   1673                 rawStreamCnt++;
   1674                 break;
   1675             case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
   1676                 processedStreamCnt++;
   1677                 if (isOnEncoder(maxViewfinderSize, newStream->width,
   1678                         newStream->height)) {
   1679                     if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
   1680                             !IS_USAGE_ZSL(newStream->usage)) {
   1681                         commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
   1682                     }
   1683                     numStreamsOnEncoder++;
   1684                 }
   1685                 break;
   1686             case HAL_PIXEL_FORMAT_YCbCr_420_888:
   1687                 processedStreamCnt++;
   1688                 if (isOnEncoder(maxViewfinderSize, newStream->width,
   1689                         newStream->height)) {
   1690                     // If Yuv888 size is not greater than 4K, set feature mask
   1691                     // to SUPERSET so that it support concurrent request on
   1692                     // YUV and JPEG.
   1693                     if (newStream->width <= VIDEO_4K_WIDTH &&
   1694                             newStream->height <= VIDEO_4K_HEIGHT) {
   1695                         commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
   1696                     }
   1697                     numStreamsOnEncoder++;
   1698                     numYuv888OnEncoder++;
   1699                     largeYuv888Size.width = newStream->width;
   1700                     largeYuv888Size.height = newStream->height;
   1701                 }
   1702                 break;
   1703             default:
   1704                 processedStreamCnt++;
   1705                 if (isOnEncoder(maxViewfinderSize, newStream->width,
   1706                         newStream->height)) {
   1707                     commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
   1708                     numStreamsOnEncoder++;
   1709                 }
   1710                 break;
   1711             }
   1712 
   1713         }
   1714     }
   1715 
   1716     if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
   1717         pthread_mutex_unlock(&mMutex);
   1718         return -EINVAL;
   1719     }
   1720     /* Logic to enable/disable TNR based on specific config size/etc.*/
   1721     if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
   1722             ((videoWidth == 1920 && videoHeight == 1080) ||
   1723             (videoWidth == 1280 && videoHeight == 720)) &&
   1724             (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
   1725         m_bTnrEnabled = true;
   1726 
   1727     /* Check if num_streams is sane */
   1728     if (stallStreamCnt > MAX_STALLING_STREAMS ||
   1729             rawStreamCnt > MAX_RAW_STREAMS ||
   1730             processedStreamCnt > MAX_PROCESSED_STREAMS) {
   1731         LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
   1732                  stallStreamCnt, rawStreamCnt, processedStreamCnt);
   1733         pthread_mutex_unlock(&mMutex);
   1734         return -EINVAL;
   1735     }
   1736     /* Check whether we have zsl stream or 4k video case */
   1737     if (isZsl && m_bIsVideo) {
   1738         LOGE("Currently invalid configuration ZSL&Video!");
   1739         pthread_mutex_unlock(&mMutex);
   1740         return -EINVAL;
   1741     }
   1742     /* Check if stream sizes are sane */
   1743     if (numStreamsOnEncoder > 2) {
   1744         LOGE("Number of streams on ISP encoder path exceeds limits of 2");
   1745         pthread_mutex_unlock(&mMutex);
   1746         return -EINVAL;
   1747     } else if (1 < numStreamsOnEncoder){
   1748         bUseCommonFeatureMask = true;
   1749         LOGH("Multiple streams above max viewfinder size, common mask needed");
   1750     }
   1751 
   1752     /* Check if BLOB size is greater than 4k in 4k recording case */
   1753     if (m_bIs4KVideo && bJpegExceeds4K) {
   1754         LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
   1755         pthread_mutex_unlock(&mMutex);
   1756         return -EINVAL;
   1757     }
   1758 
   1759     // When JPEG and preview streams share VFE output, CPP will not apply CAC2
   1760     // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
   1761     // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
   1762     // is not true. Otherwise testMandatoryOutputCombinations will fail with following
   1763     // configurations:
   1764     //    {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
   1765     //    {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
   1766     //    (These two configurations will not have CAC2 enabled even in HQ modes.)
   1767     if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
   1768         ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
   1769                 __func__);
   1770         pthread_mutex_unlock(&mMutex);
   1771         return -EINVAL;
   1772     }
   1773 
   1774     // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
   1775     // the YUV stream's size is greater or equal to the JPEG size, set common
   1776     // postprocess mask to NONE, so that we can take advantage of postproc bypass.
   1777     if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
   1778             jpegSize.width, jpegSize.height) &&
   1779             largeYuv888Size.width > jpegSize.width &&
   1780             largeYuv888Size.height > jpegSize.height) {
   1781         bYuv888OverrideJpeg = true;
   1782     } else if (!isJpeg && numStreamsOnEncoder > 1) {
   1783         commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
   1784     }
   1785 
   1786     LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
   1787             maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
   1788             commonFeatureMask);
   1789     LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
   1790             numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
   1791 
   1792     rc = validateStreamDimensions(streamList);
   1793     if (rc == NO_ERROR) {
   1794         rc = validateStreamRotations(streamList);
   1795     }
   1796     if (rc != NO_ERROR) {
   1797         LOGE("Invalid stream configuration requested!");
   1798         pthread_mutex_unlock(&mMutex);
   1799         return rc;
   1800     }
   1801 
   1802     camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
   1803     camera3_stream_t *jpegStream = NULL;
   1804     for (size_t i = 0; i < streamList->num_streams; i++) {
   1805         camera3_stream_t *newStream = streamList->streams[i];
   1806         LOGH("newStream type = %d, stream format = %d "
   1807                 "stream size : %d x %d, stream rotation = %d",
   1808                  newStream->stream_type, newStream->format,
   1809                 newStream->width, newStream->height, newStream->rotation);
   1810         //if the stream is in the mStreamList validate it
   1811         bool stream_exists = false;
   1812         for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
   1813                 it != mStreamInfo.end(); it++) {
   1814             if ((*it)->stream == newStream) {
   1815                 QCamera3ProcessingChannel *channel =
   1816                     (QCamera3ProcessingChannel*)(*it)->stream->priv;
   1817                 stream_exists = true;
   1818                 if (channel)
   1819                     delete channel;
   1820                 (*it)->status = VALID;
   1821                 (*it)->stream->priv = NULL;
   1822                 (*it)->channel = NULL;
   1823             }
   1824         }
   1825         if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
   1826             //new stream
   1827             stream_info_t* stream_info;
   1828             stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
   1829             if (!stream_info) {
   1830                LOGE("Could not allocate stream info");
   1831                rc = -ENOMEM;
   1832                pthread_mutex_unlock(&mMutex);
   1833                return rc;
   1834             }
   1835             stream_info->stream = newStream;
   1836             stream_info->status = VALID;
   1837             stream_info->channel = NULL;
   1838             mStreamInfo.push_back(stream_info);
   1839         }
   1840         /* Covers Opaque ZSL and API1 F/W ZSL */
   1841         if (IS_USAGE_ZSL(newStream->usage)
   1842                 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
   1843             if (zslStream != NULL) {
   1844                 LOGE("Multiple input/reprocess streams requested!");
   1845                 pthread_mutex_unlock(&mMutex);
   1846                 return BAD_VALUE;
   1847             }
   1848             zslStream = newStream;
   1849         }
   1850         /* Covers YUV reprocess */
   1851         if (inputStream != NULL) {
   1852             if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
   1853                     && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
   1854                     && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
   1855                     && inputStream->width == newStream->width
   1856                     && inputStream->height == newStream->height) {
   1857                 if (zslStream != NULL) {
   1858                     /* This scenario indicates multiple YUV streams with same size
   1859                      * as input stream have been requested, since zsl stream handle
   1860                      * is solely use for the purpose of overriding the size of streams
   1861                      * which share h/w streams we will just make a guess here as to
   1862                      * which of the stream is a ZSL stream, this will be refactored
   1863                      * once we make generic logic for streams sharing encoder output
   1864                      */
   1865                     LOGH("Warning, Multiple ip/reprocess streams requested!");
   1866                 }
   1867                 zslStream = newStream;
   1868             }
   1869         }
   1870         if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
   1871             jpegStream = newStream;
   1872         }
   1873     }
   1874 
   1875     /* If a zsl stream is set, we know that we have configured at least one input or
   1876        bidirectional stream */
   1877     if (NULL != zslStream) {
   1878         mInputStreamInfo.dim.width = (int32_t)zslStream->width;
   1879         mInputStreamInfo.dim.height = (int32_t)zslStream->height;
   1880         mInputStreamInfo.format = zslStream->format;
   1881         mInputStreamInfo.usage = zslStream->usage;
   1882         LOGD("Input stream configured! %d x %d, format %d, usage %d",
   1883                  mInputStreamInfo.dim.width,
   1884                 mInputStreamInfo.dim.height,
   1885                 mInputStreamInfo.format, mInputStreamInfo.usage);
   1886     }
   1887 
   1888     cleanAndSortStreamInfo();
   1889     if (mMetadataChannel) {
   1890         delete mMetadataChannel;
   1891         mMetadataChannel = NULL;
   1892     }
   1893     if (mSupportChannel) {
   1894         delete mSupportChannel;
   1895         mSupportChannel = NULL;
   1896     }
   1897 
   1898     if (mAnalysisChannel) {
   1899         delete mAnalysisChannel;
   1900         mAnalysisChannel = NULL;
   1901     }
   1902 
   1903     if (mDummyBatchChannel) {
   1904         delete mDummyBatchChannel;
   1905         mDummyBatchChannel = NULL;
   1906     }
   1907 
   1908     //Create metadata channel and initialize it
   1909     cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
   1910     setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
   1911             gCamCapability[mCameraId]->color_arrangement);
   1912     mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
   1913                     mChannelHandle, mCameraHandle->ops, captureResultCb,
   1914                     setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
   1915     if (mMetadataChannel == NULL) {
   1916         LOGE("failed to allocate metadata channel");
   1917         rc = -ENOMEM;
   1918         pthread_mutex_unlock(&mMutex);
   1919         return rc;
   1920     }
   1921     rc = mMetadataChannel->initialize(IS_TYPE_NONE);
   1922     if (rc < 0) {
   1923         LOGE("metadata channel initialization failed");
   1924         delete mMetadataChannel;
   1925         mMetadataChannel = NULL;
   1926         pthread_mutex_unlock(&mMutex);
   1927         return rc;
   1928     }
   1929 
   1930     // Create analysis stream all the time, even when h/w support is not available
   1931     {
   1932         cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
   1933         cam_analysis_info_t analysisInfo;
   1934         rc = mCommon.getAnalysisInfo(
   1935                 FALSE,
   1936                 TRUE,
   1937                 analysisFeatureMask,
   1938                 &analysisInfo);
   1939         if (rc != NO_ERROR) {
   1940             LOGE("getAnalysisInfo failed, ret = %d", rc);
   1941             pthread_mutex_unlock(&mMutex);
   1942             return rc;
   1943         }
   1944 
   1945         cam_color_filter_arrangement_t analysis_color_arrangement =
   1946                 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
   1947                 CAM_FILTER_ARRANGEMENT_Y :
   1948                 gCamCapability[mCameraId]->color_arrangement);
   1949         setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
   1950                 analysis_color_arrangement);
   1951 
   1952         mAnalysisChannel = new QCamera3SupportChannel(
   1953                 mCameraHandle->camera_handle,
   1954                 mChannelHandle,
   1955                 mCameraHandle->ops,
   1956                 &analysisInfo.analysis_padding_info,
   1957                 analysisFeatureMask,
   1958                 CAM_STREAM_TYPE_ANALYSIS,
   1959                 &analysisInfo.analysis_max_res,
   1960                 (analysisInfo.analysis_format
   1961                 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
   1962                 : CAM_FORMAT_YUV_420_NV21),
   1963                 analysisInfo.hw_analysis_supported,
   1964                 this,
   1965                 0); // force buffer count to 0
   1966         if (!mAnalysisChannel) {
   1967             LOGE("H/W Analysis channel cannot be created");
   1968             pthread_mutex_unlock(&mMutex);
   1969             return -ENOMEM;
   1970         }
   1971     }
   1972 
   1973     bool isRawStreamRequested = false;
   1974     memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
   1975     /* Allocate channel objects for the requested streams */
   1976     for (size_t i = 0; i < streamList->num_streams; i++) {
   1977         camera3_stream_t *newStream = streamList->streams[i];
   1978         uint32_t stream_usage = newStream->usage;
   1979         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
   1980         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
   1981         if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
   1982                 || IS_USAGE_ZSL(newStream->usage)) &&
   1983             newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
   1984             mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
   1985             if (bUseCommonFeatureMask) {
   1986                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
   1987                         commonFeatureMask;
   1988             } else {
   1989                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
   1990                         CAM_QCOM_FEATURE_NONE;
   1991             }
   1992 
   1993         } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
   1994                 LOGH("Input stream configured, reprocess config");
   1995         } else {
   1996             //for non zsl streams find out the format
   1997             switch (newStream->format) {
   1998             case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
   1999             {
   2000                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
   2001                         CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
   2002                 /* add additional features to pp feature mask */
   2003                 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
   2004                         mStreamConfigInfo.num_streams);
   2005 
   2006                 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
   2007                         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
   2008                                 CAM_STREAM_TYPE_VIDEO;
   2009                     if (m_bTnrEnabled && m_bTnrVideo) {
   2010                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
   2011                             CAM_QCOM_FEATURE_CPP_TNR;
   2012                         //TNR and CDS are mutually exclusive. So reset CDS from feature mask
   2013                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
   2014                                 ~CAM_QCOM_FEATURE_CDS;
   2015                     }
   2016                 } else {
   2017                         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
   2018                             CAM_STREAM_TYPE_PREVIEW;
   2019                     if (m_bTnrEnabled && m_bTnrPreview) {
   2020                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
   2021                                 CAM_QCOM_FEATURE_CPP_TNR;
   2022                         //TNR and CDS are mutually exclusive. So reset CDS from feature mask
   2023                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
   2024                                 ~CAM_QCOM_FEATURE_CDS;
   2025                     }
   2026                     padding_info.width_padding = mSurfaceStridePadding;
   2027                     padding_info.height_padding = CAM_PAD_TO_2;
   2028                 }
   2029                 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
   2030                         (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
   2031                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
   2032                             newStream->height;
   2033                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
   2034                             newStream->width;
   2035                 }
   2036             }
   2037             break;
   2038             case HAL_PIXEL_FORMAT_YCbCr_420_888:
   2039                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
   2040                 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
   2041                     if (bUseCommonFeatureMask)
   2042                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
   2043                                 commonFeatureMask;
   2044                     else
   2045                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
   2046                                 CAM_QCOM_FEATURE_NONE;
   2047                 } else {
   2048                     mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
   2049                             CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
   2050                 }
   2051             break;
            // JPEG (BLOB) stream: mapped to the SNAPSHOT stream type.
            case HAL_PIXEL_FORMAT_BLOB:
                mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
                // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
                if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
                     mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
                             CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
                } else {
                    // Encoder-sized JPEG shares the aggregated feature mask
                    // when one was computed for the configuration.
                    if (bUseCommonFeatureMask &&
                            isOnEncoder(maxViewfinderSize, newStream->width,
                            newStream->height)) {
                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
                    } else {
                        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
                    }
                }
                // When the JPEG is actually sourced from another stream
                // (ZSL / 4K video / large YUV888), report that source's
                // dimensions in the stream config instead of the BLOB size.
                if (isZsl) {
                    if (zslStream) {
                        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
                                (int32_t)zslStream->width;
                        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
                                (int32_t)zslStream->height;
                    } else {
                        // ZSL was requested but no ZSL stream exists in this
                        // configuration: reject the whole configure call.
                        LOGE("Error, No ZSL stream identified");
                        pthread_mutex_unlock(&mMutex);
                        return -EINVAL;
                    }
                } else if (m_bIs4KVideo) {
                    // 4K video snapshot is taken from the video stream.
                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
                } else if (bYuv888OverrideJpeg) {
                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
                            (int32_t)largeYuv888Size.width;
                    mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
                            (int32_t)largeYuv888Size.height;
                }
                break;
            // RAW streams bypass post-processing entirely.
            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
            case HAL_PIXEL_FORMAT_RAW16:
            case HAL_PIXEL_FORMAT_RAW10:
                mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
                // Remember that the app asked for RAW so the debug raw-dump
                // channel is not created redundantly later.
                isRawStreamRequested = true;
                break;
            default:
                // Unrecognized pixel format: fall back to DEFAULT type with
                // no post-processing features.
                mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
                break;
   2099             }
   2100         }
   2101 
        // Adjust the just-chosen postprocess mask for PAAF based on the
        // stream type and the sensor's color filter arrangement.
        setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
                (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
                gCamCapability[mCameraId]->color_arrangement);
   2105 
   2106         if (newStream->priv == NULL) {
   2107             //New stream, construct channel
            // Set gralloc usage flags on the framework stream according to
            // its direction (input / bidirectional / output).
            switch (newStream->stream_type) {
            case CAMERA3_STREAM_INPUT:
                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
                break;
            case CAMERA3_STREAM_BIDIRECTIONAL:
                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
                    GRALLOC_USAGE_HW_CAMERA_WRITE;
                break;
            case CAMERA3_STREAM_OUTPUT:
                /* For video encoding stream, set read/write rarely
                 * flag so that they may be set to un-cached */
                if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
                    newStream->usage |=
                         (GRALLOC_USAGE_SW_READ_RARELY |
                         GRALLOC_USAGE_SW_WRITE_RARELY |
                         GRALLOC_USAGE_HW_CAMERA_WRITE);
                else if (IS_USAGE_ZSL(newStream->usage))
                {
                    // Usage already carries ZSL flags; nothing more to add.
                    LOGD("ZSL usage flag skipping");
                }
                else if (newStream == zslStream
                        || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
                    newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
                } else
                    newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
                break;
            default:
                LOGE("Invalid stream_type %d", newStream->stream_type);
                break;
            }
   2139 
            // Instantiate the HAL channel object backing this framework
            // stream, keyed on pixel format. The channel pointer is stashed
            // in newStream->priv for later lookups.
            // NOTE(review): the NULL checks after `new` below are only
            // meaningful with a non-throwing allocator — confirm the build's
            // operator new semantics.
            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
                QCamera3ProcessingChannel *channel = NULL;
                switch (newStream->format) {
                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
                    // Encoder stream in constrained high-speed (HFR) mode:
                    // no heap buffers, higher inflight count.
                    if ((newStream->usage &
                            private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
                            (streamList->operation_mode ==
                            CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
                    ) {
                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
                                mChannelHandle, mCameraHandle->ops, captureResultCb,
                                setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
                                this,
                                newStream,
                                (cam_stream_type_t)
                                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
                                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
                                mMetadataChannel,
                                0); //heap buffers are not required for HFR video channel
                        if (channel == NULL) {
                            LOGE("allocation of channel failed");
                            pthread_mutex_unlock(&mMutex);
                            return -ENOMEM;
                        }
                        //channel->getNumBuffers() will return 0 here so use
                        //MAX_INFLIGHT_HFR_REQUESTS
                        newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
                        newStream->priv = channel;
                        LOGI("num video buffers in HFR mode: %d",
                                 MAX_INFLIGHT_HFR_REQUESTS);
                    } else {
                        /* Copy stream contents in HFR preview only case to create
                         * dummy batch channel so that sensor streaming is in
                         * HFR mode */
                        if (!m_bIsVideo && (streamList->operation_mode ==
                                CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
                            mDummyBatchStream = *newStream;
                            mDummyBatchStream.usage = GRALLOC_USAGE_HW_VIDEO_ENCODER;
                        }
                        channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
                                mChannelHandle, mCameraHandle->ops, captureResultCb,
                                setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
                                this,
                                newStream,
                                (cam_stream_type_t)
                                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
                                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
                                mMetadataChannel,
                                MAX_INFLIGHT_REQUESTS);
                        if (channel == NULL) {
                            LOGE("allocation of channel failed");
                            pthread_mutex_unlock(&mMutex);
                            return -ENOMEM;
                        }
                        newStream->max_buffers = MAX_INFLIGHT_60FPS_REQUESTS;
                        newStream->priv = channel;
                    }
                    break;
                case HAL_PIXEL_FORMAT_YCbCr_420_888: {
                    // App-visible YUV stream -> dedicated YUV channel; uses
                    // the (possibly stream-specific) padding_info.
                    channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
                            mChannelHandle,
                            mCameraHandle->ops, captureResultCb,
                            setBufferErrorStatus, &padding_info,
                            this,
                            newStream,
                            (cam_stream_type_t)
                                    mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
                            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
                            mMetadataChannel);
                    if (channel == NULL) {
                        LOGE("allocation of YUV channel failed");
                        pthread_mutex_unlock(&mMutex);
                        return -ENOMEM;
                    }
                    newStream->max_buffers = channel->getNumBuffers();
                    newStream->priv = channel;
                    break;
                }
                case HAL_PIXEL_FORMAT_RAW_OPAQUE:
                case HAL_PIXEL_FORMAT_RAW16:
                case HAL_PIXEL_FORMAT_RAW10:
                    // RAW channel; last arg selects RAW16 unpacked handling.
                    mRawChannel = new QCamera3RawChannel(
                            mCameraHandle->camera_handle, mChannelHandle,
                            mCameraHandle->ops, captureResultCb,
                            setBufferErrorStatus, &padding_info,
                            this, newStream,
                            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
                            mMetadataChannel,
                            (newStream->format == HAL_PIXEL_FORMAT_RAW16));
                    if (mRawChannel == NULL) {
                        LOGE("allocation of raw channel failed");
                        pthread_mutex_unlock(&mMutex);
                        return -ENOMEM;
                    }
                    newStream->max_buffers = mRawChannel->getNumBuffers();
                    newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
                    break;
                case HAL_PIXEL_FORMAT_BLOB:
                    // Max live snapshot inflight buffer is 1. This is to mitigate
                    // frame drop issues for video snapshot. The more buffers being
                    // allocated, the more frame drops there are.
                    mPictureChannel = new QCamera3PicChannel(
                            mCameraHandle->camera_handle, mChannelHandle,
                            mCameraHandle->ops, captureResultCb,
                            setBufferErrorStatus, &padding_info, this, newStream,
                            mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
                            m_bIs4KVideo, isZsl, mMetadataChannel,
                            (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
                    if (mPictureChannel == NULL) {
                        LOGE("allocation of channel failed");
                        pthread_mutex_unlock(&mMutex);
                        return -ENOMEM;
                    }
                    newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
                    newStream->max_buffers = mPictureChannel->getNumBuffers();
                    // Feed the snapshot size recorded earlier (may have been
                    // overridden for ZSL / 4K video / YUV888 sources).
                    mPictureChannel->overrideYuvSize(
                            mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
                            mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
                    break;

                default:
                    LOGE("not a supported format 0x%x", newStream->format);
                    pthread_mutex_unlock(&mMutex);
                    return -EINVAL;
                }
            } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
                // Input (reprocess) streams get no channel here; only a
                // buffer count is assigned.
                newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
            } else {
                LOGE("Error, Unknown stream type");
                pthread_mutex_unlock(&mMutex);
                return -EINVAL;
            }
   2273 
            // If the channel's default format for this stream type is UBWC,
            // request UBWC allocation from gralloc as well.
            QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
            if (channel != NULL && channel->isUBWCEnabled()) {
                cam_format_t fmt = channel->getStreamDefaultFormat(
                        mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
                        newStream->width, newStream->height);
                if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
                    newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
                }
            }

            // Record the newly created channel against this stream in the
            // HAL's stream info list.
            for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
                    it != mStreamInfo.end(); it++) {
                if ((*it)->stream == newStream) {
                    (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
                    break;
                }
            }
   2291         } else {
   2292             // Channel already exists for this stream
   2293             // Do nothing for now
   2294         }
        // Restore the default capability padding for the next iteration
        // (it may have been overridden for this stream above).
        padding_info = gCamCapability[mCameraId]->padding_info;

        /* Do not add entries for input stream in metastream info
         * since there is no real stream associated with it
         */
        if (newStream->stream_type != CAMERA3_STREAM_INPUT)
            mStreamConfigInfo.num_streams++;
   2302     }
   2303 
    //RAW DUMP channel
    // Created only for debug raw dumping, and only when the app did not
    // already request a RAW stream of its own.
    if (mEnableRawDump && isRawStreamRequested == false){
        cam_dimension_t rawDumpSize;
        rawDumpSize = getMaxRawSize(mCameraId);
        cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
        setPAAFSupport(rawDumpFeatureMask,
                CAM_STREAM_TYPE_RAW,
                gCamCapability[mCameraId]->color_arrangement);
        mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
                                  mChannelHandle,
                                  mCameraHandle->ops,
                                  rawDumpSize,
                                  &padding_info,
                                  this, rawDumpFeatureMask);
        if (!mRawDumpChannel) {
            LOGE("Raw Dump channel cannot be created");
            pthread_mutex_unlock(&mMutex);
            return -ENOMEM;
        }
    }
   2324 
   2325 
    // Append an ANALYSIS stream entry to the backend stream config for the
    // existing analysis channel.
    if (mAnalysisChannel) {
        cam_analysis_info_t analysisInfo;
        memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
                CAM_STREAM_TYPE_ANALYSIS;
        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
        rc = mCommon.getAnalysisInfo(FALSE, TRUE,
                mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
                &analysisInfo);
        if (rc != NO_ERROR) {
            LOGE("getAnalysisInfo failed, ret = %d", rc);
            pthread_mutex_unlock(&mMutex);
            return rc;
        }
        // Y-only analysis formats use a Y color arrangement for the PAAF
        // decision instead of the sensor's Bayer arrangement.
        cam_color_filter_arrangement_t analysis_color_arrangement =
                (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
                CAM_FILTER_ARRANGEMENT_Y :
                gCamCapability[mCameraId]->color_arrangement);
        setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
                mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
                analysis_color_arrangement);

        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
                analysisInfo.analysis_max_res;
        mStreamConfigInfo.num_streams++;
    }
   2353 
    // Create the dummy "support" (callback) channel when the stream mix
    // requires one, e.g. to satisfy backend constraints.
    if (isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
        cam_analysis_info_t supportInfo;
        memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
        cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
        setPAAFSupport(callbackFeatureMask,
                CAM_STREAM_TYPE_CALLBACK,
                gCamCapability[mCameraId]->color_arrangement);
        rc = mCommon.getAnalysisInfo(FALSE, TRUE, callbackFeatureMask, &supportInfo);
        if (rc != NO_ERROR) {
            LOGE("getAnalysisInfo failed, ret = %d", rc);
            pthread_mutex_unlock(&mMutex);
            return rc;
        }
        mSupportChannel = new QCamera3SupportChannel(
                mCameraHandle->camera_handle,
                mChannelHandle,
                mCameraHandle->ops,
                &gCamCapability[mCameraId]->padding_info,
                callbackFeatureMask,
                CAM_STREAM_TYPE_CALLBACK,
                &QCamera3SupportChannel::kDim,
                CAM_FORMAT_YUV_420_NV21,
                supportInfo.hw_analysis_supported,
                this, 0);
        if (!mSupportChannel) {
            LOGE("dummy channel cannot be created");
            pthread_mutex_unlock(&mMutex);
            return -ENOMEM;
        }
    }
   2384 
    // Append the support channel's fixed-size CALLBACK entry to the backend
    // stream config.
    if (mSupportChannel) {
        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
                QCamera3SupportChannel::kDim;
        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
                CAM_STREAM_TYPE_CALLBACK;
        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
        setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
                mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
                gCamCapability[mCameraId]->color_arrangement);
        mStreamConfigInfo.num_streams++;
    }
   2397 
    // Append the raw-dump channel's RAW entry (max raw size, no PP) to the
    // backend stream config.
    if (mRawDumpChannel) {
        cam_dimension_t rawSize;
        rawSize = getMaxRawSize(mCameraId);
        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
                rawSize;
        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
                CAM_STREAM_TYPE_RAW;
        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
                CAM_QCOM_FEATURE_NONE;
        setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
                mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
                gCamCapability[mCameraId]->color_arrangement);
        mStreamConfigInfo.num_streams++;
    }
    /* In HFR mode, if video stream is not added, create a dummy channel so that
     * ISP can create a batch mode even for preview only case. This channel is
     * never 'start'ed (no stream-on), it is only 'initialized'  */
    if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
            !m_bIsVideo) {
        cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
        setPAAFSupport(dummyFeatureMask,
                CAM_STREAM_TYPE_VIDEO,
                gCamCapability[mCameraId]->color_arrangement);
        // mDummyBatchStream was populated earlier from the preview stream
        // (with encoder usage) in the channel-creation pass.
        mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
                mChannelHandle,
                mCameraHandle->ops, captureResultCb,
                setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
                this,
                &mDummyBatchStream,
                CAM_STREAM_TYPE_VIDEO,
                dummyFeatureMask,
                mMetadataChannel);
        // Non-fatal: preview falls back to a non-HFR sensor mode.
        if (NULL == mDummyBatchChannel) {
            LOGE("creation of mDummyBatchChannel failed."
                    "Preview will use non-hfr sensor mode ");
        }
    }
    // Append the dummy batch channel's VIDEO entry to the backend stream
    // config so the sensor can run in batch (HFR) mode.
    if (mDummyBatchChannel) {
        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
                mDummyBatchStream.width;
        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
                mDummyBatchStream.height;
        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
                CAM_STREAM_TYPE_VIDEO;
        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
        setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
                mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
                gCamCapability[mCameraId]->color_arrangement);
        mStreamConfigInfo.num_streams++;
    }
   2449 
    // Buffer bounds for the whole configuration; 0 max means "no limit"
    // for the 4K video case.
    mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
    mStreamConfigInfo.buffer_info.max_buffers =
            m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;

    /* Initialize mPendingRequestInfo and mPendingBuffersMap */
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    mPendingFrameDropList.clear();
    // Initialize/Reset the pending buffers list
    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();

    mPendingReprocessResultList.clear();

    mCurJpegMeta.clear();
    //Get min frame duration for this streams configuration
    deriveMinFrameDuration();

    // Update state
    mState = CONFIGURED;
   2474 
   2475     if (streamList->session_parameters != nullptr) {
        // Wrap the framework-provided session parameters for lookups below.
        CameraMetadata meta;
        meta = streamList->session_parameters;

        // send an unconfigure to the backend so that the isp
        // resources are deallocated
        if (!mFirstConfiguration) {
            cam_stream_size_info_t stream_config_info;
            int32_t hal_version = CAM_HAL_V3;
            // A zeroed stream info (num_streams == 0) tells the backend to
            // tear down the previous stream configuration.
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers =
                    MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
                    CAM_INTF_PARM_HAL_VERSION, hal_version);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
                    CAM_INTF_META_STREAM_INFO, stream_config_info);
            rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
                    mParameters);
            if (rc < 0) {
                LOGE("set_parms for unconfigure failed");
                pthread_mutex_unlock(&mMutex);
                return rc;
            }
        }
        /* get eis information for stream configuration */
        // Default IS type comes from the persist.camera.is_type property
        // ("0" => IS_TYPE_NONE unless overridden below).
        cam_is_type_t is_type;
        char is_type_value[PROPERTY_VALUE_MAX];
        property_get("persist.camera.is_type", is_type_value, "0");
        is_type = static_cast<cam_is_type_t>(atoi(is_type_value));

        int32_t hal_version = CAM_HAL_V3;
        clear_metadata_buffer(mParameters);
        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, mCaptureIntent);

        uint8_t fwkVideoStabMode=0;
        if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
            fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
        }
        //If EIS is enabled, turn it on for video
        // EIS is suppressed when the AV timer is in use.
        bool setEis = m_bEisEnable && (m_bIsVideo || fwkVideoStabMode) && m_bEisSupportedSize &&
                !meta.exists(QCAMERA3_USE_AV_TIMER);
        int32_t vsMode;
        vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
            rc = BAD_VALUE;
        }
   2525 
        //IS type will be 0 unless EIS is supported. If EIS is supported
        //it could either be 1 or 4 depending on the stream and video size
        if (setEis) {
            // NOTE(review): setEis above already requires m_bEisSupportedSize
            // to be true, so the IS_TYPE_DIS branch below looks unreachable —
            // confirm before relying on it.
            if (!m_bEisSupportedSize) {
                is_type = IS_TYPE_DIS;
            } else {
                is_type = IS_TYPE_EIS_2_0;
            }
            mStreamConfigInfo.is_type = is_type;
        } else {
            mStreamConfigInfo.is_type = IS_TYPE_NONE;
        }
   2538 
        // Batch the full stream configuration and enable tintless correction.
        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
                CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
        int32_t tintless_value = 1;
        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
                CAM_INTF_PARM_TINTLESS, tintless_value);
        //Disable CDS for HFR mode or if DIS/EIS is on.
        //CDS is a session parameter in the backend/ISP, so need to be set/reset
        //after every configure_stream
        if((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
                (m_bIsVideo)) {
            int32_t cds = CAM_CDS_MODE_OFF;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
                    CAM_INTF_PARM_CDS_MODE, cds))
                LOGE("Failed to disable CDS for HFR mode");

        }
   2555 
        // Enable the AV timer either from the debug property override or
        // from the framework-supplied vendor tag.
        if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
            uint8_t* use_av_timer = NULL;

            if (m_debug_avtimer){
                // Debug property takes precedence over the metadata tag.
                use_av_timer = &m_debug_avtimer;
            }
            else{
                use_av_timer =
                    meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
            }

            if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
                rc = BAD_VALUE;
            }
        }
   2571 
        setMobicat();

        /* Set fps and hfr mode while sending meta stream info so that sensor
         * can configure appropriate streaming mode */
        mHFRVideoFps = DEFAULT_VIDEO_FPS;
        mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
        mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
        if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
            rc = setHalFpsRange(meta, mParameters);
            if (rc == NO_ERROR) {
                int32_t max_fps =
                    (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
                if (mBatchSize) {
                    /* For HFR, more buffers are dequeued upfront to improve the performance */
                    mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
                    mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
                } else if (max_fps == 60) {
                    /* for 60 fps use case increase inflight requests */
                    mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
                    mMaxInFlightRequests = MAX_INFLIGHT_60FPS_REQUESTS;
                } else if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
                    /* for non 60 fps video use cases, set min = max inflight requests to
                    avoid frame drops due to degraded system performance */
                    mMinInFlightRequests = MAX_INFLIGHT_REQUESTS;
                }
            }
            else {
                // Non-fatal: keep the default inflight bounds.
                LOGE("setHalFpsRange failed");
            }
        }
        memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));


        //TODO: validate the arguments, HSV scenemode should have only the
        //advertised fps ranges

        /*set the capture intent, hal version, tintless, stream info,
         *and disable parameters to the backend*/
        LOGD("set_parms META_STREAM_INFO " );
        for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
            // NOTE(review): pp_mask is printed with %x; if cam_feature_mask_t
            // is 64-bit this truncates the logged value — consider PRIx64.
            LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x "
                    "Format:%d",
                    mStreamConfigInfo.type[i],
                    mStreamConfigInfo.stream_sizes[i].width,
                    mStreamConfigInfo.stream_sizes[i].height,
                    mStreamConfigInfo.postprocess_mask[i],
                    mStreamConfigInfo.format[i]);
        }

        // Push the whole batched parameter set to the backend in one call.
        rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
                    mParameters);
        if (rc < 0) {
            // Non-fatal here; rc is propagated by the caller's flow.
            LOGE("set_parms failed for hal version, stream info");
        }
   2626 
   2627         cam_dimension_t sensor_dim;
   2628         memset(&sensor_dim, 0, sizeof(sensor_dim));
   2629         rc = getSensorOutputSize(sensor_dim);
   2630         if (rc != NO_ERROR) {
   2631             LOGE("Failed to get sensor output size");
   2632             pthread_mutex_unlock(&mMutex);
   2633             goto error_exit;
   2634         }
   2635 
   2636         mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
   2637                 gCamCapability[mCameraId]->active_array_size.height,
   2638                 sensor_dim.width, sensor_dim.height);
   2639 
   2640         /* Set batchmode before initializing channel. Since registerBuffer
   2641          * internally initializes some of the channels, better set batchmode
   2642          * even before first register buffer */
   2643         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   2644             it != mStreamInfo.end(); it++) {
   2645             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
   2646             if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
   2647                     && mBatchSize) {
   2648                 rc = channel->setBatchSize(mBatchSize);
   2649                 //Disable per frame map unmap for HFR/batchmode case
   2650                 rc |= channel->setPerFrameMapUnmap(false);
   2651                 if (NO_ERROR != rc) {
   2652                     LOGE("Channel init failed %d", rc);
   2653                     pthread_mutex_unlock(&mMutex);
   2654                     goto error_exit;
   2655                 }
   2656             }
   2657         }
   2658 
   2659         //First initialize all streams
   2660         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   2661             it != mStreamInfo.end(); it++) {
   2662             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
   2663             if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
   2664                ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
   2665                setEis)
   2666                 rc = channel->initialize(is_type);
   2667             else {
   2668                 rc = channel->initialize(IS_TYPE_NONE);
   2669             }
   2670             if (NO_ERROR != rc) {
   2671                 LOGE("Channel initialization failed %d", rc);
   2672                 pthread_mutex_unlock(&mMutex);
   2673                 goto error_exit;
   2674             }
   2675         }
   2676 
   2677         if (mRawDumpChannel) {
   2678             rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
   2679             if (rc != NO_ERROR) {
   2680                 LOGE("Error: Raw Dump Channel init failed");
   2681                 pthread_mutex_unlock(&mMutex);
   2682                 goto error_exit;
   2683             }
   2684         }
   2685         if (mSupportChannel) {
   2686             rc = mSupportChannel->initialize(IS_TYPE_NONE);
   2687             if (rc < 0) {
   2688                 LOGE("Support channel initialization failed");
   2689                 pthread_mutex_unlock(&mMutex);
   2690                 goto error_exit;
   2691             }
   2692         }
   2693         if (mAnalysisChannel) {
   2694             rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
   2695             if (rc < 0) {
   2696                 LOGE("Analysis channel initialization failed");
   2697                 pthread_mutex_unlock(&mMutex);
   2698                 goto error_exit;
   2699             }
   2700         }
   2701         if (mDummyBatchChannel) {
   2702             rc = mDummyBatchChannel->setBatchSize(mBatchSize);
   2703             if (rc < 0) {
   2704                 LOGE("mDummyBatchChannel setBatchSize failed");
   2705                 pthread_mutex_unlock(&mMutex);
   2706                 goto error_exit;
   2707             }
   2708             rc = mDummyBatchChannel->initialize(is_type);
   2709             if (rc < 0) {
   2710                 LOGE("mDummyBatchChannel initialization failed");
   2711                 pthread_mutex_unlock(&mMutex);
   2712                 goto error_exit;
   2713             }
   2714         }
   2715 
   2716         // Set bundle info
   2717         rc = setBundleInfo();
   2718         if (rc < 0) {
   2719             LOGE("setBundleInfo failed %d", rc);
   2720             pthread_mutex_unlock(&mMutex);
   2721             goto error_exit;
   2722         }
   2723 
   2724     }
   2725 
   2726     pthread_mutex_unlock(&mMutex);
   2727 
   2728 error_exit:
   2729 
   2730     return rc;
   2731 }
   2732 
   2733 /*===========================================================================
   2734  * FUNCTION   : validateCaptureRequest
   2735  *
   2736  * DESCRIPTION: validate a capture request from camera service
   2737  *
   2738  * PARAMETERS :
   2739  *   @request : request from framework to process
   2740  *
 * RETURN     : NO_ERROR if the request is valid; BAD_VALUE otherwise
   2742  *
   2743  *==========================================================================*/
   2744 int QCamera3HardwareInterface::validateCaptureRequest(
   2745                     camera3_capture_request_t *request)
   2746 {
   2747     ssize_t idx = 0;
   2748     const camera3_stream_buffer_t *b;
   2749     CameraMetadata meta;
   2750 
   2751     /* Sanity check the request */
   2752     if (request == NULL) {
   2753         LOGE("NULL capture request");
   2754         return BAD_VALUE;
   2755     }
   2756 
   2757     if ((request->settings == NULL) && (mState == CONFIGURED)) {
   2758         /*settings cannot be null for the first request*/
   2759         return BAD_VALUE;
   2760     }
   2761 
   2762     uint32_t frameNumber = request->frame_number;
   2763     if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
   2764         LOGE("Request %d: No output buffers provided!",
   2765                 __FUNCTION__, frameNumber);
   2766         return BAD_VALUE;
   2767     }
   2768     if (request->num_output_buffers >= MAX_NUM_STREAMS) {
   2769         LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
   2770                  request->num_output_buffers, MAX_NUM_STREAMS);
   2771         return BAD_VALUE;
   2772     }
   2773     if (request->input_buffer != NULL) {
   2774         b = request->input_buffer;
   2775         if (b->status != CAMERA3_BUFFER_STATUS_OK) {
   2776             LOGE("Request %d: Buffer %ld: Status not OK!",
   2777                      frameNumber, (long)idx);
   2778             return BAD_VALUE;
   2779         }
   2780         if (b->release_fence != -1) {
   2781             LOGE("Request %d: Buffer %ld: Has a release fence!",
   2782                      frameNumber, (long)idx);
   2783             return BAD_VALUE;
   2784         }
   2785         if (b->buffer == NULL) {
   2786             LOGE("Request %d: Buffer %ld: NULL buffer handle!",
   2787                      frameNumber, (long)idx);
   2788             return BAD_VALUE;
   2789         }
   2790     }
   2791 
   2792     // Validate all buffers
   2793     b = request->output_buffers;
   2794     do {
   2795         QCamera3ProcessingChannel *channel =
   2796                 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
   2797         if (channel == NULL) {
   2798             LOGE("Request %d: Buffer %ld: Unconfigured stream!",
   2799                      frameNumber, (long)idx);
   2800             return BAD_VALUE;
   2801         }
   2802         if (b->status != CAMERA3_BUFFER_STATUS_OK) {
   2803             LOGE("Request %d: Buffer %ld: Status not OK!",
   2804                      frameNumber, (long)idx);
   2805             return BAD_VALUE;
   2806         }
   2807         if (b->release_fence != -1) {
   2808             LOGE("Request %d: Buffer %ld: Has a release fence!",
   2809                      frameNumber, (long)idx);
   2810             return BAD_VALUE;
   2811         }
   2812         if (b->buffer == NULL) {
   2813             LOGE("Request %d: Buffer %ld: NULL buffer handle!",
   2814                      frameNumber, (long)idx);
   2815             return BAD_VALUE;
   2816         }
   2817         if (*(b->buffer) == NULL) {
   2818             LOGE("Request %d: Buffer %ld: NULL private handle!",
   2819                      frameNumber, (long)idx);
   2820             return BAD_VALUE;
   2821         }
   2822         idx++;
   2823         b = request->output_buffers + idx;
   2824     } while (idx < (ssize_t)request->num_output_buffers);
   2825 
   2826     return NO_ERROR;
   2827 }
   2828 
   2829 /*===========================================================================
   2830  * FUNCTION   : deriveMinFrameDuration
   2831  *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
   2833  *              on currently configured streams.
   2834  *
   2835  * PARAMETERS : NONE
   2836  *
   2837  * RETURN     : NONE
   2838  *
   2839  *==========================================================================*/
   2840 void QCamera3HardwareInterface::deriveMinFrameDuration()
   2841 {
   2842     int32_t maxJpegDim, maxProcessedDim, maxRawDim;
   2843 
   2844     maxJpegDim = 0;
   2845     maxProcessedDim = 0;
   2846     maxRawDim = 0;
   2847 
   2848     // Figure out maximum jpeg, processed, and raw dimensions
   2849     for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
   2850         it != mStreamInfo.end(); it++) {
   2851 
   2852         // Input stream doesn't have valid stream_type
   2853         if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
   2854             continue;
   2855 
   2856         int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
   2857         if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
   2858             if (dimension > maxJpegDim)
   2859                 maxJpegDim = dimension;
   2860         } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
   2861                 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
   2862                 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
   2863             if (dimension > maxRawDim)
   2864                 maxRawDim = dimension;
   2865         } else {
   2866             if (dimension > maxProcessedDim)
   2867                 maxProcessedDim = dimension;
   2868         }
   2869     }
   2870 
   2871     size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
   2872             MAX_SIZES_CNT);
   2873 
   2874     //Assume all jpeg dimensions are in processed dimensions.
   2875     if (maxJpegDim > maxProcessedDim)
   2876         maxProcessedDim = maxJpegDim;
   2877     //Find the smallest raw dimension that is greater or equal to jpeg dimension
   2878     if (maxProcessedDim > maxRawDim) {
   2879         maxRawDim = INT32_MAX;
   2880 
   2881         for (size_t i = 0; i < count; i++) {
   2882             int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
   2883                     gCamCapability[mCameraId]->raw_dim[i].height;
   2884             if (dimension >= maxProcessedDim && dimension < maxRawDim)
   2885                 maxRawDim = dimension;
   2886         }
   2887     }
   2888 
   2889     //Find minimum durations for processed, jpeg, and raw
   2890     for (size_t i = 0; i < count; i++) {
   2891         if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
   2892                 gCamCapability[mCameraId]->raw_dim[i].height) {
   2893             mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
   2894             break;
   2895         }
   2896     }
   2897     count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
   2898     for (size_t i = 0; i < count; i++) {
   2899         if (maxProcessedDim ==
   2900                 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
   2901                 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
   2902             mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
   2903             mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
   2904             break;
   2905         }
   2906     }
   2907 }
   2908 
   2909 /*===========================================================================
   2910  * FUNCTION   : getMinFrameDuration
   2911  *
 * DESCRIPTION: get minimum frame duration based on the current maximum frame durations
 *              and current request configuration.
 *
 * PARAMETERS : @request: request sent by the frameworks
 *
 * RETURN     : min frame duration for a particular request
   2918  *
   2919  *==========================================================================*/
   2920 int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
   2921 {
   2922     bool hasJpegStream = false;
   2923     bool hasRawStream = false;
   2924     for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
   2925         const camera3_stream_t *stream = request->output_buffers[i].stream;
   2926         if (stream->format == HAL_PIXEL_FORMAT_BLOB)
   2927             hasJpegStream = true;
   2928         else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
   2929                 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
   2930                 stream->format == HAL_PIXEL_FORMAT_RAW16)
   2931             hasRawStream = true;
   2932     }
   2933 
   2934     if (!hasJpegStream)
   2935         return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
   2936     else
   2937         return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
   2938 }
   2939 
   2940 /*===========================================================================
   2941  * FUNCTION   : handleBuffersDuringFlushLock
   2942  *
   2943  * DESCRIPTION: Account for buffers returned from back-end during flush
   2944  *              This function is executed while mMutex is held by the caller.
   2945  *
   2946  * PARAMETERS :
   2947  *   @buffer: image buffer for the callback
   2948  *
   2949  * RETURN     :
   2950  *==========================================================================*/
   2951 void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
   2952 {
   2953     bool buffer_found = false;
   2954     for (List<PendingBuffersInRequest>::iterator req =
   2955             mPendingBuffersMap.mPendingBuffersInRequest.begin();
   2956             req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
   2957         for (List<PendingBufferInfo>::iterator i =
   2958                 req->mPendingBufferList.begin();
   2959                 i != req->mPendingBufferList.end(); i++) {
   2960             if (i->buffer == buffer->buffer) {
   2961                 mPendingBuffersMap.numPendingBufsAtFlush--;
   2962                 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
   2963                     buffer->buffer, req->frame_number,
   2964                     mPendingBuffersMap.numPendingBufsAtFlush);
   2965                 buffer_found = true;
   2966                 break;
   2967             }
   2968         }
   2969         if (buffer_found) {
   2970             break;
   2971         }
   2972     }
   2973     if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
   2974         //signal the flush()
   2975         LOGD("All buffers returned to HAL. Continue flush");
   2976         pthread_cond_signal(&mBuffersCond);
   2977     }
   2978 }
   2979 
   2980 
   2981 /*===========================================================================
   2982  * FUNCTION   : handlePendingReprocResults
   2983  *
   2984  * DESCRIPTION: check and notify on any pending reprocess results
   2985  *
   2986  * PARAMETERS :
   2987  *   @frame_number   : Pending request frame number
   2988  *
   2989  * RETURN     : int32_t type of status
   2990  *              NO_ERROR  -- success
   2991  *              none-zero failure code
   2992  *==========================================================================*/
int32_t QCamera3HardwareInterface::handlePendingReprocResults(uint32_t frame_number)
{
    // Look for a queued reprocess result matching this frame number. If one
    // is found, deliver its (previously deferred) shutter notify followed by
    // the capture result, then drop both bookkeeping entries.
    for (List<PendingReprocessResult>::iterator j = mPendingReprocessResultList.begin();
            j != mPendingReprocessResultList.end(); j++) {
        if (j->frame_number == frame_number) {
            // Notify must precede process_capture_result for this frame.
            mCallbackOps->notify(mCallbackOps, &j->notify_msg);

            LOGD("Delayed reprocess notify %d",
                    frame_number);

            // Find the matching pending request to recover its input buffer
            // and settings for the result callback.
            for (pendingRequestIterator k = mPendingRequestsList.begin();
                    k != mPendingRequestsList.end(); k++) {

                if (k->frame_number == j->frame_number) {
                    LOGD("Found reprocess frame number %d in pending reprocess List "
                            "Take it out!!",
                            k->frame_number);

                    // Assemble the final result: the single reprocessed output
                    // buffer held in j, plus the request's input buffer and
                    // settings metadata. PARTIAL_RESULT_COUNT marks this as
                    // the final (complete) metadata for the frame.
                    camera3_capture_result result;
                    memset(&result, 0, sizeof(camera3_capture_result));
                    result.frame_number = frame_number;
                    result.num_output_buffers = 1;
                    result.output_buffers =  &j->buffer;
                    result.input_buffer = k->input_buffer;
                    result.result = k->settings;
                    result.partial_result = PARTIAL_RESULT_COUNT;
                    mCallbackOps->process_capture_result(mCallbackOps, &result);

                    // Safe: we break out right after invalidating iterator k.
                    erasePendingRequest(k);
                    break;
                }
            }
            // Safe: we break out right after invalidating iterator j.
            mPendingReprocessResultList.erase(j);
            break;
        }
    }
    // NOTE(review): always reports success, even when no matching entry was
    // found — callers apparently treat "nothing pending" as success.
    return NO_ERROR;
}
   3031 
   3032 /*===========================================================================
   3033  * FUNCTION   : handleBatchMetadata
   3034  *
   3035  * DESCRIPTION: Handles metadata buffer callback in batch mode
   3036  *
   3037  * PARAMETERS : @metadata_buf: metadata buffer
   3038  *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
   3039  *                 the meta buf in this method
   3040  *
   3041  * RETURN     :
   3042  *
   3043  *==========================================================================*/
void QCamera3HardwareInterface::handleBatchMetadata(
        mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
{
    ATRACE_CALL();

    if (NULL == metadata_buf) {
        LOGE("metadata_buf is NULL");
        return;
    }
    /* In batch mode, the metadata will contain the frame number and timestamp
     * of the last frame in the batch. Eg: a batch containing buffers from
     * request 5,6,7 and 8 will have frame number and timestamp corresponding
     * to 8.
     * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata
     * => multiple process_capture_results */
    metadata_buffer_t *metadata =
            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
    uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
    uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
    uint32_t frame_number = 0, urgent_frame_number = 0;
    int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
    bool invalid_metadata = false;
    size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
    size_t loopCount = 1;

    // Pull the validity flags, frame numbers and sensor timestamp out of the
    // backend metadata. Any missing entry marks the whole buffer invalid.
    int32_t *p_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_frame_number =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
    int64_t *p_capture_time =
            POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    int32_t *p_urgent_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_urgent_frame_number =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);

    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
            (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
            (NULL == p_urgent_frame_number)) {
        LOGE("Invalid metadata");
        invalid_metadata = true;
    } else {
        frame_number_valid = *p_frame_number_valid;
        last_frame_number = *p_frame_number;
        last_frame_capture_time = *p_capture_time;
        urgent_frame_number_valid = *p_urgent_frame_number_valid;
        last_urgent_frame_number = *p_urgent_frame_number;
    }

    /* In batchmode, when no video buffers are requested, set_parms are sent
     * for every capture_request. The difference between consecutive urgent
     * frame numbers and frame numbers should be used to interpolate the
     * corresponding frame numbers and time stamps */
    pthread_mutex_lock(&mMutex);
    if (urgent_frame_number_valid) {
        // Map the last urgent frame number of the batch back to the first,
        // via the batch map recorded at request time.
        ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
        if(idx < 0) {
            LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
                last_urgent_frame_number);
            mState = ERROR;
            pthread_mutex_unlock(&mMutex);
            return;
        }
        first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
        urgentFrameNumDiff = last_urgent_frame_number + 1 -
                first_urgent_frame_number;

        LOGH("urgent_frm: valid: %d frm_num: %d - %d",
                 urgent_frame_number_valid,
                first_urgent_frame_number, last_urgent_frame_number);
    }

    if (frame_number_valid) {
        // Same mapping for the regular frame number; this entry is consumed
        // (removed) here since the batch is now fully accounted for.
        ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
        if(idx < 0) {
            LOGE("Invalid frame number received: %d. Irrecoverable error",
                last_frame_number);
            mState = ERROR;
            pthread_mutex_unlock(&mMutex);
            return;
        }
        first_frame_number = mPendingBatchMap.valueAt(idx);
        frameNumDiff = last_frame_number + 1 -
                first_frame_number;
        mPendingBatchMap.removeItem(last_frame_number);

        LOGH("frm: valid: %d frm_num: %d - %d",
                 frame_number_valid,
                first_frame_number, last_frame_number);

    }
    pthread_mutex_unlock(&mMutex);

    if (urgent_frame_number_valid || frame_number_valid) {
        // One iteration per frame in the batch; a diff beyond the configured
        // batch size indicates a backend inconsistency worth logging.
        loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
        if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
                     urgentFrameNumDiff, last_urgent_frame_number);
        if (frameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("frameNumDiff: %d frameNum: %d",
                     frameNumDiff, last_frame_number);
    }

    for (size_t i = 0; i < loopCount; i++) {
        /* handleMetadataWithLock is called even for invalid_metadata for
         * pipeline depth calculation */
        if (!invalid_metadata) {
            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (urgent_frame_number_valid) {
                if (i < urgentFrameNumDiff) {
                    urgent_frame_number =
                            first_urgent_frame_number + i;
                    LOGD("inferred urgent frame_number: %d",
                             urgent_frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff < frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
                }
            }

            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (frame_number_valid) {
                if (i < frameNumDiff) {
                    frame_number = first_frame_number + i;
                    LOGD("inferred frame_number: %d", frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_FRAME_NUMBER, frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff > frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                             CAM_INTF_META_FRAME_NUMBER_VALID, 0);
                }
            }

            if (last_frame_capture_time) {
                //Infer timestamp: spread the batch's frames evenly at the HFR
                //video frame interval, ending at the reported capture time.
                first_frame_capture_time = last_frame_capture_time -
                        (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
                capture_time =
                        first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
                ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                        CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
                LOGH("batch capture_time: %lld, capture_time: %lld",
                         last_frame_capture_time, capture_time);
            }
        }
        // Re-acquire mMutex for each per-frame dispatch; the metadata buffer
        // is kept (not buf-done) until the whole batch has been processed.
        pthread_mutex_lock(&mMutex);
        handleMetadataWithLock(metadata_buf,
                false /* free_and_bufdone_meta_buf */,
                (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
                (i == frameNumDiff-1) /* last metadata in the batch metadata */);
        pthread_mutex_unlock(&mMutex);
    }

    /* BufDone metadata buffer */
    if (free_and_bufdone_meta_buf) {
        mMetadataChannel->bufDone(metadata_buf);
        free(metadata_buf);
    }
}
   3209 
   3210 void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
   3211         camera3_error_msg_code_t errorCode)
   3212 {
   3213     camera3_notify_msg_t notify_msg;
   3214     memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
   3215     notify_msg.type = CAMERA3_MSG_ERROR;
   3216     notify_msg.message.error.error_code = errorCode;
   3217     notify_msg.message.error.error_stream = NULL;
   3218     notify_msg.message.error.frame_number = frameNumber;
   3219     mCallbackOps->notify(mCallbackOps, &notify_msg);
   3220 
   3221     return;
   3222 }
   3223 
   3224 /*===========================================================================
   3225  * FUNCTION   : handleMetadataWithLock
   3226  *
   3227  * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
   3228  *
   3229  * PARAMETERS : @metadata_buf: metadata buffer
   3230  *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
   3231  *                 the meta buf in this method
   3232  *              @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
   3233  *                  last urgent metadata in a batch. Always true for non-batch mode
   3234  *              @lastMetadataInBatch: Boolean to indicate whether this is the
   3235  *                  last metadata in a batch. Always true for non-batch mode
   3236  *
   3237  * RETURN     :
   3238  *
   3239  *==========================================================================*/
   3240 void QCamera3HardwareInterface::handleMetadataWithLock(
   3241     mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
   3242     bool lastUrgentMetadataInBatch, bool lastMetadataInBatch)
   3243 {
   3244     ATRACE_CALL();
   3245     if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
   3246         //during flush do not send metadata from this thread
   3247         LOGD("not sending metadata during flush or when mState is error");
   3248         if (free_and_bufdone_meta_buf) {
   3249             mMetadataChannel->bufDone(metadata_buf);
   3250             free(metadata_buf);
   3251         }
   3252         return;
   3253     }
   3254 
   3255     //not in flush
   3256     metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
   3257     int32_t frame_number_valid, urgent_frame_number_valid;
   3258     uint32_t frame_number, urgent_frame_number;
   3259     int64_t capture_time, capture_time_av;
   3260     nsecs_t currentSysTime;
   3261 
   3262     int32_t *p_frame_number_valid =
   3263             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
   3264     uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
   3265     int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
   3266     int64_t *p_capture_time_av = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP_AV, metadata);
   3267     int32_t *p_urgent_frame_number_valid =
   3268             POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
   3269     uint32_t *p_urgent_frame_number =
   3270             POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
   3271     IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
   3272             metadata) {
   3273         LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
   3274                  *p_frame_number_valid, *p_frame_number);
   3275     }
   3276 
   3277     if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
   3278             (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
   3279         LOGE("Invalid metadata");
   3280         if (free_and_bufdone_meta_buf) {
   3281             mMetadataChannel->bufDone(metadata_buf);
   3282             free(metadata_buf);
   3283         }
   3284         goto done_metadata;
   3285     }
   3286     frame_number_valid =        *p_frame_number_valid;
   3287     frame_number =              *p_frame_number;
   3288     capture_time =              *p_capture_time;
   3289     capture_time_av =           *p_capture_time_av;
   3290     urgent_frame_number_valid = *p_urgent_frame_number_valid;
   3291     urgent_frame_number =       *p_urgent_frame_number;
   3292     currentSysTime =            systemTime(CLOCK_MONOTONIC);
   3293 
   3294     if (!gCamCapability[mCameraId]->timestamp_calibrated) {
   3295         const int tries = 3;
   3296         nsecs_t bestGap, measured;
   3297         for (int i = 0; i < tries; ++i) {
   3298             const nsecs_t tmono = systemTime(SYSTEM_TIME_MONOTONIC);
   3299             const nsecs_t tbase = systemTime(SYSTEM_TIME_BOOTTIME);
   3300             const nsecs_t tmono2 = systemTime(SYSTEM_TIME_MONOTONIC);
   3301             const nsecs_t gap = tmono2 - tmono;
   3302             if (i == 0 || gap < bestGap) {
   3303                 bestGap = gap;
   3304                 measured = tbase - ((tmono + tmono2) >> 1);
   3305             }
   3306         }
   3307         capture_time -= measured;
   3308     }
   3309 
   3310     // Detect if buffers from any requests are overdue
   3311     for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
   3312         if ( (currentSysTime - req.timestamp) >
   3313             s2ns(MISSING_REQUEST_BUF_TIMEOUT) ) {
   3314             for (auto &missed : req.mPendingBufferList) {
   3315                 assert(missed.stream->priv);
   3316                 if (missed.stream->priv) {
   3317                     QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
   3318                     assert(ch->mStreams[0]);
   3319                     if (ch->mStreams[0]) {
   3320                         LOGW("Missing: frame = %d, buffer = %p,"
   3321                             "stream type = %d, stream format = %d",
   3322                             req.frame_number, missed.buffer,
   3323                             ch->mStreams[0]->getMyType(), missed.stream->format);
   3324                         ch->timeoutFrame(req.frame_number);
   3325                     }
   3326                 }
   3327             }
   3328         }
   3329     }
   3330     //Partial result on process_capture_result for timestamp
   3331     if (urgent_frame_number_valid) {
   3332         LOGD("valid urgent frame_number = %u, capture_time = %lld",
   3333            urgent_frame_number, capture_time);
   3334 
   3335         //Recieved an urgent Frame Number, handle it
   3336         //using partial results
   3337         for (pendingRequestIterator i =
   3338                 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
   3339             LOGD("Iterator Frame = %d urgent frame = %d",
   3340                  i->frame_number, urgent_frame_number);
   3341 
   3342             if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
   3343                     (i->partial_result_cnt == 0)) {
   3344                 LOGE("Error: HAL missed urgent metadata for frame number %d",
   3345                          i->frame_number);
   3346                 i->partialResultDropped = true;
   3347                 i->partial_result_cnt++;
   3348             }
   3349 
   3350             if (i->frame_number == urgent_frame_number &&
   3351                      i->bUrgentReceived == 0) {
   3352 
   3353                 camera3_capture_result_t result;
   3354                 memset(&result, 0, sizeof(camera3_capture_result_t));
   3355 
   3356                 i->partial_result_cnt++;
   3357                 i->bUrgentReceived = 1;
   3358                 // Extract 3A metadata
   3359                 result.result = translateCbUrgentMetadataToResultMetadata(
   3360                         metadata, lastUrgentMetadataInBatch, urgent_frame_number);
   3361                 // Populate metadata result
   3362                 result.frame_number = urgent_frame_number;
   3363                 result.num_output_buffers = 0;
   3364                 result.output_buffers = NULL;
   3365                 result.partial_result = i->partial_result_cnt;
   3366 
   3367                 mCallbackOps->process_capture_result(mCallbackOps, &result);
   3368                 LOGD("urgent frame_number = %u, capture_time = %lld",
   3369                       result.frame_number, capture_time);
   3370                 free_camera_metadata((camera_metadata_t *)result.result);
   3371                 break;
   3372             }
   3373         }
   3374     }
   3375 
   3376     if (!frame_number_valid) {
   3377         LOGD("Not a valid normal frame number, used as SOF only");
   3378         if (free_and_bufdone_meta_buf) {
   3379             mMetadataChannel->bufDone(metadata_buf);
   3380             free(metadata_buf);
   3381         }
   3382         goto done_metadata;
   3383     }
   3384     LOGH("valid frame_number = %u, capture_time = %lld",
   3385             frame_number, capture_time);
   3386 
   3387     for (pendingRequestIterator i = mPendingRequestsList.begin();
   3388             i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
   3389         // Flush out all entries with less or equal frame numbers.
   3390 
   3391         camera3_capture_result_t result;
   3392         memset(&result, 0, sizeof(camera3_capture_result_t));
   3393 
   3394         LOGD("frame_number in the list is %u", i->frame_number);
   3395         i->partial_result_cnt++;
   3396         result.partial_result = i->partial_result_cnt;
   3397 
   3398         // Check whether any stream buffer corresponding to this is dropped or not
   3399         // If dropped, then send the ERROR_BUFFER for the corresponding stream
   3400         // The API does not expect a blob buffer to be dropped
   3401         if (p_cam_frame_drop) {
   3402             /* Clear notify_msg structure */
   3403             camera3_notify_msg_t notify_msg;
   3404             memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
   3405             for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
   3406                     j != i->buffers.end(); j++) {
   3407                 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel *)j->stream->priv;
   3408                 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
   3409                 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
   3410                     if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
   3411                         // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
   3412                         LOGE("%s: Start of reporting error frame#=%u, streamID=%u streamFormat=%d",
   3413                                 __func__, i->frame_number, streamID, j->stream->format);
   3414                         notify_msg.type = CAMERA3_MSG_ERROR;
   3415                         notify_msg.message.error.frame_number = i->frame_number;
   3416                         notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
   3417                         notify_msg.message.error.error_stream = j->stream;
   3418                         mCallbackOps->notify(mCallbackOps, &notify_msg);
   3419                         LOGE("%s: End of reporting error frame#=%u, streamID=%u streamFormat=%d",
   3420                                 __func__, i->frame_number, streamID, j->stream->format);
   3421                         PendingFrameDropInfo PendingFrameDrop;
   3422                         PendingFrameDrop.frame_number=i->frame_number;
   3423                         PendingFrameDrop.stream_ID = streamID;
   3424                         // Add the Frame drop info to mPendingFrameDropList
   3425                         mPendingFrameDropList.push_back(PendingFrameDrop);
   3426                    }
   3427                }
   3428             }
   3429         }
   3430 
   3431         // Send empty metadata with already filled buffers for dropped metadata
   3432         // and send valid metadata with already filled buffers for current metadata
   3433         /* we could hit this case when we either
   3434          * 1. have a pending reprocess request or
   3435          * 2. miss a metadata buffer callback */
   3436         bool errorResult = false;
   3437         if (i->frame_number < frame_number) {
   3438             if (i->input_buffer) {
   3439                 /* this will be handled in handleInputBufferWithLock */
   3440                 i++;
   3441                 continue;
   3442             } else {
   3443                 mPendingLiveRequest--;
   3444                 errorResult = true;
   3445             }
   3446         } else {
   3447             mPendingLiveRequest--;
   3448             /* Clear notify_msg structure */
   3449             camera3_notify_msg_t notify_msg;
   3450             memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
   3451 
   3452             // Send shutter notify to frameworks
   3453             notify_msg.type = CAMERA3_MSG_SHUTTER;
   3454             notify_msg.message.shutter.frame_number = i->frame_number;
   3455             notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
   3456             mCallbackOps->notify(mCallbackOps, &notify_msg);
   3457 
   3458             errorResult = i->partialResultDropped;
   3459 
   3460             i->timestamp = capture_time;
   3461 
   3462             /* Set the timestamp in display metadata so that clients aware of
   3463                private_handle such as VT can use this un-modified timestamps.
   3464                Camera framework is unaware of this timestamp and cannot change this */
   3465             updateTimeStampInPendingBuffers(i->frame_number, capture_time_av);
   3466 
   3467             // Find channel requiring metadata, meaning internal offline postprocess
   3468             // is needed.
   3469             //TODO: for now, we don't support two streams requiring metadata at the same time.
   3470             // (because we are not making copies, and metadata buffer is not reference counted.
   3471             bool internalPproc = false;
   3472             for (pendingBufferIterator iter = i->buffers.begin();
   3473                     iter != i->buffers.end(); iter++) {
   3474                 if (iter->need_metadata) {
   3475                     internalPproc = true;
   3476                     QCamera3ProcessingChannel *channel =
   3477                             (QCamera3ProcessingChannel *)iter->stream->priv;
   3478                     channel->queueReprocMetadata(metadata_buf);
   3479                     break;
   3480                 }
   3481             }
   3482 
   3483             // atrace_begin(ATRACE_TAG_ALWAYS, "translateFromHalMetadata");
   3484             result.result = translateFromHalMetadata(metadata,
   3485                     *i, internalPproc, lastMetadataInBatch);
   3486             // atrace_end(ATRACE_TAG_ALWAYS);
   3487 
   3488             saveExifParams(metadata);
   3489 
   3490             if (i->blob_request) {
   3491                 {
   3492                     //Dump tuning metadata if enabled and available
   3493                     char prop[PROPERTY_VALUE_MAX];
   3494                     memset(prop, 0, sizeof(prop));
   3495                     property_get("persist.camera.dumpmetadata", prop, "0");
   3496                     int32_t enabled = atoi(prop);
   3497                     if (enabled && metadata->is_tuning_params_valid) {
   3498                         dumpMetadataToFile(metadata->tuning_params,
   3499                                mMetaFrameCount,
   3500                                enabled,
   3501                                "Snapshot",
   3502                                frame_number);
   3503                     }
   3504                 }
   3505             }
   3506 
   3507             if (!internalPproc) {
   3508                 LOGD("couldn't find need_metadata for this metadata");
   3509                 // Return metadata buffer
   3510                 if (free_and_bufdone_meta_buf) {
   3511                     mMetadataChannel->bufDone(metadata_buf);
   3512                     free(metadata_buf);
   3513                 }
   3514             }
   3515         }
   3516         if (errorResult) {
   3517             notifyError(i->frame_number, CAMERA3_MSG_ERROR_RESULT);
   3518         }
   3519 
   3520         if (!errorResult && !result.result) {
   3521             LOGE("metadata is NULL");
   3522         }
   3523         result.frame_number = i->frame_number;
   3524         result.input_buffer = i->input_buffer;
   3525         result.num_output_buffers = 0;
   3526         result.output_buffers = NULL;
   3527         for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
   3528                     j != i->buffers.end(); j++) {
   3529             if (j->buffer) {
   3530                result.num_output_buffers++;
   3531             }
   3532         }
   3533 
   3534         updateFpsInPreviewBuffer(metadata, i->frame_number);
   3535 
   3536         if (result.num_output_buffers > 0) {
   3537             camera3_stream_buffer_t *result_buffers =
   3538                 new camera3_stream_buffer_t[result.num_output_buffers];
   3539             if (result_buffers != NULL) {
   3540                 size_t result_buffers_idx = 0;
   3541                 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
   3542                         j != i->buffers.end(); j++) {
   3543                     if (j->buffer) {
   3544                         for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
   3545                                 m != mPendingFrameDropList.end(); m++) {
   3546                             QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
   3547                             uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
   3548                             if((m->stream_ID == streamID) && (m->frame_number==frame_number)) {
   3549                                 j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
   3550                                 LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u",
   3551                                         frame_number, streamID);
   3552                                 m = mPendingFrameDropList.erase(m);
   3553                                 break;
   3554                             }
   3555                         }
   3556                         j->buffer->status |= mPendingBuffersMap.getBufErrStatus(j->buffer->buffer);
   3557                         mPendingBuffersMap.removeBuf(j->buffer->buffer);
   3558                         result_buffers[result_buffers_idx++] = *(j->buffer);
   3559                         free(j->buffer);
   3560                         j->buffer = NULL;
   3561                     }
   3562                 }
   3563 
   3564                 result.output_buffers = result_buffers;
   3565                 mCallbackOps->process_capture_result(mCallbackOps, &result);
   3566                 LOGD("meta frame_number = %u, capture_time = %lld",
   3567                         result.frame_number, i->timestamp);
   3568                 delete[] result_buffers;
   3569             }else {
   3570                 LOGE("Fatal error: out of memory");
   3571             }
   3572         } else if (!errorResult) {
   3573             mCallbackOps->process_capture_result(mCallbackOps, &result);
   3574             LOGD("meta frame_number = %u, capture_time = %lld",
   3575                     result.frame_number, i->timestamp);
   3576         }
   3577 
   3578         if (result.result) {
   3579             free_camera_metadata((camera_metadata_t *)result.result);
   3580         }
   3581         i = erasePendingRequest(i);
   3582 
   3583         if (!mPendingReprocessResultList.empty()) {
   3584             handlePendingReprocResults(frame_number + 1);
   3585         }
   3586     }
   3587 
   3588 done_metadata:
   3589     for (pendingRequestIterator i = mPendingRequestsList.begin();
   3590             i != mPendingRequestsList.end() ;i++) {
   3591         i->pipeline_depth++;
   3592     }
   3593     LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
   3594     unblockRequestIfNecessary();
   3595 }
   3596 
   3597 /*===========================================================================
   3598  * FUNCTION   : hdrPlusPerfLock
   3599  *
   3600  * DESCRIPTION: perf lock for HDR+ using custom intent
   3601  *
   3602  * PARAMETERS : @metadata_buf: Metadata super_buf pointer
   3603  *
   3604  * RETURN     : None
   3605  *
   3606  *==========================================================================*/
   3607 void QCamera3HardwareInterface::hdrPlusPerfLock(
   3608         mm_camera_super_buf_t *metadata_buf)
   3609 {
   3610     if (NULL == metadata_buf) {
   3611         LOGE("metadata_buf is NULL");
   3612         return;
   3613     }
   3614     metadata_buffer_t *metadata =
   3615             (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
   3616     int32_t *p_frame_number_valid =
   3617             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
   3618     uint32_t *p_frame_number =
   3619             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
   3620 
   3621     if (p_frame_number_valid == NULL || p_frame_number == NULL) {
   3622         LOGE("%s: Invalid metadata", __func__);
   3623         return;
   3624     }
   3625 
   3626     //acquire perf lock for 5 sec after the last HDR frame is captured
   3627     if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
   3628         if ((p_frame_number != NULL) &&
   3629                 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
   3630             m_perfLock.lock_acq_timed(HDR_PLUS_PERF_TIME_OUT);
   3631         }
   3632     }
   3633 
   3634     //release lock after perf lock timer is expired. If lock is already released,
   3635     //isTimerReset returns false
   3636     if (m_perfLock.isTimerReset()) {
   3637         mLastCustIntentFrmNum = -1;
   3638         m_perfLock.lock_rel_timed();
   3639     }
   3640 }
   3641 
   3642 /*===========================================================================
   3643  * FUNCTION   : handleInputBufferWithLock
   3644  *
   3645  * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
   3646  *
   3647  * PARAMETERS : @frame_number: frame number of the input buffer
   3648  *
   3649  * RETURN     :
   3650  *
   3651  *==========================================================================*/
   3652 void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
   3653 {
   3654     ATRACE_CALL();
   3655     pendingRequestIterator i = mPendingRequestsList.begin();
   3656     while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
   3657         i++;
   3658     }
   3659     if (i != mPendingRequestsList.end() && i->input_buffer) {
   3660         //found the right request
   3661         if (!i->shutter_notified) {
   3662             CameraMetadata settings;
   3663             camera3_notify_msg_t notify_msg;
   3664             memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
   3665             nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
   3666             if(i->settings) {
   3667                 settings = i->settings;
   3668                 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
   3669                     capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
   3670                 } else {
   3671                     LOGE("No timestamp in input settings! Using current one.");
   3672                 }
   3673             } else {
   3674                 LOGE("Input settings missing!");
   3675             }
   3676 
   3677             notify_msg.type = CAMERA3_MSG_SHUTTER;
   3678             notify_msg.message.shutter.frame_number = frame_number;
   3679             notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
   3680             mCallbackOps->notify(mCallbackOps, &notify_msg);
   3681             i->shutter_notified = true;
   3682             LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
   3683                         i->frame_number, notify_msg.message.shutter.timestamp);
   3684         }
   3685 
   3686         if (i->input_buffer->release_fence != -1) {
   3687            int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
   3688            close(i->input_buffer->release_fence);
   3689            if (rc != OK) {
   3690                LOGE("input buffer sync wait failed %d", rc);
   3691            }
   3692         }
   3693 
   3694         camera3_capture_result result;
   3695         memset(&result, 0, sizeof(camera3_capture_result));
   3696         result.frame_number = frame_number;
   3697         result.result = i->settings;
   3698         result.input_buffer = i->input_buffer;
   3699         result.partial_result = PARTIAL_RESULT_COUNT;
   3700 
   3701         mCallbackOps->process_capture_result(mCallbackOps, &result);
   3702         LOGD("Input request metadata and input buffer frame_number = %u",
   3703                         i->frame_number);
   3704         i = erasePendingRequest(i);
   3705     } else {
   3706         LOGE("Could not find input request for frame number %d", frame_number);
   3707     }
   3708 }
   3709 
   3710 /*===========================================================================
   3711  * FUNCTION   : handleBufferWithLock
   3712  *
   3713  * DESCRIPTION: Handles image buffer callback with mMutex lock held.
   3714  *
   3715  * PARAMETERS : @buffer: image buffer for the callback
   3716  *              @frame_number: frame number of the image buffer
   3717  *
   3718  * RETURN     :
   3719  *
   3720  *==========================================================================*/
   3721 void QCamera3HardwareInterface::handleBufferWithLock(
   3722     camera3_stream_buffer_t *buffer, uint32_t frame_number)
   3723 {
   3724     ATRACE_CALL();
   3725     /* Nothing to be done during error state */
   3726     if ((ERROR == mState) || (DEINIT == mState)) {
   3727         return;
   3728     }
   3729     if (mFlushPerf) {
   3730         handleBuffersDuringFlushLock(buffer);
   3731         return;
   3732     }
   3733     //not in flush
   3734     // If the frame number doesn't exist in the pending request list,
   3735     // directly send the buffer to the frameworks, and update pending buffers map
   3736     // Otherwise, book-keep the buffer.
   3737     pendingRequestIterator i = mPendingRequestsList.begin();
   3738     while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
   3739         i++;
   3740     }
   3741     if (i == mPendingRequestsList.end()) {
   3742         // Verify all pending requests frame_numbers are greater
   3743         for (pendingRequestIterator j = mPendingRequestsList.begin();
   3744                 j != mPendingRequestsList.end(); j++) {
   3745             if ((j->frame_number < frame_number) && !(j->input_buffer)) {
   3746                 LOGW("Error: pending live frame number %d is smaller than %d",
   3747                          j->frame_number, frame_number);
   3748             }
   3749         }
   3750         camera3_capture_result_t result;
   3751         memset(&result, 0, sizeof(camera3_capture_result_t));
   3752         result.result = NULL;
   3753         result.frame_number = frame_number;
   3754         result.num_output_buffers = 1;
   3755         result.partial_result = 0;
   3756         for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
   3757                 m != mPendingFrameDropList.end(); m++) {
   3758             QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
   3759             uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
   3760             if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
   3761                 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
   3762                 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
   3763                          frame_number, streamID);
   3764                 m = mPendingFrameDropList.erase(m);
   3765                 break;
   3766             }
   3767         }
   3768         buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
   3769         result.output_buffers = buffer;
   3770         LOGH("result frame_number = %d, buffer = %p",
   3771                  frame_number, buffer->buffer);
   3772 
   3773         mPendingBuffersMap.removeBuf(buffer->buffer);
   3774 
   3775         mCallbackOps->process_capture_result(mCallbackOps, &result);
   3776     } else {
   3777         if (i->input_buffer) {
   3778             CameraMetadata settings;
   3779             camera3_notify_msg_t notify_msg;
   3780             memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
   3781             nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
   3782             if(i->settings) {
   3783                 settings = i->settings;
   3784                 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
   3785                     capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
   3786                 } else {
   3787                     LOGW("No timestamp in input settings! Using current one.");
   3788                 }
   3789             } else {
   3790                 LOGE("Input settings missing!");
   3791             }
   3792 
   3793             notify_msg.type = CAMERA3_MSG_SHUTTER;
   3794             notify_msg.message.shutter.frame_number = frame_number;
   3795             notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
   3796 
   3797             if (i->input_buffer->release_fence != -1) {
   3798                int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
   3799                close(i->input_buffer->release_fence);
   3800                if (rc != OK) {
   3801                    LOGE("input buffer sync wait failed %d", rc);
   3802                }
   3803             }
   3804             buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
   3805             mPendingBuffersMap.removeBuf(buffer->buffer);
   3806 
   3807             bool notifyNow = true;
   3808             for (pendingRequestIterator j = mPendingRequestsList.begin();
   3809                     j != mPendingRequestsList.end(); j++) {
   3810                 if (j->frame_number < frame_number) {
   3811                     notifyNow = false;
   3812                     break;
   3813                 }
   3814             }
   3815 
   3816             if (notifyNow) {
   3817                 camera3_capture_result result;
   3818                 memset(&result, 0, sizeof(camera3_capture_result));
   3819                 result.frame_number = frame_number;
   3820                 result.result = i->settings;
   3821                 result.input_buffer = i->input_buffer;
   3822                 result.num_output_buffers = 1;
   3823                 result.output_buffers = buffer;
   3824                 result.partial_result = PARTIAL_RESULT_COUNT;
   3825 
   3826                 mCallbackOps->notify(mCallbackOps, &notify_msg);
   3827                 mCallbackOps->process_capture_result(mCallbackOps, &result);
   3828                 LOGD("Notify reprocess now %d!", frame_number);
   3829                 i = erasePendingRequest(i);
   3830             } else {
   3831                 // Cache reprocess result for later
   3832                 PendingReprocessResult pendingResult;
   3833                 memset(&pendingResult, 0, sizeof(PendingReprocessResult));
   3834                 pendingResult.notify_msg = notify_msg;
   3835                 pendingResult.buffer = *buffer;
   3836                 pendingResult.frame_number = frame_number;
   3837                 mPendingReprocessResultList.push_back(pendingResult);
   3838                 LOGD("Cache reprocess result %d!", frame_number);
   3839             }
   3840         } else {
   3841             for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
   3842                 j != i->buffers.end(); j++) {
   3843                 if (j->stream == buffer->stream) {
   3844                     if (j->buffer != NULL) {
   3845                         LOGE("Error: buffer is already set");
   3846                     } else {
   3847                         j->buffer = (camera3_stream_buffer_t *)malloc(
   3848                             sizeof(camera3_stream_buffer_t));
   3849                         *(j->buffer) = *buffer;
   3850                         LOGH("cache buffer %p at result frame_number %u",
   3851                              buffer->buffer, frame_number);
   3852                     }
   3853                 }
   3854             }
   3855         }
   3856     }
   3857 }
   3858 
   3859 /*===========================================================================
   3860  * FUNCTION   : unblockRequestIfNecessary
   3861  *
   3862  * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
   3863  *              that mMutex is held when this function is called.
   3864  *
   3865  * PARAMETERS :
   3866  *
   3867  * RETURN     :
   3868  *
   3869  *==========================================================================*/
   3870 void QCamera3HardwareInterface::unblockRequestIfNecessary()
   3871 {
   3872    // Unblock process_capture_request
   3873    pthread_cond_signal(&mRequestCond);
   3874 }
   3875 
   3876 
   3877 /*===========================================================================
   3878  * FUNCTION   : processCaptureRequest
   3879  *
   3880  * DESCRIPTION: process a capture request from camera service
   3881  *
   3882  * PARAMETERS :
   3883  *   @request : request from framework to process
   3884  *
   3885  * RETURN     :
   3886  *
   3887  *==========================================================================*/
   3888 int QCamera3HardwareInterface::processCaptureRequest(
   3889                     camera3_capture_request_t *request)
   3890 {
   3891     ATRACE_CALL();
   3892     int rc = NO_ERROR;
   3893     int32_t request_id;
   3894     CameraMetadata meta;
   3895     bool isVidBufRequested = false;
   3896     camera3_stream_buffer_t *pInputBuffer = NULL;
   3897 
   3898     pthread_mutex_lock(&mMutex);
   3899 
   3900     // Validate current state
   3901     switch (mState) {
   3902         case CONFIGURED:
   3903         case STARTED:
   3904             /* valid state */
   3905             break;
   3906 
   3907         case ERROR:
   3908             pthread_mutex_unlock(&mMutex);
   3909             handleCameraDeviceError();
   3910             return -ENODEV;
   3911 
   3912         default:
   3913             LOGE("Invalid state %d", mState);
   3914             pthread_mutex_unlock(&mMutex);
   3915             return -ENODEV;
   3916     }
   3917 
   3918     rc = validateCaptureRequest(request);
   3919     if (rc != NO_ERROR) {
   3920         LOGE("incoming request is not valid");
   3921         pthread_mutex_unlock(&mMutex);
   3922         return rc;
   3923     }
   3924 
   3925     meta = request->settings;
   3926 
   3927     // For first capture request, send capture intent, and
   3928     // stream on all streams
   3929     if (mState == CONFIGURED) {
   3930         m_perfLock.lock_acq();
   3931         //update settings from app here
   3932         if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
   3933             mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
   3934             LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
   3935         }
   3936         if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
   3937             mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
   3938             LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
   3939         }
   3940         if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
   3941             mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
   3942             LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
   3943 
   3944             if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
   3945                 (mLinkedCameraId != mCameraId) ) {
   3946                 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
   3947                     mLinkedCameraId, mCameraId);
   3948                 pthread_mutex_unlock(&mMutex);
   3949                 goto error_exit;
   3950             }
   3951         }
   3952 
   3953         // add bundle related cameras
   3954         LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
   3955         if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
   3956             if (mIsDeviceLinked)
   3957                 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
   3958             else
   3959                 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
   3960 
   3961             pthread_mutex_lock(&gCamLock);
   3962 
   3963             if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
   3964                 LOGE("Dualcam: Invalid Session Id ");
   3965                 pthread_mutex_unlock(&gCamLock);
   3966                 pthread_mutex_unlock(&mMutex);
   3967                 goto error_exit;
   3968             }
   3969 
   3970             if (mIsMainCamera == 1) {
   3971                 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
   3972                 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
   3973                 // related session id should be session id of linked session
   3974                 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
   3975             } else {
   3976                 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
   3977                 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
   3978                 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
   3979             }
   3980             pthread_mutex_unlock(&gCamLock);
   3981 
   3982             rc = mCameraHandle->ops->sync_related_sensors(
   3983                     mCameraHandle->camera_handle, m_pRelCamSyncBuf);
   3984             if (rc < 0) {
   3985                 LOGE("Dualcam: link failed");
   3986                 pthread_mutex_unlock(&mMutex);
   3987                 goto error_exit;
   3988             }
   3989         }
   3990 
   3991         //Then start them.
   3992         LOGH("Start META Channel");
   3993         rc = mMetadataChannel->start();
   3994         if (rc < 0) {
   3995             LOGE("META channel start failed");
   3996             pthread_mutex_unlock(&mMutex);
   3997             goto error_exit;
   3998         }
   3999 
   4000         if (mAnalysisChannel) {
   4001             rc = mAnalysisChannel->start();
   4002             if (rc < 0) {
   4003                 LOGE("Analysis channel start failed");
   4004                 mMetadataChannel->stop();
   4005                 pthread_mutex_unlock(&mMutex);
   4006                 goto error_exit;
   4007             }
   4008         }
   4009 
   4010         if (mSupportChannel) {
   4011             rc = mSupportChannel->start();
   4012             if (rc < 0) {
   4013                 LOGE("Support channel start failed");
   4014                 mMetadataChannel->stop();
   4015                 /* Although support and analysis are mutually exclusive today
   4016                    adding it in anycase for future proofing */
   4017                 if (mAnalysisChannel) {
   4018                     mAnalysisChannel->stop();
   4019                 }
   4020                 pthread_mutex_unlock(&mMutex);
   4021                 goto error_exit;
   4022             }
   4023         }
   4024         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   4025             it != mStreamInfo.end(); it++) {
   4026             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
   4027             LOGH("Start Processing Channel mask=%d",
   4028                      channel->getStreamTypeMask());
   4029             rc = channel->start();
   4030             if (rc < 0) {
   4031                 LOGE("channel start failed");
   4032                 pthread_mutex_unlock(&mMutex);
   4033                 goto error_exit;
   4034             }
   4035         }
   4036 
   4037         if (mRawDumpChannel) {
   4038             LOGD("Starting raw dump stream");
   4039             rc = mRawDumpChannel->start();
   4040             if (rc != NO_ERROR) {
   4041                 LOGE("Error Starting Raw Dump Channel");
   4042                 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   4043                       it != mStreamInfo.end(); it++) {
   4044                     QCamera3Channel *channel =
   4045                         (QCamera3Channel *)(*it)->stream->priv;
   4046                     LOGH("Stopping Processing Channel mask=%d",
   4047                         channel->getStreamTypeMask());
   4048                     channel->stop();
   4049                 }
   4050                 if (mSupportChannel)
   4051                     mSupportChannel->stop();
   4052                 if (mAnalysisChannel) {
   4053                     mAnalysisChannel->stop();
   4054                 }
   4055                 mMetadataChannel->stop();
   4056                 pthread_mutex_unlock(&mMutex);
   4057                 goto error_exit;
   4058             }
   4059         }
   4060 
   4061         if (mChannelHandle) {
   4062 
   4063             rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
   4064                     mChannelHandle);
   4065             if (rc != NO_ERROR) {
   4066                 LOGE("start_channel failed %d", rc);
   4067                 pthread_mutex_unlock(&mMutex);
   4068                 goto error_exit;
   4069             }
   4070         }
   4071 
   4072         goto no_error;
   4073 error_exit:
   4074         m_perfLock.lock_rel();
   4075         return rc;
   4076 no_error:
   4077         m_perfLock.lock_rel();
   4078 
   4079         mWokenUpByDaemon = false;
   4080         mPendingLiveRequest = 0;
   4081         mFirstConfiguration = false;
   4082         enablePowerHint();
   4083     }
   4084 
   4085     uint32_t frameNumber = request->frame_number;
   4086     cam_stream_ID_t streamsArray;
   4087 
   4088     if (mFlushPerf) {
   4089         //we cannot accept any requests during flush
   4090         LOGE("process_capture_request cannot proceed during flush");
   4091         pthread_mutex_unlock(&mMutex);
   4092         return NO_ERROR; //should return an error
   4093     }
   4094 
   4095     if (meta.exists(ANDROID_REQUEST_ID)) {
   4096         request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
   4097         mCurrentRequestId = request_id;
   4098         LOGD("Received request with id: %d", request_id);
   4099     } else if (mState == CONFIGURED || mCurrentRequestId == -1){
   4100         LOGE("Unable to find request id field, \
   4101                 & no previous id available");
   4102         pthread_mutex_unlock(&mMutex);
   4103         return NAME_NOT_FOUND;
   4104     } else {
   4105         LOGD("Re-using old request id");
   4106         request_id = mCurrentRequestId;
   4107     }
   4108 
   4109     LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
   4110                                     request->num_output_buffers,
   4111                                     request->input_buffer,
   4112                                     frameNumber);
   4113     // Acquire all request buffers first
   4114     streamsArray.num_streams = 0;
   4115     int blob_request = 0;
   4116     uint32_t snapshotStreamId = 0;
   4117     for (size_t i = 0; i < request->num_output_buffers; i++) {
   4118         const camera3_stream_buffer_t& output = request->output_buffers[i];
   4119         QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
   4120 
   4121         if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
   4122             //Call function to store local copy of jpeg data for encode params.
   4123             blob_request = 1;
   4124             snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
   4125         }
   4126 
   4127         if (output.acquire_fence != -1) {
   4128            rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
   4129            close(output.acquire_fence);
   4130            if (rc != OK) {
   4131               LOGE("sync wait failed %d", rc);
   4132               pthread_mutex_unlock(&mMutex);
   4133               return rc;
   4134            }
   4135         }
   4136 
   4137         streamsArray.stream_request[streamsArray.num_streams++].streamID =
   4138             channel->getStreamID(channel->getStreamTypeMask());
   4139 
   4140         if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
   4141             isVidBufRequested = true;
   4142         }
   4143     }
   4144 
   4145     if (blob_request) {
   4146         KPI_ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
   4147     }
   4148     if (blob_request && mRawDumpChannel) {
   4149         LOGD("Trigger Raw based on blob request if Raw dump is enabled");
   4150         streamsArray.stream_request[streamsArray.num_streams].streamID =
   4151             mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
   4152         streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
   4153     }
   4154 
   4155     if(request->input_buffer == NULL) {
   4156         /* Parse the settings:
   4157          * - For every request in NORMAL MODE
   4158          * - For every request in HFR mode during preview only case
   4159          * - For first request of every batch in HFR mode during video
   4160          * recording. In batchmode the same settings except frame number is
   4161          * repeated in each request of the batch.
   4162          */
   4163         if (!mBatchSize ||
   4164            (mBatchSize && !isVidBufRequested) ||
   4165            (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
   4166             rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
   4167             if (rc < 0) {
   4168                 LOGE("fail to set frame parameters");
   4169                 pthread_mutex_unlock(&mMutex);
   4170                 return rc;
   4171             }
   4172         }
   4173         /* For batchMode HFR, setFrameParameters is not called for every
   4174          * request. But only frame number of the latest request is parsed.
   4175          * Keep track of first and last frame numbers in a batch so that
   4176          * metadata for the frame numbers of batch can be duplicated in
   4177          * handleBatchMetadta */
   4178         if (mBatchSize) {
   4179             if (!mToBeQueuedVidBufs) {
   4180                 //start of the batch
   4181                 mFirstFrameNumberInBatch = request->frame_number;
   4182             }
   4183             if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
   4184                 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
   4185                 LOGE("Failed to set the frame number in the parameters");
   4186                 pthread_mutex_unlock(&mMutex);
   4187                 return BAD_VALUE;
   4188             }
   4189         }
   4190         if (mNeedSensorRestart) {
   4191             /* Unlock the mutex as restartSensor waits on the channels to be
   4192              * stopped, which in turn calls stream callback functions -
   4193              * handleBufferWithLock and handleMetadataWithLock */
   4194             pthread_mutex_unlock(&mMutex);
   4195             rc = dynamicUpdateMetaStreamInfo();
   4196             if (rc != NO_ERROR) {
   4197                 LOGE("Restarting the sensor failed");
   4198                 return BAD_VALUE;
   4199             }
   4200             mNeedSensorRestart = false;
   4201             pthread_mutex_lock(&mMutex);
   4202         }
   4203     } else {
   4204 
   4205         if (request->input_buffer->acquire_fence != -1) {
   4206            rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
   4207            close(request->input_buffer->acquire_fence);
   4208            if (rc != OK) {
   4209               LOGE("input buffer sync wait failed %d", rc);
   4210               pthread_mutex_unlock(&mMutex);
   4211               return rc;
   4212            }
   4213         }
   4214     }
   4215 
   4216     if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
   4217         mLastCustIntentFrmNum = frameNumber;
   4218     }
   4219     /* Update pending request list and pending buffers map */
   4220     PendingRequestInfo pendingRequest = {};
   4221     pendingRequestIterator latestRequest;
   4222     pendingRequest.frame_number = frameNumber;
   4223     pendingRequest.num_buffers = request->num_output_buffers;
   4224     pendingRequest.request_id = request_id;
   4225     pendingRequest.blob_request = blob_request;
   4226     pendingRequest.timestamp = 0;
   4227     pendingRequest.bUrgentReceived = 0;
   4228     if (request->input_buffer) {
   4229         pendingRequest.input_buffer =
   4230                 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
   4231         *(pendingRequest.input_buffer) = *(request->input_buffer);
   4232         pInputBuffer = pendingRequest.input_buffer;
   4233     } else {
   4234        pendingRequest.input_buffer = NULL;
   4235        pInputBuffer = NULL;
   4236     }
   4237 
   4238     pendingRequest.pipeline_depth = 0;
   4239     pendingRequest.partial_result_cnt = 0;
   4240     extractJpegMetadata(mCurJpegMeta, request);
   4241     pendingRequest.jpegMetadata = mCurJpegMeta;
   4242     pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
   4243     pendingRequest.shutter_notified = false;
   4244 
   4245     //extract capture intent
   4246     if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
   4247         mCaptureIntent =
   4248                 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
   4249     }
   4250     if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
   4251         mHybridAeEnable =
   4252                 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
   4253     }
   4254     pendingRequest.capture_intent = mCaptureIntent;
   4255     pendingRequest.hybrid_ae_enable = mHybridAeEnable;
   4256     /* DevCamDebug metadata processCaptureRequest */
   4257     if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
   4258         mDevCamDebugMetaEnable =
   4259                 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
   4260     }
   4261     pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
   4262     /* DevCamDebug metadata end */
   4263 
   4264     //extract CAC info
   4265     if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
   4266         mCacMode =
   4267                 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
   4268     }
   4269     pendingRequest.fwkCacMode = mCacMode;
   4270 
   4271     PendingBuffersInRequest bufsForCurRequest;
   4272     bufsForCurRequest.frame_number = frameNumber;
   4273     // Mark current timestamp for the new request
   4274     bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
   4275 
   4276     for (size_t i = 0; i < request->num_output_buffers; i++) {
   4277         RequestedBufferInfo requestedBuf;
   4278         memset(&requestedBuf, 0, sizeof(requestedBuf));
   4279         requestedBuf.stream = request->output_buffers[i].stream;
   4280         requestedBuf.buffer = NULL;
   4281         pendingRequest.buffers.push_back(requestedBuf);
   4282 
   4283         // Add to buffer handle the pending buffers list
   4284         PendingBufferInfo bufferInfo;
   4285         bufferInfo.buffer = request->output_buffers[i].buffer;
   4286         bufferInfo.stream = request->output_buffers[i].stream;
   4287         bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
   4288         QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
   4289         LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
   4290             frameNumber, bufferInfo.buffer,
   4291             channel->getStreamTypeMask(), bufferInfo.stream->format);
   4292     }
   4293     // Add this request packet into mPendingBuffersMap
   4294     mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
   4295     LOGD("mPendingBuffersMap.num_overall_buffers = %d",
   4296         mPendingBuffersMap.get_num_overall_buffers());
   4297 
   4298     latestRequest = mPendingRequestsList.insert(
   4299             mPendingRequestsList.end(), pendingRequest);
   4300     if(mFlush) {
   4301         LOGI("mFlush is true");
   4302         pthread_mutex_unlock(&mMutex);
   4303         return NO_ERROR;
   4304     }
   4305 
   4306     int indexUsed;
   4307     // Notify metadata channel we receive a request
   4308     mMetadataChannel->request(NULL, frameNumber, indexUsed);
   4309 
   4310     if(request->input_buffer != NULL){
   4311         LOGD("Input request, frame_number %d", frameNumber);
   4312         rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
   4313         if (NO_ERROR != rc) {
   4314             LOGE("fail to set reproc parameters");
   4315             pthread_mutex_unlock(&mMutex);
   4316             return rc;
   4317         }
   4318     }
   4319 
   4320     // Call request on other streams
   4321     uint32_t streams_need_metadata = 0;
   4322     pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
   4323     for (size_t i = 0; i < request->num_output_buffers; i++) {
   4324         const camera3_stream_buffer_t& output = request->output_buffers[i];
   4325         QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
   4326 
   4327         if (channel == NULL) {
   4328             LOGW("invalid channel pointer for stream");
   4329             continue;
   4330         }
   4331 
   4332         if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
   4333             LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
   4334                       output.buffer, request->input_buffer, frameNumber);
   4335             if(request->input_buffer != NULL){
   4336                 rc = channel->request(output.buffer, frameNumber,
   4337                         pInputBuffer, &mReprocMeta, indexUsed);
   4338                 if (rc < 0) {
   4339                     LOGE("Fail to request on picture channel");
   4340                     pthread_mutex_unlock(&mMutex);
   4341                     return rc;
   4342                 }
   4343             } else {
   4344                 LOGD("snapshot request with buffer %p, frame_number %d",
   4345                          output.buffer, frameNumber);
   4346                 if (!request->settings) {
   4347                     rc = channel->request(output.buffer, frameNumber,
   4348                             NULL, mPrevParameters, indexUsed);
   4349                 } else {
   4350                     rc = channel->request(output.buffer, frameNumber,
   4351                             NULL, mParameters, indexUsed);
   4352                 }
   4353                 if (rc < 0) {
   4354                     LOGE("Fail to request on picture channel");
   4355                     pthread_mutex_unlock(&mMutex);
   4356                     return rc;
   4357                 }
   4358 
   4359                 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
   4360                 uint32_t j = 0;
   4361                 for (j = 0; j < streamsArray.num_streams; j++) {
   4362                     if (streamsArray.stream_request[j].streamID == streamId) {
   4363                       if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
   4364                           streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
   4365                       else
   4366                           streamsArray.stream_request[j].buf_index = indexUsed;
   4367                         break;
   4368                     }
   4369                 }
   4370                 if (j == streamsArray.num_streams) {
   4371                     LOGE("Did not find matching stream to update index");
   4372                     assert(0);
   4373                 }
   4374 
   4375                 pendingBufferIter->need_metadata = true;
   4376                 streams_need_metadata++;
   4377             }
   4378         } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
   4379             bool needMetadata = false;
   4380             QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
   4381             rc = yuvChannel->request(output.buffer, frameNumber,
   4382                     pInputBuffer,
   4383                     (pInputBuffer ? &mReprocMeta : mParameters), needMetadata, indexUsed);
   4384             if (rc < 0) {
   4385                 LOGE("Fail to request on YUV channel");
   4386                 pthread_mutex_unlock(&mMutex);
   4387                 return rc;
   4388             }
   4389 
   4390             uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
   4391             uint32_t j = 0;
   4392             for (j = 0; j < streamsArray.num_streams; j++) {
   4393                 if (streamsArray.stream_request[j].streamID == streamId) {
   4394                     if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
   4395                         streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
   4396                     else
   4397                         streamsArray.stream_request[j].buf_index = indexUsed;
   4398                     break;
   4399                 }
   4400             }
   4401             if (j == streamsArray.num_streams) {
   4402                 LOGE("Did not find matching stream to update index");
   4403                 assert(0);
   4404             }
   4405 
   4406             pendingBufferIter->need_metadata = needMetadata;
   4407             if (needMetadata)
   4408                 streams_need_metadata += 1;
   4409             LOGD("calling YUV channel request, need_metadata is %d",
   4410                      needMetadata);
   4411         } else {
   4412             LOGD("request with buffer %p, frame_number %d",
   4413                   output.buffer, frameNumber);
   4414 
   4415             rc = channel->request(output.buffer, frameNumber, indexUsed);
   4416 
   4417             uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
   4418             uint32_t j = 0;
   4419             for (j = 0; j < streamsArray.num_streams; j++) {
   4420                 if (streamsArray.stream_request[j].streamID == streamId) {
   4421                     if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
   4422                         streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
   4423                     else
   4424                         streamsArray.stream_request[j].buf_index = indexUsed;
   4425                     break;
   4426                 }
   4427             }
   4428             if (j == streamsArray.num_streams) {
   4429                 LOGE("Did not find matching stream to update index");
   4430                 assert(0);
   4431             }
   4432 
   4433             if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
   4434                     && mBatchSize) {
   4435                 mToBeQueuedVidBufs++;
   4436                 if (mToBeQueuedVidBufs == mBatchSize) {
   4437                     channel->queueBatchBuf();
   4438                 }
   4439             }
   4440             if (rc < 0) {
   4441                 LOGE("request failed");
   4442                 pthread_mutex_unlock(&mMutex);
   4443                 return rc;
   4444             }
   4445         }
   4446         pendingBufferIter++;
   4447     }
   4448 
   4449     //If 2 streams have need_metadata set to true, fail the request, unless
   4450     //we copy/reference count the metadata buffer
   4451     if (streams_need_metadata > 1) {
   4452         LOGE("not supporting request in which two streams requires"
   4453                 " 2 HAL metadata for reprocessing");
   4454         pthread_mutex_unlock(&mMutex);
   4455         return -EINVAL;
   4456     }
   4457 
   4458     if (request->input_buffer == NULL) {
   4459         /* Set the parameters to backend:
   4460          * - For every request in NORMAL MODE
   4461          * - For every request in HFR mode during preview only case
   4462          * - Once every batch in HFR mode during video recording
   4463          */
   4464         if (!mBatchSize ||
   4465            (mBatchSize && !isVidBufRequested) ||
   4466            (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
   4467             LOGD("set_parms  batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
   4468                      mBatchSize, isVidBufRequested,
   4469                     mToBeQueuedVidBufs);
   4470 
   4471             if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
   4472                 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
   4473                     uint32_t m = 0;
   4474                     for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
   4475                         if (streamsArray.stream_request[k].streamID ==
   4476                                 mBatchedStreamsArray.stream_request[m].streamID)
   4477                             break;
   4478                         }
   4479                         if (m == mBatchedStreamsArray.num_streams) {
   4480                             mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].streamID =
   4481                                 streamsArray.stream_request[k].streamID;
   4482                             mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].buf_index =
   4483                                 streamsArray.stream_request[k].buf_index;
   4484                             mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
   4485                         }
   4486                 }
   4487                 streamsArray = mBatchedStreamsArray;
   4488             }
   4489             /* Update stream id of all the requested buffers */
   4490             if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
   4491                 LOGE("Failed to set stream type mask in the parameters");
   4492                 pthread_mutex_unlock(&mMutex);
   4493                 return BAD_VALUE;
   4494             }
   4495 
   4496             rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
   4497                     mParameters);
   4498             if (rc < 0) {
   4499                 LOGE("set_parms failed");
   4500             }
   4501             /* reset to zero coz, the batch is queued */
   4502             mToBeQueuedVidBufs = 0;
   4503             mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
   4504             memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
   4505         } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
   4506             for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
   4507                 uint32_t m = 0;
   4508                 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
   4509                     if (streamsArray.stream_request[k].streamID ==
   4510                             mBatchedStreamsArray.stream_request[m].streamID)
   4511                         break;
   4512                 }
   4513                 if (m == mBatchedStreamsArray.num_streams) {
   4514                     mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].streamID =
   4515                         streamsArray.stream_request[k].streamID;
   4516                     mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].buf_index =
   4517                         streamsArray.stream_request[k].buf_index;
   4518                     mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
   4519                 }
   4520             }
   4521         }
   4522         mPendingLiveRequest++;
   4523     }
   4524 
   4525     LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
   4526 
   4527     mState = STARTED;
   4528     // Added a timed condition wait
   4529     struct timespec ts;
   4530     uint8_t isValidTimeout = 1;
   4531     rc = clock_gettime(CLOCK_MONOTONIC, &ts);
   4532     if (rc < 0) {
   4533       isValidTimeout = 0;
   4534       LOGE("Error reading the real time clock!!");
   4535     }
   4536     else {
   4537       // Make timeout as 5 sec for request to be honored
   4538       ts.tv_sec += 5;
   4539     }
   4540     //Block on conditional variable
   4541     while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
   4542             (mState != ERROR) && (mState != DEINIT)) {
   4543         if (!isValidTimeout) {
   4544             LOGD("Blocking on conditional wait");
   4545             pthread_cond_wait(&mRequestCond, &mMutex);
   4546         }
   4547         else {
   4548             LOGD("Blocking on timed conditional wait");
   4549             rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
   4550             if (rc == ETIMEDOUT) {
   4551                 rc = -ENODEV;
   4552                 LOGE("Unblocked on timeout!!!!");
   4553                 break;
   4554             }
   4555         }
   4556         LOGD("Unblocked");
   4557         if (mWokenUpByDaemon) {
   4558             mWokenUpByDaemon = false;
   4559             if (mPendingLiveRequest < mMaxInFlightRequests)
   4560                 break;
   4561         }
   4562     }
   4563     pthread_mutex_unlock(&mMutex);
   4564 
   4565     return rc;
   4566 }
   4567 
   4568 /*===========================================================================
   4569  * FUNCTION   : dump
   4570  *
   4571  * DESCRIPTION:
   4572  *
   4573  * PARAMETERS :
   4574  *
   4575  *
   4576  * RETURN     :
   4577  *==========================================================================*/
   4578 void QCamera3HardwareInterface::dump(int fd)
   4579 {
   4580     pthread_mutex_lock(&mMutex);
   4581     dprintf(fd, "\n Camera HAL3 information Begin \n");
   4582 
   4583     dprintf(fd, "\nNumber of pending requests: %zu \n",
   4584         mPendingRequestsList.size());
   4585     dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
   4586     dprintf(fd, " Frame | Number of Buffers |   Req Id:   | Blob Req | Input buffer present\n");
   4587     dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
   4588     for(pendingRequestIterator i = mPendingRequestsList.begin();
   4589             i != mPendingRequestsList.end(); i++) {
   4590         dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
   4591         i->frame_number, i->num_buffers, i->request_id, i->blob_request,
   4592         i->input_buffer);
   4593     }
   4594     dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
   4595                 mPendingBuffersMap.get_num_overall_buffers());
   4596     dprintf(fd, "-------+------------------\n");
   4597     dprintf(fd, " Frame | Stream type mask \n");
   4598     dprintf(fd, "-------+------------------\n");
   4599     for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
   4600         for(auto &j : req.mPendingBufferList) {
   4601             QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
   4602             dprintf(fd, " %5d | %11d \n",
   4603                     req.frame_number, channel->getStreamTypeMask());
   4604         }
   4605     }
   4606     dprintf(fd, "-------+------------------\n");
   4607 
   4608     dprintf(fd, "\nPending frame drop list: %zu\n",
   4609         mPendingFrameDropList.size());
   4610     dprintf(fd, "-------+-----------\n");
   4611     dprintf(fd, " Frame | Stream ID \n");
   4612     dprintf(fd, "-------+-----------\n");
   4613     for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
   4614         i != mPendingFrameDropList.end(); i++) {
   4615         dprintf(fd, " %5d | %9d \n",
   4616             i->frame_number, i->stream_ID);
   4617     }
   4618     dprintf(fd, "-------+-----------\n");
   4619 
   4620     dprintf(fd, "\n Camera HAL3 information End \n");
   4621 
   4622     /* use dumpsys media.camera as trigger to send update debug level event */
   4623     mUpdateDebugLevel = true;
   4624     pthread_mutex_unlock(&mMutex);
   4625     return;
   4626 }
   4627 
   4628 /*===========================================================================
   4629  * FUNCTION   : flush
   4630  *
   4631  * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
   4632  *              conditionally restarts channels
   4633  *
   4634  * PARAMETERS :
   4635  *  @ restartChannels: re-start all channels
   4636  *
   4637  *
   4638  * RETURN     :
   4639  *          0 on success
   4640  *          Error code on failure
   4641  *==========================================================================*/
   4642 int QCamera3HardwareInterface::flush(bool restartChannels)
   4643 {
   4644     KPI_ATRACE_CALL();
   4645     int32_t rc = NO_ERROR;
   4646 
   4647     LOGD("Unblocking Process Capture Request");
   4648     pthread_mutex_lock(&mMutex);
   4649     mFlush = true;
   4650     pthread_mutex_unlock(&mMutex);
   4651 
   4652     rc = stopAllChannels();
   4653     // unlink of dualcam
   4654     if (mIsDeviceLinked) {
   4655         m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
   4656         pthread_mutex_lock(&gCamLock);
   4657 
   4658         if (mIsMainCamera == 1) {
   4659             m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
   4660             m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
   4661             // related session id should be session id of linked session
   4662             m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
   4663         } else {
   4664             m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
   4665             m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
   4666             m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
   4667         }
   4668         pthread_mutex_unlock(&gCamLock);
   4669 
   4670         rc = mCameraHandle->ops->sync_related_sensors(
   4671                 mCameraHandle->camera_handle, m_pRelCamSyncBuf);
   4672         if (rc < 0) {
   4673             LOGE("Dualcam: Unlink failed, but still proceed to close");
   4674         }
   4675     }
   4676 
   4677     if (rc < 0) {
   4678         LOGE("stopAllChannels failed");
   4679         return rc;
   4680     }
   4681     if (mChannelHandle) {
   4682         mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
   4683                 mChannelHandle);
   4684     }
   4685 
   4686     // Reset bundle info
   4687     rc = setBundleInfo();
   4688     if (rc < 0) {
   4689         LOGE("setBundleInfo failed %d", rc);
   4690         return rc;
   4691     }
   4692 
   4693     // Mutex Lock
   4694     pthread_mutex_lock(&mMutex);
   4695 
   4696     // Unblock process_capture_request
   4697     mPendingLiveRequest = 0;
   4698     pthread_cond_signal(&mRequestCond);
   4699 
   4700     rc = notifyErrorForPendingRequests();
   4701     if (rc < 0) {
   4702         LOGE("notifyErrorForPendingRequests failed");
   4703         pthread_mutex_unlock(&mMutex);
   4704         return rc;
   4705     }
   4706 
   4707     mFlush = false;
   4708 
   4709     // Start the Streams/Channels
   4710     if (restartChannels) {
   4711         rc = startAllChannels();
   4712         if (rc < 0) {
   4713             LOGE("startAllChannels failed");
   4714             pthread_mutex_unlock(&mMutex);
   4715             return rc;
   4716         }
   4717     }
   4718 
   4719     if (mChannelHandle) {
   4720         mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
   4721                     mChannelHandle);
   4722         if (rc < 0) {
   4723             LOGE("start_channel failed");
   4724             pthread_mutex_unlock(&mMutex);
   4725             return rc;
   4726         }
   4727     }
   4728 
   4729     pthread_mutex_unlock(&mMutex);
   4730 
   4731     return 0;
   4732 }
   4733 
   4734 /*===========================================================================
   4735  * FUNCTION   : flushPerf
   4736  *
   4737  * DESCRIPTION: This is the performance optimization version of flush that does
   4738  *              not use stream off, rather flushes the system
   4739  *
   4740  * PARAMETERS :
   4741  *
   4742  *
   4743  * RETURN     : 0 : success
   4744  *              -EINVAL: input is malformed (device is not valid)
   4745  *              -ENODEV: if the device has encountered a serious error
   4746  *==========================================================================*/
int QCamera3HardwareInterface::flushPerf()
{
    ATRACE_CALL();
    int32_t rc = 0;
    struct timespec timeout;
    bool timed_wait = false;

    // Snapshot how many buffers are still outstanding; the buffer-return path
    // decrements numPendingBufsAtFlush and signals mBuffersCond when it hits 0.
    pthread_mutex_lock(&mMutex);
    mFlushPerf = true;
    mPendingBuffersMap.numPendingBufsAtFlush =
        mPendingBuffersMap.get_num_overall_buffers();
    LOGD("Calling flush. Wait for %d buffers to return",
        mPendingBuffersMap.numPendingBufsAtFlush);

    /* send the flush event to the backend */
    rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
    if (rc < 0) {
        // Backend flush failed: treat as a serious device error per contract.
        LOGE("Error in flush: IOCTL failure");
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return -ENODEV;
    }

    if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
        // Nothing outstanding; no need to wait for buffer returns.
        LOGD("No pending buffers in HAL, return flush");
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    /* wait on a signal that buffers were received */
    // Use CLOCK_MONOTONIC so the deadline is immune to wall-clock changes;
    // fall back to an untimed wait if the clock read fails.
    rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
    if (rc < 0) {
        LOGE("Error reading the real time clock, cannot use timed wait");
    } else {
        timeout.tv_sec += FLUSH_TIMEOUT;
        timed_wait = true;
    }

    //Block on conditional variable
    // Loop guards against spurious wakeups: keep waiting until the count
    // actually reaches zero or the wait itself fails / times out.
    while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
        LOGD("Waiting on mBuffersCond");
        if (!timed_wait) {
            rc = pthread_cond_wait(&mBuffersCond, &mMutex);
            if (rc != 0) {
                 LOGE("pthread_cond_wait failed due to rc = %s",
                        strerror(rc));
                 break;
            }
        } else {
            // Absolute deadline: one overall FLUSH_TIMEOUT budget for all
            // buffers, not per wakeup.
            rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
            if (rc != 0) {
                LOGE("pthread_cond_timedwait failed due to rc = %s",
                            strerror(rc));
                break;
            }
        }
    }
    if (rc != 0) {
        // Wait failed or timed out (e.g. ETIMEDOUT) — report device error.
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return -ENODEV;
    }

    LOGD("Received buffers, now safe to return them");

    //make sure the channels handle flush
    //currently only required for the picture channel to release snapshot resources
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (*it)->channel;
        if (channel) {
            rc = channel->flush();
            if (rc) {
               LOGE("Flushing the channels failed with error %d", rc);
               // even though the channel flush failed we need to continue and
               // return the buffers we have to the framework, however the return
               // value will be an error
               rc = -ENODEV;
            }
        }
    }

    /* notify the frameworks and send errored results */
    rc = notifyErrorForPendingRequests();
    if (rc < 0) {
        LOGE("notifyErrorForPendingRequests failed");
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    //unblock process_capture_request
    mPendingLiveRequest = 0;
    unblockRequestIfNecessary();

    mFlushPerf = false;
    pthread_mutex_unlock(&mMutex);
    LOGD ("Flush Operation complete. rc = %d", rc);
    return rc;
}
   4847 
   4848 /*===========================================================================
   4849  * FUNCTION   : handleCameraDeviceError
   4850  *
   4851  * DESCRIPTION: This function calls internal flush and notifies the error to
   4852  *              framework and updates the state variable.
   4853  *
   4854  * PARAMETERS : None
   4855  *
   4856  * RETURN     : NO_ERROR on Success
   4857  *              Error code on failure
   4858  *==========================================================================*/
   4859 int32_t QCamera3HardwareInterface::handleCameraDeviceError()
   4860 {
   4861     int32_t rc = NO_ERROR;
   4862 
   4863     pthread_mutex_lock(&mMutex);
   4864     if (mState != ERROR) {
   4865         //if mState != ERROR, nothing to be done
   4866         pthread_mutex_unlock(&mMutex);
   4867         return NO_ERROR;
   4868     }
   4869     pthread_mutex_unlock(&mMutex);
   4870 
   4871     rc = flush(false /* restart channels */);
   4872     if (NO_ERROR != rc) {
   4873         LOGE("internal flush to handle mState = ERROR failed");
   4874     }
   4875 
   4876     pthread_mutex_lock(&mMutex);
   4877     mState = DEINIT;
   4878     pthread_mutex_unlock(&mMutex);
   4879 
   4880     camera3_notify_msg_t notify_msg;
   4881     memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
   4882     notify_msg.type = CAMERA3_MSG_ERROR;
   4883     notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
   4884     notify_msg.message.error.error_stream = NULL;
   4885     notify_msg.message.error.frame_number = 0;
   4886     mCallbackOps->notify(mCallbackOps, &notify_msg);
   4887 
   4888     return rc;
   4889 }
   4890 
   4891 /*===========================================================================
   4892  * FUNCTION   : captureResultCb
   4893  *
   4894  * DESCRIPTION: Callback handler for all capture result
   4895  *              (streams, as well as metadata)
   4896  *
   4897  * PARAMETERS :
   4898  *   @metadata : metadata information
   4899  *   @buffer   : actual gralloc buffer to be returned to frameworks.
   4900  *               NULL if metadata.
   4901  *
   4902  * RETURN     : NONE
   4903  *==========================================================================*/
   4904 void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
   4905                 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
   4906 {
   4907     if (metadata_buf) {
   4908         pthread_mutex_lock(&mMutex);
   4909         uint8_t batchSize = mBatchSize;
   4910         pthread_mutex_unlock(&mMutex);
   4911         if (batchSize) {
   4912             handleBatchMetadata(metadata_buf,
   4913                     true /* free_and_bufdone_meta_buf */);
   4914         } else { /* mBatchSize = 0 */
   4915             hdrPlusPerfLock(metadata_buf);
   4916             pthread_mutex_lock(&mMutex);
   4917             handleMetadataWithLock(metadata_buf,
   4918                     true /* free_and_bufdone_meta_buf */,
   4919                     true /* last urgent frame of batch metadata */,
   4920                     true /* last frame of batch metadata */ );
   4921             pthread_mutex_unlock(&mMutex);
   4922         }
   4923     } else if (isInputBuffer) {
   4924         pthread_mutex_lock(&mMutex);
   4925         handleInputBufferWithLock(frame_number);
   4926         pthread_mutex_unlock(&mMutex);
   4927     } else {
   4928         pthread_mutex_lock(&mMutex);
   4929         handleBufferWithLock(buffer, frame_number);
   4930         pthread_mutex_unlock(&mMutex);
   4931     }
   4932     return;
   4933 }
   4934 
   4935 /*===========================================================================
   4936  * FUNCTION   : getReprocessibleOutputStreamId
   4937  *
   4938  * DESCRIPTION: Get source output stream id for the input reprocess stream
   4939  *              based on size and format, which would be the largest
   4940  *              output stream if an input stream exists.
   4941  *
   4942  * PARAMETERS :
   4943  *   @id      : return the stream id if found
   4944  *
   4945  * RETURN     : int32_t type of status
   4946  *              NO_ERROR  -- success
   4947  *              none-zero failure code
   4948  *==========================================================================*/
   4949 int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
   4950 {
   4951     /* check if any output or bidirectional stream with the same size and format
   4952        and return that stream */
   4953     if ((mInputStreamInfo.dim.width > 0) &&
   4954             (mInputStreamInfo.dim.height > 0)) {
   4955         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   4956                 it != mStreamInfo.end(); it++) {
   4957 
   4958             camera3_stream_t *stream = (*it)->stream;
   4959             if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
   4960                     (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
   4961                     (stream->format == mInputStreamInfo.format)) {
   4962                 // Usage flag for an input stream and the source output stream
   4963                 // may be different.
   4964                 LOGD("Found reprocessible output stream! %p", *it);
   4965                 LOGD("input stream usage 0x%x, current stream usage 0x%x",
   4966                          stream->usage, mInputStreamInfo.usage);
   4967 
   4968                 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
   4969                 if (channel != NULL && channel->mStreams[0]) {
   4970                     id = channel->mStreams[0]->getMyServerID();
   4971                     return NO_ERROR;
   4972                 }
   4973             }
   4974         }
   4975     } else {
   4976         LOGD("No input stream, so no reprocessible output stream");
   4977     }
   4978     return NAME_NOT_FOUND;
   4979 }
   4980 
   4981 /*===========================================================================
   4982  * FUNCTION   : lookupFwkName
   4983  *
   4984  * DESCRIPTION: In case the enum is not same in fwk and backend
   4985  *              make sure the parameter is correctly propogated
   4986  *
   4987  * PARAMETERS  :
   4988  *   @arr      : map between the two enums
   4989  *   @len      : len of the map
   4990  *   @hal_name : name of the hal_parm to map
   4991  *
   4992  * RETURN     : int type of status
   4993  *              fwk_name  -- success
   4994  *              none-zero failure code
   4995  *==========================================================================*/
   4996 template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
   4997         size_t len, halType hal_name)
   4998 {
   4999 
   5000     for (size_t i = 0; i < len; i++) {
   5001         if (arr[i].hal_name == hal_name) {
   5002             return arr[i].fwk_name;
   5003         }
   5004     }
   5005 
   5006     /* Not able to find matching framework type is not necessarily
   5007      * an error case. This happens when mm-camera supports more attributes
   5008      * than the frameworks do */
   5009     LOGH("Cannot find matching framework type");
   5010     return NAME_NOT_FOUND;
   5011 }
   5012 
   5013 /*===========================================================================
   5014  * FUNCTION   : lookupHalName
   5015  *
   5016  * DESCRIPTION: In case the enum is not same in fwk and backend
   5017  *              make sure the parameter is correctly propogated
   5018  *
   5019  * PARAMETERS  :
   5020  *   @arr      : map between the two enums
   5021  *   @len      : len of the map
   5022  *   @fwk_name : name of the hal_parm to map
   5023  *
   5024  * RETURN     : int32_t type of status
   5025  *              hal_name  -- success
   5026  *              none-zero failure code
   5027  *==========================================================================*/
   5028 template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
   5029         size_t len, fwkType fwk_name)
   5030 {
   5031     for (size_t i = 0; i < len; i++) {
   5032         if (arr[i].fwk_name == fwk_name) {
   5033             return arr[i].hal_name;
   5034         }
   5035     }
   5036 
   5037     LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
   5038     return NAME_NOT_FOUND;
   5039 }
   5040 
   5041 /*===========================================================================
   5042  * FUNCTION   : lookupProp
   5043  *
   5044  * DESCRIPTION: lookup a value by its name
   5045  *
   5046  * PARAMETERS :
   5047  *   @arr     : map between the two enums
   5048  *   @len     : size of the map
   5049  *   @name    : name to be looked up
   5050  *
   5051  * RETURN     : Value if found
   5052  *              CAM_CDS_MODE_MAX if not found
   5053  *==========================================================================*/
   5054 template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
   5055         size_t len, const char *name)
   5056 {
   5057     if (name) {
   5058         for (size_t i = 0; i < len; i++) {
   5059             if (!strcmp(arr[i].desc, name)) {
   5060                 return arr[i].val;
   5061             }
   5062         }
   5063     }
   5064     return CAM_CDS_MODE_MAX;
   5065 }
   5066 
   5067 /*===========================================================================
   5068  *
   5069  * DESCRIPTION:
   5070  *
   5071  * PARAMETERS :
   5072  *   @metadata : metadata information from callback
   5073  *   @pendingRequest: pending request for this metadata
   5074  *   @pprocDone: whether internal offline postprocsesing is done
   5075  *   @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
   5076  *                         in a batch. Always true for non-batch mode.
   5077  *
   5078  * RETURN     : camera_metadata_t*
   5079  *              metadata in a format specified by fwk
   5080  *==========================================================================*/
   5081 camera_metadata_t*
   5082 QCamera3HardwareInterface::translateFromHalMetadata(
   5083                                  metadata_buffer_t *metadata,
   5084                                  const PendingRequestInfo& pendingRequest,
   5085                                  bool pprocDone,
   5086                                  bool lastMetadataInBatch)
   5087 {
   5088     CameraMetadata camMetadata;
   5089     camera_metadata_t *resultMetadata;
   5090 
   5091     if (!lastMetadataInBatch) {
   5092         /* In batch mode, use empty metadata if this is not the last in batch*/
   5093         resultMetadata = allocate_camera_metadata(0, 0);
   5094         return resultMetadata;
   5095     }
   5096 
   5097     if (pendingRequest.jpegMetadata.entryCount())
   5098         camMetadata.append(pendingRequest.jpegMetadata);
   5099 
   5100     camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
   5101     camMetadata.update(ANDROID_REQUEST_ID, &pendingRequest.request_id, 1);
   5102     camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pendingRequest.pipeline_depth, 1);
   5103     camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &pendingRequest.capture_intent, 1);
   5104     camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &pendingRequest.hybrid_ae_enable, 1);
   5105     if (mBatchSize == 0) {
   5106         // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
   5107         camMetadata.update(DEVCAMDEBUG_META_ENABLE, &pendingRequest.DevCamDebug_meta_enable, 1);
   5108     }
   5109 
   5110     // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
   5111     // Only update DevCameraDebug metadta conditionally: non-HFR mode and it is enabled.
   5112     if (mBatchSize == 0 && pendingRequest.DevCamDebug_meta_enable != 0) {
   5113         // DevCamDebug metadata translateFromHalMetadata AF
   5114         IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
   5115                 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
   5116             int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
   5117             camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
   5118         }
   5119         IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
   5120                 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
   5121             int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
   5122             camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
   5123         }
   5124         IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
   5125                 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
   5126             int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
   5127             camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
   5128         }
   5129         IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
   5130                 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
   5131             int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
   5132             camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
   5133         }
   5134         IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
   5135                 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
   5136             int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
   5137             camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
   5138         }
   5139         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
   5140                 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
   5141             int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
   5142                 *DevCamDebug_af_monitor_pdaf_target_pos;
   5143             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
   5144                 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
   5145         }
   5146         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
   5147                 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
   5148             int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
   5149                 *DevCamDebug_af_monitor_pdaf_confidence;
   5150             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
   5151                 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
   5152         }
   5153         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
   5154                 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
   5155             int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
   5156             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
   5157                 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
   5158         }
   5159         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
   5160                 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
   5161             int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
   5162                 *DevCamDebug_af_monitor_tof_target_pos;
   5163             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
   5164                 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
   5165         }
   5166         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
   5167                 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
   5168             int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
   5169                 *DevCamDebug_af_monitor_tof_confidence;
   5170             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
   5171                 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
   5172         }
   5173         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
   5174                 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
   5175             int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
   5176             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
   5177                 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
   5178         }
   5179         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
   5180                 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
   5181             int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
   5182             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
   5183                 &fwk_DevCamDebug_af_monitor_type_select, 1);
   5184         }
   5185         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
   5186                 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
   5187             int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
   5188             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
   5189                 &fwk_DevCamDebug_af_monitor_refocus, 1);
   5190         }
   5191         IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
   5192                 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
   5193             int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
   5194             camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
   5195                 &fwk_DevCamDebug_af_monitor_target_pos, 1);
   5196         }
   5197         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
   5198                 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
   5199             int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
   5200                 *DevCamDebug_af_search_pdaf_target_pos;
   5201             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
   5202                 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
   5203         }
   5204         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
   5205                 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
   5206             int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
   5207             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
   5208                 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
   5209         }
   5210         IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
   5211                 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
   5212             int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
   5213             camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
   5214                 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
   5215         }
        // AF-search debug values (PDAF and TOF sweep state): each value, when
        // present in the HAL `metadata`, is copied into a stack local and
        // published under the matching DevCamDebug vendor tag in camMetadata.
        // IF_META_AVAILABLE is a project macro; the `else` usage later in this
        // function shows it expands to an if(...) that binds a typed pointer
        // when the entry exists.
        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
                CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
            int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
                &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
        }
        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
                CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
            int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
                &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
        }
        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
                CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
            int32_t fwk_DevCamDebug_af_search_tof_target_pos =
                *DevCamDebug_af_search_tof_target_pos;
            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
                &fwk_DevCamDebug_af_search_tof_target_pos, 1);
        }
        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
                CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
            int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
                &fwk_DevCamDebug_af_search_tof_next_pos, 1);
        }
        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
                CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
            int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
                &fwk_DevCamDebug_af_search_tof_near_pos, 1);
        }
        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
                CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
            int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
                &fwk_DevCamDebug_af_search_tof_far_pos, 1);
        }
        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
                CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
            int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
                &fwk_DevCamDebug_af_search_tof_confidence, 1);
        }
        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
                CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
            int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
                &fwk_DevCamDebug_af_search_type_select, 1);
        }
        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
                CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
            int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
                &fwk_DevCamDebug_af_search_next_pos, 1);
        }
        IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
                CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
            int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
            camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
                &fwk_DevCamDebug_af_search_target_pos, 1);
        }
        // DevCamDebug metadata translateFromHalMetadata AEC
        // Auto-exposure debug values: luma targets/averages, exposure line
        // count, gain, exposure index and lux index, each re-published as a
        // DevCamDebug vendor tag when present in the HAL metadata.
        IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
                CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
            int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
            camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
    } // NOTE(review): under-indented brace; it closes the IF_META_AVAILABLE block above
        IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
                CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
            int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
            camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
        }
        IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
                CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
            int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
            camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
        }
        IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
                CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
            int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
            camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
        }
        IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
                CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
            int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
            camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
        }
        IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
                CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
            float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
            camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
        }
        IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
                CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
            int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
            camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
        }
        IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
                CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
            float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
            camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
        }
        // DevCamDebug metadata translateFromHalMetadata AWB
        // Auto-white-balance debug values: per-channel gains, correlated color
        // temperature and the AWB decision, published as DevCamDebug vendor tags.
        IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
                CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
            float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
            camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
        }
        IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
                CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
            float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
            camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
        }
        IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
                CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
            float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
            camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
        }
        IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
                CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
            int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
            camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
        }
        IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
                CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
            int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
            camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
        }
   5344     }
   5345     // atrace_end(ATRACE_TAG_ALWAYS);
   5346 
    // HAL frame number widened to the framework's int64 sync frame number.
    IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
        int64_t fwk_frame_number = *frame_number;
        camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
    }

    // AE target FPS range: HAL reports float fps, framework wants int32 pairs.
    // Fractional fps is truncated by the cast — assumes HAL ranges are integral.
    IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
        int32_t fps_range[2];
        fps_range[0] = (int32_t)float_range->min_fps;
        fps_range[1] = (int32_t)float_range->max_fps;
        camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
                                      fps_range, 2);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
             fps_range[0], fps_range[1]);
    }

    IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
        camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
    }

    // Map the HAL "bestshot" mode to the framework scene mode enum; skip the
    // tag entirely when the lookup fails.
    IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
        int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
                METADATA_MAP_SIZE(SCENE_MODES_MAP),
                *sceneMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkSceneMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
            LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
                     fwkSceneMode);
        }
    }

    IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
        uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
        camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
    }

    IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
        uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
        camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
    }

    IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
        uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
        camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
    }

    IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
            CAM_INTF_META_EDGE_MODE, metadata) {
        camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
    }

    IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
        uint8_t fwk_flashPower = (uint8_t) *flashPower;
        camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
    }

    IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
        camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
    }

    // Negative HAL flash state means "unknown" and is not reported. On devices
    // without a flash unit the state is forced to UNAVAILABLE as the framework
    // requires.
    IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
        if (0 <= *flashState) {
            uint8_t fwk_flashState = (uint8_t) *flashState;
            if (!gCamCapability[mCameraId]->flash_available) {
                fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
            }
            camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
        }
    }

    IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
        int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwk_flashMode = (uint8_t)val;
            camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
        }
    }
   5424 
    IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
        uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
        camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
    }

    IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
        camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
    }

    IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
        camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
    }

    IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
        camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
    }

    IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
        uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
        camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
    }

    // The `else` below pairs with the if(...) hidden inside IF_META_AVAILABLE:
    // it runs when the video-stab entry is absent from the HAL metadata.
    IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
        uint8_t fwk_videoStab = (uint8_t) *videoStab;
        LOGD("fwk_videoStab = %d", fwk_videoStab);
        camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
    } else {
        // Regardless of Video stab supports or not, CTS is expecting the EIS result to be non NULL
        // and so hardcoding the Video Stab result to OFF mode.
        uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
        camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
        LOGD("%s: EIS result default to OFF mode", __func__);
    }
   5458 
    IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
        uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
        camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
    }

    IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
        camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
    }

    // Dynamic black level: reorder the HAL per-channel values for this
    // sensor's CFA, publish the vendor tag, then rescale for the framework tag.
    IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
        CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
        float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];

        adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
              gCamCapability[mCameraId]->color_arrangement);

        LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
          blackLevelAppliedPattern->cam_black_level[0],
          blackLevelAppliedPattern->cam_black_level[1],
          blackLevelAppliedPattern->cam_black_level[2],
          blackLevelAppliedPattern->cam_black_level[3]);
        camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
                BLACK_LEVEL_PATTERN_CNT);

        // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
        // Need convert the internal 12 bit depth to sensor 10 bit sensor raw
        // depth space.
        fwk_blackLevelInd[0] /= 4.0;
        fwk_blackLevelInd[1] /= 4.0;
        fwk_blackLevelInd[2] /= 4.0;
        fwk_blackLevelInd[3] /= 4.0;
        camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
                BLACK_LEVEL_PATTERN_CNT);
    }
   5493 
    // Fixed whitelevel is used by ISP/Sensor
    // (published unconditionally — not gated on any metadata entry).
    camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
            &gCamCapability[mCameraId]->white_level, 1);

    IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
            CAM_INTF_META_SCALER_CROP_REGION, metadata) {
        int32_t scalerCropRegion[4];
        scalerCropRegion[0] = hScalerCropRegion->left;
        scalerCropRegion[1] = hScalerCropRegion->top;
        scalerCropRegion[2] = hScalerCropRegion->width;
        scalerCropRegion[3] = hScalerCropRegion->height;

        // Adjust crop region from sensor output coordinate system to active
        // array coordinate system.
        mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
                scalerCropRegion[2], scalerCropRegion[3]);

        camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
    }
   5513 
    IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
        LOGD("sensorExpTime = %lld", *sensorExpTime);
        camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
    }

    // "sensorFameDuration" (sic — missing 'r') is the per-frame duration.
    IF_META_AVAILABLE(int64_t, sensorFameDuration,
            CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
        LOGD("sensorFameDuration = %lld", *sensorFameDuration);
        camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
    }

    IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
            CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
        LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
        camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
                sensorRollingShutterSkew, 1);
    }

    // Sensitivity also drives the per-channel noise profile (S, O) pairs —
    // one (S, O) pair per color channel, interleaved in a VLA.
    IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
        LOGD("sensorSensitivity = %d", *sensorSensitivity);
        camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);

        //calculate the noise profile based on sensitivity
        double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
        double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
        double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
        for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
            noise_profile[i]   = noise_profile_S;
            noise_profile[i+1] = noise_profile_O;
        }
        LOGD("noise model entry (S, O) is (%f, %f)",
                noise_profile_S, noise_profile_O);
        camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
                (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
    }

    // Post-RAW sensitivity boost: start from the ISP value (default 100) and
    // scale by the post-stats factor when present; always published.
    int32_t fwk_ispSensitivity = 100;
    IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
        fwk_ispSensitivity = (int32_t) *ispSensitivity;
    }
    IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
        fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
    }
    camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);

    IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
        uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
        camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
    }
   5563 
   5564     IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
   5565         int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
   5566                 *faceDetectMode);
   5567         if (NAME_NOT_FOUND != val) {
   5568             uint8_t fwk_faceDetectMode = (uint8_t)val;
   5569             camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
   5570 
   5571             if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
   5572                 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
   5573                         CAM_INTF_META_FACE_DETECTION, metadata) {
   5574                     uint8_t numFaces = MIN(
   5575                             faceDetectionInfo->num_faces_detected, MAX_ROI);
   5576                     int32_t faceIds[MAX_ROI];
   5577                     uint8_t faceScores[MAX_ROI];
   5578                     int32_t faceRectangles[MAX_ROI * 4];
   5579                     int32_t faceLandmarks[MAX_ROI * 6];
   5580                     size_t j = 0, k = 0;
   5581 
   5582                     for (size_t i = 0; i < numFaces; i++) {
   5583                         faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
   5584                         // Adjust crop region from sensor output coordinate system to active
   5585                         // array coordinate system.
   5586                         cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
   5587                         mCropRegionMapper.toActiveArray(rect.left, rect.top,
   5588                                 rect.width, rect.height);
   5589 
   5590                         convertToRegions(faceDetectionInfo->faces[i].face_boundary,
   5591                                 faceRectangles+j, -1);
   5592 
   5593                         j+= 4;
   5594                     }
   5595                     if (numFaces <= 0) {
   5596                         memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
   5597                         memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
   5598                         memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
   5599                         memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
   5600                     }
   5601 
   5602                     camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
   5603                             numFaces);
   5604                     camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
   5605                             faceRectangles, numFaces * 4U);
   5606                     if (fwk_faceDetectMode ==
   5607                             ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
   5608                         IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
   5609                                 CAM_INTF_META_FACE_LANDMARK, metadata) {
   5610 
   5611                             for (size_t i = 0; i < numFaces; i++) {
   5612                                 // Map the co-ordinate sensor output coordinate system to active
   5613                                 // array coordinate system.
   5614                                 mCropRegionMapper.toActiveArray(
   5615                                         landmarks->face_landmarks[i].left_eye_center.x,
   5616                                         landmarks->face_landmarks[i].left_eye_center.y);
   5617                                 mCropRegionMapper.toActiveArray(
   5618                                         landmarks->face_landmarks[i].right_eye_center.x,
   5619                                         landmarks->face_landmarks[i].right_eye_center.y);
   5620                                 mCropRegionMapper.toActiveArray(
   5621                                         landmarks->face_landmarks[i].mouth_center.x,
   5622                                         landmarks->face_landmarks[i].mouth_center.y);
   5623 
   5624                                 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
   5625                                 k+= 6;
   5626                             }
   5627                         }
   5628 
   5629                         camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
   5630                         camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
   5631                                 faceLandmarks, numFaces * 6U);
   5632                    }
   5633                 }
   5634             }
   5635         }
   5636     }
   5637 
    IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
        uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
        camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &fwk_histogramMode, 1);
    }

    IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
            CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
        uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
    }

    // Sharpness map is published at the full compile-time maximum size.
    IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
            CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
        camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
                CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
    }

    // Lens shading map: clamp the advertised map size to the HAL maximums and
    // publish 4 gain floats (one per Bayer channel) per grid cell.
    IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
            CAM_INTF_META_LENS_SHADING_MAP, metadata) {
        size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
                CAM_MAX_SHADING_MAP_HEIGHT);
        size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
                CAM_MAX_SHADING_MAP_WIDTH);
        camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
                lensShadingMap->lens_shading, 4U * map_width * map_height);
    }
   5664 
    IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
        uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
        camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
    }

    IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
        //Populate CAM_INTF_META_TONEMAP_CURVES
        /* ch0 = G, ch 1 = B, ch 2 = R*/
        // Clamp (and log) an out-of-range point count; note this writes back
        // into the HAL metadata buffer via the non-const tonemap pointer.
        if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
            LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
                     tonemap->tonemap_points_cnt,
                    CAM_MAX_TONEMAP_CURVE_SIZE);
            tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
        }

        // Each curve point is an (in, out) pair, hence the "* 2" counts.
        camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
                        &tonemap->curves[0].tonemap_points[0][0],
                        tonemap->tonemap_points_cnt * 2);

        camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
                        &tonemap->curves[1].tonemap_points[0][0],
                        tonemap->tonemap_points_cnt * 2);

        camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
                        &tonemap->curves[2].tonemap_points[0][0],
                        tonemap->tonemap_points_cnt * 2);
    }
   5692 
    IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
            CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
        camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
                CC_GAINS_COUNT);
    }

    // The transform matrix is reinterpreted as rationals; the intermediate
    // (void *) silences the strict-aliasing/alignment cast warning.
    IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
            CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
        camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
                (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
                CC_MATRIX_COLS * CC_MATRIX_ROWS);
    }

    IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
            CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
        // Same clamp-and-log guard as the RGB tonemap curves above.
        if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
            LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
                     toneCurve->tonemap_points_cnt,
                    CAM_MAX_TONEMAP_CURVE_SIZE);
            toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
        }
        camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
                (float*)toneCurve->curve.tonemap_points,
                toneCurve->tonemap_points_cnt * 2);
    }

    IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
            CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
        camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
                predColorCorrectionGains->gains, 4);
    }

    IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
            CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
        camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
                (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
                CC_MATRIX_ROWS * CC_MATRIX_COLS);
    }
   5731 
    // OTP-calibrated Gr/Gb white-balance ratio reported as the green split.
    IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
        camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
    }

    IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
        uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
        camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
    }

    IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
        uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
        camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
    }

    // Map the HAL effect enum to the framework's; skip the tag on lookup miss.
    IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
        int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
                *effectMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwk_effectMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
        }
    }
   5754 
   5755     IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
   5756             CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
   5757         int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
   5758                 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
   5759         if (NAME_NOT_FOUND != fwk_testPatternMode) {
   5760             camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
   5761         }
   5762         int32_t fwk_testPatternData[4];
   5763         fwk_testPatternData[0] = testPatternData->r;
   5764         fwk_testPatternData[3] = testPatternData->b;
   5765         switch (gCamCapability[mCameraId]->color_arrangement) {
   5766         case CAM_FILTER_ARRANGEMENT_RGGB:
   5767         case CAM_FILTER_ARRANGEMENT_GRBG:
   5768             fwk_testPatternData[1] = testPatternData->gr;
   5769             fwk_testPatternData[2] = testPatternData->gb;
   5770             break;
   5771         case CAM_FILTER_ARRANGEMENT_GBRG:
   5772         case CAM_FILTER_ARRANGEMENT_BGGR:
   5773             fwk_testPatternData[2] = testPatternData->gr;
   5774             fwk_testPatternData[1] = testPatternData->gb;
   5775             break;
   5776         default:
   5777             LOGE("color arrangement %d is not supported",
   5778                 gCamCapability[mCameraId]->color_arrangement);
   5779             break;
   5780         }
   5781         camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
   5782     }
   5783 
    IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
        camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
    }

    // gps_methods is treated as a NUL-terminated C string — assumes the HAL
    // always terminates the buffer; TODO confirm against the vendor layer.
    IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
        String8 str((const char *)gps_methods);
        camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
    }

    IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
        camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
    }

    IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
        camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
    }

    IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
        uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
        camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
    }

    IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
        uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
        camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
    }

    IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
        int32_t fwk_thumb_size[2];
        fwk_thumb_size[0] = thumb_size->width;
        fwk_thumb_size[1] = thumb_size->height;
        camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
    }
   5817 
   5818     // Skip reprocess metadata for high speed mode.
   5819     if (mBatchSize == 0) {
   5820         IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
   5821             camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
   5822                      privateData,
   5823                      MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
   5824         }
   5825     }
   5826 
   5827     if (metadata->is_tuning_params_valid) {
   5828         uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
   5829         uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
   5830         metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
   5831 
   5832 
   5833         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
   5834                 sizeof(uint32_t));
   5835         data += sizeof(uint32_t);
   5836 
   5837         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
   5838                 sizeof(uint32_t));
   5839         LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
   5840         data += sizeof(uint32_t);
   5841 
   5842         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
   5843                 sizeof(uint32_t));
   5844         LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
   5845         data += sizeof(uint32_t);
   5846 
   5847         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
   5848                 sizeof(uint32_t));
   5849         LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
   5850         data += sizeof(uint32_t);
   5851 
   5852         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
   5853                 sizeof(uint32_t));
   5854         LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
   5855         data += sizeof(uint32_t);
   5856 
   5857         metadata->tuning_params.tuning_mod3_data_size = 0;
   5858         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
   5859                 sizeof(uint32_t));
   5860         LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
   5861         data += sizeof(uint32_t);
   5862 
   5863         size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
   5864                 TUNING_SENSOR_DATA_MAX);
   5865         memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
   5866                 count);
   5867         data += count;
   5868 
   5869         count = MIN(metadata->tuning_params.tuning_vfe_data_size,
   5870                 TUNING_VFE_DATA_MAX);
   5871         memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
   5872                 count);
   5873         data += count;
   5874 
   5875         count = MIN(metadata->tuning_params.tuning_cpp_data_size,
   5876                 TUNING_CPP_DATA_MAX);
   5877         memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
   5878                 count);
   5879         data += count;
   5880 
   5881         count = MIN(metadata->tuning_params.tuning_cac_data_size,
   5882                 TUNING_CAC_DATA_MAX);
   5883         memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
   5884                 count);
   5885         data += count;
   5886 
   5887         camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
   5888                 (int32_t *)(void *)tuning_meta_data_blob,
   5889                 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
   5890     }
   5891 
   5892     IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
   5893             CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
   5894         camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
   5895                 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
   5896                 NEUTRAL_COL_POINTS);
   5897     }
   5898 
   5899     IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
   5900         uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
   5901         camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
   5902     }
   5903 
   5904     IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
   5905         int32_t aeRegions[REGIONS_TUPLE_COUNT];
   5906         // Adjust crop region from sensor output coordinate system to active
   5907         // array coordinate system.
   5908         mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
   5909                 hAeRegions->rect.width, hAeRegions->rect.height);
   5910 
   5911         convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
   5912         camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
   5913                 REGIONS_TUPLE_COUNT);
   5914         LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
   5915                  aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
   5916                 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
   5917                 hAeRegions->rect.height);
   5918     }
   5919 
   5920     if (!pendingRequest.focusStateSent) {
   5921         if (pendingRequest.focusStateValid) {
   5922             camMetadata.update(ANDROID_CONTROL_AF_STATE, &pendingRequest.focusState, 1);
   5923             LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", pendingRequest.focusState);
   5924         } else {
   5925             IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
   5926                 uint8_t fwk_afState = (uint8_t) *afState;
   5927                 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
   5928                 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
   5929             }
   5930         }
   5931     }
   5932 
   5933     IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
   5934         camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
   5935     }
   5936 
   5937     IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
   5938         camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
   5939     }
   5940 
   5941     IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
   5942         uint8_t fwk_lensState = *lensState;
   5943         camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
   5944     }
   5945 
   5946     IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
   5947         /*af regions*/
   5948         int32_t afRegions[REGIONS_TUPLE_COUNT];
   5949         // Adjust crop region from sensor output coordinate system to active
   5950         // array coordinate system.
   5951         mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
   5952                 hAfRegions->rect.width, hAfRegions->rect.height);
   5953 
   5954         convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
   5955         camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
   5956                 REGIONS_TUPLE_COUNT);
   5957         LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
   5958                  afRegions[0], afRegions[1], afRegions[2], afRegions[3],
   5959                 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
   5960                 hAfRegions->rect.height);
   5961     }
   5962 
   5963     IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
   5964         uint32_t ab_mode = *hal_ab_mode;
   5965         if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
   5966                 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
   5967               ab_mode = CAM_ANTIBANDING_MODE_AUTO;
   5968         }
   5969         int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
   5970                 ab_mode);
   5971         if (NAME_NOT_FOUND != val) {
   5972             uint8_t fwk_ab_mode = (uint8_t)val;
   5973             camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
   5974         }
   5975     }
   5976 
   5977     IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
   5978         int val = lookupFwkName(SCENE_MODES_MAP,
   5979                 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
   5980         if (NAME_NOT_FOUND != val) {
   5981             uint8_t fwkBestshotMode = (uint8_t)val;
   5982             camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
   5983             LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
   5984         } else {
   5985             LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
   5986         }
   5987     }
   5988 
   5989     IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
   5990          uint8_t fwk_mode = (uint8_t) *mode;
   5991          camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
   5992     }
   5993 
   5994     uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
   5995     camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
   5996 
   5997     int32_t hotPixelMap[2];
   5998     camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
   5999 
   6000     // CDS
   6001     IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
   6002         camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
   6003     }
   6004 
   6005     // TNR
   6006     IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
   6007         uint8_t tnr_enable       = tnr->denoise_enable;
   6008         int32_t tnr_process_type = (int32_t)tnr->process_plates;
   6009 
   6010         camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
   6011         camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
   6012     }
   6013 
   6014     // Reprocess crop data
   6015     IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
   6016         uint8_t cnt = crop_data->num_of_streams;
   6017         if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
   6018             // mm-qcamera-daemon only posts crop_data for streams
   6019             // not linked to pproc. So no valid crop metadata is not
   6020             // necessarily an error case.
   6021             LOGD("No valid crop metadata entries");
   6022         } else {
   6023             uint32_t reproc_stream_id;
   6024             if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
   6025                 LOGD("No reprocessible stream found, ignore crop data");
   6026             } else {
   6027                 int rc = NO_ERROR;
   6028                 Vector<int32_t> roi_map;
   6029                 int32_t *crop = new int32_t[cnt*4];
   6030                 if (NULL == crop) {
   6031                    rc = NO_MEMORY;
   6032                 }
   6033                 if (NO_ERROR == rc) {
   6034                     int32_t streams_found = 0;
   6035                     for (size_t i = 0; i < cnt; i++) {
   6036                         if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
   6037                             if (pprocDone) {
   6038                                 // HAL already does internal reprocessing,
   6039                                 // either via reprocessing before JPEG encoding,
   6040                                 // or offline postprocessing for pproc bypass case.
   6041                                 crop[0] = 0;
   6042                                 crop[1] = 0;
   6043                                 crop[2] = mInputStreamInfo.dim.width;
   6044                                 crop[3] = mInputStreamInfo.dim.height;
   6045                             } else {
   6046                                 crop[0] = crop_data->crop_info[i].crop.left;
   6047                                 crop[1] = crop_data->crop_info[i].crop.top;
   6048                                 crop[2] = crop_data->crop_info[i].crop.width;
   6049                                 crop[3] = crop_data->crop_info[i].crop.height;
   6050                             }
   6051                             roi_map.add(crop_data->crop_info[i].roi_map.left);
   6052                             roi_map.add(crop_data->crop_info[i].roi_map.top);
   6053                             roi_map.add(crop_data->crop_info[i].roi_map.width);
   6054                             roi_map.add(crop_data->crop_info[i].roi_map.height);
   6055                             streams_found++;
   6056                             LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
   6057                                     crop[0], crop[1], crop[2], crop[3]);
   6058                             LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
   6059                                     crop_data->crop_info[i].roi_map.left,
   6060                                     crop_data->crop_info[i].roi_map.top,
   6061                                     crop_data->crop_info[i].roi_map.width,
   6062                                     crop_data->crop_info[i].roi_map.height);
   6063                             break;
   6064 
   6065                        }
   6066                     }
   6067                     camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
   6068                             &streams_found, 1);
   6069                     camMetadata.update(QCAMERA3_CROP_REPROCESS,
   6070                             crop, (size_t)(streams_found * 4));
   6071                     if (roi_map.array()) {
   6072                         camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
   6073                                 roi_map.array(), roi_map.size());
   6074                     }
   6075                }
   6076                if (crop) {
   6077                    delete [] crop;
   6078                }
   6079             }
   6080         }
   6081     }
   6082 
   6083     if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
   6084         // Regardless of CAC supports or not, CTS is expecting the CAC result to be non NULL and
   6085         // so hardcoding the CAC result to OFF mode.
   6086         uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
   6087         camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
   6088     } else {
   6089         IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
   6090             int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
   6091                     *cacMode);
   6092             if (NAME_NOT_FOUND != val) {
   6093                 uint8_t resultCacMode = (uint8_t)val;
   6094                 // check whether CAC result from CB is equal to Framework set CAC mode
   6095                 // If not equal then set the CAC mode came in corresponding request
   6096                 if (pendingRequest.fwkCacMode != resultCacMode) {
   6097                     resultCacMode = pendingRequest.fwkCacMode;
   6098                 }
   6099                 LOGD("fwk_cacMode=%d resultCacMode=%d", pendingRequest.fwkCacMode, resultCacMode);
   6100                 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
   6101             } else {
   6102                 LOGE("Invalid CAC camera parameter: %d", *cacMode);
   6103             }
   6104         }
   6105     }
   6106 
   6107     // Post blob of cam_cds_data through vendor tag.
   6108     IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
   6109         uint8_t cnt = cdsInfo->num_of_streams;
   6110         cam_cds_data_t cdsDataOverride;
   6111         memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
   6112         cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
   6113         cdsDataOverride.num_of_streams = 1;
   6114         if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
   6115             uint32_t reproc_stream_id;
   6116             if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
   6117                 LOGD("No reprocessible stream found, ignore cds data");
   6118             } else {
   6119                 for (size_t i = 0; i < cnt; i++) {
   6120                     if (cdsInfo->cds_info[i].stream_id ==
   6121                             reproc_stream_id) {
   6122                         cdsDataOverride.cds_info[0].cds_enable =
   6123                                 cdsInfo->cds_info[i].cds_enable;
   6124                         break;
   6125                     }
   6126                 }
   6127             }
   6128         } else {
   6129             LOGD("Invalid stream count %d in CDS_DATA", cnt);
   6130         }
   6131         camMetadata.update(QCAMERA3_CDS_INFO,
   6132                 (uint8_t *)&cdsDataOverride,
   6133                 sizeof(cam_cds_data_t));
   6134     }
   6135 
   6136     // Ldaf calibration data
   6137     if (!mLdafCalibExist) {
   6138         IF_META_AVAILABLE(uint32_t, ldafCalib,
   6139                 CAM_INTF_META_LDAF_EXIF, metadata) {
   6140             mLdafCalibExist = true;
   6141             mLdafCalib[0] = ldafCalib[0];
   6142             mLdafCalib[1] = ldafCalib[1];
   6143             LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
   6144                     ldafCalib[0], ldafCalib[1]);
   6145         }
   6146     }
   6147 
   6148     // AF scene change
   6149     IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
   6150         camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
   6151         camMetadata.update(ANDROID_CONTROL_AF_SCENE_CHANGE, afSceneChange, 1);
   6152     }
   6153 
   6154     resultMetadata = camMetadata.release();
   6155     return resultMetadata;
   6156 }
   6157 
   6158 /*===========================================================================
   6159  * FUNCTION   : saveExifParams
   6160  *
 * DESCRIPTION: Caches 3A/EXIF debug parameters (AE/AWB/AF/ASD/stats/bhist/
 *              3A tuning) from a metadata callback into mExifParams
   6162  *
   6163  * PARAMETERS :
   6164  *   @metadata : metadata information from callback
   6165  *
   6166  * RETURN     : none
   6167  *
   6168  *==========================================================================*/
   6169 void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
   6170 {
   6171     IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
   6172             CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
   6173         if (mExifParams.debug_params) {
   6174             mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
   6175             mExifParams.debug_params->ae_debug_params_valid = TRUE;
   6176         }
   6177     }
   6178     IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
   6179             CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
   6180         if (mExifParams.debug_params) {
   6181             mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
   6182             mExifParams.debug_params->awb_debug_params_valid = TRUE;
   6183         }
   6184     }
   6185     IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
   6186             CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
   6187         if (mExifParams.debug_params) {
   6188             mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
   6189             mExifParams.debug_params->af_debug_params_valid = TRUE;
   6190         }
   6191     }
   6192     IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
   6193             CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
   6194         if (mExifParams.debug_params) {
   6195             mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
   6196             mExifParams.debug_params->asd_debug_params_valid = TRUE;
   6197         }
   6198     }
   6199     IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
   6200             CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
   6201         if (mExifParams.debug_params) {
   6202             mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
   6203             mExifParams.debug_params->stats_debug_params_valid = TRUE;
   6204         }
   6205     }
   6206     IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
   6207             CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
   6208         if (mExifParams.debug_params) {
   6209             mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
   6210             mExifParams.debug_params->bestats_debug_params_valid = TRUE;
   6211         }
   6212     }
   6213     IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
   6214             CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
   6215         if (mExifParams.debug_params) {
   6216             mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
   6217             mExifParams.debug_params->bhist_debug_params_valid = TRUE;
   6218         }
   6219     }
   6220     IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
   6221             CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
   6222         if (mExifParams.debug_params) {
   6223             mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
   6224             mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
   6225         }
   6226     }
   6227 }
   6228 
   6229 /*===========================================================================
   6230  * FUNCTION   : get3AExifParams
   6231  *
 * DESCRIPTION: Returns a copy of the cached 3A EXIF parameters most recently
 *              stored by saveExifParams()
   6233  *
   6234  * PARAMETERS : none
   6235  *
   6236  *
   6237  * RETURN     : mm_jpeg_exif_params_t
   6238  *
   6239  *==========================================================================*/
   6240 mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
   6241 {
   6242     return mExifParams;
   6243 }
   6244 
   6245 /*===========================================================================
   6246  * FUNCTION   : translateCbUrgentMetadataToResultMetadata
   6247  *
 * DESCRIPTION: Translates urgent (partial-result) metadata from the HAL
 *              callback into framework result metadata
   6249  *
   6250  * PARAMETERS :
   6251  *   @metadata : metadata information from callback
   6252  *   @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
   6253  *                               urgent metadata in a batch. Always true for
   6254  *                               non-batch mode.
   6255  *   @frame_number :             frame number for this urgent metadata
   6256  *
   6257  * RETURN     : camera_metadata_t*
   6258  *              metadata in a format specified by fwk
   6259  *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
                                (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch,
                                 uint32_t frame_number)
{
    CameraMetadata camMetadata;
    camera_metadata_t *resultMetadata;

    if (!lastUrgentMetadataInBatch) {
        /* In batch mode, use empty metadata if this is not the last in batch
         */
        resultMetadata = allocate_camera_metadata(0, 0);
        return resultMetadata;
    }

    // AWB state: HAL value is forwarded as the framework's uint8_t enum.
    IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
        uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
        camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
    }

    // AE precapture trigger and its id are echoed back to the framework verbatim.
    IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
                &aecTrigger->trigger, 1);
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
                &aecTrigger->trigger_id, 1);
        LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
                 aecTrigger->trigger);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
                aecTrigger->trigger_id);
    }

    // AE state (searching/converged/...) forwarded as-is.
    IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
        uint8_t fwk_ae_state = (uint8_t) *ae_state;
        camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
    }

    // AF mode: translate the HAL focus mode through FOCUS_MODES_MAP; skip the
    // tag entirely if the HAL value has no framework equivalent.
    IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkAfMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
            LOGD("urgent Metadata : ANDROID_CONTROL_AF_MODE %d", val);
        } else {
            LOGH("urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d",
                    val);
        }
    }

    // AF trigger handling. When both a trigger and an AF state arrive, cache
    // the trigger in mAfTrigger (used below even on frames where no trigger
    // metadata is present) and propagate AF state to pending requests.
    IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
        LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
            af_trigger->trigger);
        LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
            af_trigger->trigger_id);

        IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
            mAfTrigger = *af_trigger;
            uint32_t fwk_AfState = (uint32_t) *afState;

            // If this is the result for a new trigger, check if there is new early
            // af state. If there is, use the last af state for all results
            // preceding current partial frame number.
            for (auto & pendingRequest : mPendingRequestsList) {
                if (pendingRequest.frame_number < frame_number) {
                    // Older pending requests report the current (pre-trigger)
                    // AF state instead of whatever arrives with their own
                    // metadata later.
                    pendingRequest.focusStateValid = true;
                    pendingRequest.focusState = fwk_AfState;
                } else if (pendingRequest.frame_number == frame_number) {
                    IF_META_AVAILABLE(uint32_t, earlyAfState, CAM_INTF_META_EARLY_AF_STATE, metadata) {
                        // Check if early AF state for trigger exists. If yes, send AF state as
                        // partial result for better latency.
                        uint8_t fwkEarlyAfState = (uint8_t) *earlyAfState;
                        // Mark as sent so the final result path does not
                        // publish ANDROID_CONTROL_AF_STATE a second time.
                        pendingRequest.focusStateSent = true;
                        camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwkEarlyAfState, 1);
                        LOGD("urgent Metadata(%d) : ANDROID_CONTROL_AF_STATE %u",
                                 frame_number, fwkEarlyAfState);
                    }
                }
            }
        }
    }
    // Always publish the last-seen AF trigger/id (cached in mAfTrigger), even
    // when this frame carried no trigger metadata.
    camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
        &mAfTrigger.trigger, 1);
    camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &mAfTrigger.trigger_id, 1);

    // AWB mode: translate through WHITE_BALANCE_MODES_MAP; omit on unknown value.
    IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkWhiteBalanceMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
            LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
        } else {
            LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
        }
    }

    // Deduce ANDROID_CONTROL_AE_MODE from up to three HAL inputs (AE mode,
    // LED/flash mode, red-eye reduction). Each starts at an "unset" sentinel
    // in case its metadata entry is absent.
    uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
    uint32_t aeMode = CAM_AE_MODE_MAX;
    int32_t flashMode = CAM_FLASH_MODE_MAX;
    int32_t redeye = -1;
    IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
        aeMode = *pAeMode;
    }
    IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
        flashMode = *pFlashMode;
    }
    IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
        redeye = *pRedeye;
    }

    // Precedence: red-eye reduction > auto/on flash > plain AE on/off/external
    // flash. Anything else is logged and the tag is left out of the result.
    if (1 == redeye) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
        int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
                flashMode);
        if (NAME_NOT_FOUND != val) {
            fwk_aeMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
        } else {
            LOGE("Unsupported flash mode %d", flashMode);
        }
    } else if (aeMode == CAM_AE_MODE_ON) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (aeMode == CAM_AE_MODE_OFF) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
        fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else {
        LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
              "flashMode:%d, aeMode:%u!!!",
                 redeye, flashMode, aeMode);
    }

    // Transfer ownership of the assembled metadata buffer to the caller.
    resultMetadata = camMetadata.release();
    return resultMetadata;
}
   6401 
   6402 /*===========================================================================
   6403  * FUNCTION   : dumpMetadataToFile
   6404  *
   6405  * DESCRIPTION: Dumps tuning metadata to file system
   6406  *
   6407  * PARAMETERS :
   6408  *   @meta           : tuning metadata
   6409  *   @dumpFrameCount : current dump frame count
   6410  *   @enabled        : Enable mask
   6411  *
   6412  *==========================================================================*/
void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
                                                   uint32_t &dumpFrameCount,
                                                   bool enabled,
                                                   const char *type,
                                                   uint32_t frameNumber)
{
    //Some sanity checks
    // Each section size is validated against its compile-time maximum before
    // anything is written; an oversized section would make the section reads
    // below run past the fixed offsets inside meta.data.
    if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
        LOGE("Tuning sensor data size bigger than expected %d: %d",
              meta.tuning_sensor_data_size,
              TUNING_SENSOR_DATA_MAX);
        return;
    }

    if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
        LOGE("Tuning VFE data size bigger than expected %d: %d",
              meta.tuning_vfe_data_size,
              TUNING_VFE_DATA_MAX);
        return;
    }

    if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
        LOGE("Tuning CPP data size bigger than expected %d: %d",
              meta.tuning_cpp_data_size,
              TUNING_CPP_DATA_MAX);
        return;
    }

    if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
        LOGE("Tuning CAC data size bigger than expected %d: %d",
              meta.tuning_cac_data_size,
              TUNING_CAC_DATA_MAX);
        return;
    }
    //

    if(enabled){
        // File name is "<location>IMG_<timestamp><dumpFrameCount>m_<type>_<frameNumber>.bin"
        // so the metadata dump pairs up with the JPEG/raw dumps of the same shot.
        char timeBuf[FILENAME_MAX];
        char buf[FILENAME_MAX];
        memset(buf, 0, sizeof(buf));
        memset(timeBuf, 0, sizeof(timeBuf));
        time_t current_time;
        struct tm * timeinfo;
        time (&current_time);
        timeinfo = localtime (&current_time);
        if (timeinfo != NULL) {
            /* Consistent naming for Jpeg+meta+raw: meta name */
            strftime (timeBuf, sizeof(timeBuf),
                    QCAMERA_DUMP_FRM_LOCATION"IMG_%Y%m%d_%H%M%S", timeinfo);
            /* Consistent naming for Jpeg+meta+raw: meta name end*/
        }
        String8 filePath(timeBuf);
         /* Consistent naming for Jpeg+meta+raw */
        snprintf(buf,
                sizeof(buf),
                "%dm_%s_%d.bin",
                dumpFrameCount,
                type,
                frameNumber);
         /* Consistent naming for Jpeg+meta+raw end */
        filePath.append(buf);
        // NOTE(review): mode 0777 makes the dump world-writable; 0644 would
        // be safer for a debug artifact — confirm nothing relies on this.
        int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
        if (file_fd >= 0) {
            // Accumulates bytes written but is never compared to the expected
            // total, so short writes currently go unnoticed.
            ssize_t written_len = 0;
            meta.tuning_data_version = TUNING_DATA_VERSION;
            // Header: data version followed by the five section sizes, each
            // written out as a raw uint32_t.
            void *data = (void *)((uint8_t *)&meta.tuning_data_version);
            written_len += write(file_fd, data, sizeof(uint32_t));
            data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
            LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
            written_len += write(file_fd, data, sizeof(uint32_t));
            data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
            LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
            written_len += write(file_fd, data, sizeof(uint32_t));
            data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
            LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
            written_len += write(file_fd, data, sizeof(uint32_t));
            data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
            LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
            written_len += write(file_fd, data, sizeof(uint32_t));
            // The mod3 section is not dumped: its advertised size is forced
            // to 0 so readers do not expect a payload for it.
            meta.tuning_mod3_data_size = 0;
            data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
            LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
            written_len += write(file_fd, data, sizeof(uint32_t));
            // Body: the sensor section starts at offset 0 of meta.data; the
            // VFE/CPP/CAC sections live at their fixed TUNING_*_DATA_OFFSETs.
            size_t total_size = meta.tuning_sensor_data_size;
            data = (void *)((uint8_t *)&meta.data);
            written_len += write(file_fd, data, total_size);
            total_size = meta.tuning_vfe_data_size;
            data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
            written_len += write(file_fd, data, total_size);
            total_size = meta.tuning_cpp_data_size;
            data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
            written_len += write(file_fd, data, total_size);
            total_size = meta.tuning_cac_data_size;
            data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
            written_len += write(file_fd, data, total_size);
            close(file_fd);
        }else {
            LOGE("fail to open file for metadata dumping");
        }
    }
}
   6514 
   6515 /*===========================================================================
   6516  * FUNCTION   : cleanAndSortStreamInfo
   6517  *
   6518  * DESCRIPTION: helper method to clean up invalid streams in stream_info,
   6519  *              and sort them such that raw stream is at the end of the list
   6520  *              This is a workaround for camera daemon constraint.
   6521  *
   6522  * PARAMETERS : None
   6523  *
   6524  *==========================================================================*/
   6525 void QCamera3HardwareInterface::cleanAndSortStreamInfo()
   6526 {
   6527     List<stream_info_t *> newStreamInfo;
   6528 
   6529     /*clean up invalid streams*/
   6530     for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
   6531             it != mStreamInfo.end();) {
   6532         if(((*it)->status) == INVALID){
   6533             QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
   6534             delete channel;
   6535             free(*it);
   6536             it = mStreamInfo.erase(it);
   6537         } else {
   6538             it++;
   6539         }
   6540     }
   6541 
   6542     // Move preview/video/callback/snapshot streams into newList
   6543     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   6544             it != mStreamInfo.end();) {
   6545         if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
   6546                 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
   6547                 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
   6548             newStreamInfo.push_back(*it);
   6549             it = mStreamInfo.erase(it);
   6550         } else
   6551             it++;
   6552     }
   6553     // Move raw streams into newList
   6554     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   6555             it != mStreamInfo.end();) {
   6556         newStreamInfo.push_back(*it);
   6557         it = mStreamInfo.erase(it);
   6558     }
   6559 
   6560     mStreamInfo = newStreamInfo;
   6561 }
   6562 
   6563 /*===========================================================================
   6564  * FUNCTION   : extractJpegMetadata
   6565  *
   6566  * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
   6567  *              JPEG metadata is cached in HAL, and return as part of capture
   6568  *              result when metadata is returned from camera daemon.
   6569  *
   6570  * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
   6571  *              @request:      capture request
   6572  *
   6573  *==========================================================================*/
   6574 void QCamera3HardwareInterface::extractJpegMetadata(
   6575         CameraMetadata& jpegMetadata,
   6576         const camera3_capture_request_t *request)
   6577 {
   6578     CameraMetadata frame_settings;
   6579     frame_settings = request->settings;
   6580 
   6581     if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
   6582         jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
   6583                 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
   6584                 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
   6585 
   6586     if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
   6587         jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
   6588                 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
   6589                 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
   6590 
   6591     if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
   6592         jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
   6593                 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
   6594                 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
   6595 
   6596     if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
   6597         jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
   6598                 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
   6599                 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
   6600 
   6601     if (frame_settings.exists(ANDROID_JPEG_QUALITY))
   6602         jpegMetadata.update(ANDROID_JPEG_QUALITY,
   6603                 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
   6604                 frame_settings.find(ANDROID_JPEG_QUALITY).count);
   6605 
   6606     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
   6607         jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
   6608                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
   6609                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
   6610 
   6611     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
   6612         int32_t thumbnail_size[2];
   6613         thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
   6614         thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
   6615         if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
   6616             int32_t orientation =
   6617                   frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
   6618             if ((orientation == 90) || (orientation == 270)) {
   6619                //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
   6620                int32_t temp;
   6621                temp = thumbnail_size[0];
   6622                thumbnail_size[0] = thumbnail_size[1];
   6623                thumbnail_size[1] = temp;
   6624             }
   6625          }
   6626          jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
   6627                 thumbnail_size,
   6628                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
   6629     }
   6630 
   6631 }
   6632 
   6633 /*===========================================================================
   6634  * FUNCTION   : convertToRegions
   6635  *
   6636  * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
   6637  *
   6638  * PARAMETERS :
   6639  *   @rect   : cam_rect_t struct to convert
   6640  *   @region : int32_t destination array
   6641  *   @weight : if we are converting from cam_area_t, weight is valid
   6642  *             else weight = -1
   6643  *
   6644  *==========================================================================*/
   6645 void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
   6646         int32_t *region, int weight)
   6647 {
   6648     region[0] = rect.left;
   6649     region[1] = rect.top;
   6650     region[2] = rect.left + rect.width;
   6651     region[3] = rect.top + rect.height;
   6652     if (weight > -1) {
   6653         region[4] = weight;
   6654     }
   6655 }
   6656 
   6657 /*===========================================================================
   6658  * FUNCTION   : convertFromRegions
   6659  *
   6660  * DESCRIPTION: helper method to convert from array to cam_rect_t
   6661  *
   6662  * PARAMETERS :
 *   @roi      : cam_area_t to populate from the metadata entry
 *   @settings : capture request settings metadata to read from
 *   @tag      : metadata tag whose data is laid out as
 *               [x_min, y_min, x_max, y_max, weight]
   6667  *
   6668  *==========================================================================*/
   6669 void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
   6670         const camera_metadata_t *settings, uint32_t tag)
   6671 {
   6672     CameraMetadata frame_settings;
   6673     frame_settings = settings;
   6674     int32_t x_min = frame_settings.find(tag).data.i32[0];
   6675     int32_t y_min = frame_settings.find(tag).data.i32[1];
   6676     int32_t x_max = frame_settings.find(tag).data.i32[2];
   6677     int32_t y_max = frame_settings.find(tag).data.i32[3];
   6678     roi.weight = frame_settings.find(tag).data.i32[4];
   6679     roi.rect.left = x_min;
   6680     roi.rect.top = y_min;
   6681     roi.rect.width = x_max - x_min;
   6682     roi.rect.height = y_max - y_min;
   6683 }
   6684 
   6685 /*===========================================================================
   6686  * FUNCTION   : resetIfNeededROI
   6687  *
   6688  * DESCRIPTION: helper method to reset the roi if it is greater than scaler
   6689  *              crop region
   6690  *
   6691  * PARAMETERS :
   6692  *   @roi       : cam_area_t struct to resize
   6693  *   @scalerCropRegion : cam_crop_region_t region to compare against
   6694  *
   6695  *
   6696  *==========================================================================*/
   6697 bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
   6698                                                  const cam_crop_region_t* scalerCropRegion)
   6699 {
   6700     int32_t roi_x_max = roi->rect.width + roi->rect.left;
   6701     int32_t roi_y_max = roi->rect.height + roi->rect.top;
   6702     int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
   6703     int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
   6704 
   6705     /* According to spec weight = 0 is used to indicate roi needs to be disabled
   6706      * without having this check the calculations below to validate if the roi
   6707      * is inside scalar crop region will fail resulting in the roi not being
   6708      * reset causing algorithm to continue to use stale roi window
   6709      */
   6710     if (roi->weight == 0) {
   6711         return true;
   6712     }
   6713 
   6714     if ((roi_x_max < scalerCropRegion->left) ||
   6715         // right edge of roi window is left of scalar crop's left edge
   6716         (roi_y_max < scalerCropRegion->top)  ||
   6717         // bottom edge of roi window is above scalar crop's top edge
   6718         (roi->rect.left > crop_x_max) ||
   6719         // left edge of roi window is beyond(right) of scalar crop's right edge
   6720         (roi->rect.top > crop_y_max)){
   6721         // top edge of roi windo is above scalar crop's top edge
   6722         return false;
   6723     }
   6724     if (roi->rect.left < scalerCropRegion->left) {
   6725         roi->rect.left = scalerCropRegion->left;
   6726     }
   6727     if (roi->rect.top < scalerCropRegion->top) {
   6728         roi->rect.top = scalerCropRegion->top;
   6729     }
   6730     if (roi_x_max > crop_x_max) {
   6731         roi_x_max = crop_x_max;
   6732     }
   6733     if (roi_y_max > crop_y_max) {
   6734         roi_y_max = crop_y_max;
   6735     }
   6736     roi->rect.width = roi_x_max - roi->rect.left;
   6737     roi->rect.height = roi_y_max - roi->rect.top;
   6738     return true;
   6739 }
   6740 
   6741 /*===========================================================================
   6742  * FUNCTION   : convertLandmarks
   6743  *
   6744  * DESCRIPTION: helper method to extract the landmarks from face detection info
   6745  *
   6746  * PARAMETERS :
   6747  *   @landmark_data : input landmark data to be converted
   6748  *   @landmarks : int32_t destination array
   6749  *
   6750  *
   6751  *==========================================================================*/
   6752 void QCamera3HardwareInterface::convertLandmarks(
   6753         cam_face_landmarks_info_t landmark_data,
   6754         int32_t *landmarks)
   6755 {
   6756     landmarks[0] = (int32_t)landmark_data.left_eye_center.x;
   6757     landmarks[1] = (int32_t)landmark_data.left_eye_center.y;
   6758     landmarks[2] = (int32_t)landmark_data.right_eye_center.x;
   6759     landmarks[3] = (int32_t)landmark_data.right_eye_center.y;
   6760     landmarks[4] = (int32_t)landmark_data.mouth_center.x;
   6761     landmarks[5] = (int32_t)landmark_data.mouth_center.y;
   6762 }
   6763 
   6764 #define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
   6765 /*===========================================================================
   6766  * FUNCTION   : initCapabilities
   6767  *
   6768  * DESCRIPTION: initialize camera capabilities in static data struct
   6769  *
   6770  * PARAMETERS :
   6771  *   @cameraId  : camera Id
   6772  *
   6773  * RETURN     : int32_t type of status
   6774  *              NO_ERROR  -- success
   6775  *              none-zero failure code
   6776  *==========================================================================*/
   6777 int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
   6778 {
   6779     int rc = 0;
   6780     mm_camera_vtbl_t *cameraHandle = NULL;
   6781     QCamera3HeapMemory *capabilityHeap = NULL;
   6782 
   6783     rc = camera_open((uint8_t)cameraId, &cameraHandle);
   6784     if (rc) {
   6785         LOGE("camera_open failed. rc = %d", rc);
   6786         goto open_failed;
   6787     }
   6788     if (!cameraHandle) {
   6789         LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
   6790         goto open_failed;
   6791     }
   6792 
   6793     capabilityHeap = new QCamera3HeapMemory(1);
   6794     if (capabilityHeap == NULL) {
   6795         LOGE("creation of capabilityHeap failed");
   6796         goto heap_creation_failed;
   6797     }
   6798     /* Allocate memory for capability buffer */
   6799     rc = capabilityHeap->allocate(sizeof(cam_capability_t));
   6800     if(rc != OK) {
   6801         LOGE("No memory for cappability");
   6802         goto allocate_failed;
   6803     }
   6804 
   6805     /* Map memory for capability buffer */
   6806     memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
   6807     rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
   6808                                 CAM_MAPPING_BUF_TYPE_CAPABILITY,
   6809                                 capabilityHeap->getFd(0),
   6810                                 sizeof(cam_capability_t),
   6811                                 capabilityHeap->getPtr(0));
   6812     if(rc < 0) {
   6813         LOGE("failed to map capability buffer");
   6814         goto map_failed;
   6815     }
   6816 
   6817     /* Query Capability */
   6818     rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
   6819     if(rc < 0) {
   6820         LOGE("failed to query capability");
   6821         goto query_failed;
   6822     }
   6823     gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
   6824     if (!gCamCapability[cameraId]) {
   6825         LOGE("out of memory");
   6826         goto query_failed;
   6827     }
   6828     memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
   6829                                         sizeof(cam_capability_t));
   6830 
   6831     int index;
   6832     for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
   6833         cam_analysis_info_t *p_analysis_info =
   6834                 &gCamCapability[cameraId]->analysis_info[index];
   6835         p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
   6836         p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
   6837     }
   6838     rc = 0;
   6839 
   6840 query_failed:
   6841     cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
   6842                             CAM_MAPPING_BUF_TYPE_CAPABILITY);
   6843 map_failed:
   6844     capabilityHeap->deallocate();
   6845 allocate_failed:
   6846     delete capabilityHeap;
   6847 heap_creation_failed:
   6848     cameraHandle->ops->close_camera(cameraHandle->camera_handle);
   6849     cameraHandle = NULL;
   6850 open_failed:
   6851     return rc;
   6852 }
   6853 
   6854 /*==========================================================================
   6855  * FUNCTION   : get3Aversion
   6856  *
   6857  * DESCRIPTION: get the Q3A S/W version
   6858  *
   6859  * PARAMETERS :
   6860  *  @sw_version: Reference of Q3A structure which will hold version info upon
   6861  *               return
   6862  *
   6863  * RETURN     : None
   6864  *
   6865  *==========================================================================*/
   6866 void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
   6867 {
   6868     if(gCamCapability[mCameraId])
   6869         sw_version = gCamCapability[mCameraId]->q3a_version;
   6870     else
   6871         LOGE("Capability structure NULL!");
   6872 }
   6873 
   6874 
   6875 /*===========================================================================
   6876  * FUNCTION   : initParameters
   6877  *
   6878  * DESCRIPTION: initialize camera parameters
   6879  *
   6880  * PARAMETERS :
   6881  *
   6882  * RETURN     : int32_t type of status
   6883  *              NO_ERROR  -- success
   6884  *              none-zero failure code
   6885  *==========================================================================*/
   6886 int QCamera3HardwareInterface::initParameters()
   6887 {
   6888     int rc = 0;
   6889 
   6890     //Allocate Set Param Buffer
   6891     mParamHeap = new QCamera3HeapMemory(1);
   6892     rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
   6893     if(rc != OK) {
   6894         rc = NO_MEMORY;
   6895         LOGE("Failed to allocate SETPARM Heap memory");
   6896         delete mParamHeap;
   6897         mParamHeap = NULL;
   6898         return rc;
   6899     }
   6900 
   6901     //Map memory for parameters buffer
   6902     rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
   6903             CAM_MAPPING_BUF_TYPE_PARM_BUF,
   6904             mParamHeap->getFd(0),
   6905             sizeof(metadata_buffer_t),
   6906             (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
   6907     if(rc < 0) {
   6908         LOGE("failed to map SETPARM buffer");
   6909         rc = FAILED_TRANSACTION;
   6910         mParamHeap->deallocate();
   6911         delete mParamHeap;
   6912         mParamHeap = NULL;
   6913         return rc;
   6914     }
   6915 
   6916     mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
   6917 
   6918     mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
   6919     return rc;
   6920 }
   6921 
   6922 /*===========================================================================
   6923  * FUNCTION   : deinitParameters
   6924  *
   6925  * DESCRIPTION: de-initialize camera parameters
   6926  *
   6927  * PARAMETERS :
   6928  *
   6929  * RETURN     : NONE
   6930  *==========================================================================*/
void QCamera3HardwareInterface::deinitParameters()
{
    // Teardown mirrors initParameters() in reverse: unmap the parameter
    // buffer from the camera backend before releasing the heap backing it.
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_PARM_BUF);

    mParamHeap->deallocate();
    delete mParamHeap;
    mParamHeap = NULL;

    // mParameters pointed into mParamHeap's storage; just clear the alias.
    mParameters = NULL;

    // mPrevParameters was malloc'd separately in initParameters().
    free(mPrevParameters);
    mPrevParameters = NULL;
}
   6945 
   6946 /*===========================================================================
   6947  * FUNCTION   : calcMaxJpegSize
   6948  *
   6949  * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
   6950  *
   6951  * PARAMETERS :
   6952  *
   6953  * RETURN     : max_jpeg_size
   6954  *==========================================================================*/
   6955 size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
   6956 {
   6957     size_t max_jpeg_size = 0;
   6958     size_t temp_width, temp_height;
   6959     size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
   6960             MAX_SIZES_CNT);
   6961     for (size_t i = 0; i < count; i++) {
   6962         temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
   6963         temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
   6964         if (temp_width * temp_height > max_jpeg_size ) {
   6965             max_jpeg_size = temp_width * temp_height;
   6966         }
   6967     }
   6968     max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
   6969     return max_jpeg_size;
   6970 }
   6971 
   6972 /*===========================================================================
   6973  * FUNCTION   : getMaxRawSize
   6974  *
   6975  * DESCRIPTION: Fetches maximum raw size supported by the cameraId
   6976  *
   6977  * PARAMETERS :
   6978  *
   6979  * RETURN     : Largest supported Raw Dimension
   6980  *==========================================================================*/
   6981 cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
   6982 {
   6983     int max_width = 0;
   6984     cam_dimension_t maxRawSize;
   6985 
   6986     memset(&maxRawSize, 0, sizeof(cam_dimension_t));
   6987     for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
   6988         if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
   6989             max_width = gCamCapability[camera_id]->raw_dim[i].width;
   6990             maxRawSize = gCamCapability[camera_id]->raw_dim[i];
   6991         }
   6992     }
   6993     return maxRawSize;
   6994 }
   6995 
   6996 
   6997 /*===========================================================================
   6998  * FUNCTION   : calcMaxJpegDim
   6999  *
   7000  * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
   7001  *
   7002  * PARAMETERS :
   7003  *
   7004  * RETURN     : max_jpeg_dim
   7005  *==========================================================================*/
   7006 cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
   7007 {
   7008     cam_dimension_t max_jpeg_dim;
   7009     cam_dimension_t curr_jpeg_dim;
   7010     max_jpeg_dim.width = 0;
   7011     max_jpeg_dim.height = 0;
   7012     curr_jpeg_dim.width = 0;
   7013     curr_jpeg_dim.height = 0;
   7014     for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
   7015         curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
   7016         curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
   7017         if (curr_jpeg_dim.width * curr_jpeg_dim.height >
   7018             max_jpeg_dim.width * max_jpeg_dim.height ) {
   7019             max_jpeg_dim.width = curr_jpeg_dim.width;
   7020             max_jpeg_dim.height = curr_jpeg_dim.height;
   7021         }
   7022     }
   7023     return max_jpeg_dim;
   7024 }
   7025 
   7026 /*===========================================================================
   7027  * FUNCTION   : addStreamConfig
   7028  *
   7029  * DESCRIPTION: adds the stream configuration to the array
   7030  *
   7031  * PARAMETERS :
   7032  * @available_stream_configs : pointer to stream configuration array
   7033  * @scalar_format            : scalar format
   7034  * @dim                      : configuration dimension
   7035  * @config_type              : input or output configuration type
   7036  *
   7037  * RETURN     : NONE
   7038  *==========================================================================*/
   7039 void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
   7040         int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
   7041 {
   7042     available_stream_configs.add(scalar_format);
   7043     available_stream_configs.add(dim.width);
   7044     available_stream_configs.add(dim.height);
   7045     available_stream_configs.add(config_type);
   7046 }
   7047 
   7048 /*===========================================================================
 * FUNCTION   : supportBurstCapture
   7050  *
   7051  * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
   7052  *
   7053  * PARAMETERS :
   7054  *   @cameraId  : camera Id
   7055  *
   7056  * RETURN     : true if camera supports BURST_CAPTURE
   7057  *              false otherwise
   7058  *==========================================================================*/
   7059 bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
   7060 {
   7061     const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
   7062     const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
   7063     const int32_t highResWidth = 3264;
   7064     const int32_t highResHeight = 2448;
   7065 
   7066     if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
   7067         // Maximum resolution images cannot be captured at >= 10fps
   7068         // -> not supporting BURST_CAPTURE
   7069         return false;
   7070     }
   7071 
   7072     if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
   7073         // Maximum resolution images can be captured at >= 20fps
   7074         // --> supporting BURST_CAPTURE
   7075         return true;
   7076     }
   7077 
   7078     // Find the smallest highRes resolution, or largest resolution if there is none
   7079     size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
   7080             MAX_SIZES_CNT);
   7081     size_t highRes = 0;
   7082     while ((highRes + 1 < totalCnt) &&
   7083             (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
   7084             gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
   7085             highResWidth * highResHeight)) {
   7086         highRes++;
   7087     }
   7088     if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
   7089         return true;
   7090     } else {
   7091         return false;
   7092     }
   7093 }
   7094 
   7095 /*===========================================================================
   7096  * FUNCTION   : initStaticMetadata
   7097  *
   7098  * DESCRIPTION: initialize the static metadata
   7099  *
   7100  * PARAMETERS :
   7101  *   @cameraId  : camera Id
   7102  *
   7103  * RETURN     : int32_t type of status
   7104  *              0  -- success
   7105  *              non-zero failure code
   7106  *==========================================================================*/
   7107 int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
   7108 {
   7109     int rc = 0;
   7110     CameraMetadata staticInfo;
   7111     size_t count = 0;
   7112     bool limitedDevice = false;
   7113     char prop[PROPERTY_VALUE_MAX];
   7114     bool supportBurst = false;
   7115 
   7116     supportBurst = supportBurstCapture(cameraId);
   7117 
   7118     /* If sensor is YUV sensor (no raw support) or if per-frame control is not
   7119      * guaranteed or if min fps of max resolution is less than 20 fps, its
   7120      * advertised as limited device*/
   7121     limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
   7122             (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
   7123             (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
   7124             !supportBurst;
   7125 
   7126     uint8_t supportedHwLvl = limitedDevice ?
   7127             ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
   7128             // LEVEL_3 - This device will support level 3.
   7129             ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
   7130 
   7131     staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
   7132             &supportedHwLvl, 1);
   7133 
   7134     bool facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
   7135     /*HAL 3 only*/
   7136     staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
   7137                     &gCamCapability[cameraId]->min_focus_distance, 1);
   7138 
   7139     staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
   7140                     &gCamCapability[cameraId]->hyper_focal_distance, 1);
   7141 
   7142     /*should be using focal lengths but sensor doesn't provide that info now*/
   7143     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
   7144                       &gCamCapability[cameraId]->focal_length,
   7145                       1);
   7146 
   7147     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
   7148             gCamCapability[cameraId]->apertures,
   7149             MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
   7150 
   7151     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
   7152             gCamCapability[cameraId]->filter_densities,
   7153             MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
   7154 
   7155 
   7156     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
   7157             (uint8_t *)gCamCapability[cameraId]->optical_stab_modes,
   7158             MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count));
   7159 
   7160     int32_t lens_shading_map_size[] = {
   7161             MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
   7162             MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
   7163     staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
   7164                       lens_shading_map_size,
   7165                       sizeof(lens_shading_map_size)/sizeof(int32_t));
   7166 
   7167     staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
   7168             gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
   7169 
   7170     staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
   7171             gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
   7172 
   7173     staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
   7174             &gCamCapability[cameraId]->max_frame_duration, 1);
   7175 
   7176     camera_metadata_rational baseGainFactor = {
   7177             gCamCapability[cameraId]->base_gain_factor.numerator,
   7178             gCamCapability[cameraId]->base_gain_factor.denominator};
   7179     staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
   7180                       &baseGainFactor, 1);
   7181 
   7182     staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
   7183                      (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
   7184 
   7185     int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
   7186             gCamCapability[cameraId]->pixel_array_size.height};
   7187     staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
   7188                       pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
   7189 
   7190     int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
   7191             gCamCapability[cameraId]->active_array_size.top,
   7192             gCamCapability[cameraId]->active_array_size.width,
   7193             gCamCapability[cameraId]->active_array_size.height};
   7194     staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
   7195             active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
   7196 
   7197     staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
   7198             &gCamCapability[cameraId]->white_level, 1);
   7199 
   7200     int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
   7201     adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
   7202             gCamCapability[cameraId]->color_arrangement);
   7203     staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
   7204             adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
   7205 
   7206     bool hasBlackRegions = false;
   7207     if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
   7208         LOGW("black_region_count: %d is bounded to %d",
   7209             gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
   7210         gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
   7211     }
   7212     if (gCamCapability[cameraId]->optical_black_region_count != 0) {
   7213         int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
   7214         for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
   7215             opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
   7216         }
   7217         staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
   7218                 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
   7219         hasBlackRegions = true;
   7220     }
   7221 
   7222     staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
   7223             &gCamCapability[cameraId]->flash_charge_duration, 1);
   7224 
   7225     staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
   7226             &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
   7227 
   7228     uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
   7229             ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
   7230             ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
   7231     staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
   7232             &timestampSource, 1);
   7233 
   7234     staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
   7235             &gCamCapability[cameraId]->histogram_size, 1);
   7236 
   7237     staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
   7238             &gCamCapability[cameraId]->max_histogram_count, 1);
   7239 
   7240     int32_t sharpness_map_size[] = {
   7241             gCamCapability[cameraId]->sharpness_map_size.width,
   7242             gCamCapability[cameraId]->sharpness_map_size.height};
   7243 
   7244     staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
   7245             sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
   7246 
   7247     staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
   7248             &gCamCapability[cameraId]->max_sharpness_map_value, 1);
   7249 
   7250     int32_t scalar_formats[] = {
   7251             ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
   7252             ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
   7253             ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
   7254             ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
   7255             HAL_PIXEL_FORMAT_RAW10,
   7256             HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
   7257     size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(int32_t);
   7258     staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
   7259                       scalar_formats,
   7260                       scalar_formats_count);
   7261 
   7262     int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
   7263     count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
   7264     makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
   7265             count, MAX_SIZES_CNT, available_processed_sizes);
   7266     staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
   7267             available_processed_sizes, count * 2);
   7268 
   7269     int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
   7270     count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
   7271     makeTable(gCamCapability[cameraId]->raw_dim,
   7272             count, MAX_SIZES_CNT, available_raw_sizes);
   7273     staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
   7274             available_raw_sizes, count * 2);
   7275 
   7276     int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
   7277     count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
   7278     makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
   7279             count, MAX_SIZES_CNT, available_fps_ranges);
   7280     staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
   7281             available_fps_ranges, count * 2);
   7282 
   7283     camera_metadata_rational exposureCompensationStep = {
   7284             gCamCapability[cameraId]->exp_compensation_step.numerator,
   7285             gCamCapability[cameraId]->exp_compensation_step.denominator};
   7286     staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
   7287                       &exposureCompensationStep, 1);
   7288 
   7289     Vector<uint8_t> availableVstabModes;
   7290     availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
   7291     char eis_prop[PROPERTY_VALUE_MAX];
   7292     memset(eis_prop, 0, sizeof(eis_prop));
   7293     property_get("persist.camera.eis.enable", eis_prop, "0");
   7294     uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
   7295     if (facingBack && eis_prop_set) {
   7296         availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
   7297     }
   7298     staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
   7299                       availableVstabModes.array(), availableVstabModes.size());
   7300 
   7301     /*HAL 1 and HAL 3 common*/
   7302     float maxZoom = 4;
   7303     staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
   7304             &maxZoom, 1);
   7305 
   7306     uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
   7307     staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
   7308 
   7309     int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
   7310     if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
   7311         max3aRegions[2] = 0; /* AF not supported */
   7312     staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
   7313             max3aRegions, 3);
   7314 
   7315     /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
   7316     memset(prop, 0, sizeof(prop));
   7317     property_get("persist.camera.facedetect", prop, "1");
   7318     uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
   7319     LOGD("Support face detection mode: %d",
   7320              supportedFaceDetectMode);
   7321 
   7322     int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
   7323     Vector<uint8_t> availableFaceDetectModes;
   7324     availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
   7325     if (supportedFaceDetectMode == 1) {
   7326         availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
   7327     } else if (supportedFaceDetectMode == 2) {
   7328         availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
   7329     } else if (supportedFaceDetectMode == 3) {
   7330         availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
   7331         availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
   7332     } else {
   7333         maxFaces = 0;
   7334     }
   7335     staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
   7336             availableFaceDetectModes.array(),
   7337             availableFaceDetectModes.size());
   7338     staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
   7339             (int32_t *)&maxFaces, 1);
   7340 
   7341     int32_t exposureCompensationRange[] = {
   7342             gCamCapability[cameraId]->exposure_compensation_min,
   7343             gCamCapability[cameraId]->exposure_compensation_max};
   7344     staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
   7345             exposureCompensationRange,
   7346             sizeof(exposureCompensationRange)/sizeof(int32_t));
   7347 
   7348     uint8_t lensFacing = (facingBack) ?
   7349             ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
   7350     staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
   7351 
   7352     staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
   7353                       available_thumbnail_sizes,
   7354                       sizeof(available_thumbnail_sizes)/sizeof(int32_t));
   7355 
   7356     /*all sizes will be clubbed into this tag*/
   7357     count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
   7358     /*android.scaler.availableStreamConfigurations*/
   7359     Vector<int32_t> available_stream_configs;
   7360     cam_dimension_t active_array_dim;
   7361     active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
   7362     active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
   7363     /* Add input/output stream configurations for each scalar formats*/
   7364     for (size_t j = 0; j < scalar_formats_count; j++) {
   7365         switch (scalar_formats[j]) {
   7366         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
   7367         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
   7368         case HAL_PIXEL_FORMAT_RAW10:
   7369             for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
   7370                     gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
   7371                 addStreamConfig(available_stream_configs, scalar_formats[j],
   7372                         gCamCapability[cameraId]->raw_dim[i],
   7373                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
   7374             }
   7375             break;
   7376         case HAL_PIXEL_FORMAT_BLOB:
   7377             for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
   7378                     gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
   7379                 addStreamConfig(available_stream_configs, scalar_formats[j],
   7380                         gCamCapability[cameraId]->picture_sizes_tbl[i],
   7381                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
   7382             }
   7383             break;
   7384         case HAL_PIXEL_FORMAT_YCbCr_420_888:
   7385         case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
   7386         default:
   7387             cam_dimension_t largest_picture_size;
   7388             memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
   7389             for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
   7390                     gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
   7391                 addStreamConfig(available_stream_configs, scalar_formats[j],
   7392                         gCamCapability[cameraId]->picture_sizes_tbl[i],
   7393                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
   7394                 /* Book keep largest */
   7395                 if (gCamCapability[cameraId]->picture_sizes_tbl[i].width
   7396                         >= largest_picture_size.width &&
   7397                         gCamCapability[cameraId]->picture_sizes_tbl[i].height
   7398                         >= largest_picture_size.height)
   7399                     largest_picture_size = gCamCapability[cameraId]->picture_sizes_tbl[i];
   7400             }
   7401             /*For below 2 formats we also support i/p streams for reprocessing advertise those*/
   7402             if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
   7403                     scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
   7404                  addStreamConfig(available_stream_configs, scalar_formats[j],
   7405                          largest_picture_size,
   7406                          ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
   7407             }
   7408             break;
   7409         }
   7410     }
   7411 
   7412     staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
   7413                       available_stream_configs.array(), available_stream_configs.size());
   7414 
   7415     /* android.scaler.availableMinFrameDurations */
   7416     Vector<int64_t> available_min_durations;
   7417     for (size_t j = 0; j < scalar_formats_count; j++) {
   7418         switch (scalar_formats[j]) {
   7419         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
   7420         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
   7421         case HAL_PIXEL_FORMAT_RAW10:
   7422             for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
   7423                     gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
   7424                 available_min_durations.add(scalar_formats[j]);
   7425                 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
   7426                 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
   7427                 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
   7428             }
   7429             break;
   7430         default:
   7431             for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
   7432                     gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
   7433                 available_min_durations.add(scalar_formats[j]);
   7434                 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
   7435                 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
   7436                 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
   7437             }
   7438             break;
   7439         }
   7440     }
   7441     staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
   7442                       available_min_durations.array(), available_min_durations.size());
   7443 
   7444     Vector<int32_t> available_hfr_configs;
   7445     for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
   7446         int32_t fps = 0;
   7447         switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
   7448         case CAM_HFR_MODE_60FPS:
   7449             fps = 60;
   7450             break;
   7451         case CAM_HFR_MODE_90FPS:
   7452             fps = 90;
   7453             break;
   7454         case CAM_HFR_MODE_120FPS:
   7455             fps = 120;
   7456             break;
   7457         case CAM_HFR_MODE_150FPS:
   7458             fps = 150;
   7459             break;
   7460         case CAM_HFR_MODE_180FPS:
   7461             fps = 180;
   7462             break;
   7463         case CAM_HFR_MODE_210FPS:
   7464             fps = 210;
   7465             break;
   7466         case CAM_HFR_MODE_240FPS:
   7467             fps = 240;
   7468             break;
   7469         case CAM_HFR_MODE_480FPS:
   7470             fps = 480;
   7471             break;
   7472         case CAM_HFR_MODE_OFF:
   7473         case CAM_HFR_MODE_MAX:
   7474         default:
   7475             break;
   7476         }
   7477 
   7478         /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
   7479         if (fps >= MIN_FPS_FOR_BATCH_MODE) {
   7480             /* For each HFR frame rate, need to advertise one variable fps range
   7481              * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
   7482              * and [120, 120]. While camcorder preview alone is running [30, 120] is
   7483              * set by the app. When video recording is started, [120, 120] is
   7484              * set. This way sensor configuration does not change when recording
   7485              * is started */
   7486 
   7487             /* (width, height, fps_min, fps_max, batch_size_max) */
   7488             for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
   7489                 j < MAX_SIZES_CNT; j++) {
   7490                 available_hfr_configs.add(
   7491                         gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
   7492                 available_hfr_configs.add(
   7493                         gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
   7494                 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
   7495                 available_hfr_configs.add(fps);
   7496                 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
   7497 
   7498                 /* (width, height, fps_min, fps_max, batch_size_max) */
   7499                 available_hfr_configs.add(
   7500                         gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
   7501                 available_hfr_configs.add(
   7502                         gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
   7503                 available_hfr_configs.add(fps);
   7504                 available_hfr_configs.add(fps);
   7505                 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
   7506             }
   7507        }
   7508     }
   7509     //Advertise HFR capability only if the property is set
   7510     memset(prop, 0, sizeof(prop));
   7511     property_get("persist.camera.hal3hfr.enable", prop, "1");
   7512     uint8_t hfrEnable = (uint8_t)atoi(prop);
   7513 
   7514     if(hfrEnable && available_hfr_configs.array()) {
   7515         staticInfo.update(
   7516                 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
   7517                 available_hfr_configs.array(), available_hfr_configs.size());
   7518     }
   7519 
   7520     int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
   7521     staticInfo.update(ANDROID_JPEG_MAX_SIZE,
   7522                       &max_jpeg_size, 1);
   7523 
   7524     uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
   7525     size_t size = 0;
   7526     count = CAM_EFFECT_MODE_MAX;
   7527     count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
   7528     for (size_t i = 0; i < count; i++) {
   7529         int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
   7530                 gCamCapability[cameraId]->supported_effects[i]);
   7531         if (NAME_NOT_FOUND != val) {
   7532             avail_effects[size] = (uint8_t)val;
   7533             size++;
   7534         }
   7535     }
   7536     staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
   7537                       avail_effects,
   7538                       size);
   7539 
   7540     uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
   7541     uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
   7542     size_t supported_scene_modes_cnt = 0;
   7543     count = CAM_SCENE_MODE_MAX;
   7544     count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
   7545     for (size_t i = 0; i < count; i++) {
   7546         if (gCamCapability[cameraId]->supported_scene_modes[i] !=
   7547                 CAM_SCENE_MODE_OFF) {
   7548             int val = lookupFwkName(SCENE_MODES_MAP,
   7549                     METADATA_MAP_SIZE(SCENE_MODES_MAP),
   7550                     gCamCapability[cameraId]->supported_scene_modes[i]);
   7551             if (NAME_NOT_FOUND != val) {
   7552                 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
   7553                 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
   7554                 supported_scene_modes_cnt++;
   7555             }
   7556         }
   7557     }
   7558     staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
   7559                       avail_scene_modes,
   7560                       supported_scene_modes_cnt);
   7561 
   7562     uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX  * 3];
   7563     makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
   7564                       supported_scene_modes_cnt,
   7565                       CAM_SCENE_MODE_MAX,
   7566                       scene_mode_overrides,
   7567                       supported_indexes,
   7568                       cameraId);
   7569 
   7570     if (supported_scene_modes_cnt == 0) {
   7571         supported_scene_modes_cnt = 1;
   7572         avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
   7573     }
   7574 
   7575     staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
   7576             scene_mode_overrides, supported_scene_modes_cnt * 3);
   7577 
   7578     uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
   7579                                          ANDROID_CONTROL_MODE_AUTO,
   7580                                          ANDROID_CONTROL_MODE_USE_SCENE_MODE};
   7581     staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
   7582             available_control_modes,
   7583             3);
   7584 
   7585     uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
   7586     size = 0;
   7587     count = CAM_ANTIBANDING_MODE_MAX;
   7588     count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
   7589     for (size_t i = 0; i < count; i++) {
   7590         int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
   7591                 gCamCapability[cameraId]->supported_antibandings[i]);
   7592         if (NAME_NOT_FOUND != val) {
   7593             avail_antibanding_modes[size] = (uint8_t)val;
   7594             size++;
   7595         }
   7596 
   7597     }
   7598     staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
   7599                       avail_antibanding_modes,
   7600                       size);
   7601 
   7602     uint8_t avail_abberation_modes[] = {
   7603             ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
   7604             ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
   7605             ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
   7606     count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
   7607     count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
   7608     if (0 == count) {
   7609         //  If no aberration correction modes are available for a device, this advertise OFF mode
   7610         size = 1;
   7611     } else {
   7612         // If count is not zero then atleast one among the FAST or HIGH quality is supported
   7613         // So, advertize all 3 modes if atleast any one mode is supported as per the
   7614         // new M requirement
   7615         size = 3;
   7616     }
   7617     staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
   7618             avail_abberation_modes,
   7619             size);
   7620 
   7621     uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
   7622     size = 0;
   7623     count = CAM_FOCUS_MODE_MAX;
   7624     count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
   7625     for (size_t i = 0; i < count; i++) {
   7626         int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
   7627                 gCamCapability[cameraId]->supported_focus_modes[i]);
   7628         if (NAME_NOT_FOUND != val) {
   7629             avail_af_modes[size] = (uint8_t)val;
   7630             size++;
   7631         }
   7632     }
   7633     staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
   7634                       avail_af_modes,
   7635                       size);
   7636 
   7637     uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
   7638     size = 0;
   7639     count = CAM_WB_MODE_MAX;
   7640     count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
   7641     for (size_t i = 0; i < count; i++) {
   7642         int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
   7643                 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
   7644                 gCamCapability[cameraId]->supported_white_balances[i]);
   7645         if (NAME_NOT_FOUND != val) {
   7646             avail_awb_modes[size] = (uint8_t)val;
   7647             size++;
   7648         }
   7649     }
   7650     staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
   7651                       avail_awb_modes,
   7652                       size);
   7653 
    // Flash firing power levels: clamp the capability-reported count to the
    // framework array bound, then publish ANDROID_FLASH_FIRING_POWER.
    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
    count = CAM_FLASH_FIRING_LEVEL_MAX;
    count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
            count);
    for (size_t i = 0; i < count; i++) {
        available_flash_levels[i] =
                gCamCapability[cameraId]->supported_firing_levels[i];
    }
    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
            available_flash_levels, count);

    // Translate the backend's flash_available flag into the framework's
    // TRUE/FALSE enum values for ANDROID_FLASH_INFO_AVAILABLE.
    uint8_t flashAvailable;
    if (gCamCapability[cameraId]->flash_available)
        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
    else
        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
            &flashAvailable, 1);

    // Supported AE modes, clamped to CAM_AE_MODE_MAX.
    // CAM_AE_MODE_ON_EXTERNAL_FLASH is remapped to the experimental 2016
    // vendor tag value before being advertised to the framework.
    Vector<uint8_t> avail_ae_modes;
    count = CAM_AE_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
    for (size_t i = 0; i < count; i++) {
        uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
        if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
            aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
        }
        avail_ae_modes.add(aeMode);
    }
    // Flash-driven AE modes are only advertised when a flash unit exists.
    if (flashAvailable) {
        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
                      avail_ae_modes.array(),
                      avail_ae_modes.size());
   7690 
   7691     int32_t sensitivity_range[2];
   7692     sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
   7693     sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
   7694     staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
   7695                       sensitivity_range,
   7696                       sizeof(sensitivity_range) / sizeof(int32_t));
   7697 
   7698     staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
   7699                       &gCamCapability[cameraId]->max_analog_sensitivity,
   7700                       1);
   7701 
   7702     int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
   7703     staticInfo.update(ANDROID_SENSOR_ORIENTATION,
   7704                       &sensor_orientation,
   7705                       1);
   7706 
   7707     int32_t max_output_streams[] = {
   7708             MAX_STALLING_STREAMS,
   7709             MAX_PROCESSED_STREAMS,
   7710             MAX_RAW_STREAMS};
   7711     staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
   7712             max_output_streams,
   7713             sizeof(max_output_streams)/sizeof(max_output_streams[0]));
   7714 
   7715     uint8_t avail_leds = 0;
   7716     staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
   7717                       &avail_leds, 0);
   7718 
   7719     uint8_t focus_dist_calibrated;
   7720     int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
   7721             gCamCapability[cameraId]->focus_dist_calibrated);
   7722     if (NAME_NOT_FOUND != val) {
   7723         focus_dist_calibrated = (uint8_t)val;
   7724         staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
   7725                      &focus_dist_calibrated, 1);
   7726     }
   7727 
   7728     int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
   7729     size = 0;
   7730     count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
   7731             MAX_TEST_PATTERN_CNT);
   7732     for (size_t i = 0; i < count; i++) {
   7733         int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
   7734                 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
   7735         if (NAME_NOT_FOUND != testpatternMode) {
   7736             avail_testpattern_modes[size] = testpatternMode;
   7737             size++;
   7738         }
   7739     }
   7740     staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
   7741                       avail_testpattern_modes,
   7742                       size);
   7743 
   7744     uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
   7745     staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
   7746                       &max_pipeline_depth,
   7747                       1);
   7748 
   7749     int32_t partial_result_count = PARTIAL_RESULT_COUNT;
   7750     staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
   7751                       &partial_result_count,
   7752                        1);
   7753 
   7754     int32_t max_stall_duration = MAX_REPROCESS_STALL;
   7755     staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
   7756 
   7757     Vector<uint8_t> available_capabilities;
   7758     available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
   7759     available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
   7760     available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
   7761     available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
   7762     if (supportBurst) {
   7763         available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
   7764     }
   7765     available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
   7766     available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
   7767     if (hfrEnable && available_hfr_configs.array()) {
   7768         available_capabilities.add(
   7769                 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
   7770     }
   7771 
   7772     if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
   7773         available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
   7774     }
   7775     staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
   7776             available_capabilities.array(),
   7777             available_capabilities.size());
   7778 
   7779     //aeLockAvailable to be set to true if capabilities has MANUAL_SENSOR or BURST_CAPTURE
   7780     //Assumption is that all bayer cameras support MANUAL_SENSOR.
   7781     uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
   7782             ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
   7783 
   7784     staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
   7785             &aeLockAvailable, 1);
   7786 
   7787     //awbLockAvailable to be set to true if capabilities has MANUAL_POST_PROCESSING or
   7788     //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
   7789     uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
   7790             ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
   7791 
   7792     staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
   7793             &awbLockAvailable, 1);
   7794 
   7795     int32_t max_input_streams = 1;
   7796     staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
   7797                       &max_input_streams,
   7798                       1);
   7799 
   7800     /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
   7801     int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
   7802             HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
   7803             HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
   7804             HAL_PIXEL_FORMAT_YCbCr_420_888};
   7805     staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
   7806                       io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
   7807 
   7808     int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
   7809     staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
   7810                       &max_latency,
   7811                       1);
   7812 
   7813     int32_t isp_sensitivity_range[2];
   7814     isp_sensitivity_range[0] =
   7815         gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
   7816     isp_sensitivity_range[1] =
   7817         gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
   7818     staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
   7819                       isp_sensitivity_range,
   7820                       sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
   7821 
   7822     uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
   7823                                            ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
   7824     staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
   7825             available_hot_pixel_modes,
   7826             sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
   7827 
   7828     uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
   7829                                          ANDROID_SHADING_MODE_FAST,
   7830                                          ANDROID_SHADING_MODE_HIGH_QUALITY};
   7831     staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
   7832                       available_shading_modes,
   7833                       3);
   7834 
   7835     uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
   7836                                                   ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
   7837     staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
   7838                       available_lens_shading_map_modes,
   7839                       2);
   7840 
   7841     uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
   7842                                       ANDROID_EDGE_MODE_FAST,
   7843                                       ANDROID_EDGE_MODE_HIGH_QUALITY,
   7844                                       ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
   7845     staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
   7846             available_edge_modes,
   7847             sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
   7848 
   7849     uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
   7850                                            ANDROID_NOISE_REDUCTION_MODE_FAST,
   7851                                            ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
   7852                                            ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
   7853                                            ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
   7854     staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
   7855             available_noise_red_modes,
   7856             sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
   7857 
   7858     uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
   7859                                          ANDROID_TONEMAP_MODE_FAST,
   7860                                          ANDROID_TONEMAP_MODE_HIGH_QUALITY};
   7861     staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
   7862             available_tonemap_modes,
   7863             sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
   7864 
   7865     uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
   7866     staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
   7867             available_hot_pixel_map_modes,
   7868             sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
   7869 
   7870     val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
   7871             gCamCapability[cameraId]->reference_illuminant1);
   7872     if (NAME_NOT_FOUND != val) {
   7873         uint8_t fwkReferenceIlluminant = (uint8_t)val;
   7874         staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
   7875     }
   7876 
   7877     val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
   7878             gCamCapability[cameraId]->reference_illuminant2);
   7879     if (NAME_NOT_FOUND != val) {
   7880         uint8_t fwkReferenceIlluminant = (uint8_t)val;
   7881         staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
   7882     }
   7883 
   7884     staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
   7885             (void *)gCamCapability[cameraId]->forward_matrix1,
   7886             FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
   7887 
   7888     staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
   7889             (void *)gCamCapability[cameraId]->forward_matrix2,
   7890             FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
   7891 
   7892     staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
   7893             (void *)gCamCapability[cameraId]->color_transform1,
   7894             COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
   7895 
   7896     staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
   7897             (void *)gCamCapability[cameraId]->color_transform2,
   7898             COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
   7899 
   7900     staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
   7901             (void *)gCamCapability[cameraId]->calibration_transform1,
   7902             CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
   7903 
   7904     staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
   7905             (void *)gCamCapability[cameraId]->calibration_transform2,
   7906             CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
   7907 
   7908     int32_t session_keys[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, QCAMERA3_USE_AV_TIMER,
   7909         ANDROID_CONTROL_AE_TARGET_FPS_RANGE};
   7910 
   7911     staticInfo.update(ANDROID_REQUEST_AVAILABLE_SESSION_KEYS, session_keys,
   7912             sizeof(session_keys) / sizeof(session_keys[0]));
   7913 
   7914     int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
   7915        ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
   7916        ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
   7917        ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
   7918        ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
   7919        ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
   7920        ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
   7921        ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
   7922        ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
   7923        ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
   7924        ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
   7925        ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
   7926        ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
   7927        ANDROID_JPEG_GPS_COORDINATES,
   7928        ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
   7929        ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
   7930        ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
   7931        ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
   7932        ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
   7933        ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
   7934        ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
   7935        ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
   7936        ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
   7937        ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
   7938        ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
   7939        ANDROID_STATISTICS_FACE_DETECT_MODE,
   7940        ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
   7941        ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
   7942        ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
   7943        ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
   7944        QCAMERA3_PRIVATEDATA_REPROCESS, QCAMERA3_CDS_MODE, QCAMERA3_CDS_INFO,
   7945        QCAMERA3_CROP_COUNT_REPROCESS, QCAMERA3_CROP_REPROCESS,
   7946        QCAMERA3_CROP_ROI_MAP_REPROCESS, QCAMERA3_TEMPORAL_DENOISE_ENABLE,
   7947        QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, QCAMERA3_USE_AV_TIMER,
   7948        QCAMERA3_DUALCAM_LINK_ENABLE, QCAMERA3_DUALCAM_LINK_IS_MAIN,
   7949        QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID,
   7950        /* DevCamDebug metadata request_keys_basic */
   7951        DEVCAMDEBUG_META_ENABLE,
   7952        /* DevCamDebug metadata end */
   7953        };
   7954 
   7955     size_t request_keys_cnt =
   7956             sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
   7957     Vector<int32_t> available_request_keys;
   7958     available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
   7959     if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
   7960         available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
   7961     }
   7962 
   7963     staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
   7964             available_request_keys.array(), available_request_keys.size());
   7965 
   7966     int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
   7967        ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
   7968        ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
   7969        ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AF_SCENE_CHANGE, ANDROID_CONTROL_AWB_MODE,
   7970        ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
   7971        ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
   7972        ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
   7973        ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
   7974        ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
   7975        ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
   7976        ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
   7977        ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
   7978        ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
   7979        ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
   7980        ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
   7981        ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
   7982        ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
   7983        ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
   7984        ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
   7985        ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
   7986        ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
   7987        ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
   7988        ANDROID_STATISTICS_FACE_SCORES,
   7989        NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
   7990        NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
   7991        QCAMERA3_PRIVATEDATA_REPROCESS, QCAMERA3_CDS_MODE, QCAMERA3_CDS_INFO,
   7992        QCAMERA3_CROP_COUNT_REPROCESS, QCAMERA3_CROP_REPROCESS,
   7993        QCAMERA3_CROP_ROI_MAP_REPROCESS, QCAMERA3_TUNING_META_DATA_BLOB,
   7994        QCAMERA3_TEMPORAL_DENOISE_ENABLE, QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE,
   7995        QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN,
   7996        QCAMERA3_DUALCAM_LINK_ENABLE, QCAMERA3_DUALCAM_LINK_IS_MAIN,
   7997        QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID,
   7998        // DevCamDebug metadata result_keys_basic
   7999        DEVCAMDEBUG_META_ENABLE,
   8000        // DevCamDebug metadata result_keys AF
   8001        DEVCAMDEBUG_AF_LENS_POSITION,
   8002        DEVCAMDEBUG_AF_TOF_CONFIDENCE,
   8003        DEVCAMDEBUG_AF_TOF_DISTANCE,
   8004        DEVCAMDEBUG_AF_LUMA,
   8005        DEVCAMDEBUG_AF_HAF_STATE,
   8006        DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
   8007        DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
   8008        DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
   8009        DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
   8010        DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
   8011        DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
   8012        DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
   8013        DEVCAMDEBUG_AF_MONITOR_REFOCUS,
   8014        DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
   8015        DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
   8016        DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
   8017        DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
   8018        DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
   8019        DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
   8020        DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
   8021        DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
   8022        DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
   8023        DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
   8024        DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
   8025        DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
   8026        DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
   8027        DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
   8028        // DevCamDebug metadata result_keys AEC
   8029        DEVCAMDEBUG_AEC_TARGET_LUMA,
   8030        DEVCAMDEBUG_AEC_COMP_LUMA,
   8031        DEVCAMDEBUG_AEC_AVG_LUMA,
   8032        DEVCAMDEBUG_AEC_CUR_LUMA,
   8033        DEVCAMDEBUG_AEC_LINECOUNT,
   8034        DEVCAMDEBUG_AEC_REAL_GAIN,
   8035        DEVCAMDEBUG_AEC_EXP_INDEX,
   8036        DEVCAMDEBUG_AEC_LUX_IDX,
   8037        // DevCamDebug metadata result_keys AWB
   8038        DEVCAMDEBUG_AWB_R_GAIN,
   8039        DEVCAMDEBUG_AWB_G_GAIN,
   8040        DEVCAMDEBUG_AWB_B_GAIN,
   8041        DEVCAMDEBUG_AWB_CCT,
   8042        DEVCAMDEBUG_AWB_DECISION,
   8043        /* DevCamDebug metadata end */
   8044        };
   8045     size_t result_keys_cnt =
   8046             sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
   8047 
   8048     Vector<int32_t> available_result_keys;
   8049     available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
   8050     if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
   8051         available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
   8052     }
   8053     if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
   8054         available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
   8055         available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
   8056     }
   8057     if (supportedFaceDetectMode == 1) {
   8058         available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
   8059         available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
   8060     } else if ((supportedFaceDetectMode == 2) ||
   8061             (supportedFaceDetectMode == 3)) {
   8062         available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
   8063         available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
   8064     }
   8065     if (hasBlackRegions) {
   8066         available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
   8067         available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
   8068     }
   8069     staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
   8070             available_result_keys.array(), available_result_keys.size());
   8071 
   8072     int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
   8073        ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
   8074        ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
   8075        ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
   8076        ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
   8077        ANDROID_SCALER_CROPPING_TYPE,
   8078        ANDROID_SYNC_MAX_LATENCY,
   8079        ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
   8080        ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
   8081        ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
   8082        ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
   8083        ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
   8084        ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
   8085        ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
   8086        ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
   8087        ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
   8088        ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
   8089        ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
   8090        ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
   8091        ANDROID_LENS_FACING,
   8092        ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
   8093        ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
   8094        ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
   8095        ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
   8096        ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
   8097        ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
   8098        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
   8099        /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
   8100        ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
   8101        ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
   8102        ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
   8103        ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
   8104        ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
   8105        ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
   8106        ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
   8107        ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
   8108        ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
   8109        ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
   8110        ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
   8111        ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
   8112        ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
   8113        ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
   8114        ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
   8115        ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
   8116        ANDROID_EDGE_AVAILABLE_EDGE_MODES,
   8117        ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
   8118        ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
   8119        ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
   8120        ANDROID_TONEMAP_MAX_CURVE_POINTS,
   8121        ANDROID_CONTROL_AVAILABLE_MODES,
   8122        ANDROID_CONTROL_AE_LOCK_AVAILABLE,
   8123        ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
   8124        ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
   8125        ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
   8126        ANDROID_SHADING_AVAILABLE_MODES,
   8127        ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
   8128        ANDROID_SENSOR_OPAQUE_RAW_SIZE, QCAMERA3_OPAQUE_RAW_FORMAT
   8129        };
   8130 
   8131     Vector<int32_t> available_characteristics_keys;
   8132     available_characteristics_keys.appendArray(characteristics_keys_basic,
   8133             sizeof(characteristics_keys_basic)/sizeof(int32_t));
   8134     if (hasBlackRegions) {
   8135         available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
   8136     }
   8137 
    /*available stall durations depend on the hw + sw and will be different for different devices */
    /*have to add for raw after implementation*/
    // Each stall-duration table entry is a (format, width, height, duration)
    // quadruple, one per supported size of each stalling output format.
    int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
    size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);

    Vector<int64_t> available_stall_durations;
    for (uint32_t j = 0; j < stall_formats_count; j++) {
        if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
            // BLOB (JPEG) stalls: one entry per supported picture size,
            // paired index-for-index with jpeg_stall_durations.
            for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
                available_stall_durations.add(stall_formats[j]);
                available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
                available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
                available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
          }
        } else {
            // RAW16 stalls: one entry per supported raw dimension, paired
            // index-for-index with raw16_stall_durations.
            for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
                available_stall_durations.add(stall_formats[j]);
                available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
                available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
                available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
            }
        }
    }
    staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
                      available_stall_durations.array(),
                      available_stall_durations.size());
   8166 
    //QCAMERA3_OPAQUE_RAW
    // Select the opaque RAW vendor format (LEGACY vs MIPI packing) and the
    // matching backend cam_format_t. Bit depth (8/10/12bpp) is derived from
    // the sensor white level. If opaque_raw_fmt is unrecognized, the
    // defaults below (10bpp QCOM RAW / LEGACY) are kept and an error logged.
    uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
    cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
    switch (gCamCapability[cameraId]->opaque_raw_fmt) {
    case LEGACY_RAW:
        if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
            fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
            fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
            fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
        raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
        break;
    case MIPI_RAW:
        if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
            fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
            fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
        else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
            fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
        raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
        break;
    default:
        LOGE("unknown opaque_raw_format %d",
                gCamCapability[cameraId]->opaque_raw_fmt);
        break;
    }
    staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
   8195 
    // Opaque RAW strides: (width, height, stride) triples for each supported
    // raw dimension. NOTE(review): the return value of
    // mm_stream_calc_offset_raw is ignored here, so on failure the stride
    // added may be from an uninitialized buf_planes — confirm intent.
    Vector<int32_t> strides;
    for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
            gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
        cam_stream_buf_plane_info_t buf_planes;
        strides.add(gCamCapability[cameraId]->raw_dim[i].width);
        strides.add(gCamCapability[cameraId]->raw_dim[i].height);
        mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
            &gCamCapability[cameraId]->padding_info, &buf_planes);
        strides.add(buf_planes.plane_info.mp[0].stride);
    }

    // Publish the strides vendor tag only when at least one raw dimension
    // exists, and advertise the tag in the characteristics key list; the
    // key list itself is published after this conditional add.
    if (!strides.isEmpty()) {
        staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
                strides.size());
        available_characteristics_keys.add(QCAMERA3_OPAQUE_RAW_STRIDES);
    }
    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
                      available_characteristics_keys.array(),
                      available_characteristics_keys.size());
   8215 
   8216     Vector<int32_t> opaque_size;
   8217     for (size_t j = 0; j < scalar_formats_count; j++) {
   8218         if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
   8219             for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
   8220                     gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
   8221                 cam_stream_buf_plane_info_t buf_planes;
   8222 
   8223                 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
   8224                          &gCamCapability[cameraId]->padding_info, &buf_planes);
   8225 
   8226                 if (rc == 0) {
   8227                     opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
   8228                     opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
   8229                     opaque_size.add(buf_planes.plane_info.frame_len);
   8230                 }else {
   8231                     LOGE("raw frame calculation failed!");
   8232                 }
   8233             }
   8234         }
   8235     }
   8236 
   8237     if ((opaque_size.size() > 0) &&
   8238             (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
   8239         staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
   8240     else
   8241         LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
   8242 
   8243     gStaticMetadata[cameraId] = staticInfo.release();
   8244     return rc;
   8245 }
   8246 
   8247 /*===========================================================================
   8248  * FUNCTION   : makeTable
   8249  *
   8250  * DESCRIPTION: make a table of sizes
   8251  *
   8252  * PARAMETERS :
   8253  *
   8254  *
   8255  *==========================================================================*/
   8256 void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
   8257         size_t max_size, int32_t *sizeTable)
   8258 {
   8259     size_t j = 0;
   8260     if (size > max_size) {
   8261        size = max_size;
   8262     }
   8263     for (size_t i = 0; i < size; i++) {
   8264         sizeTable[j] = dimTable[i].width;
   8265         sizeTable[j+1] = dimTable[i].height;
   8266         j+=2;
   8267     }
   8268 }
   8269 
   8270 /*===========================================================================
   8271  * FUNCTION   : makeFPSTable
   8272  *
   8273  * DESCRIPTION: make a table of fps ranges
   8274  *
   8275  * PARAMETERS :
   8276  *
   8277  *==========================================================================*/
   8278 void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
   8279         size_t max_size, int32_t *fpsRangesTable)
   8280 {
   8281     size_t j = 0;
   8282     if (size > max_size) {
   8283        size = max_size;
   8284     }
   8285     for (size_t i = 0; i < size; i++) {
   8286         fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
   8287         fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
   8288         j+=2;
   8289     }
   8290 }
   8291 
   8292 /*===========================================================================
   8293  * FUNCTION   : makeOverridesList
   8294  *
   8295  * DESCRIPTION: make a list of scene mode overrides
   8296  *
   8297  * PARAMETERS :
   8298  *
   8299  *
   8300  *==========================================================================*/
   8301 void QCamera3HardwareInterface::makeOverridesList(
   8302         cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
   8303         uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
   8304 {
   8305     /*daemon will give a list of overrides for all scene modes.
   8306       However we should send the fwk only the overrides for the scene modes
   8307       supported by the framework*/
   8308     size_t j = 0;
   8309     if (size > max_size) {
   8310        size = max_size;
   8311     }
   8312     size_t focus_count = CAM_FOCUS_MODE_MAX;
   8313     focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
   8314             focus_count);
   8315     for (size_t i = 0; i < size; i++) {
   8316         bool supt = false;
   8317         size_t index = supported_indexes[i];
   8318         overridesList[j] = gCamCapability[camera_id]->flash_available ?
   8319                 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
   8320         int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
   8321                 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
   8322                 overridesTable[index].awb_mode);
   8323         if (NAME_NOT_FOUND != val) {
   8324             overridesList[j+1] = (uint8_t)val;
   8325         }
   8326         uint8_t focus_override = overridesTable[index].af_mode;
   8327         for (size_t k = 0; k < focus_count; k++) {
   8328            if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
   8329               supt = true;
   8330               break;
   8331            }
   8332         }
   8333         if (supt) {
   8334             val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
   8335                     focus_override);
   8336             if (NAME_NOT_FOUND != val) {
   8337                 overridesList[j+2] = (uint8_t)val;
   8338             }
   8339         } else {
   8340            overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
   8341         }
   8342         j+=3;
   8343     }
   8344 }
   8345 
   8346 /*===========================================================================
   8347  * FUNCTION   : filterJpegSizes
   8348  *
   8349  * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
   8350  *              could be downscaled to
   8351  *
   8352  * PARAMETERS :
   8353  *
   8354  * RETURN     : length of jpegSizes array
   8355  *==========================================================================*/
   8356 
   8357 size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
   8358         size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
   8359         uint8_t downscale_factor)
   8360 {
   8361     if (0 == downscale_factor) {
   8362         downscale_factor = 1;
   8363     }
   8364 
   8365     int32_t min_width = active_array_size.width / downscale_factor;
   8366     int32_t min_height = active_array_size.height / downscale_factor;
   8367     size_t jpegSizesCnt = 0;
   8368     if (processedSizesCnt > maxCount) {
   8369         processedSizesCnt = maxCount;
   8370     }
   8371     for (size_t i = 0; i < processedSizesCnt; i+=2) {
   8372         if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
   8373             jpegSizes[jpegSizesCnt] = processedSizes[i];
   8374             jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
   8375             jpegSizesCnt += 2;
   8376         }
   8377     }
   8378     return jpegSizesCnt;
   8379 }
   8380 
   8381 /*===========================================================================
   8382  * FUNCTION   : computeNoiseModelEntryS
   8383  *
   8384  * DESCRIPTION: function to map a given sensitivity to the S noise
   8385  *              model parameters in the DNG noise model.
   8386  *
   8387  * PARAMETERS : sens : the sensor sensitivity
   8388  *
 * RETURN     : S (sensor amplification) noise
   8390  *
   8391  *==========================================================================*/
   8392 double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
   8393     double s = gCamCapability[mCameraId]->gradient_S * sens +
   8394             gCamCapability[mCameraId]->offset_S;
   8395     return ((s < 0.0) ? 0.0 : s);
   8396 }
   8397 
   8398 /*===========================================================================
   8399  * FUNCTION   : computeNoiseModelEntryO
   8400  *
   8401  * DESCRIPTION: function to map a given sensitivity to the O noise
   8402  *              model parameters in the DNG noise model.
   8403  *
   8404  * PARAMETERS : sens : the sensor sensitivity
   8405  *
 * RETURN     : O (sensor readout) noise
   8407  *
   8408  *==========================================================================*/
   8409 double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
   8410     int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
   8411     double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
   8412             1.0 : (1.0 * sens / max_analog_sens);
   8413     double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
   8414             gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
   8415     return ((o < 0.0) ? 0.0 : o);
   8416 }
   8417 
   8418 /*===========================================================================
   8419  * FUNCTION   : getSensorSensitivity
   8420  *
   8421  * DESCRIPTION: convert iso_mode to an integer value
   8422  *
   8423  * PARAMETERS : iso_mode : the iso_mode supported by sensor
   8424  *
 * RETURN     : sensitivity supported by sensor
   8426  *
   8427  *==========================================================================*/
   8428 int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
   8429 {
   8430     int32_t sensitivity;
   8431 
   8432     switch (iso_mode) {
   8433     case CAM_ISO_MODE_100:
   8434         sensitivity = 100;
   8435         break;
   8436     case CAM_ISO_MODE_200:
   8437         sensitivity = 200;
   8438         break;
   8439     case CAM_ISO_MODE_400:
   8440         sensitivity = 400;
   8441         break;
   8442     case CAM_ISO_MODE_800:
   8443         sensitivity = 800;
   8444         break;
   8445     case CAM_ISO_MODE_1600:
   8446         sensitivity = 1600;
   8447         break;
   8448     default:
   8449         sensitivity = -1;
   8450         break;
   8451     }
   8452     return sensitivity;
   8453 }
   8454 
   8455 /*===========================================================================
   8456  * FUNCTION   : getCamInfo
   8457  *
   8458  * DESCRIPTION: query camera capabilities
   8459  *
   8460  * PARAMETERS :
   8461  *   @cameraId  : camera Id
   8462  *   @info      : camera info struct to be filled in with camera capabilities
   8463  *
   8464  * RETURN     : int type of status
   8465  *              NO_ERROR  -- success
 *              non-zero failure code
   8467  *==========================================================================*/
int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
        struct camera_info *info)
{
    ATRACE_CALL();
    int rc = 0;

    pthread_mutex_lock(&gCamLock);
    // Capabilities and static metadata are built lazily and cached in the
    // process-wide gCamCapability/gStaticMetadata arrays, so subsequent
    // calls for the same camera id reuse them without re-querying.
    if (NULL == gCamCapability[cameraId]) {
        rc = initCapabilities(cameraId);
        if (rc < 0) {
            // Bail out on failure, but never leave gCamLock held.
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    if (NULL == gStaticMetadata[cameraId]) {
        rc = initStaticMetadata(cameraId);
        if (rc < 0) {
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    // Map the sensor's physical mount position onto the framework enum.
    switch(gCamCapability[cameraId]->position) {
    case CAM_POSITION_BACK:
        info->facing = CAMERA_FACING_BACK;
        break;

    case CAM_POSITION_FRONT:
        info->facing = CAMERA_FACING_FRONT;
        break;

    default:
        // Unknown mount position: report an error, but still fill in the
        // remaining fields below before returning.
        LOGE("Unknown position type for camera id:%d", cameraId);
        rc = -1;
        break;
    }


    info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
    info->device_version = CAMERA_DEVICE_API_VERSION_3_5;
    info->static_camera_characteristics = gStaticMetadata[cameraId];

    //For now assume both cameras can operate independently.
    info->conflicting_devices = NULL;
    info->conflicting_devices_length = 0;

    //resource cost is 100 * MIN(1.0, m/M),
    //where m is throughput requirement with maximum stream configuration
    //and M is CPP maximum throughput.
    // First find the highest frame rate the sensor advertises.
    float max_fps = 0.0;
    for (uint32_t i = 0;
            i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
        if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
            max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
    }
    // m = worst-case pixel throughput: all processed streams at full active
    // array resolution, running at the highest advertised frame rate.
    float ratio = 1.0 * MAX_PROCESSED_STREAMS *
            gCamCapability[cameraId]->active_array_size.width *
            gCamCapability[cameraId]->active_array_size.height * max_fps /
            gCamCapability[cameraId]->max_pixel_bandwidth;
    info->resource_cost = 100 * MIN(1.0, ratio);
    LOGI("camera %d resource cost is %d", cameraId,
            info->resource_cost);

    pthread_mutex_unlock(&gCamLock);
    return rc;
}
   8535 
   8536 /*===========================================================================
   8537  * FUNCTION   : translateCapabilityToMetadata
   8538  *
   8539  * DESCRIPTION: translate the capability into camera_metadata_t
   8540  *
   8541  * PARAMETERS : type of the request
   8542  *
   8543  *
   8544  * RETURN     : success: camera_metadata_t*
   8545  *              failure: NULL
   8546  *
   8547  *==========================================================================*/
   8548 camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
   8549 {
   8550     if (mDefaultMetadata[type] != NULL) {
   8551         return mDefaultMetadata[type];
   8552     }
   8553     //first time we are handling this request
   8554     //fill up the metadata structure using the wrapper class
   8555     CameraMetadata settings;
   8556     //translate from cam_capability_t to camera_metadata_tag_t
   8557     static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
   8558     settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
   8559     int32_t defaultRequestID = 0;
   8560     settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
   8561 
   8562     /* OIS disable */
   8563     char ois_prop[PROPERTY_VALUE_MAX];
   8564     memset(ois_prop, 0, sizeof(ois_prop));
   8565     property_get("persist.camera.ois.disable", ois_prop, "0");
   8566     uint8_t ois_disable = (uint8_t)atoi(ois_prop);
   8567 
   8568     /* Force video to use OIS */
   8569     char videoOisProp[PROPERTY_VALUE_MAX];
   8570     memset(videoOisProp, 0, sizeof(videoOisProp));
   8571     property_get("persist.camera.ois.video", videoOisProp, "1");
   8572     uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
   8573 
   8574     // Hybrid AE enable/disable
   8575     char hybrid_ae_prop[PROPERTY_VALUE_MAX];
   8576     memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
   8577     property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
   8578     const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
   8579 
   8580     uint8_t controlIntent = 0;
   8581     uint8_t focusMode;
   8582     uint8_t vsMode;
   8583     uint8_t optStabMode;
   8584     uint8_t cacMode;
   8585     uint8_t edge_mode;
   8586     uint8_t noise_red_mode;
   8587     uint8_t shading_mode;
   8588     uint8_t hot_pixel_mode;
   8589     uint8_t tonemap_mode;
   8590     bool highQualityModeEntryAvailable = FALSE;
   8591     bool fastModeEntryAvailable = FALSE;
   8592     vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
   8593     optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
   8594     uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
   8595 
   8596     switch (type) {
   8597       case CAMERA3_TEMPLATE_PREVIEW:
   8598         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
   8599         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
   8600         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
   8601         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
   8602         edge_mode = ANDROID_EDGE_MODE_FAST;
   8603         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
   8604         shading_mode = ANDROID_SHADING_MODE_FAST;
   8605         hot_pixel_mode = ANDROID_HOT_PIXEL_MODE_FAST;
   8606         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
   8607         break;
   8608       case CAMERA3_TEMPLATE_STILL_CAPTURE:
   8609         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
   8610         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
   8611         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
   8612         edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
   8613         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
   8614         shading_mode = ANDROID_SHADING_MODE_HIGH_QUALITY;
   8615         hot_pixel_mode = ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY;
   8616         tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
   8617         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
   8618         // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
   8619         for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
   8620             if (gCamCapability[mCameraId]->aberration_modes[i] ==
   8621                     CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
   8622                 highQualityModeEntryAvailable = TRUE;
   8623             } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
   8624                     CAM_COLOR_CORRECTION_ABERRATION_FAST) {
   8625                 fastModeEntryAvailable = TRUE;
   8626             }
   8627         }
   8628         if (highQualityModeEntryAvailable) {
   8629             cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
   8630         } else if (fastModeEntryAvailable) {
   8631             cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
   8632         }
   8633         if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
   8634             shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
   8635         }
   8636         break;
   8637       case CAMERA3_TEMPLATE_VIDEO_RECORD:
   8638         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
   8639         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
   8640         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
   8641         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
   8642         edge_mode = ANDROID_EDGE_MODE_FAST;
   8643         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
   8644         shading_mode = ANDROID_SHADING_MODE_FAST;
   8645         hot_pixel_mode = ANDROID_HOT_PIXEL_MODE_FAST;
   8646         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
   8647         if (forceVideoOis)
   8648             optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
   8649         break;
   8650       case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
   8651         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
   8652         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
   8653         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
   8654         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
   8655         edge_mode = ANDROID_EDGE_MODE_FAST;
   8656         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
   8657         shading_mode = ANDROID_SHADING_MODE_FAST;
   8658         hot_pixel_mode = ANDROID_HOT_PIXEL_MODE_FAST;
   8659         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
   8660         if (forceVideoOis)
   8661             optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
   8662         break;
   8663       case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
   8664         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
   8665         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
   8666         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
   8667         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
   8668         edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
   8669         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
   8670         shading_mode = ANDROID_SHADING_MODE_FAST;
   8671         hot_pixel_mode = ANDROID_HOT_PIXEL_MODE_FAST;
   8672         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
   8673         break;
   8674       case CAMERA3_TEMPLATE_MANUAL:
   8675         edge_mode = ANDROID_EDGE_MODE_FAST;
   8676         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
   8677         shading_mode = ANDROID_SHADING_MODE_FAST;
   8678         hot_pixel_mode = ANDROID_HOT_PIXEL_MODE_FAST;
   8679         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
   8680         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
   8681         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
   8682         focusMode = ANDROID_CONTROL_AF_MODE_OFF;
   8683         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
   8684         break;
   8685       default:
   8686         edge_mode = ANDROID_EDGE_MODE_FAST;
   8687         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
   8688         shading_mode = ANDROID_SHADING_MODE_FAST;
   8689         hot_pixel_mode = ANDROID_HOT_PIXEL_MODE_FAST;
   8690         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
   8691         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
   8692         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
   8693         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
   8694         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
   8695         break;
   8696     }
   8697     settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
   8698     settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
   8699     settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
   8700     if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
   8701         focusMode = ANDROID_CONTROL_AF_MODE_OFF;
   8702     }
   8703     settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
   8704 
   8705     if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
   8706             gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
   8707         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
   8708     else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
   8709             gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
   8710             || ois_disable)
   8711         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
   8712     settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
   8713     settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
   8714 
   8715     settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
   8716             &gCamCapability[mCameraId]->exposure_compensation_default, 1);
   8717 
   8718     static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
   8719     settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
   8720 
   8721     static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
   8722     settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
   8723 
   8724     static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
   8725     settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
   8726 
   8727     static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
   8728     settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
   8729 
   8730     static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
   8731     settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
   8732 
   8733     static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
   8734     settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
   8735 
   8736     static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
   8737     settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
   8738 
   8739     /*flash*/
   8740     static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
   8741     settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
   8742 
   8743     static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
   8744     settings.update(ANDROID_FLASH_FIRING_POWER,
   8745             &flashFiringLevel, 1);
   8746 
   8747     /* lens */
   8748     float default_aperture = gCamCapability[mCameraId]->apertures[0];
   8749     settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
   8750 
   8751     if (gCamCapability[mCameraId]->filter_densities_count) {
   8752         float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
   8753         settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
   8754                         gCamCapability[mCameraId]->filter_densities_count);
   8755     }
   8756 
   8757     float default_focal_length = gCamCapability[mCameraId]->focal_length;
   8758     settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
   8759 
   8760     if (focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
   8761         float default_focus_distance = 0;
   8762         settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
   8763     }
   8764 
   8765     static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
   8766     settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
   8767 
   8768     static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
   8769     settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
   8770 
   8771     static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
   8772     settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
   8773 
   8774     /* face detection (default to OFF) */
   8775     static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
   8776     settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
   8777 
   8778     static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
   8779     settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
   8780 
   8781     static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
   8782     settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
   8783 
   8784     static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
   8785     settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
   8786 
   8787 
   8788     static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
   8789     settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
   8790 
   8791     /* Exposure time(Update the Min Exposure Time)*/
   8792     int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
   8793     settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
   8794 
   8795     /* frame duration */
   8796     static const int64_t default_frame_duration = NSEC_PER_33MSEC;
   8797     settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
   8798 
   8799     /* sensitivity */
   8800     static const int32_t default_sensitivity = 100;
   8801     settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
   8802     static const int32_t default_isp_sensitivity =
   8803             gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
   8804     settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
   8805 
   8806     /*edge mode*/
   8807     settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
   8808 
   8809     /*noise reduction mode*/
   8810     settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
   8811 
   8812     /*shading mode*/
   8813     settings.update(ANDROID_SHADING_MODE, &shading_mode, 1);
   8814 
   8815     /*hot pixel mode*/
   8816     settings.update(ANDROID_HOT_PIXEL_MODE, &hot_pixel_mode, 1);
   8817 
   8818     /*color correction mode*/
   8819     static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
   8820     settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
   8821 
   8822     /*transform matrix mode*/
   8823     settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
   8824 
   8825     int32_t scaler_crop_region[4];
   8826     scaler_crop_region[0] = 0;
   8827     scaler_crop_region[1] = 0;
   8828     scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
   8829     scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
   8830     settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
   8831 
   8832     static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
   8833     settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
   8834 
   8835     /*focus distance*/
   8836     float focus_distance = 0.0;
   8837     settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
   8838 
   8839     /*target fps range: use maximum range for picture, and maximum fixed range for video*/
   8840     /* Restrict default preview template to max 30 fps */
   8841     float max_range = 0.0;
   8842     float max_fixed_fps = 0.0;
   8843     int32_t fps_range[2] = {0, 0};
   8844     for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
   8845             i++) {
   8846         if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
   8847                 TEMPLATE_MAX_PREVIEW_FPS) {
   8848             continue;
   8849         }
   8850         float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
   8851             gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
   8852         if (type == CAMERA3_TEMPLATE_PREVIEW ||
   8853                 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
   8854                 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
   8855             if (range > max_range) {
   8856                 fps_range[0] =
   8857                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
   8858                 fps_range[1] =
   8859                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
   8860                 max_range = range;
   8861             }
   8862         } else {
   8863             if (range < 0.01 && max_fixed_fps <
   8864                     gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
   8865                 fps_range[0] =
   8866                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
   8867                 fps_range[1] =
   8868                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
   8869                 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
   8870             }
   8871         }
   8872     }
   8873     settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
   8874 
   8875     /*precapture trigger*/
   8876     uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
   8877     settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
   8878 
   8879     /*af trigger*/
   8880     uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
   8881     settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
   8882 
   8883     /* ae & af regions */
   8884     int32_t active_region[] = {
   8885             gCamCapability[mCameraId]->active_array_size.left,
   8886             gCamCapability[mCameraId]->active_array_size.top,
   8887             gCamCapability[mCameraId]->active_array_size.left +
   8888                     gCamCapability[mCameraId]->active_array_size.width,
   8889             gCamCapability[mCameraId]->active_array_size.top +
   8890                     gCamCapability[mCameraId]->active_array_size.height,
   8891             0};
   8892     settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
   8893             sizeof(active_region) / sizeof(active_region[0]));
   8894     settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
   8895             sizeof(active_region) / sizeof(active_region[0]));
   8896 
   8897     /* black level lock */
   8898     uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
   8899     settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
   8900 
   8901     //special defaults for manual template
   8902     if (type == CAMERA3_TEMPLATE_MANUAL) {
   8903         static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
   8904         settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
   8905 
   8906         static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
   8907         settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
   8908 
   8909         static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
   8910         settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
   8911 
   8912         static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
   8913         settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
   8914 
   8915         static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
   8916         settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
   8917 
   8918         static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
   8919         settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
   8920     }
   8921 
   8922 
   8923     /* TNR
   8924      * We'll use this location to determine which modes TNR will be set.
   8925      * We will enable TNR to be on if either of the Preview/Video stream requires TNR
   8926      * This is not to be confused with linking on a per stream basis that decision
   8927      * is still on per-session basis and will be handled as part of config stream
   8928      */
   8929     uint8_t tnr_enable = 0;
   8930 
   8931     if (m_bTnrPreview || m_bTnrVideo) {
   8932 
   8933         switch (type) {
   8934             case CAMERA3_TEMPLATE_VIDEO_RECORD:
   8935                     tnr_enable = 1;
   8936                     break;
   8937 
   8938             default:
   8939                     tnr_enable = 0;
   8940                     break;
   8941         }
   8942 
   8943         int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
   8944         settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
   8945         settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
   8946 
   8947         LOGD("TNR:%d with process plate %d for template:%d",
   8948                              tnr_enable, tnr_process_type, type);
   8949     }
   8950 
   8951     //Update Link tags to default
   8952     uint8_t sync_type = CAM_TYPE_STANDALONE;
   8953     settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
   8954 
   8955     uint8_t is_main = 0; //this doesn't matter as app should overwrite
   8956     settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
   8957 
   8958     uint8_t related_camera_id = mCameraId;
   8959     settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
   8960 
   8961     /* CDS default */
   8962     char prop[PROPERTY_VALUE_MAX];
   8963     memset(prop, 0, sizeof(prop));
   8964     property_get("persist.camera.CDS", prop, "Auto");
   8965     cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
   8966     cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
   8967     if (CAM_CDS_MODE_MAX == cds_mode) {
   8968         cds_mode = CAM_CDS_MODE_AUTO;
   8969     }
   8970 
   8971     /* Disabling CDS in templates which have TNR enabled*/
   8972     if (tnr_enable)
   8973         cds_mode = CAM_CDS_MODE_OFF;
   8974 
   8975     int32_t mode = cds_mode;
   8976     settings.update(QCAMERA3_CDS_MODE, &mode, 1);
   8977 
   8978     /* hybrid ae */
   8979     settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
   8980 
   8981     mDefaultMetadata[type] = settings.release();
   8982 
   8983     return mDefaultMetadata[type];
   8984 }
   8985 
   8986 /*===========================================================================
   8987  * FUNCTION   : setFrameParameters
   8988  *
   8989  * DESCRIPTION: set parameters per frame as requested in the metadata from
   8990  *              framework
   8991  *
   8992  * PARAMETERS :
   8993  *   @request   : request that needs to be serviced
   8994  *   @streamsArray : Stream ID of all the requested streams
   8995  *   @blob_request: Whether this request is a blob request or not
   8996  *
   8997  * RETURN     : success: NO_ERROR
   8998  *              failure:
   8999  *==========================================================================*/
   9000 int QCamera3HardwareInterface::setFrameParameters(
   9001                     camera3_capture_request_t *request,
   9002                     cam_stream_ID_t streamsArray,
   9003                     int blob_request,
   9004                     uint32_t snapshotStreamId)
   9005 {
   9006     /*translate from camera_metadata_t type to parm_type_t*/
   9007     int rc = 0;
   9008     int32_t hal_version = CAM_HAL_V3;
   9009 
   9010     clear_metadata_buffer(mParameters);
   9011     if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
   9012         LOGE("Failed to set hal version in the parameters");
   9013         return BAD_VALUE;
   9014     }
   9015 
   9016     /*we need to update the frame number in the parameters*/
   9017     if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
   9018             request->frame_number)) {
   9019         LOGE("Failed to set the frame number in the parameters");
   9020         return BAD_VALUE;
   9021     }
   9022 
   9023     /* Update stream id of all the requested buffers */
   9024     if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
   9025         LOGE("Failed to set stream type mask in the parameters");
   9026         return BAD_VALUE;
   9027     }
   9028 
   9029     if (mUpdateDebugLevel) {
   9030         uint32_t dummyDebugLevel = 0;
   9031         /* The value of dummyDebugLevel is irrelavent. On
   9032          * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
   9033         if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
   9034                 dummyDebugLevel)) {
   9035             LOGE("Failed to set UPDATE_DEBUG_LEVEL");
   9036             return BAD_VALUE;
   9037         }
   9038         mUpdateDebugLevel = false;
   9039     }
   9040 
   9041     if(request->settings != NULL){
   9042         rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
   9043         if (blob_request)
   9044             memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
   9045     }
   9046 
   9047     return rc;
   9048 }
   9049 
   9050 /*===========================================================================
   9051  * FUNCTION   : setReprocParameters
   9052  *
   9053  * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
   9054  *              return it.
   9055  *
   9056  * PARAMETERS :
   9057  *   @request   : request that needs to be serviced
   9058  *
   9059  * RETURN     : success: NO_ERROR
   9060  *              failure:
   9061  *==========================================================================*/
   9062 int32_t QCamera3HardwareInterface::setReprocParameters(
   9063         camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
   9064         uint32_t snapshotStreamId)
   9065 {
   9066     /*translate from camera_metadata_t type to parm_type_t*/
   9067     int rc = 0;
   9068 
   9069     if (NULL == request->settings){
   9070         LOGE("Reprocess settings cannot be NULL");
   9071         return BAD_VALUE;
   9072     }
   9073 
   9074     if (NULL == reprocParam) {
   9075         LOGE("Invalid reprocessing metadata buffer");
   9076         return BAD_VALUE;
   9077     }
   9078     clear_metadata_buffer(reprocParam);
   9079 
   9080     /*we need to update the frame number in the parameters*/
   9081     if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
   9082             request->frame_number)) {
   9083         LOGE("Failed to set the frame number in the parameters");
   9084         return BAD_VALUE;
   9085     }
   9086 
   9087     rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
   9088     if (rc < 0) {
   9089         LOGE("Failed to translate reproc request");
   9090         return rc;
   9091     }
   9092 
   9093     CameraMetadata frame_settings;
   9094     frame_settings = request->settings;
   9095     if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
   9096             frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
   9097         int32_t *crop_count =
   9098                 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
   9099         int32_t *crop_data =
   9100                 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
   9101         int32_t *roi_map =
   9102                 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
   9103         if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
   9104             cam_crop_data_t crop_meta;
   9105             memset(&crop_meta, 0, sizeof(cam_crop_data_t));
   9106             crop_meta.num_of_streams = 1;
   9107             crop_meta.crop_info[0].crop.left   = crop_data[0];
   9108             crop_meta.crop_info[0].crop.top    = crop_data[1];
   9109             crop_meta.crop_info[0].crop.width  = crop_data[2];
   9110             crop_meta.crop_info[0].crop.height = crop_data[3];
   9111 
   9112             crop_meta.crop_info[0].roi_map.left =
   9113                     roi_map[0];
   9114             crop_meta.crop_info[0].roi_map.top =
   9115                     roi_map[1];
   9116             crop_meta.crop_info[0].roi_map.width =
   9117                     roi_map[2];
   9118             crop_meta.crop_info[0].roi_map.height =
   9119                     roi_map[3];
   9120 
   9121             if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
   9122                 rc = BAD_VALUE;
   9123             }
   9124             LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
   9125                     request->input_buffer->stream,
   9126                     crop_meta.crop_info[0].crop.left,
   9127                     crop_meta.crop_info[0].crop.top,
   9128                     crop_meta.crop_info[0].crop.width,
   9129                     crop_meta.crop_info[0].crop.height);
   9130             LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
   9131                     request->input_buffer->stream,
   9132                     crop_meta.crop_info[0].roi_map.left,
   9133                     crop_meta.crop_info[0].roi_map.top,
   9134                     crop_meta.crop_info[0].roi_map.width,
   9135                     crop_meta.crop_info[0].roi_map.height);
   9136             } else {
   9137                 LOGE("Invalid reprocess crop count %d!", *crop_count);
   9138             }
   9139     } else {
   9140         LOGE("No crop data from matching output stream");
   9141     }
   9142 
   9143     /* These settings are not needed for regular requests so handle them specially for
   9144        reprocess requests; information needed for EXIF tags */
   9145     if (frame_settings.exists(ANDROID_FLASH_MODE)) {
   9146         int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
   9147                     (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
   9148         if (NAME_NOT_FOUND != val) {
   9149             uint32_t flashMode = (uint32_t)val;
   9150             if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
   9151                 rc = BAD_VALUE;
   9152             }
   9153         } else {
   9154             LOGE("Could not map fwk flash mode %d to correct hal flash mode",
   9155                     frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
   9156         }
   9157     } else {
   9158         LOGH("No flash mode in reprocess settings");
   9159     }
   9160 
   9161     if (frame_settings.exists(ANDROID_FLASH_STATE)) {
   9162         int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
   9163         if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
   9164             rc = BAD_VALUE;
   9165         }
   9166     } else {
   9167         LOGH("No flash state in reprocess settings");
   9168     }
   9169 
   9170     return rc;
   9171 }
   9172 
   9173 /*===========================================================================
   9174  * FUNCTION   : saveRequestSettings
   9175  *
   9176  * DESCRIPTION: Add any settings that might have changed to the request settings
   9177  *              and save the settings to be applied on the frame
   9178  *
   9179  * PARAMETERS :
   9180  *   @jpegMetadata : the extracted and/or modified jpeg metadata
   9181  *   @request      : request with initial settings
   9182  *
   9183  * RETURN     :
   9184  * camera_metadata_t* : pointer to the saved request settings
   9185  *==========================================================================*/
   9186 camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
   9187         const CameraMetadata &jpegMetadata,
   9188         camera3_capture_request_t *request)
   9189 {
   9190     camera_metadata_t *resultMetadata;
   9191     CameraMetadata camMetadata;
   9192     camMetadata = request->settings;
   9193 
   9194     if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
   9195         int32_t thumbnail_size[2];
   9196         thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
   9197         thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
   9198         camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
   9199                 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
   9200     }
   9201 
   9202     resultMetadata = camMetadata.release();
   9203     return resultMetadata;
   9204 }
   9205 
   9206 /*===========================================================================
   9207  * FUNCTION   : setHalFpsRange
   9208  *
   9209  * DESCRIPTION: set FPS range parameter
   9210  *
   9211  *
   9212  * PARAMETERS :
   9213  *   @settings    : Metadata from framework
   9214  *   @hal_metadata: Metadata buffer
   9215  *
   9216  *
   9217  * RETURN     : success: NO_ERROR
   9218  *              failure:
   9219  *==========================================================================*/
int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
        metadata_buffer_t *hal_metadata)
{
    int32_t rc = NO_ERROR;
    cam_fps_range_t fps_range;
    // NOTE: the caller (translateToHalMetadata) only invokes this helper
    // after checking that ANDROID_CONTROL_AE_TARGET_FPS_RANGE exists, so
    // the unconditional find() calls below are safe.
    fps_range.min_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
    fps_range.max_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
    // Video fps initially mirrors the AE target range; it is overridden
    // below for constrained high-speed (HFR) sessions.
    fps_range.video_min_fps = fps_range.min_fps;
    fps_range.video_max_fps = fps_range.max_fps;

    LOGD("aeTargetFpsRange fps: [%f %f]",
            fps_range.min_fps, fps_range.max_fps);
    /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
     * follows:
     * ---------------------------------------------------------------|
     *      Video stream is absent in configure_streams               |
     *    (Camcorder preview before the first video record            |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     *     Video stream is present in configure_streams               |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     * (camcorder prev   |-------------|-------------|----------------|
     *  after video rec  |  [240, 240] |     240     |  [240, 240]    |
     *  is stopped)      |             |             |                |
     * ---------------------------------------------------------------|
     *       YES         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     * When Video stream is absent in configure_streams,
     * preview fps = sensor_fps / batchsize
     * Eg: for 240fps at batchSize 4, preview = 60fps
     *     for 120fps at batchSize 4, preview = 30fps
     *
     * When video stream is present in configure_streams, preview fps is as per
     * the ratio of preview buffers to video buffers requested in process
     * capture request
     */
    // Batching only applies to constrained high-speed sessions; default off.
    mBatchSize = 0;
    if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
        // In HFR the sensor runs at a fixed rate: pin both min fps and
        // video min fps up to video_max_fps.
        fps_range.min_fps = fps_range.video_max_fps;
        fps_range.video_min_fps = fps_range.video_max_fps;
        // Translate the requested max fps into a discrete HFR mode enum.
        int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
                fps_range.max_fps);
        if (NAME_NOT_FOUND != val) {
            cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
                return BAD_VALUE;
            }

            if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
                /* If batchmode is currently in progress and the fps changes,
                 * set the flag to restart the sensor */
                if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
                        (mHFRVideoFps != fps_range.max_fps)) {
                    mNeedSensorRestart = true;
                }
                mHFRVideoFps = fps_range.max_fps;
                // Batch size = sensor fps / preview fps, clamped to the
                // maximum batch size the stack supports.
                mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
                if (mBatchSize > MAX_HFR_BATCH_SIZE) {
                    mBatchSize = MAX_HFR_BATCH_SIZE;
                }
             }
            LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);

         }
    } else {
        /* HFR mode is session param in backend/ISP. This should be reset when
         * in non-HFR mode  */
        cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
            return BAD_VALUE;
        }
    }
    // Finally push the (possibly HFR-adjusted) fps range to the backend.
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
        return BAD_VALUE;
    }
    LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
            fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
    return rc;
}
   9313 
   9314 /*===========================================================================
   9315  * FUNCTION   : translateToHalMetadata
   9316  *
   9317  * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
   9318  *
   9319  *
   9320  * PARAMETERS :
   9321  *   @request  : request sent from framework
   9322  *
   9323  *
   9324  * RETURN     : success: NO_ERROR
   9325  *              failure:
   9326  *==========================================================================*/
   9327 int QCamera3HardwareInterface::translateToHalMetadata
   9328                                   (const camera3_capture_request_t *request,
   9329                                    metadata_buffer_t *hal_metadata,
   9330                                    uint32_t snapshotStreamId)
   9331 {
   9332     int rc = 0;
   9333     CameraMetadata frame_settings;
   9334     frame_settings = request->settings;
   9335 
   9336     /* Do not change the order of the following list unless you know what you are
   9337      * doing.
   9338      * The order is laid out in such a way that parameters in the front of the table
   9339      * may be used to override the parameters later in the table. Examples are:
   9340      * 1. META_MODE should precede AEC/AWB/AF MODE
   9341      * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
   9342      * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
   9343      * 4. Any mode should precede it's corresponding settings
   9344      */
   9345     if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
   9346         uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
   9347         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
   9348             rc = BAD_VALUE;
   9349         }
   9350         rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
   9351         if (rc != NO_ERROR) {
   9352             LOGE("extractSceneMode failed");
   9353         }
   9354     }
   9355 
   9356     if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
   9357         uint8_t fwk_aeMode =
   9358             frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
   9359         uint8_t aeMode;
   9360         int32_t redeye;
   9361 
   9362         if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
   9363             aeMode = CAM_AE_MODE_OFF;
   9364         } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
   9365             aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
   9366         } else {
   9367             aeMode = CAM_AE_MODE_ON;
   9368         }
   9369         if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
   9370             redeye = 1;
   9371         } else {
   9372             redeye = 0;
   9373         }
   9374 
   9375         int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
   9376                 fwk_aeMode);
   9377         if (NAME_NOT_FOUND != val) {
   9378             int32_t flashMode = (int32_t)val;
   9379             ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
   9380         }
   9381 
   9382         ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
   9383         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
   9384             rc = BAD_VALUE;
   9385         }
   9386     }
   9387 
   9388     if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
   9389         uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
   9390         int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
   9391                 fwk_whiteLevel);
   9392         if (NAME_NOT_FOUND != val) {
   9393             uint8_t whiteLevel = (uint8_t)val;
   9394             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
   9395                 rc = BAD_VALUE;
   9396             }
   9397         }
   9398     }
   9399 
   9400     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
   9401         uint8_t fwk_cacMode =
   9402                 frame_settings.find(
   9403                         ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
   9404         int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
   9405                 fwk_cacMode);
   9406         if (NAME_NOT_FOUND != val) {
   9407             cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
   9408             bool entryAvailable = FALSE;
   9409             // Check whether Frameworks set CAC mode is supported in device or not
   9410             for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
   9411                 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
   9412                     entryAvailable = TRUE;
   9413                     break;
   9414                 }
   9415             }
   9416             LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
   9417             // If entry not found then set the device supported mode instead of frameworks mode i.e,
   9418             // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
   9419             // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
   9420             if (entryAvailable == FALSE) {
   9421                 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
   9422                     cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
   9423                 } else {
   9424                     if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
   9425                         // High is not supported and so set the FAST as spec say's underlying
   9426                         // device implementation can be the same for both modes.
   9427                         cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
   9428                     } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
   9429                         // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
   9430                         // in order to avoid the fps drop due to high quality
   9431                         cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
   9432                     } else {
   9433                         cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
   9434                     }
   9435                 }
   9436             }
   9437             LOGD("Final cacMode is %d", cacMode);
   9438             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
   9439                 rc = BAD_VALUE;
   9440             }
   9441         } else {
   9442             LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
   9443         }
   9444     }
   9445 
   9446     if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
   9447         uint8_t fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
   9448         int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
   9449                 fwk_focusMode);
   9450         if (NAME_NOT_FOUND != val) {
   9451             uint8_t focusMode = (uint8_t)val;
   9452             LOGD("set focus mode %d", focusMode);
   9453             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
   9454                 rc = BAD_VALUE;
   9455             }
   9456         }
   9457     }
   9458 
   9459     if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
   9460         float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
   9461         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
   9462                 focalDistance)) {
   9463             rc = BAD_VALUE;
   9464         }
   9465     }
   9466 
   9467     if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
   9468         uint8_t fwk_antibandingMode =
   9469                 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
   9470         int val = lookupHalName(ANTIBANDING_MODES_MAP,
   9471                 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
   9472         if (NAME_NOT_FOUND != val) {
   9473             uint32_t hal_antibandingMode = (uint32_t)val;
   9474             if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
   9475                 if (m60HzZone) {
   9476                     hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
   9477                 } else {
   9478                     hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
   9479                 }
   9480             }
   9481             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
   9482                     hal_antibandingMode)) {
   9483                 rc = BAD_VALUE;
   9484             }
   9485         }
   9486     }
   9487 
   9488     if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
   9489         int32_t expCompensation = frame_settings.find(
   9490                 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
   9491         if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
   9492             expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
   9493         if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
   9494             expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
   9495         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
   9496                 expCompensation)) {
   9497             rc = BAD_VALUE;
   9498         }
   9499     }
   9500 
   9501     if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
   9502         uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
   9503         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
   9504             rc = BAD_VALUE;
   9505         }
   9506     }
   9507     if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
   9508         rc = setHalFpsRange(frame_settings, hal_metadata);
   9509         if (rc != NO_ERROR) {
   9510             LOGE("setHalFpsRange failed");
   9511         }
   9512     }
   9513 
   9514     if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
   9515         uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
   9516         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
   9517             rc = BAD_VALUE;
   9518         }
   9519     }
   9520 
   9521     if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
   9522         uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
   9523         int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
   9524                 fwk_effectMode);
   9525         if (NAME_NOT_FOUND != val) {
   9526             uint8_t effectMode = (uint8_t)val;
   9527             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
   9528                 rc = BAD_VALUE;
   9529             }
   9530         }
   9531     }
   9532 
   9533     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
   9534         uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
   9535         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
   9536                 colorCorrectMode)) {
   9537             rc = BAD_VALUE;
   9538         }
   9539     }
   9540 
   9541     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
   9542         cam_color_correct_gains_t colorCorrectGains;
   9543         for (size_t i = 0; i < CC_GAINS_COUNT; i++) {
   9544             colorCorrectGains.gains[i] =
   9545                     frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
   9546         }
   9547         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
   9548                 colorCorrectGains)) {
   9549             rc = BAD_VALUE;
   9550         }
   9551     }
   9552 
   9553     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
   9554         cam_color_correct_matrix_t colorCorrectTransform;
   9555         cam_rational_type_t transform_elem;
   9556         size_t num = 0;
   9557         for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
   9558            for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
   9559               transform_elem.numerator =
   9560                  frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
   9561               transform_elem.denominator =
   9562                  frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
   9563               colorCorrectTransform.transform_matrix[i][j] = transform_elem;
   9564               num++;
   9565            }
   9566         }
   9567         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
   9568                 colorCorrectTransform)) {
   9569             rc = BAD_VALUE;
   9570         }
   9571     }
   9572 
   9573     cam_trigger_t aecTrigger;
   9574     aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
   9575     aecTrigger.trigger_id = -1;
   9576     if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
   9577         frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
   9578         aecTrigger.trigger =
   9579             frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
   9580         aecTrigger.trigger_id =
   9581             frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
   9582         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
   9583                 aecTrigger)) {
   9584             rc = BAD_VALUE;
   9585         }
   9586         LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
   9587                 aecTrigger.trigger, aecTrigger.trigger_id);
   9588     }
   9589 
   9590     /*af_trigger must come with a trigger id*/
   9591     if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
   9592         frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
   9593         cam_trigger_t af_trigger;
   9594         af_trigger.trigger =
   9595             frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
   9596         af_trigger.trigger_id =
   9597             frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
   9598         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
   9599             rc = BAD_VALUE;
   9600         }
   9601         LOGD("AfTrigger: %d AfTriggerID: %d",
   9602                 af_trigger.trigger, af_trigger.trigger_id);
   9603     }
   9604 
   9605     if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
   9606         int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
   9607         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
   9608             rc = BAD_VALUE;
   9609         }
   9610     }
   9611     if (frame_settings.exists(ANDROID_EDGE_MODE)) {
   9612         cam_edge_application_t edge_application;
   9613         edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
   9614         if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
   9615             edge_application.sharpness = 0;
   9616         } else {
   9617             edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
   9618         }
   9619         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
   9620             rc = BAD_VALUE;
   9621         }
   9622     }
   9623 
   9624     if (frame_settings.exists(ANDROID_FLASH_MODE)) {
   9625         int32_t respectFlashMode = 1;
   9626         if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
   9627             uint8_t fwk_aeMode =
   9628                 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
   9629             if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
   9630                     fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
   9631                     fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
   9632                 respectFlashMode = 0;
   9633                 LOGH("AE Mode controls flash, ignore android.flash.mode");
   9634             }
   9635         }
   9636         if (respectFlashMode) {
   9637             int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
   9638                     (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
   9639             LOGH("flash mode after mapping %d", val);
   9640             // To check: CAM_INTF_META_FLASH_MODE usage
   9641             if (NAME_NOT_FOUND != val) {
   9642                 uint8_t flashMode = (uint8_t)val;
   9643                 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
   9644                     rc = BAD_VALUE;
   9645                 }
   9646             }
   9647         }
   9648     }
   9649 
   9650     if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
   9651         uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
   9652         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
   9653             rc = BAD_VALUE;
   9654         }
   9655     }
   9656 
   9657     if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
   9658         int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
   9659         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
   9660                 flashFiringTime)) {
   9661             rc = BAD_VALUE;
   9662         }
   9663     }
   9664 
   9665     if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
   9666         uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
   9667         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
   9668                 hotPixelMode)) {
   9669             rc = BAD_VALUE;
   9670         }
   9671     }
   9672 
   9673     if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
   9674         float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
   9675         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
   9676                 lensAperture)) {
   9677             rc = BAD_VALUE;
   9678         }
   9679     }
   9680 
   9681     if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
   9682         float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
   9683         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
   9684                 filterDensity)) {
   9685             rc = BAD_VALUE;
   9686         }
   9687     }
   9688 
   9689     if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
   9690         float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
   9691         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
   9692                 focalLength)) {
   9693             rc = BAD_VALUE;
   9694         }
   9695     }
   9696 
   9697     if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
   9698         uint8_t optStabMode =
   9699                 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
   9700         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
   9701                 optStabMode)) {
   9702             rc = BAD_VALUE;
   9703         }
   9704     }
   9705 
   9706     if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
   9707         uint8_t videoStabMode =
   9708                 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
   9709         LOGD("videoStabMode from APP = %d", videoStabMode);
   9710         if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
   9711                 videoStabMode)) {
   9712             rc = BAD_VALUE;
   9713         }
   9714     }
   9715 
   9716 
   9717     if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
   9718         uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
   9719         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
   9720                 noiseRedMode)) {
   9721             rc = BAD_VALUE;
   9722         }
   9723     }
   9724 
   9725     if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
   9726         float reprocessEffectiveExposureFactor =
   9727             frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
   9728         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
   9729                 reprocessEffectiveExposureFactor)) {
   9730             rc = BAD_VALUE;
   9731         }
   9732     }
   9733 
   9734     cam_crop_region_t scalerCropRegion;
   9735     bool scalerCropSet = false;
   9736     if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
   9737         scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
   9738         scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
   9739         scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
   9740         scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
   9741 
   9742         // Map coordinate system from active array to sensor output.
   9743         mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
   9744                 scalerCropRegion.width, scalerCropRegion.height);
   9745 
   9746         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
   9747                 scalerCropRegion)) {
   9748             rc = BAD_VALUE;
   9749         }
   9750         scalerCropSet = true;
   9751     }
   9752 
   9753     if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
   9754         int64_t sensorExpTime =
   9755                 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
   9756         LOGD("setting sensorExpTime %lld", sensorExpTime);
   9757         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
   9758                 sensorExpTime)) {
   9759             rc = BAD_VALUE;
   9760         }
   9761     }
   9762 
   9763     if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
   9764         int64_t sensorFrameDuration =
   9765                 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
   9766         int64_t minFrameDuration = getMinFrameDuration(request);
   9767         sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
   9768         if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
   9769             sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
   9770         LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
   9771         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
   9772                 sensorFrameDuration)) {
   9773             rc = BAD_VALUE;
   9774         }
   9775     }
   9776 
   9777     if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
   9778         int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
   9779         if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
   9780                 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
   9781         if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
   9782                 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
   9783         LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
   9784         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
   9785                 sensorSensitivity)) {
   9786             rc = BAD_VALUE;
   9787         }
   9788     }
   9789 
   9790     if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
   9791         int32_t ispSensitivity =
   9792             frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
   9793         if (ispSensitivity <
   9794             gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
   9795                 ispSensitivity =
   9796                     gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
   9797                 LOGD("clamp ispSensitivity to %d", ispSensitivity);
   9798         }
   9799         if (ispSensitivity >
   9800             gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
   9801                 ispSensitivity =
   9802                     gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
   9803                 LOGD("clamp ispSensitivity to %d", ispSensitivity);
   9804         }
   9805         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
   9806                 ispSensitivity)) {
   9807             rc = BAD_VALUE;
   9808         }
   9809     }
   9810 
   9811     if (frame_settings.exists(ANDROID_SHADING_MODE)) {
   9812         uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
   9813         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
   9814             rc = BAD_VALUE;
   9815         }
   9816     }
   9817 
   9818     if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
   9819         uint8_t fwk_facedetectMode =
   9820                 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
   9821 
   9822         int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
   9823                 fwk_facedetectMode);
   9824 
   9825         if (NAME_NOT_FOUND != val) {
   9826             uint8_t facedetectMode = (uint8_t)val;
   9827             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
   9828                     facedetectMode)) {
   9829                 rc = BAD_VALUE;
   9830             }
   9831         }
   9832     }
   9833 
   9834     if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
   9835         uint8_t histogramMode =
   9836                 frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
   9837         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
   9838                 histogramMode)) {
   9839             rc = BAD_VALUE;
   9840         }
   9841     }
   9842 
   9843     if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
   9844         uint8_t sharpnessMapMode =
   9845                 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
   9846         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
   9847                 sharpnessMapMode)) {
   9848             rc = BAD_VALUE;
   9849         }
   9850     }
   9851 
   9852     if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
   9853         uint8_t tonemapMode =
   9854                 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
   9855         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
   9856             rc = BAD_VALUE;
   9857         }
   9858     }
   9859     /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
   9860     /*All tonemap channels will have the same number of points*/
   9861     if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
   9862         frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
   9863         frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
   9864         cam_rgb_tonemap_curves tonemapCurves;
   9865         tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
   9866         if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
   9867             LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
   9868                      tonemapCurves.tonemap_points_cnt,
   9869                     CAM_MAX_TONEMAP_CURVE_SIZE);
   9870             tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
   9871         }
   9872 
   9873         /* ch0 = G*/
   9874         size_t point = 0;
   9875         cam_tonemap_curve_t tonemapCurveGreen;
   9876         for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
   9877             for (size_t j = 0; j < 2; j++) {
   9878                tonemapCurveGreen.tonemap_points[i][j] =
   9879                   frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
   9880                point++;
   9881             }
   9882         }
   9883         tonemapCurves.curves[0] = tonemapCurveGreen;
   9884 
   9885         /* ch 1 = B */
   9886         point = 0;
   9887         cam_tonemap_curve_t tonemapCurveBlue;
   9888         for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
   9889             for (size_t j = 0; j < 2; j++) {
   9890                tonemapCurveBlue.tonemap_points[i][j] =
   9891                   frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
   9892                point++;
   9893             }
   9894         }
   9895         tonemapCurves.curves[1] = tonemapCurveBlue;
   9896 
   9897         /* ch 2 = R */
   9898         point = 0;
   9899         cam_tonemap_curve_t tonemapCurveRed;
   9900         for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
   9901             for (size_t j = 0; j < 2; j++) {
   9902                tonemapCurveRed.tonemap_points[i][j] =
   9903                   frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
   9904                point++;
   9905             }
   9906         }
   9907         tonemapCurves.curves[2] = tonemapCurveRed;
   9908 
   9909         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
   9910                 tonemapCurves)) {
   9911             rc = BAD_VALUE;
   9912         }
   9913     }
   9914 
   9915     if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
   9916         uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
   9917         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
   9918                 captureIntent)) {
   9919             rc = BAD_VALUE;
   9920         }
   9921     }
   9922 
   9923     if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
   9924         uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
   9925         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
   9926                 blackLevelLock)) {
   9927             rc = BAD_VALUE;
   9928         }
   9929     }
   9930 
   9931     if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
   9932         uint8_t lensShadingMapMode =
   9933                 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
   9934         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
   9935                 lensShadingMapMode)) {
   9936             rc = BAD_VALUE;
   9937         }
   9938     }
   9939 
   9940     if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
   9941         cam_area_t roi;
   9942         bool reset = true;
   9943         convertFromRegions(roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
   9944 
   9945         // Map coordinate system from active array to sensor output.
   9946         mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
   9947                 roi.rect.height);
   9948 
   9949         if (scalerCropSet) {
   9950             reset = resetIfNeededROI(&roi, &scalerCropRegion);
   9951         }
   9952         if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
   9953             rc = BAD_VALUE;
   9954         }
   9955     }
   9956 
   9957     if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
   9958         cam_area_t roi;
   9959         bool reset = true;
   9960         convertFromRegions(roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
   9961 
   9962         // Map coordinate system from active array to sensor output.
   9963         mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
   9964                 roi.rect.height);
   9965 
   9966         if (scalerCropSet) {
   9967             reset = resetIfNeededROI(&roi, &scalerCropRegion);
   9968         }
   9969         if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
   9970             rc = BAD_VALUE;
   9971         }
   9972     }
   9973 
   9974     // CDS for non-HFR non-video mode
   9975     if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
   9976             !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
   9977         int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
   9978         if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
   9979             LOGE("Invalid CDS mode %d!", *fwk_cds);
   9980         } else {
   9981             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
   9982                     CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
   9983                 rc = BAD_VALUE;
   9984             }
   9985         }
   9986     }
   9987 
   9988     // TNR
   9989     if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
   9990         frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
   9991         uint8_t b_TnrRequested = 0;
   9992         cam_denoise_param_t tnr;
   9993         tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
   9994         tnr.process_plates =
   9995             (cam_denoise_process_type_t)frame_settings.find(
   9996             QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
   9997         b_TnrRequested = tnr.denoise_enable;
   9998         if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
   9999             rc = BAD_VALUE;
   10000         }
   10001     }
   10002 
    // Sensor test pattern: map the framework mode to the HAL enum and, for
    // SOLID_COLOR, carry over the per-channel color values.
    if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
        int32_t fwk_testPatternMode =
                frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
        int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
                METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);

        if (NAME_NOT_FOUND != testPatternMode) {
            cam_test_pattern_data_t testPatternData;
            memset(&testPatternData, 0, sizeof(testPatternData));
            testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
            if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
                    frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
                // Framework supplies four values in canonical R, Gr, Gb, B
                // order (indices 0..3); R and B map directly.
                int32_t *fwk_testPatternData =
                        frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
                testPatternData.r = fwk_testPatternData[0];
                testPatternData.b = fwk_testPatternData[3];
                // The two green channels must be swapped for GBRG/BGGR
                // sensors so each value lands on the physically matching
                // green photosite of this sensor's CFA layout.
                switch (gCamCapability[mCameraId]->color_arrangement) {
                    case CAM_FILTER_ARRANGEMENT_RGGB:
                    case CAM_FILTER_ARRANGEMENT_GRBG:
                        testPatternData.gr = fwk_testPatternData[1];
                        testPatternData.gb = fwk_testPatternData[2];
                        break;
                    case CAM_FILTER_ARRANGEMENT_GBRG:
                    case CAM_FILTER_ARRANGEMENT_BGGR:
                        testPatternData.gr = fwk_testPatternData[2];
                        testPatternData.gb = fwk_testPatternData[1];
                        break;
                    default:
                        // Unknown CFA: leave both greens at the memset zeros.
                        LOGE("color arrangement %d is not supported",
                                gCamCapability[mCameraId]->color_arrangement);
                        break;
                }
            }
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
                    testPatternData)) {
                rc = BAD_VALUE;
            }
        } else {
            // Unrecognized framework mode: log and skip, leaving rc unchanged.
            LOGE("Invalid framework sensor test pattern mode %d",
                    fwk_testPatternMode);
        }
    }
   10045 
   10046     if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
   10047         size_t count = 0;
   10048         camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
   10049         ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
   10050                 gps_coords.data.d, gps_coords.count, count);
   10051         if (gps_coords.count != count) {
   10052             rc = BAD_VALUE;
   10053         }
   10054     }
   10055 
   10056     if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
   10057         char gps_methods[GPS_PROCESSING_METHOD_SIZE];
   10058         size_t count = 0;
   10059         const char *gps_methods_src = (const char *)
   10060                 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
   10061         memset(gps_methods, '\0', sizeof(gps_methods));
   10062         strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
   10063         ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
   10064                 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
   10065         if (GPS_PROCESSING_METHOD_SIZE != count) {
   10066             rc = BAD_VALUE;
   10067         }
   10068     }
   10069 
   10070     if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
   10071         int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
   10072         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
   10073                 gps_timestamp)) {
   10074             rc = BAD_VALUE;
   10075         }
   10076     }
   10077 
   10078     if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
   10079         int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
   10080         cam_rotation_info_t rotation_info;
   10081         if (orientation == 0) {
   10082            rotation_info.rotation = ROTATE_0;
   10083         } else if (orientation == 90) {
   10084            rotation_info.rotation = ROTATE_90;
   10085         } else if (orientation == 180) {
   10086            rotation_info.rotation = ROTATE_180;
   10087         } else if (orientation == 270) {
   10088            rotation_info.rotation = ROTATE_270;
   10089         }
   10090         rotation_info.device_rotation = ROTATE_0;
   10091         rotation_info.streamId = snapshotStreamId;
   10092         ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
   10093         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
   10094             rc = BAD_VALUE;
   10095         }
   10096     }
   10097 
   10098     if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
   10099         uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
   10100         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
   10101             rc = BAD_VALUE;
   10102         }
   10103     }
   10104 
   10105     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
   10106         uint32_t thumb_quality = (uint32_t)
   10107                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
   10108         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
   10109                 thumb_quality)) {
   10110             rc = BAD_VALUE;
   10111         }
   10112     }
   10113 
   10114     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
   10115         cam_dimension_t dim;
   10116         dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
   10117         dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
   10118         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
   10119             rc = BAD_VALUE;
   10120         }
   10121     }
   10122 
   10123     // Internal metadata
   10124     if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
   10125         size_t count = 0;
   10126         camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
   10127         ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
   10128                 privatedata.data.i32, privatedata.count, count);
   10129         if (privatedata.count != count) {
   10130             rc = BAD_VALUE;
   10131         }
   10132     }
   10133 
   10134     // EV step
   10135     if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
   10136             gCamCapability[mCameraId]->exp_compensation_step)) {
   10137         rc = BAD_VALUE;
   10138     }
   10139 
   10140     // CDS info
   10141     if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
   10142         cam_cds_data_t *cdsData = (cam_cds_data_t *)
   10143                 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
   10144 
   10145         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
   10146                 CAM_INTF_META_CDS_DATA, *cdsData)) {
   10147             rc = BAD_VALUE;
   10148         }
   10149     }
   10150 
   10151     // Hybrid AE
   10152     if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
   10153         uint8_t *hybrid_ae = (uint8_t *)
   10154                 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
   10155 
   10156         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
   10157                 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
   10158             rc = BAD_VALUE;
   10159         }
   10160     }
   10161 
   10162     return rc;
   10163 }
   10164 
   10165 /*===========================================================================
   10166  * FUNCTION   : captureResultCb
   10167  *
   10168  * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
   10169  *
   10170  * PARAMETERS :
   10171  *   @frame  : frame information from mm-camera-interface
   10172  *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
   10173  *   @userdata: userdata
   10174  *
   10175  * RETURN     : NONE
   10176  *==========================================================================*/
   10177 void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
   10178                 camera3_stream_buffer_t *buffer,
   10179                 uint32_t frame_number, bool isInputBuffer, void *userdata)
   10180 {
   10181     QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
   10182     if (hw == NULL) {
   10183         LOGE("Invalid hw %p", hw);
   10184         return;
   10185     }
   10186 
   10187     hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
   10188     return;
   10189 }
   10190 
   10191 /*===========================================================================
   10192  * FUNCTION   : setBufferErrorStatus
   10193  *
   10194  * DESCRIPTION: Callback handler for channels to report any buffer errors
   10195  *
   10196  * PARAMETERS :
   10197  *   @ch     : Channel on which buffer error is reported from
   10198  *   @frame_number  : frame number on which buffer error is reported on
   10199  *   @buffer_status : buffer error status
   10200  *   @userdata: userdata
   10201  *
   10202  * RETURN     : NONE
   10203  *==========================================================================*/
   10204 void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
   10205                 uint32_t frame_number, camera3_buffer_status_t err,
   10206                 void *userdata)
   10207 {
   10208     QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
   10209     if (hw == NULL) {
   10210         LOGE("Invalid hw %p", hw);
   10211         return;
   10212     }
   10213 
   10214     hw->setBufferErrorStatus(ch, frame_number, err);
   10215     return;
   10216 }
   10217 
   10218 void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
   10219                 uint32_t frameNumber, camera3_buffer_status_t err)
   10220 {
   10221     LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
   10222     pthread_mutex_lock(&mMutex);
   10223 
   10224     for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
   10225         if (req.frame_number != frameNumber)
   10226             continue;
   10227         for (auto& k : req.mPendingBufferList) {
   10228             if(k.stream->priv == ch) {
   10229                 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
   10230             }
   10231         }
   10232     }
   10233 
   10234     pthread_mutex_unlock(&mMutex);
   10235     return;
   10236 }
   10237 /*===========================================================================
   10238  * FUNCTION   : initialize
   10239  *
   10240  * DESCRIPTION: Pass framework callback pointers to HAL
   10241  *
   10242  * PARAMETERS :
   10243  *
   10244  *
   10245  * RETURN     : Success : 0
   10246  *              Failure: -ENODEV
   10247  *==========================================================================*/
   10248 
   10249 int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
   10250                                   const camera3_callback_ops_t *callback_ops)
   10251 {
   10252     LOGD("E");
   10253     QCamera3HardwareInterface *hw =
   10254         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   10255     if (!hw) {
   10256         LOGE("NULL camera device");
   10257         return -ENODEV;
   10258     }
   10259 
   10260     int rc = hw->initialize(callback_ops);
   10261     LOGD("X");
   10262     return rc;
   10263 }
   10264 
   10265 /*===========================================================================
   10266  * FUNCTION   : configure_streams
   10267  *
   10268  * DESCRIPTION:
   10269  *
   10270  * PARAMETERS :
   10271  *
   10272  *
   10273  * RETURN     : Success: 0
   10274  *              Failure: -EINVAL (if stream configuration is invalid)
   10275  *                       -ENODEV (fatal error)
   10276  *==========================================================================*/
   10277 
   10278 int QCamera3HardwareInterface::configure_streams(
   10279         const struct camera3_device *device,
   10280         camera3_stream_configuration_t *stream_list)
   10281 {
   10282     LOGD("E");
   10283     QCamera3HardwareInterface *hw =
   10284         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   10285     if (!hw) {
   10286         LOGE("NULL camera device");
   10287         return -ENODEV;
   10288     }
   10289     int rc = hw->configureStreams(stream_list);
   10290     LOGD("X");
   10291     return rc;
   10292 }
   10293 
   10294 /*===========================================================================
   10295  * FUNCTION   : construct_default_request_settings
   10296  *
   10297  * DESCRIPTION: Configure a settings buffer to meet the required use case
   10298  *
   10299  * PARAMETERS :
   10300  *
   10301  *
   10302  * RETURN     : Success: Return valid metadata
   10303  *              Failure: Return NULL
   10304  *==========================================================================*/
   10305 const camera_metadata_t* QCamera3HardwareInterface::
   10306     construct_default_request_settings(const struct camera3_device *device,
   10307                                         int type)
   10308 {
   10309 
   10310     LOGD("E");
   10311     camera_metadata_t* fwk_metadata = NULL;
   10312     QCamera3HardwareInterface *hw =
   10313         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   10314     if (!hw) {
   10315         LOGE("NULL camera device");
   10316         return NULL;
   10317     }
   10318 
   10319     fwk_metadata = hw->translateCapabilityToMetadata(type);
   10320 
   10321     LOGD("X");
   10322     return fwk_metadata;
   10323 }
   10324 
   10325 /*===========================================================================
   10326  * FUNCTION   : process_capture_request
   10327  *
   10328  * DESCRIPTION:
   10329  *
   10330  * PARAMETERS :
   10331  *
   10332  *
   10333  * RETURN     :
   10334  *==========================================================================*/
   10335 int QCamera3HardwareInterface::process_capture_request(
   10336                     const struct camera3_device *device,
   10337                     camera3_capture_request_t *request)
   10338 {
   10339     LOGD("E");
   10340     QCamera3HardwareInterface *hw =
   10341         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   10342     if (!hw) {
   10343         LOGE("NULL camera device");
   10344         return -EINVAL;
   10345     }
   10346 
   10347     int rc = hw->processCaptureRequest(request);
   10348     LOGD("X");
   10349     return rc;
   10350 }
   10351 
   10352 /*===========================================================================
   10353  * FUNCTION   : dump
   10354  *
   10355  * DESCRIPTION:
   10356  *
   10357  * PARAMETERS :
   10358  *
   10359  *
   10360  * RETURN     :
   10361  *==========================================================================*/
   10362 
   10363 void QCamera3HardwareInterface::dump(
   10364                 const struct camera3_device *device, int fd)
   10365 {
   10366     /* Log level property is read when "adb shell dumpsys media.camera" is
   10367        called so that the log level can be controlled without restarting
   10368        the media server */
   10369     getLogLevel();
   10370 
   10371     LOGD("E");
   10372     QCamera3HardwareInterface *hw =
   10373         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   10374     if (!hw) {
   10375         LOGE("NULL camera device");
   10376         return;
   10377     }
   10378 
   10379     hw->dump(fd);
   10380     LOGD("X");
   10381     return;
   10382 }
   10383 
   10384 /*===========================================================================
   10385  * FUNCTION   : flush
   10386  *
   10387  * DESCRIPTION:
   10388  *
   10389  * PARAMETERS :
   10390  *
   10391  *
   10392  * RETURN     :
   10393  *==========================================================================*/
   10394 
   10395 int QCamera3HardwareInterface::flush(
   10396                 const struct camera3_device *device)
   10397 {
   10398     int rc;
   10399     LOGD("E");
   10400     QCamera3HardwareInterface *hw =
   10401         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   10402     if (!hw) {
   10403         LOGE("NULL camera device");
   10404         return -EINVAL;
   10405     }
   10406 
   10407     pthread_mutex_lock(&hw->mMutex);
   10408     // Validate current state
   10409     switch (hw->mState) {
   10410         case STARTED:
   10411             /* valid state */
   10412             break;
   10413 
   10414         case ERROR:
   10415             pthread_mutex_unlock(&hw->mMutex);
   10416             hw->handleCameraDeviceError();
   10417             return -ENODEV;
   10418 
   10419         default:
   10420             LOGI("Flush returned during state %d", hw->mState);
   10421             pthread_mutex_unlock(&hw->mMutex);
   10422             return 0;
   10423     }
   10424     pthread_mutex_unlock(&hw->mMutex);
   10425 
   10426     rc = hw->flush(true /* restart channels */ );
   10427     LOGD("X");
   10428     return rc;
   10429 }
   10430 
   10431 /*===========================================================================
   10432  * FUNCTION   : close_camera_device
   10433  *
   10434  * DESCRIPTION:
   10435  *
   10436  * PARAMETERS :
   10437  *
   10438  *
   10439  * RETURN     :
   10440  *==========================================================================*/
   10441 int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
   10442 {
   10443     int ret = NO_ERROR;
   10444     QCamera3HardwareInterface *hw =
   10445         reinterpret_cast<QCamera3HardwareInterface *>(
   10446             reinterpret_cast<camera3_device_t *>(device)->priv);
   10447     if (!hw) {
   10448         LOGE("NULL camera device");
   10449         return BAD_VALUE;
   10450     }
   10451 
   10452     LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
   10453     delete hw;
   10454     LOGI("[KPI Perf]: X");
   10455     return ret;
   10456 }
   10457 
   10458 /*===========================================================================
   10459  * FUNCTION   : getWaveletDenoiseProcessPlate
   10460  *
   10461  * DESCRIPTION: query wavelet denoise process plate
   10462  *
   10463  * PARAMETERS : None
   10464  *
   10465  * RETURN     : WNR prcocess plate value
   10466  *==========================================================================*/
   10467 cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
   10468 {
   10469     char prop[PROPERTY_VALUE_MAX];
   10470     memset(prop, 0, sizeof(prop));
   10471     property_get("persist.denoise.process.plates", prop, "0");
   10472     int processPlate = atoi(prop);
   10473     switch(processPlate) {
   10474     case 0:
   10475         return CAM_WAVELET_DENOISE_YCBCR_PLANE;
   10476     case 1:
   10477         return CAM_WAVELET_DENOISE_CBCR_ONLY;
   10478     case 2:
   10479         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
   10480     case 3:
   10481         return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
   10482     default:
   10483         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
   10484     }
   10485 }
   10486 
   10487 
   10488 /*===========================================================================
   10489  * FUNCTION   : getTemporalDenoiseProcessPlate
   10490  *
   10491  * DESCRIPTION: query temporal denoise process plate
   10492  *
   10493  * PARAMETERS : None
   10494  *
   10495  * RETURN     : TNR prcocess plate value
   10496  *==========================================================================*/
   10497 cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
   10498 {
   10499     char prop[PROPERTY_VALUE_MAX];
   10500     memset(prop, 0, sizeof(prop));
   10501     property_get("persist.tnr.process.plates", prop, "0");
   10502     int processPlate = atoi(prop);
   10503     switch(processPlate) {
   10504     case 0:
   10505         return CAM_WAVELET_DENOISE_YCBCR_PLANE;
   10506     case 1:
   10507         return CAM_WAVELET_DENOISE_CBCR_ONLY;
   10508     case 2:
   10509         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
   10510     case 3:
   10511         return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
   10512     default:
   10513         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
   10514     }
   10515 }
   10516 
   10517 
   10518 /*===========================================================================
   10519  * FUNCTION   : extractSceneMode
   10520  *
   10521  * DESCRIPTION: Extract scene mode from frameworks set metadata
   10522  *
   10523  * PARAMETERS :
   10524  *      @frame_settings: CameraMetadata reference
   10525  *      @metaMode: ANDROID_CONTORL_MODE
   10526  *      @hal_metadata: hal metadata structure
   10527  *
   10528  * RETURN     : None
   10529  *==========================================================================*/
   10530 int32_t QCamera3HardwareInterface::extractSceneMode(
   10531         const CameraMetadata &frame_settings, uint8_t metaMode,
   10532         metadata_buffer_t *hal_metadata)
   10533 {
   10534     int32_t rc = NO_ERROR;
   10535 
   10536     if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
   10537         camera_metadata_ro_entry entry =
   10538                 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
   10539         if (0 == entry.count)
   10540             return rc;
   10541 
   10542         uint8_t fwk_sceneMode = entry.data.u8[0];
   10543 
   10544         int val = lookupHalName(SCENE_MODES_MAP,
   10545                 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
   10546                 fwk_sceneMode);
   10547         if (NAME_NOT_FOUND != val) {
   10548             uint8_t sceneMode = (uint8_t)val;
   10549             LOGD("sceneMode: %d", sceneMode);
   10550             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
   10551                     CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
   10552                 rc = BAD_VALUE;
   10553             }
   10554         }
   10555     } else if ((ANDROID_CONTROL_MODE_OFF == metaMode) ||
   10556             (ANDROID_CONTROL_MODE_AUTO == metaMode)) {
   10557         uint8_t sceneMode = CAM_SCENE_MODE_OFF;
   10558         LOGD("sceneMode: %d", sceneMode);
   10559         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
   10560                 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
   10561             rc = BAD_VALUE;
   10562         }
   10563     }
   10564     return rc;
   10565 }
   10566 
   10567 /*===========================================================================
   10568  * FUNCTION   : needRotationReprocess
   10569  *
   10570  * DESCRIPTION: if rotation needs to be done by reprocess in pp
   10571  *
   10572  * PARAMETERS : none
   10573  *
   10574  * RETURN     : true: needed
   10575  *              false: no need
   10576  *==========================================================================*/
   10577 bool QCamera3HardwareInterface::needRotationReprocess()
   10578 {
   10579     if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
   10580         // current rotation is not zero, and pp has the capability to process rotation
   10581         LOGH("need do reprocess for rotation");
   10582         return true;
   10583     }
   10584 
   10585     return false;
   10586 }
   10587 
   10588 /*===========================================================================
   10589  * FUNCTION   : needReprocess
   10590  *
   10591  * DESCRIPTION: if reprocess in needed
   10592  *
   10593  * PARAMETERS : none
   10594  *
   10595  * RETURN     : true: needed
   10596  *              false: no need
   10597  *==========================================================================*/
   10598 bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
   10599 {
   10600     if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
   10601         // TODO: add for ZSL HDR later
   10602         // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
   10603         if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
   10604             LOGH("need do reprocess for ZSL WNR or min PP reprocess");
   10605             return true;
   10606         } else {
   10607             LOGH("already post processed frame");
   10608             return false;
   10609         }
   10610     }
   10611     return needRotationReprocess();
   10612 }
   10613 
   10614 /*===========================================================================
   10615  * FUNCTION   : needJpegExifRotation
   10616  *
   10617  * DESCRIPTION: if rotation from jpeg is needed
   10618  *
   10619  * PARAMETERS : none
   10620  *
   10621  * RETURN     : true: needed
   10622  *              false: no need
   10623  *==========================================================================*/
   10624 bool QCamera3HardwareInterface::needJpegExifRotation()
   10625 {
   10626    /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
   10627     if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
   10628        LOGD("Need use Jpeg EXIF Rotation");
   10629        return true;
   10630     }
   10631     return false;
   10632 }
   10633 
   10634 /*===========================================================================
   10635  * FUNCTION   : addOfflineReprocChannel
   10636  *
   10637  * DESCRIPTION: add a reprocess channel that will do reprocess on frames
   10638  *              coming from input channel
   10639  *
   10640  * PARAMETERS :
   10641  *   @config  : reprocess configuration
   10642  *   @inputChHandle : pointer to the input (source) channel
   10643  *
   10644  *
   10645  * RETURN     : Ptr to the newly created channel obj. NULL if failed.
   10646  *==========================================================================*/
   10647 QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
   10648         const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
   10649 {
   10650     int32_t rc = NO_ERROR;
   10651     QCamera3ReprocessChannel *pChannel = NULL;
   10652 
   10653     pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
   10654             mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
   10655             config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
   10656     if (NULL == pChannel) {
   10657         LOGE("no mem for reprocess channel");
   10658         return NULL;
   10659     }
   10660 
   10661     rc = pChannel->initialize(IS_TYPE_NONE);
   10662     if (rc != NO_ERROR) {
   10663         LOGE("init reprocess channel failed, ret = %d", rc);
   10664         delete pChannel;
   10665         return NULL;
   10666     }
   10667 
   10668     // pp feature config
   10669     cam_pp_feature_config_t pp_config;
   10670     memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
   10671 
   10672     pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
   10673     if (gCamCapability[mCameraId]->qcom_supported_feature_mask
   10674             & CAM_QCOM_FEATURE_DSDN) {
   10675         //Use CPP CDS incase h/w supports it.
   10676         pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
   10677         pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
   10678     }
   10679     if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
   10680         pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
   10681     }
   10682 
   10683     rc = pChannel->addReprocStreamsFromSource(pp_config,
   10684             config,
   10685             IS_TYPE_NONE,
   10686             mMetadataChannel);
   10687 
   10688     if (rc != NO_ERROR) {
   10689         delete pChannel;
   10690         return NULL;
   10691     }
   10692     return pChannel;
   10693 }
   10694 
   10695 /*===========================================================================
   10696  * FUNCTION   : getMobicatMask
   10697  *
   10698  * DESCRIPTION: returns mobicat mask
   10699  *
   10700  * PARAMETERS : none
   10701  *
   10702  * RETURN     : mobicat mask
   10703  *
   10704  *==========================================================================*/
   10705 uint8_t QCamera3HardwareInterface::getMobicatMask()
   10706 {
   10707     return m_MobicatMask;
   10708 }
   10709 
   10710 /*===========================================================================
   10711  * FUNCTION   : setMobicat
   10712  *
   10713  * DESCRIPTION: set Mobicat on/off.
   10714  *
   10715  * PARAMETERS :
   10716  *   @params  : none
   10717  *
   10718  * RETURN     : int32_t type of status
   10719  *              NO_ERROR  -- success
   10720  *              none-zero failure code
   10721  *==========================================================================*/
   10722 int32_t QCamera3HardwareInterface::setMobicat()
   10723 {
   10724     int32_t ret = NO_ERROR;
   10725 
   10726     if (m_MobicatMask) {
   10727         tune_cmd_t tune_cmd;
   10728         tune_cmd.type = SET_RELOAD_CHROMATIX;
   10729         tune_cmd.module = MODULE_ALL;
   10730         tune_cmd.value = TRUE;
   10731         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
   10732                 CAM_INTF_PARM_SET_VFE_COMMAND,
   10733                 tune_cmd);
   10734 
   10735         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
   10736                 CAM_INTF_PARM_SET_PP_COMMAND,
   10737                 tune_cmd);
   10738     }
   10739 
   10740     return ret;
   10741 }
   10742 
   10743 /*===========================================================================
   10744 * FUNCTION   : getLogLevel
   10745 *
   10746 * DESCRIPTION: Reads the log level property into a variable
   10747 *
   10748 * PARAMETERS :
   10749 *   None
   10750 *
   10751 * RETURN     :
   10752 *   None
   10753 *==========================================================================*/
   10754 void QCamera3HardwareInterface::getLogLevel()
   10755 {
   10756     char prop[PROPERTY_VALUE_MAX];
   10757     uint32_t globalLogLevel = 0;
   10758 
   10759     property_get("persist.camera.hal.debug", prop, "0");
   10760     int val = atoi(prop);
   10761     if (0 <= val) {
   10762         gCamHal3LogLevel = (uint32_t)val;
   10763     }
   10764 
   10765     property_get("persist.camera.kpi.debug", prop, "1");
   10766     gKpiDebugLevel = atoi(prop);
   10767 
   10768     property_get("persist.camera.global.debug", prop, "0");
   10769     val = atoi(prop);
   10770     if (0 <= val) {
   10771         globalLogLevel = (uint32_t)val;
   10772     }
   10773 
   10774     /* Highest log level among hal.logs and global.logs is selected */
   10775     if (gCamHal3LogLevel < globalLogLevel)
   10776         gCamHal3LogLevel = globalLogLevel;
   10777 
   10778     return;
   10779 }
   10780 
   10781 /*===========================================================================
   10782  * FUNCTION   : validateStreamRotations
   10783  *
   10784  * DESCRIPTION: Check if the rotations requested are supported
   10785  *
   10786  * PARAMETERS :
   10787  *   @stream_list : streams to be configured
   10788  *
   10789  * RETURN     : NO_ERROR on success
   10790  *              -EINVAL on failure
   10791  *
   10792  *==========================================================================*/
   10793 int QCamera3HardwareInterface::validateStreamRotations(
   10794         camera3_stream_configuration_t *streamList)
   10795 {
   10796     int rc = NO_ERROR;
   10797 
   10798     /*
   10799     * Loop through all streams requested in configuration
   10800     * Check if unsupported rotations have been requested on any of them
   10801     */
   10802     for (size_t j = 0; j < streamList->num_streams; j++){
   10803         camera3_stream_t *newStream = streamList->streams[j];
   10804 
   10805         switch(newStream->rotation) {
   10806             case CAMERA3_STREAM_ROTATION_0:
   10807             case CAMERA3_STREAM_ROTATION_90:
   10808             case CAMERA3_STREAM_ROTATION_180:
   10809             case CAMERA3_STREAM_ROTATION_270:
   10810                 //Expected values
   10811                 break;
   10812             default:
   10813                 LOGE("Error: Unsupported rotation of %d requested for stream"
   10814                         "type:%d and stream format:%d",
   10815                         newStream->rotation, newStream->stream_type,
   10816                         newStream->format);
   10817                 return -EINVAL;
   10818         }
   10819 
   10820         bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
   10821         bool isImplDef = (newStream->format ==
   10822                 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
   10823         bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
   10824                 isImplDef);
   10825 
   10826         if (isRotated && (!isImplDef || isZsl)) {
   10827             LOGE("Error: Unsupported rotation of %d requested for stream"
   10828                     "type:%d and stream format:%d",
   10829                     newStream->rotation, newStream->stream_type,
   10830                     newStream->format);
   10831             rc = -EINVAL;
   10832             break;
   10833         }
   10834     }
   10835 
   10836     return rc;
   10837 }
   10838 
   10839 /*===========================================================================
   10840 * FUNCTION   : getFlashInfo
   10841 *
   10842 * DESCRIPTION: Retrieve information about whether the device has a flash.
   10843 *
   10844 * PARAMETERS :
   10845 *   @cameraId  : Camera id to query
   10846 *   @hasFlash  : Boolean indicating whether there is a flash device
   10847 *                associated with given camera
   10848 *   @flashNode : If a flash device exists, this will be its device node.
   10849 *
   10850 * RETURN     :
   10851 *   None
   10852 *==========================================================================*/
   10853 void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
   10854         bool& hasFlash,
   10855         char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
   10856 {
   10857     cam_capability_t* camCapability = gCamCapability[cameraId];
   10858     if (NULL == camCapability) {
   10859         hasFlash = false;
   10860         flashNode[0] = '\0';
   10861     } else {
   10862         hasFlash = camCapability->flash_available;
   10863         strlcpy(flashNode,
   10864                 (char*)camCapability->flash_dev_name,
   10865                 QCAMERA_MAX_FILEPATH_LENGTH);
   10866     }
   10867 }
   10868 
   10869 /*===========================================================================
   10870 * FUNCTION   : getEepromVersionInfo
   10871 *
   10872 * DESCRIPTION: Retrieve version info of the sensor EEPROM data
   10873 *
   10874 * PARAMETERS : None
   10875 *
   10876 * RETURN     : string describing EEPROM version
   10877 *              "\0" if no such info available
   10878 *==========================================================================*/
   10879 const char *QCamera3HardwareInterface::getEepromVersionInfo()
   10880 {
   10881     return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
   10882 }
   10883 
   10884 /*===========================================================================
   10885 * FUNCTION   : getLdafCalib
   10886 *
   10887 * DESCRIPTION: Retrieve Laser AF calibration data
   10888 *
   10889 * PARAMETERS : None
   10890 *
   10891 * RETURN     : Two uint32_t describing laser AF calibration data
   10892 *              NULL if none is available.
   10893 *==========================================================================*/
   10894 const uint32_t *QCamera3HardwareInterface::getLdafCalib()
   10895 {
   10896     if (mLdafCalibExist) {
   10897         return &mLdafCalib[0];
   10898     } else {
   10899         return NULL;
   10900     }
   10901 }
   10902 
   10903 /*===========================================================================
   10904  * FUNCTION   : dynamicUpdateMetaStreamInfo
   10905  *
   10906  * DESCRIPTION: This function:
   10907  *             (1) stops all the channels
   10908  *             (2) returns error on pending requests and buffers
   10909  *             (3) sends metastream_info in setparams
   10910  *             (4) starts all channels
   10911  *             This is useful when sensor has to be restarted to apply any
   10912  *             settings such as frame rate from a different sensor mode
   10913  *
   10914  * PARAMETERS : None
   10915  *
   10916  * RETURN     : NO_ERROR on success
   10917  *              Error codes on failure
   10918  *
   10919  *==========================================================================*/
   10920 int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
   10921 {
   10922     ATRACE_CALL();
   10923     int rc = NO_ERROR;
   10924 
   10925     LOGD("E");
   10926 
   10927     rc = stopAllChannels();
   10928     if (rc < 0) {
   10929         LOGE("stopAllChannels failed");
   10930         return rc;
   10931     }
   10932 
   10933     rc = notifyErrorForPendingRequests();
   10934     if (rc < 0) {
   10935         LOGE("notifyErrorForPendingRequests failed");
   10936         return rc;
   10937     }
   10938 
   10939     for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
   10940         LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
   10941                 "Format:%d",
   10942                 mStreamConfigInfo.type[i],
   10943                 mStreamConfigInfo.stream_sizes[i].width,
   10944                 mStreamConfigInfo.stream_sizes[i].height,
   10945                 mStreamConfigInfo.postprocess_mask[i],
   10946                 mStreamConfigInfo.format[i]);
   10947     }
   10948 
   10949     /* Send meta stream info once again so that ISP can start */
   10950     ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
   10951             CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
   10952     rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
   10953             mParameters);
   10954     if (rc < 0) {
   10955         LOGE("set Metastreaminfo failed. Sensor mode does not change");
   10956     }
   10957 
   10958     rc = startAllChannels();
   10959     if (rc < 0) {
   10960         LOGE("startAllChannels failed");
   10961         return rc;
   10962     }
   10963 
   10964     LOGD("X");
   10965     return rc;
   10966 }
   10967 
   10968 /*===========================================================================
   10969  * FUNCTION   : stopAllChannels
   10970  *
   10971  * DESCRIPTION: This function stops (equivalent to stream-off) all channels
   10972  *
   10973  * PARAMETERS : None
   10974  *
   10975  * RETURN     : NO_ERROR on success
   10976  *              Error codes on failure
   10977  *
   10978  *==========================================================================*/
   10979 int32_t QCamera3HardwareInterface::stopAllChannels()
   10980 {
   10981     int32_t rc = NO_ERROR;
   10982 
   10983     LOGD("Stopping all channels");
   10984     // Stop the Streams/Channels
   10985     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   10986         it != mStreamInfo.end(); it++) {
   10987         QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
   10988         if (channel) {
   10989             channel->stop();
   10990         }
   10991         (*it)->status = INVALID;
   10992     }
   10993 
   10994     if (mSupportChannel) {
   10995         mSupportChannel->stop();
   10996     }
   10997     if (mAnalysisChannel) {
   10998         mAnalysisChannel->stop();
   10999     }
   11000     if (mRawDumpChannel) {
   11001         mRawDumpChannel->stop();
   11002     }
   11003     if (mMetadataChannel) {
   11004         /* If content of mStreamInfo is not 0, there is metadata stream */
   11005         mMetadataChannel->stop();
   11006     }
   11007 
   11008     LOGD("All channels stopped");
   11009     return rc;
   11010 }
   11011 
   11012 /*===========================================================================
   11013  * FUNCTION   : startAllChannels
   11014  *
   11015  * DESCRIPTION: This function starts (equivalent to stream-on) all channels
   11016  *
   11017  * PARAMETERS : None
   11018  *
   11019  * RETURN     : NO_ERROR on success
   11020  *              Error codes on failure
   11021  *
   11022  *==========================================================================*/
   11023 int32_t QCamera3HardwareInterface::startAllChannels()
   11024 {
   11025     int32_t rc = NO_ERROR;
   11026 
   11027     LOGD("Start all channels ");
   11028     // Start the Streams/Channels
   11029     if (mMetadataChannel) {
   11030         /* If content of mStreamInfo is not 0, there is metadata stream */
   11031         rc = mMetadataChannel->start();
   11032         if (rc < 0) {
   11033             LOGE("META channel start failed");
   11034             return rc;
   11035         }
   11036     }
   11037     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   11038         it != mStreamInfo.end(); it++) {
   11039         QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
   11040         if (channel) {
   11041             rc = channel->start();
   11042             if (rc < 0) {
   11043                 LOGE("channel start failed");
   11044                 return rc;
   11045             }
   11046         }
   11047     }
   11048     if (mAnalysisChannel) {
   11049         mAnalysisChannel->start();
   11050     }
   11051     if (mSupportChannel) {
   11052         rc = mSupportChannel->start();
   11053         if (rc < 0) {
   11054             LOGE("Support channel start failed");
   11055             return rc;
   11056         }
   11057     }
   11058     if (mRawDumpChannel) {
   11059         rc = mRawDumpChannel->start();
   11060         if (rc < 0) {
   11061             LOGE("RAW dump channel start failed");
   11062             return rc;
   11063         }
   11064     }
   11065 
   11066     LOGD("All channels started");
   11067     return rc;
   11068 }
   11069 
   11070 /*===========================================================================
   11071  * FUNCTION   : notifyErrorForPendingRequests
   11072  *
   11073  * DESCRIPTION: This function sends error for all the pending requests/buffers
   11074  *
   11075  * PARAMETERS : None
   11076  *
   11077  * RETURN     : Error codes
   11078  *              NO_ERROR on success
   11079  *
   11080  *==========================================================================*/
   11081 int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
   11082 {
   11083     int32_t rc = NO_ERROR;
   11084     unsigned int frameNum = 0;
   11085     camera3_capture_result_t result;
   11086     camera3_stream_buffer_t *pStream_Buf = NULL;
   11087 
   11088     memset(&result, 0, sizeof(camera3_capture_result_t));
   11089 
   11090     if (mPendingRequestsList.size() > 0) {
   11091         pendingRequestIterator i = mPendingRequestsList.begin();
   11092         frameNum = i->frame_number;
   11093     } else {
   11094         /* There might still be pending buffers even though there are
   11095          no pending requests. Setting the frameNum to MAX so that
   11096          all the buffers with smaller frame numbers are returned */
   11097         frameNum = UINT_MAX;
   11098     }
   11099 
   11100     LOGH("Oldest frame num on mPendingRequestsList = %u",
   11101        frameNum);
   11102 
   11103     for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
   11104             req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {
   11105 
   11106         if (req->frame_number < frameNum) {
   11107             // Send Error notify to frameworks for each buffer for which
   11108             // metadata buffer is already sent
   11109             LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
   11110                 req->frame_number, req->mPendingBufferList.size());
   11111 
   11112             pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
   11113             if (NULL == pStream_Buf) {
   11114                 LOGE("No memory for pending buffers array");
   11115                 return NO_MEMORY;
   11116             }
   11117             memset(pStream_Buf, 0,
   11118                 sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
   11119             result.result = NULL;
   11120             result.frame_number = req->frame_number;
   11121             result.num_output_buffers = req->mPendingBufferList.size();
   11122             result.output_buffers = pStream_Buf;
   11123 
   11124             size_t index = 0;
   11125             for (auto info = req->mPendingBufferList.begin();
   11126                 info != req->mPendingBufferList.end(); ) {
   11127 
   11128                 camera3_notify_msg_t notify_msg;
   11129                 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
   11130                 notify_msg.type = CAMERA3_MSG_ERROR;
   11131                 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
   11132                 notify_msg.message.error.error_stream = info->stream;
   11133                 notify_msg.message.error.frame_number = req->frame_number;
   11134                 pStream_Buf[index].acquire_fence = -1;
   11135                 pStream_Buf[index].release_fence = -1;
   11136                 pStream_Buf[index].buffer = info->buffer;
   11137                 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
   11138                 pStream_Buf[index].stream = info->stream;
   11139                 mCallbackOps->notify(mCallbackOps, &notify_msg);
   11140                 index++;
   11141                 // Remove buffer from list
   11142                 info = req->mPendingBufferList.erase(info);
   11143             }
   11144 
   11145             // Remove this request from Map
   11146             LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
   11147                 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
   11148             req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
   11149 
   11150             mCallbackOps->process_capture_result(mCallbackOps, &result);
   11151 
   11152             delete [] pStream_Buf;
   11153         } else {
   11154 
   11155             // Go through the pending requests info and send error request to framework
   11156             LOGE("Sending ERROR REQUEST for all pending requests");
   11157             pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning
   11158 
   11159             LOGE("Sending ERROR REQUEST for frame %d", req->frame_number);
   11160 
   11161             // Send error notify to frameworks
   11162             camera3_notify_msg_t notify_msg;
   11163             memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
   11164             notify_msg.type = CAMERA3_MSG_ERROR;
   11165             notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
   11166             notify_msg.message.error.error_stream = NULL;
   11167             notify_msg.message.error.frame_number = req->frame_number;
   11168             mCallbackOps->notify(mCallbackOps, &notify_msg);
   11169 
   11170             pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
   11171             if (NULL == pStream_Buf) {
   11172                 LOGE("No memory for pending buffers array");
   11173                 return NO_MEMORY;
   11174             }
   11175             memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
   11176 
   11177             result.result = NULL;
   11178             result.frame_number = req->frame_number;
   11179             result.input_buffer = i->input_buffer;
   11180             result.num_output_buffers = req->mPendingBufferList.size();
   11181             result.output_buffers = pStream_Buf;
   11182 
   11183             size_t index = 0;
   11184             for (auto info = req->mPendingBufferList.begin();
   11185                 info != req->mPendingBufferList.end(); ) {
   11186                 pStream_Buf[index].acquire_fence = -1;
   11187                 pStream_Buf[index].release_fence = -1;
   11188                 pStream_Buf[index].buffer = info->buffer;
   11189                 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
   11190                 pStream_Buf[index].stream = info->stream;
   11191                 index++;
   11192                 // Remove buffer from list
   11193                 info = req->mPendingBufferList.erase(info);
   11194             }
   11195 
   11196             // Remove this request from Map
   11197             LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
   11198                 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
   11199             req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
   11200 
   11201             mCallbackOps->process_capture_result(mCallbackOps, &result);
   11202             delete [] pStream_Buf;
   11203             i = erasePendingRequest(i);
   11204         }
   11205     }
   11206 
   11207     /* Reset pending frame Drop list and requests list */
   11208     mPendingFrameDropList.clear();
   11209 
   11210     for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
   11211         req.mPendingBufferList.clear();
   11212     }
   11213     mPendingBuffersMap.mPendingBuffersInRequest.clear();
   11214     mPendingReprocessResultList.clear();
   11215     LOGH("Cleared all the pending buffers ");
   11216 
   11217     return rc;
   11218 }
   11219 
   11220 bool QCamera3HardwareInterface::isOnEncoder(
   11221         const cam_dimension_t max_viewfinder_size,
   11222         uint32_t width, uint32_t height)
   11223 {
   11224     return (width > (uint32_t)max_viewfinder_size.width ||
   11225             height > (uint32_t)max_viewfinder_size.height);
   11226 }
   11227 
   11228 /*===========================================================================
   11229  * FUNCTION   : setBundleInfo
   11230  *
   11231  * DESCRIPTION: Set bundle info for all streams that are bundle.
   11232  *
   11233  * PARAMETERS : None
   11234  *
   11235  * RETURN     : NO_ERROR on success
   11236  *              Error codes on failure
   11237  *==========================================================================*/
   11238 int32_t QCamera3HardwareInterface::setBundleInfo()
   11239 {
   11240     int32_t rc = NO_ERROR;
   11241 
   11242     if (mChannelHandle) {
   11243         cam_bundle_config_t bundleInfo;
   11244         memset(&bundleInfo, 0, sizeof(bundleInfo));
   11245         rc = mCameraHandle->ops->get_bundle_info(
   11246                 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
   11247         if (rc != NO_ERROR) {
   11248             LOGE("get_bundle_info failed");
   11249             return rc;
   11250         }
   11251         if (mAnalysisChannel) {
   11252             mAnalysisChannel->setBundleInfo(bundleInfo);
   11253         }
   11254         if (mSupportChannel) {
   11255             mSupportChannel->setBundleInfo(bundleInfo);
   11256         }
   11257         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   11258                 it != mStreamInfo.end(); it++) {
   11259             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
   11260             channel->setBundleInfo(bundleInfo);
   11261         }
   11262         if (mRawDumpChannel) {
   11263             mRawDumpChannel->setBundleInfo(bundleInfo);
   11264         }
   11265     }
   11266 
   11267     return rc;
   11268 }
   11269 
   11270 /*===========================================================================
   11271  * FUNCTION   : get_num_overall_buffers
   11272  *
   11273  * DESCRIPTION: Estimate number of pending buffers across all requests.
   11274  *
   11275  * PARAMETERS : None
   11276  *
   11277  * RETURN     : Number of overall pending buffers
   11278  *
   11279  *==========================================================================*/
   11280 uint32_t PendingBuffersMap::get_num_overall_buffers()
   11281 {
   11282     uint32_t sum_buffers = 0;
   11283     for (auto &req : mPendingBuffersInRequest) {
   11284         sum_buffers += req.mPendingBufferList.size();
   11285     }
   11286     return sum_buffers;
   11287 }
   11288 
   11289 /*===========================================================================
   11290  * FUNCTION   : removeBuf
   11291  *
   11292  * DESCRIPTION: Remove a matching buffer from tracker.
   11293  *
   11294  * PARAMETERS : @buffer: image buffer for the callback
   11295  *
   11296  * RETURN     : None
   11297  *
   11298  *==========================================================================*/
   11299 void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
   11300 {
   11301     bool buffer_found = false;
   11302     for (auto req = mPendingBuffersInRequest.begin();
   11303             req != mPendingBuffersInRequest.end(); req++) {
   11304         for (auto k = req->mPendingBufferList.begin();
   11305                 k != req->mPendingBufferList.end(); k++ ) {
   11306             if (k->buffer == buffer) {
   11307                 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
   11308                         req->frame_number, buffer);
   11309                 k = req->mPendingBufferList.erase(k);
   11310                 if (req->mPendingBufferList.empty()) {
   11311                     // Remove this request from Map
   11312                     req = mPendingBuffersInRequest.erase(req);
   11313                 }
   11314                 buffer_found = true;
   11315                 break;
   11316             }
   11317         }
   11318         if (buffer_found) {
   11319             break;
   11320         }
   11321     }
   11322     LOGD("mPendingBuffersMap.num_overall_buffers = %d",
   11323             get_num_overall_buffers());
   11324 }
   11325 
   11326 /*===========================================================================
   11327  * FUNCTION   : getBufErrStatus
   11328  *
   11329  * DESCRIPTION: get buffer error status
   11330  *
   11331  * PARAMETERS : @buffer: buffer handle
   11332  *
   11333  * RETURN     : None
   11334  *
   11335  *==========================================================================*/
   11336 int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
   11337 {
   11338     for (auto& req : mPendingBuffersInRequest) {
   11339         for (auto& k : req.mPendingBufferList) {
   11340             if (k.buffer == buffer)
   11341                 return k.bufStatus;
   11342         }
   11343     }
   11344     return CAMERA3_BUFFER_STATUS_OK;
   11345 }
   11346 
   11347 /*===========================================================================
   11348  * FUNCTION   : setPAAFSupport
   11349  *
   11350  * DESCRIPTION: Set the preview-assisted auto focus support bit in
   11351  *              feature mask according to stream type and filter
   11352  *              arrangement
   11353  *
   11354  * PARAMETERS : @feature_mask: current feature mask, which may be modified
   11355  *              @stream_type: stream type
   11356  *              @filter_arrangement: filter arrangement
   11357  *
   11358  * RETURN     : None
   11359  *==========================================================================*/
   11360 void QCamera3HardwareInterface::setPAAFSupport(
   11361         cam_feature_mask_t& feature_mask,
   11362         cam_stream_type_t stream_type,
   11363         cam_color_filter_arrangement_t filter_arrangement)
   11364 {
   11365     switch (filter_arrangement) {
   11366     case CAM_FILTER_ARRANGEMENT_RGGB:
   11367     case CAM_FILTER_ARRANGEMENT_GRBG:
   11368     case CAM_FILTER_ARRANGEMENT_GBRG:
   11369     case CAM_FILTER_ARRANGEMENT_BGGR:
   11370         if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
   11371                 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
   11372             feature_mask |= CAM_QCOM_FEATURE_PAAF;
   11373         }
   11374         break;
   11375     case CAM_FILTER_ARRANGEMENT_Y:
   11376         if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
   11377             feature_mask |= CAM_QCOM_FEATURE_PAAF;
   11378         }
   11379         break;
   11380     default:
   11381         break;
   11382     }
   11383     LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
   11384             feature_mask, stream_type, filter_arrangement);
   11385 
   11386 
   11387 }
   11388 
   11389 /*===========================================================================
   11390  * FUNCTION   : adjustBlackLevelForCFA
   11391  *
   11392  * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
   11393  *              of bayer CFA (Color Filter Array).
   11394  *
   11395  * PARAMETERS : @input: black level pattern in the order of RGGB
   11396  *              @output: black level pattern in the order of CFA
   11397  *              @color_arrangement: CFA color arrangement
   11398  *
   11399  * RETURN     : None
   11400  *==========================================================================*/
   11401 template<typename T>
   11402 void QCamera3HardwareInterface::adjustBlackLevelForCFA(
   11403         T input[BLACK_LEVEL_PATTERN_CNT],
   11404         T output[BLACK_LEVEL_PATTERN_CNT],
   11405         cam_color_filter_arrangement_t color_arrangement)
   11406 {
   11407     switch (color_arrangement) {
   11408     case CAM_FILTER_ARRANGEMENT_GRBG:
   11409         output[0] = input[1];
   11410         output[1] = input[0];
   11411         output[2] = input[3];
   11412         output[3] = input[2];
   11413         break;
   11414     case CAM_FILTER_ARRANGEMENT_GBRG:
   11415         output[0] = input[2];
   11416         output[1] = input[3];
   11417         output[2] = input[0];
   11418         output[3] = input[1];
   11419         break;
   11420     case CAM_FILTER_ARRANGEMENT_BGGR:
   11421         output[0] = input[3];
   11422         output[1] = input[2];
   11423         output[2] = input[1];
   11424         output[3] = input[0];
   11425         break;
   11426     case CAM_FILTER_ARRANGEMENT_RGGB:
   11427         output[0] = input[0];
   11428         output[1] = input[1];
   11429         output[2] = input[2];
   11430         output[3] = input[3];
   11431         break;
   11432     default:
   11433         LOGE("Invalid color arrangement to derive dynamic blacklevel");
   11434         break;
   11435     }
   11436 }
   11437 
   11438 /*===========================================================================
   11439  * FUNCTION   : is60HzZone
   11440  *
   11441  * DESCRIPTION: Whether the phone is in zone with 60hz electricity frequency
   11442  *
   11443  * PARAMETERS : None
   11444  *
   11445  * RETURN     : True if in 60Hz zone, False otherwise
   11446  *==========================================================================*/
   11447 bool QCamera3HardwareInterface::is60HzZone()
   11448 {
   11449     time_t t = time(NULL);
   11450     struct tm lt;
   11451 
   11452     struct tm* r = localtime_r(&t, &lt);
   11453 
   11454     if (r == NULL || lt.tm_gmtoff <=  -2*60*60 || lt.tm_gmtoff >= 8*60*60)
   11455         return true;
   11456     else
   11457         return false;
   11458 }
   11459 }; //end namespace qcamera
   11460