Home | History | Annotate | Download | only in HAL3
      1 /* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
      2 *
      3 * Redistribution and use in source and binary forms, with or without
      4 * modification, are permitted provided that the following conditions are
      5 * met:
      6 *     * Redistributions of source code must retain the above copyright
      7 *       notice, this list of conditions and the following disclaimer.
      8 *     * Redistributions in binary form must reproduce the above
      9 *       copyright notice, this list of conditions and the following
     10 *       disclaimer in the documentation and/or other materials provided
     11 *       with the distribution.
     12 *     * Neither the name of The Linux Foundation nor the names of its
     13 *       contributors may be used to endorse or promote products derived
     14 *       from this software without specific prior written permission.
     15 *
     16 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
     17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
     18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
     19 * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
     20 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
     21 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
     22 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
     23 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
     24 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
     25 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
     26 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
     27 *
     28 */
     29 
     30 #define LOG_TAG "QCamera3HWI"
     31 //#define LOG_NDEBUG 0
     32 
     33 #define __STDC_LIMIT_MACROS
     34 
     35 // To remove
     36 #include <cutils/properties.h>
     37 
     38 // System dependencies
     39 #include <dlfcn.h>
     40 #include <fcntl.h>
     41 #include <stdio.h>
     42 #include <stdlib.h>
     43 #include "utils/Timers.h"
     44 #include "sys/ioctl.h"
     45 #include <sync/sync.h>
     46 #include "gralloc_priv.h"
     47 
     48 // Display dependencies
     49 #include "qdMetaData.h"
     50 
     51 // Camera dependencies
     52 #include "android/QCamera3External.h"
     53 #include "util/QCameraFlash.h"
     54 #include "QCamera3HWI.h"
     55 #include "QCamera3VendorTags.h"
     56 #include "QCameraTrace.h"
     57 
     58 extern "C" {
     59 #include "mm_camera_dbg.h"
     60 }
     61 
     62 using namespace android;
     63 
     64 namespace qcamera {
     65 
// Shorthand: fetch the mapped buffer pointer at INDEX from a camera memory object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

// Pipeline depth (in frames) reported when the pipeline is empty — TODO confirm
// against android.request.pipelineMaxDepth usage elsewhere in this file.
#define EMPTY_PIPELINE_DELAY 2
// Number of partial metadata results delivered per capture request.
#define PARTIAL_RESULT_COUNT 2
// Extra frames to skip after configuration; 0 = no skip.
#define FRAME_SKIP_DELAY     0

// Maximum pixel values for the supported sensor bit depths.
#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

// UHD (4K) video dimensions used to detect 4K recording sessions.
#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

// Largest stream size for which EIS (electronic image stabilization) is supported.
#define MAX_EIS_WIDTH 1920
#define MAX_EIS_HEIGHT 1080

// Per-session stream count limits by stream category.
#define MAX_RAW_STREAMS        1
#define MAX_STALLING_STREAMS   1
#define MAX_PROCESSED_STREAMS  3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
// Preview frame rate used while recording HFR video.
#define PREVIEW_FPS_FOR_HFR    (30)
// Fallback video frame rate when no explicit rate is requested.
#define DEFAULT_VIDEO_FPS      (30.0)
// Upper bound on the number of frames batched together in HFR mode.
#define MAX_HFR_BATCH_SIZE     (8)
// Number of int32 values per metering/focus region tuple (xmin, ymin, xmax, ymax, weight).
#define REGIONS_TUPLE_COUNT    5
#define HDR_PLUS_PERF_TIME_OUT  (7000) // milliseconds
// Set a threshold for detection of missing buffers //seconds
#define MISSING_REQUEST_BUF_TIMEOUT 3
// Maximum time (seconds) to wait for in-flight buffers during flush.
#define FLUSH_TIMEOUT 3
// Element count of a statically-sized map/table.
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

// Superset of post-processing features the HAL3 pipeline may apply.
#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3   ( CAM_QCOM_FEATURE_DENOISE2D |\
                                              CAM_QCOM_FEATURE_CROP |\
                                              CAM_QCOM_FEATURE_ROTATION |\
                                              CAM_QCOM_FEATURE_SHARPNESS |\
                                              CAM_QCOM_FEATURE_SCALE |\
                                              CAM_QCOM_FEATURE_CAC |\
                                              CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length*/
#define PER_CONFIGURATION_SIZE_3 (3)

// Sentinel meaning "block indefinitely" for timed waits.
#define TIMEOUT_NEVER -1

/* Face landmarks indices */
#define LEFT_EYE_X             0
#define LEFT_EYE_Y             1
#define RIGHT_EYE_X            2
#define RIGHT_EYE_Y            3
#define MOUTH_X                4
#define MOUTH_Y                5
#define TOTAL_LANDMARK_INDICES 6
    117 
// Per-sensor capability tables, populated once at camera-module init time.
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
// Cached static metadata (camera characteristics) per sensor.
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
// Process-wide lock guarding camera open/close bookkeeping (defined elsewhere).
extern pthread_mutex_t gCamLock;
// Runtime-adjustable HAL log verbosity; read by the logging macros.
volatile uint32_t gCamHal3LogLevel = 1;
// Count of concurrently open camera sessions (defined elsewhere).
extern uint8_t gNumCameraSessions;
    123 
// Property-string -> HAL CDS (chroma down-sampling) mode.
const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
// Vendor video-HDR enum -> HAL video-HDR mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF,  CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,   CAM_VIDEO_HDR_MODE_ON }
};


// Vendor IR enum -> HAL IR mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON, CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

// android.control.effectMode -> HAL color effect.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

// android.control.awbMode -> HAL white-balance mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

// android.control.sceneMode -> HAL scene mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};

// android.control.afMode -> HAL focus mode.
// NOTE(review): ANDROID_CONTROL_AF_MODE_OFF appears twice, so both
// CAM_FOCUS_MODE_OFF and CAM_FOCUS_MODE_FIXED presumably resolve back to OFF
// when mapping HAL -> Android (first-match traversal) — confirm against the
// lookup helper.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};
    205 
// android.colorCorrection.aberrationMode -> HAL CAC (chromatic aberration correction) mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

// android.control.aeAntibandingMode -> HAL antibanding mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

// android.control.aeMode -> HAL flash mode implied by the AE mode
// (plain ON means AE without flash, hence CAM_FLASH_MODE_OFF).
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

// android.flash.mode -> HAL flash mode (manual flash control).
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

// android.statistics.faceDetectMode -> HAL face-detect mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE  },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
};

// android.lens.info.focusDistanceCalibration -> HAL focus calibration level.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
      CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
      CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
      CAM_FOCUS_CALIBRATED }
};

// android.lens.state -> HAL autofocus lens state.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY,    CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,        CAM_AF_LENS_STATE_MOVING}
};
    269 
// JPEG thumbnail sizes advertised in static metadata, as flat (width, height)
// pairs; the leading (0, 0) entry means "no thumbnail".
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};
    278 
// android.sensor.testPatternMode -> HAL sensor test pattern.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,          CAM_TEST_PATTERN_OFF   },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,  CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,   CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,          CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,      CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options some Android enum are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index which means that for HAL values that are map to different
 * Android values, the traverse logic will select the first one found.
 */
// android.sensor.referenceIlluminant1 -> HAL AWB illuminant.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};

// Requested frame rate -> HAL HFR (high frame rate) mode.
const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};
    327 
// HAL3 entry-point table handed to the framework via camera3_device_t::ops.
// Entries left NULL (register_stream_buffers, get_metadata_vendor_tag_ops)
// are not supported by this HAL.
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};

// initialise to some default value
// Per-camera session IDs; 0xDEADBEEF marks an unassigned slot.
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
    342 
/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface. Initializes all
 *              members to safe defaults, fills in the camera3_device_t
 *              vtable, creates the sync primitives, and reads the
 *              persist.camera.* debug/tuning properties.
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *   @callbacks : framework callback table, stored for later notifications
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mDummyBatchChannel(NULL),
      m_perfLock(),
      mCommon(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mLdafCalibExist(false),
      mPowerHintEnabled(false),
      mLastCustIntentFrmNum(-1),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pRelCamSyncHeap(NULL),
      m_pRelCamSyncBuf(NULL)
{
    getLogLevel();
    m_perfLock.lock_init();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    // Advertise HAL 3.4 unless the build explicitly pins to HAL 3.3.
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    pthread_cond_init(&mBuffersCond, NULL);

    pthread_cond_init(&mRequestCond, NULL);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    // Default request templates are built lazily; see the destructor for cleanup.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    // TNR (temporal noise reduction) toggles for preview and video streams.
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "0");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);

    //Load and read GPU library.
    // Query the GPU's pixel alignment so buffer strides match gralloc
    // allocations; fall back to 32-byte padding if the library or symbol is
    // unavailable. The library is closed immediately since only the queried
    // value is needed past this point.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_32;
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
         if (LINK_get_surface_pixel_alignment) {
             mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
         }
         dlclose(lib_surface_utils);
    }
}
    470 
/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface. Tears down the
 *              session in strict order: stop every stream/channel, delete
 *              the channels, send a final "unconfigure" to the backend,
 *              delete the channel handle, close the camera, then release
 *              pending request bookkeeping and sync primitives.
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    /* Turn off current power hint before acquiring perfLock in case they
     * conflict with each other */
    disablePowerHint();

    m_perfLock.lock_acq();

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    // Now that everything is stopped, delete the per-stream channels and
    // free the stream_info_t nodes themselves.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    // The picture channel is owned via mStreamInfo above; only clear the alias.
    mPictureChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            // An empty CAM_INTF_META_STREAM_INFO tells the backend to release
            // the previously configured streams.
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    // Drop any requests/buffers the framework never collected.
    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    mPendingReprocessResultList.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    // Free lazily-built default request templates (see constructor).
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    m_perfLock.lock_rel();
    m_perfLock.lock_deinit();

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}
    604 
    605 /*===========================================================================
    606  * FUNCTION   : erasePendingRequest
    607  *
    608  * DESCRIPTION: function to erase a desired pending request after freeing any
    609  *              allocated memory
    610  *
    611  * PARAMETERS :
    612  *   @i       : iterator pointing to pending request to be erased
    613  *
    614  * RETURN     : iterator pointing to the next request
    615  *==========================================================================*/
    616 QCamera3HardwareInterface::pendingRequestIterator
    617         QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
    618 {
    619     if (i->input_buffer != NULL) {
    620         free(i->input_buffer);
    621         i->input_buffer = NULL;
    622     }
    623     if (i->settings != NULL)
    624         free_camera_metadata((camera_metadata_t*)i->settings);
    625     return mPendingRequestsList.erase(i);
    626 }
    627 
    628 /*===========================================================================
    629  * FUNCTION   : camEvtHandle
    630  *
    631  * DESCRIPTION: Function registered to mm-camera-interface to handle events
    632  *
    633  * PARAMETERS :
    634  *   @camera_handle : interface layer camera handle
    635  *   @evt           : ptr to event
    636  *   @user_data     : user data ptr
    637  *
    638  * RETURN     : none
    639  *==========================================================================*/
    640 void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
    641                                           mm_camera_event_t *evt,
    642                                           void *user_data)
    643 {
    644     QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    645     if (obj && evt) {
    646         switch(evt->server_event_type) {
    647             case CAM_EVENT_TYPE_DAEMON_DIED:
    648                 pthread_mutex_lock(&obj->mMutex);
    649                 obj->mState = ERROR;
    650                 pthread_mutex_unlock(&obj->mMutex);
    651                 LOGE("Fatal, camera daemon died");
    652                 break;
    653 
    654             case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
    655                 LOGD("HAL got request pull from Daemon");
    656                 pthread_mutex_lock(&obj->mMutex);
    657                 obj->mWokenUpByDaemon = true;
    658                 obj->unblockRequestIfNecessary();
    659                 pthread_mutex_unlock(&obj->mMutex);
    660                 break;
    661 
    662             default:
    663                 LOGW("Warning: Unhandled event %d",
    664                         evt->server_event_type);
    665                 break;
    666         }
    667     } else {
    668         LOGE("NULL user_data/evt");
    669     }
    670 }
    671 
    672 /*===========================================================================
    673  * FUNCTION   : openCamera
    674  *
    675  * DESCRIPTION: open camera
    676  *
    677  * PARAMETERS :
    678  *   @hw_device  : double ptr for camera device struct
    679  *
    680  * RETURN     : int32_t type of status
    681  *              NO_ERROR  -- success
    682  *              none-zero failure code
    683  *==========================================================================*/
    684 int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
    685 {
    686     int rc = 0;
    687     if (mState != CLOSED) {
    688         *hw_device = NULL;
    689         return PERMISSION_DENIED;
    690     }
    691 
    692     m_perfLock.lock_acq();
    693     LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
    694              mCameraId);
    695 
    696     rc = openCamera();
    697     if (rc == 0) {
    698         *hw_device = &mCameraDevice.common;
    699     } else
    700         *hw_device = NULL;
    701 
    702     m_perfLock.lock_rel();
    703     LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
    704              mCameraId, rc);
    705 
    706     if (rc == NO_ERROR) {
    707         mState = OPENED;
    708     }
    709     return rc;
    710 }
    711 
    712 /*===========================================================================
    713  * FUNCTION   : openCamera
    714  *
    715  * DESCRIPTION: open camera
    716  *
    717  * PARAMETERS : none
    718  *
    719  * RETURN     : int32_t type of status
    720  *              NO_ERROR  -- success
    721  *              none-zero failure code
    722  *==========================================================================*/
    723 int QCamera3HardwareInterface::openCamera()
    724 {
    725     int rc = 0;
    726     char value[PROPERTY_VALUE_MAX];
    727 
    728     KPI_ATRACE_CALL();
    729     if (mCameraHandle) {
    730         LOGE("Failure: Camera already opened");
    731         return ALREADY_EXISTS;
    732     }
    733 
    734     rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    735     if (rc < 0) {
    736         LOGE("Failed to reserve flash for camera id: %d",
    737                 mCameraId);
    738         return UNKNOWN_ERROR;
    739     }
    740 
    741     rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    742     if (rc) {
    743         LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
    744         return rc;
    745     }
    746 
    747     if (!mCameraHandle) {
    748         LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
    749         return -ENODEV;
    750     }
    751 
    752     rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
    753             camEvtHandle, (void *)this);
    754 
    755     if (rc < 0) {
    756         LOGE("Error, failed to register event callback");
    757         /* Not closing camera here since it is already handled in destructor */
    758         return FAILED_TRANSACTION;
    759     }
    760 
    761     mExifParams.debug_params =
    762             (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    763     if (mExifParams.debug_params) {
    764         memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    765     } else {
    766         LOGE("Out of Memory. Allocation failed for 3A debug exif params");
    767         return NO_MEMORY;
    768     }
    769     mFirstConfiguration = true;
    770 
    771     //Notify display HAL that a camera session is active.
    772     //But avoid calling the same during bootup because camera service might open/close
    773     //cameras at boot time during its initialization and display service will also internally
    774     //wait for camera service to initialize first while calling this display API, resulting in a
    775     //deadlock situation. Since boot time camera open/close calls are made only to fetch
    776     //capabilities, no need of this display bw optimization.
    777     //Use "service.bootanim.exit" property to know boot status.
    778     property_get("service.bootanim.exit", value, "0");
    779     if (atoi(value) == 1) {
    780         pthread_mutex_lock(&gCamLock);
    781         if (gNumCameraSessions++ == 0) {
    782             setCameraLaunchStatus(true);
    783         }
    784         pthread_mutex_unlock(&gCamLock);
    785     }
    786 
    787     //fill the session id needed while linking dual cam
    788     pthread_mutex_lock(&gCamLock);
    789     rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
    790         &sessionId[mCameraId]);
    791     pthread_mutex_unlock(&gCamLock);
    792 
    793     if (rc < 0) {
    794         LOGE("Error, failed to get sessiion id");
    795         return UNKNOWN_ERROR;
    796     } else {
    797         //Allocate related cam sync buffer
    798         //this is needed for the payload that goes along with bundling cmd for related
    799         //camera use cases
    800         m_pRelCamSyncHeap = new QCamera3HeapMemory(1);
    801         rc = m_pRelCamSyncHeap->allocate(sizeof(cam_sync_related_sensors_event_info_t));
    802         if(rc != OK) {
    803             rc = NO_MEMORY;
    804             LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
    805             return NO_MEMORY;
    806         }
    807 
    808         //Map memory for related cam sync buffer
    809         rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
    810                 CAM_MAPPING_BUF_TYPE_SYNC_RELATED_SENSORS_BUF,
    811                 m_pRelCamSyncHeap->getFd(0),
    812                 sizeof(cam_sync_related_sensors_event_info_t),
    813                 m_pRelCamSyncHeap->getPtr(0));
    814         if(rc < 0) {
    815             LOGE("Dualcam: failed to map Related cam sync buffer");
    816             rc = FAILED_TRANSACTION;
    817             return NO_MEMORY;
    818         }
    819         m_pRelCamSyncBuf =
    820                 (cam_sync_related_sensors_event_info_t*) DATA_PTR(m_pRelCamSyncHeap,0);
    821     }
    822 
    823     LOGH("mCameraId=%d",mCameraId);
    824 
    825     return NO_ERROR;
    826 }
    827 
    828 /*===========================================================================
    829  * FUNCTION   : closeCamera
    830  *
    831  * DESCRIPTION: close camera
    832  *
    833  * PARAMETERS : none
    834  *
    835  * RETURN     : int32_t type of status
    836  *              NO_ERROR  -- success
    837  *              none-zero failure code
    838  *==========================================================================*/
    839 int QCamera3HardwareInterface::closeCamera()
    840 {
    841     KPI_ATRACE_CALL();
    842     int rc = NO_ERROR;
    843     char value[PROPERTY_VALUE_MAX];
    844 
    845     LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
    846              mCameraId);
    847 
    848     // unmap memory for related cam sync buffer
    849     mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
    850             CAM_MAPPING_BUF_TYPE_SYNC_RELATED_SENSORS_BUF);
    851     if (NULL != m_pRelCamSyncHeap) {
    852         m_pRelCamSyncHeap->deallocate();
    853         delete m_pRelCamSyncHeap;
    854         m_pRelCamSyncHeap = NULL;
    855         m_pRelCamSyncBuf = NULL;
    856     }
    857 
    858     rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    859     mCameraHandle = NULL;
    860 
    861     //reset session id to some invalid id
    862     pthread_mutex_lock(&gCamLock);
    863     sessionId[mCameraId] = 0xDEADBEEF;
    864     pthread_mutex_unlock(&gCamLock);
    865 
    866     //Notify display HAL that there is no active camera session
    867     //but avoid calling the same during bootup. Refer to openCamera
    868     //for more details.
    869     property_get("service.bootanim.exit", value, "0");
    870     if (atoi(value) == 1) {
    871         pthread_mutex_lock(&gCamLock);
    872         if (--gNumCameraSessions == 0) {
    873             setCameraLaunchStatus(false);
    874         }
    875         pthread_mutex_unlock(&gCamLock);
    876     }
    877 
    878     if (mExifParams.debug_params) {
    879         free(mExifParams.debug_params);
    880         mExifParams.debug_params = NULL;
    881     }
    882     if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
    883         LOGW("Failed to release flash for camera id: %d",
    884                 mCameraId);
    885     }
    886     mState = CLOSED;
    887     LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
    888          mCameraId, rc);
    889     return rc;
    890 }
    891 
    892 /*===========================================================================
    893  * FUNCTION   : initialize
    894  *
    895  * DESCRIPTION: Initialize frameworks callback functions
    896  *
    897  * PARAMETERS :
    898  *   @callback_ops : callback function to frameworks
    899  *
    900  * RETURN     :
    901  *
    902  *==========================================================================*/
    903 int QCamera3HardwareInterface::initialize(
    904         const struct camera3_callback_ops *callback_ops)
    905 {
    906     ATRACE_CALL();
    907     int rc;
    908 
    909     LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    910     pthread_mutex_lock(&mMutex);
    911 
    912     // Validate current state
    913     switch (mState) {
    914         case OPENED:
    915             /* valid state */
    916             break;
    917         default:
    918             LOGE("Invalid state %d", mState);
    919             rc = -ENODEV;
    920             goto err1;
    921     }
    922 
    923     rc = initParameters();
    924     if (rc < 0) {
    925         LOGE("initParamters failed %d", rc);
    926         goto err1;
    927     }
    928     mCallbackOps = callback_ops;
    929 
    930     mChannelHandle = mCameraHandle->ops->add_channel(
    931             mCameraHandle->camera_handle, NULL, NULL, this);
    932     if (mChannelHandle == 0) {
    933         LOGE("add_channel failed");
    934         rc = -ENOMEM;
    935         pthread_mutex_unlock(&mMutex);
    936         return rc;
    937     }
    938 
    939     pthread_mutex_unlock(&mMutex);
    940     mCameraInitialized = true;
    941     mState = INITIALIZED;
    942     LOGI("X");
    943     return 0;
    944 
    945 err1:
    946     pthread_mutex_unlock(&mMutex);
    947     return rc;
    948 }
    949 
    950 /*===========================================================================
    951  * FUNCTION   : validateStreamDimensions
    952  *
    953  * DESCRIPTION: Check if the configuration requested are those advertised
    954  *
    955  * PARAMETERS :
    956  *   @stream_list : streams to be configured
    957  *
    958  * RETURN     :
    959  *
    960  *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;

    camera3_stream_t *inputStream = NULL;
    /*
    * First pass: find the input stream, if any. More than one input
    * stream in a configuration is rejected outright.
    */
    for (size_t i = 0; i< streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
    * Second pass: for every requested stream, verify its dimensions
    * against the sizes this camera actually advertises.
    */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        // For 90/270-degree rotation the buffer is allocated transposed,
        // so validate against the swapped width/height.
        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
        * Supported sizes differ per stream format; check against the
        * table appropriate for this format.
        */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            // RAW formats: must match one of the advertised raw dimensions.
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            // JPEG (BLOB) streams: validate against the picture-size table.
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            // ZSL / reprocess streams are accepted at full active array
            // size; the `break` below exits the switch once matched.
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                                gCamCapability[mCameraId]->active_array_size.width) &&
                                ((int32_t)rotatedHeight ==
                                gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce ZSL stream
                 * set from frameworks always is full active array size
                 * but it is not clear from the spec if framework will always
                 * follow that, also we have logic to override to full array
                 * size, so keeping the logic lenient at the moment
                 */
            }
            // Non-ZSL processed streams fall back to the picture-size table.
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                    MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                            ((int32_t)rotatedHeight ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}
   1072 
   1073 /*==============================================================================
   1074  * FUNCTION   : isSupportChannelNeeded
   1075  *
   1076  * DESCRIPTION: Simple heuristic func to determine if support channels is needed
   1077  *
   1078  * PARAMETERS :
   1079  *   @stream_list : streams to be configured
   1080  *   @stream_config_info : the config info for streams to be configured
   1081  *
 * RETURN     : Boolean true/false decision
   1083  *
   1084  *==========================================================================*/
   1085 bool QCamera3HardwareInterface::isSupportChannelNeeded(
   1086         camera3_stream_configuration_t *streamList,
   1087         cam_stream_size_info_t stream_config_info)
   1088 {
   1089     uint32_t i;
   1090     bool pprocRequested = false;
   1091     /* Check for conditions where PProc pipeline does not have any streams*/
   1092     for (i = 0; i < stream_config_info.num_streams; i++) {
   1093         if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
   1094                 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
   1095             pprocRequested = true;
   1096             break;
   1097         }
   1098     }
   1099 
   1100     if (pprocRequested == false )
   1101         return true;
   1102 
   1103     /* Dummy stream needed if only raw or jpeg streams present */
   1104     for (i = 0; i < streamList->num_streams; i++) {
   1105         switch(streamList->streams[i]->format) {
   1106             case HAL_PIXEL_FORMAT_RAW_OPAQUE:
   1107             case HAL_PIXEL_FORMAT_RAW10:
   1108             case HAL_PIXEL_FORMAT_RAW16:
   1109             case HAL_PIXEL_FORMAT_BLOB:
   1110                 break;
   1111             default:
   1112                 return false;
   1113         }
   1114     }
   1115     return true;
   1116 }
   1117 
   1118 /*==============================================================================
   1119  * FUNCTION   : getSensorOutputSize
   1120  *
 * DESCRIPTION: Get sensor output size based on current stream configuration
   1122  *
   1123  * PARAMETERS :
   1124  *   @sensor_dim : sensor output dimension (output)
   1125  *
   1126  * RETURN     : int32_t type of status
   1127  *              NO_ERROR  -- success
   1128  *              none-zero failure code
   1129  *
   1130  *==========================================================================*/
   1131 int32_t QCamera3HardwareInterface::getSensorOutputSize(cam_dimension_t &sensor_dim)
   1132 {
   1133     int32_t rc = NO_ERROR;
   1134 
   1135     cam_dimension_t max_dim = {0, 0};
   1136     for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
   1137         if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
   1138             max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
   1139         if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
   1140             max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
   1141     }
   1142 
   1143     clear_metadata_buffer(mParameters);
   1144 
   1145     rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
   1146             max_dim);
   1147     if (rc != NO_ERROR) {
   1148         LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
   1149         return rc;
   1150     }
   1151 
   1152     rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
   1153     if (rc != NO_ERROR) {
   1154         LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
   1155         return rc;
   1156     }
   1157 
   1158     clear_metadata_buffer(mParameters);
   1159     ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_RAW_DIMENSION);
   1160 
   1161     rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
   1162             mParameters);
   1163     if (rc != NO_ERROR) {
   1164         LOGE("Failed to get CAM_INTF_PARM_RAW_DIMENSION");
   1165         return rc;
   1166     }
   1167 
   1168     READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_RAW_DIMENSION, sensor_dim);
   1169     LOGH("sensor output dimension = %d x %d", sensor_dim.width, sensor_dim.height);
   1170 
   1171     return rc;
   1172 }
   1173 
   1174 /*==============================================================================
   1175  * FUNCTION   : enablePowerHint
   1176  *
   1177  * DESCRIPTION: enable single powerhint for preview and different video modes.
   1178  *
   1179  * PARAMETERS :
   1180  *
   1181  * RETURN     : NULL
   1182  *
   1183  *==========================================================================*/
   1184 void QCamera3HardwareInterface::enablePowerHint()
   1185 {
   1186     if (!mPowerHintEnabled) {
   1187         m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, true);
   1188         mPowerHintEnabled = true;
   1189     }
   1190 }
   1191 
   1192 /*==============================================================================
   1193  * FUNCTION   : disablePowerHint
   1194  *
   1195  * DESCRIPTION: disable current powerhint.
   1196  *
   1197  * PARAMETERS :
   1198  *
   1199  * RETURN     : NULL
   1200  *
   1201  *==========================================================================*/
   1202 void QCamera3HardwareInterface::disablePowerHint()
   1203 {
   1204     if (mPowerHintEnabled) {
   1205         m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, false);
   1206         mPowerHintEnabled = false;
   1207     }
   1208 }
   1209 
   1210 /*==============================================================================
   1211  * FUNCTION   : addToPPFeatureMask
   1212  *
   1213  * DESCRIPTION: add additional features to pp feature mask based on
   1214  *              stream type and usecase
   1215  *
   1216  * PARAMETERS :
   1217  *   @stream_format : stream type for feature mask
   1218  *   @stream_idx : stream idx within postprocess_mask list to change
   1219  *
   1220  * RETURN     : NULL
   1221  *
   1222  *==========================================================================*/
   1223 void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
   1224         uint32_t stream_idx)
   1225 {
   1226     char feature_mask_value[PROPERTY_VALUE_MAX];
   1227     cam_feature_mask_t feature_mask;
   1228     int args_converted;
   1229     int property_len;
   1230 
   1231     /* Get feature mask from property */
   1232 #ifdef _LE_CAMERA_
   1233     char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
   1234     snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
   1235     property_len = property_get("persist.camera.hal3.feature",
   1236             feature_mask_value, swtnr_feature_mask_value);
   1237 #else
   1238     property_len = property_get("persist.camera.hal3.feature",
   1239             feature_mask_value, "0");
   1240 #endif
   1241     if ((property_len > 2) && (feature_mask_value[0] == '0') &&
   1242             (feature_mask_value[1] == 'x')) {
   1243         args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
   1244     } else {
   1245         args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
   1246     }
   1247     if (1 != args_converted) {
   1248         feature_mask = 0;
   1249         LOGE("Wrong feature mask %s", feature_mask_value);
   1250         return;
   1251     }
   1252 
   1253     switch (stream_format) {
   1254     case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
   1255         /* Add LLVD to pp feature mask only if video hint is enabled */
   1256         if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
   1257             mStreamConfigInfo.postprocess_mask[stream_idx]
   1258                     |= CAM_QTI_FEATURE_SW_TNR;
   1259             LOGH("Added SW TNR to pp feature mask");
   1260         } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
   1261             mStreamConfigInfo.postprocess_mask[stream_idx]
   1262                     |= CAM_QCOM_FEATURE_LLVD;
   1263             LOGH("Added LLVD SeeMore to pp feature mask");
   1264         }
   1265         break;
   1266     }
   1267     default:
   1268         break;
   1269     }
   1270     LOGD("PP feature mask %llx",
   1271             mStreamConfigInfo.postprocess_mask[stream_idx]);
   1272 }
   1273 
   1274 /*==============================================================================
   1275  * FUNCTION   : updateFpsInPreviewBuffer
   1276  *
   1277  * DESCRIPTION: update FPS information in preview buffer.
   1278  *
   1279  * PARAMETERS :
   1280  *   @metadata    : pointer to metadata buffer
   1281  *   @frame_number: frame_number to look for in pending buffer list
   1282  *
   1283  * RETURN     : None
   1284  *
   1285  *==========================================================================*/
void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
        uint32_t frame_number)
{
    // Walk every pending buffer of every pending request; for buffers that
    // belong to `frame_number` AND sit on a preview stream, stamp the
    // current max FPS (from CAM_INTF_PARM_FPS_RANGE metadata, if present)
    // into the gralloc handle so the display can adjust its refresh rate.
    for (List<PendingBuffersInRequest>::iterator req =
            mPendingBuffersMap.mPendingBuffersInRequest.begin();
            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
        for(List<PendingBufferInfo>::iterator j =
                req->mPendingBufferList.begin();
                j != req->mPendingBufferList.end(); j++) {
            QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
            // Only preview streams of the matching frame number qualify.
            if ((req->frame_number == frame_number) &&
                (channel->getStreamTypeMask() &
                (1U << CAM_STREAM_TYPE_PREVIEW))) {
                // IF_META_AVAILABLE guards the body on the FPS-range entry
                // being present in the metadata buffer.
                IF_META_AVAILABLE(cam_fps_range_t, float_range,
                    CAM_INTF_PARM_FPS_RANGE, metadata) {
                    typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
                    struct private_handle_t *priv_handle =
                        (struct private_handle_t *)(*(j->buffer));
                    setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
                }
            }
        }
    }
}
   1312 
   1313 /*==============================================================================
   1314  * FUNCTION   : updateTimeStampInPendingBuffers
   1315  *
   1316  * DESCRIPTION: update timestamp in display metadata for all pending buffers
   1317  *              of a frame number
   1318  *
   1319  * PARAMETERS :
   1320  *   @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
   1321  *   @timestamp   : timestamp to be set
   1322  *
   1323  * RETURN     : None
   1324  *
   1325  *==========================================================================*/
   1326 void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
   1327         uint32_t frameNumber, nsecs_t timestamp)
   1328 {
   1329     for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
   1330             req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
   1331         if (req->frame_number != frameNumber)
   1332             continue;
   1333 
   1334         for (auto k = req->mPendingBufferList.begin();
   1335                 k != req->mPendingBufferList.end(); k++ ) {
   1336             struct private_handle_t *priv_handle =
   1337                     (struct private_handle_t *) (*(k->buffer));
   1338             setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
   1339         }
   1340     }
   1341     return;
   1342 }
   1343 
   1344 /*===========================================================================
   1345  * FUNCTION   : configureStreams
   1346  *
   1347  * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
   1348  *              and output streams.
   1349  *
   1350  * PARAMETERS :
   1351  *   @stream_list : streams to be configured
   1352  *
   1353  * RETURN     :
   1354  *
   1355  *==========================================================================*/
   1356 int QCamera3HardwareInterface::configureStreams(
   1357         camera3_stream_configuration_t *streamList)
   1358 {
   1359     ATRACE_CALL();
   1360     int rc = 0;
   1361 
   1362     // Acquire perfLock before configure streams
   1363     m_perfLock.lock_acq();
   1364     rc = configureStreamsPerfLocked(streamList);
   1365     m_perfLock.lock_rel();
   1366 
   1367     return rc;
   1368 }
   1369 
   1370 /*===========================================================================
   1371  * FUNCTION   : configureStreamsPerfLocked
   1372  *
   1373  * DESCRIPTION: configureStreams while perfLock is held.
   1374  *
   1375  * PARAMETERS :
   1376  *   @stream_list : streams to be configured
   1377  *
   1378  * RETURN     : int32_t type of status
   1379  *              NO_ERROR  -- success
   1380  *              none-zero failure code
   1381  *==========================================================================*/
   1382 int QCamera3HardwareInterface::configureStreamsPerfLocked(
   1383         camera3_stream_configuration_t *streamList)
   1384 {
   1385     ATRACE_CALL();
   1386     int rc = 0;
   1387 
   1388     // Sanity check stream_list
   1389     if (streamList == NULL) {
   1390         LOGE("NULL stream configuration");
   1391         return BAD_VALUE;
   1392     }
   1393     if (streamList->streams == NULL) {
   1394         LOGE("NULL stream list");
   1395         return BAD_VALUE;
   1396     }
   1397 
   1398     if (streamList->num_streams < 1) {
   1399         LOGE("Bad number of streams requested: %d",
   1400                 streamList->num_streams);
   1401         return BAD_VALUE;
   1402     }
   1403 
   1404     if (streamList->num_streams >= MAX_NUM_STREAMS) {
   1405         LOGE("Maximum number of streams %d exceeded: %d",
   1406                 MAX_NUM_STREAMS, streamList->num_streams);
   1407         return BAD_VALUE;
   1408     }
   1409 
   1410     mOpMode = streamList->operation_mode;
   1411     LOGD("mOpMode: %d", mOpMode);
   1412 
   1413     /* first invalidate all the steams in the mStreamList
   1414      * if they appear again, they will be validated */
   1415     for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
   1416             it != mStreamInfo.end(); it++) {
   1417         QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
   1418         if (channel) {
   1419           channel->stop();
   1420         }
   1421         (*it)->status = INVALID;
   1422     }
   1423 
   1424     if (mRawDumpChannel) {
   1425         mRawDumpChannel->stop();
   1426         delete mRawDumpChannel;
   1427         mRawDumpChannel = NULL;
   1428     }
   1429 
   1430     if (mSupportChannel)
   1431         mSupportChannel->stop();
   1432 
   1433     if (mAnalysisChannel) {
   1434         mAnalysisChannel->stop();
   1435     }
   1436     if (mMetadataChannel) {
   1437         /* If content of mStreamInfo is not 0, there is metadata stream */
   1438         mMetadataChannel->stop();
   1439     }
   1440     if (mChannelHandle) {
   1441         mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
   1442                 mChannelHandle);
   1443         LOGD("stopping channel %d", mChannelHandle);
   1444     }
   1445 
   1446     pthread_mutex_lock(&mMutex);
   1447 
   1448     // Check state
   1449     switch (mState) {
   1450         case INITIALIZED:
   1451         case CONFIGURED:
   1452         case STARTED:
   1453             /* valid state */
   1454             break;
   1455         default:
   1456             LOGE("Invalid state %d", mState);
   1457             pthread_mutex_unlock(&mMutex);
   1458             return -ENODEV;
   1459     }
   1460 
   1461     /* Check whether we have video stream */
   1462     m_bIs4KVideo = false;
   1463     m_bIsVideo = false;
   1464     m_bEisSupportedSize = false;
   1465     m_bTnrEnabled = false;
   1466     bool isZsl = false;
   1467     uint32_t videoWidth = 0U;
   1468     uint32_t videoHeight = 0U;
   1469     size_t rawStreamCnt = 0;
   1470     size_t stallStreamCnt = 0;
   1471     size_t processedStreamCnt = 0;
   1472     // Number of streams on ISP encoder path
   1473     size_t numStreamsOnEncoder = 0;
   1474     size_t numYuv888OnEncoder = 0;
   1475     bool bYuv888OverrideJpeg = false;
   1476     cam_dimension_t largeYuv888Size = {0, 0};
   1477     cam_dimension_t maxViewfinderSize = {0, 0};
   1478     bool bJpegExceeds4K = false;
   1479     bool bJpegOnEncoder = false;
   1480     bool bUseCommonFeatureMask = false;
   1481     cam_feature_mask_t commonFeatureMask = 0;
   1482     bool bSmallJpegSize = false;
   1483     uint32_t width_ratio;
   1484     uint32_t height_ratio;
   1485     maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
   1486     camera3_stream_t *inputStream = NULL;
   1487     bool isJpeg = false;
   1488     cam_dimension_t jpegSize = {0, 0};
   1489 
   1490     cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
   1491 
   1492     /*EIS configuration*/
   1493     bool oisSupported = false;
   1494     uint8_t eis_prop_set;
   1495     uint32_t maxEisWidth = 0;
   1496     uint32_t maxEisHeight = 0;
   1497 
   1498     memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
   1499 
   1500     size_t count = IS_TYPE_MAX;
   1501     count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
   1502     for (size_t i = 0; i < count; i++) {
   1503         if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
   1504             (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
   1505             m_bEisSupported = true;
   1506             break;
   1507         }
   1508     }
   1509     count = CAM_OPT_STAB_MAX;
   1510     count = MIN(gCamCapability[mCameraId]->optical_stab_modes_count, count);
   1511     for (size_t i = 0; i < count; i++) {
   1512         if (gCamCapability[mCameraId]->optical_stab_modes[i] ==  CAM_OPT_STAB_ON) {
   1513             oisSupported = true;
   1514             break;
   1515         }
   1516     }
   1517 
   1518     if (m_bEisSupported) {
   1519         maxEisWidth = MAX_EIS_WIDTH;
   1520         maxEisHeight = MAX_EIS_HEIGHT;
   1521     }
   1522 
   1523     /* EIS setprop control */
   1524     char eis_prop[PROPERTY_VALUE_MAX];
   1525     memset(eis_prop, 0, sizeof(eis_prop));
   1526     property_get("persist.camera.eis.enable", eis_prop, "1");
   1527     eis_prop_set = (uint8_t)atoi(eis_prop);
   1528 
   1529     m_bEisEnable = eis_prop_set && (!oisSupported && m_bEisSupported) &&
   1530             (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
   1531 
   1532     LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d, oisSupported:%d ",
   1533             m_bEisEnable, eis_prop_set, m_bEisSupported, oisSupported);
   1534 
   1535     /* stream configurations */
   1536     for (size_t i = 0; i < streamList->num_streams; i++) {
   1537         camera3_stream_t *newStream = streamList->streams[i];
   1538         LOGI("stream[%d] type = %d, format = %d, width = %d, "
   1539                 "height = %d, rotation = %d, usage = 0x%x",
   1540                  i, newStream->stream_type, newStream->format,
   1541                 newStream->width, newStream->height, newStream->rotation,
   1542                 newStream->usage);
   1543         if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
   1544                 newStream->stream_type == CAMERA3_STREAM_INPUT){
   1545             isZsl = true;
   1546         }
   1547         if (newStream->stream_type == CAMERA3_STREAM_INPUT){
   1548             inputStream = newStream;
   1549         }
   1550 
   1551         if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
   1552             isJpeg = true;
   1553             jpegSize.width = newStream->width;
   1554             jpegSize.height = newStream->height;
   1555             if (newStream->width > VIDEO_4K_WIDTH ||
   1556                     newStream->height > VIDEO_4K_HEIGHT)
   1557                 bJpegExceeds4K = true;
   1558         }
   1559 
   1560         if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
   1561                 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
   1562             m_bIsVideo = true;
   1563             videoWidth = newStream->width;
   1564             videoHeight = newStream->height;
   1565             if ((VIDEO_4K_WIDTH <= newStream->width) &&
   1566                     (VIDEO_4K_HEIGHT <= newStream->height)) {
   1567                 m_bIs4KVideo = true;
   1568             }
   1569             m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
   1570                                   (newStream->height <= maxEisHeight);
   1571         }
   1572         if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
   1573                 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
   1574             switch (newStream->format) {
   1575             case HAL_PIXEL_FORMAT_BLOB:
   1576                 stallStreamCnt++;
   1577                 if (isOnEncoder(maxViewfinderSize, newStream->width,
   1578                         newStream->height)) {
   1579                     numStreamsOnEncoder++;
   1580                     bJpegOnEncoder = true;
   1581                 }
   1582                 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
   1583                         newStream->width);
   1584                 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
   1585                         newStream->height);;
   1586                 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
   1587                         "FATAL: max_downscale_factor cannot be zero and so assert");
   1588                 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
   1589                     (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
   1590                     LOGH("Setting small jpeg size flag to true");
   1591                     bSmallJpegSize = true;
   1592                 }
   1593                 break;
   1594             case HAL_PIXEL_FORMAT_RAW10:
   1595             case HAL_PIXEL_FORMAT_RAW_OPAQUE:
   1596             case HAL_PIXEL_FORMAT_RAW16:
   1597                 rawStreamCnt++;
   1598                 break;
   1599             case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
   1600                 processedStreamCnt++;
   1601                 if (isOnEncoder(maxViewfinderSize, newStream->width,
   1602                         newStream->height)) {
   1603                     if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
   1604                             !IS_USAGE_ZSL(newStream->usage)) {
   1605                         commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
   1606                     }
   1607                     numStreamsOnEncoder++;
   1608                 }
   1609                 break;
   1610             case HAL_PIXEL_FORMAT_YCbCr_420_888:
   1611                 processedStreamCnt++;
   1612                 if (isOnEncoder(maxViewfinderSize, newStream->width,
   1613                         newStream->height)) {
   1614                     // If Yuv888 size is not greater than 4K, set feature mask
   1615                     // to SUPERSET so that it support concurrent request on
   1616                     // YUV and JPEG.
   1617                     if (newStream->width <= VIDEO_4K_WIDTH &&
   1618                             newStream->height <= VIDEO_4K_HEIGHT) {
   1619                         commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
   1620                     }
   1621                     numStreamsOnEncoder++;
   1622                     numYuv888OnEncoder++;
   1623                     largeYuv888Size.width = newStream->width;
   1624                     largeYuv888Size.height = newStream->height;
   1625                 }
   1626                 break;
   1627             default:
   1628                 processedStreamCnt++;
   1629                 if (isOnEncoder(maxViewfinderSize, newStream->width,
   1630                         newStream->height)) {
   1631                     commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
   1632                     numStreamsOnEncoder++;
   1633                 }
   1634                 break;
   1635             }
   1636 
   1637         }
   1638     }
   1639 
   1640     if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
   1641             gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
   1642             !m_bIsVideo) {
   1643         m_bEisEnable = false;
   1644     }
   1645 
   1646     /* Logic to enable/disable TNR based on specific config size/etc.*/
   1647     if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
   1648             ((videoWidth == 1920 && videoHeight == 1080) ||
   1649             (videoWidth == 1280 && videoHeight == 720)) &&
   1650             (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
   1651         m_bTnrEnabled = true;
   1652 
   1653     /* Check if num_streams is sane */
   1654     if (stallStreamCnt > MAX_STALLING_STREAMS ||
   1655             rawStreamCnt > MAX_RAW_STREAMS ||
   1656             processedStreamCnt > MAX_PROCESSED_STREAMS) {
   1657         LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
   1658                  stallStreamCnt, rawStreamCnt, processedStreamCnt);
   1659         pthread_mutex_unlock(&mMutex);
   1660         return -EINVAL;
   1661     }
   1662     /* Check whether we have zsl stream or 4k video case */
   1663     if (isZsl && m_bIsVideo) {
   1664         LOGE("Currently invalid configuration ZSL&Video!");
   1665         pthread_mutex_unlock(&mMutex);
   1666         return -EINVAL;
   1667     }
   1668     /* Check if stream sizes are sane */
   1669     if (numStreamsOnEncoder > 2) {
   1670         LOGE("Number of streams on ISP encoder path exceeds limits of 2");
   1671         pthread_mutex_unlock(&mMutex);
   1672         return -EINVAL;
   1673     } else if (1 < numStreamsOnEncoder){
   1674         bUseCommonFeatureMask = true;
   1675         LOGH("Multiple streams above max viewfinder size, common mask needed");
   1676     }
   1677 
   1678     /* Check if BLOB size is greater than 4k in 4k recording case */
   1679     if (m_bIs4KVideo && bJpegExceeds4K) {
   1680         LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
   1681         pthread_mutex_unlock(&mMutex);
   1682         return -EINVAL;
   1683     }
   1684 
   1685     // When JPEG and preview streams share VFE output, CPP will not apply CAC2
   1686     // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
   1687     // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
   1688     // is not true. Otherwise testMandatoryOutputCombinations will fail with following
   1689     // configurations:
   1690     //    {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
   1691     //    {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
   1692     //    (These two configurations will not have CAC2 enabled even in HQ modes.)
   1693     if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
   1694         ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
   1695                 __func__);
   1696         pthread_mutex_unlock(&mMutex);
   1697         return -EINVAL;
   1698     }
   1699 
   1700     // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
   1701     // the YUV stream's size is greater or equal to the JPEG size, set common
   1702     // postprocess mask to NONE, so that we can take advantage of postproc bypass.
   1703     if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
   1704             jpegSize.width, jpegSize.height) &&
   1705             largeYuv888Size.width > jpegSize.width &&
   1706             largeYuv888Size.height > jpegSize.height) {
   1707         bYuv888OverrideJpeg = true;
   1708     } else if (!isJpeg && numStreamsOnEncoder > 1) {
   1709         commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
   1710     }
   1711 
   1712     LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
   1713             maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
   1714             commonFeatureMask);
   1715     LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
   1716             numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
   1717 
   1718     rc = validateStreamDimensions(streamList);
   1719     if (rc == NO_ERROR) {
   1720         rc = validateStreamRotations(streamList);
   1721     }
   1722     if (rc != NO_ERROR) {
   1723         LOGE("Invalid stream configuration requested!");
   1724         pthread_mutex_unlock(&mMutex);
   1725         return rc;
   1726     }
   1727 
   1728     camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
   1729     for (size_t i = 0; i < streamList->num_streams; i++) {
   1730         camera3_stream_t *newStream = streamList->streams[i];
   1731         LOGH("newStream type = %d, stream format = %d "
   1732                 "stream size : %d x %d, stream rotation = %d",
   1733                  newStream->stream_type, newStream->format,
   1734                 newStream->width, newStream->height, newStream->rotation);
    1735         //if the stream already exists in mStreamInfo, validate it
   1736         bool stream_exists = false;
   1737         for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
   1738                 it != mStreamInfo.end(); it++) {
   1739             if ((*it)->stream == newStream) {
   1740                 QCamera3ProcessingChannel *channel =
   1741                     (QCamera3ProcessingChannel*)(*it)->stream->priv;
   1742                 stream_exists = true;
   1743                 if (channel)
   1744                     delete channel;
   1745                 (*it)->status = VALID;
   1746                 (*it)->stream->priv = NULL;
   1747                 (*it)->channel = NULL;
   1748             }
   1749         }
   1750         if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
   1751             //new stream
   1752             stream_info_t* stream_info;
   1753             stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
   1754             if (!stream_info) {
   1755                LOGE("Could not allocate stream info");
   1756                rc = -ENOMEM;
   1757                pthread_mutex_unlock(&mMutex);
   1758                return rc;
   1759             }
   1760             stream_info->stream = newStream;
   1761             stream_info->status = VALID;
   1762             stream_info->channel = NULL;
   1763             mStreamInfo.push_back(stream_info);
   1764         }
   1765         /* Covers Opaque ZSL and API1 F/W ZSL */
   1766         if (IS_USAGE_ZSL(newStream->usage)
   1767                 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
   1768             if (zslStream != NULL) {
   1769                 LOGE("Multiple input/reprocess streams requested!");
   1770                 pthread_mutex_unlock(&mMutex);
   1771                 return BAD_VALUE;
   1772             }
   1773             zslStream = newStream;
   1774         }
   1775         /* Covers YUV reprocess */
   1776         if (inputStream != NULL) {
   1777             if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
   1778                     && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
   1779                     && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
   1780                     && inputStream->width == newStream->width
   1781                     && inputStream->height == newStream->height) {
   1782                 if (zslStream != NULL) {
   1783                     /* This scenario indicates multiple YUV streams with same size
   1784                      * as input stream have been requested, since zsl stream handle
   1785                      * is solely use for the purpose of overriding the size of streams
   1786                      * which share h/w streams we will just make a guess here as to
   1787                      * which of the stream is a ZSL stream, this will be refactored
   1788                      * once we make generic logic for streams sharing encoder output
   1789                      */
   1790                     LOGH("Warning, Multiple ip/reprocess streams requested!");
   1791                 }
   1792                 zslStream = newStream;
   1793             }
   1794         }
   1795     }
   1796 
   1797     /* If a zsl stream is set, we know that we have configured at least one input or
   1798        bidirectional stream */
   1799     if (NULL != zslStream) {
   1800         mInputStreamInfo.dim.width = (int32_t)zslStream->width;
   1801         mInputStreamInfo.dim.height = (int32_t)zslStream->height;
   1802         mInputStreamInfo.format = zslStream->format;
   1803         mInputStreamInfo.usage = zslStream->usage;
   1804         LOGD("Input stream configured! %d x %d, format %d, usage %d",
   1805                  mInputStreamInfo.dim.width,
   1806                 mInputStreamInfo.dim.height,
   1807                 mInputStreamInfo.format, mInputStreamInfo.usage);
   1808     }
   1809 
   1810     cleanAndSortStreamInfo();
   1811     if (mMetadataChannel) {
   1812         delete mMetadataChannel;
   1813         mMetadataChannel = NULL;
   1814     }
   1815     if (mSupportChannel) {
   1816         delete mSupportChannel;
   1817         mSupportChannel = NULL;
   1818     }
   1819 
   1820     if (mAnalysisChannel) {
   1821         delete mAnalysisChannel;
   1822         mAnalysisChannel = NULL;
   1823     }
   1824 
   1825     if (mDummyBatchChannel) {
   1826         delete mDummyBatchChannel;
   1827         mDummyBatchChannel = NULL;
   1828     }
   1829 
   1830     //Create metadata channel and initialize it
   1831     cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
   1832     setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
   1833             gCamCapability[mCameraId]->color_arrangement);
   1834     mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
   1835                     mChannelHandle, mCameraHandle->ops, captureResultCb,
   1836                     &padding_info, metadataFeatureMask, this);
   1837     if (mMetadataChannel == NULL) {
   1838         LOGE("failed to allocate metadata channel");
   1839         rc = -ENOMEM;
   1840         pthread_mutex_unlock(&mMutex);
   1841         return rc;
   1842     }
   1843     rc = mMetadataChannel->initialize(IS_TYPE_NONE);
   1844     if (rc < 0) {
   1845         LOGE("metadata channel initialization failed");
   1846         delete mMetadataChannel;
   1847         mMetadataChannel = NULL;
   1848         pthread_mutex_unlock(&mMutex);
   1849         return rc;
   1850     }
   1851 
   1852     // Create analysis stream all the time, even when h/w support is not available
   1853     {
   1854         cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
   1855         setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
   1856                 gCamCapability[mCameraId]->color_arrangement);
   1857         cam_analysis_info_t analysisInfo;
   1858         int32_t ret = NO_ERROR;
   1859         ret = mCommon.getAnalysisInfo(
   1860                 FALSE,
   1861                 TRUE,
   1862                 analysisFeatureMask,
   1863                 &analysisInfo);
   1864         if (ret == NO_ERROR) {
   1865             mAnalysisChannel = new QCamera3SupportChannel(
   1866                     mCameraHandle->camera_handle,
   1867                     mChannelHandle,
   1868                     mCameraHandle->ops,
   1869                     &analysisInfo.analysis_padding_info,
   1870                     analysisFeatureMask,
   1871                     CAM_STREAM_TYPE_ANALYSIS,
   1872                     &analysisInfo.analysis_max_res,
   1873                     (analysisInfo.analysis_format
   1874                     == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
   1875                     : CAM_FORMAT_YUV_420_NV21),
   1876                     analysisInfo.hw_analysis_supported,
   1877                     gCamCapability[mCameraId]->color_arrangement,
   1878                     this,
   1879                     0); // force buffer count to 0
   1880         } else {
   1881             LOGW("getAnalysisInfo failed, ret = %d", ret);
   1882         }
   1883         if (!mAnalysisChannel) {
   1884             LOGW("Analysis channel cannot be created");
   1885         }
   1886     }
   1887 
   1888     bool isRawStreamRequested = false;
   1889     memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
   1890     /* Allocate channel objects for the requested streams */
   1891     for (size_t i = 0; i < streamList->num_streams; i++) {
   1892         camera3_stream_t *newStream = streamList->streams[i];
   1893         uint32_t stream_usage = newStream->usage;
   1894         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
   1895         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
   1896         struct camera_info *p_info = NULL;
   1897         pthread_mutex_lock(&gCamLock);
   1898         p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
   1899         pthread_mutex_unlock(&gCamLock);
   1900         if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
   1901                 || IS_USAGE_ZSL(newStream->usage)) &&
   1902             newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
   1903             mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
   1904             if (bUseCommonFeatureMask) {
   1905                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
   1906                         commonFeatureMask;
   1907             } else {
   1908                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
   1909                         CAM_QCOM_FEATURE_NONE;
   1910             }
   1911 
   1912         } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
   1913                 LOGH("Input stream configured, reprocess config");
   1914         } else {
   1915             //for non zsl streams find out the format
   1916             switch (newStream->format) {
   1917             case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
   1918             {
   1919                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
   1920                         CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
   1921                 /* add additional features to pp feature mask */
   1922                 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
   1923                         mStreamConfigInfo.num_streams);
   1924 
   1925                 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
   1926                         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
   1927                                 CAM_STREAM_TYPE_VIDEO;
   1928                     if (m_bTnrEnabled && m_bTnrVideo) {
   1929                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
   1930                             CAM_QCOM_FEATURE_CPP_TNR;
   1931                         //TNR and CDS are mutually exclusive. So reset CDS from feature mask
   1932                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
   1933                                 ~CAM_QCOM_FEATURE_CDS;
   1934                     }
   1935                 } else {
   1936                         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
   1937                             CAM_STREAM_TYPE_PREVIEW;
   1938                     if (m_bTnrEnabled && m_bTnrPreview) {
   1939                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
   1940                                 CAM_QCOM_FEATURE_CPP_TNR;
   1941                         //TNR and CDS are mutually exclusive. So reset CDS from feature mask
   1942                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
   1943                                 ~CAM_QCOM_FEATURE_CDS;
   1944                     }
   1945                     padding_info.width_padding = mSurfaceStridePadding;
   1946                     padding_info.height_padding = CAM_PAD_TO_2;
   1947                 }
   1948                 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
   1949                         (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
   1950                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
   1951                             newStream->height;
   1952                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
   1953                             newStream->width;
   1954                 }
   1955             }
   1956             break;
   1957             case HAL_PIXEL_FORMAT_YCbCr_420_888:
   1958                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
   1959                 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
   1960                     if (bUseCommonFeatureMask)
   1961                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
   1962                                 commonFeatureMask;
   1963                     else
   1964                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
   1965                                 CAM_QCOM_FEATURE_NONE;
   1966                 } else {
   1967                     mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
   1968                             CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
   1969                 }
   1970             break;
   1971             case HAL_PIXEL_FORMAT_BLOB:
   1972                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
   1973                 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
   1974                 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
   1975                      mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
   1976                              CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
   1977                 } else {
   1978                     if (bUseCommonFeatureMask &&
   1979                             isOnEncoder(maxViewfinderSize, newStream->width,
   1980                             newStream->height)) {
   1981                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
   1982                     } else {
   1983                         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
   1984                     }
   1985                 }
   1986                 if (isZsl) {
   1987                     if (zslStream) {
   1988                         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
   1989                                 (int32_t)zslStream->width;
   1990                         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
   1991                                 (int32_t)zslStream->height;
   1992                     } else {
   1993                         LOGE("Error, No ZSL stream identified");
   1994                         pthread_mutex_unlock(&mMutex);
   1995                         return -EINVAL;
   1996                     }
   1997                 } else if (m_bIs4KVideo) {
   1998                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
   1999                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
   2000                 } else if (bYuv888OverrideJpeg) {
   2001                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
   2002                             (int32_t)largeYuv888Size.width;
   2003                     mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
   2004                             (int32_t)largeYuv888Size.height;
   2005                 }
   2006                 break;
   2007             case HAL_PIXEL_FORMAT_RAW_OPAQUE:
   2008             case HAL_PIXEL_FORMAT_RAW16:
   2009             case HAL_PIXEL_FORMAT_RAW10:
   2010                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
   2011                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
   2012                 isRawStreamRequested = true;
   2013                 break;
   2014             default:
   2015                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
   2016                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
   2017                 break;
   2018             }
   2019         }
   2020 
   2021         setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
   2022                 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
   2023                 gCamCapability[mCameraId]->color_arrangement);
   2024 
   2025         if (newStream->priv == NULL) {
   2026             //New stream, construct channel
   2027             switch (newStream->stream_type) {
   2028             case CAMERA3_STREAM_INPUT:
   2029                 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
   2030                 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
   2031                 break;
   2032             case CAMERA3_STREAM_BIDIRECTIONAL:
   2033                 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
   2034                     GRALLOC_USAGE_HW_CAMERA_WRITE;
   2035                 break;
   2036             case CAMERA3_STREAM_OUTPUT:
   2037                 /* For video encoding stream, set read/write rarely
   2038                  * flag so that they may be set to un-cached */
   2039                 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
   2040                     newStream->usage |=
   2041                          (GRALLOC_USAGE_SW_READ_RARELY |
   2042                          GRALLOC_USAGE_SW_WRITE_RARELY |
   2043                          GRALLOC_USAGE_HW_CAMERA_WRITE);
   2044                 else if (IS_USAGE_ZSL(newStream->usage))
   2045                 {
   2046                     LOGD("ZSL usage flag skipping");
   2047                 }
   2048                 else if (newStream == zslStream
   2049                         || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
   2050                     newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
   2051                 } else
   2052                     newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
   2053                 break;
   2054             default:
   2055                 LOGE("Invalid stream_type %d", newStream->stream_type);
   2056                 break;
   2057             }
   2058 
   2059             if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
   2060                     newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
   2061                 QCamera3ProcessingChannel *channel = NULL;
   2062                 switch (newStream->format) {
   2063                 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
   2064                     if ((newStream->usage &
   2065                             private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
   2066                             (streamList->operation_mode ==
   2067                             CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
   2068                     ) {
   2069                         channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
   2070                                 mChannelHandle, mCameraHandle->ops, captureResultCb,
   2071                                 &gCamCapability[mCameraId]->padding_info,
   2072                                 this,
   2073                                 newStream,
   2074                                 (cam_stream_type_t)
   2075                                         mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
   2076                                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
   2077                                 mMetadataChannel,
   2078                                 0); //heap buffers are not required for HFR video channel
   2079                         if (channel == NULL) {
   2080                             LOGE("allocation of channel failed");
   2081                             pthread_mutex_unlock(&mMutex);
   2082                             return -ENOMEM;
   2083                         }
   2084                         //channel->getNumBuffers() will return 0 here so use
   2085                         //MAX_INFLIGH_HFR_REQUESTS
   2086                         newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
   2087                         newStream->priv = channel;
   2088                         LOGI("num video buffers in HFR mode: %d",
   2089                                  MAX_INFLIGHT_HFR_REQUESTS);
   2090                     } else {
   2091                         /* Copy stream contents in HFR preview only case to create
   2092                          * dummy batch channel so that sensor streaming is in
   2093                          * HFR mode */
   2094                         if (!m_bIsVideo && (streamList->operation_mode ==
   2095                                 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
   2096                             mDummyBatchStream = *newStream;
   2097                         }
   2098                         channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
   2099                                 mChannelHandle, mCameraHandle->ops, captureResultCb,
   2100                                 &gCamCapability[mCameraId]->padding_info,
   2101                                 this,
   2102                                 newStream,
   2103                                 (cam_stream_type_t)
   2104                                         mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
   2105                                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
   2106                                 mMetadataChannel,
   2107                                 MAX_INFLIGHT_REQUESTS);
   2108                         if (channel == NULL) {
   2109                             LOGE("allocation of channel failed");
   2110                             pthread_mutex_unlock(&mMutex);
   2111                             return -ENOMEM;
   2112                         }
   2113                         newStream->max_buffers = channel->getNumBuffers();
   2114                         newStream->priv = channel;
   2115                     }
   2116                     break;
   2117                 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
   2118                     channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
   2119                             mChannelHandle,
   2120                             mCameraHandle->ops, captureResultCb,
   2121                             &padding_info,
   2122                             this,
   2123                             newStream,
   2124                             (cam_stream_type_t)
   2125                                     mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
   2126                             mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
   2127                             mMetadataChannel);
   2128                     if (channel == NULL) {
   2129                         LOGE("allocation of YUV channel failed");
   2130                         pthread_mutex_unlock(&mMutex);
   2131                         return -ENOMEM;
   2132                     }
   2133                     newStream->max_buffers = channel->getNumBuffers();
   2134                     newStream->priv = channel;
   2135                     break;
   2136                 }
   2137                 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
   2138                 case HAL_PIXEL_FORMAT_RAW16:
   2139                 case HAL_PIXEL_FORMAT_RAW10:
   2140                     mRawChannel = new QCamera3RawChannel(
   2141                             mCameraHandle->camera_handle, mChannelHandle,
   2142                             mCameraHandle->ops, captureResultCb,
   2143                             &padding_info,
   2144                             this, newStream,
   2145                             mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
   2146                             mMetadataChannel,
   2147                             (newStream->format == HAL_PIXEL_FORMAT_RAW16));
   2148                     if (mRawChannel == NULL) {
   2149                         LOGE("allocation of raw channel failed");
   2150                         pthread_mutex_unlock(&mMutex);
   2151                         return -ENOMEM;
   2152                     }
   2153                     newStream->max_buffers = mRawChannel->getNumBuffers();
   2154                     newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
   2155                     break;
   2156                 case HAL_PIXEL_FORMAT_BLOB:
   2157                     // Max live snapshot inflight buffer is 1. This is to mitigate
   2158                     // frame drop issues for video snapshot. The more buffers being
   2159                     // allocated, the more frame drops there are.
   2160                     mPictureChannel = new QCamera3PicChannel(
   2161                             mCameraHandle->camera_handle, mChannelHandle,
   2162                             mCameraHandle->ops, captureResultCb,
   2163                             &padding_info, this, newStream,
   2164                             mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
   2165                             m_bIs4KVideo, isZsl, mMetadataChannel,
   2166                             (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
   2167                     if (mPictureChannel == NULL) {
   2168                         LOGE("allocation of channel failed");
   2169                         pthread_mutex_unlock(&mMutex);
   2170                         return -ENOMEM;
   2171                     }
   2172                     newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
   2173                     newStream->max_buffers = mPictureChannel->getNumBuffers();
   2174                     mPictureChannel->overrideYuvSize(
   2175                             mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
   2176                             mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
   2177                     break;
   2178 
   2179                 default:
   2180                     LOGE("not a supported format 0x%x", newStream->format);
   2181                     break;
   2182                 }
   2183             } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
   2184                 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
   2185             } else {
   2186                 LOGE("Error, Unknown stream type");
   2187                 pthread_mutex_unlock(&mMutex);
   2188                 return -EINVAL;
   2189             }
   2190 
   2191             QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
   2192             if (channel != NULL && channel->isUBWCEnabled()) {
   2193                 cam_format_t fmt = channel->getStreamDefaultFormat(
   2194                         mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
   2195                         newStream->width, newStream->height);
   2196                 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
   2197                     newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
   2198                 }
   2199             }
   2200 
   2201             for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
   2202                     it != mStreamInfo.end(); it++) {
   2203                 if ((*it)->stream == newStream) {
   2204                     (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
   2205                     break;
   2206                 }
   2207             }
   2208         } else {
   2209             // Channel already exists for this stream
   2210             // Do nothing for now
   2211         }
   2212         padding_info = gCamCapability[mCameraId]->padding_info;
   2213 
   2214         /* Do not add entries for input stream in metastream info
   2215          * since there is no real stream associated with it
   2216          */
   2217         if (newStream->stream_type != CAMERA3_STREAM_INPUT)
   2218             mStreamConfigInfo.num_streams++;
   2219     }
   2220 
   2221     //RAW DUMP channel
   2222     if (mEnableRawDump && isRawStreamRequested == false){
   2223         cam_dimension_t rawDumpSize;
   2224         rawDumpSize = getMaxRawSize(mCameraId);
   2225         cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
   2226         setPAAFSupport(rawDumpFeatureMask,
   2227                 CAM_STREAM_TYPE_RAW,
   2228                 gCamCapability[mCameraId]->color_arrangement);
   2229         mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
   2230                                   mChannelHandle,
   2231                                   mCameraHandle->ops,
   2232                                   rawDumpSize,
   2233                                   &padding_info,
   2234                                   this, rawDumpFeatureMask);
   2235         if (!mRawDumpChannel) {
   2236             LOGE("Raw Dump channel cannot be created");
   2237             pthread_mutex_unlock(&mMutex);
   2238             return -ENOMEM;
   2239         }
   2240     }
   2241 
   2242 
   2243     if (mAnalysisChannel) {
   2244         cam_analysis_info_t analysisInfo;
   2245         memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
   2246         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
   2247                 CAM_STREAM_TYPE_ANALYSIS;
   2248         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
   2249                 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
   2250         setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
   2251                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
   2252                 gCamCapability[mCameraId]->color_arrangement);
   2253         rc = mCommon.getAnalysisInfo(FALSE, TRUE,
   2254                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
   2255                 &analysisInfo);
   2256         if (rc != NO_ERROR) {
   2257             LOGE("getAnalysisInfo failed, ret = %d", rc);
   2258             pthread_mutex_unlock(&mMutex);
   2259             return rc;
   2260         }
   2261         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
   2262                 analysisInfo.analysis_max_res;
   2263         mStreamConfigInfo.num_streams++;
   2264     }
   2265 
   2266     if (isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
   2267         cam_analysis_info_t supportInfo;
   2268         memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
   2269         cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
   2270         setPAAFSupport(callbackFeatureMask,
   2271                 CAM_STREAM_TYPE_CALLBACK,
   2272                 gCamCapability[mCameraId]->color_arrangement);
   2273         int32_t ret = NO_ERROR;
   2274         ret = mCommon.getAnalysisInfo(FALSE, TRUE, callbackFeatureMask, &supportInfo);
   2275         if (ret != NO_ERROR) {
   2276             /* Ignore the error for Mono camera
   2277              * because the PAAF bit mask is only set
   2278              * for CAM_STREAM_TYPE_ANALYSIS stream type
   2279              */
   2280             if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
   2281                 LOGW("getAnalysisInfo failed, ret = %d", ret);
   2282             }
   2283         }
   2284         mSupportChannel = new QCamera3SupportChannel(
   2285                 mCameraHandle->camera_handle,
   2286                 mChannelHandle,
   2287                 mCameraHandle->ops,
   2288                 &gCamCapability[mCameraId]->padding_info,
   2289                 callbackFeatureMask,
   2290                 CAM_STREAM_TYPE_CALLBACK,
   2291                 &QCamera3SupportChannel::kDim,
   2292                 CAM_FORMAT_YUV_420_NV21,
   2293                 supportInfo.hw_analysis_supported,
   2294                 gCamCapability[mCameraId]->color_arrangement,
   2295                 this);
   2296         if (!mSupportChannel) {
   2297             LOGE("dummy channel cannot be created");
   2298             pthread_mutex_unlock(&mMutex);
   2299             return -ENOMEM;
   2300         }
   2301     }
   2302 
   2303     if (mSupportChannel) {
   2304         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
   2305                 QCamera3SupportChannel::kDim;
   2306         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
   2307                 CAM_STREAM_TYPE_CALLBACK;
   2308         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
   2309                 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
   2310         setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
   2311                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
   2312                 gCamCapability[mCameraId]->color_arrangement);
   2313         mStreamConfigInfo.num_streams++;
   2314     }
   2315 
   2316     if (mRawDumpChannel) {
   2317         cam_dimension_t rawSize;
   2318         rawSize = getMaxRawSize(mCameraId);
   2319         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
   2320                 rawSize;
   2321         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
   2322                 CAM_STREAM_TYPE_RAW;
   2323         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
   2324                 CAM_QCOM_FEATURE_NONE;
   2325         setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
   2326                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
   2327                 gCamCapability[mCameraId]->color_arrangement);
   2328         mStreamConfigInfo.num_streams++;
   2329     }
   2330     /* In HFR mode, if video stream is not added, create a dummy channel so that
   2331      * ISP can create a batch mode even for preview only case. This channel is
   2332      * never 'start'ed (no stream-on), it is only 'initialized'  */
   2333     if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
   2334             !m_bIsVideo) {
   2335         cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
   2336         setPAAFSupport(dummyFeatureMask,
   2337                 CAM_STREAM_TYPE_VIDEO,
   2338                 gCamCapability[mCameraId]->color_arrangement);
   2339         mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
   2340                 mChannelHandle,
   2341                 mCameraHandle->ops, captureResultCb,
   2342                 &gCamCapability[mCameraId]->padding_info,
   2343                 this,
   2344                 &mDummyBatchStream,
   2345                 CAM_STREAM_TYPE_VIDEO,
   2346                 dummyFeatureMask,
   2347                 mMetadataChannel);
   2348         if (NULL == mDummyBatchChannel) {
   2349             LOGE("creation of mDummyBatchChannel failed."
   2350                     "Preview will use non-hfr sensor mode ");
   2351         }
   2352     }
   2353     if (mDummyBatchChannel) {
   2354         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
   2355                 mDummyBatchStream.width;
   2356         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
   2357                 mDummyBatchStream.height;
   2358         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
   2359                 CAM_STREAM_TYPE_VIDEO;
   2360         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
   2361                 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
   2362         setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
   2363                 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
   2364                 gCamCapability[mCameraId]->color_arrangement);
   2365         mStreamConfigInfo.num_streams++;
   2366     }
   2367 
   2368     mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
   2369     mStreamConfigInfo.buffer_info.max_buffers =
   2370             m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
   2371 
   2372     /* Initialize mPendingRequestInfo and mPendingBuffersMap */
   2373     for (pendingRequestIterator i = mPendingRequestsList.begin();
   2374             i != mPendingRequestsList.end();) {
   2375         i = erasePendingRequest(i);
   2376     }
   2377     mPendingFrameDropList.clear();
   2378     // Initialize/Reset the pending buffers list
   2379     for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
   2380         req.mPendingBufferList.clear();
   2381     }
   2382     mPendingBuffersMap.mPendingBuffersInRequest.clear();
   2383 
   2384     mPendingReprocessResultList.clear();
   2385 
   2386     mCurJpegMeta.clear();
   2387     //Get min frame duration for this streams configuration
   2388     deriveMinFrameDuration();
   2389 
   2390     // Update state
   2391     mState = CONFIGURED;
   2392 
   2393     pthread_mutex_unlock(&mMutex);
   2394 
   2395     return rc;
   2396 }
   2397 
   2398 /*===========================================================================
   2399  * FUNCTION   : validateCaptureRequest
   2400  *
   2401  * DESCRIPTION: validate a capture request from camera service
   2402  *
   2403  * PARAMETERS :
   2404  *   @request : request from framework to process
   2405  *
   2406  * RETURN     :
   2407  *
   2408  *==========================================================================*/
   2409 int QCamera3HardwareInterface::validateCaptureRequest(
   2410                     camera3_capture_request_t *request)
   2411 {
   2412     ssize_t idx = 0;
   2413     const camera3_stream_buffer_t *b;
   2414     CameraMetadata meta;
   2415 
   2416     /* Sanity check the request */
   2417     if (request == NULL) {
   2418         LOGE("NULL capture request");
   2419         return BAD_VALUE;
   2420     }
   2421 
   2422     if ((request->settings == NULL) && (mState == CONFIGURED)) {
   2423         /*settings cannot be null for the first request*/
   2424         return BAD_VALUE;
   2425     }
   2426 
   2427     uint32_t frameNumber = request->frame_number;
   2428     if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
   2429         LOGE("Request %d: No output buffers provided!",
   2430                 __FUNCTION__, frameNumber);
   2431         return BAD_VALUE;
   2432     }
   2433     if (request->num_output_buffers >= MAX_NUM_STREAMS) {
   2434         LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
   2435                  request->num_output_buffers, MAX_NUM_STREAMS);
   2436         return BAD_VALUE;
   2437     }
   2438     if (request->input_buffer != NULL) {
   2439         b = request->input_buffer;
   2440         if (b->status != CAMERA3_BUFFER_STATUS_OK) {
   2441             LOGE("Request %d: Buffer %ld: Status not OK!",
   2442                      frameNumber, (long)idx);
   2443             return BAD_VALUE;
   2444         }
   2445         if (b->release_fence != -1) {
   2446             LOGE("Request %d: Buffer %ld: Has a release fence!",
   2447                      frameNumber, (long)idx);
   2448             return BAD_VALUE;
   2449         }
   2450         if (b->buffer == NULL) {
   2451             LOGE("Request %d: Buffer %ld: NULL buffer handle!",
   2452                      frameNumber, (long)idx);
   2453             return BAD_VALUE;
   2454         }
   2455     }
   2456 
   2457     // Validate all buffers
   2458     b = request->output_buffers;
   2459     do {
   2460         QCamera3ProcessingChannel *channel =
   2461                 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
   2462         if (channel == NULL) {
   2463             LOGE("Request %d: Buffer %ld: Unconfigured stream!",
   2464                      frameNumber, (long)idx);
   2465             return BAD_VALUE;
   2466         }
   2467         if (b->status != CAMERA3_BUFFER_STATUS_OK) {
   2468             LOGE("Request %d: Buffer %ld: Status not OK!",
   2469                      frameNumber, (long)idx);
   2470             return BAD_VALUE;
   2471         }
   2472         if (b->release_fence != -1) {
   2473             LOGE("Request %d: Buffer %ld: Has a release fence!",
   2474                      frameNumber, (long)idx);
   2475             return BAD_VALUE;
   2476         }
   2477         if (b->buffer == NULL) {
   2478             LOGE("Request %d: Buffer %ld: NULL buffer handle!",
   2479                      frameNumber, (long)idx);
   2480             return BAD_VALUE;
   2481         }
   2482         if (*(b->buffer) == NULL) {
   2483             LOGE("Request %d: Buffer %ld: NULL private handle!",
   2484                      frameNumber, (long)idx);
   2485             return BAD_VALUE;
   2486         }
   2487         idx++;
   2488         b = request->output_buffers + idx;
   2489     } while (idx < (ssize_t)request->num_output_buffers);
   2490 
   2491     return NO_ERROR;
   2492 }
   2493 
   2494 /*===========================================================================
   2495  * FUNCTION   : deriveMinFrameDuration
   2496  *
    2497  * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
   2498  *              on currently configured streams.
   2499  *
   2500  * PARAMETERS : NONE
   2501  *
   2502  * RETURN     : NONE
   2503  *
   2504  *==========================================================================*/
   2505 void QCamera3HardwareInterface::deriveMinFrameDuration()
   2506 {
   2507     int32_t maxJpegDim, maxProcessedDim, maxRawDim;
   2508 
   2509     maxJpegDim = 0;
   2510     maxProcessedDim = 0;
   2511     maxRawDim = 0;
   2512 
   2513     // Figure out maximum jpeg, processed, and raw dimensions
   2514     for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
   2515         it != mStreamInfo.end(); it++) {
   2516 
   2517         // Input stream doesn't have valid stream_type
   2518         if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
   2519             continue;
   2520 
   2521         int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
   2522         if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
   2523             if (dimension > maxJpegDim)
   2524                 maxJpegDim = dimension;
   2525         } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
   2526                 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
   2527                 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
   2528             if (dimension > maxRawDim)
   2529                 maxRawDim = dimension;
   2530         } else {
   2531             if (dimension > maxProcessedDim)
   2532                 maxProcessedDim = dimension;
   2533         }
   2534     }
   2535 
   2536     size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
   2537             MAX_SIZES_CNT);
   2538 
   2539     //Assume all jpeg dimensions are in processed dimensions.
   2540     if (maxJpegDim > maxProcessedDim)
   2541         maxProcessedDim = maxJpegDim;
   2542     //Find the smallest raw dimension that is greater or equal to jpeg dimension
   2543     if (maxProcessedDim > maxRawDim) {
   2544         maxRawDim = INT32_MAX;
   2545 
   2546         for (size_t i = 0; i < count; i++) {
   2547             int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
   2548                     gCamCapability[mCameraId]->raw_dim[i].height;
   2549             if (dimension >= maxProcessedDim && dimension < maxRawDim)
   2550                 maxRawDim = dimension;
   2551         }
   2552     }
   2553 
   2554     //Find minimum durations for processed, jpeg, and raw
   2555     for (size_t i = 0; i < count; i++) {
   2556         if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
   2557                 gCamCapability[mCameraId]->raw_dim[i].height) {
   2558             mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
   2559             break;
   2560         }
   2561     }
   2562     count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
   2563     for (size_t i = 0; i < count; i++) {
   2564         if (maxProcessedDim ==
   2565                 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
   2566                 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
   2567             mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
   2568             mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
   2569             break;
   2570         }
   2571     }
   2572 }
   2573 
   2574 /*===========================================================================
   2575  * FUNCTION   : getMinFrameDuration
   2576  *
    2577  * DESCRIPTION: get minimum frame duration based on the current maximum frame durations
    2578  *              and current request configuration.
    2579  *
    2580  * PARAMETERS : @request: request sent by the frameworks
    2581  *
    2582  * RETURN     : min frame duration for a particular request
   2583  *
   2584  *==========================================================================*/
   2585 int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
   2586 {
   2587     bool hasJpegStream = false;
   2588     bool hasRawStream = false;
   2589     for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
   2590         const camera3_stream_t *stream = request->output_buffers[i].stream;
   2591         if (stream->format == HAL_PIXEL_FORMAT_BLOB)
   2592             hasJpegStream = true;
   2593         else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
   2594                 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
   2595                 stream->format == HAL_PIXEL_FORMAT_RAW16)
   2596             hasRawStream = true;
   2597     }
   2598 
   2599     if (!hasJpegStream)
   2600         return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
   2601     else
   2602         return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
   2603 }
   2604 
   2605 /*===========================================================================
   2606  * FUNCTION   : handleBuffersDuringFlushLock
   2607  *
   2608  * DESCRIPTION: Account for buffers returned from back-end during flush
   2609  *              This function is executed while mMutex is held by the caller.
   2610  *
   2611  * PARAMETERS :
   2612  *   @buffer: image buffer for the callback
   2613  *
   2614  * RETURN     :
   2615  *==========================================================================*/
   2616 void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
   2617 {
   2618     bool buffer_found = false;
   2619     for (List<PendingBuffersInRequest>::iterator req =
   2620             mPendingBuffersMap.mPendingBuffersInRequest.begin();
   2621             req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
   2622         for (List<PendingBufferInfo>::iterator i =
   2623                 req->mPendingBufferList.begin();
   2624                 i != req->mPendingBufferList.end(); i++) {
   2625             if (i->buffer == buffer->buffer) {
   2626                 mPendingBuffersMap.numPendingBufsAtFlush--;
   2627                 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
   2628                     buffer->buffer, req->frame_number,
   2629                     mPendingBuffersMap.numPendingBufsAtFlush);
   2630                 buffer_found = true;
   2631                 break;
   2632             }
   2633         }
   2634         if (buffer_found) {
   2635             break;
   2636         }
   2637     }
   2638     if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
   2639         //signal the flush()
   2640         LOGD("All buffers returned to HAL. Continue flush");
   2641         pthread_cond_signal(&mBuffersCond);
   2642     }
   2643 }
   2644 
   2645 
   2646 /*===========================================================================
   2647  * FUNCTION   : handlePendingReprocResults
   2648  *
   2649  * DESCRIPTION: check and notify on any pending reprocess results
   2650  *
   2651  * PARAMETERS :
   2652  *   @frame_number   : Pending request frame number
   2653  *
   2654  * RETURN     : int32_t type of status
   2655  *              NO_ERROR  -- success
   2656  *              none-zero failure code
   2657  *==========================================================================*/
   2658 int32_t QCamera3HardwareInterface::handlePendingReprocResults(uint32_t frame_number)
   2659 {
   2660     for (List<PendingReprocessResult>::iterator j = mPendingReprocessResultList.begin();
   2661             j != mPendingReprocessResultList.end(); j++) {
   2662         if (j->frame_number == frame_number) {
   2663             mCallbackOps->notify(mCallbackOps, &j->notify_msg);
   2664 
   2665             LOGD("Delayed reprocess notify %d",
   2666                     frame_number);
   2667 
   2668             for (pendingRequestIterator k = mPendingRequestsList.begin();
   2669                     k != mPendingRequestsList.end(); k++) {
   2670 
   2671                 if (k->frame_number == j->frame_number) {
   2672                     LOGD("Found reprocess frame number %d in pending reprocess List "
   2673                             "Take it out!!",
   2674                             k->frame_number);
   2675 
   2676                     camera3_capture_result result;
   2677                     memset(&result, 0, sizeof(camera3_capture_result));
   2678                     result.frame_number = frame_number;
   2679                     result.num_output_buffers = 1;
   2680                     result.output_buffers =  &j->buffer;
   2681                     result.input_buffer = k->input_buffer;
   2682                     result.result = k->settings;
   2683                     result.partial_result = PARTIAL_RESULT_COUNT;
   2684                     mCallbackOps->process_capture_result(mCallbackOps, &result);
   2685 
   2686                     erasePendingRequest(k);
   2687                     break;
   2688                 }
   2689             }
   2690             mPendingReprocessResultList.erase(j);
   2691             break;
   2692         }
   2693     }
   2694     return NO_ERROR;
   2695 }
   2696 
   2697 /*===========================================================================
   2698  * FUNCTION   : handleBatchMetadata
   2699  *
   2700  * DESCRIPTION: Handles metadata buffer callback in batch mode
   2701  *
   2702  * PARAMETERS : @metadata_buf: metadata buffer
   2703  *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
   2704  *                 the meta buf in this method
   2705  *
   2706  * RETURN     :
   2707  *
   2708  *==========================================================================*/
   2709 void QCamera3HardwareInterface::handleBatchMetadata(
   2710         mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
   2711 {
   2712     ATRACE_CALL();
   2713 
   2714     if (NULL == metadata_buf) {
   2715         LOGE("metadata_buf is NULL");
   2716         return;
   2717     }
   2718     /* In batch mode, the metdata will contain the frame number and timestamp of
   2719      * the last frame in the batch. Eg: a batch containing buffers from request
   2720      * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
   2721      * multiple process_capture_requests => 1 set_param => 1 handleBatchMetata =>
   2722      * multiple process_capture_results */
   2723     metadata_buffer_t *metadata =
   2724             (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
   2725     int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
   2726     uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
   2727     uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
   2728     uint32_t frame_number = 0, urgent_frame_number = 0;
   2729     int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
   2730     bool invalid_metadata = false;
   2731     size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
   2732     size_t loopCount = 1;
   2733 
   2734     int32_t *p_frame_number_valid =
   2735             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
   2736     uint32_t *p_frame_number =
   2737             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
   2738     int64_t *p_capture_time =
   2739             POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
   2740     int32_t *p_urgent_frame_number_valid =
   2741             POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
   2742     uint32_t *p_urgent_frame_number =
   2743             POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
   2744 
   2745     if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
   2746             (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
   2747             (NULL == p_urgent_frame_number)) {
   2748         LOGE("Invalid metadata");
   2749         invalid_metadata = true;
   2750     } else {
   2751         frame_number_valid = *p_frame_number_valid;
   2752         last_frame_number = *p_frame_number;
   2753         last_frame_capture_time = *p_capture_time;
   2754         urgent_frame_number_valid = *p_urgent_frame_number_valid;
   2755         last_urgent_frame_number = *p_urgent_frame_number;
   2756     }
   2757 
   2758     /* In batchmode, when no video buffers are requested, set_parms are sent
   2759      * for every capture_request. The difference between consecutive urgent
   2760      * frame numbers and frame numbers should be used to interpolate the
   2761      * corresponding frame numbers and time stamps */
   2762     pthread_mutex_lock(&mMutex);
   2763     if (urgent_frame_number_valid) {
   2764         first_urgent_frame_number =
   2765                 mPendingBatchMap.valueFor(last_urgent_frame_number);
   2766         urgentFrameNumDiff = last_urgent_frame_number + 1 -
   2767                 first_urgent_frame_number;
   2768 
   2769         LOGD("urgent_frm: valid: %d frm_num: %d - %d",
   2770                  urgent_frame_number_valid,
   2771                 first_urgent_frame_number, last_urgent_frame_number);
   2772     }
   2773 
   2774     if (frame_number_valid) {
   2775         first_frame_number = mPendingBatchMap.valueFor(last_frame_number);
   2776         frameNumDiff = last_frame_number + 1 -
   2777                 first_frame_number;
   2778         mPendingBatchMap.removeItem(last_frame_number);
   2779 
   2780         LOGD("frm: valid: %d frm_num: %d - %d",
   2781                  frame_number_valid,
   2782                 first_frame_number, last_frame_number);
   2783 
   2784     }
   2785     pthread_mutex_unlock(&mMutex);
   2786 
   2787     if (urgent_frame_number_valid || frame_number_valid) {
   2788         loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
   2789         if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
   2790             LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
   2791                      urgentFrameNumDiff, last_urgent_frame_number);
   2792         if (frameNumDiff > MAX_HFR_BATCH_SIZE)
   2793             LOGE("frameNumDiff: %d frameNum: %d",
   2794                      frameNumDiff, last_frame_number);
   2795     }
   2796 
   2797     for (size_t i = 0; i < loopCount; i++) {
   2798         /* handleMetadataWithLock is called even for invalid_metadata for
   2799          * pipeline depth calculation */
   2800         if (!invalid_metadata) {
   2801             /* Infer frame number. Batch metadata contains frame number of the
   2802              * last frame */
   2803             if (urgent_frame_number_valid) {
   2804                 if (i < urgentFrameNumDiff) {
   2805                     urgent_frame_number =
   2806                             first_urgent_frame_number + i;
   2807                     LOGD("inferred urgent frame_number: %d",
   2808                              urgent_frame_number);
   2809                     ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
   2810                             CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
   2811                 } else {
   2812                     /* This is to handle when urgentFrameNumDiff < frameNumDiff */
   2813                     ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
   2814                             CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
   2815                 }
   2816             }
   2817 
   2818             /* Infer frame number. Batch metadata contains frame number of the
   2819              * last frame */
   2820             if (frame_number_valid) {
   2821                 if (i < frameNumDiff) {
   2822                     frame_number = first_frame_number + i;
   2823                     LOGD("inferred frame_number: %d", frame_number);
   2824                     ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
   2825                             CAM_INTF_META_FRAME_NUMBER, frame_number);
   2826                 } else {
   2827                     /* This is to handle when urgentFrameNumDiff > frameNumDiff */
   2828                     ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
   2829                              CAM_INTF_META_FRAME_NUMBER_VALID, 0);
   2830                 }
   2831             }
   2832 
   2833             if (last_frame_capture_time) {
   2834                 //Infer timestamp
   2835                 first_frame_capture_time = last_frame_capture_time -
   2836                         (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
   2837                 capture_time =
   2838                         first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
   2839                 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
   2840                         CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
   2841                 LOGD("batch capture_time: %lld, capture_time: %lld",
   2842                          last_frame_capture_time, capture_time);
   2843             }
   2844         }
   2845         pthread_mutex_lock(&mMutex);
   2846         handleMetadataWithLock(metadata_buf,
   2847                 false /* free_and_bufdone_meta_buf */,
   2848                 (i == 0) /* first metadata in the batch metadata */);
   2849         pthread_mutex_unlock(&mMutex);
   2850     }
   2851 
   2852     /* BufDone metadata buffer */
   2853     if (free_and_bufdone_meta_buf) {
   2854         mMetadataChannel->bufDone(metadata_buf);
   2855         free(metadata_buf);
   2856     }
   2857 }
   2858 
   2859 void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
   2860         camera3_error_msg_code_t errorCode)
   2861 {
   2862     camera3_notify_msg_t notify_msg;
   2863     memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
   2864     notify_msg.type = CAMERA3_MSG_ERROR;
   2865     notify_msg.message.error.error_code = errorCode;
   2866     notify_msg.message.error.error_stream = NULL;
   2867     notify_msg.message.error.frame_number = frameNumber;
   2868     mCallbackOps->notify(mCallbackOps, &notify_msg);
   2869 
   2870     return;
   2871 }
   2872 /*===========================================================================
   2873  * FUNCTION   : handleMetadataWithLock
   2874  *
   2875  * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
   2876  *
   2877  * PARAMETERS : @metadata_buf: metadata buffer
   2878  *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
   2879  *                 the meta buf in this method
   2880  *              @firstMetadataInBatch: Boolean to indicate whether this is the
   2881  *                  first metadata in a batch. Valid only for batch mode
   2882  *
   2883  * RETURN     :
   2884  *
   2885  *==========================================================================*/
   2886 void QCamera3HardwareInterface::handleMetadataWithLock(
   2887     mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
   2888     bool firstMetadataInBatch)
   2889 {
   2890     ATRACE_CALL();
   2891     if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
   2892         //during flush do not send metadata from this thread
   2893         LOGD("not sending metadata during flush or when mState is error");
   2894         if (free_and_bufdone_meta_buf) {
   2895             mMetadataChannel->bufDone(metadata_buf);
   2896             free(metadata_buf);
   2897         }
   2898         return;
   2899     }
   2900 
   2901     //not in flush
   2902     metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
   2903     int32_t frame_number_valid, urgent_frame_number_valid;
   2904     uint32_t frame_number, urgent_frame_number;
   2905     int64_t capture_time;
   2906     nsecs_t currentSysTime;
   2907 
   2908     int32_t *p_frame_number_valid =
   2909             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
   2910     uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
   2911     int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
   2912     int32_t *p_urgent_frame_number_valid =
   2913             POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
   2914     uint32_t *p_urgent_frame_number =
   2915             POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
   2916     IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
   2917             metadata) {
   2918         LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
   2919                  *p_frame_number_valid, *p_frame_number);
   2920     }
   2921 
   2922     if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
   2923             (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
   2924         LOGE("Invalid metadata");
   2925         if (free_and_bufdone_meta_buf) {
   2926             mMetadataChannel->bufDone(metadata_buf);
   2927             free(metadata_buf);
   2928         }
   2929         goto done_metadata;
   2930     }
   2931     frame_number_valid =        *p_frame_number_valid;
   2932     frame_number =              *p_frame_number;
   2933     capture_time =              *p_capture_time;
   2934     urgent_frame_number_valid = *p_urgent_frame_number_valid;
   2935     urgent_frame_number =       *p_urgent_frame_number;
   2936     currentSysTime =            systemTime(CLOCK_MONOTONIC);
   2937 
   2938     // Detect if buffers from any requests are overdue
   2939     for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
   2940         if ( (currentSysTime - req.timestamp) >
   2941             s2ns(MISSING_REQUEST_BUF_TIMEOUT) ) {
   2942             for (auto &missed : req.mPendingBufferList) {
   2943                 LOGE("Current frame: %d. Missing: frame = %d, buffer = %p,"
   2944                     "stream type = %d, stream format = %d",
   2945                     frame_number, req.frame_number, missed.buffer,
   2946                     missed.stream->stream_type, missed.stream->format);
   2947             }
   2948         }
   2949     }
   2950     //Partial result on process_capture_result for timestamp
   2951     if (urgent_frame_number_valid) {
   2952         LOGD("valid urgent frame_number = %u, capture_time = %lld",
   2953            urgent_frame_number, capture_time);
   2954 
   2955         //Recieved an urgent Frame Number, handle it
   2956         //using partial results
   2957         for (pendingRequestIterator i =
   2958                 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
   2959             LOGD("Iterator Frame = %d urgent frame = %d",
   2960                  i->frame_number, urgent_frame_number);
   2961 
   2962             if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
   2963                 (i->partial_result_cnt == 0)) {
   2964                 LOGE("Error: HAL missed urgent metadata for frame number %d",
   2965                          i->frame_number);
   2966             }
   2967 
   2968             if (i->frame_number == urgent_frame_number &&
   2969                      i->bUrgentReceived == 0) {
   2970 
   2971                 camera3_capture_result_t result;
   2972                 memset(&result, 0, sizeof(camera3_capture_result_t));
   2973 
   2974                 i->partial_result_cnt++;
   2975                 i->bUrgentReceived = 1;
   2976                 // Extract 3A metadata
   2977                 result.result =
   2978                     translateCbUrgentMetadataToResultMetadata(metadata);
   2979                 // Populate metadata result
   2980                 result.frame_number = urgent_frame_number;
   2981                 result.num_output_buffers = 0;
   2982                 result.output_buffers = NULL;
   2983                 result.partial_result = i->partial_result_cnt;
   2984 
   2985                 mCallbackOps->process_capture_result(mCallbackOps, &result);
   2986                 LOGD("urgent frame_number = %u, capture_time = %lld",
   2987                       result.frame_number, capture_time);
   2988                 free_camera_metadata((camera_metadata_t *)result.result);
   2989                 break;
   2990             }
   2991         }
   2992     }
   2993 
   2994     if (!frame_number_valid) {
   2995         LOGD("Not a valid normal frame number, used as SOF only");
   2996         if (free_and_bufdone_meta_buf) {
   2997             mMetadataChannel->bufDone(metadata_buf);
   2998             free(metadata_buf);
   2999         }
   3000         goto done_metadata;
   3001     }
   3002     LOGH("valid frame_number = %u, capture_time = %lld",
   3003             frame_number, capture_time);
   3004 
   3005     for (pendingRequestIterator i = mPendingRequestsList.begin();
   3006             i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
   3007         // Flush out all entries with less or equal frame numbers.
   3008 
   3009         camera3_capture_result_t result;
   3010         memset(&result, 0, sizeof(camera3_capture_result_t));
   3011 
   3012         LOGD("frame_number in the list is %u", i->frame_number);
   3013         i->partial_result_cnt++;
   3014         result.partial_result = i->partial_result_cnt;
   3015 
   3016         // Check whether any stream buffer corresponding to this is dropped or not
   3017         // If dropped, then send the ERROR_BUFFER for the corresponding stream
   3018         if (p_cam_frame_drop) {
   3019             /* Clear notify_msg structure */
   3020             camera3_notify_msg_t notify_msg;
   3021             memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
   3022             for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
   3023                     j != i->buffers.end(); j++) {
   3024                 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel *)j->stream->priv;
   3025                 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
   3026                 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
   3027                     if (streamID == p_cam_frame_drop->streamID[k]) {
   3028                         // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
   3029                         LOGE("Start of reporting error frame#=%u, streamID=%u",
   3030                                  i->frame_number, streamID);
   3031                         notify_msg.type = CAMERA3_MSG_ERROR;
   3032                         notify_msg.message.error.frame_number = i->frame_number;
   3033                         notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
   3034                         notify_msg.message.error.error_stream = j->stream;
   3035                         mCallbackOps->notify(mCallbackOps, &notify_msg);
   3036                         LOGE("End of reporting error frame#=%u, streamID=%u",
   3037                                 i->frame_number, streamID);
   3038                         PendingFrameDropInfo PendingFrameDrop;
   3039                         PendingFrameDrop.frame_number=i->frame_number;
   3040                         PendingFrameDrop.stream_ID = streamID;
   3041                         // Add the Frame drop info to mPendingFrameDropList
   3042                         mPendingFrameDropList.push_back(PendingFrameDrop);
   3043                    }
   3044                 }
   3045             }
   3046         }
   3047 
   3048         // Send empty metadata with already filled buffers for dropped metadata
   3049         // and send valid metadata with already filled buffers for current metadata
   3050         /* we could hit this case when we either
   3051          * 1. have a pending reprocess request or
   3052          * 2. miss a metadata buffer callback */
   3053         if (i->frame_number < frame_number) {
   3054             if (i->input_buffer) {
   3055                 /* this will be handled in handleInputBufferWithLock */
   3056                 i++;
   3057                 continue;
   3058             } else if (mBatchSize) {
   3059 
   3060                 mPendingLiveRequest--;
   3061 
   3062                 CameraMetadata dummyMetadata;
   3063                 dummyMetadata.update(ANDROID_REQUEST_ID, &(i->request_id), 1);
   3064                 result.result = dummyMetadata.release();
   3065 
   3066                 notifyError(i->frame_number, CAMERA3_MSG_ERROR_RESULT);
   3067             } else {
   3068                 LOGE("Fatal: Missing metadata buffer for frame number %d", i->frame_number);
   3069                 if (free_and_bufdone_meta_buf) {
   3070                     mMetadataChannel->bufDone(metadata_buf);
   3071                     free(metadata_buf);
   3072                 }
   3073                 mState = ERROR;
   3074                 goto done_metadata;
   3075             }
   3076         } else {
   3077             mPendingLiveRequest--;
   3078             /* Clear notify_msg structure */
   3079             camera3_notify_msg_t notify_msg;
   3080             memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
   3081 
   3082             // Send shutter notify to frameworks
   3083             notify_msg.type = CAMERA3_MSG_SHUTTER;
   3084             notify_msg.message.shutter.frame_number = i->frame_number;
   3085             notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
   3086             mCallbackOps->notify(mCallbackOps, &notify_msg);
   3087 
   3088             i->timestamp = capture_time;
   3089 
   3090             /* Set the timestamp in display metadata so that clients aware of
   3091                private_handle such as VT can use this un-modified timestamps.
   3092                Camera framework is unaware of this timestamp and cannot change this */
   3093             updateTimeStampInPendingBuffers(i->frame_number, i->timestamp);
   3094 
   3095             // Find channel requiring metadata, meaning internal offline postprocess
   3096             // is needed.
   3097             //TODO: for now, we don't support two streams requiring metadata at the same time.
   3098             // (because we are not making copies, and metadata buffer is not reference counted.
   3099             bool internalPproc = false;
   3100             for (pendingBufferIterator iter = i->buffers.begin();
   3101                     iter != i->buffers.end(); iter++) {
   3102                 if (iter->need_metadata) {
   3103                     internalPproc = true;
   3104                     QCamera3ProcessingChannel *channel =
   3105                             (QCamera3ProcessingChannel *)iter->stream->priv;
   3106                     channel->queueReprocMetadata(metadata_buf);
   3107                     break;
   3108                 }
   3109             }
   3110 
   3111             result.result = translateFromHalMetadata(metadata,
   3112                     i->timestamp, i->request_id, i->jpegMetadata, i->pipeline_depth,
   3113                     i->capture_intent, internalPproc, i->fwkCacMode,
   3114                     firstMetadataInBatch);
   3115 
   3116             saveExifParams(metadata);
   3117 
   3118             if (i->blob_request) {
   3119                 {
   3120                     //Dump tuning metadata if enabled and available
   3121                     char prop[PROPERTY_VALUE_MAX];
   3122                     memset(prop, 0, sizeof(prop));
   3123                     property_get("persist.camera.dumpmetadata", prop, "0");
   3124                     int32_t enabled = atoi(prop);
   3125                     if (enabled && metadata->is_tuning_params_valid) {
   3126                         dumpMetadataToFile(metadata->tuning_params,
   3127                                mMetaFrameCount,
   3128                                enabled,
   3129                                "Snapshot",
   3130                                frame_number);
   3131                     }
   3132                 }
   3133             }
   3134 
   3135             if (!internalPproc) {
   3136                 LOGD("couldn't find need_metadata for this metadata");
   3137                 // Return metadata buffer
   3138                 if (free_and_bufdone_meta_buf) {
   3139                     mMetadataChannel->bufDone(metadata_buf);
   3140                     free(metadata_buf);
   3141                 }
   3142             }
   3143         }
   3144         if (!result.result) {
   3145             LOGE("metadata is NULL");
   3146         }
   3147         result.frame_number = i->frame_number;
   3148         result.input_buffer = i->input_buffer;
   3149         result.num_output_buffers = 0;
   3150         result.output_buffers = NULL;
   3151         for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
   3152                     j != i->buffers.end(); j++) {
   3153             if (j->buffer) {
   3154                 result.num_output_buffers++;
   3155             }
   3156         }
   3157 
   3158         updateFpsInPreviewBuffer(metadata, i->frame_number);
   3159 
   3160         if (result.num_output_buffers > 0) {
   3161             camera3_stream_buffer_t *result_buffers =
   3162                 new camera3_stream_buffer_t[result.num_output_buffers];
   3163             if (result_buffers != NULL) {
   3164                 size_t result_buffers_idx = 0;
   3165                 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
   3166                         j != i->buffers.end(); j++) {
   3167                     if (j->buffer) {
   3168                         for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
   3169                                 m != mPendingFrameDropList.end(); m++) {
   3170                             QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
   3171                             uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
   3172                             if((m->stream_ID == streamID) && (m->frame_number==frame_number)) {
   3173                                 j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
   3174                                 LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u",
   3175                                         frame_number, streamID);
   3176                                 m = mPendingFrameDropList.erase(m);
   3177                                 break;
   3178                             }
   3179                         }
   3180                         mPendingBuffersMap.removeBuf(j->buffer->buffer);
   3181                         result_buffers[result_buffers_idx++] = *(j->buffer);
   3182                         free(j->buffer);
   3183                         j->buffer = NULL;
   3184                     }
   3185                 }
   3186 
   3187                 result.output_buffers = result_buffers;
   3188                 mCallbackOps->process_capture_result(mCallbackOps, &result);
   3189                 LOGD("meta frame_number = %u, capture_time = %lld",
   3190                         result.frame_number, i->timestamp);
   3191                 free_camera_metadata((camera_metadata_t *)result.result);
   3192                 delete[] result_buffers;
   3193             }else {
   3194                 LOGE("Fatal error: out of memory");
   3195             }
   3196         } else {
   3197             mCallbackOps->process_capture_result(mCallbackOps, &result);
   3198             LOGD("meta frame_number = %u, capture_time = %lld",
   3199                     result.frame_number, i->timestamp);
   3200             free_camera_metadata((camera_metadata_t *)result.result);
   3201         }
   3202 
   3203         i = erasePendingRequest(i);
   3204 
   3205         if (!mPendingReprocessResultList.empty()) {
   3206             handlePendingReprocResults(frame_number + 1);
   3207         }
   3208     }
   3209 
   3210 done_metadata:
   3211     for (pendingRequestIterator i = mPendingRequestsList.begin();
   3212             i != mPendingRequestsList.end() ;i++) {
   3213         i->pipeline_depth++;
   3214     }
   3215     LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
   3216     unblockRequestIfNecessary();
   3217 }
   3218 
   3219 /*===========================================================================
   3220  * FUNCTION   : hdrPlusPerfLock
   3221  *
   3222  * DESCRIPTION: perf lock for HDR+ using custom intent
   3223  *
   3224  * PARAMETERS : @metadata_buf: Metadata super_buf pointer
   3225  *
   3226  * RETURN     : None
   3227  *
   3228  *==========================================================================*/
   3229 void QCamera3HardwareInterface::hdrPlusPerfLock(
   3230         mm_camera_super_buf_t *metadata_buf)
   3231 {
   3232     if (NULL == metadata_buf) {
   3233         LOGE("metadata_buf is NULL");
   3234         return;
   3235     }
   3236     metadata_buffer_t *metadata =
   3237             (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
   3238     int32_t *p_frame_number_valid =
   3239             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
   3240     uint32_t *p_frame_number =
   3241             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
   3242 
   3243     if (p_frame_number_valid == NULL || p_frame_number == NULL) {
   3244         LOGE("%s: Invalid metadata", __func__);
   3245         return;
   3246     }
   3247 
   3248     //acquire perf lock for 5 sec after the last HDR frame is captured
   3249     if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
   3250         if ((p_frame_number != NULL) &&
   3251                 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
   3252             m_perfLock.lock_acq_timed(HDR_PLUS_PERF_TIME_OUT);
   3253         }
   3254     }
   3255 
   3256     //release lock after perf lock timer is expired. If lock is already released,
   3257     //isTimerReset returns false
   3258     if (m_perfLock.isTimerReset()) {
   3259         mLastCustIntentFrmNum = -1;
   3260         m_perfLock.lock_rel_timed();
   3261     }
   3262 }
   3263 
   3264 /*===========================================================================
   3265  * FUNCTION   : handleInputBufferWithLock
   3266  *
   3267  * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
   3268  *
   3269  * PARAMETERS : @frame_number: frame number of the input buffer
   3270  *
   3271  * RETURN     :
   3272  *
   3273  *==========================================================================*/
   3274 void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
   3275 {
   3276     ATRACE_CALL();
   3277     pendingRequestIterator i = mPendingRequestsList.begin();
   3278     while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
   3279         i++;
   3280     }
   3281     if (i != mPendingRequestsList.end() && i->input_buffer) {
   3282         //found the right request
   3283         if (!i->shutter_notified) {
   3284             CameraMetadata settings;
   3285             camera3_notify_msg_t notify_msg;
   3286             memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
   3287             nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
   3288             if(i->settings) {
   3289                 settings = i->settings;
   3290                 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
   3291                     capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
   3292                 } else {
   3293                     LOGE("No timestamp in input settings! Using current one.");
   3294                 }
   3295             } else {
   3296                 LOGE("Input settings missing!");
   3297             }
   3298 
   3299             notify_msg.type = CAMERA3_MSG_SHUTTER;
   3300             notify_msg.message.shutter.frame_number = frame_number;
   3301             notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
   3302             mCallbackOps->notify(mCallbackOps, &notify_msg);
   3303             i->shutter_notified = true;
   3304             LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
   3305                         i->frame_number, notify_msg.message.shutter.timestamp);
   3306         }
   3307 
   3308         if (i->input_buffer->release_fence != -1) {
   3309            int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
   3310            close(i->input_buffer->release_fence);
   3311            if (rc != OK) {
   3312                LOGE("input buffer sync wait failed %d", rc);
   3313            }
   3314         }
   3315 
   3316         camera3_capture_result result;
   3317         memset(&result, 0, sizeof(camera3_capture_result));
   3318         result.frame_number = frame_number;
   3319         result.result = i->settings;
   3320         result.input_buffer = i->input_buffer;
   3321         result.partial_result = PARTIAL_RESULT_COUNT;
   3322 
   3323         mCallbackOps->process_capture_result(mCallbackOps, &result);
   3324         LOGD("Input request metadata and input buffer frame_number = %u",
   3325                         i->frame_number);
   3326         i = erasePendingRequest(i);
   3327     } else {
   3328         LOGE("Could not find input request for frame number %d", frame_number);
   3329     }
   3330 }
   3331 
   3332 /*===========================================================================
   3333  * FUNCTION   : handleBufferWithLock
   3334  *
   3335  * DESCRIPTION: Handles image buffer callback with mMutex lock held.
   3336  *
   3337  * PARAMETERS : @buffer: image buffer for the callback
   3338  *              @frame_number: frame number of the image buffer
   3339  *
   3340  * RETURN     :
   3341  *
   3342  *==========================================================================*/
   3343 void QCamera3HardwareInterface::handleBufferWithLock(
   3344     camera3_stream_buffer_t *buffer, uint32_t frame_number)
   3345 {
   3346     ATRACE_CALL();
   3347     /* Nothing to be done during error state */
   3348     if ((ERROR == mState) || (DEINIT == mState)) {
   3349         return;
   3350     }
   3351     if (mFlushP