Home | History | Annotate | Download | only in HAL3
/* Copyright (c) 2012-2015, The Linux Foundation. All rights reserved.
      2 *
      3 * Redistribution and use in source and binary forms, with or without
      4 * modification, are permitted provided that the following conditions are
      5 * met:
      6 *     * Redistributions of source code must retain the above copyright
      7 *       notice, this list of conditions and the following disclaimer.
      8 *     * Redistributions in binary form must reproduce the above
      9 *       copyright notice, this list of conditions and the following
     10 *       disclaimer in the documentation and/or other materials provided
     11 *       with the distribution.
     12 *     * Neither the name of The Linux Foundation nor the names of its
     13 *       contributors may be used to endorse or promote products derived
     14 *       from this software without specific prior written permission.
     15 *
     16 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
     17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
     18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
     19 * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
     20 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
     21 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
     22 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
     23 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
     24 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
     25 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
     26 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
     27 *
     28 */
     29 
     30 #define ATRACE_TAG ATRACE_TAG_CAMERA
     31 #define LOG_TAG "QCamera3HWI"
     32 //#define LOG_NDEBUG 0
     33 
     34 #define __STDC_LIMIT_MACROS
     35 #include <cutils/properties.h>
     36 #include <hardware/camera3.h>
     37 #include <camera/CameraMetadata.h>
     38 #include <stdio.h>
     39 #include <stdlib.h>
     40 #include <fcntl.h>
     41 #include <stdint.h>
     42 #include <utils/Log.h>
     43 #include <utils/Errors.h>
     44 #include <utils/Trace.h>
     45 #include <sync/sync.h>
     46 #include <gralloc_priv.h>
     47 #include "util/QCameraFlash.h"
     48 #include "QCamera3HWI.h"
     49 #include "QCamera3Mem.h"
     50 #include "QCamera3Channel.h"
     51 #include "QCamera3PostProc.h"
     52 #include "QCamera3VendorTags.h"
     53 #include "cam_cond.h"
     54 
     55 using namespace android;
     56 
     57 namespace qcamera {
     58 
     59 #define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
     60 
     61 #define EMPTY_PIPELINE_DELAY 2
     62 #define PARTIAL_RESULT_COUNT 3
     63 #define FRAME_SKIP_DELAY     0
     64 #define CAM_MAX_SYNC_LATENCY 4
     65 
     66 #define MAX_VALUE_8BIT ((1<<8)-1)
     67 #define MAX_VALUE_10BIT ((1<<10)-1)
     68 #define MAX_VALUE_12BIT ((1<<12)-1)
     69 
     70 #define VIDEO_4K_WIDTH  3840
     71 #define VIDEO_4K_HEIGHT 2160
     72 
     73 #define MAX_EIS_WIDTH 1920
     74 #define MAX_EIS_HEIGHT 1080
     75 
     76 #define MAX_RAW_STREAMS        1
     77 #define MAX_STALLING_STREAMS   1
     78 #define MAX_PROCESSED_STREAMS  3
     79 /* Batch mode is enabled only if FPS set is equal to or greater than this */
     80 #define MIN_FPS_FOR_BATCH_MODE (120)
     81 #define PREVIEW_FPS_FOR_HFR    (30)
     82 #define DEFAULT_VIDEO_FPS      (30.0)
     83 #define MAX_HFR_BATCH_SIZE     (8)
     84 #define REGIONS_TUPLE_COUNT    5
     85 #define HDR_PLUS_PERF_TIME_OUT  (7000) // milliseconds
     86 
     87 #define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
     88 
     89 #define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3   ( CAM_QCOM_FEATURE_DENOISE2D |\
     90                                               CAM_QCOM_FEATURE_CROP |\
     91                                               CAM_QCOM_FEATURE_ROTATION |\
     92                                               CAM_QCOM_FEATURE_SHARPNESS |\
     93                                               CAM_QCOM_FEATURE_SCALE |\
     94                                               CAM_QCOM_FEATURE_CAC |\
     95                                               CAM_QCOM_FEATURE_CDS )
     96 
     97 #define TIMEOUT_NEVER -1
     98 
     99 cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
    100 const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
    101 static pthread_mutex_t gCamLock = PTHREAD_MUTEX_INITIALIZER;
    102 volatile uint32_t gCamHal3LogLevel = 1;
    103 
    104 const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    105     {"On",  CAM_CDS_MODE_ON},
    106     {"Off", CAM_CDS_MODE_OFF},
    107     {"Auto",CAM_CDS_MODE_AUTO}
    108 };
    109 
    110 const QCamera3HardwareInterface::QCameraMap<
    111         camera_metadata_enum_android_control_effect_mode_t,
    112         cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    113     { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    114     { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    115     { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    116     { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    117     { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    118     { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    119     { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    120     { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    121     { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
    122 };
    123 
    124 const QCamera3HardwareInterface::QCameraMap<
    125         camera_metadata_enum_android_control_awb_mode_t,
    126         cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    127     { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    128     { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    129     { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    130     { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    131     { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    132     { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    133     { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    134     { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    135     { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
    136 };
    137 
    138 const QCamera3HardwareInterface::QCameraMap<
    139         camera_metadata_enum_android_control_scene_mode_t,
    140         cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    141     { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    142     { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    143     { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    144     { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    145     { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    146     { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    147     { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    148     { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    149     { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    150     { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    151     { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    152     { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    153     { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    154     { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    155     { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    156     { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
    157 };
    158 
    159 const QCamera3HardwareInterface::QCameraMap<
    160         camera_metadata_enum_android_control_af_mode_t,
    161         cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    162     { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    163     { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    164     { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    165     { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    166     { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    167     { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    168     { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
    169 };
    170 
    171 const QCamera3HardwareInterface::QCameraMap<
    172         camera_metadata_enum_android_color_correction_aberration_mode_t,
    173         cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    174     { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
    175             CAM_COLOR_CORRECTION_ABERRATION_OFF },
    176     { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
    177             CAM_COLOR_CORRECTION_ABERRATION_FAST },
    178     { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
    179             CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
    180 };
    181 
    182 const QCamera3HardwareInterface::QCameraMap<
    183         camera_metadata_enum_android_control_ae_antibanding_mode_t,
    184         cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    185     { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    186     { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    187     { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    188     { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
    189 };
    190 
    191 const QCamera3HardwareInterface::QCameraMap<
    192         camera_metadata_enum_android_control_ae_mode_t,
    193         cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    194     { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    195     { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    196     { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    197     { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
    198     { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
    199 };
    200 
    201 const QCamera3HardwareInterface::QCameraMap<
    202         camera_metadata_enum_android_flash_mode_t,
    203         cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    204     { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    205     { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    206     { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
    207 };
    208 
    209 const QCamera3HardwareInterface::QCameraMap<
    210         camera_metadata_enum_android_statistics_face_detect_mode_t,
    211         cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    212     { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
    213     { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE  },
    214     { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
    215 };
    216 
    217 const QCamera3HardwareInterface::QCameraMap<
    218         camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
    219         cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    220     { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
    221       CAM_FOCUS_UNCALIBRATED },
    222     { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
    223       CAM_FOCUS_APPROXIMATE },
    224     { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
    225       CAM_FOCUS_CALIBRATED }
    226 };
    227 
    228 const QCamera3HardwareInterface::QCameraMap<
    229         camera_metadata_enum_android_lens_state_t,
    230         cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    231     { ANDROID_LENS_STATE_STATIONARY,    CAM_AF_LENS_STATE_STATIONARY},
    232     { ANDROID_LENS_STATE_MOVING,        CAM_AF_LENS_STATE_MOVING}
    233 };
    234 
    235 const int32_t available_thumbnail_sizes[] = {0, 0,
    236                                              176, 144,
    237                                              320, 240,
    238                                              432, 288,
    239                                              480, 288,
    240                                              512, 288,
    241                                              512, 384};
    242 
    243 const QCamera3HardwareInterface::QCameraMap<
    244         camera_metadata_enum_android_sensor_test_pattern_mode_t,
    245         cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    246     { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,          CAM_TEST_PATTERN_OFF   },
    247     { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,  CAM_TEST_PATTERN_SOLID_COLOR },
    248     { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,   CAM_TEST_PATTERN_COLOR_BARS },
    249     { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    250     { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,          CAM_TEST_PATTERN_PN9 },
    251 };
    252 
    253 /* Since there is no mapping for all the options some Android enum are not listed.
    254  * Also, the order in this list is important because while mapping from HAL to Android it will
    255  * traverse from lower to higher index which means that for HAL values that are map to different
    256  * Android values, the traverse logic will select the first one found.
    257  */
    258 const QCamera3HardwareInterface::QCameraMap<
    259         camera_metadata_enum_android_sensor_reference_illuminant1_t,
    260         cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    261     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    262     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    263     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    264     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    265     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    266     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    267     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    268     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    269     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    270     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    271     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    272     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    273     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    274     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    275     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    276     { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
    277 };
    278 
    279 const QCamera3HardwareInterface::QCameraMap<
    280         int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    281     { 60, CAM_HFR_MODE_60FPS},
    282     { 90, CAM_HFR_MODE_90FPS},
    283     { 120, CAM_HFR_MODE_120FPS},
    284     { 150, CAM_HFR_MODE_150FPS},
    285     { 180, CAM_HFR_MODE_180FPS},
    286     { 210, CAM_HFR_MODE_210FPS},
    287     { 240, CAM_HFR_MODE_240FPS},
    288     { 480, CAM_HFR_MODE_480FPS},
    289 };
    290 
    291 camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    292     .initialize =                         QCamera3HardwareInterface::initialize,
    293     .configure_streams =                  QCamera3HardwareInterface::configure_streams,
    294     .register_stream_buffers =            NULL,
    295     .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    296     .process_capture_request =            QCamera3HardwareInterface::process_capture_request,
    297     .get_metadata_vendor_tag_ops =        NULL,
    298     .dump =                               QCamera3HardwareInterface::dump,
    299     .flush =                              QCamera3HardwareInterface::flush,
    300     .reserved =                           {0},
    301 };
    302 
    303 /*===========================================================================
    304  * FUNCTION   : QCamera3HardwareInterface
    305  *
    306  * DESCRIPTION: constructor of QCamera3HardwareInterface
    307  *
    308  * PARAMETERS :
    309  *   @cameraId  : camera ID
    310  *
    311  * RETURN     : none
    312  *==========================================================================*/
    313 QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
    314         const camera_module_callbacks_t *callbacks)
    315     : mCameraId(cameraId),
    316       mCameraHandle(NULL),
    317       mCameraOpened(false),
    318       mCameraInitialized(false),
    319       mCallbackOps(NULL),
    320       mMetadataChannel(NULL),
    321       mPictureChannel(NULL),
    322       mRawChannel(NULL),
    323       mSupportChannel(NULL),
    324       mAnalysisChannel(NULL),
    325       mRawDumpChannel(NULL),
    326       mDummyBatchChannel(NULL),
    327       mChannelHandle(0),
    328       mFirstRequest(false),
    329       mFirstConfiguration(true),
    330       mFlush(false),
    331       mParamHeap(NULL),
    332       mParameters(NULL),
    333       mPrevParameters(NULL),
    334       m_bIsVideo(false),
    335       m_bIs4KVideo(false),
    336       m_bEisSupportedSize(false),
    337       m_bEisEnable(false),
    338       m_MobicatMask(0),
    339       mMinProcessedFrameDuration(0),
    340       mMinJpegFrameDuration(0),
    341       mMinRawFrameDuration(0),
    342       mMetaFrameCount(0U),
    343       mUpdateDebugLevel(false),
    344       mCallbacks(callbacks),
    345       mCaptureIntent(0),
    346       mHybridAeEnable(0),
    347       mBatchSize(0),
    348       mToBeQueuedVidBufs(0),
    349       mHFRVideoFps(DEFAULT_VIDEO_FPS),
    350       mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
    351       mFirstFrameNumberInBatch(0),
    352       mNeedSensorRestart(false),
    353       mLdafCalibExist(false),
    354       mPowerHintEnabled(false),
    355       mLastCustIntentFrmNum(-1)
    356 {
    357     getLogLevel();
    358     m_perfLock.lock_init();
    359     mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    360     mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
    361     mCameraDevice.common.close = close_camera_device;
    362     mCameraDevice.ops = &mCameraOps;
    363     mCameraDevice.priv = this;
    364     gCamCapability[cameraId]->version = CAM_HAL_V3;
    365     // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    366     //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    367     gCamCapability[cameraId]->min_num_pp_bufs = 3;
    368 
    369     PTHREAD_COND_INIT(&mRequestCond);
    370     mPendingLiveRequest = 0;
    371     mCurrentRequestId = -1;
    372     pthread_mutex_init(&mMutex, NULL);
    373 
    374     for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
    375         mDefaultMetadata[i] = NULL;
    376 
    377     // Getting system props of different kinds
    378     char prop[PROPERTY_VALUE_MAX];
    379     memset(prop, 0, sizeof(prop));
    380     property_get("persist.camera.raw.dump", prop, "0");
    381     mEnableRawDump = atoi(prop);
    382     if (mEnableRawDump)
    383         CDBG("%s: Raw dump from Camera HAL enabled", __func__);
    384 
    385     memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    386     memset(mLdafCalib, 0, sizeof(mLdafCalib));
    387 
    388     memset(prop, 0, sizeof(prop));
    389     property_get("persist.camera.tnr.preview", prop, "1");
    390     m_bTnrPreview = (uint8_t)atoi(prop);
    391 
    392     memset(prop, 0, sizeof(prop));
    393     property_get("persist.camera.tnr.video", prop, "1");
    394     m_bTnrVideo = (uint8_t)atoi(prop);
    395 
    396     mPendingBuffersMap.num_buffers = 0;
    397     mPendingBuffersMap.last_frame_number = -1;
    398 }
    399 
    400 /*===========================================================================
    401  * FUNCTION   : ~QCamera3HardwareInterface
    402  *
    403  * DESCRIPTION: destructor of QCamera3HardwareInterface
    404  *
    405  * PARAMETERS : none
    406  *
    407  * RETURN     : none
    408  *==========================================================================*/
    409 QCamera3HardwareInterface::~QCamera3HardwareInterface()
    410 {
    411     CDBG("%s: E", __func__);
    412     bool hasPendingBuffers = (mPendingBuffersMap.num_buffers > 0);
    413 
    414     /* Turn off current power hint before acquiring perfLock in case they
    415      * conflict with each other */
    416     disablePowerHint();
    417 
    418     m_perfLock.lock_acq();
    419 
    420     /* We need to stop all streams before deleting any stream */
    421     if (mRawDumpChannel) {
    422         mRawDumpChannel->stop();
    423     }
    424 
    425     // NOTE: 'camera3_stream_t *' objects are already freed at
    426     //        this stage by the framework
    427     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
    428         it != mStreamInfo.end(); it++) {
    429         QCamera3ProcessingChannel *channel = (*it)->channel;
    430         if (channel) {
    431             channel->stop();
    432         }
    433     }
    434     if (mSupportChannel)
    435         mSupportChannel->stop();
    436 
    437     if (mAnalysisChannel) {
    438         mAnalysisChannel->stop();
    439     }
    440     if (mMetadataChannel) {
    441         mMetadataChannel->stop();
    442     }
    443     if (mChannelHandle) {
    444         mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
    445                 mChannelHandle);
    446         ALOGI("%s: stopping channel %d", __func__, mChannelHandle);
    447     }
    448 
    449     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
    450         it != mStreamInfo.end(); it++) {
    451         QCamera3ProcessingChannel *channel = (*it)->channel;
    452         if (channel)
    453             delete channel;
    454         free (*it);
    455     }
    456     if (mSupportChannel) {
    457         delete mSupportChannel;
    458         mSupportChannel = NULL;
    459     }
    460 
    461     if (mAnalysisChannel) {
    462         delete mAnalysisChannel;
    463         mAnalysisChannel = NULL;
    464     }
    465     if (mRawDumpChannel) {
    466         delete mRawDumpChannel;
    467         mRawDumpChannel = NULL;
    468     }
    469     if (mDummyBatchChannel) {
    470         delete mDummyBatchChannel;
    471         mDummyBatchChannel = NULL;
    472     }
    473     mPictureChannel = NULL;
    474 
    475     if (mMetadataChannel) {
    476         delete mMetadataChannel;
    477         mMetadataChannel = NULL;
    478     }
    479 
    480     /* Clean up all channels */
    481     if (mCameraInitialized) {
    482         if(!mFirstConfiguration){
    483             clear_metadata_buffer(mParameters);
    484 
    485             // Check if there is still pending buffer not yet returned.
    486             if (hasPendingBuffers) {
    487                 for (auto& pendingBuffer : mPendingBuffersMap.mPendingBufferList) {
    488                     ALOGE("%s: Buffer not yet returned for stream. Frame number %d, format 0x%x, width %d, height %d",
    489                         __func__, pendingBuffer.frame_number, pendingBuffer.stream->format, pendingBuffer.stream->width,
    490                         pendingBuffer.stream->height);
    491                 }
    492                 ALOGE("%s: Last requested frame number is %d", __func__, mPendingBuffersMap.last_frame_number);
    493                 uint8_t restart = TRUE;
    494                 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_DAEMON_RESTART,
    495                         restart);
    496             }
    497 
    498             //send the last unconfigure
    499             cam_stream_size_info_t stream_config_info;
    500             memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
    501             stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
    502             stream_config_info.buffer_info.max_buffers =
    503                     m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
    504             ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
    505                     stream_config_info);
    506 
    507             int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
    508             if (rc < 0) {
    509                 ALOGE("%s: set_parms failed for unconfigure", __func__);
    510             }
    511         }
    512         deinitParameters();
    513     }
    514 
    515     if (mChannelHandle) {
    516         mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
    517                 mChannelHandle);
    518         ALOGE("%s: deleting channel %d", __func__, mChannelHandle);
    519         mChannelHandle = 0;
    520     }
    521 
    522     if (mCameraOpened)
    523         closeCamera();
    524 
    525     mPendingBuffersMap.mPendingBufferList.clear();
    526     mPendingReprocessResultList.clear();
    527     for (pendingRequestIterator i = mPendingRequestsList.begin();
    528             i != mPendingRequestsList.end();) {
    529         i = erasePendingRequest(i);
    530     }
    531     for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
    532         if (mDefaultMetadata[i])
    533             free_camera_metadata(mDefaultMetadata[i]);
    534 
    535     m_perfLock.lock_rel();
    536     m_perfLock.lock_deinit();
    537 
    538     pthread_cond_destroy(&mRequestCond);
    539 
    540     pthread_mutex_destroy(&mMutex);
    541 
    542     if (hasPendingBuffers) {
    543         ALOGE("%s: Not all buffers were returned. Notified the camera daemon process to restart."
    544                 " Exiting here...", __func__);
    545         exit(EXIT_FAILURE);
    546     }
    547     CDBG("%s: X", __func__);
    548 }
    549 
    550 /*===========================================================================
    551  * FUNCTION   : erasePendingRequest
    552  *
    553  * DESCRIPTION: function to erase a desired pending request after freeing any
    554  *              allocated memory
    555  *
    556  * PARAMETERS :
    557  *   @i       : iterator pointing to pending request to be erased
    558  *
    559  * RETURN     : iterator pointing to the next request
    560  *==========================================================================*/
    561 QCamera3HardwareInterface::pendingRequestIterator
    562         QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
    563 {
    564     if (i->input_buffer != NULL) {
    565         free(i->input_buffer);
    566         i->input_buffer = NULL;
    567     }
    568     if (i->settings != NULL)
    569         free_camera_metadata((camera_metadata_t*)i->settings);
    570     return mPendingRequestsList.erase(i);
    571 }
    572 
    573 /*===========================================================================
    574  * FUNCTION   : camEvtHandle
    575  *
    576  * DESCRIPTION: Function registered to mm-camera-interface to handle events
    577  *
    578  * PARAMETERS :
    579  *   @camera_handle : interface layer camera handle
    580  *   @evt           : ptr to event
    581  *   @user_data     : user data ptr
    582  *
    583  * RETURN     : none
    584  *==========================================================================*/
    585 void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
    586                                           mm_camera_event_t *evt,
    587                                           void *user_data)
    588 {
    589     QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    590     if (obj && evt) {
    591         switch(evt->server_event_type) {
    592             case CAM_EVENT_TYPE_DAEMON_DIED:
    593                 ALOGE("%s: Fatal, camera daemon died", __func__);
    594                 //close the camera backend
    595                 if (obj->mCameraHandle && obj->mCameraHandle->camera_handle
    596                         && obj->mCameraHandle->ops) {
    597                     obj->mCameraHandle->ops->error_close_camera(obj->mCameraHandle->camera_handle);
    598                 } else {
    599                     ALOGE("%s: Could not close camera on error because the handle or ops is NULL",
    600                             __func__);
    601                 }
    602                 camera3_notify_msg_t notify_msg;
    603                 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
    604                 notify_msg.type = CAMERA3_MSG_ERROR;
    605                 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
    606                 notify_msg.message.error.error_stream = NULL;
    607                 notify_msg.message.error.frame_number = 0;
    608                 obj->mCallbackOps->notify(obj->mCallbackOps, &notify_msg);
    609                 break;
    610 
    611             case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
    612                 CDBG("%s: HAL got request pull from Daemon", __func__);
    613                 pthread_mutex_lock(&obj->mMutex);
    614                 obj->mWokenUpByDaemon = true;
    615                 obj->unblockRequestIfNecessary();
    616                 pthread_mutex_unlock(&obj->mMutex);
    617                 break;
    618 
    619             default:
    620                 CDBG_HIGH("%s: Warning: Unhandled event %d", __func__,
    621                         evt->server_event_type);
    622                 break;
    623         }
    624     } else {
    625         ALOGE("%s: NULL user_data/evt", __func__);
    626     }
    627 }
    628 
    629 /*===========================================================================
    630  * FUNCTION   : openCamera
    631  *
    632  * DESCRIPTION: open camera
    633  *
    634  * PARAMETERS :
    635  *   @hw_device  : double ptr for camera device struct
    636  *
    637  * RETURN     : int32_t type of status
    638  *              NO_ERROR  -- success
    639  *              none-zero failure code
    640  *==========================================================================*/
    641 int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
    642 {
    643     int rc = 0;
    644     if (mCameraOpened) {
    645         *hw_device = NULL;
    646         return PERMISSION_DENIED;
    647     }
    648     m_perfLock.lock_acq();
    649     rc = openCamera();
    650     if (rc == 0) {
    651         *hw_device = &mCameraDevice.common;
    652     } else
    653         *hw_device = NULL;
    654 
    655     m_perfLock.lock_rel();
    656     return rc;
    657 }
    658 
    659 /*===========================================================================
    660  * FUNCTION   : openCamera
    661  *
    662  * DESCRIPTION: open camera
    663  *
    664  * PARAMETERS : none
    665  *
    666  * RETURN     : int32_t type of status
    667  *              NO_ERROR  -- success
    668  *              none-zero failure code
    669  *==========================================================================*/
    670 int QCamera3HardwareInterface::openCamera()
    671 {
    672     int rc = 0;
    673 
    674     ATRACE_CALL();
    675     if (mCameraHandle) {
    676         ALOGE("Failure: Camera already opened");
    677         return ALREADY_EXISTS;
    678     }
    679 
    680     rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    681     if (rc < 0) {
    682         ALOGE("%s: Failed to reserve flash for camera id: %d",
    683                 __func__,
    684                 mCameraId);
    685         return UNKNOWN_ERROR;
    686     }
    687 
    688     rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    689     if (rc) {
    690         ALOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
    691         return rc;
    692     }
    693 
    694     mCameraOpened = true;
    695 
    696     rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
    697             camEvtHandle, (void *)this);
    698 
    699     if (rc < 0) {
    700         ALOGE("%s: Error, failed to register event callback", __func__);
    701         /* Not closing camera here since it is already handled in destructor */
    702         return FAILED_TRANSACTION;
    703     }
    704     mFirstConfiguration = true;
    705     return NO_ERROR;
    706 }
    707 
    708 /*===========================================================================
    709  * FUNCTION   : closeCamera
    710  *
    711  * DESCRIPTION: close camera
    712  *
    713  * PARAMETERS : none
    714  *
    715  * RETURN     : int32_t type of status
    716  *              NO_ERROR  -- success
    717  *              none-zero failure code
    718  *==========================================================================*/
    719 int QCamera3HardwareInterface::closeCamera()
    720 {
    721     ATRACE_CALL();
    722     int rc = NO_ERROR;
    723 
    724     rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    725     mCameraHandle = NULL;
    726     mCameraOpened = false;
    727 
    728     if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
    729         CDBG("%s: Failed to release flash for camera id: %d",
    730                 __func__,
    731                 mCameraId);
    732     }
    733 
    734     return rc;
    735 }
    736 
    737 /*===========================================================================
    738  * FUNCTION   : initialize
    739  *
    740  * DESCRIPTION: Initialize frameworks callback functions
    741  *
    742  * PARAMETERS :
    743  *   @callback_ops : callback function to frameworks
    744  *
    745  * RETURN     :
    746  *
    747  *==========================================================================*/
    748 int QCamera3HardwareInterface::initialize(
    749         const struct camera3_callback_ops *callback_ops)
    750 {
    751     ATRACE_CALL();
    752     int rc;
    753 
    754     pthread_mutex_lock(&mMutex);
    755 
    756     rc = initParameters();
    757     if (rc < 0) {
    758         ALOGE("%s: initParamters failed %d", __func__, rc);
    759        goto err1;
    760     }
    761     mCallbackOps = callback_ops;
    762 
    763     mChannelHandle = mCameraHandle->ops->add_channel(
    764             mCameraHandle->camera_handle, NULL, NULL, this);
    765     if (mChannelHandle == 0) {
    766         ALOGE("%s: add_channel failed", __func__);
    767         rc = -ENOMEM;
    768         pthread_mutex_unlock(&mMutex);
    769         return rc;
    770     }
    771 
    772     pthread_mutex_unlock(&mMutex);
    773     mCameraInitialized = true;
    774     return 0;
    775 
    776 err1:
    777     pthread_mutex_unlock(&mMutex);
    778     return rc;
    779 }
    780 
    781 /*===========================================================================
    782  * FUNCTION   : validateStreamDimensions
    783  *
    784  * DESCRIPTION: Check if the configuration requested are those advertised
    785  *
    786  * PARAMETERS :
    787  *   @stream_list : streams to be configured
    788  *
    789  * RETURN     :
    790  *
    791  *==========================================================================*/
    792 int QCamera3HardwareInterface::validateStreamDimensions(
    793         camera3_stream_configuration_t *streamList)
    794 {
    795     int rc = NO_ERROR;
    796     int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
    797     int32_t available_jpeg_sizes[MAX_SIZES_CNT * 2];
    798     size_t count = 0;
    799 
    800     camera3_stream_t *inputStream = NULL;
    801     /*
    802     * Loop through all streams to find input stream if it exists*
    803     */
    804     for (size_t i = 0; i< streamList->num_streams; i++) {
    805         if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
    806             if (inputStream != NULL) {
    807                 ALOGE("%s: Error, Multiple input streams requested");
    808                 return -EINVAL;
    809             }
    810             inputStream = streamList->streams[i];
    811         }
    812     }
    813     /*
    814     * Loop through all streams requested in configuration
    815     * Check if unsupported sizes have been requested on any of them
    816     */
    817     for (size_t j = 0; j < streamList->num_streams; j++) {
    818         bool sizeFound = false;
    819         size_t jpeg_sizes_cnt = 0;
    820         camera3_stream_t *newStream = streamList->streams[j];
    821 
    822         uint32_t rotatedHeight = newStream->height;
    823         uint32_t rotatedWidth = newStream->width;
    824         if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
    825                 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
    826             rotatedHeight = newStream->width;
    827             rotatedWidth = newStream->height;
    828         }
    829 
    830         /*
    831         * Sizes are different for each type of stream format check against
    832         * appropriate table.
    833         */
    834         switch (newStream->format) {
    835         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
    836         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
    837         case HAL_PIXEL_FORMAT_RAW10:
    838             count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
    839             for (size_t i = 0; i < count; i++) {
    840                 if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
    841                         (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
    842                     sizeFound = true;
    843                     break;
    844                 }
    845             }
    846             break;
    847         case HAL_PIXEL_FORMAT_BLOB:
    848             count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
    849             /* Generate JPEG sizes table */
    850             makeTable(gCamCapability[mCameraId]->picture_sizes_tbl,
    851                     count,
    852                     MAX_SIZES_CNT,
    853                     available_processed_sizes);
    854             jpeg_sizes_cnt = filterJpegSizes(
    855                     available_jpeg_sizes,
    856                     available_processed_sizes,
    857                     count * 2,
    858                     MAX_SIZES_CNT * 2,
    859                     gCamCapability[mCameraId]->active_array_size,
    860                     gCamCapability[mCameraId]->max_downscale_factor);
    861 
    862             /* Verify set size against generated sizes table */
    863             for (size_t i = 0; i < (jpeg_sizes_cnt / 2); i++) {
    864                 if (((int32_t)rotatedWidth == available_jpeg_sizes[i*2]) &&
    865                         ((int32_t)rotatedHeight == available_jpeg_sizes[i*2+1])) {
    866                     sizeFound = true;
    867                     break;
    868                 }
    869             }
    870             break;
    871         case HAL_PIXEL_FORMAT_YCbCr_420_888:
    872         case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
    873         default:
    874             if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
    875                     || newStream->stream_type == CAMERA3_STREAM_INPUT
    876                     || IS_USAGE_ZSL(newStream->usage)) {
    877                 if (((int32_t)rotatedWidth ==
    878                                 gCamCapability[mCameraId]->active_array_size.width) &&
    879                                 ((int32_t)rotatedHeight ==
    880                                 gCamCapability[mCameraId]->active_array_size.height)) {
    881                     sizeFound = true;
    882                     break;
    883                 }
    884                 /* We could potentially break here to enforce ZSL stream
    885                  * set from frameworks always is full active array size
    886                  * but it is not clear from the spc if framework will always
    887                  * follow that, also we have logic to override to full array
    888                  * size, so keeping the logic lenient at the moment
    889                  */
    890             }
    891             count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
    892                     MAX_SIZES_CNT);
    893             for (size_t i = 0; i < count; i++) {
    894                 if (((int32_t)rotatedWidth ==
    895                             gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
    896                             ((int32_t)rotatedHeight ==
    897                             gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
    898                     sizeFound = true;
    899                     break;
    900                 }
    901             }
    902             break;
    903         } /* End of switch(newStream->format) */
    904 
    905         /* We error out even if a single stream has unsupported size set */
    906         if (!sizeFound) {
    907             ALOGE("%s: Error: Unsupported size of  %d x %d requested for stream"
    908                   "type:%d", __func__, rotatedWidth, rotatedHeight,
    909                   newStream->format);
    910             ALOGE("%s: Active array size is  %d x %d", __func__,
    911                     gCamCapability[mCameraId]->active_array_size.width,
    912                     gCamCapability[mCameraId]->active_array_size.height);
    913             rc = -EINVAL;
    914             break;
    915         }
    916     } /* End of for each stream */
    917     return rc;
    918 }
    919 
    920 /*===========================================================================
    921  * FUNCTION   : validateUsageFlags
    922  *
    923  * DESCRIPTION: Check if the configuration usage flags are supported
    924  *
    925  * PARAMETERS :
    926  *   @stream_list : streams to be configured
    927  *
    928  * RETURN     :
    929  *   NO_ERROR if the usage flags are supported
    930  *   error code if usage flags are not supported
    931  *
    932  *==========================================================================*/
    933 int QCamera3HardwareInterface::validateUsageFlags(
    934         const camera3_stream_configuration_t* streamList)
    935 {
    936     for (size_t j = 0; j < streamList->num_streams; j++) {
    937         const camera3_stream_t *newStream = streamList->streams[j];
    938 
    939         if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
    940             (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
    941              newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
    942             continue;
    943         }
    944 
    945         bool isVideo = IS_USAGE_VIDEO(newStream->usage);
    946         bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
    947         bool isZSL = IS_USAGE_ZSL(newStream->usage);
    948 
    949         // Color space for this camera device is guaranteed to be ITU_R_601_FR.
    950         // So color spaces will always match.
    951 
    952         // Check whether underlying formats of shared streams match.
    953         if (isVideo && isPreview) {
    954             ALOGE("Combined video and preview usage flag is not supported");
    955             return -EINVAL;
    956         }
    957         if (isPreview && isZSL) {
    958             ALOGE("Combined preview and zsl usage flag is not supported");
    959             return -EINVAL;
    960         }
    961         if (isVideo && isZSL) {
    962             ALOGE("Combined video and zsl usage flag is not supported");
    963             return -EINVAL;
    964         }
    965     }
    966     return NO_ERROR;
    967 }
    968 
    969 /*==============================================================================
    970  * FUNCTION   : isSupportChannelNeeded
    971  *
    972  * DESCRIPTION: Simple heuristic func to determine if support channels is needed
    973  *
    974  * PARAMETERS :
    975  *   @stream_list : streams to be configured
    976  *   @stream_config_info : the config info for streams to be configured
    977  *
    978  * RETURN     : Boolen true/false decision
    979  *
    980  *==========================================================================*/
    981 bool QCamera3HardwareInterface::isSupportChannelNeeded(
    982         camera3_stream_configuration_t *streamList,
    983         cam_stream_size_info_t stream_config_info)
    984 {
    985     uint32_t i;
    986     bool pprocRequested = false;
    987     /* Check for conditions where PProc pipeline does not have any streams*/
    988     for (i = 0; i < stream_config_info.num_streams; i++) {
    989         if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
    990                 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
    991             pprocRequested = true;
    992             break;
    993         }
    994     }
    995 
    996     if (pprocRequested == false )
    997         return true;
    998 
    999     /* Dummy stream needed if only raw or jpeg streams present */
   1000     for (i = 0; i < streamList->num_streams; i++) {
   1001         switch(streamList->streams[i]->format) {
   1002             case HAL_PIXEL_FORMAT_RAW_OPAQUE:
   1003             case HAL_PIXEL_FORMAT_RAW10:
   1004             case HAL_PIXEL_FORMAT_RAW16:
   1005             case HAL_PIXEL_FORMAT_BLOB:
   1006                 break;
   1007             default:
   1008                 return false;
   1009         }
   1010     }
   1011     return true;
   1012 }
   1013 
   1014 /*==============================================================================
   1015  * FUNCTION   : getSensorOutputSize
   1016  *
   1017  * DESCRIPTION: Get sensor output size based on current stream configuratoin
   1018  *
   1019  * PARAMETERS :
   1020  *   @sensor_dim : sensor output dimension (output)
   1021  *
   1022  * RETURN     : int32_t type of status
   1023  *              NO_ERROR  -- success
   1024  *              none-zero failure code
   1025  *
   1026  *==========================================================================*/
   1027 int32_t QCamera3HardwareInterface::getSensorOutputSize(cam_dimension_t &sensor_dim)
   1028 {
   1029     int32_t rc = NO_ERROR;
   1030 
   1031     cam_dimension_t max_dim = {0, 0};
   1032     for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
   1033         if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
   1034             max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
   1035         if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
   1036             max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
   1037     }
   1038 
   1039     clear_metadata_buffer(mParameters);
   1040 
   1041     rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
   1042             max_dim);
   1043     if (rc != NO_ERROR) {
   1044         ALOGE("%s:Failed to update table for CAM_INTF_PARM_MAX_DIMENSION", __func__);
   1045         return rc;
   1046     }
   1047 
   1048     rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
   1049     if (rc != NO_ERROR) {
   1050         ALOGE("%s: Failed to set CAM_INTF_PARM_MAX_DIMENSION", __func__);
   1051         return rc;
   1052     }
   1053 
   1054     clear_metadata_buffer(mParameters);
   1055     ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_RAW_DIMENSION);
   1056 
   1057     rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
   1058             mParameters);
   1059     if (rc != NO_ERROR) {
   1060         ALOGE("%s: Failed to get CAM_INTF_PARM_RAW_DIMENSION", __func__);
   1061         return rc;
   1062     }
   1063 
   1064     READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_RAW_DIMENSION, sensor_dim);
   1065     ALOGI("%s: sensor output dimension = %d x %d", __func__, sensor_dim.width, sensor_dim.height);
   1066 
   1067     return rc;
   1068 }
   1069 
   1070 /*==============================================================================
   1071  * FUNCTION   : enablePowerHint
   1072  *
   1073  * DESCRIPTION: enable single powerhint for preview and different video modes.
   1074  *
   1075  * PARAMETERS :
   1076  *
   1077  * RETURN     : NULL
   1078  *
   1079  *==========================================================================*/
   1080 void QCamera3HardwareInterface::enablePowerHint()
   1081 {
   1082     if (!mPowerHintEnabled) {
   1083         m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, 1);
   1084         mPowerHintEnabled = true;
   1085     }
   1086 }
   1087 
   1088 /*==============================================================================
   1089  * FUNCTION   : disablePowerHint
   1090  *
   1091  * DESCRIPTION: disable current powerhint.
   1092  *
   1093  * PARAMETERS :
   1094  *
   1095  * RETURN     : NULL
   1096  *
   1097  *==========================================================================*/
   1098 void QCamera3HardwareInterface::disablePowerHint()
   1099 {
   1100     if (mPowerHintEnabled) {
   1101         m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, 0);
   1102         mPowerHintEnabled = false;
   1103     }
   1104 }
   1105 
   1106 /*===========================================================================
   1107  * FUNCTION   : configureStreams
   1108  *
   1109  * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
   1110  *              and output streams.
   1111  *
   1112  * PARAMETERS :
   1113  *   @stream_list : streams to be configured
   1114  *
   1115  * RETURN     :
   1116  *
   1117  *==========================================================================*/
   1118 int QCamera3HardwareInterface::configureStreams(
   1119         camera3_stream_configuration_t *streamList)
   1120 {
   1121     ATRACE_CALL();
   1122     int rc = 0;
   1123 
   1124     // Acquire perfLock before configure streams
   1125     m_perfLock.lock_acq();
   1126     rc = configureStreamsPerfLocked(streamList);
   1127     m_perfLock.lock_rel();
   1128 
   1129     return rc;
   1130 }
   1131 
   1132 /*===========================================================================
   1133  * FUNCTION   : configureStreamsPerfLocked
   1134  *
   1135  * DESCRIPTION: configureStreams while perfLock is held.
   1136  *
   1137  * PARAMETERS :
   1138  *   @stream_list : streams to be configured
   1139  *
   1140  * RETURN     : int32_t type of status
   1141  *              NO_ERROR  -- success
   1142  *              none-zero failure code
   1143  *==========================================================================*/
   1144 int QCamera3HardwareInterface::configureStreamsPerfLocked(
   1145         camera3_stream_configuration_t *streamList)
   1146 {
   1147     ATRACE_CALL();
   1148     int rc = 0;
   1149 
   1150     // Sanity check stream_list
   1151     if (streamList == NULL) {
   1152         ALOGE("%s: NULL stream configuration", __func__);
   1153         return BAD_VALUE;
   1154     }
   1155     if (streamList->streams == NULL) {
   1156         ALOGE("%s: NULL stream list", __func__);
   1157         return BAD_VALUE;
   1158     }
   1159 
   1160     if (streamList->num_streams < 1) {
   1161         ALOGE("%s: Bad number of streams requested: %d", __func__,
   1162                 streamList->num_streams);
   1163         return BAD_VALUE;
   1164     }
   1165 
   1166     if (streamList->num_streams >= MAX_NUM_STREAMS) {
   1167         ALOGE("%s: Maximum number of streams %d exceeded: %d", __func__,
   1168                 MAX_NUM_STREAMS, streamList->num_streams);
   1169         return BAD_VALUE;
   1170     }
   1171 
   1172     rc = validateUsageFlags(streamList);
   1173     if (rc != NO_ERROR) {
   1174         return rc;
   1175     }
   1176 
   1177     mOpMode = streamList->operation_mode;
   1178     CDBG("%s: mOpMode: %d", __func__, mOpMode);
   1179 
   1180     /* first invalidate all the steams in the mStreamList
   1181      * if they appear again, they will be validated */
   1182     for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
   1183             it != mStreamInfo.end(); it++) {
   1184         QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
   1185         if (channel) {
   1186           channel->stop();
   1187         }
   1188         (*it)->status = INVALID;
   1189     }
   1190 
   1191     if (mRawDumpChannel) {
   1192         mRawDumpChannel->stop();
   1193         delete mRawDumpChannel;
   1194         mRawDumpChannel = NULL;
   1195     }
   1196 
   1197     if (mSupportChannel)
   1198         mSupportChannel->stop();
   1199 
   1200     if (mAnalysisChannel) {
   1201         mAnalysisChannel->stop();
   1202     }
   1203     if (mMetadataChannel) {
   1204         /* If content of mStreamInfo is not 0, there is metadata stream */
   1205         mMetadataChannel->stop();
   1206     }
   1207     if (mChannelHandle) {
   1208         mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
   1209                 mChannelHandle);
   1210         ALOGI("%s: stopping channel %d", __func__, mChannelHandle);
   1211     }
   1212 
   1213     pthread_mutex_lock(&mMutex);
   1214 
   1215     /* Check whether we have video stream */
   1216     m_bIs4KVideo = false;
   1217     m_bIsVideo = false;
   1218     m_bEisSupportedSize = false;
   1219     m_bTnrEnabled = false;
   1220     bool isZsl = false;
   1221     uint32_t videoWidth = 0U;
   1222     uint32_t videoHeight = 0U;
   1223     size_t rawStreamCnt = 0;
   1224     size_t stallStreamCnt = 0;
   1225     size_t processedStreamCnt = 0;
   1226     // Number of streams on ISP encoder path
   1227     size_t numStreamsOnEncoder = 0;
   1228     size_t numYuv888OnEncoder = 0;
   1229     bool bYuv888OverrideJpeg = false;
   1230     cam_dimension_t largeYuv888Size = {0, 0};
   1231     cam_dimension_t maxViewfinderSize = {0, 0};
   1232     bool bJpegExceeds4K = false;
   1233     bool bJpegOnEncoder = false;
   1234     bool bUseCommonFeatureMask = false;
   1235     uint32_t commonFeatureMask = 0;
   1236     maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
   1237     camera3_stream_t *inputStream = NULL;
   1238     bool isJpeg = false;
   1239     cam_dimension_t jpegSize = {0, 0};
   1240 
   1241     /*EIS configuration*/
   1242     bool eisSupported = false;
   1243     bool oisSupported = false;
   1244     int32_t margin_index = -1;
   1245     uint8_t eis_prop_set;
   1246     uint32_t maxEisWidth = 0;
   1247     uint32_t maxEisHeight = 0;
   1248     int32_t hal_version = CAM_HAL_V3;
   1249 
   1250     memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
   1251 
   1252     size_t count = IS_TYPE_MAX;
   1253     count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
   1254     for (size_t i = 0; i < count; i++) {
   1255         if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) {
   1256             eisSupported = true;
   1257             margin_index = (int32_t)i;
   1258             break;
   1259         }
   1260     }
   1261 
   1262     count = CAM_OPT_STAB_MAX;
   1263     count = MIN(gCamCapability[mCameraId]->optical_stab_modes_count, count);
   1264     for (size_t i = 0; i < count; i++) {
   1265         if (gCamCapability[mCameraId]->optical_stab_modes[i] ==  CAM_OPT_STAB_ON) {
   1266             oisSupported = true;
   1267             break;
   1268         }
   1269     }
   1270 
   1271     if (eisSupported) {
   1272         maxEisWidth = MAX_EIS_WIDTH;
   1273         maxEisHeight = MAX_EIS_HEIGHT;
   1274     }
   1275 
   1276     /* EIS setprop control */
   1277     char eis_prop[PROPERTY_VALUE_MAX];
   1278     memset(eis_prop, 0, sizeof(eis_prop));
   1279     property_get("persist.camera.eis.enable", eis_prop, "0");
   1280     eis_prop_set = (uint8_t)atoi(eis_prop);
   1281 
   1282     m_bEisEnable = eis_prop_set && (!oisSupported && eisSupported) &&
   1283             (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
   1284 
   1285     /* stream configurations */
   1286     for (size_t i = 0; i < streamList->num_streams; i++) {
   1287         camera3_stream_t *newStream = streamList->streams[i];
   1288         ALOGI("%s: stream[%d] type = %d, format = %d, width = %d, "
   1289                 "height = %d, rotation = %d, usage = 0x%x",
   1290                 __func__, i, newStream->stream_type, newStream->format,
   1291                 newStream->width, newStream->height, newStream->rotation,
   1292                 newStream->usage);
   1293         if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
   1294                 newStream->stream_type == CAMERA3_STREAM_INPUT){
   1295             isZsl = true;
   1296         }
   1297         if (newStream->stream_type == CAMERA3_STREAM_INPUT){
   1298             inputStream = newStream;
   1299         }
   1300 
   1301         if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
   1302             isJpeg = true;
   1303             jpegSize.width = newStream->width;
   1304             jpegSize.height = newStream->height;
   1305             if (newStream->width > VIDEO_4K_WIDTH ||
   1306                     newStream->height > VIDEO_4K_HEIGHT)
   1307                 bJpegExceeds4K = true;
   1308         }
   1309 
   1310         if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
   1311                 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
   1312             m_bIsVideo = true;
   1313             videoWidth = newStream->width;
   1314             videoHeight = newStream->height;
   1315             if ((VIDEO_4K_WIDTH <= newStream->width) &&
   1316                     (VIDEO_4K_HEIGHT <= newStream->height)) {
   1317                 m_bIs4KVideo = true;
   1318             }
   1319             m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
   1320                                   (newStream->height <= maxEisHeight);
   1321         }
   1322         if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
   1323                 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
   1324             switch (newStream->format) {
   1325             case HAL_PIXEL_FORMAT_BLOB:
   1326                 stallStreamCnt++;
   1327                 if (isOnEncoder(maxViewfinderSize, newStream->width,
   1328                         newStream->height)) {
   1329                     commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
   1330                     numStreamsOnEncoder++;
   1331                     bJpegOnEncoder = true;
   1332                 }
   1333                 break;
   1334             case HAL_PIXEL_FORMAT_RAW10:
   1335             case HAL_PIXEL_FORMAT_RAW_OPAQUE:
   1336             case HAL_PIXEL_FORMAT_RAW16:
   1337                 rawStreamCnt++;
   1338                 break;
   1339             case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
   1340                 processedStreamCnt++;
   1341                 if (isOnEncoder(maxViewfinderSize, newStream->width,
   1342                         newStream->height)) {
   1343                     if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
   1344                             IS_USAGE_ZSL(newStream->usage)) {
   1345                         commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
   1346                     } else {
   1347                         commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
   1348                     }
   1349                     numStreamsOnEncoder++;
   1350                 }
   1351                 break;
   1352             case HAL_PIXEL_FORMAT_YCbCr_420_888:
   1353                 processedStreamCnt++;
   1354                 if (isOnEncoder(maxViewfinderSize, newStream->width,
   1355                         newStream->height)) {
   1356                     // If Yuv888 size is not greater than 4K, set feature mask
   1357                     // to SUPERSET so that it support concurrent request on
   1358                     // YUV and JPEG.
   1359                     if (newStream->width <= VIDEO_4K_WIDTH &&
   1360                             newStream->height <= VIDEO_4K_HEIGHT) {
   1361                         commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
   1362                     } else {
   1363                         commonFeatureMask |= CAM_QCOM_FEATURE_NONE;
   1364                     }
   1365                     numStreamsOnEncoder++;
   1366                     numYuv888OnEncoder++;
   1367                     largeYuv888Size.width = newStream->width;
   1368                     largeYuv888Size.height = newStream->height;
   1369                 }
   1370                 break;
   1371             default:
   1372                 processedStreamCnt++;
   1373                 if (isOnEncoder(maxViewfinderSize, newStream->width,
   1374                         newStream->height)) {
   1375                     commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
   1376                     numStreamsOnEncoder++;
   1377                 }
   1378                 break;
   1379             }
   1380 
   1381         }
   1382     }
   1383 
   1384     if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
   1385         !m_bIsVideo) {
   1386         m_bEisEnable = false;
   1387     }
   1388 
   1389     /* Logic to enable/disable TNR based on specific config size/etc.*/
   1390     if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
   1391             ((videoWidth == 1920 && videoHeight == 1080) ||
   1392             (videoWidth == 1280 && videoHeight == 720)) &&
   1393             (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
   1394         m_bTnrEnabled = true;
   1395 
   1396     /* Check if num_streams is sane */
   1397     if (stallStreamCnt > MAX_STALLING_STREAMS ||
   1398             rawStreamCnt > MAX_RAW_STREAMS ||
   1399             processedStreamCnt > MAX_PROCESSED_STREAMS) {
   1400         ALOGE("%s: Invalid stream configu: stall: %d, raw: %d, processed %d",
   1401                 __func__, stallStreamCnt, rawStreamCnt, processedStreamCnt);
   1402         pthread_mutex_unlock(&mMutex);
   1403         return -EINVAL;
   1404     }
   1405     /* Check whether we have zsl stream or 4k video case */
   1406     if (isZsl && m_bIsVideo) {
   1407         ALOGE("%s: Currently invalid configuration ZSL&Video!", __func__);
   1408         pthread_mutex_unlock(&mMutex);
   1409         return -EINVAL;
   1410     }
   1411     /* Check if stream sizes are sane */
   1412     if (numStreamsOnEncoder > 2) {
   1413         ALOGE("%s: Number of streams on ISP encoder path exceeds limits of 2",
   1414                 __func__);
   1415         pthread_mutex_unlock(&mMutex);
   1416         return -EINVAL;
   1417     } else if (1 < numStreamsOnEncoder){
   1418         bUseCommonFeatureMask = true;
   1419         CDBG_HIGH("%s: Multiple streams above max viewfinder size, common mask needed",
   1420                 __func__);
   1421     }
   1422 
   1423     /* Check if BLOB size is greater than 4k in 4k recording case */
   1424     if (m_bIs4KVideo && bJpegExceeds4K) {
   1425         ALOGE("%s: HAL doesn't support Blob size greater than 4k in 4k recording",
   1426                 __func__);
   1427         pthread_mutex_unlock(&mMutex);
   1428         return -EINVAL;
   1429     }
   1430 
   1431     // When JPEG and preview streams share VFE output, CPP will not apply CAC2
   1432     // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
   1433     // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
   1434     // is not true. Otherwise testMandatoryOutputCombinations will fail with following
   1435     // configurations:
   1436     //    {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
   1437     //    {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
   1438     //    (These two configurations will not have CAC2 enabled even in HQ modes.)
   1439     if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
   1440         ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
   1441                 __func__);
   1442         pthread_mutex_unlock(&mMutex);
   1443         return -EINVAL;
   1444     }
   1445 
    // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
    // the YUV stream's size is strictly greater than the JPEG size (in both
    // width and height), set common postprocess mask to NONE, so that we can
    // take advantage of postproc bypass.
   1449     if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
   1450             jpegSize.width, jpegSize.height) &&
   1451             largeYuv888Size.width > jpegSize.width &&
   1452             largeYuv888Size.height > jpegSize.height) {
   1453         bYuv888OverrideJpeg = true;
   1454     } else if (!isJpeg && numStreamsOnEncoder > 1) {
   1455         commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
   1456     }
   1457 
   1458     rc = validateStreamDimensions(streamList);
   1459     if (rc == NO_ERROR) {
   1460         rc = validateStreamRotations(streamList);
   1461     }
   1462     if (rc != NO_ERROR) {
   1463         ALOGE("%s: Invalid stream configuration requested!", __func__);
   1464         pthread_mutex_unlock(&mMutex);
   1465         return rc;
   1466     }
   1467 
   1468     camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
   1469     camera3_stream_t *jpegStream = NULL;
   1470     for (size_t i = 0; i < streamList->num_streams; i++) {
   1471         camera3_stream_t *newStream = streamList->streams[i];
   1472         CDBG_HIGH("%s: newStream type = %d, stream format = %d "
   1473                 "stream size : %d x %d, stream rotation = %d",
   1474                 __func__, newStream->stream_type, newStream->format,
   1475                 newStream->width, newStream->height, newStream->rotation);
   1476         //if the stream is in the mStreamList validate it
   1477         bool stream_exists = false;
   1478         for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
   1479                 it != mStreamInfo.end(); it++) {
   1480             if ((*it)->stream == newStream) {
   1481                 QCamera3ProcessingChannel *channel =
   1482                     (QCamera3ProcessingChannel*)(*it)->stream->priv;
   1483                 stream_exists = true;
   1484                 if (channel)
   1485                     delete channel;
   1486                 (*it)->status = VALID;
   1487                 (*it)->stream->priv = NULL;
   1488                 (*it)->channel = NULL;
   1489             }
   1490         }
   1491         if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
   1492             //new stream
   1493             stream_info_t* stream_info;
   1494             stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
   1495             if (!stream_info) {
   1496                ALOGE("%s: Could not allocate stream info", __func__);
   1497                rc = -ENOMEM;
   1498                pthread_mutex_unlock(&mMutex);
   1499                return rc;
   1500             }
   1501             stream_info->stream = newStream;
   1502             stream_info->status = VALID;
   1503             stream_info->channel = NULL;
   1504             mStreamInfo.push_back(stream_info);
   1505         }
   1506         /* Covers Opaque ZSL and API1 F/W ZSL */
   1507         if (IS_USAGE_ZSL(newStream->usage)
   1508                 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
   1509             if (zslStream != NULL) {
   1510                 ALOGE("%s: Multiple input/reprocess streams requested!", __func__);
   1511                 pthread_mutex_unlock(&mMutex);
   1512                 return BAD_VALUE;
   1513             }
   1514             zslStream = newStream;
   1515         }
   1516         /* Covers YUV reprocess */
   1517         if (inputStream != NULL) {
   1518             if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
   1519                     && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
   1520                     && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
   1521                     && inputStream->width == newStream->width
   1522                     && inputStream->height == newStream->height) {
   1523                 if (zslStream != NULL) {
                    /* This scenario indicates multiple YUV streams with same size
                     * as input stream have been requested, since zsl stream handle
                     * is solely used for the purpose of overriding the size of streams
                     * which share h/w streams we will just make a guess here as to
                     * which of the streams is the ZSL stream, this will be refactored
                     * once we make generic logic for streams sharing encoder output
                     */
   1531                     CDBG_HIGH("%s: Warning, Multiple ip/reprocess streams requested!", __func__);
   1532                 }
   1533                 zslStream = newStream;
   1534             }
   1535         }
   1536         if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
   1537             jpegStream = newStream;
   1538         }
   1539     }
   1540 
   1541     /* If a zsl stream is set, we know that we have configured at least one input or
   1542        bidirectional stream */
   1543     if (NULL != zslStream) {
   1544         mInputStreamInfo.dim.width = (int32_t)zslStream->width;
   1545         mInputStreamInfo.dim.height = (int32_t)zslStream->height;
   1546         mInputStreamInfo.format = zslStream->format;
   1547         mInputStreamInfo.usage = zslStream->usage;
   1548         CDBG("%s: Input stream configured! %d x %d, format %d, usage %d",
   1549                 __func__, mInputStreamInfo.dim.width,
   1550                 mInputStreamInfo.dim.height,
   1551                 mInputStreamInfo.format, mInputStreamInfo.usage);
   1552     }
   1553 
   1554     cleanAndSortStreamInfo();
   1555     if (mMetadataChannel) {
   1556         delete mMetadataChannel;
   1557         mMetadataChannel = NULL;
   1558     }
   1559     if (mSupportChannel) {
   1560         delete mSupportChannel;
   1561         mSupportChannel = NULL;
   1562     }
   1563 
   1564     if (mAnalysisChannel) {
   1565         delete mAnalysisChannel;
   1566         mAnalysisChannel = NULL;
   1567     }
   1568 
   1569     if (mDummyBatchChannel) {
   1570         delete mDummyBatchChannel;
   1571         mDummyBatchChannel = NULL;
   1572     }
   1573 
   1574     //Create metadata channel and initialize it
   1575     mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
   1576                     mChannelHandle, mCameraHandle->ops, captureResultCb,
   1577                     &gCamCapability[mCameraId]->padding_info, CAM_QCOM_FEATURE_NONE, this);
   1578     if (mMetadataChannel == NULL) {
   1579         ALOGE("%s: failed to allocate metadata channel", __func__);
   1580         rc = -ENOMEM;
   1581         pthread_mutex_unlock(&mMutex);
   1582         return rc;
   1583     }
   1584     rc = mMetadataChannel->initialize(IS_TYPE_NONE);
   1585     if (rc < 0) {
   1586         ALOGE("%s: metadata channel initialization failed", __func__);
   1587         delete mMetadataChannel;
   1588         mMetadataChannel = NULL;
   1589         pthread_mutex_unlock(&mMutex);
   1590         return rc;
   1591     }
   1592 
   1593     // Create analysis stream all the time, even when h/w support is not available
   1594     {
   1595         mAnalysisChannel = new QCamera3SupportChannel(
   1596                 mCameraHandle->camera_handle,
   1597                 mChannelHandle,
   1598                 mCameraHandle->ops,
   1599                 &gCamCapability[mCameraId]->padding_info,
   1600                 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3,
   1601                 CAM_STREAM_TYPE_ANALYSIS,
   1602                 &gCamCapability[mCameraId]->analysis_recommended_res,
   1603                 gCamCapability[mCameraId]->analysis_recommended_format,
   1604                 this,
   1605                 0); // force buffer count to 0
   1606         if (!mAnalysisChannel) {
   1607             ALOGE("%s: H/W Analysis channel cannot be created", __func__);
   1608             pthread_mutex_unlock(&mMutex);
   1609             return -ENOMEM;
   1610         }
   1611     }
   1612 
   1613     bool isRawStreamRequested = false;
   1614     memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
   1615     /* Allocate channel objects for the requested streams */
   1616     for (size_t i = 0; i < streamList->num_streams; i++) {
   1617         camera3_stream_t *newStream = streamList->streams[i];
   1618         uint32_t stream_usage = newStream->usage;
   1619         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
   1620         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
   1621         if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
   1622                 || IS_USAGE_ZSL(newStream->usage)) &&
   1623             newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
   1624             mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
   1625             if (bUseCommonFeatureMask) {
   1626                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
   1627                         commonFeatureMask;
   1628             } else {
   1629                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
   1630                         CAM_QCOM_FEATURE_NONE;
   1631             }
   1632 
   1633         } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
   1634                 CDBG_HIGH("%s: Input stream configured, reprocess config", __func__);
   1635         } else {
   1636             //for non zsl streams find out the format
   1637             switch (newStream->format) {
   1638             case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
   1639               {
   1640                  mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
   1641                          = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
   1642 
   1643                  if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
   1644 
   1645                      mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_VIDEO;
   1646                      if (m_bTnrEnabled && m_bTnrVideo) {
   1647                          mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
   1648                              CAM_QCOM_FEATURE_CPP_TNR;
   1649                      }
   1650 
   1651                  } else {
   1652 
   1653                      mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_PREVIEW;
   1654                      if (m_bTnrEnabled && m_bTnrPreview) {
   1655                          mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
   1656                              CAM_QCOM_FEATURE_CPP_TNR;
   1657                      }
   1658                  }
   1659 
   1660                  if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
   1661                          (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
   1662                      mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
   1663                              newStream->height;
   1664                      mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
   1665                              newStream->width;
   1666                  }
   1667               }
   1668               break;
   1669            case HAL_PIXEL_FORMAT_YCbCr_420_888:
   1670               mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
   1671               if (isOnEncoder(maxViewfinderSize, newStream->width,
   1672                       newStream->height)) {
   1673                   if (bUseCommonFeatureMask)
   1674                       mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
   1675                               commonFeatureMask;
   1676                   else
   1677                       mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
   1678                               CAM_QCOM_FEATURE_NONE;
   1679               } else {
   1680                   mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
   1681                           CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
   1682               }
   1683               break;
   1684            case HAL_PIXEL_FORMAT_BLOB:
   1685               mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
   1686               if (m_bIs4KVideo && !isZsl) {
   1687                   mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
   1688                           = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
   1689               } else {
   1690                   if (bUseCommonFeatureMask &&
   1691                           isOnEncoder(maxViewfinderSize, newStream->width,
   1692                                   newStream->height)) {
   1693                       mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
   1694                   } else {
   1695                       mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
   1696                   }
   1697               }
   1698               if (isZsl) {
   1699                   if (zslStream) {
   1700                       mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
   1701                               (int32_t)zslStream->width;
   1702                       mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
   1703                               (int32_t)zslStream->height;
   1704                   } else {
   1705                       ALOGE("%s: Error, No ZSL stream identified",__func__);
   1706                       pthread_mutex_unlock(&mMutex);
   1707                       return -EINVAL;
   1708                   }
   1709               } else if (m_bIs4KVideo) {
   1710                   mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
   1711                           (int32_t)videoWidth;
   1712                   mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
   1713                           (int32_t)videoHeight;
   1714               } else if (bYuv888OverrideJpeg) {
   1715                   mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
   1716                           (int32_t)largeYuv888Size.width;
   1717                   mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
   1718                           (int32_t)largeYuv888Size.height;
   1719               }
   1720               break;
   1721            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
   1722            case HAL_PIXEL_FORMAT_RAW16:
   1723            case HAL_PIXEL_FORMAT_RAW10:
   1724               mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
   1725               isRawStreamRequested = true;
   1726               break;
   1727            default:
   1728               mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
   1729               mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
   1730               break;
   1731             }
   1732 
   1733         }
   1734 
   1735         if (newStream->priv == NULL) {
   1736             //New stream, construct channel
   1737             switch (newStream->stream_type) {
   1738             case CAMERA3_STREAM_INPUT:
   1739                 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
   1740                 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
   1741                 break;
   1742             case CAMERA3_STREAM_BIDIRECTIONAL:
   1743                 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
   1744                     GRALLOC_USAGE_HW_CAMERA_WRITE;
   1745                 break;
   1746             case CAMERA3_STREAM_OUTPUT:
                /* For video encoding streams, set the read/write rarely
                 * flags so that the buffers may be set to un-cached */
   1749                 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
   1750                     newStream->usage |=
   1751                          (GRALLOC_USAGE_SW_READ_RARELY |
   1752                          GRALLOC_USAGE_SW_WRITE_RARELY |
   1753                          GRALLOC_USAGE_HW_CAMERA_WRITE);
   1754                 else if (IS_USAGE_ZSL(newStream->usage))
   1755                     CDBG("%s: ZSL usage flag skipping", __func__);
   1756                 else if (newStream == zslStream
   1757                         || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
   1758                     newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
   1759                 } else
   1760                     newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
   1761                 break;
   1762             default:
   1763                 ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
   1764                 break;
   1765             }
   1766 
   1767             if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
   1768                     newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
   1769                 QCamera3ProcessingChannel *channel = NULL;
   1770                 switch (newStream->format) {
   1771                 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
   1772                     if ((newStream->usage &
   1773                             private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
   1774                             (streamList->operation_mode ==
   1775                             CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
   1776                     ) {
   1777                         channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
   1778                                 mChannelHandle, mCameraHandle->ops, captureResultCb,
   1779                                 &gCamCapability[mCameraId]->padding_info,
   1780                                 this,
   1781                                 newStream,
   1782                                 (cam_stream_type_t)
   1783                                         mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
   1784                                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
   1785                                 mMetadataChannel,
   1786                                 0); //heap buffers are not required for HFR video channel
   1787                         if (channel == NULL) {
   1788                             ALOGE("%s: allocation of channel failed", __func__);
   1789                             pthread_mutex_unlock(&mMutex);
   1790                             return -ENOMEM;
   1791                         }
                        //channel->getNumBuffers() will return 0 here so use
                        //MAX_INFLIGHT_HFR_REQUESTS
   1794                         newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
   1795                         newStream->priv = channel;
   1796                         ALOGI("%s: num video buffers in HFR mode: %d",
   1797                                 __func__, MAX_INFLIGHT_HFR_REQUESTS);
   1798                     } else {
   1799                         /* Copy stream contents in HFR preview only case to create
   1800                          * dummy batch channel so that sensor streaming is in
   1801                          * HFR mode */
   1802                         if (!m_bIsVideo && (streamList->operation_mode ==
   1803                                 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
   1804                             mDummyBatchStream = *newStream;
   1805                         }
   1806                         channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
   1807                                 mChannelHandle, mCameraHandle->ops, captureResultCb,
   1808                                 &gCamCapability[mCameraId]->padding_info,
   1809                                 this,
   1810                                 newStream,
   1811                                 (cam_stream_type_t)
   1812                                         mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
   1813                                 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
   1814                                 mMetadataChannel,
   1815                                 MAX_INFLIGHT_REQUESTS);
   1816                         if (channel == NULL) {
   1817                             ALOGE("%s: allocation of channel failed", __func__);
   1818                             pthread_mutex_unlock(&mMutex);
   1819                             return -ENOMEM;
   1820                         }
   1821                         newStream->max_buffers = channel->getNumBuffers();
   1822                         newStream->priv = channel;
   1823                     }
   1824                     break;
   1825                 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
   1826                     channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
   1827                             mChannelHandle,
   1828                             mCameraHandle->ops, captureResultCb,
   1829                             &gCamCapability[mCameraId]->padding_info,
   1830                             this,
   1831                             newStream,
   1832                             (cam_stream_type_t)
   1833                                     mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
   1834                             mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
   1835                             mMetadataChannel);
   1836                     if (channel == NULL) {
   1837                         ALOGE("%s: allocation of YUV channel failed", __func__);
   1838                         pthread_mutex_unlock(&mMutex);
   1839                         return -ENOMEM;
   1840                     }
   1841                     newStream->max_buffers = channel->getNumBuffers();
   1842                     newStream->priv = channel;
   1843                     break;
   1844                 }
   1845                 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
   1846                 case HAL_PIXEL_FORMAT_RAW16:
   1847                 case HAL_PIXEL_FORMAT_RAW10:
   1848                     mRawChannel = new QCamera3RawChannel(
   1849                             mCameraHandle->camera_handle, mChannelHandle,
   1850                             mCameraHandle->ops, captureResultCb,
   1851                             &gCamCapability[mCameraId]->padding_info,
   1852                             this, newStream, CAM_QCOM_FEATURE_NONE,
   1853                             mMetadataChannel,
   1854                             (newStream->format == HAL_PIXEL_FORMAT_RAW16));
   1855                     if (mRawChannel == NULL) {
   1856                         ALOGE("%s: allocation of raw channel failed", __func__);
   1857                         pthread_mutex_unlock(&mMutex);
   1858                         return -ENOMEM;
   1859                     }
   1860                     newStream->max_buffers = mRawChannel->getNumBuffers();
   1861                     newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
   1862                     break;
   1863                 case HAL_PIXEL_FORMAT_BLOB:
   1864                     // Max live snapshot inflight buffer is 1. This is to mitigate
   1865                     // frame drop issues for video snapshot. The more buffers being
   1866                     // allocated, the more frame drops there are.
   1867                     mPictureChannel = new QCamera3PicChannel(
   1868                             mCameraHandle->camera_handle, mChannelHandle,
   1869                             mCameraHandle->ops, captureResultCb,
   1870                             &gCamCapability[mCameraId]->padding_info, this, newStream,
   1871                             mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
   1872                             m_bIs4KVideo, isZsl, mMetadataChannel,
   1873                             (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
   1874                     if (mPictureChannel == NULL) {
   1875                         ALOGE("%s: allocation of channel failed", __func__);
   1876                         pthread_mutex_unlock(&mMutex);
   1877                         return -ENOMEM;
   1878                     }
   1879                     newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
   1880                     newStream->max_buffers = mPictureChannel->getNumBuffers();
   1881                     mPictureChannel->overrideYuvSize(
   1882                             mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
   1883                             mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
   1884                     break;
   1885 
   1886                 default:
   1887                     ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
   1888                     pthread_mutex_unlock(&mMutex);
   1889                     return -EINVAL;
   1890                 }
   1891             } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
   1892                 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
   1893             } else {
   1894                 ALOGE("%s: Error, Unknown stream type", __func__);
   1895                 return -EINVAL;
   1896             }
   1897 
   1898             for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
   1899                     it != mStreamInfo.end(); it++) {
   1900                 if ((*it)->stream == newStream) {
   1901                     (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
   1902                     break;
   1903                 }
   1904             }
   1905         } else {
   1906             // Channel already exists for this stream
   1907             // Do nothing for now
   1908         }
   1909 
        /* Do not add entries for input stream in metastream info
         * since there is no real stream associated with it
         */
   1913         if (newStream->stream_type != CAMERA3_STREAM_INPUT)
   1914             mStreamConfigInfo.num_streams++;
   1915     }
   1916 
   1917     //RAW DUMP channel
   1918     if (mEnableRawDump && isRawStreamRequested == false){
   1919         cam_dimension_t rawDumpSize;
   1920         rawDumpSize = getMaxRawSize(mCameraId);
   1921         mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
   1922                                   mChannelHandle,
   1923                                   mCameraHandle->ops,
   1924                                   rawDumpSize,
   1925                                   &gCamCapability[mCameraId]->padding_info,
   1926                                   this, CAM_QCOM_FEATURE_NONE);
   1927         if (!mRawDumpChannel) {
   1928             ALOGE("%s: Raw Dump channel cannot be created", __func__);
   1929             pthread_mutex_unlock(&mMutex);
   1930             return -ENOMEM;
   1931         }
   1932     }
   1933 
   1934 
   1935     if (mAnalysisChannel) {
   1936         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
   1937                 gCamCapability[mCameraId]->analysis_recommended_res;
   1938         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
   1939                 CAM_STREAM_TYPE_ANALYSIS;
   1940         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
   1941                 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
   1942         mStreamConfigInfo.num_streams++;
   1943     }
   1944 
   1945     if (isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
   1946         mSupportChannel = new QCamera3SupportChannel(
   1947                 mCameraHandle->camera_handle,
   1948                 mChannelHandle,
   1949                 mCameraHandle->ops,
   1950                 &gCamCapability[mCameraId]->padding_info,
   1951                 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3,
   1952                 CAM_STREAM_TYPE_CALLBACK,
   1953                 &QCamera3SupportChannel::kDim,
   1954                 CAM_FORMAT_YUV_420_NV21,
   1955                 this);
   1956         if (!mSupportChannel) {
   1957             ALOGE("%s: dummy channel cannot be created", __func__);
   1958             pthread_mutex_unlock(&mMutex);
   1959             return -ENOMEM;
   1960         }
   1961     }
   1962 
   1963     if (mSupportChannel) {
   1964         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
   1965                 QCamera3SupportChannel::kDim;
   1966         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
   1967                 CAM_STREAM_TYPE_CALLBACK;
   1968         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
   1969                 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
   1970         mStreamConfigInfo.num_streams++;
   1971     }
   1972 
   1973     if (mRawDumpChannel) {
   1974         cam_dimension_t rawSize;
   1975         rawSize = getMaxRawSize(mCameraId);
   1976         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
   1977                 rawSize;
   1978         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
   1979                 CAM_STREAM_TYPE_RAW;
   1980         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
   1981                 CAM_QCOM_FEATURE_NONE;
   1982         mStreamConfigInfo.num_streams++;
   1983     }
   1984     /* In HFR mode, if video stream is not added, create a dummy channel so that
   1985      * ISP can create a batch mode even for preview only case. This channel is
   1986      * never 'start'ed (no stream-on), it is only 'initialized'  */
   1987     if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
   1988             !m_bIsVideo) {
   1989         mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
   1990                 mChannelHandle,
   1991                 mCameraHandle->ops, captureResultCb,
   1992                 &gCamCapability[mCameraId]->padding_info,
   1993                 this,
   1994                 &mDummyBatchStream,
   1995                 CAM_STREAM_TYPE_VIDEO,
   1996                 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3,
   1997                 mMetadataChannel);
   1998         if (NULL == mDummyBatchChannel) {
   1999             ALOGE("%s: creation of mDummyBatchChannel failed."
   2000                     "Preview will use non-hfr sensor mode ", __func__);
   2001         }
   2002     }
   2003     if (mDummyBatchChannel) {
   2004         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
   2005                 mDummyBatchStream.width;
   2006         mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
   2007                 mDummyBatchStream.height;
   2008         mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
   2009                 CAM_STREAM_TYPE_VIDEO;
   2010         mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
   2011                 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
   2012         mStreamConfigInfo.num_streams++;
   2013     }
   2014 
   2015     mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
   2016     mStreamConfigInfo.buffer_info.max_buffers =
   2017             m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
   2018 
   2019     /* Initialize mPendingRequestInfo and mPendnigBuffersMap */
   2020     for (pendingRequestIterator i = mPendingRequestsList.begin();
   2021             i != mPendingRequestsList.end();) {
   2022         i = erasePendingRequest(i);
   2023     }
   2024     mPendingFrameDropList.clear();
   2025     // Initialize/Reset the pending buffers list
   2026     mPendingBuffersMap.num_buffers = 0;
   2027     mPendingBuffersMap.mPendingBufferList.clear();
   2028     mPendingReprocessResultList.clear();
   2029 
   2030     mFirstRequest = true;
   2031     mCurJpegMeta.clear();
   2032     //Get min frame duration for this streams configuration
   2033     deriveMinFrameDuration();
   2034 
   2035     /* Turn on video hint only if video stream is configured */
   2036 
   2037     pthread_mutex_unlock(&mMutex);
   2038 
   2039     return rc;
   2040 }
   2041 
   2042 /*===========================================================================
   2043  * FUNCTION   : validateCaptureRequest
   2044  *
   2045  * DESCRIPTION: validate a capture request from camera service
   2046  *
   2047  * PARAMETERS :
   2048  *   @request : request from framework to process
   2049  *
   2050  * RETURN     :
   2051  *
   2052  *==========================================================================*/
   2053 int QCamera3HardwareInterface::validateCaptureRequest(
   2054                     camera3_capture_request_t *request)
   2055 {
   2056     ssize_t idx = 0;
   2057     const camera3_stream_buffer_t *b;
   2058     CameraMetadata meta;
   2059 
   2060     /* Sanity check the request */
   2061     if (request == NULL) {
   2062         ALOGE("%s: NULL capture request", __func__);
   2063         return BAD_VALUE;
   2064     }
   2065 
   2066     if (request->settings == NULL && mFirstRequest) {
   2067         /*settings cannot be null for the first request*/
   2068         return BAD_VALUE;
   2069     }
   2070 
   2071     uint32_t frameNumber = request->frame_number;
   2072     if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
   2073         ALOGE("%s: Request %d: No output buffers provided!",
   2074                 __FUNCTION__, frameNumber);
   2075         return BAD_VALUE;
   2076     }
   2077     if (request->num_output_buffers >= MAX_NUM_STREAMS) {
   2078         ALOGE("%s: Number of buffers %d equals or is greater than maximum number of streams!",
   2079                 __func__, request->num_output_buffers, MAX_NUM_STREAMS);
   2080         return BAD_VALUE;
   2081     }
   2082     if (request->input_buffer != NULL) {
   2083         b = request->input_buffer;
   2084         if (b->status != CAMERA3_BUFFER_STATUS_OK) {
   2085             ALOGE("%s: Request %d: Buffer %ld: Status not OK!",
   2086                     __func__, frameNumber, (long)idx);
   2087             return BAD_VALUE;
   2088         }
   2089         if (b->release_fence != -1) {
   2090             ALOGE("%s: Request %d: Buffer %ld: Has a release fence!",
   2091                     __func__, frameNumber, (long)idx);
   2092             return BAD_VALUE;
   2093         }
   2094         if (b->buffer == NULL) {
   2095             ALOGE("%s: Request %d: Buffer %ld: NULL buffer handle!",
   2096                     __func__, frameNumber, (long)idx);
   2097             return BAD_VALUE;
   2098         }
   2099     }
   2100 
   2101     // Validate all buffers
   2102     b = request->output_buffers;
   2103     do {
   2104         QCamera3ProcessingChannel *channel =
   2105                 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
   2106         if (channel == NULL) {
   2107             ALOGE("%s: Request %d: Buffer %ld: Unconfigured stream!",
   2108                     __func__, frameNumber, (long)idx);
   2109             return BAD_VALUE;
   2110         }
   2111         if (b->status != CAMERA3_BUFFER_STATUS_OK) {
   2112             ALOGE("%s: Request %d: Buffer %ld: Status not OK!",
   2113                     __func__, frameNumber, (long)idx);
   2114             return BAD_VALUE;
   2115         }
   2116         if (b->release_fence != -1) {
   2117             ALOGE("%s: Request %d: Buffer %ld: Has a release fence!",
   2118                     __func__, frameNumber, (long)idx);
   2119             return BAD_VALUE;
   2120         }
   2121         if (b->buffer == NULL) {
   2122             ALOGE("%s: Request %d: Buffer %ld: NULL buffer handle!",
   2123                     __func__, frameNumber, (long)idx);
   2124             return BAD_VALUE;
   2125         }
   2126         if (*(b->buffer) == NULL) {
   2127             ALOGE("%s: Request %d: Buffer %ld: NULL private handle!",
   2128                     __func__, frameNumber, (long)idx);
   2129             return BAD_VALUE;
   2130         }
   2131         idx++;
   2132         b = request->output_buffers + idx;
   2133     } while (idx < (ssize_t)request->num_output_buffers);
   2134 
   2135     return NO_ERROR;
   2136 }
   2137 
   2138 /*===========================================================================
   2139  * FUNCTION   : deriveMinFrameDuration
   2140  *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
 *              on currently configured streams.
   2143  *
   2144  * PARAMETERS : NONE
   2145  *
   2146  * RETURN     : NONE
   2147  *
   2148  *==========================================================================*/
   2149 void QCamera3HardwareInterface::deriveMinFrameDuration()
   2150 {
   2151     int32_t maxJpegDim, maxProcessedDim, maxRawDim;
   2152 
   2153     maxJpegDim = 0;
   2154     maxProcessedDim = 0;
   2155     maxRawDim = 0;
   2156 
   2157     // Figure out maximum jpeg, processed, and raw dimensions
   2158     for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
   2159         it != mStreamInfo.end(); it++) {
   2160 
   2161         // Input stream doesn't have valid stream_type
   2162         if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
   2163             continue;
   2164 
   2165         int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
   2166         if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
   2167             if (dimension > maxJpegDim)
   2168                 maxJpegDim = dimension;
   2169         } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
   2170                 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
   2171                 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
   2172             if (dimension > maxRawDim)
   2173                 maxRawDim = dimension;
   2174         } else {
   2175             if (dimension > maxProcessedDim)
   2176                 maxProcessedDim = dimension;
   2177         }
   2178     }
   2179 
   2180     size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
   2181             MAX_SIZES_CNT);
   2182 
   2183     //Assume all jpeg dimensions are in processed dimensions.
   2184     if (maxJpegDim > maxProcessedDim)
   2185         maxProcessedDim = maxJpegDim;
   2186     //Find the smallest raw dimension that is greater or equal to jpeg dimension
   2187     if (maxProcessedDim > maxRawDim) {
   2188         maxRawDim = INT32_MAX;
   2189 
   2190         for (size_t i = 0; i < count; i++) {
   2191             int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
   2192                     gCamCapability[mCameraId]->raw_dim[i].height;
   2193             if (dimension >= maxProcessedDim && dimension < maxRawDim)
   2194                 maxRawDim = dimension;
   2195         }
   2196     }
   2197 
   2198     //Find minimum durations for processed, jpeg, and raw
   2199     for (size_t i = 0; i < count; i++) {
   2200         if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
   2201                 gCamCapability[mCameraId]->raw_dim[i].height) {
   2202             mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
   2203             break;
   2204         }
   2205     }
   2206     count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
   2207     for (size_t i = 0; i < count; i++) {
   2208         if (maxProcessedDim ==
   2209                 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
   2210                 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
   2211             mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
   2212             mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
   2213             break;
   2214         }
   2215     }
   2216 }
   2217 
   2218 /*===========================================================================
   2219  * FUNCTION   : getMinFrameDuration
   2220  *
 * DESCRIPTION: get minimum frame duration based on the previously derived
 *              minimum frame durations and the current request configuration.
 *
 * PARAMETERS : @request: request sent by the frameworks
 *
 * RETURN     : min frame duration for a particular request
   2227  *
   2228  *==========================================================================*/
   2229 int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
   2230 {
   2231     bool hasJpegStream = false;
   2232     bool hasRawStream = false;
   2233     for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
   2234         const camera3_stream_t *stream = request->output_buffers[i].stream;
   2235         if (stream->format == HAL_PIXEL_FORMAT_BLOB)
   2236             hasJpegStream = true;
   2237         else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
   2238                 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
   2239                 stream->format == HAL_PIXEL_FORMAT_RAW16)
   2240             hasRawStream = true;
   2241     }
   2242 
   2243     if (!hasJpegStream)
   2244         return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
   2245     else
   2246         return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
   2247 }
   2248 
   2249 /*===========================================================================
   2250  * FUNCTION   : handlePendingReprocResults
   2251  *
   2252  * DESCRIPTION: check and notify on any pending reprocess results
   2253  *
   2254  * PARAMETERS :
   2255  *   @frame_number   : Pending request frame number
   2256  *
   2257  * RETURN     : int32_t type of status
   2258  *              NO_ERROR  -- success
   2259  *              none-zero failure code
   2260  *==========================================================================*/
int32_t QCamera3HardwareInterface::handlePendingReprocResults(uint32_t frame_number)
{
    // Scan the held-back reprocess results for an entry matching this frame.
    for (List<PendingReprocessResult>::iterator j = mPendingReprocessResultList.begin();
            j != mPendingReprocessResultList.end(); j++) {
        if (j->frame_number == frame_number) {
            // Deliver the notify message that was delayed for this reprocess frame.
            mCallbackOps->notify(mCallbackOps, &j->notify_msg);

            CDBG("%s: Delayed reprocess notify %d", __func__,
                    frame_number);

            // Locate the matching pending request so its input buffer and
            // settings can be returned together with the reprocessed buffer.
            for (pendingRequestIterator k = mPendingRequestsList.begin();
                    k != mPendingRequestsList.end(); k++) {

                if (k->frame_number == j->frame_number) {
                    CDBG("%s: Found reprocess frame number %d in pending reprocess List "
                            "Take it out!!", __func__,
                            k->frame_number);

                    // Assemble a complete capture result: the single
                    // reprocessed output buffer plus the request's original
                    // input buffer and settings, marked as the final partial.
                    camera3_capture_result result;
                    memset(&result, 0, sizeof(camera3_capture_result));
                    result.frame_number = frame_number;
                    result.num_output_buffers = 1;
                    result.output_buffers =  &j->buffer;
                    result.input_buffer = k->input_buffer;
                    result.result = k->settings;
                    result.partial_result = PARTIAL_RESULT_COUNT;
                    mCallbackOps->process_capture_result(mCallbackOps, &result);

                    // Request fully answered; remove it. The invalidated
                    // iterator is never reused because we break immediately.
                    erasePendingRequest(k);
                    break;
                }
            }
            // Drop the consumed reprocess result and stop scanning (same
            // erase-then-break pattern as above keeps iteration safe).
            mPendingReprocessResultList.erase(j);
            break;
        }
    }
    return NO_ERROR;
}
   2299 
   2300 /*===========================================================================
   2301  * FUNCTION   : handleBatchMetadata
   2302  *
   2303  * DESCRIPTION: Handles metadata buffer callback in batch mode
   2304  *
   2305  * PARAMETERS : @metadata_buf: metadata buffer
   2306  *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
   2307  *                 the meta buf in this method
   2308  *
   2309  * RETURN     :
   2310  *
   2311  *==========================================================================*/
   2312 void QCamera3HardwareInterface::handleBatchMetadata(
   2313         mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
   2314 {
   2315     ATRACE_CALL();
   2316 
   2317     if (NULL == metadata_buf) {
   2318         ALOGE("%s: metadata_buf is NULL", __func__);
   2319         return;
   2320     }
   2321     /* In batch mode, the metdata will contain the frame number and timestamp of
   2322      * the last frame in the batch. Eg: a batch containing buffers from request
   2323      * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
   2324      * multiple process_capture_requests => 1 set_param => 1 handleBatchMetata =>
   2325      * multiple process_capture_results */
   2326     metadata_buffer_t *metadata =
   2327             (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
   2328     int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
   2329     uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
   2330     uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
   2331     uint32_t frame_number = 0, urgent_frame_number = 0;
   2332     int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
   2333     bool invalid_metadata = false;
   2334     size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
   2335     size_t loopCount = 1;
   2336 
   2337     int32_t *p_frame_number_valid =
   2338             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
   2339     uint32_t *p_frame_number =
   2340             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
   2341     int64_t *p_capture_time =
   2342             POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
   2343     int32_t *p_urgent_frame_number_valid =
   2344             POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
   2345     uint32_t *p_urgent_frame_number =
   2346             POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
   2347 
   2348     if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
   2349             (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
   2350             (NULL == p_urgent_frame_number)) {
   2351         ALOGE("%s: Invalid metadata", __func__);
   2352         invalid_metadata = true;
   2353     } else {
   2354         frame_number_valid = *p_frame_number_valid;
   2355         last_frame_number = *p_frame_number;
   2356         last_frame_capture_time = *p_capture_time;
   2357         urgent_frame_number_valid = *p_urgent_frame_number_valid;
   2358         last_urgent_frame_number = *p_urgent_frame_number;
   2359     }
   2360 
   2361     /* In batchmode, when no video buffers are requested, set_parms are sent
   2362      * for every capture_request. The difference between consecutive urgent
   2363      * frame numbers and frame numbers should be used to interpolate the
   2364      * corresponding frame numbers and time stamps */
   2365     pthread_mutex_lock(&mMutex);
   2366     if (urgent_frame_number_valid) {
   2367         first_urgent_frame_number =
   2368                 mPendingBatchMap.valueFor(last_urgent_frame_number);
   2369         urgentFrameNumDiff = last_urgent_frame_number + 1 -
   2370                 first_urgent_frame_number;
   2371 
   2372         CDBG_HIGH("%s: urgent_frm: valid: %d frm_num: %d - %d",
   2373                 __func__, urgent_frame_number_valid,
   2374                 first_urgent_frame_number, last_urgent_frame_number);
   2375     }
   2376 
   2377     if (frame_number_valid) {
   2378         first_frame_number = mPendingBatchMap.valueFor(last_frame_number);
   2379         frameNumDiff = last_frame_number + 1 -
   2380                 first_frame_number;
   2381         mPendingBatchMap.removeItem(last_frame_number);
   2382 
   2383         CDBG_HIGH("%s:        frm: valid: %d frm_num: %d - %d",
   2384                 __func__, frame_number_valid,
   2385                 first_frame_number, last_frame_number);
   2386 
   2387     }
   2388     pthread_mutex_unlock(&mMutex);
   2389 
   2390     if (urgent_frame_number_valid || frame_number_valid) {
   2391         loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
   2392         if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
   2393             ALOGE("%s: urgentFrameNumDiff: %d urgentFrameNum: %d",
   2394                     __func__, urgentFrameNumDiff, last_urgent_frame_number);
   2395         if (frameNumDiff > MAX_HFR_BATCH_SIZE)
   2396             ALOGE("%s: frameNumDiff: %d frameNum: %d",
   2397                     __func__, frameNumDiff, last_frame_number);
   2398     }
   2399 
   2400     for (size_t i = 0; i < loopCount; i++) {
   2401         /* handleMetadataWithLock is called even for invalid_metadata for
   2402          * pipeline depth calculation */
   2403         if (!invalid_metadata) {
   2404             /* Infer frame number. Batch metadata contains frame number of the
   2405              * last frame */
   2406             if (urgent_frame_number_valid) {
   2407                 if (i < urgentFrameNumDiff) {
   2408                     urgent_frame_number =
   2409                             first_urgent_frame_number + i;
   2410                     CDBG("%s: inferred urgent frame_number: %d",
   2411                             __func__, urgent_frame_number);
   2412                     ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
   2413                             CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
   2414                 } else {
   2415                     /* This is to handle when urgentFrameNumDiff < frameNumDiff */
   2416                     ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
   2417                             CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
   2418                 }
   2419             }
   2420 
   2421             /* Infer frame number. Batch metadata contains frame number of the
   2422              * last frame */
   2423             if (frame_number_valid) {
   2424                 if (i < frameNumDiff) {
   2425                     frame_number = first_frame_number + i;
   2426                     CDBG("%s: inferred frame_number: %d", __func__, frame_number);
   2427                     ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
   2428                             CAM_INTF_META_FRAME_NUMBER, frame_number);
   2429                 } else {
   2430                     /* This is to handle when urgentFrameNumDiff > frameNumDiff */
   2431                     ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
   2432                              CAM_INTF_META_FRAME_NUMBER_VALID, 0);
   2433                 }
   2434             }
   2435 
   2436             if (last_frame_capture_time) {
   2437                 //Infer timestamp
   2438                 first_frame_capture_time = last_frame_capture_time -
   2439                         (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
   2440                 capture_time =
   2441                         first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
   2442                 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
   2443                         CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
   2444                 CDBG_HIGH("%s: batch capture_time: %lld, capture_time: %lld",
   2445                         __func__, last_frame_capture_time, capture_time);
   2446             }
   2447         }
   2448         pthread_mutex_lock(&mMutex);
   2449         handleMetadataWithLock(metadata_buf,
   2450                 false /* free_and_bufdone_meta_buf */,
   2451                 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
   2452                 (i == frameNumDiff-1) /* last metadata in the batch metadata */);
   2453         pthread_mutex_unlock(&mMutex);
   2454     }
   2455 
   2456 done_batch_metadata:
   2457     /* BufDone metadata buffer */
   2458     if (free_and_bufdone_meta_buf) {
   2459         mMetadataChannel->bufDone(metadata_buf);
   2460         free(metadata_buf);
   2461     }
   2462 }
   2463 
   2464 /*===========================================================================
   2465  * FUNCTION   : handleMetadataWithLock
   2466  *
   2467  * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
   2468  *
   2469  * PARAMETERS : @metadata_buf: metadata buffer
   2470  *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
   2471  *                 the meta buf in this method
   2472  *              @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
   2473  *                  last urgent metadata in a batch. Always true for non-batch mode
   2474  *              @lastMetadataInBatch: Boolean to indicate whether this is the
   2475  *                  last metadata in a batch. Always true for non-batch mode
   2476  *
   2477  * RETURN     :
   2478  *
   2479  *==========================================================================*/
   2480 void QCamera3HardwareInterface::handleMetadataWithLock(
   2481     mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
   2482     bool lastUrgentMetadataInBatch, bool lastMetadataInBatch)
   2483 {
   2484     ATRACE_CALL();
   2485 
   2486     metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
   2487     int32_t frame_number_valid, urgent_frame_number_valid;
   2488     uint32_t frame_number, urgent_frame_number;
   2489     int64_t capture_time;
   2490     bool unfinished_raw_request = false;
   2491 
   2492     int32_t *p_frame_number_valid =
   2493             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
   2494     uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
   2495     int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
   2496     int32_t *p_urgent_frame_number_valid =
   2497             POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
   2498     uint32_t *p_urgent_frame_number =
   2499             POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
   2500     IF_META_AVAILABLE(cam_frame_dropped_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
   2501             metadata) {
   2502         ALOGE("%s: Dropped frame info for frame_number_valid %d, frame_number %d",
   2503                 __func__, *p_frame_number_valid, *p_frame_number);
   2504     }
   2505 
   2506     if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
   2507             (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
   2508         ALOGE("%s: Invalid metadata", __func__);
   2509         if (free_and_bufdone_meta_buf) {
   2510             mMetadataChannel->bufDone(metadata_buf);
   2511             free(metadata_buf);
   2512         }
   2513         goto done_metadata;
   2514     } else {
   2515         frame_number_valid = *p_frame_number_valid;
   2516         frame_number = *p_frame_number;
   2517         capture_time = *p_capture_time;
   2518         urgent_frame_number_valid = *p_urgent_frame_number_valid;
   2519         urgent_frame_number = *p_urgent_frame_number;
   2520     }
   2521     //Partial result on process_capture_result for timestamp
   2522     if (urgent_frame_number_valid) {
   2523         CDBG("%s: valid urgent frame_number = %u, capture_time = %lld",
   2524           __func__, urgent_frame_number, capture_time);
   2525 
   2526         //Recieved an urgent Frame Number, handle it
   2527         //using partial results
   2528         for (pendingRequestIterator i =
   2529                 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
   2530             CDBG("%s: Iterator Frame = %d urgent frame = %d",
   2531                 __func__, i->frame_number, urgent_frame_number);
   2532 
   2533             if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
   2534                 (i->partial_result_cnt == 0)) {
   2535                 ALOGE("%s: Error: HAL missed urgent metadata for frame number %d",
   2536                     __func__, i->frame_number);
   2537                 i->partial_result_cnt++;
   2538             }
   2539 
   2540             if (i->frame_number == urgent_frame_number &&
   2541                      i->bUrgentReceived == 0) {
   2542 
   2543                 camera3_capture_result_t result;
   2544                 memset(&result, 0, sizeof(camera3_capture_result_t));
   2545 
   2546                 i->partial_result_cnt++;
   2547                 i->bUrgentReceived = 1;
   2548                 // Extract 3A metadata
   2549                 result.result = translateCbUrgentMetadataToResultMetadata(
   2550                         metadata, lastUrgentMetadataInBatch);
   2551                 // Populate metadata result
   2552                 result.frame_number = urgent_frame_number;
   2553                 result.num_output_buffers = 0;
   2554                 result.output_buffers = NULL;
   2555                 result.partial_result = i->partial_result_cnt;
   2556 
   2557                 mCallbackOps->process_capture_result(mCallbackOps, &result);
   2558                 CDBG("%s: urgent frame_number = %u, capture_time = %lld",
   2559                      __func__, result.frame_number, capture_time);
   2560                 free_camera_metadata((camera_metadata_t *)result.result);
   2561                 break;
   2562             }
   2563         }
   2564     }
   2565 
   2566     if (!frame_number_valid) {
   2567         CDBG("%s: Not a valid normal frame number, used as SOF only", __func__);
   2568         if (free_and_bufdone_meta_buf) {
   2569             mMetadataChannel->bufDone(metadata_buf);
   2570             free(metadata_buf);
   2571         }
   2572         goto done_metadata;
   2573     }
   2574     CDBG_HIGH("%s: valid frame_number = %u, capture_time = %lld", __func__,
   2575             frame_number, capture_time);
   2576 
   2577     for (pendingRequestIterator i = mPendingRequestsList.begin();
   2578             i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
   2579         // Flush out all entries with less or equal frame numbers.
   2580 
   2581         camera3_capture_result_t result;
   2582         memset(&result, 0, sizeof(camera3_capture_result_t));
   2583 
   2584         CDBG("%s: frame_number in the list is %u", __func__, i->frame_number);
   2585 
   2586         // Check whether any stream buffer corresponding to this is dropped or not
   2587         // If dropped, then send the ERROR_BUFFER for the corresponding stream
   2588         // The API does not expect a blob buffer to be dropped
   2589         if (p_cam_frame_drop && p_cam_frame_drop->frame_dropped) {
   2590             /* Clear notify_msg structure */
   2591             camera3_notify_msg_t notify_msg;
   2592             memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
   2593             for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
   2594                     j != i->buffers.end(); j++) {
   2595                 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel *)j->stream->priv;
   2596                 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
   2597                 for (uint32_t k = 0; k < p_cam_frame_drop->cam_stream_ID.num_streams; k++) {
   2598                     if (streamID == p_cam_frame_drop->cam_stream_ID.streamID[k]) {
   2599                         // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
   2600                         ALOGW("%s: Start of reporting error frame#=%u, streamID=%u streamFormat=%d",
   2601                                 __func__, i->frame_number, streamID, j->stream->format);
   2602                         notify_msg.type = CAMERA3_MSG_ERROR;
   2603                         notify_msg.message.error.frame_number = i->frame_number;
   2604                         notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
   2605                         notify_msg.message.error.error_stream = j->stream;
   2606                         mCallbackOps->notify(mCallbackOps, &notify_msg);
   2607                         ALOGW("%s: End of reporting error frame#=%u, streamID=%u streamFormat=%d",
   2608                                 __func__, i->frame_number, streamID, j->stream->format);
   2609                         PendingFrameDropInfo PendingFrameDrop;
   2610                         PendingFrameDrop.frame_number=i->frame_number;
   2611                         PendingFrameDrop.stream_ID = streamID;
   2612                         // Add the Frame drop info to mPendingFrameDropList
   2613                         mPendingFrameDropList.push_back(PendingFrameDrop);
   2614                    }
   2615                }
   2616             }
   2617         }
   2618 
   2619         // Send empty metadata with already filled buffers for dropped metadata
   2620         // and send valid metadata with already filled buffers for current metadata
   2621         /* we could hit this case when we either
   2622          * 1. have a pending reprocess request or
   2623          * 2. miss a metadata buffer callback */
   2624         if (i->frame_number < frame_number) {
   2625             if (i->input_buffer) {
   2626                 /* this will be handled in handleInputBufferWithLock */
   2627                 i++;
   2628                 continue;
   2629             } else if (i->need_dynamic_blklvl) {
   2630                 unfinished_raw_request = true;
   2631                 // i->partial_result_cnt--;
   2632                 CDBG("%s, frame number:%d, partial_result:%d, unfinished raw request..",
   2633                         __func__, i->frame_number, i->partial_result_cnt);
   2634                 i++;
   2635                 continue;
   2636             } else if (i->pending_extra_result) {
   2637                 CDBG("%s, frame_number:%d, partial_result:%d, need_dynamic_blklvl:%d",
   2638                         __func__, i->frame_number, i->partial_result_cnt,
   2639                         i->need_dynamic_blklvl);
   2640                 // i->partial_result_cnt--;
   2641                 i++;
   2642                 continue;
   2643             } else {
   2644                 ALOGE("%s: Missing metadata buffer for frame number %d, reporting CAMERA3_MSG_ERROR_RESULT",
   2645                      __func__, i->frame_number);
   2646 
   2647                 CameraMetadata dummyMetadata;
   2648                 dummyMetadata.update(ANDROID_REQUEST_ID, &(i->request_id), 1);
   2649                 result.result = dummyMetadata.release();
   2650 
   2651                 camera3_notify_msg_t notify_msg;
   2652                 memset(&notify_msg, 0, sizeof(notify_msg));
   2653                 notify_msg.type = CAMERA3_MSG_ERROR;
   2654                 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
   2655                 notify_msg.message.error.error_stream = NULL;
   2656                 notify_msg.message.error.frame_number = i->frame_number;
   2657                 mCallbackOps->notify(mCallbackOps, &notify_msg);
   2658 
   2659                 // partial_result should be PARTIAL_RESULT_CNT in case of
   2660                 // ERROR_RESULT.
   2661                 i->partial_result_cnt = PARTIAL_RESULT_COUNT;
   2662                 result.partial_result = PARTIAL_RESULT_COUNT;
   2663             }
   2664         } else {
   2665             i->partial_result_cnt++;
   2666             CDBG("%s, frame_number:%d, need_dynamic_blklvl:%d, partial cnt:%d\n",
   2667                     __func__, i->frame_number, i->need_dynamic_blklvl,
   2668                     i->partial_result_cnt);
   2669             if (!i->need_dynamic_blklvl) {
   2670                 CDBG("%s, meta for request without raw, frame number: %d\n",
   2671                         __func__, i->frame_number);
   2672                 if (!unfinished_raw_request) {
   2673                     i->partial_result_cnt++;
   2674                     CDBG("%s, no raw request pending, send the final (cnt:%d) partial result",
   2675                             __func__, i->partial_result_cnt);
   2676                 }
   2677             }
   2678 
   2679             result.partial_result = i->partial_result_cnt;
   2680 
   2681             /* Clear notify_msg structure */
   2682             camera3_notify_msg_t notify_msg;
   2683             memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
   2684 
   2685             // Send shutter notify to frameworks
   2686             notify_msg.type = CAMERA3_MSG_SHUTTER;
   2687             notify_msg.message.shutter.frame_number = i->frame_number;
   2688             notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
   2689             mCallbackOps->notify(mCallbackOps, &notify_msg);
   2690 
   2691             i->timestamp = capture_time;
   2692 
   2693             // Find channel requiring metadata, meaning internal offline postprocess
   2694             // is needed.
   2695             //TODO: for now, we don't support two streams requiring metadata at the same time.
   2696             // (because we are not making copies, and metadata buffer is not reference counted.
   2697             bool internalPproc = false;
   2698             for (pendingBufferIterator iter = i->buffers.begin();
   2699                     iter != i->buffers.end(); iter++) {
   2700                 if (iter->need_metadata) {
   2701                     internalPproc = true;
   2702                     QCamera3ProcessingChannel *channel =
   2703                             (QCamera3ProcessingChannel *)iter->stream->priv;
   2704                     channel->queueReprocMetadata(metadata_buf);
   2705                     break;
   2706                 }
   2707             }
   2708 
   2709             result.result = translateFromHalMetadata(metadata,
   2710                     i->timestamp, i->request_id, i->jpegMetadata, i->pipeline_depth,
   2711                     i->capture_intent, i->hybrid_ae_enable, internalPproc, i->need_dynamic_blklvl,
   2712                     lastMetadataInBatch);
   2713 
   2714             saveExifParams(metadata);
   2715 
   2716             if (i->blob_request) {
   2717                 {
   2718                     //Dump tuning metadata if enabled and available
   2719                     char prop[PROPERTY_VALUE_MAX];
   2720                     memset(prop, 0, sizeof(prop));
   2721                     property_get("persist.camera.dumpmetadata", prop, "0");
   2722                     int32_t enabled = atoi(prop);
   2723                     if (enabled && metadata->is_tuning_params_valid) {
   2724                         dumpMetadataToFile(metadata->tuning_params,
   2725                                mMetaFrameCount,
   2726                                enabled,
   2727                                "Snapshot",
   2728                                frame_number);
   2729                     }
   2730                 }
   2731             }
   2732 
   2733             if (!internalPproc) {
   2734                 CDBG("%s: couldn't find need_metadata for this metadata", __func__);
   2735                 // Return metadata buffer
   2736                 if (free_and_bufdone_meta_buf) {
   2737                     mMetadataChannel->bufDone(metadata_buf);
   2738                     free(metadata_buf);
   2739                 }
   2740             }
   2741         }
   2742         if (!result.result) {
   2743             ALOGE("%s: metadata is NULL", __func__);
   2744         }
   2745         result.frame_number = i->frame_number;
   2746         result.input_buffer = i->input_buffer;
   2747         result.num_output_buffers = 0;
   2748         result.output_buffers = NULL;
   2749         for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
   2750                     j != i->buffers.end(); j++) {
   2751             if (j->buffer) {
   2752                 result.num_output_buffers++;
   2753             }
   2754         }
   2755 
   2756         if (result.num_output_buffers > 0) {
   2757             camera3_stream_buffer_t *result_buffers =
   2758                 new camera3_stream_buffer_t[result.num_output_buffers];
   2759             if (!result_buffers) {
   2760                 ALOGE("%s: Fatal error: out of memory", __func__);
   2761             }
   2762             size_t result_buffers_idx = 0;
   2763             for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
   2764                     j != i->buffers.end(); j++) {
   2765                 if (j->buffer) {
   2766                     for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
   2767                             m != mPendingFrameDropList.end(); m++) {
   2768                         QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
   2769                         uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
   2770                         if((m->stream_ID == streamID) && (m->frame_number==frame_number)) {
   2771                             j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
   2772                             ALOGW("%s: Stream STATUS_ERROR frame_number=%u, streamID=%u",
   2773                                   __func__, frame_number, streamID);
   2774                             m = mPendingFrameDropList.erase(m);
   2775                             break;
   2776                         }
   2777                     }
   2778 
   2779                     for (List<PendingBufferInfo>::iterator k =
   2780                       mPendingBuffersMap.mPendingBufferList.begin();
   2781                       k != mPendingBuffersMap.mPendingBufferList.end(); k++) {
   2782                       if (k->buffer == j->buffer->buffer) {
   2783                         CDBG("%s: Found buffer %p in pending buffer List "
   2784                               "for frame %u, Take it out!!", __func__,
   2785                                k->buffer, k->frame_number);
   2786                         mPendingBuffersMap.num_buffers--;
   2787                         k = mPendingBuffersMap.mPendingBufferList.erase(k);
   2788                         break;
   2789                       }
   2790                     }
   2791 
   2792                     result_buffers[result_buffers_idx++] = *(j->buffer);
   2793                     free(j->buffer);
   2794                     j->buffer = NULL;
   2795                 }
   2796             }
   2797             result.output_buffers = result_buffers;
   2798             mCallbackOps->process_capture_result(mCallbackOps, &result);
   2799             CDBG("%s %d: meta frame_number = %u, capture_time = %lld, partial:%d",
   2800                     __func__, __LINE__, result.frame_number, i->timestamp, result.partial_result);
   2801             free_camera_metadata((camera_metadata_t *)result.result);
   2802             delete[] result_buffers;
   2803         } else {
   2804             mCallbackOps->process_capture_result(mCallbackOps, &result);
   2805             CDBG("%s %d: meta frame_number = %u, capture_time = %lld, partial:%d",
   2806                         __func__, __LINE__, result.frame_number, i->timestamp, result.partial_result);
   2807             free_camera_metadata((camera_metadata_t *)result.result);
   2808         }
   2809 
   2810         if (i->partial_result_cnt == PARTIAL_RESULT_COUNT) {
   2811             mPendingLiveRequest--;
   2812             i = erasePendingRequest(i);
   2813         } else {
   2814             CDBG("%s, keep in list, frame number:%d, partial result:%d",
   2815                     __func__, i->frame_number, i->partial_result_cnt);
   2816             i->pending_extra_result = true;
   2817             i++;
   2818         }
   2819 
   2820         if (!mPendingReprocessResultList.empty()) {
   2821             handlePendingReprocResults(frame_number + 1);
   2822         }
   2823 
   2824     }
   2825 
   2826 done_metadata:
   2827     for (pendingRequestIterator i = mPendingRequestsList.begin();
   2828             i != mPendingRequestsList.end() ;i++) {
   2829         i->pipeline_depth++;
   2830     }
   2831     CDBG("%s: mPendingLiveRequest = %d", __func__, mPendingLiveRequest);
   2832     unblockRequestIfNecessary();
   2833 
   2834 }
   2835 
   2836 /*===========================================================================
   2837  * FUNCTION   : hdrPlusPerfLock
   2838  *
   2839  * DESCRIPTION: perf lock for HDR+ using custom intent
   2840  *
   2841  * PARAMETERS : @metadata_buf: Metadata super_buf pointer
   2842  *
   2843  * RETURN     : None
   2844  *
   2845  *==========================================================================*/
   2846 void QCamera3HardwareInterface::hdrPlusPerfLock(
   2847         mm_camera_super_buf_t *metadata_buf)
   2848 {
   2849     if (NULL == metadata_buf) {
   2850         ALOGE("%s: metadata_buf is NULL", __func__);
   2851         return;
   2852     }
   2853     metadata_buffer_t *metadata =
   2854             (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
   2855     int32_t *p_frame_number_valid =
   2856             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
   2857     uint32_t *p_frame_number =
   2858             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
   2859 
   2860     //acquire perf lock for 5 sec after the last HDR frame is captured
   2861     if (*p_frame_number_valid) {
   2862         if (mLastCustIntentFrmNum == (int32_t)*p_frame_number) {
   2863             m_perfLock.lock_acq_timed(HDR_PLUS_PERF_TIME_OUT);
   2864         }
   2865     }
   2866 
   2867     //release lock after perf lock timer is expired. If lock is already released,
   2868     //isTimerReset returns false
   2869     if (m_perfLock.isTimerReset()) {
   2870         mLastCustIntentFrmNum = -1;
   2871         m_perfLock.lock_rel_timed();
   2872     }
   2873 }
   2874 
   2875 /*===========================================================================
   2876  * FUNCTION   : handleInputBufferWithLock
   2877  *
   2878  * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
   2879  *
   2880  * PARAMETERS :
   2881  *  @buffer: contains status information about the processed buffer
   2882  *  @frame_number: frame number of the input buffer
   2883  *
   2884  * RETURN     :
   2885  *
   2886  *==========================================================================*/
void QCamera3HardwareInterface::handleInputBufferWithLock(
        camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    ATRACE_CALL();
    // Locate the pending request entry that matches this frame number.
    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i != mPendingRequestsList.end() && i->input_buffer) {
        //found the right request
        // Send the shutter notification exactly once per request.
        if (!i->shutter_notified) {
            CameraMetadata settings;
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            // Default to current monotonic time; prefer the sensor timestamp
            // from the request settings when one is available.
            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
            if(i->settings) {
                settings = i->settings;
                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
                } else {
                    ALOGE("%s: No timestamp in input settings! Using current one.",
                            __func__);
                }
            } else {
                ALOGE("%s: Input settings missing!", __func__);
            }

            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
            mCallbackOps->notify(mCallbackOps, &notify_msg);
            i->shutter_notified = true;
            CDBG("%s: Input request metadata notify frame_number = %u, capture_time = %llu",
                       __func__, i->frame_number, notify_msg.message.shutter.timestamp);
        }

        // Wait on (and close) the input buffer's release fence before the
        // result is reported back to the framework.
        if (i->input_buffer->release_fence != -1) {
           int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
           close(i->input_buffer->release_fence);
           if (rc != OK) {
               ALOGE("%s: input buffer sync wait failed %d", __func__, rc);
           }
        }

        if ((nullptr != buffer) && (CAMERA3_BUFFER_STATUS_OK != buffer->status)) {
            // Error path: the processed buffer came back in a failed state.
            // Notify ERROR_REQUEST, then return every pending buffer of this
            // frame with STATUS_ERROR in a single capture result.
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
            notify_msg.message.error.error_stream = NULL;
            notify_msg.message.error.frame_number = frame_number;
            mCallbackOps->notify(mCallbackOps, &notify_msg);

            Vector<camera3_stream_buffer_t> pendingBuffers;
            camera3_stream_buffer_t pending;
            memset(&pending, 0, sizeof(pending));
            pending.acquire_fence = -1;
            pending.release_fence = -1;
            pending.status = CAMERA3_BUFFER_STATUS_ERROR;
            // Collect and remove all pending buffers belonging to this frame.
            for (List<PendingBufferInfo>::iterator k =
                    mPendingBuffersMap.mPendingBufferList.begin();
                    k != mPendingBuffersMap.mPendingBufferList.end();) {
                if (k->frame_number == frame_number) {
                    pending.buffer = k->buffer;
                    pending.stream = k->stream;
                    pendingBuffers.add(pending);

                    mPendingBuffersMap.num_buffers--;
                    k = mPendingBuffersMap.mPendingBufferList.erase(k);
                } else {
                    k++;
                }
            }

            camera3_capture_result result;
            memset(&result, 0, sizeof(camera3_capture_result));
            result.input_buffer = i->input_buffer;
            result.num_output_buffers = pendingBuffers.size();
            result.output_buffers = pendingBuffers.array();
            result.result = NULL;
            result.frame_number = frame_number;
            mCallbackOps->process_capture_result(mCallbackOps, &result);
        } else {
            // Success path: report the request settings as the final result
            // together with the consumed input buffer.
            camera3_capture_result result;
            memset(&result, 0, sizeof(camera3_capture_result));
            result.frame_number = frame_number;
            result.result = i->settings;
            result.input_buffer = i->input_buffer;

            result.partial_result = PARTIAL_RESULT_COUNT;

            mCallbackOps->process_capture_result(mCallbackOps, &result);
        }
        CDBG("%s: Input request metadata and input buffer frame_number = %u",
                       __func__, i->frame_number);
        // The request is fully satisfied; drop it from the pending list.
        i = erasePendingRequest(i);
    } else {
        ALOGE("%s: Could not find input request for frame number %d", __func__, frame_number);
    }
}
   2987 
   2988 bool QCamera3HardwareInterface::getBlackLevelRegion(int (&opticalBlackRegions)[4])
   2989 {
   2990     if (gCamCapability[mCameraId]->optical_black_region_count > 0) {
   2991         /*just calculate one region black level and send to fwk*/
   2992         for (size_t i = 0; i <  4; i++) {
   2993             opticalBlackRegions[i] = gCamCapability[mCameraId]->optical_black_regions[i];
   2994         }
   2995         return TRUE;
   2996     }
   2997 
   2998     return FALSE;
   2999 }
   3000 
/*===========================================================================
 * FUNCTION   : sendDynamicBlackLevel
 *
 * DESCRIPTION: Thread-safe wrapper that acquires mMutex and delegates to
 *              sendDynamicBlackLevelWithLock.
 *
 * PARAMETERS : @blacklevel: per-channel dynamic black level values
 *              @frame_number: frame number the black level belongs to
 *
 * RETURN     : None
 *
 *==========================================================================*/
void QCamera3HardwareInterface::sendDynamicBlackLevel(float blacklevel[4], uint32_t frame_number)
{
    CDBG("%s, E.\n", __func__);
    pthread_mutex_lock(&mMutex);
    sendDynamicBlackLevelWithLock(blacklevel, frame_number);
    pthread_mutex_unlock(&mMutex);
    CDBG("%s, X.\n", __func__);
}
   3009 
/*===========================================================================
 * FUNCTION   : sendDynamicBlackLevelWithLock
 *
 * DESCRIPTION: Sends the dynamic black level as an extra partial result for
 *              the given frame (mMutex must already be held). If this
 *              completes the request, it is removed from the pending list and
 *              any subsequent requests that were only waiting on this raw
 *              request are flushed out with their final partial result.
 *
 * PARAMETERS : @blacklevel: per-channel dynamic black level values
 *              @frame_number: frame number the black level belongs to
 *
 * RETURN     : None
 *
 *==========================================================================*/
void QCamera3HardwareInterface::sendDynamicBlackLevelWithLock(float blacklevel[4], uint32_t frame_number)
{
    CDBG("%s, E. frame_number:%d\n", __func__, frame_number);

    // Locate the pending request for this frame number; it must be a request
    // that asked for a dynamic black level.
    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if ((i == mPendingRequestsList.end()) || !i->need_dynamic_blklvl) {
        ALOGE("%s, error: invalid frame number.", __func__);
        return;
    }

    // This callback accounts for one more partial result on the request.
    i->partial_result_cnt++;

    CameraMetadata camMetadata;
    int64_t fwk_frame_number = (int64_t)frame_number;
    camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);

    // update dynamic black level here
    camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, blacklevel, 4);

    // Report a buffer-less partial result carrying only the black level.
    camera3_capture_result_t result;
    memset(&result, 0, sizeof(camera3_capture_result_t));
    result.frame_number = frame_number;
    result.num_output_buffers = 0;
    result.result = camMetadata.release();
    result.partial_result = i->partial_result_cnt;

    CDBG("%s, partial result:%d, frame_number:%d, pending extra result:%d\n",
            __func__, result.partial_result, frame_number, i->pending_extra_result);
    mCallbackOps->process_capture_result(mCallbackOps, &result);
    free_camera_metadata((camera_metadata_t *)result.result);

    if (i->partial_result_cnt == PARTIAL_RESULT_COUNT) {
        // This was the last partial result: retire the request.
        CDBG("%s, remove cur request from pending list.", __func__);
        mPendingLiveRequest--;
        i = erasePendingRequest(i);

        // traverse the remaining pending list to see whether need to send cached ones..
        while (i != mPendingRequestsList.end()) {
            CDBG("%s, frame number:%d, partial_result:%d, pending extra result:%d",
                    __func__, i->frame_number, i->partial_result_cnt,
                    i->pending_extra_result);

            if ((i->partial_result_cnt == PARTIAL_RESULT_COUNT - 1)
                    && (i->need_dynamic_blklvl == false) /* in case two consecutive raw requests */) {
                // send out final result, and remove it from pending list.
                CameraMetadata camMetadata;
                int64_t fwk_frame_number = (int64_t)i->frame_number;
                camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);

                memset(&result, 0, sizeof(camera3_capture_result_t));
                result.frame_number = i->frame_number;
                result.num_output_buffers = 0;
                result.result = camMetadata.release();
                result.partial_result = i->partial_result_cnt + 1;

                mCallbackOps->process_capture_result(mCallbackOps, &result);
                free_camera_metadata((camera_metadata_t *)result.result);

                mPendingLiveRequest--;
                i = erasePendingRequest(i);
                CDBG("%s, mPendingLiveRequest:%d, pending list size:%d",
                        __func__, mPendingLiveRequest, mPendingRequestsList.size());
            } else {
                // First request still waiting on more results; stop flushing.
                break;
            }
        }
    }

    unblockRequestIfNecessary();
    CDBG("%s, X.mPendingLiveRequest = %d\n", __func__, mPendingLiveRequest);
}
   3084 
   3085 
   3086 /*===========================================================================
   3087  * FUNCTION   : handleBufferWithLock
   3088  *
   3089  * DESCRIPTION: Handles image buffer callback with mMutex lock held.
   3090  *
   3091  * PARAMETERS : @buffer: image buffer for the callback
   3092  *              @frame_number: frame number of the image buffer
   3093  *
   3094  * RETURN     :
   3095  *
   3096  *==========================================================================*/
   3097 void QCamera3HardwareInterface::handleBufferWithLock(
   3098     camera3_stream_buffer_t *buffer, uint32_t frame_number)
   3099 {
   3100     ATRACE_CALL();
   3101     // If the frame number doesn't exist in the pending request list,
   3102     // directly send the buffer to the frameworks, and update pending buffers map
   3103     // Otherwise, book-keep the buffer.
   3104     pendingRequestIterator i = mPendingRequestsList.begin();
   3105     while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
   3106         i++;
   3107     }
   3108     if (i == mPendingRequestsList.end() || i->pending_extra_result == true) {
   3109         if (i != mPendingRequestsList.end()) {
   3110             // though the pendingRequestInfo is still in the list,
   3111             // still send the buffer directly, as the pending_extra_result is true,
   3112             // and we've already received meta for this frame number.
   3113             CDBG("%s, send the buffer directly, frame number:%d",
   3114                     __func__, i->frame_number);
   3115         }
   3116         // Verify all pending requests frame_numbers are greater
   3117         for (pendingRequestIterator j = mPendingRequestsList.begin();
   3118                 j != mPendingRequestsList.end(); j++) {
   3119             if ((j->frame_number < frame_number) && !(j->input_buffer)) {
   3120                 ALOGE("%s: Error: pending live frame number %d is smaller than %d",
   3121                         __func__, j->frame_number, frame_number);
   3122             }
   3123         }
   3124         camera3_capture_result_t result;
   3125         memset(&result, 0, sizeof(camera3_capture_result_t));
   3126         result.result = NULL;
   3127         result.frame_number = frame_number;
   3128         result.num_output_buffers = 1;
   3129         result.partial_result = 0;
   3130         for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
   3131                 m != mPendingFrameDropList.end(); m++) {
   3132             QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
   3133             uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
   3134             if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
   3135                 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
   3136                 CDBG("%s: Stream STATUS_ERROR frame_number=%d, streamID=%d",
   3137                         __func__, frame_number, streamID);
   3138                 m = mPendingFrameDropList.erase(m);
   3139                 break;
   3140             }
   3141         }
   3142         result.output_buffers = buffer;
   3143         CDBG_HIGH("%s: result frame_number = %d, buffer = %p",
   3144                 __func__, frame_number, buffer->buffer);
   3145 
   3146         for (List<PendingBufferInfo>::iterator k =
   3147                 mPendingBuffersMap.mPendingBufferList.begin();
   3148                 k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
   3149             if (k->buffer == buffer->buffer) {
   3150                 CDBG("%s: Found Frame buffer, take it out from list",
   3151                         __func__);
   3152 
   3153                 mPendingBuffersMap.num_buffers--;
   3154                 k = mPendingBuffersMap.mPendingBufferList.erase(k);
   3155                 break;
   3156             }
   3157         }
   3158         CDBG("%s: mPendingBuffersMap.num_buffers = %d",
   3159             __func__, mPendingBuffersMap.num_buffers);
   3160 
   3161         mCallbackOps->process_capture_result(mCallbackOps, &result);
   3162     } else {
   3163         if (i->input_buffer) {
   3164             CameraMetadata settings;
   3165             camera3_notify_msg_t notify_msg;
   3166             memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
   3167             nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
   3168             if(i->settings) {
   3169                 settings = i->settings;
   3170                 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
   3171                     capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
   3172                 } else {
   3173                     ALOGE("%s: No timestamp in input settings! Using current one.",
   3174                             __func__);
   3175                 }
   3176             } else {
   3177                 ALOGE("%s: Input settings missing!", __func__);
   3178             }
   3179 
   3180             notify_msg.type = CAMERA3_MSG_SHUTTER;
   3181             notify_msg.message.shutter.frame_number = frame_number;
   3182             notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
   3183 
   3184             if (i->input_buffer->release_fence != -1) {
   3185                int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
   3186                close(i->input_buffer->release_fence);
   3187                if (rc != OK) {
   3188                ALOGE("%s: input buffer sync wait failed %d", __func__, rc);
   3189                }
   3190             }
   3191 
   3192             for (List<PendingBufferInfo>::iterator k =
   3193                     mPendingBuffersMap.mPendingBufferList.begin();
   3194                     k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
   3195                 if (k->buffer == buffer->buffer) {
   3196                     CDBG("%s: Found Frame buffer, take it out from list",
   3197                             __func__);
   3198 
   3199                     mPendingBuffersMap.num_buffers--;
   3200                     k = mPendingBuffersMap.mPendingBufferList.erase(k);
   3201                     break;
   3202                 }
   3203             }
   3204             CDBG("%s: mPendingBuffersMap.num_buffers = %d",
   3205                 __func__, mPendingBuffersMap.num_buffers);
   3206 
   3207             bool notifyNow = true;
   3208             for (pendingRequestIterator j = mPendingRequestsList.begin();
   3209                     j != mPendingRequestsList.end(); j++) {
   3210                 if (j->frame_number < frame_number) {
   3211                     notifyNow = false;
   3212                     break;
   3213                 }
   3214             }
   3215 
   3216             if (notifyNow) {
   3217                 camera3_capture_result result;
   3218                 memset(&result, 0, sizeof(camera3_capture_result));
   3219                 result.frame_number = frame_number;
   3220                 result.result = i->settings;
   3221                 result.input_buffer = i->input_buffer;
   3222                 result.num_output_buffers = 1;
   3223                 result.output_buffers = buffer;
   3224                 result.partial_result = PARTIAL_RESULT_COUNT;
   3225 
   3226                 mCallbackOps->notify(mCallbackOps, &notify_msg);
   3227                 mCallbackOps->process_capture_result(mCallbackOps, &result);
   3228                 CDBG("%s: Notify reprocess now %d!", __func__, frame_number);
   3229                 i = erasePendingRequest(i);
   3230             } else {
   3231                 // Cache reprocess result for later
   3232                 PendingReprocessResult pendingResult;
   3233                 memset(&pendingResult, 0, sizeof(PendingReprocessResult));
   3234                 pendingResult.notify_msg = notify_msg;
   3235                 pendingResult.buffer = *buffer;
   3236                 pendingResult.frame_number = frame_number;
   3237                 mPendingReprocessResultList.push_back(pendingResult);
   3238                 CDBG("%s: Cache reprocess result %d!", __func__, frame_number);
   3239             }
   3240         } else {
   3241             for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
   3242                 j != i->buffers.end(); j++) {
   3243                 if (j->stream == buffer->stream) {
   3244                     if (j->buffer != NULL) {
   3245                         ALOGE("%s: Error: buffer is already set", __func__);
   3246                     } else {
   3247                         j->buffer = (camera3_stream_buffer_t *)malloc(
   3248                             sizeof(camera3_stream_buffer_t));
   3249                         *(j->buffer) = *buffer;
   3250                         CDBG_HIGH("%s: cache buffer %p at result frame_number %d",
   3251                             __func__, buffer, frame_number);
   3252                     }
   3253                 }
   3254             }
   3255         }
   3256     }
   3257 }
   3258 
   3259 /*===========================================================================
   3260  * FUNCTION   : unblockRequestIfNecessary
   3261  *
   3262  * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
   3263  *              that mMutex is held when this function is called.
   3264  *
   3265  * PARAMETERS :
   3266  *
   3267  * RETURN     :
   3268  *
   3269  *==========================================================================*/
   3270 void QCamera3HardwareInterface::unblockRequestIfNecessary()
   3271 {
   3272    // Unblock process_capture_request
   3273    pthread_cond_signal(&mRequestCond);
   3274 }
   3275 
   3276 
   3277 /*===========================================================================
   3278  * FUNCTION   : processCaptureRequest
   3279  *
   3280  * DESCRIPTION: process a capture request from camera service
   3281  *
   3282  * PARAMETERS :
   3283  *   @request : request from framework to process
   3284  *
   3285  * RETURN     :
   3286  *
   3287  *==========================================================================*/
   3288 int QCamera3HardwareInterface::processCaptureRequest(
   3289                     camera3_capture_request_t *request)
   3290 {
   3291     ATRACE_CALL();
   3292     int rc = NO_ERROR;
   3293     int32_t request_id;
   3294     CameraMetadata meta;
   3295     uint32_t minInFlightRequests = MIN_INFLIGHT_REQUESTS;
   3296     uint32_t maxInFlightRequests = MAX_INFLIGHT_REQUESTS;
   3297     bool isVidBufRequested = false;
   3298     camera3_stream_buffer_t *pInputBuffer = NULL;
   3299 
   3300     pthread_mutex_lock(&mMutex);
   3301 
   3302     rc = validateCaptureRequest(request);
   3303     if (rc != NO_ERROR) {
   3304         ALOGE("%s: incoming request is not valid", __func__);
   3305         pthread_mutex_unlock(&mMutex);
   3306         return rc;
   3307     }
   3308 
   3309     meta = request->settings;
   3310 
   3311     // For first capture request, send capture intent, and
   3312     // stream on all streams
   3313     if (mFirstRequest) {
   3314         // send an unconfigure to the backend so that the isp
   3315         // resources are deallocated
   3316         if (!mFirstConfiguration) {
   3317             cam_stream_size_info_t stream_config_info;
   3318             int32_t hal_version = CAM_HAL_V3;
   3319             memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
   3320             stream_config_info.buffer_info.min_buffers =
   3321                     MIN_INFLIGHT_REQUESTS;
   3322             stream_config_info.buffer_info.max_buffers =
   3323                     m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
   3324             clear_metadata_buffer(mParameters);
   3325             ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
   3326                     CAM_INTF_PARM_HAL_VERSION, hal_version);
   3327             ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
   3328                     CAM_INTF_META_STREAM_INFO, stream_config_info);
   3329             rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
   3330                     mParameters);
   3331             if (rc < 0) {
   3332                 ALOGE("%s: set_parms for unconfigure failed", __func__);
   3333                 pthread_mutex_unlock(&mMutex);
   3334                 return rc;
   3335             }
   3336         }
   3337         m_perfLock.lock_acq();
   3338         /* get eis information for stream configuration */
   3339         cam_is_type_t is_type;
   3340         char is_type_value[PROPERTY_VALUE_MAX];
   3341         property_get("persist.camera.is_type", is_type_value, "0");
   3342         is_type = static_cast<cam_is_type_t>(atoi(is_type_value));
   3343 
   3344         if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
   3345             int32_t hal_version = CAM_HAL_V3;
   3346             uint8_t captureIntent =
   3347                 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
   3348             mCaptureIntent = captureIntent;
   3349             clear_metadata_buffer(mParameters);
   3350             ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
   3351             ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
   3352         }
   3353 
   3354         //If EIS is enabled, turn it on for video
   3355         bool setEis = m_bEisEnable && m_bEisSupportedSize;
   3356         int32_t vsMode;
   3357         vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
   3358         if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
   3359             rc = BAD_VALUE;
   3360         }
   3361 
   3362         //IS type will be 0 unless EIS is supported. If EIS is supported
   3363         //it could either be 1 or 4 depending on the stream and video size
   3364         if (setEis) {
   3365             if (!m_bEisSupportedSize) {
   3366                 is_type = IS_TYPE_DIS;
   3367             } else {
   3368                 is_type = IS_TYPE_EIS_2_0;
   3369             }
   3370             mStreamConfigInfo.is_type = is_type;
   3371         } else {
   3372             mStreamConfigInfo.is_type = IS_TYPE_NONE;
   3373         }
   3374 
   3375         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
   3376                 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
   3377         int32_t tintless_value = 1;
   3378         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
   3379                 CAM_INTF_PARM_TINTLESS, tintless_value);
   3380         //Disable CDS for HFR mode and if mPprocBypass = true.
   3381         //CDS is a session parameter in the backend/ISP, so need to be set/reset
   3382         //after every configure_stream
   3383         if((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
   3384                 (m_bIsVideo)) {
   3385             int32_t cds = CAM_CDS_MODE_OFF;
   3386             if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
   3387                     CAM_INTF_PARM_CDS_MODE, cds))
   3388                 ALOGE("%s: Failed to disable CDS for HFR mode", __func__);
   3389 
   3390         }
   3391         setMobicat();
   3392 
   3393         /* Set fps and hfr mode while sending meta stream info so that sensor
   3394          * can configure appropriate streaming mode */
   3395         mHFRVideoFps = DEFAULT_VIDEO_FPS;
   3396         if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
   3397             rc = setHalFpsRange(meta, mParameters);
   3398             if (rc != NO_ERROR) {
   3399                 ALOGE("%s: setHalFpsRange failed", __func__);
   3400             }
   3401         }
   3402         if (meta.exists(ANDROID_CONTROL_MODE)) {
   3403             uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
   3404             rc = extractSceneMode(meta, metaMode, mParameters);
   3405             if (rc != NO_ERROR) {
   3406                 ALOGE("%s: extractSceneMode failed", __func__);
   3407             }
   3408         }
   3409 
   3410         //TODO: validate the arguments, HSV scenemode should have only the
   3411         //advertised fps ranges
   3412 
   3413         /*set the capture intent, hal version, tintless, stream info,
         *and DIS-enable parameters to the backend*/
   3415         CDBG("%s: set_parms META_STREAM_INFO ", __func__ );
   3416         mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
   3417                     mParameters);
   3418 
   3419         cam_dimension_t sensor_dim;
   3420         memset(&sensor_dim, 0, sizeof(sensor_dim));
   3421         rc = getSensorOutputSize(sensor_dim);
   3422         if (rc != NO_ERROR) {
   3423             ALOGE("%s: Failed to get sensor output size", __func__);
   3424             pthread_mutex_unlock(&mMutex);
   3425             goto error_exit;
   3426         }
   3427 
   3428         mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
   3429                 gCamCapability[mCameraId]->active_array_size.height,
   3430                 sensor_dim.width, sensor_dim.height);
   3431 
   3432         /* Set batchmode before initializing channel. Since registerBuffer
   3433          * internally initializes some of the channels, better set batchmode
   3434          * even before first register buffer */
   3435         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   3436             it != mStreamInfo.end(); it++) {
   3437             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
   3438             if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
   3439                     && mBatchSize) {
   3440                 rc = channel->setBatchSize(mBatchSize);
   3441                 //Disable per frame map unmap for HFR/batchmode case
   3442                 rc |= channel->setPerFrameMapUnmap(false);
   3443                 if (NO_ERROR != rc) {
   3444                     ALOGE("%s : Channel init failed %d", __func__, rc);
   3445                     pthread_mutex_unlock(&mMutex);
   3446                     goto error_exit;
   3447                 }
   3448             }
   3449         }
   3450 
   3451         //First initialize all streams
   3452         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   3453             it != mStreamInfo.end(); it++) {
   3454             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
   3455             if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
   3456                ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
   3457                setEis)
   3458                 rc = channel->initialize(is_type);
   3459             else {
   3460                 rc = channel->initialize(IS_TYPE_NONE);
   3461             }
   3462             if (NO_ERROR != rc) {
   3463                 ALOGE("%s : Channel initialization failed %d", __func__, rc);
   3464                 pthread_mutex_unlock(&mMutex);
   3465                 goto error_exit;
   3466             }
   3467         }
   3468 
   3469         if (mRawDumpChannel) {
   3470             rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
   3471             if (rc != NO_ERROR) {
   3472                 ALOGE("%s: Error: Raw Dump Channel init failed", __func__);
   3473                 pthread_mutex_unlock(&mMutex);
   3474                 goto error_exit;
   3475             }
   3476         }
   3477         if (mSupportChannel) {
   3478             rc = mSupportChannel->initialize(IS_TYPE_NONE);
   3479             if (rc < 0) {
   3480                 ALOGE("%s: Support channel initialization failed", __func__);
   3481                 pthread_mutex_unlock(&mMutex);
   3482                 goto error_exit;
   3483             }
   3484         }
   3485         if (mAnalysisChannel) {
   3486             rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
   3487             if (rc < 0) {
   3488                 ALOGE("%s: Analysis channel initialization failed", __func__);
   3489                 pthread_mutex_unlock(&mMutex);
   3490                 goto error_exit;
   3491             }
   3492         }
   3493         if (mDummyBatchChannel) {
   3494             rc = mDummyBatchChannel->setBatchSize(mBatchSize);
   3495             if (rc < 0) {
   3496                 ALOGE("%s: mDummyBatchChannel setBatchSize failed", __func__);
   3497                 pthread_mutex_unlock(&mMutex);
   3498                 goto error_exit;
   3499             }
   3500             rc = mDummyBatchChannel->initialize(is_type);
   3501             if (rc < 0) {
   3502                 ALOGE("%s: mDummyBatchChannel initialization failed", __func__);
   3503                 pthread_mutex_unlock(&mMutex);
   3504                 goto error_exit;
   3505             }
   3506         }
   3507 
   3508         // Set bundle info
   3509         rc = setBundleInfo();
   3510         if (rc < 0) {
   3511             ALOGE("%s: setBundleInfo failed %d", __func__, rc);
   3512             pthread_mutex_unlock(&mMutex);
   3513             goto error_exit;
   3514         }
   3515 
   3516         //Then start them.
   3517         CDBG_HIGH("%s: Start META Channel", __func__);
   3518         rc = mMetadataChannel->start();
   3519         if (rc < 0) {
   3520             ALOGE("%s: META channel start failed", __func__);
   3521             pthread_mutex_unlock(&mMutex);
   3522             goto error_exit;
   3523         }
   3524 
   3525         if (mAnalysisChannel) {
   3526             rc = mAnalysisChannel->start();
   3527             if (rc < 0) {
   3528                 ALOGE("%s: Analysis channel start failed", __func__);
   3529                 mMetadataChannel->stop();
   3530                 pthread_mutex_unlock(&mMutex);
   3531                 goto error_exit;
   3532             }
   3533         }
   3534 
   3535         if (mSupportChannel) {
   3536             rc = mSupportChannel->start();
   3537             if (rc < 0) {
   3538                 ALOGE("%s: Support channel start failed", __func__);
   3539                 mMetadataChannel->stop();
                /* Although support and analysis are mutually exclusive today
                   adding it in any case for future proofing */
   3542                 if (mAnalysisChannel) {
   3543                     mAnalysisChannel->stop();
   3544                 }
   3545                 pthread_mutex_unlock(&mMutex);
   3546                 goto error_exit;
   3547             }
   3548         }
   3549         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   3550             it != mStreamInfo.end(); it++) {
   3551             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
   3552             CDBG_HIGH("%s: Start Processing Channel mask=%d",
   3553                     __func__, channel->getStreamTypeMask());
   3554             rc = channel->start();
   3555             if (rc < 0) {
   3556                 ALOGE("%s: channel start failed", __func__);
   3557                 pthread_mutex_unlock(&mMutex);
   3558                 goto error_exit;
   3559             }
   3560         }
   3561 
   3562         if (mRawDumpChannel) {
   3563             CDBG("%s: Starting raw dump stream",__func__);
   3564             rc = mRawDumpChannel->start();
   3565             if (rc != NO_ERROR) {
   3566                 ALOGE("%s: Error Starting Raw Dump Channel", __func__);
   3567                 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   3568                       it != mStreamInfo.end(); it++) {
   3569                     QCamera3Channel *channel =
   3570                         (QCamera3Channel *)(*it)->stream->priv;
   3571                     ALOGE("%s: Stopping Processing Channel mask=%d", __func__,
   3572                         channel->getStreamTypeMask());
   3573                     channel->stop();
   3574                 }
   3575                 if (mSupportChannel)
   3576                     mSupportChannel->stop();
   3577                 if (mAnalysisChannel) {
   3578                     mAnalysisChannel->stop();
   3579                 }
   3580                 mMetadataChannel->stop();
   3581                 pthread_mutex_unlock(&mMutex);
   3582                 goto error_exit;
   3583             }
   3584         }
   3585 
   3586         if (mChannelHandle) {
   3587 
   3588             rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
   3589                     mChannelHandle);
   3590             if (rc != NO_ERROR) {
   3591                 ALOGE("%s: start_channel failed %d", __func__, rc);
   3592                 pthread_mutex_unlock(&mMutex);
   3593                 goto error_exit;
   3594             }
   3595         }
   3596 
   3597 
   3598         goto no_error;
   3599 error_exit:
   3600         m_perfLock.lock_rel();
   3601         return rc;
   3602 no_error:
   3603         m_perfLock.lock_rel();
   3604 
   3605         mWokenUpByDaemon = false;
   3606         mPendingLiveRequest = 0;
   3607         mFirstConfiguration = false;
   3608         enablePowerHint();
   3609     }
   3610 
   3611     uint32_t frameNumber = request->frame_number;
   3612     cam_stream_ID_t streamID;
   3613 
   3614     if (meta.exists(ANDROID_REQUEST_ID)) {
   3615         request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
   3616         mCurrentRequestId = request_id;
   3617         CDBG("%s: Received request with id: %d",__func__, request_id);
   3618     } else if (mFirstRequest || mCurrentRequestId == -1){
   3619         ALOGE("%s: Unable to find request id field, \
   3620                 & no previous id available", __func__);
   3621         pthread_mutex_unlock(&mMutex);
   3622         return NAME_NOT_FOUND;
   3623     } else {
   3624         CDBG("%s: Re-using old request id", __func__);
   3625         request_id = mCurrentRequestId;
   3626     }
   3627 
   3628     CDBG_HIGH("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
   3629                                     __func__, __LINE__,
   3630                                     request->num_output_buffers,
   3631                                     request->input_buffer,
   3632                                     frameNumber);
   3633     // Acquire all request buffers first
   3634     streamID.num_streams = 0;
   3635     int blob_request = 0;
   3636     uint32_t snapshotStreamId = 0;
   3637     for (size_t i = 0; i < request->num_output_buffers; i++) {
   3638         const camera3_stream_buffer_t& output = request->output_buffers[i];
   3639         QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
   3640 
   3641         if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
   3642             //Call function to store local copy of jpeg data for encode params.
   3643             blob_request = 1;
   3644             snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
   3645         }
   3646 
   3647         if (output.acquire_fence != -1) {
   3648            rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
   3649            close(output.acquire_fence);
   3650            if (rc != OK) {
   3651               ALOGE("%s: sync wait failed %d", __func__, rc);
   3652               pthread_mutex_unlock(&mMutex);
   3653               return rc;
   3654            }
   3655         }
   3656 
   3657         streamID.streamID[streamID.num_streams] =
   3658             channel->getStreamID(channel->getStreamTypeMask());
   3659         streamID.num_streams++;
   3660 
   3661         if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
   3662             isVidBufRequested = true;
   3663         }
   3664     }
   3665 
   3666     if (blob_request && mRawDumpChannel) {
   3667         CDBG("%s: Trigger Raw based on blob request if Raw dump is enabled", __func__);
   3668         streamID.streamID[streamID.num_streams] =
   3669             mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
   3670         streamID.num_streams++;
   3671     }
   3672 
   3673     if(request->input_buffer == NULL) {
   3674         /* Parse the settings:
   3675          * - For every request in NORMAL MODE
   3676          * - For every request in HFR mode during preview only case
   3677          * - For first request of every batch in HFR mode during video
   3678          * recording. In batchmode the same settings except frame number is
   3679          * repeated in each request of the batch.
   3680          */
   3681         if (!mBatchSize ||
   3682            (mBatchSize && !isVidBufRequested) ||
   3683            (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
   3684             rc = setFrameParameters(request, streamID, blob_request, snapshotStreamId);
   3685             if (rc < 0) {
   3686                 ALOGE("%s: fail to set frame parameters", __func__);
   3687                 pthread_mutex_unlock(&mMutex);
   3688                 return rc;
   3689             }
   3690         }
   3691         /* For batchMode HFR, setFrameParameters is not called for every
   3692          * request. But only frame number of the latest request is parsed.
   3693          * Keep track of first and last frame numbers in a batch so that
   3694          * metadata for the frame numbers of batch can be duplicated in
         * handleBatchMetadata */
   3696         if (mBatchSize) {
   3697             if (!mToBeQueuedVidBufs) {
   3698                 //start of the batch
   3699                 mFirstFrameNumberInBatch = request->frame_number;
   3700             }
   3701             if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
   3702                 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
   3703                 ALOGE("%s: Failed to set the frame number in the parameters", __func__);
   3704                 return BAD_VALUE;
   3705             }
   3706         }
   3707         if (mNeedSensorRestart) {
   3708             /* Unlock the mutex as restartSensor waits on the channels to be
   3709              * stopped, which in turn calls stream callback functions -
   3710              * handleBufferWithLock and handleMetadataWithLock */
   3711             pthread_mutex_unlock(&mMutex);
   3712             rc = dynamicUpdateMetaStreamInfo();
   3713             if (rc != NO_ERROR) {
   3714                 ALOGE("%s: Restarting the sensor failed", __func__);
   3715                 return BAD_VALUE;
   3716             }
   3717             mNeedSensorRestart = false;
   3718             pthread_mutex_lock(&mMutex);
   3719         }
   3720     } else {
   3721 
   3722         if (request->input_buffer->acquire_fence != -1) {
   3723            rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
   3724            close(request->input_buffer->acquire_fence);
   3725            if (rc != OK) {
   3726               ALOGE("%s: input buffer sync wait failed %d", __func__, rc);
   3727               pthread_mutex_unlock(&mMutex);
   3728               return rc;
   3729            }
   3730         }
   3731     }
   3732 
   3733     if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
   3734         mLastCustIntentFrmNum = frameNumber;
   3735     }
   3736     /* Update pending request list and pending buffers map */
   3737     PendingRequestInfo pendingRequest;
   3738     pendingRequestIterator latestRequest;
   3739     pendingRequest.frame_number = frameNumber;
   3740     pendingRequest.num_buffers = request->num_output_buffers;
   3741     pendingRequest.request_id = request_id;
   3742     pendingRequest.blob_request = blob_request;
   3743     pendingRequest.timestamp = 0;
   3744     pendingRequest.bUrgentReceived = 0;
   3745     if (request->input_buffer) {
   3746         pendingRequest.input_buffer =
   3747                 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
   3748         *(pendingRequest.input_buffer) = *(request->input_buffer);
   3749         pInputBuffer = pendingRequest.input_buffer;
   3750     } else {
   3751        pendingRequest.input_buffer = NULL;
   3752        pInputBuffer = NULL;
   3753     }
   3754 
   3755     pendingRequest.pipeline_depth = 0;
   3756     pendingRequest.partial_result_cnt = 0;
   3757     extractJpegMetadata(mCurJpegMeta, request);
   3758     pendingRequest.jpegMetadata = mCurJpegMeta;
   3759     pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
   3760     pendingRequest.shutter_notified = false;
   3761     pendingRequest.need_dynamic_blklvl = false;
   3762     pendingRequest.pending_extra_result = false;
   3763 
   3764     //extract capture intent
   3765     if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
   3766         mCaptureIntent =
   3767                 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
   3768     }
   3769     if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
   3770         mHybridAeEnable =
   3771                 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
   3772     }
   3773     pendingRequest.capture_intent = mCaptureIntent;
   3774     pendingRequest.hybrid_ae_enable = mHybridAeEnable;
   3775 
   3776     for (size_t i = 0; i < request->num_output_buffers; i++) {
   3777         RequestedBufferInfo requestedBuf;
   3778         memset(&requestedBuf, 0, sizeof(requestedBuf));
   3779         requestedBuf.stream = request->output_buffers[i].stream;
   3780         requestedBuf.buffer = NULL;
   3781         pendingRequest.buffers.push_back(requestedBuf);
   3782 
   3783         // Add to buffer handle the pending buffers list
   3784         PendingBufferInfo bufferInfo;
   3785         bufferInfo.frame_number = frameNumber;
   3786         bufferInfo.buffer = request->output_buffers[i].buffer;
   3787         bufferInfo.stream = request->output_buffers[i].stream;
   3788         mPendingBuffersMap.mPendingBufferList.push_back(bufferInfo);
   3789         mPendingBuffersMap.num_buffers++;
   3790         QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
   3791         CDBG("%s: frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
   3792                 __func__, frameNumber, bufferInfo.buffer,
   3793                 channel->getStreamTypeMask(), bufferInfo.stream->format);
   3794 
   3795         if (bufferInfo.stream->format == HAL_PIXEL_FORMAT_RAW16) {
   3796             if (gCamCapability[mCameraId]->optical_black_region_count > 0) {
   3797                 CDBG("%s, frame_number:%d, need dynamic blacklevel", __func__, frameNumber);
   3798                 pendingRequest.need_dynamic_blklvl = true;
   3799             }
   3800         }
   3801     }
   3802     mPendingBuffersMap.last_frame_number = frameNumber;
   3803     latestRequest = mPendingRequestsList.insert(
   3804             mPendingRequestsList.end(), pendingRequest);
   3805     if(mFlush) {
   3806         pthread_mutex_unlock(&mMutex);
   3807         return NO_ERROR;
   3808     }
   3809 
   3810     // Notify metadata channel we receive a request
   3811     mMetadataChannel->request(NULL, frameNumber);
   3812 
   3813     if(request->input_buffer != NULL){
   3814         CDBG("%s: Input request, frame_number %d", __func__, frameNumber);
   3815         rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
   3816         if (NO_ERROR != rc) {
   3817             ALOGE("%s: fail to set reproc parameters", __func__);
   3818             pthread_mutex_unlock(&mMutex);
   3819             return rc;
   3820         }
   3821     }
   3822 
   3823     // Call request on other streams
   3824     uint32_t streams_need_metadata = 0;
   3825     pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
   3826     for (size_t i = 0; i < request->num_output_buffers; i++) {
   3827         const camera3_stream_buffer_t& output = request->output_buffers[i];
   3828         QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
   3829 
   3830         if (channel == NULL) {
   3831             ALOGE("%s: invalid channel pointer for stream", __func__);
   3832             continue;
   3833         }
   3834 
   3835         if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
   3836             if(request->input_buffer != NULL){
   3837                 rc = channel->request(output.buffer, frameNumber,
   3838                         pInputBuffer, &mReprocMeta);
   3839                 if (rc < 0) {
   3840                     ALOGE("%s: Fail to request on picture channel", __func__);
   3841                     pthread_mutex_unlock(&mMutex);
   3842                     return rc;
   3843                 }
   3844             } else {
   3845                 CDBG("%s: %d, snapshot request with buffer %p, frame_number %d", __func__,
   3846                         __LINE__, output.buffer, frameNumber);
   3847                 if (!request->settings) {
   3848                     rc = channel->request(output.buffer, frameNumber,
   3849                             NULL, mPrevParameters);
   3850                 } else {
   3851                     rc = channel->request(output.buffer, frameNumber,
   3852                             NULL, mParameters);
   3853                 }
   3854                 if (rc < 0) {
   3855                     ALOGE("%s: Fail to request on picture channel", __func__);
   3856                     pthread_mutex_unlock(&mMutex);
   3857                     return rc;
   3858                 }
   3859                 pendingBufferIter->need_metadata = true;
   3860                 streams_need_metadata++;
   3861             }
   3862         } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
   3863             bool needMetadata = false;
   3864             QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
   3865             rc = yuvChannel->request(output.buffer, frameNumber,
   3866                     pInputBuffer,
   3867                     (pInputBuffer ? &mReprocMeta : mParameters), needMetadata);
   3868             if (rc < 0) {
   3869                 ALOGE("%s: Fail to request on YUV channel", __func__);
   3870                 pthread_mutex_unlock(&mMutex);
   3871                 return rc;
   3872             }
   3873             pendingBufferIter->need_metadata = needMetadata;
   3874             if (needMetadata)
   3875                 streams_need_metadata += 1;
   3876             CDBG("%s: calling YUV channel request, need_metadata is %d",
   3877                     __func__, needMetadata);
   3878         } else {
   3879             CDBG("%s: %d, request with buffer %p, frame_number %d", __func__,
   3880                 __LINE__, output.buffer, frameNumber);
   3881             rc = channel->request(output.buffer, frameNumber);
   3882             if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
   3883                     && mBatchSize) {
   3884                 mToBeQueuedVidBufs++;
   3885                 if (mToBeQueuedVidBufs == mBatchSize) {
   3886                     channel->queueBatchBuf();
   3887                 }
   3888             }
   3889             if (rc < 0) {
   3890                 ALOGE("%s: request failed", __func__);
   3891                 pthread_mutex_unlock(&mMutex);
   3892                 return rc;
   3893             }
   3894         }
   3895         pendingBufferIter++;
   3896     }
   3897 
   3898     //If 2 streams have need_metadata set to true, fail the request, unless
   3899     //we copy/reference count the metadata buffer
   3900     if (streams_need_metadata > 1) {
   3901         ALOGE("%s: not supporting request in which two streams requires"
   3902                 " 2 HAL metadata for reprocessing", __func__);
   3903         pthread_mutex_unlock(&mMutex);
   3904         return -EINVAL;
   3905     }
   3906 
   3907     if(request->input_buffer == NULL) {
   3908         /* Set the parameters to backend:
   3909          * - For every request in NORMAL MODE
   3910          * - For every request in HFR mode during preview only case
   3911          * - Once every batch in HFR mode during video recording
   3912          */
   3913         if (!mBatchSize ||
   3914            (mBatchSize && !isVidBufRequested) ||
   3915            (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
   3916             CDBG("%s: set_parms  batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
   3917                     __func__, mBatchSize, isVidBufRequested,
   3918                     mToBeQueuedVidBufs);
   3919             rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
   3920                     mParameters);
   3921             if (rc < 0) {
   3922                 ALOGE("%s: set_parms failed", __func__);
   3923             }
   3924             /* reset to zero coz, the batch is queued */
   3925             mToBeQueuedVidBufs = 0;
   3926             mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
   3927         }
   3928         mPendingLiveRequest++;
   3929     }
   3930 
   3931     CDBG("%s: mPendingLiveRequest = %d", __func__, mPendingLiveRequest);
   3932 
   3933     mFirstRequest = false;
   3934     // Added a timed condition wait
   3935     struct timespec ts;
   3936     uint8_t isValidTimeout = 1;
   3937     rc = clock_gettime(CLOCK_MONOTONIC, &ts);
   3938     if (rc < 0) {
   3939       isValidTimeout = 0;
   3940       ALOGE("%s: Error reading the real time clock!!", __func__);
   3941     }
   3942     else {
   3943       // Make timeout as 5 sec for request to be honored
   3944       ts.tv_sec += 5;
   3945     }
   3946     //Block on conditional variable
   3947     if (mBatchSize) {
   3948         /* For HFR, more buffers are dequeued upfront to improve the performance */
   3949         minInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
   3950         maxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
   3951     }
   3952 
   3953     // Do not block in the middle of a batch.
   3954     while ((mPendingLiveRequest >= minInFlightRequests) && !pInputBuffer &&
   3955             mToBeQueuedVidBufs == 0) {
   3956         if (!isValidTimeout) {
   3957             CDBG("%s: Blocking on conditional wait", __func__);
   3958             pthread_cond_wait(&mRequestCond, &mMutex);
   3959         }
   3960         else {
   3961             CDBG("%s: Blocking on timed conditional wait", __func__);
   3962             rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
   3963             if (rc == ETIMEDOUT) {
   3964                 rc = -ENODEV;
   3965                 ALOGE("%s: Unblocked on timeout!!!!", __func__);
   3966                 break;
   3967             }
   3968         }
   3969         CDBG("%s: Unblocked", __func__);
   3970         if (mWokenUpByDaemon) {
   3971             mWokenUpByDaemon = false;
   3972             if (mPendingLiveRequest < maxInFlightRequests)
   3973                 break;
   3974         }
   3975     }
   3976     pthread_mutex_unlock(&mMutex);
   3977 
   3978     return rc;
   3979 }
   3980 
   3981 /*===========================================================================
   3982  * FUNCTION   : dump
   3983  *
 * DESCRIPTION: Dump HAL state (pending requests, pending buffers, and the
 *              pending frame drop list) to the given file descriptor
 *
 * PARAMETERS :
 *   @fd : file descriptor to write the dump to (e.g. supplied by dumpsys)
 *
 * RETURN     : none
   3990  *==========================================================================*/
   3991 void QCamera3HardwareInterface::dump(int fd)
   3992 {
   3993     pthread_mutex_lock(&mMutex);
   3994     dprintf(fd, "\n Camera HAL3 information Begin \n");
   3995 
   3996     dprintf(fd, "\nNumber of pending requests: %zu \n",
   3997         mPendingRequestsList.size());
   3998     dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
   3999     dprintf(fd, " Frame | Number of Buffers |   Req Id:   | Blob Req | Input buffer present\n");
   4000     dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
   4001     for(pendingRequestIterator i = mPendingRequestsList.begin();
   4002             i != mPendingRequestsList.end(); i++) {
   4003         dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
   4004         i->frame_number, i->num_buffers, i->request_id, i->blob_request,
   4005         i->input_buffer);
   4006     }
   4007     dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
   4008                 mPendingBuffersMap.num_buffers);
   4009     dprintf(fd, "-------+------------------\n");
   4010     dprintf(fd, " Frame | Stream type mask \n");
   4011     dprintf(fd, "-------+------------------\n");
   4012     for(List<PendingBufferInfo>::iterator i =
   4013         mPendingBuffersMap.mPendingBufferList.begin();
   4014         i != mPendingBuffersMap.mPendingBufferList.end(); i++) {
   4015         QCamera3Channel *channel = (QCamera3Channel *)(i->stream->priv);
   4016         dprintf(fd, " %5d | %11d \n",
   4017                 i->frame_number, channel->getStreamTypeMask());
   4018     }
   4019     dprintf(fd, "-------+------------------\n");
   4020 
   4021     dprintf(fd, "\nPending frame drop list: %zu\n",
   4022         mPendingFrameDropList.size());
   4023     dprintf(fd, "-------+-----------\n");
   4024     dprintf(fd, " Frame | Stream ID \n");
   4025     dprintf(fd, "-------+-----------\n");
   4026     for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
   4027         i != mPendingFrameDropList.end(); i++) {
   4028         dprintf(fd, " %5d | %9d \n",
   4029             i->frame_number, i->stream_ID);
   4030     }
   4031     dprintf(fd, "-------+-----------\n");
   4032 
   4033     dprintf(fd, "\n Camera HAL3 information End \n");
   4034 
   4035     /* use dumpsys media.camera as trigger to send update debug level event */
   4036     mUpdateDebugLevel = true;
   4037     pthread_mutex_unlock(&mMutex);
   4038     return;
   4039 }
   4040 
   4041 /*===========================================================================
   4042  * FUNCTION   : flush
   4043  *
 * DESCRIPTION: Flush the pipeline: stop all channels, return all pending
 *              requests to the framework with error status, then restart
 *              the channels so the session can continue.
 *
 * PARAMETERS : none
 *
 * RETURN     : 0 on success; negative error code on failure
   4050  *==========================================================================*/
   4051 int QCamera3HardwareInterface::flush()
   4052 {
   4053     ATRACE_CALL();
   4054     int32_t rc = NO_ERROR;
   4055 
   4056     CDBG("%s: Unblocking Process Capture Request", __func__);
   4057     pthread_mutex_lock(&mMutex);
   4058 
   4059     if (mFirstRequest) {
   4060         pthread_mutex_unlock(&mMutex);
   4061         return NO_ERROR;
   4062     }
   4063 
   4064     mFlush = true;
   4065     pthread_mutex_unlock(&mMutex);
   4066 
   4067     rc = stopAllChannels();
   4068     if (rc < 0) {
   4069         ALOGE("%s: stopAllChannels failed", __func__);
   4070         return rc;
   4071     }
   4072     if (mChannelHandle) {
   4073         mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
   4074                 mChannelHandle);
   4075     }
   4076 
   4077     // Reset bundle info
   4078     rc = setBundleInfo();
   4079     if (rc < 0) {
   4080         ALOGE("%s: setBundleInfo failed %d", __func__, rc);
   4081         return rc;
   4082     }
   4083 
   4084     // Mutex Lock
   4085     pthread_mutex_lock(&mMutex);
   4086 
   4087     // Unblock process_capture_request
   4088     mPendingLiveRequest = 0;
   4089     pthread_cond_signal(&mRequestCond);
   4090 
   4091     rc = notifyErrorForPendingRequests();
   4092     if (rc < 0) {
   4093         ALOGE("%s: notifyErrorForPendingRequests failed", __func__);
   4094         pthread_mutex_unlock(&mMutex);
   4095         return rc;
   4096     }
   4097 
   4098     mFlush = false;
   4099 
   4100     // Start the Streams/Channels
   4101     rc = startAllChannels();
   4102     if (rc < 0) {
   4103         ALOGE("%s: startAllChannels failed", __func__);
   4104         pthread_mutex_unlock(&mMutex);
   4105         return rc;
   4106     }
   4107 
   4108     if (mChannelHandle) {
   4109         mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
   4110                     mChannelHandle);
   4111         if (rc < 0) {
   4112             ALOGE("%s: start_channel failed", __func__);
   4113             pthread_mutex_unlock(&mMutex);
   4114             return rc;
   4115         }
   4116     }
   4117 
   4118     pthread_mutex_unlock(&mMutex);
   4119 
   4120     return 0;
   4121 }
   4122 
   4123 /*===========================================================================
   4124  * FUNCTION   : captureResultCb
   4125  *
   4126  * DESCRIPTION: Callback handler for all capture result
   4127  *              (streams, as well as metadata)
   4128  *
 * PARAMETERS :
 *   @metadata_buf : metadata buffer from the backend; NULL for a
 *                   buffer-only callback
 *   @buffer       : actual gralloc buffer to be returned to frameworks.
 *                   NULL if metadata.
 *   @frame_number : frame number of the request this result belongs to
 *   @isInputBuffer: true if @buffer is the request's input buffer
   4133  *
   4134  * RETURN     : NONE
   4135  *==========================================================================*/
   4136 void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
   4137                 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
   4138 {
   4139     if (metadata_buf) {
   4140         if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
   4141             handleBatchMetadata(metadata_buf,
   4142                     true /* free_and_bufdone_meta_buf */);
   4143         } else { /* mBatchSize = 0 */
   4144             hdrPlusPerfLock(metadata_buf);
   4145             pthread_mutex_lock(&mMutex);
   4146             handleMetadataWithLock(metadata_buf,
   4147                     true /* free_and_bufdone_meta_buf */,
   4148                     true /* last urgent frame of batch metadata */,
   4149                     true /* last frame of batch metadata */ );
   4150             pthread_mutex_unlock(&mMutex);
   4151         }
   4152     } else if (isInputBuffer) {
   4153         pthread_mutex_lock(&mMutex);
   4154         handleInputBufferWithLock(buffer, frame_number);
   4155         pthread_mutex_unlock(&mMutex);
   4156     } else {
   4157         pthread_mutex_lock(&mMutex);
   4158         handleBufferWithLock(buffer, frame_number);
   4159         pthread_mutex_unlock(&mMutex);
   4160     }
   4161     return;
   4162 }
   4163 
   4164 /*===========================================================================
   4165  * FUNCTION   : getReprocessibleOutputStreamId
   4166  *
   4167  * DESCRIPTION: Get source output stream id for the input reprocess stream
   4168  *              based on size and format, which would be the largest
   4169  *              output stream if an input stream exists.
   4170  *
   4171  * PARAMETERS :
   4172  *   @id      : return the stream id if found
   4173  *
   4174  * RETURN     : int32_t type of status
   4175  *              NO_ERROR  -- success
   4176  *              none-zero failure code
   4177  *==========================================================================*/
   4178 int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
   4179 {
   4180     stream_info_t* stream = NULL;
   4181 
   4182     /* check if any output or bidirectional stream with the same size and format
   4183        and return that stream */
   4184     if ((mInputStreamInfo.dim.width > 0) &&
   4185             (mInputStreamInfo.dim.height > 0)) {
   4186         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   4187                 it != mStreamInfo.end(); it++) {
   4188 
   4189             camera3_stream_t *stream = (*it)->stream;
   4190             if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
   4191                     (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
   4192                     (stream->format == mInputStreamInfo.format)) {
   4193                 // Usage flag for an input stream and the source output stream
   4194                 // may be different.
   4195                 CDBG("%s: Found reprocessible output stream! %p", __func__, *it);
   4196                 CDBG("%s: input stream usage 0x%x, current stream usage 0x%x",
   4197                         __func__, stream->usage, mInputStreamInfo.usage);
   4198 
   4199                 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
   4200                 if (channel != NULL && channel->mStreams[0]) {
   4201                     id = channel->mStreams[0]->getMyServerID();
   4202                     return NO_ERROR;
   4203                 }
   4204             }
   4205         }
   4206     } else {
   4207         CDBG("%s: No input stream, so no reprocessible output stream", __func__);
   4208     }
   4209     return NAME_NOT_FOUND;
   4210 }
   4211 
   4212 /*===========================================================================
   4213  * FUNCTION   : lookupFwkName
   4214  *
   4215  * DESCRIPTION: In case the enum is not same in fwk and backend
   4216  *              make sure the parameter is correctly propogated
   4217  *
   4218  * PARAMETERS  :
   4219  *   @arr      : map between the two enums
   4220  *   @len      : len of the map
   4221  *   @hal_name : name of the hal_parm to map
   4222  *
   4223  * RETURN     : int type of status
   4224  *              fwk_name  -- success
   4225  *              none-zero failure code
   4226  *==========================================================================*/
   4227 template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
   4228         size_t len, halType hal_name)
   4229 {
   4230 
   4231     for (size_t i = 0; i < len; i++) {
   4232         if (arr[i].hal_name == hal_name) {
   4233             return arr[i].fwk_name;
   4234         }
   4235     }
   4236 
   4237     /* Not able to find matching framework type is not necessarily
   4238      * an error case. This happens when mm-camera supports more attributes
   4239      * than the frameworks do */
   4240     CDBG_HIGH("%s: Cannot find matching framework type", __func__);
   4241     return NAME_NOT_FOUND;
   4242 }
   4243 
   4244 /*===========================================================================
   4245  * FUNCTION   : lookupHalName
   4246  *
   4247  * DESCRIPTION: In case the enum is not same in fwk and backend
   4248  *              make sure the parameter is correctly propogated
   4249  *
   4250  * PARAMETERS  :
   4251  *   @arr      : map between the two enums
   4252  *   @len      : len of the map
   4253  *   @fwk_name : name of the hal_parm to map
   4254  *
   4255  * RETURN     : int32_t type of status
   4256  *              hal_name  -- success
   4257  *              none-zero failure code
   4258  *==========================================================================*/
   4259 template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
   4260         size_t len, fwkType fwk_name)
   4261 {
   4262     for (size_t i = 0; i < len; i++) {
   4263         if (arr[i].fwk_name == fwk_name) {
   4264             return arr[i].hal_name;
   4265         }
   4266     }
   4267 
   4268     ALOGE("%s: Cannot find matching hal type fwk_name=%d", __func__, fwk_name);
   4269     return NAME_NOT_FOUND;
   4270 }
   4271 
   4272 /*===========================================================================
   4273  * FUNCTION   : lookupProp
   4274  *
   4275  * DESCRIPTION: lookup a value by its name
   4276  *
   4277  * PARAMETERS :
   4278  *   @arr     : map between the two enums
   4279  *   @len     : size of the map
   4280  *   @name    : name to be looked up
   4281  *
   4282  * RETURN     : Value if found
   4283  *              CAM_CDS_MODE_MAX if not found
   4284  *==========================================================================*/
   4285 template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
   4286         size_t len, const char *name)
   4287 {
   4288     if (name) {
   4289         for (size_t i = 0; i < len; i++) {
   4290             if (!strcmp(arr[i].desc, name)) {
   4291                 return arr[i].val;
   4292             }
   4293         }
   4294     }
   4295     return CAM_CDS_MODE_MAX;
   4296 }
   4297 
   4298 /*===========================================================================
   4299  *
   4300  * DESCRIPTION:
   4301  *
   4302  * PARAMETERS :
   4303  *   @metadata : metadata information from callback
   4304  *   @timestamp: metadata buffer timestamp
   4305  *   @request_id: request id
   4306  *   @hybrid_ae_enable: whether hybrid ae is enabled
   4307  *   @jpegMetadata: additional jpeg metadata
   4308  *   @pprocDone: whether internal offline postprocsesing is done
   4309  *   @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
   4310  *                         in a batch. Always true for non-batch mode.
   4311  *
   4312  * RETURN     : camera_metadata_t*
   4313  *              metadata in a format specified by fwk
   4314  *==========================================================================*/
   4315 camera_metadata_t*
   4316 QCamera3HardwareInterface::translateFromHalMetadata(
   4317                                  metadata_buffer_t *metadata,
   4318                                  nsecs_t timestamp,
   4319                                  int32_t request_id,
   4320                                  const CameraMetadata& jpegMetadata,
   4321                                  uint8_t pipeline_depth,
   4322                                  uint8_t capture_intent,
   4323                                  uint8_t hybrid_ae_enable,
   4324                                  bool pprocDone,
   4325                                  bool dynamic_blklvl,
   4326                                  bool lastMetadataInBatch)
   4327 {
   4328     CameraMetadata camMetadata;
   4329     camera_metadata_t *resultMetadata;
   4330 
   4331     if (!lastMetadataInBatch) {
   4332         /* In batch mode, use empty metadata if this is not the last in batch*/
   4333         resultMetadata = allocate_camera_metadata(0, 0);
   4334         return resultMetadata;
   4335     }
   4336 
   4337     if (jpegMetadata.entryCount())
   4338         camMetadata.append(jpegMetadata);
   4339 
   4340     camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
   4341     camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
   4342     camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
   4343     camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
   4344     camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
   4345 
   4346     IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
   4347         int64_t fwk_frame_number = *frame_number;
   4348         camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
   4349     }
   4350 
   4351     IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
   4352         int32_t fps_range[2];
   4353         fps_range[0] = (int32_t)float_range->min_fps;
   4354         fps_range[1] = (int32_t)float_range->max_fps;
   4355         camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
   4356                                       fps_range, 2);
   4357         CDBG("%s: urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
   4358             __func__, fps_range[0], fps_range[1]);
   4359     }
   4360 
   4361     IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
   4362         camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
   4363     }
   4364 
   4365     IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
   4366         int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
   4367                 METADATA_MAP_SIZE(SCENE_MODES_MAP),
   4368                 *sceneMode);
   4369         if (NAME_NOT_FOUND != val) {
   4370             uint8_t fwkSceneMode = (uint8_t)val;
   4371             camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
   4372             CDBG("%s: urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
   4373                     __func__, fwkSceneMode);
   4374         }
   4375     }
   4376 
   4377     IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
   4378         uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
   4379         camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
   4380     }
   4381 
   4382     IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
   4383         uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
   4384         camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
   4385     }
   4386 
   4387     IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
   4388         uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
   4389         camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
   4390     }
   4391 
   4392     IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
   4393             CAM_INTF_META_EDGE_MODE, metadata) {
   4394         uint8_t edgeStrength = (uint8_t) edgeApplication->sharpness;
   4395         camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
   4396     }
   4397 
   4398     IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
   4399         uint8_t fwk_flashPower = (uint8_t) *flashPower;
   4400         camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
   4401     }
   4402 
   4403     IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
   4404         camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
   4405     }
   4406 
   4407     IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
   4408         if (0 <= *flashState) {
   4409             uint8_t fwk_flashState = (uint8_t) *flashState;
   4410             if (!gCamCapability[mCameraId]->flash_available) {
   4411                 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
   4412             }
   4413             camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
   4414         }
   4415     }
   4416 
   4417     IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
   4418         int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
   4419         if (NAME_NOT_FOUND != val) {
   4420             uint8_t fwk_flashMode = (uint8_t)val;
   4421             camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
   4422         }
   4423     }
   4424 
   4425     IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
   4426         uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
   4427         camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
   4428     }
   4429 
   4430     IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
   4431         camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
   4432     }
   4433 
   4434     IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
   4435         camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
   4436     }
   4437 
   4438     IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
   4439         camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
   4440     }
   4441 
   4442     IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
   4443         uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
   4444         camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
   4445     }
   4446 
   4447     IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
   4448         uint8_t fwk_videoStab = (uint8_t) *videoStab;
   4449         camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
   4450     }
   4451 
   4452     IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
   4453         uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
   4454         camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
   4455     }
   4456 
   4457     IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
   4458         camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
   4459     }
   4460 
   4461     IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelSourcePattern,
   4462         CAM_INTF_META_BLACK_LEVEL_SOURCE_PATTERN, metadata) {
   4463 
   4464         CDBG("%s: dynamicblackLevel = %f %f %f %f", __func__,
   4465           blackLevelSourcePattern->cam_black_level[0],
   4466           blackLevelSourcePattern->cam_black_level[1],
   4467           blackLevelSourcePattern->cam_black_level[2],
   4468           blackLevelSourcePattern->cam_black_level[3]);
   4469     }
   4470 
   4471     IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
   4472         CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
   4473         float fwk_blackLevelInd[4];
   4474 
   4475         fwk_blackLevelInd[0] = blackLevelAppliedPattern->cam_black_level[0];
   4476         fwk_blackLevelInd[1] = blackLevelAppliedPattern->cam_black_level[1];
   4477         fwk_blackLevelInd[2] = blackLevelAppliedPattern->cam_black_level[2];
   4478         fwk_blackLevelInd[3] = blackLevelAppliedPattern->cam_black_level[3];
   4479 
   4480         CDBG("%s: applied dynamicblackLevel = %f %f %f %f", __func__,
   4481           blackLevelAppliedPattern->cam_black_level[0],
   4482           blackLevelAppliedPattern->cam_black_level[1],
   4483           blackLevelAppliedPattern->cam_black_level[2],
   4484           blackLevelAppliedPattern->cam_black_level[3]);
   4485         camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd, 4);
   4486         camMetadata.update(NEXUS_EXPERIMENTAL_2015_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd, 4);
   4487 
   4488         // if dynmaic_blklvl is true, we calculate blklvl from raw callback
   4489         // otherwise, use the value from linearization LUT.
   4490         if (dynamic_blklvl == false) {
   4491             // Need convert the internal 16 bit depth to sensor 10 bit sensor raw
   4492             // depth space.
   4493             fwk_blackLevelInd[0] /= 64.0;
   4494             fwk_blackLevelInd[1] /= 64.0;
   4495             fwk_blackLevelInd[2] /= 64.0;
   4496             fwk_blackLevelInd[3] /= 64.0;
   4497             camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd, 4);
   4498         }
   4499     }
   4500 
   4501     // Fixed whitelevel is used by ISP/Sensor
   4502     camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
   4503             &gCamCapability[mCameraId]->white_level, 1);
   4504 
   4505     if (gCamCapability[mCameraId]->optical_black_region_count != 0 &&
   4506         gCamCapability[mCameraId]->optical_black_region_count <= MAX_OPTICAL_BLACK_REGIONS) {
   4507         int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
   4508         for (size_t i = 0; i < gCamCapability[mCameraId]->optical_black_region_count * 4; i++) {
   4509             opticalBlackRegions[i] = gCamCapability[mCameraId]->optical_black_regions[i];
   4510         }
   4511         camMetadata.update(NEXUS_EXPERIMENTAL_2015_SENSOR_INFO_OPTICALLY_SHIELDED_REGIONS,
   4512                 opticalBlackRegions, gCamCapability[mCameraId]->optical_black_region_count * 4);
   4513     }
   4514 
   4515     IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
   4516             CAM_INTF_META_SCALER_CROP_REGION, metadata) {
   4517         int32_t scalerCropRegion[4];
   4518         scalerCropRegion[0] = hScalerCropRegion->left;
   4519         scalerCropRegion[1] = hScalerCropRegion->top;
   4520         scalerCropRegion[2] = hScalerCropRegion->width;
   4521         scalerCropRegion[3] = hScalerCropRegion->height;
   4522 
   4523         // Adjust crop region from sensor output coordinate system to active
   4524         // array coordinate system.
   4525         mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
   4526                 scalerCropRegion[2], scalerCropRegion[3]);
   4527 
   4528         camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
   4529     }
   4530 
   4531     IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
   4532         CDBG("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
   4533         camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
   4534     }
   4535 
   4536     IF_META_AVAILABLE(int64_t, sensorFameDuration,
   4537             CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
   4538         CDBG("%s: sensorFameDuration = %lld", __func__, *sensorFameDuration);
   4539         camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
   4540     }
   4541 
   4542     IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
   4543             CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
   4544         CDBG("%s: sensorRollingShutterSkew = %lld", __func__, *sensorRollingShutterSkew);
   4545         camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
   4546                 sensorRollingShutterSkew, 1);
   4547     }
   4548 
    4549     IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
    4550         CDBG("%s: sensorSensitivity = %d", __func__, *sensorSensitivity);
    4551         camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
    4552 
    4553         //calculate the noise profile based on sensitivity
    4554         double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
    4555         double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
              // NOTE(review): variable-length array is a compiler extension, not
              // standard C++; size depends on the sensor's color channel count.
    4556         double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
              // The same (S, O) model pair is replicated for every color channel.
    4557         for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
    4558             noise_profile[i]   = noise_profile_S;
    4559             noise_profile[i+1] = noise_profile_O;
    4560         }
    4561         CDBG("%s: noise model entry (S, O) is (%f, %f)", __func__,
    4562                 noise_profile_S, noise_profile_O);
    4563         camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
    4564                 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
    4565     }
   4566 
   4567     IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
   4568         uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
   4569         camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
   4570     }
   4571 
              // Translate HAL face-detect mode to the framework enum and, when
              // detection is enabled, publish the per-face statistics
              // (scores, rectangles, and — in FULL mode — ids and landmarks).
    4572     IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
    4573         int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
    4574                 *faceDetectMode);
    4575         if (NAME_NOT_FOUND != val) {
    4576             uint8_t fwk_faceDetectMode = (uint8_t)val;
    4577             camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
    4578 
    4579             if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
    4580                 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
    4581                         CAM_INTF_META_FACE_DETECTION, metadata) {
                              // Clamp the face count to the fixed-size local arrays.
    4582                     uint8_t numFaces = MIN(
    4583                             faceDetectionInfo->num_faces_detected, MAX_ROI);
                              // NOTE(review): faceIds is only zero-filled in the
                              // numFaces == 0 path below; when faces ARE detected it
                              // is published to ANDROID_STATISTICS_FACE_IDS without
                              // ever being written — uninitialized data. TODO:
                              // populate ids in the loop or drop the tag.
    4584                     int32_t faceIds[MAX_ROI];
    4585                     uint8_t faceScores[MAX_ROI];
    4586                     int32_t faceRectangles[MAX_ROI * 4];
    4587                     int32_t faceLandmarks[MAX_ROI * 6];
                              // j walks faceRectangles (4 ints/face), k walks
                              // faceLandmarks (6 ints/face).
    4588                     size_t j = 0, k = 0;
    4589 
    4590                     for (size_t i = 0; i < numFaces; i++) {
    4591                         faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
    4592                         // Adjust crop region from sensor output coordinate system to active
    4593                         // array coordinate system.
    4594                         cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
    4595                         mCropRegionMapper.toActiveArray(rect.left, rect.top,
    4596                                 rect.width, rect.height);
    4597 
    4598                         convertToRegions(faceDetectionInfo->faces[i].face_boundary,
    4599                                 faceRectangles+j, -1);
    4600 
    4601                         // Map the co-ordinate sensor output coordinate system to active
    4602                         // array coordinate system.
    4603                         cam_face_detection_info_t& face = faceDetectionInfo->faces[i];
    4604                         mCropRegionMapper.toActiveArray(face.left_eye_center.x,
    4605                                 face.left_eye_center.y);
    4606                         mCropRegionMapper.toActiveArray(face.right_eye_center.x,
    4607                                 face.right_eye_center.y);
    4608                         mCropRegionMapper.toActiveArray(face.mouth_center.x,
    4609                                 face.mouth_center.y);
    4610 
    4611                         convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
    4612                         j+= 4;
    4613                         k+= 6;
    4614                     }
                              // numFaces is unsigned, so this is effectively
                              // numFaces == 0: report zeroed arrays when no faces.
    4615                     if (numFaces <= 0) {
    4616                         memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
    4617                         memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
    4618                         memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
    4619                         memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
    4620                     }
    4621 
    4622                     camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
    4623                             numFaces);
    4624                     camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
    4625                             faceRectangles, numFaces * 4U);
                              // Ids and landmarks are only reported in FULL mode.
    4626                     if (fwk_faceDetectMode ==
    4627                             ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
    4628                         camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
    4629                         camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
    4630                                 faceLandmarks, numFaces * 6U);
    4631                    }
    4632                 }
    4633             }
    4634         }
    4635     }
   4636 
   4637     IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
   4638         uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
   4639         camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &fwk_histogramMode, 1);
   4640     }
   4641 
   4642     IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
   4643             CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
   4644         uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
   4645         camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
   4646     }
   4647 
   4648     IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
   4649             CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
   4650         camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
   4651                 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
   4652     }
   4653 
   4654     IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
   4655             CAM_INTF_META_LENS_SHADING_MAP, metadata) {
   4656         size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
   4657                 CAM_MAX_SHADING_MAP_HEIGHT);
   4658         size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
   4659                 CAM_MAX_SHADING_MAP_WIDTH);
   4660         camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
   4661                 lensShadingMap->lens_shading, 4U * map_width * map_height);
   4662     }
   4663 
   4664     IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
   4665         uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
   4666         camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
   4667     }
   4668 
   4669     IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
   4670         //Populate CAM_INTF_META_TONEMAP_CURVES
   4671         /* ch0 = G, ch 1 = B, ch 2 = R*/
   4672         if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
   4673             ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d",
   4674                     __func__, tonemap->tonemap_points_cnt,
   4675                     CAM_MAX_TONEMAP_CURVE_SIZE);
   4676             tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
   4677         }
   4678 
   4679         camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
   4680                         &tonemap->curves[0].tonemap_points[0][0],
   4681                         tonemap->tonemap_points_cnt * 2);
   4682 
   4683         camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
   4684                         &tonemap->curves[1].tonemap_points[0][0],
   4685                         tonemap->tonemap_points_cnt * 2);
   4686 
   4687         camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
   4688                         &tonemap->curves[2].tonemap_points[0][0],
   4689                         tonemap->tonemap_points_cnt * 2);
   4690     }
   4691 
   4692     IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
   4693             CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
   4694         camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
   4695                 CC_GAINS_COUNT);
   4696     }
   4697 
   4698     IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
   4699             CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
   4700         camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
   4701                 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
   4702                 CC_MATRIX_COLS * CC_MATRIX_ROWS);
   4703     }
   4704 
   4705     IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
   4706             CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
   4707         if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
   4708             ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d",
   4709                     __func__, toneCurve->tonemap_points_cnt,
   4710                     CAM_MAX_TONEMAP_CURVE_SIZE);
   4711             toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
   4712         }
   4713         camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
   4714                 (float*)toneCurve->curve.tonemap_points,
   4715                 toneCurve->tonemap_points_cnt * 2);
   4716     }
   4717 
   4718     IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
   4719             CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
   4720         camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
   4721                 predColorCorrectionGains->gains, 4);
   4722     }
   4723 
   4724     IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
   4725             CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
   4726         camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
   4727                 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
   4728                 CC_MATRIX_ROWS * CC_MATRIX_COLS);
   4729     }
   4730 
   4731     IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
   4732         camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
   4733     }
   4734 
   4735     IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
   4736         uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
   4737         camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
   4738     }
   4739 
   4740     IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
   4741         uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
   4742         camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
   4743     }
   4744 
   4745     IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
   4746         int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
   4747                 *effectMode);
   4748         if (NAME_NOT_FOUND != val) {
   4749             uint8_t fwk_effectMode = (uint8_t)val;
   4750             camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
   4751         }
   4752     }
   4753 
              // Translate the sensor test pattern mode and per-channel data.
              // The [R, Gr/Gb, Gb/Gr, B] ordering of the green channels depends
              // on the sensor's CFA (Bayer) arrangement.
    4754     IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
    4755             CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
    4756         int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
    4757                 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
    4758         if (NAME_NOT_FOUND != fwk_testPatternMode) {
    4759             camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
    4760         }
    4761         int32_t fwk_testPatternData[4];
    4762         fwk_testPatternData[0] = testPatternData->r;
    4763         fwk_testPatternData[3] = testPatternData->b;
    4764         switch (gCamCapability[mCameraId]->color_arrangement) {
    4765         case CAM_FILTER_ARRANGEMENT_RGGB:
    4766         case CAM_FILTER_ARRANGEMENT_GRBG:
    4767             fwk_testPatternData[1] = testPatternData->gr;
    4768             fwk_testPatternData[2] = testPatternData->gb;
    4769             break;
    4770         case CAM_FILTER_ARRANGEMENT_GBRG:
    4771         case CAM_FILTER_ARRANGEMENT_BGGR:
    4772             fwk_testPatternData[2] = testPatternData->gr;
    4773             fwk_testPatternData[1] = testPatternData->gb;
    4774             break;
                  // NOTE(review): in this default case fwk_testPatternData[1] and
                  // [2] are left uninitialized but are still published below —
                  // consider zero-initializing the array before the switch.
    4775         default:
    4776             ALOGE("%s: color arrangement %d is not supported", __func__,
    4777                 gCamCapability[mCameraId]->color_arrangement);
    4778             break;
    4779         }
    4780         camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
    4781     }
   4782 
   4783     IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
   4784         camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
   4785     }
   4786 
   4787     IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
   4788         String8 str((const char *)gps_methods);
   4789         camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
   4790     }
   4791 
   4792     IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
   4793         camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
   4794     }
   4795 
   4796     IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
   4797         camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
   4798     }
   4799 
   4800     IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
   4801         uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
   4802         camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
   4803     }
   4804 
   4805     IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
   4806         uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
   4807         camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
   4808     }
   4809 
   4810     IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
   4811         int32_t fwk_thumb_size[2];
   4812         fwk_thumb_size[0] = thumb_size->width;
   4813         fwk_thumb_size[1] = thumb_size->height;
   4814         camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
   4815     }
   4816 
   4817     IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
   4818         camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
   4819                 privateData,
   4820                 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
   4821     }
   4822 
              // Pack tuning parameters into a flat blob for the vendor tag:
              // a 6-word header (data version + sensor/VFE/CPP/CAC/mod3 section
              // sizes) followed by the sensor, VFE, CPP and CAC payloads, each
              // clamped to its TUNING_*_MAX size. `data` walks the blob.
    4823     if (metadata->is_tuning_params_valid) {
    4824         uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
    4825         uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
    4826         metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
    4827 
    4828 
    4829         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
    4830                 sizeof(uint32_t));
    4831         data += sizeof(uint32_t);
    4832 
    4833         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
    4834                 sizeof(uint32_t));
    4835         CDBG("tuning_sensor_data_size %d",(int)(*(int *)data));
    4836         data += sizeof(uint32_t);
    4837 
    4838         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
    4839                 sizeof(uint32_t));
    4840         CDBG("tuning_vfe_data_size %d",(int)(*(int *)data));
    4841         data += sizeof(uint32_t);
    4842 
    4843         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
    4844                 sizeof(uint32_t));
    4845         CDBG("tuning_cpp_data_size %d",(int)(*(int *)data));
    4846         data += sizeof(uint32_t);
    4847 
    4848         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
    4849                 sizeof(uint32_t));
    4850         CDBG("tuning_cac_data_size %d",(int)(*(int *)data));
    4851         data += sizeof(uint32_t);
    4852 
              // mod3 payload is not carried; its size is forced to zero in the header.
    4853         metadata->tuning_params.tuning_mod3_data_size = 0;
    4854         memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
    4855                 sizeof(uint32_t));
    4856         CDBG("tuning_mod3_data_size %d",(int)(*(int *)data));
    4857         data += sizeof(uint32_t);
    4858 
    4859         size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
    4860                 TUNING_SENSOR_DATA_MAX);
    4861         memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
    4862                 count);
    4863         data += count;
    4864 
    4865         count = MIN(metadata->tuning_params.tuning_vfe_data_size,
    4866                 TUNING_VFE_DATA_MAX);
    4867         memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
    4868                 count);
    4869         data += count;
    4870 
    4871         count = MIN(metadata->tuning_params.tuning_cpp_data_size,
    4872                 TUNING_CPP_DATA_MAX);
    4873         memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
    4874                 count);
    4875         data += count;
    4876 
    4877         count = MIN(metadata->tuning_params.tuning_cac_data_size,
    4878                 TUNING_CAC_DATA_MAX);
    4879         memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
    4880                 count);
    4881         data += count;
    4882 
              // NOTE(review): element count uses integer division by 4, so if the
              // total byte length is not a multiple of sizeof(uint32_t) the
              // trailing bytes are silently dropped — TODO confirm the section
              // sizes are word-aligned.
    4883         camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
    4884                 (int32_t *)(void *)tuning_meta_data_blob,
    4885                 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
    4886     }
   4887 
   4888     IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
   4889             CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
   4890         camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
   4891                 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
   4892                 NEUTRAL_COL_POINTS);
   4893     }
   4894 
   4895     IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
   4896         uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
   4897         camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
   4898     }
   4899 
   4900     IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
   4901         int32_t aeRegions[REGIONS_TUPLE_COUNT];
   4902         // Adjust crop region from sensor output coordinate system to active
   4903         // array coordinate system.
   4904         mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
   4905                 hAeRegions->rect.width, hAeRegions->rect.height);
   4906 
   4907         convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
   4908         camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
   4909                 REGIONS_TUPLE_COUNT);
   4910         CDBG("%s: Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
   4911                 __func__, aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
   4912                 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
   4913                 hAeRegions->rect.height);
   4914     }
   4915 
   4916     IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
   4917         int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
   4918         if (NAME_NOT_FOUND != val) {
   4919             uint8_t fwkAfMode = (uint8_t)val;
   4920             camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
   4921             CDBG("%s: Metadata : ANDROID_CONTROL_AF_MODE %d", __func__, val);
   4922         } else {
   4923             CDBG_HIGH("%s: Metadata not found : ANDROID_CONTROL_AF_MODE %d",
   4924                     __func__, val);
   4925         }
   4926     }
   4927 
   4928     IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
   4929         uint8_t fwk_afState = (uint8_t) *afState;
   4930         camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
   4931         CDBG("%s: Metadata : ANDROID_CONTROL_AF_STATE %u", __func__, *afState);
   4932     }
   4933 
   4934     IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
   4935         camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
   4936     }
   4937 
   4938     IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
   4939         camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
   4940     }
   4941 
   4942     IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
   4943         uint8_t fwk_lensState = *lensState;
   4944         camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
   4945     }
   4946 
   4947     IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
   4948         /*af regions*/
   4949         int32_t afRegions[REGIONS_TUPLE_COUNT];
   4950         // Adjust crop region from sensor output coordinate system to active
   4951         // array coordinate system.
   4952         mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
   4953                 hAfRegions->rect.width, hAfRegions->rect.height);
   4954 
   4955         convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
   4956         camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
   4957                 REGIONS_TUPLE_COUNT);
   4958         CDBG("%s: Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
   4959                 __func__, afRegions[0], afRegions[1], afRegions[2], afRegions[3],
   4960                 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
   4961                 hAfRegions->rect.height);
   4962     }
   4963 
   4964     IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
   4965         int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
   4966                 *hal_ab_mode);
   4967         if (NAME_NOT_FOUND != val) {
   4968             uint8_t fwk_ab_mode = (uint8_t)val;
   4969             camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
   4970         }
   4971     }
   4972 
   4973     IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
   4974         int val = lookupFwkName(SCENE_MODES_MAP,
   4975                 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
   4976         if (NAME_NOT_FOUND != val) {
   4977             uint8_t fwkBestshotMode = (uint8_t)val;
   4978             camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
   4979             CDBG("%s: Metadata : ANDROID_CONTROL_SCENE_MODE", __func__);
   4980         } else {
   4981             CDBG_HIGH("%s: Metadata not found : ANDROID_CONTROL_SCENE_MODE", __func__);
   4982         }
   4983     }
   4984 
   4985     IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
   4986          uint8_t fwk_mode = (uint8_t) *mode;
   4987          camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
   4988     }
   4989 
   4990     /* Constant metadata values to be update*/
   4991     uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
   4992     camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
   4993 
   4994     uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
   4995     camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
   4996 
   4997     int32_t hotPixelMap[2];
   4998     camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
   4999 
   5000     // CDS
   5001     IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
   5002         camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
   5003     }
   5004 
   5005     // TNR
   5006     IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
   5007         uint8_t tnr_enable       = tnr->denoise_enable;
   5008         int32_t tnr_process_type = (int32_t)tnr->process_plates;
   5009 
   5010         camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
   5011         camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
   5012     }
   5013 
    5014     // Reprocess crop data
              // Publish crop + ROI-map entries for the reprocessible output
              // stream (if one exists) via the QCAMERA3_CROP_* vendor tags.
    5015     IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
    5016         uint8_t cnt = crop_data->num_of_streams;
    5017         if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
    5018             // mm-qcamera-daemon only posts crop_data for streams
    5019             // not linked to pproc. So no valid crop metadata is not
    5020             // necessarily an error case.
    5021             CDBG("%s: No valid crop metadata entries", __func__);
    5022         } else {
    5023             uint32_t reproc_stream_id;
    5024             if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
    5025                 CDBG("%s: No reprocessible stream found, ignore crop data", __func__);
    5026             } else {
    5027                 int rc = NO_ERROR;
    5028                 Vector<int32_t> roi_map;
    5029                 int32_t *crop = new int32_t[cnt*4];
                      // NOTE(review): plain operator new throws std::bad_alloc
                      // rather than returning NULL, so this check can never fire;
                      // use new (std::nothrow) if the NO_MEMORY path is intended.
    5030                 if (NULL == crop) {
    5031                    rc = NO_MEMORY;
    5032                 }
    5033                 if (NO_ERROR == rc) {
    5034                     int32_t streams_found = 0;
                          // Only the first crop entry matching the reprocessible
                          // stream is reported (loop breaks after one match).
    5035                     for (size_t i = 0; i < cnt; i++) {
    5036                         if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
    5037                             if (pprocDone) {
    5038                                 // HAL already does internal reprocessing,
    5039                                 // either via reprocessing before JPEG encoding,
    5040                                 // or offline postprocessing for pproc bypass case.
    5041                                 crop[0] = 0;
    5042                                 crop[1] = 0;
    5043                                 crop[2] = mInputStreamInfo.dim.width;
    5044                                 crop[3] = mInputStreamInfo.dim.height;
    5045                             } else {
    5046                                 crop[0] = crop_data->crop_info[i].crop.left;
    5047                                 crop[1] = crop_data->crop_info[i].crop.top;
    5048                                 crop[2] = crop_data->crop_info[i].crop.width;
    5049                                 crop[3] = crop_data->crop_info[i].crop.height;
    5050                             }
    5051                             roi_map.add(crop_data->crop_info[i].roi_map.left);
    5052                             roi_map.add(crop_data->crop_info[i].roi_map.top);
    5053                             roi_map.add(crop_data->crop_info[i].roi_map.width);
    5054                             roi_map.add(crop_data->crop_info[i].roi_map.height);
    5055                             streams_found++;
    5056                             CDBG("%s: Adding reprocess crop data for stream %dx%d, %dx%d",
    5057                                     __func__,
    5058                                     crop[0], crop[1], crop[2], crop[3]);
    5059                             CDBG("%s: Adding reprocess crop roi map for stream %dx%d, %dx%d",
    5060                                     __func__,
    5061                                     crop_data->crop_info[i].roi_map.left,
    5062                                     crop_data->crop_info[i].roi_map.top,
    5063                                     crop_data->crop_info[i].roi_map.width,
    5064                                     crop_data->crop_info[i].roi_map.height);
    5065                             break;
    5066 
    5067                        }
    5068                     }
    5069                     camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
    5070                             &streams_found, 1);
    5071                     camMetadata.update(QCAMERA3_CROP_REPROCESS,
    5072                             crop, (size_t)(streams_found * 4));
    5073                     if (roi_map.array()) {
    5074                         camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
    5075                                 roi_map.array(), roi_map.size());
    5076                     }
    5077                }
    5078                if (crop) {
    5079                    delete [] crop;
    5080                }
    5081             }
    5082         }
    5083     }
   5084 
   5085     IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
   5086         int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
   5087                 *cacMode);
   5088         if (NAME_NOT_FOUND != val) {
   5089             uint8_t fwkCacMode = (uint8_t)val;
   5090             camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
   5091         } else {
   5092             ALOGE("%s: Invalid CAC camera parameter: %d", __func__, *cacMode);
   5093         }
   5094     }
   5095 
    5096     // Post blob of cam_cds_data through vendor tag.
    5097     IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
    5098         uint8_t cnt = cdsInfo->num_of_streams;
              // The override blob always advertises exactly one stream entry;
              // its cds_enable is copied from the matching reprocessible stream
              // below, and stays zero (from the memset) when no match is found.
    5099         cam_cds_data_t cdsDataOverride;
    5100         memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
    5101         cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
    5102         cdsDataOverride.num_of_streams = 1;
    5103         if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
    5104             uint32_t reproc_stream_id;
    5105             if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
    5106                 CDBG("%s: No reprocessible stream found, ignore cds data", __func__);
    5107             } else {
    5108                 for (size_t i = 0; i < cnt; i++) {
    5109                     if (cdsInfo->cds_info[i].stream_id ==
    5110                             reproc_stream_id) {
    5111                         cdsDataOverride.cds_info[0].cds_enable =
    5112                                 cdsInfo->cds_info[i].cds_enable;
    5113                         break;
    5114                     }
    5115                 }
    5116             }
    5117         } else {
    5118             CDBG("%s: Invalid stream count %d in CDS_DATA", __func__, cnt);
    5119         }
              // The blob is posted even when the stream count was invalid,
              // carrying only the session enable + zeroed per-stream entry.
    5120         camMetadata.update(QCAMERA3_CDS_INFO,
    5121                 (uint8_t *)&cdsDataOverride,
    5122                 sizeof(cam_cds_data_t));
    5123     }
   5124 
   5125     // Ldaf calibration data
   5126     if (!mLdafCalibExist) {
   5127         IF_META_AVAILABLE(uint32_t, ldafCalib,
   5128                 CAM_INTF_META_LDAF_EXIF, metadata) {
   5129             mLdafCalibExist = true;
   5130             mLdafCalib[0] = ldafCalib[0];
   5131             mLdafCalib[1] = ldafCalib[1];
   5132             CDBG("%s: ldafCalib[0] is %d, ldafCalib[1] is %d", __func__,
   5133                     ldafCalib[0], ldafCalib[1]);
   5134         }
   5135     }
   5136 
   5137     // Post Raw Sensitivity Boost = ISP digital gain
   5138     IF_META_AVAILABLE(float, ispDigitalGain, CAM_INTF_META_ISP_DIGITAL_GAIN, metadata) {
   5139         int32_t postRawSensitivity = static_cast<int32_t>(*ispDigitalGain * 100);
   5140         camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &postRawSensitivity, 1);
   5141     }
   5142 
   5143     resultMetadata = camMetadata.release();
   5144     return resultMetadata;
   5145 }
   5146 
   5147 /*===========================================================================
   5148  * FUNCTION   : saveExifParams
   5149  *
   5150  * DESCRIPTION:
   5151  *
   5152  * PARAMETERS :
   5153  *   @metadata : metadata information from callback
   5154  *
   5155  * RETURN     : none
   5156  *
   5157  *==========================================================================*/
void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
{
    // Cache per-module (AE/AWB/AF/ASD/stats) EXIF debug blobs from this
    // metadata buffer into mExifParams. Each blob is copied only when present,
    // and its *_valid flag is raised so get3AExifParams() consumers know
    // which entries are populated.
    IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
        mExifParams.ae_debug_params = *ae_exif_debug_params;
        mExifParams.ae_debug_params_valid = TRUE;
    }
    IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
        mExifParams.awb_debug_params = *awb_exif_debug_params;
        mExifParams.awb_debug_params_valid = TRUE;
    }
    IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
        mExifParams.af_debug_params = *af_exif_debug_params;
        mExifParams.af_debug_params_valid = TRUE;
    }
    IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
        mExifParams.asd_debug_params = *asd_exif_debug_params;
        mExifParams.asd_debug_params_valid = TRUE;
    }
    IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
        mExifParams.stats_debug_params = *stats_exif_debug_params;
        mExifParams.stats_debug_params_valid = TRUE;
    }
}
   5186 
   5187 /*===========================================================================
   5188  * FUNCTION   : get3AExifParams
   5189  *
   5190  * DESCRIPTION:
   5191  *
   5192  * PARAMETERS : none
   5193  *
   5194  *
   5195  * RETURN     : mm_jpeg_exif_params_t
   5196  *
   5197  *==========================================================================*/
mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
{
    // Return (by value) the 3A EXIF debug snapshot most recently cached by
    // saveExifParams(); *_valid flags inside indicate which entries are set.
    return mExifParams;
}
   5202 
   5203 /*===========================================================================
   5204  * FUNCTION   : translateCbUrgentMetadataToResultMetadata
   5205  *
   5206  * DESCRIPTION:
   5207  *
   5208  * PARAMETERS :
   5209  *   @metadata : metadata information from callback
   5210  *   @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
   5211  *                               urgent metadata in a batch. Always true for
   5212  *                               non-batch mode.
   5213  *
   5214  * RETURN     : camera_metadata_t*
   5215  *              metadata in a format specified by fwk
   5216  *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
                                (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch)
{
    CameraMetadata camMetadata;
    camera_metadata_t *resultMetadata;

    if (!lastUrgentMetadataInBatch) {
        /* In batch mode, use empty metadata if this is not the last in batch
         */
        resultMetadata = allocate_camera_metadata(0, 0);
        return resultMetadata;
    }

    // AWB state: HAL value narrowed to the framework's uint8 enum.
    IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
        uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
        camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", __func__, *whiteBalanceState);
    }

    // AE precapture trigger plus its id are published as two separate tags.
    IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
                &aecTrigger->trigger, 1);
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
                &aecTrigger->trigger_id, 1);
        CDBG("%s: urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
                __func__, aecTrigger->trigger);
        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d", __func__,
                aecTrigger->trigger_id);
    }

    // AE state, narrowed like AWB state above.
    IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
        uint8_t fwk_ae_state = (uint8_t) *ae_state;
        camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AE_STATE %u", __func__, *ae_state);
    }

    // AF trigger and its id, published as two tags.
    IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
                &af_trigger->trigger, 1);
        CDBG("%s: urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
                __func__, af_trigger->trigger);
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
        CDBG("%s: urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d", __func__,
                af_trigger->trigger_id);
    }

    // AWB mode: HAL enum must be mapped through WHITE_BALANCE_MODES_MAP; an
    // unmapped value is logged and the tag is simply omitted.
    IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkWhiteBalanceMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
            CDBG("%s: urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", __func__, val);
        } else {
            CDBG_HIGH("%s: urgent Metadata not found : ANDROID_CONTROL_AWB_MODE", __func__);
        }
    }

    // Deduce ANDROID_CONTROL_AE_MODE from three independent HAL values,
    // checked in priority order: redeye reduction enabled -> redeye AE mode;
    // else LED flash auto/on -> mapped flash AE mode; else plain AEC on/off.
    uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
    uint32_t aeMode = CAM_AE_MODE_MAX;
    int32_t flashMode = CAM_FLASH_MODE_MAX;
    int32_t redeye = -1;
    IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
        aeMode = *pAeMode;
    }
    IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
        flashMode = *pFlashMode;
    }
    IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
        redeye = *pRedeye;
    }

    if (1 == redeye) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
        int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
                flashMode);
        if (NAME_NOT_FOUND != val) {
            fwk_aeMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
        } else {
            ALOGE("%s: Unsupported flash mode %d", __func__, flashMode);
        }
    } else if (aeMode == CAM_AE_MODE_ON) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (aeMode == CAM_AE_MODE_OFF) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else {
        // None of the three sources yielded a usable value; tag is omitted.
        ALOGE("%s: Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
              "flashMode:%d, aeMode:%u!!!",
                __func__, redeye, flashMode, aeMode);
    }

    resultMetadata = camMetadata.release();
    return resultMetadata;
}
   5317 
   5318 /*===========================================================================
   5319  * FUNCTION   : dumpMetadataToFile
   5320  *
   5321  * DESCRIPTION: Dumps tuning metadata to file system
   5322  *
   5323  * PARAMETERS :
   5324  *   @meta           : tuning metadata
   5325  *   @dumpFrameCount : current dump frame count
   5326  *   @enabled        : Enable mask
   5327  *
   5328  *==========================================================================*/
   5329 void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
   5330                                                    uint32_t &dumpFrameCount,
   5331                                                    bool enabled,
   5332                                                    const char *type,
   5333                                                    uint32_t frameNumber)
   5334 {
   5335     uint32_t frm_num = 0;
   5336 
   5337     //Some sanity checks
   5338     if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
   5339         ALOGE("%s : Tuning sensor data size bigger than expected %d: %d",
   5340               __func__,
   5341               meta.tuning_sensor_data_size,
   5342               TUNING_SENSOR_DATA_MAX);
   5343         return;
   5344     }
   5345 
   5346     if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
   5347         ALOGE("%s : Tuning VFE data size bigger than expected %d: %d",
   5348               __func__,
   5349               meta.tuning_vfe_data_size,
   5350               TUNING_VFE_DATA_MAX);
   5351         return;
   5352     }
   5353 
   5354     if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
   5355         ALOGE("%s : Tuning CPP data size bigger than expected %d: %d",
   5356               __func__,
   5357               meta.tuning_cpp_data_size,
   5358               TUNING_CPP_DATA_MAX);
   5359         return;
   5360     }
   5361 
   5362     if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
   5363         ALOGE("%s : Tuning CAC data size bigger than expected %d: %d",
   5364               __func__,
   5365               meta.tuning_cac_data_size,
   5366               TUNING_CAC_DATA_MAX);
   5367         return;
   5368     }
   5369     //
   5370 
   5371     if(enabled){
   5372         char timeBuf[FILENAME_MAX];
   5373         char buf[FILENAME_MAX];
   5374         memset(buf, 0, sizeof(buf));
   5375         memset(timeBuf, 0, sizeof(timeBuf));
   5376         time_t current_time;
   5377         struct tm * timeinfo;
   5378         time (&current_time);
   5379         timeinfo = localtime (&current_time);
   5380         if (timeinfo != NULL) {
   5381             strftime (timeBuf, sizeof(timeBuf),
   5382                     QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
   5383         }
   5384         String8 filePath(timeBuf);
   5385         snprintf(buf,
   5386                 sizeof(buf),
   5387                 "%dm_%s_%d.bin",
   5388                 dumpFrameCount,
   5389                 type,
   5390                 frameNumber);
   5391         filePath.append(buf);
   5392         int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
   5393         if (file_fd >= 0) {
   5394             ssize_t written_len = 0;
   5395             meta.tuning_data_version = TUNING_DATA_VERSION;
   5396             void *data = (void *)((uint8_t *)&meta.tuning_data_version);
   5397             written_len += write(file_fd, data, sizeof(uint32_t));
   5398             data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
   5399             CDBG("tuning_sensor_data_size %d",(int)(*(int *)data));
   5400             written_len += write(file_fd, data, sizeof(uint32_t));
   5401             data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
   5402             CDBG("tuning_vfe_data_size %d",(int)(*(int *)data));
   5403             written_len += write(file_fd, data, sizeof(uint32_t));
   5404             data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
   5405             CDBG("tuning_cpp_data_size %d",(int)(*(int *)data));
   5406             written_len += write(file_fd, data, sizeof(uint32_t));
   5407             data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
   5408             CDBG("tuning_cac_data_size %d",(int)(*(int *)data));
   5409             written_len += write(file_fd, data, sizeof(uint32_t));
   5410             meta.tuning_mod3_data_size = 0;
   5411             data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
   5412             CDBG("tuning_mod3_data_size %d",(int)(*(int *)data));
   5413             written_len += write(file_fd, data, sizeof(uint32_t));
   5414             size_t total_size = meta.tuning_sensor_data_size;
   5415             data = (void *)((uint8_t *)&meta.data);
   5416             written_len += write(file_fd, data, total_size);
   5417             total_size = meta.tuning_vfe_data_size;
   5418             data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
   5419             written_len += write(file_fd, data, total_size);
   5420             total_size = meta.tuning_cpp_data_size;
   5421             data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
   5422             written_len += write(file_fd, data, total_size);
   5423             total_size = meta.tuning_cac_data_size;
   5424             data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
   5425             written_len += write(file_fd, data, total_size);
   5426             close(file_fd);
   5427         }else {
   5428             ALOGE("%s: fail to open file for metadata dumping", __func__);
   5429         }
   5430     }
   5431 }
   5432 
   5433 /*===========================================================================
   5434  * FUNCTION   : cleanAndSortStreamInfo
   5435  *
   5436  * DESCRIPTION: helper method to clean up invalid streams in stream_info,
   5437  *              and sort them such that raw stream is at the end of the list
   5438  *              This is a workaround for camera daemon constraint.
   5439  *
   5440  * PARAMETERS : None
   5441  *
   5442  *==========================================================================*/
   5443 void QCamera3HardwareInterface::cleanAndSortStreamInfo()
   5444 {
   5445     List<stream_info_t *> newStreamInfo;
   5446 
   5447     /*clean up invalid streams*/
   5448     for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
   5449             it != mStreamInfo.end();) {
   5450         if(((*it)->status) == INVALID){
   5451             QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
   5452             delete channel;
   5453             free(*it);
   5454             it = mStreamInfo.erase(it);
   5455         } else {
   5456             it++;
   5457         }
   5458     }
   5459 
   5460     // Move preview/video/callback/snapshot streams into newList
   5461     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   5462             it != mStreamInfo.end();) {
   5463         if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
   5464                 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
   5465                 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
   5466             newStreamInfo.push_back(*it);
   5467             it = mStreamInfo.erase(it);
   5468         } else
   5469             it++;
   5470     }
   5471     // Move raw streams into newList
   5472     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   5473             it != mStreamInfo.end();) {
   5474         newStreamInfo.push_back(*it);
   5475         it = mStreamInfo.erase(it);
   5476     }
   5477 
   5478     mStreamInfo = newStreamInfo;
   5479 }
   5480 
   5481 /*===========================================================================
   5482  * FUNCTION   : extractJpegMetadata
   5483  *
   5484  * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
   5485  *              JPEG metadata is cached in HAL, and return as part of capture
   5486  *              result when metadata is returned from camera daemon.
   5487  *
   5488  * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
   5489  *              @request:      capture request
   5490  *
   5491  *==========================================================================*/
   5492 void QCamera3HardwareInterface::extractJpegMetadata(
   5493         CameraMetadata& jpegMetadata,
   5494         const camera3_capture_request_t *request)
   5495 {
   5496     CameraMetadata frame_settings;
   5497     frame_settings = request->settings;
   5498 
   5499     if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
   5500         jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
   5501                 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
   5502                 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
   5503 
   5504     if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
   5505         jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
   5506                 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
   5507                 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
   5508 
   5509     if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
   5510         jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
   5511                 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
   5512                 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
   5513 
   5514     if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
   5515         jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
   5516                 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
   5517                 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
   5518 
   5519     if (frame_settings.exists(ANDROID_JPEG_QUALITY))
   5520         jpegMetadata.update(ANDROID_JPEG_QUALITY,
   5521                 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
   5522                 frame_settings.find(ANDROID_JPEG_QUALITY).count);
   5523 
   5524     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
   5525         jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
   5526                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
   5527                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
   5528 
   5529     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
   5530         int32_t thumbnail_size[2];
   5531         thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
   5532         thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
   5533         if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
   5534             int32_t orientation =
   5535                   frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
   5536             if ((orientation == 90) || (orientation == 270)) {
   5537                //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
   5538                int32_t temp;
   5539                temp = thumbnail_size[0];
   5540                thumbnail_size[0] = thumbnail_size[1];
   5541                thumbnail_size[1] = temp;
   5542             }
   5543          }
   5544          jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
   5545                 thumbnail_size,
   5546                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
   5547     }
   5548 
   5549 }
   5550 
   5551 /*===========================================================================
   5552  * FUNCTION   : convertToRegions
   5553  *
   5554  * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
   5555  *
   5556  * PARAMETERS :
   5557  *   @rect   : cam_rect_t struct to convert
   5558  *   @region : int32_t destination array
   5559  *   @weight : if we are converting from cam_area_t, weight is valid
   5560  *             else weight = -1
   5561  *
   5562  *==========================================================================*/
   5563 void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
   5564         int32_t *region, int weight)
   5565 {
   5566     region[0] = rect.left;
   5567     region[1] = rect.top;
   5568     region[2] = rect.left + rect.width;
   5569     region[3] = rect.top + rect.height;
   5570     if (weight > -1) {
   5571         region[4] = weight;
   5572     }
   5573 }
   5574 
   5575 /*===========================================================================
   5576  * FUNCTION   : convertFromRegions
   5577  *
   5578  * DESCRIPTION: helper method to convert from array to cam_rect_t
   5579  *
   5580  * PARAMETERS :
   5581  *   @rect   : cam_rect_t struct to convert
   5582  *   @region : int32_t destination array
   5583  *   @weight : if we are converting from cam_area_t, weight is valid
   5584  *             else weight = -1
   5585  *
   5586  *==========================================================================*/
   5587 void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
   5588         const camera_metadata_t *settings, uint32_t tag)
   5589 {
   5590     CameraMetadata frame_settings;
   5591     frame_settings = settings;
   5592     int32_t x_min = frame_settings.find(tag).data.i32[0];
   5593     int32_t y_min = frame_settings.find(tag).data.i32[1];
   5594     int32_t x_max = frame_settings.find(tag).data.i32[2];
   5595     int32_t y_max = frame_settings.find(tag).data.i32[3];
   5596     roi.weight = frame_settings.find(tag).data.i32[4];
   5597     roi.rect.left = x_min;
   5598     roi.rect.top = y_min;
   5599     roi.rect.width = x_max - x_min;
   5600     roi.rect.height = y_max - y_min;
   5601 }
   5602 
   5603 /*===========================================================================
   5604  * FUNCTION   : resetIfNeededROI
   5605  *
   5606  * DESCRIPTION: helper method to reset the roi if it is greater than scaler
   5607  *              crop region
   5608  *
   5609  * PARAMETERS :
   5610  *   @roi       : cam_area_t struct to resize
   5611  *   @scalerCropRegion : cam_crop_region_t region to compare against
   5612  *
   5613  *
   5614  *==========================================================================*/
bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
                                                 const cam_crop_region_t* scalerCropRegion)
{
    // Returns false when the ROI lies completely outside the scaler crop
    // region (caller should reset it); otherwise clamps the ROI into the
    // crop region in place and returns true.
    int32_t roi_x_max = roi->rect.width + roi->rect.left;
    int32_t roi_y_max = roi->rect.height + roi->rect.top;
    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;

    /* According to spec weight = 0 is used to indicate roi needs to be disabled
     * without having this check the calculations below to validate if the roi
     * is inside scaler crop region will fail resulting in the roi not being
     * reset causing algorithm to continue to use stale roi window
     */
    if (roi->weight == 0) {
        return true;
    }

    /* Each comment below describes the condition on the line above it. */
    if ((roi_x_max < scalerCropRegion->left) ||
        // right edge of roi window is left of scaler crop's left edge
        (roi_y_max < scalerCropRegion->top)  ||
        // bottom edge of roi window is above scaler crop's top edge
        (roi->rect.left > crop_x_max) ||
        // left edge of roi window is beyond (right of) scaler crop's right edge
        (roi->rect.top > crop_y_max)){
        // top edge of roi window is below scaler crop's bottom edge
        return false;
    }
    // Partial overlap: clamp each ROI edge into the crop region.
    if (roi->rect.left < scalerCropRegion->left) {
        roi->rect.left = scalerCropRegion->left;
    }
    if (roi->rect.top < scalerCropRegion->top) {
        roi->rect.top = scalerCropRegion->top;
    }
    if (roi_x_max > crop_x_max) {
        roi_x_max = crop_x_max;
    }
    if (roi_y_max > crop_y_max) {
        roi_y_max = crop_y_max;
    }
    roi->rect.width = roi_x_max - roi->rect.left;
    roi->rect.height = roi_y_max - roi->rect.top;
    return true;
}
   5658 
   5659 /*===========================================================================
   5660  * FUNCTION   : convertLandmarks
   5661  *
   5662  * DESCRIPTION: helper method to extract the landmarks from face detection info
   5663  *
   5664  * PARAMETERS :
   5665  *   @face   : cam_rect_t struct to convert
   5666  *   @landmarks : int32_t destination array
   5667  *
   5668  *
   5669  *==========================================================================*/
   5670 void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t *landmarks)
   5671 {
   5672     landmarks[0] = (int32_t)face.left_eye_center.x;
   5673     landmarks[1] = (int32_t)face.left_eye_center.y;
   5674     landmarks[2] = (int32_t)face.right_eye_center.x;
   5675     landmarks[3] = (int32_t)face.right_eye_center.y;
   5676     landmarks[4] = (int32_t)face.mouth_center.x;
   5677     landmarks[5] = (int32_t)face.mouth_center.y;
   5678 }
   5679 
   5680 #define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
   5681 /*===========================================================================
   5682  * FUNCTION   : initCapabilities
   5683  *
   5684  * DESCRIPTION: initialize camera capabilities in static data struct
   5685  *
   5686  * PARAMETERS :
   5687  *   @cameraId  : camera Id
   5688  *
   5689  * RETURN     : int32_t type of status
   5690  *              NO_ERROR  -- success
   5691  *              none-zero failure code
   5692  *==========================================================================*/
   5693 int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
   5694 {
   5695     int rc = 0;
   5696     mm_camera_vtbl_t *cameraHandle = NULL;
   5697     QCamera3HeapMemory *capabilityHeap = NULL;
   5698 
   5699     rc = camera_open((uint8_t)cameraId, &cameraHandle);
   5700     if (rc || !cameraHandle) {
   5701         ALOGE("%s: camera_open failed. rc = %d, cameraHandle = %p", __func__, rc, cameraHandle);
   5702         goto open_failed;
   5703     }
   5704 
   5705     capabilityHeap = new QCamera3HeapMemory(1);
   5706     if (capabilityHeap == NULL) {
   5707         ALOGE("%s: creation of capabilityHeap failed", __func__);
   5708         goto heap_creation_failed;
   5709     }
   5710     /* Allocate memory for capability buffer */
   5711     rc = capabilityHeap->allocate(sizeof(cam_capability_t));
   5712     if(rc != OK) {
   5713         ALOGE("%s: No memory for cappability", __func__);
   5714         goto allocate_failed;
   5715     }
   5716 
   5717     /* Map memory for capability buffer */
   5718     memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
   5719     rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
   5720                                 CAM_MAPPING_BUF_TYPE_CAPABILITY,
   5721                                 capabilityHeap->getFd(0),
   5722                                 sizeof(cam_capability_t));
   5723     if(rc < 0) {
   5724         ALOGE("%s: failed to map capability buffer", __func__);
   5725         goto map_failed;
   5726     }
   5727 
   5728     /* Query Capability */
   5729     rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
   5730     if(rc < 0) {
   5731         ALOGE("%s: failed to query capability",__func__);
   5732         goto query_failed;
   5733     }
   5734     gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
   5735     if (!gCamCapability[cameraId]) {
   5736         ALOGE("%s: out of memory", __func__);
   5737         goto query_failed;
   5738     }
   5739     memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
   5740                                         sizeof(cam_capability_t));
   5741     rc = 0;
   5742 
   5743 query_failed:
   5744     cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
   5745                             CAM_MAPPING_BUF_TYPE_CAPABILITY);
   5746 map_failed:
   5747     capabilityHeap->deallocate();
   5748 allocate_failed:
   5749     delete capabilityHeap;
   5750 heap_creation_failed:
   5751     cameraHandle->ops->close_camera(cameraHandle->camera_handle);
   5752     cameraHandle = NULL;
   5753 open_failed:
   5754     return rc;
   5755 }
   5756 
   5757 /*==========================================================================
 * FUNCTION   : get3AVersion
   5759  *
   5760  * DESCRIPTION: get the Q3A S/W version
   5761  *
   5762  * PARAMETERS :
   5763  *  @sw_version: Reference of Q3A structure which will hold version info upon
   5764  *               return
   5765  *
   5766  * RETURN     : None
   5767  *
   5768  *==========================================================================*/
   5769 void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
   5770 {
   5771     if(gCamCapability[mCameraId])
   5772         sw_version = gCamCapability[mCameraId]->q3a_version;
   5773     else
   5774         ALOGE("%s:Capability structure NULL!", __func__);
   5775 }
   5776 
   5777 
   5778 /*===========================================================================
   5779  * FUNCTION   : initParameters
   5780  *
   5781  * DESCRIPTION: initialize camera parameters
   5782  *
   5783  * PARAMETERS :
   5784  *
   5785  * RETURN     : int32_t type of status
   5786  *              NO_ERROR  -- success
   5787  *              none-zero failure code
   5788  *==========================================================================*/
   5789 int QCamera3HardwareInterface::initParameters()
   5790 {
   5791     int rc = 0;
   5792 
   5793     //Allocate Set Param Buffer
   5794     mParamHeap = new QCamera3HeapMemory(1);
   5795     rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
   5796     if(rc != OK) {
   5797         rc = NO_MEMORY;
   5798         ALOGE("Failed to allocate SETPARM Heap memory");
   5799         delete mParamHeap;
   5800         mParamHeap = NULL;
   5801         return rc;
   5802     }
   5803 
   5804     //Map memory for parameters buffer
   5805     rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
   5806             CAM_MAPPING_BUF_TYPE_PARM_BUF,
   5807             mParamHeap->getFd(0),
   5808             sizeof(metadata_buffer_t));
   5809     if(rc < 0) {
   5810         ALOGE("%s:failed to map SETPARM buffer",__func__);
   5811         rc = FAILED_TRANSACTION;
   5812         mParamHeap->deallocate();
   5813         delete mParamHeap;
   5814         mParamHeap = NULL;
   5815         return rc;
   5816     }
   5817 
   5818     mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
   5819 
   5820     mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
   5821     return rc;
   5822 }
   5823 
   5824 /*===========================================================================
   5825  * FUNCTION   : deinitParameters
   5826  *
   5827  * DESCRIPTION: de-initialize camera parameters
   5828  *
   5829  * PARAMETERS :
   5830  *
   5831  * RETURN     : NONE
   5832  *==========================================================================*/
   5833 void QCamera3HardwareInterface::deinitParameters()
   5834 {
   5835     mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
   5836             CAM_MAPPING_BUF_TYPE_PARM_BUF);
   5837 
   5838     mParamHeap->deallocate();
   5839     delete mParamHeap;
   5840     mParamHeap = NULL;
   5841 
   5842     mParameters = NULL;
   5843 
   5844     free(mPrevParameters);
   5845     mPrevParameters = NULL;
   5846 }
   5847 
   5848 /*===========================================================================
   5849  * FUNCTION   : calcMaxJpegSize
   5850  *
   5851  * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
   5852  *
   5853  * PARAMETERS :
   5854  *
   5855  * RETURN     : max_jpeg_size
   5856  *==========================================================================*/
   5857 size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
   5858 {
   5859     size_t max_jpeg_size = 0;
   5860     size_t temp_width, temp_height;
   5861     size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
   5862             MAX_SIZES_CNT);
   5863     for (size_t i = 0; i < count; i++) {
   5864         temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
   5865         temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
   5866         if (temp_width * temp_height > max_jpeg_size ) {
   5867             max_jpeg_size = temp_width * temp_height;
   5868         }
   5869     }
   5870     max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
   5871     return max_jpeg_size;
   5872 }
   5873 
   5874 /*===========================================================================
   5875  * FUNCTION   : getMaxRawSize
   5876  *
   5877  * DESCRIPTION: Fetches maximum raw size supported by the cameraId
   5878  *
   5879  * PARAMETERS :
   5880  *
   5881  * RETURN     : Largest supported Raw Dimension
   5882  *==========================================================================*/
   5883 cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
   5884 {
   5885     int max_width = 0;
   5886     cam_dimension_t maxRawSize;
   5887 
   5888     memset(&maxRawSize, 0, sizeof(cam_dimension_t));
   5889     for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
   5890         if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
   5891             max_width = gCamCapability[camera_id]->raw_dim[i].width;
   5892             maxRawSize = gCamCapability[camera_id]->raw_dim[i];
   5893         }
   5894     }
   5895     return maxRawSize;
   5896 }
   5897 
   5898 
   5899 /*===========================================================================
   5900  * FUNCTION   : calcMaxJpegDim
   5901  *
   5902  * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
   5903  *
   5904  * PARAMETERS :
   5905  *
   5906  * RETURN     : max_jpeg_dim
   5907  *==========================================================================*/
   5908 cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
   5909 {
   5910     cam_dimension_t max_jpeg_dim;
   5911     cam_dimension_t curr_jpeg_dim;
   5912     max_jpeg_dim.width = 0;
   5913     max_jpeg_dim.height = 0;
   5914     curr_jpeg_dim.width = 0;
   5915     curr_jpeg_dim.height = 0;
   5916     for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
   5917         curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
   5918         curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
   5919         if (curr_jpeg_dim.width * curr_jpeg_dim.height >
   5920             max_jpeg_dim.width * max_jpeg_dim.height ) {
   5921             max_jpeg_dim.width = curr_jpeg_dim.width;
   5922             max_jpeg_dim.height = curr_jpeg_dim.height;
   5923         }
   5924     }
   5925     return max_jpeg_dim;
   5926 }
   5927 
   5928 /*===========================================================================
   5929  * FUNCTION   : addStreamConfig
   5930  *
   5931  * DESCRIPTION: adds the stream configuration to the array
   5932  *
   5933  * PARAMETERS :
   5934  * @available_stream_configs : pointer to stream configuration array
   5935  * @scalar_format            : scalar format
   5936  * @dim                      : configuration dimension
   5937  * @config_type              : input or output configuration type
   5938  *
   5939  * RETURN     : NONE
   5940  *==========================================================================*/
   5941 void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
   5942         int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
   5943 {
   5944     available_stream_configs.add(scalar_format);
   5945     available_stream_configs.add(dim.width);
   5946     available_stream_configs.add(dim.height);
   5947     available_stream_configs.add(config_type);
   5948 }
   5949 
   5950 
   5951 /*===========================================================================
   5952  * FUNCTION   : initStaticMetadata
   5953  *
   5954  * DESCRIPTION: initialize the static metadata
   5955  *
   5956  * PARAMETERS :
   5957  *   @cameraId  : camera Id
   5958  *
   5959  * RETURN     : int32_t type of status
   5960  *              0  -- success
   5961  *              non-zero failure code
   5962  *==========================================================================*/
   5963 int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
   5964 {
   5965     int rc = 0;
   5966     CameraMetadata staticInfo;
   5967     size_t count = 0;
   5968     bool limitedDevice = false;
   5969     char prop[PROPERTY_VALUE_MAX];
   5970 
   5971     /* If sensor is YUV sensor (no raw support) or if per-frame control is not
   5972      * guaranteed, its advertised as limited device */
   5973     limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
   5974             (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type);
   5975 
   5976     uint8_t supportedHwLvl = limitedDevice ?
   5977             ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
   5978             // No capability check done here to distinguish LEVEL_FULL from
   5979             // LEVEL_3 - assuming this HAL will not run on devices that only
   5980             // meet FULL spec
   5981             ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
   5982 
   5983     staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
   5984             &supportedHwLvl, 1);
   5985 
   5986     bool facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
   5987     /*HAL 3 only*/
   5988     staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
   5989                     &gCamCapability[cameraId]->min_focus_distance, 1);
   5990 
   5991     staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
   5992                     &gCamCapability[cameraId]->hyper_focal_distance, 1);
   5993 
   5994     /*should be using focal lengths but sensor doesn't provide that info now*/
   5995     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
   5996                       &gCamCapability[cameraId]->focal_length,
   5997                       1);
   5998 
   5999     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
   6000                       gCamCapability[cameraId]->apertures,
   6001                       gCamCapability[cameraId]->apertures_count);
   6002 
   6003     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
   6004                 gCamCapability[cameraId]->filter_densities,
   6005                 gCamCapability[cameraId]->filter_densities_count);
   6006 
   6007 
   6008     staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
   6009                       (uint8_t *)gCamCapability[cameraId]->optical_stab_modes,
   6010                       gCamCapability[cameraId]->optical_stab_modes_count);
   6011 
   6012     int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
   6013             gCamCapability[cameraId]->lens_shading_map_size.height};
   6014     staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
   6015                       lens_shading_map_size,
   6016                       sizeof(lens_shading_map_size)/sizeof(int32_t));
   6017 
   6018     staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
   6019             gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
   6020 
   6021     staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
   6022             gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
   6023 
   6024     staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
   6025             &gCamCapability[cameraId]->max_frame_duration, 1);
   6026 
   6027     camera_metadata_rational baseGainFactor = {
   6028             gCamCapability[cameraId]->base_gain_factor.numerator,
   6029             gCamCapability[cameraId]->base_gain_factor.denominator};
   6030     staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
   6031                       &baseGainFactor, 1);
   6032 
   6033     staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
   6034                      (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
   6035 
   6036     int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
   6037             gCamCapability[cameraId]->pixel_array_size.height};
   6038     staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
   6039                       pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
   6040 
   6041     int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
   6042                                                 gCamCapability[cameraId]->active_array_size.top,
   6043                                                 gCamCapability[cameraId]->active_array_size.width,
   6044                                                 gCamCapability[cameraId]->active_array_size.height};
   6045     staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
   6046                       active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
   6047 
   6048     staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
   6049             &gCamCapability[cameraId]->white_level, 1);
   6050 
   6051     staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
   6052             gCamCapability[cameraId]->black_level_pattern, BLACK_LEVEL_PATTERN_CNT);
   6053 
   6054     bool hasBlackRegions = false;
   6055     if (gCamCapability[cameraId]->optical_black_region_count != 0 &&
   6056             gCamCapability[cameraId]->optical_black_region_count <= MAX_OPTICAL_BLACK_REGIONS) {
   6057         int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
   6058         for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i+=4) {
   6059             // Left
   6060             opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
   6061             //Top
   6062             opticalBlackRegions[i + 1] = gCamCapability[cameraId]->optical_black_regions[i + 1];
   6063             // Width
   6064             opticalBlackRegions[i + 2] = gCamCapability[cameraId]->optical_black_regions[i + 2] -
   6065                     gCamCapability[cameraId]->optical_black_regions[i];
   6066             // Height
   6067             opticalBlackRegions[i + 3] = gCamCapability[cameraId]->optical_black_regions[i + 3] -
   6068                     gCamCapability[cameraId]->optical_black_regions[i + 1];
   6069         }
   6070         staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
   6071                 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
   6072         hasBlackRegions = true;
   6073     }
   6074 
   6075     staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
   6076                       &gCamCapability[cameraId]->flash_charge_duration, 1);
   6077 
   6078     staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
   6079                       &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
   6080 
   6081     uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
   6082             ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
   6083             ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
   6084     staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
   6085             &timestampSource, 1);
   6086 
   6087     staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
   6088                       &gCamCapability[cameraId]->histogram_size, 1);
   6089 
   6090     staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
   6091             &gCamCapability[cameraId]->max_histogram_count, 1);
   6092 
   6093     int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
   6094             gCamCapability[cameraId]->sharpness_map_size.height};
   6095 
   6096     staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
   6097             sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
   6098 
   6099     staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
   6100             &gCamCapability[cameraId]->max_sharpness_map_value, 1);
   6101 
   6102     int32_t scalar_formats[] = {
   6103             ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
   6104             ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
   6105             ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
   6106             ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
   6107             HAL_PIXEL_FORMAT_RAW10,
   6108             HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
   6109     size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(int32_t);
   6110     staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
   6111                       scalar_formats,
   6112                       scalar_formats_count);
   6113 
   6114     int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
   6115     count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
   6116     makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
   6117             count, MAX_SIZES_CNT, available_processed_sizes);
   6118     staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
   6119             available_processed_sizes, count * 2);
   6120 
   6121     int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
   6122     count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
   6123     makeTable(gCamCapability[cameraId]->raw_dim,
   6124             count, MAX_SIZES_CNT, available_raw_sizes);
   6125     staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
   6126             available_raw_sizes, count * 2);
   6127 
   6128     int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
   6129     count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
   6130     makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
   6131             count, MAX_SIZES_CNT, available_fps_ranges);
   6132     staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
   6133             available_fps_ranges, count * 2);
   6134 
   6135     camera_metadata_rational exposureCompensationStep = {
   6136             gCamCapability[cameraId]->exp_compensation_step.numerator,
   6137             gCamCapability[cameraId]->exp_compensation_step.denominator};
   6138     staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
   6139                       &exposureCompensationStep, 1);
   6140 
   6141     Vector<uint8_t> availableVstabModes;
   6142     availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
   6143     char eis_prop[PROPERTY_VALUE_MAX];
   6144     memset(eis_prop, 0, sizeof(eis_prop));
   6145     property_get("persist.camera.eis.enable", eis_prop, "0");
   6146     uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
   6147     if (facingBack && eis_prop_set) {
   6148         availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
   6149     }
   6150     staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
   6151                       availableVstabModes.array(), availableVstabModes.size());
   6152 
   6153     /*HAL 1 and HAL 3 common*/
   6154     float maxZoom = 4;
   6155     staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
   6156             &maxZoom, 1);
   6157 
   6158     uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_FREEFORM;
   6159     staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
   6160 
   6161     int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
   6162     if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
   6163         max3aRegions[2] = 0; /* AF not supported */
   6164     staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
   6165             max3aRegions, 3);
   6166 
   6167     /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
   6168     memset(prop, 0, sizeof(prop));
   6169     property_get("persist.camera.facedetect", prop, "1");
   6170     uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
   6171     CDBG("%s: Support face detection mode: %d",
   6172             __func__, supportedFaceDetectMode);
   6173 
   6174     int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
   6175     Vector<uint8_t> availableFaceDetectModes;
   6176     availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
   6177     if (supportedFaceDetectMode == 1) {
   6178         availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
   6179     } else if (supportedFaceDetectMode == 2) {
   6180         availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
   6181     } else if (supportedFaceDetectMode == 3) {
   6182         availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
   6183         availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
   6184     } else {
   6185         maxFaces = 0;
   6186     }
   6187     staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
   6188             availableFaceDetectModes.array(),
   6189             availableFaceDetectModes.size());
   6190     staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
   6191             (int32_t *)&maxFaces, 1);
   6192 
   6193     int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
   6194                                            gCamCapability[cameraId]->exposure_compensation_max};
   6195     staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
   6196             exposureCompensationRange,
   6197             sizeof(exposureCompensationRange)/sizeof(int32_t));
   6198 
   6199     uint8_t lensFacing = (facingBack) ?
   6200             ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
   6201     staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
   6202 
   6203     staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
   6204                       available_thumbnail_sizes,
   6205                       sizeof(available_thumbnail_sizes)/sizeof(int32_t));
   6206 
   6207     /*all sizes will be clubbed into this tag*/
   6208     int32_t available_jpeg_sizes[MAX_SIZES_CNT * 2];
   6209     count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
   6210     size_t jpeg_sizes_cnt = filterJpegSizes(available_jpeg_sizes, available_processed_sizes,
   6211             count * 2, MAX_SIZES_CNT * 2, gCamCapability[cameraId]->active_array_size,
   6212             gCamCapability[cameraId]->max_downscale_factor);
   6213     /*android.scaler.availableStreamConfigurations*/
   6214     size_t max_stream_configs_size = count * scalar_formats_count * 4;
   6215     Vector<int32_t> available_stream_configs;
   6216     cam_dimension_t active_array_dim;
   6217     active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
   6218     active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
   6219     /* Add input/output stream configurations for each scalar formats*/
   6220     for (size_t j = 0; j < scalar_formats_count; j++) {
   6221         switch (scalar_formats[j]) {
   6222         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
   6223         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
   6224         case HAL_PIXEL_FORMAT_RAW10:
   6225             for (size_t i = 0; i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
   6226                 addStreamConfig(available_stream_configs, scalar_formats[j],
   6227                         gCamCapability[cameraId]->raw_dim[i],
   6228                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
   6229             }
   6230             break;
   6231         case HAL_PIXEL_FORMAT_BLOB:
   6232             cam_dimension_t jpeg_size;
   6233             for (size_t i = 0; i < jpeg_sizes_cnt/2; i++) {
   6234                 jpeg_size.width  = available_jpeg_sizes[i*2];
   6235                 jpeg_size.height = available_jpeg_sizes[i*2+1];
   6236                 addStreamConfig(available_stream_configs, scalar_formats[j],
   6237                         jpeg_size,
   6238                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
   6239             }
   6240             break;
   6241         case HAL_PIXEL_FORMAT_YCbCr_420_888:
   6242         case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
   6243         default:
   6244             cam_dimension_t largest_picture_size;
   6245             memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
   6246             for (size_t i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
   6247                 addStreamConfig(available_stream_configs, scalar_formats[j],
   6248                         gCamCapability[cameraId]->picture_sizes_tbl[i],
   6249                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
   6250                 /* Book keep largest */
   6251                 if (gCamCapability[cameraId]->picture_sizes_tbl[i].width
   6252                         >= largest_picture_size.width &&
   6253                         gCamCapability[cameraId]->picture_sizes_tbl[i].height
   6254                         >= largest_picture_size.height)
   6255                     largest_picture_size = gCamCapability[cameraId]->picture_sizes_tbl[i];
   6256             }
   6257             /*For below 2 formats we also support i/p streams for reprocessing advertise those*/
   6258             if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
   6259                     scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
   6260                  addStreamConfig(available_stream_configs, scalar_formats[j],
   6261                          largest_picture_size,
   6262                          ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
   6263             }
   6264             break;
   6265         }
   6266     }
   6267 
   6268     staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
   6269                       available_stream_configs.array(), available_stream_configs.size());
   6270     static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
   6271     staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
   6272 
   6273     static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
   6274     staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
   6275 
   6276     /* android.scaler.availableMinFrameDurations */
   6277     int64_t available_min_durations[max_stream_configs_size];
   6278     size_t idx = 0;
   6279     for (size_t j = 0; j < scalar_formats_count; j++) {
   6280         switch (scalar_formats[j]) {
   6281         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
   6282         case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
   6283         case HAL_PIXEL_FORMAT_RAW10:
   6284             for (size_t i = 0; i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
   6285                 available_min_durations[idx] = scalar_formats[j];
   6286                 available_min_durations[idx+1] =
   6287                     gCamCapability[cameraId]->raw_dim[i].width;
   6288                 available_min_durations[idx+2] =
   6289                     gCamCapability[cameraId]->raw_dim[i].height;
   6290                 available_min_durations[idx+3] =
   6291                     gCamCapability[cameraId]->raw_min_duration[i];
   6292                 idx+=4;
   6293             }
   6294             break;
   6295         default:
   6296             for (size_t i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
   6297                 available_min_durations[idx] = scalar_formats[j];
   6298                 available_min_durations[idx+1] =
   6299                     gCamCapability[cameraId]->picture_sizes_tbl[i].width;
   6300                 available_min_durations[idx+2] =
   6301                     gCamCapability[cameraId]->picture_sizes_tbl[i].height;
   6302                 available_min_durations[idx+3] =
   6303                     gCamCapability[cameraId]->picture_min_duration[i];
   6304                 idx+=4;
   6305             }
   6306             break;
   6307         }
   6308     }
   6309     staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
   6310                       &available_min_durations[0], idx);
   6311 
   6312     Vector<int32_t> available_hfr_configs;
   6313     for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
   6314         int32_t fps = 0;
   6315         switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
   6316         case CAM_HFR_MODE_60FPS:
   6317             fps = 60;
   6318             break;
   6319         case CAM_HFR_MODE_90FPS:
   6320             fps = 90;
   6321             break;
   6322         case CAM_HFR_MODE_120FPS:
   6323             fps = 120;
   6324             break;
   6325         case CAM_HFR_MODE_150FPS:
   6326             fps = 150;
   6327             break;
   6328         case CAM_HFR_MODE_180FPS:
   6329             fps = 180;
   6330             break;
   6331         case CAM_HFR_MODE_210FPS:
   6332             fps = 210;
   6333             break;
   6334         case CAM_HFR_MODE_240FPS:
   6335             fps = 240;
   6336             break;
   6337         case CAM_HFR_MODE_480FPS:
   6338             fps = 480;
   6339             break;
   6340         case CAM_HFR_MODE_OFF:
   6341         case CAM_HFR_MODE_MAX:
   6342         default:
   6343             break;
   6344         }
   6345 
   6346         /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
   6347         if (fps >= MIN_FPS_FOR_BATCH_MODE) {
   6348             /* For each HFR frame rate, need to advertise one variable fps range
   6349              * and one fixed fps range. Eg: for 120 FPS, advertise [30, 120] and
   6350              * [120, 120]. While camcorder preview alone is running [30, 120] is
   6351              * set by the app. When video recording is started, [120, 120] is
   6352              * set. This way sensor configuration does not change when recording
   6353              * is started */
   6354 
   6355             /* (width, height, fps_min, fps_max, batch_size_max) */
   6356             available_hfr_configs.add(
   6357                     gCamCapability[cameraId]->hfr_tbl[i].dim.width);
   6358             available_hfr_configs.add(
   6359                     gCamCapability[cameraId]->hfr_tbl[i].dim.height);
   6360             available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
   6361             available_hfr_configs.add(fps);
   6362             available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
   6363 
   6364             /* (width, height, fps_min, fps_max, batch_size_max) */
   6365             available_hfr_configs.add(
   6366                     gCamCapability[cameraId]->hfr_tbl[i].dim.width);
   6367             available_hfr_configs.add(
   6368                     gCamCapability[cameraId]->hfr_tbl[i].dim.height);
   6369             available_hfr_configs.add(fps);
   6370             available_hfr_configs.add(fps);
   6371             available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
   6372        }
   6373     }
    // ---- High speed (HFR) video configurations ----
    //Advertise HFR capability only if the property is set
    memset(prop, 0, sizeof(prop));
    // Property defaults to "1", so HFR is advertised unless explicitly disabled.
    property_get("persist.camera.hal3hfr.enable", prop, "1");
    uint8_t hfrEnable = (uint8_t)atoi(prop);

    // Publish only when the backend actually produced HFR config entries.
    if(hfrEnable && available_hfr_configs.array()) {
        staticInfo.update(
                ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
                available_hfr_configs.array(), available_hfr_configs.size());
    }

    // ---- Maximum JPEG blob size (bytes) for this camera ----
    int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
                      &max_jpeg_size, 1);

    // ---- Color effects: translate each HAL effect enum to its framework
    // value, silently dropping modes with no framework equivalent. ----
    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
    size_t size = 0;
    count = CAM_EFFECT_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
                gCamCapability[cameraId]->supported_effects[i]);
        if (NAME_NOT_FOUND != val) {
            avail_effects[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
                      avail_effects,
                      size);

    // ---- Scene modes: build the framework list (skipping OFF) and record
    // the HAL table index of each accepted mode so the overrides list below
    // can be matched up entry-for-entry. ----
    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
    size_t supported_scene_modes_cnt = 0;
    count = CAM_SCENE_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
    for (size_t i = 0; i < count; i++) {
        if (gCamCapability[cameraId]->supported_scene_modes[i] !=
                CAM_SCENE_MODE_OFF) {
            int val = lookupFwkName(SCENE_MODES_MAP,
                    METADATA_MAP_SIZE(SCENE_MODES_MAP),
                    gCamCapability[cameraId]->supported_scene_modes[i]);
            if (NAME_NOT_FOUND != val) {
                avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
                supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
                supported_scene_modes_cnt++;
            }
        }
    }
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
                      avail_scene_modes,
                      supported_scene_modes_cnt);

    // 3 override entries per accepted scene mode, filled by makeOverridesList.
    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX  * 3];
    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
                      supported_scene_modes_cnt,
                      CAM_SCENE_MODE_MAX,
                      scene_mode_overrides,
                      supported_indexes,
                      cameraId);

    // Framework requires a non-empty scene mode list: fall back to DISABLED.
    // NOTE(review): in this fallback path makeOverridesList() was called with
    // a count of 0, yet 3 override entries are published below from a buffer
    // it never wrote — verify scene_mode_overrides[0..2] are initialized.
    if (supported_scene_modes_cnt == 0) {
        supported_scene_modes_cnt = 1;
        avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
    }

    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
            scene_mode_overrides, supported_scene_modes_cnt * 3);

    // ---- Top-level 3A control modes (fixed list) ----
    uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
                                         ANDROID_CONTROL_MODE_AUTO,
                                         ANDROID_CONTROL_MODE_USE_SCENE_MODE};
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
            available_control_modes,
            3);
   6449 
    // ---- AE antibanding modes (HAL enum -> framework enum) ----
    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
    size = 0;
    count = CAM_ANTIBANDING_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
                gCamCapability[cameraId]->supported_antibandings[i]);
        if (NAME_NOT_FOUND != val) {
            avail_antibanding_modes[size] = (uint8_t)val;
            size++;
        }

    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
                      avail_antibanding_modes,
                      size);

    // ---- Chromatic aberration correction (CAC) modes ----
    uint8_t avail_abberation_modes[CAM_COLOR_CORRECTION_ABERRATION_MAX];
    size = 0;
    count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
    count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
    if (0 == count) {
        // Backend reported no CAC modes: advertise OFF as the only option.
        avail_abberation_modes[0] =
                ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
        size++;
    } else {
        for (size_t i = 0; i < count; i++) {
            int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
                    gCamCapability[cameraId]->aberration_modes[i]);
            if (NAME_NOT_FOUND != val) {
                avail_abberation_modes[size] = (uint8_t)val;
                size++;
            } else {
                // NOTE(review): unlike the other mode loops, an unmappable
                // entry aborts this whole loop, dropping any valid modes that
                // follow it — confirm `break` (vs. skip-and-continue) is the
                // intended behavior.
                ALOGE("%s: Invalid CAC mode %d", __func__,
                        gCamCapability[cameraId]->aberration_modes[i]);
                break;
            }
        }

    }
    staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
            avail_abberation_modes,
            size);

    // ---- AF (focus) modes ----
    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
    size = 0;
    count = CAM_FOCUS_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
                gCamCapability[cameraId]->supported_focus_modes[i]);
        if (NAME_NOT_FOUND != val) {
            avail_af_modes[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
                      avail_af_modes,
                      size);

    // ---- AWB (white balance) modes ----
    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
    size = 0;
    count = CAM_WB_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
                gCamCapability[cameraId]->supported_white_balances[i]);
        if (NAME_NOT_FOUND != val) {
            avail_awb_modes[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
                      avail_awb_modes,
                      size);
   6526 
    // ---- Flash firing power levels reported by the backend ----
    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
    count = CAM_FLASH_FIRING_LEVEL_MAX;
    count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
            count);
    for (size_t i = 0; i < count; i++) {
        available_flash_levels[i] =
                gCamCapability[cameraId]->supported_firing_levels[i];
    }
    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
            available_flash_levels, count);

    // ---- Flash unit availability; also gates the flash AE modes below ----
    uint8_t flashAvailable;
    if (gCamCapability[cameraId]->flash_available)
        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
    else
        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
            &flashAvailable, 1);

    // ---- AE modes: backend-reported list, plus the two flash AE modes
    // when a flash unit is present ----
    Vector<uint8_t> avail_ae_modes;
    count = CAM_AE_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
    for (size_t i = 0; i < count; i++) {
        avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
    }
    if (flashAvailable) {
        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
                      avail_ae_modes.array(),
                      avail_ae_modes.size());

    // ---- Sensor sensitivity (ISO) range: {min, max} ----
    int32_t sensitivity_range[2];
    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
                      sensitivity_range,
                      sizeof(sensitivity_range) / sizeof(int32_t));

    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
                      &gCamCapability[cameraId]->max_analog_sensitivity,
                      1);

    // ---- Sensor mount angle as reported by the backend ----
    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
                      &sensor_orientation,
                      1);

    // ---- Max simultaneous output streams.
    // NOTE(review): the framework documents this tuple as (raw, processed
    // non-stalling, processed stalling); this list is stalling-first —
    // verify the constant values make the two orderings equivalent. ----
    int32_t max_output_streams[] = {
            MAX_STALLING_STREAMS,
            MAX_PROCESSED_STREAMS,
            MAX_RAW_STREAMS};
    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
            max_output_streams,
            sizeof(max_output_streams)/sizeof(max_output_streams[0]));

    // No controllable LEDs: publish the tag with an empty (count 0) list.
    uint8_t avail_leds = 0;
    staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
                      &avail_leds, 0);

    // ---- Focus distance calibration quality, published only when the HAL
    // value maps to a framework enum ----
    uint8_t focus_dist_calibrated;
    int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
            gCamCapability[cameraId]->focus_dist_calibrated);
    if (NAME_NOT_FOUND != val) {
        focus_dist_calibrated = (uint8_t)val;
        staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
                     &focus_dist_calibrated, 1);
    }
   6596 
    // ---- Sensor test pattern modes (HAL enum -> framework enum) ----
    int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
    size = 0;
    count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
            MAX_TEST_PATTERN_CNT);
    for (size_t i = 0; i < count; i++) {
        int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
                gCamCapability[cameraId]->supported_test_pattern_modes[i]);
        if (NAME_NOT_FOUND != testpatternMode) {
            avail_testpattern_modes[size] = testpatternMode;
            size++;
        }
    }
    staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
                      avail_testpattern_modes,
                      size);

    // ---- Pipeline depth: in-flight requests plus the fixed pipeline delays ----
    uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
    staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
                      &max_pipeline_depth,
                      1);

    int32_t partial_result_count = PARTIAL_RESULT_COUNT;
    staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
                      &partial_result_count,
                       1);

    int32_t max_stall_duration = MAX_REPROCESS_STALL;
    staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);

    // ---- Device capability list ----
    Vector<uint8_t> available_capabilities;
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
    // Same gate as the HFR configuration publication earlier in this function.
    if (hfrEnable && available_hfr_configs.array()) {
        available_capabilities.add(
                ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
    }

    // RAW capability only for non-YUV (Bayer) sensors.
    if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
        available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
    }
    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
            available_capabilities.array(),
            available_capabilities.size());

    //aeLockAvailable to be set to true if capabilities has MANUAL_SENSOR and/or
    //BURST_CAPTURE.
    // NOTE(review): the comment above describes a capability-based check, but
    // the code keys off the sensor type (RAW) instead — confirm the two agree.
    uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
            ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;

    staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
            &aeLockAvailable, 1);

    //awbLockAvailable to be set to true if capabilities has
    //MANUAL_POST_PROCESSING and/or BURST_CAPTURE.
    // NOTE(review): same comment/code mismatch as aeLockAvailable above.
    uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
            ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;

    staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
            &awbLockAvailable, 1);

    // Single reprocess input stream supported.
    int32_t max_input_streams = 1;
    staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
                      &max_input_streams,
                      1);
   6666 
    /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
    // Reprocess I/O map: IMPLEMENTATION_DEFINED -> {BLOB, YCbCr_420_888} and
    // YCbCr_420_888 -> {BLOB, YCbCr_420_888}.
    int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
            HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
            HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
            HAL_PIXEL_FORMAT_YCbCr_420_888};
    staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
                      io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));

    // LIMITED devices report the HAL's sync latency; otherwise the device is
    // per-frame controlled.
    int32_t max_latency = (limitedDevice) ?
            CAM_MAX_SYNC_LATENCY : ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
    staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
                      &max_latency,
                      1);

    // ---- Fixed (capability-independent) mode lists ----
    uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
                                           ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
    staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
            available_hot_pixel_modes,
            sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));

    uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
                                         ANDROID_SHADING_MODE_FAST,
                                         ANDROID_SHADING_MODE_HIGH_QUALITY};
    staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
                      available_shading_modes,
                      3);

    uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
                                                  ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
                      available_lens_shading_map_modes,
                      2);

    uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
                                      ANDROID_EDGE_MODE_FAST,
                                      ANDROID_EDGE_MODE_HIGH_QUALITY,
                                      ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
    staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
            available_edge_modes,
            sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));

    uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
                                           ANDROID_NOISE_REDUCTION_MODE_FAST,
                                           ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
                                           ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
                                           ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
    staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
            available_noise_red_modes,
            sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));

    uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
                                         ANDROID_TONEMAP_MODE_FAST,
                                         ANDROID_TONEMAP_MODE_HIGH_QUALITY};
    staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
            available_tonemap_modes,
            sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));

    uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
            available_hot_pixel_map_modes,
            sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));

    // ---- Color calibration: the two reference illuminants (published only
    // when the HAL value maps to a framework enum) and six rational
    // transform matrices straight from the capability tables ----
    val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
            gCamCapability[cameraId]->reference_illuminant1);
    if (NAME_NOT_FOUND != val) {
        uint8_t fwkReferenceIlluminant = (uint8_t)val;
        staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
    }

    val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
            gCamCapability[cameraId]->reference_illuminant2);
    if (NAME_NOT_FOUND != val) {
        uint8_t fwkReferenceIlluminant = (uint8_t)val;
        staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
    }

    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->forward_matrix1,
            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);

    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->forward_matrix2,
            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);

    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->color_transform1,
            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);

    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->color_transform2,
            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);

    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->calibration_transform1,
            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);

    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->calibration_transform2,
            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
   6766 
    // ---- ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS: the controls a client may
    // set in a capture request. AF_REGIONS is added only when more than one
    // focus mode exists (i.e. the device has a controllable focuser). ----
    int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
       ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
       ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
       ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
       ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
       ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
       ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
       ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
       ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
       ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
       ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
       ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
       ANDROID_JPEG_GPS_COORDINATES,
       ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
       ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
       ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
       ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
       ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
       ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
       ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
       ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
       ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
       ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
       ANDROID_STATISTICS_FACE_DETECT_MODE,
       ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
       ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
       ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE};

    size_t request_keys_cnt =
            sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
    Vector<int32_t> available_request_keys;
    available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
        available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
    }

    staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
            available_request_keys.array(), available_request_keys.size());

    // ---- ANDROID_REQUEST_AVAILABLE_RESULT_KEYS: tags a capture result may
    // carry; extended below per focuser, sensor type, and face detect mode ----
    int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
       ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
       ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
       ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
       ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
       ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
       ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
       ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
       ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
       ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
       ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
       ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
       ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
       ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
       ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
       ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
       ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
       ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
       ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
       ANDROID_STATISTICS_FACE_SCORES,
       ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL,
       ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
       ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST };
    size_t result_keys_cnt =
            sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);

    Vector<int32_t> available_result_keys;
    available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
        available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
    }
    // Noise profile / green split only meaningful for Bayer sensors.
    if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
       available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
       available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
    }
    // NOTE(review): FACE_RECTANGLES/FACE_SCORES are already present in
    // result_keys_basic above, so mode 1 appends duplicates here — verify
    // whether duplicate key entries are harmless or should be removed.
    if (supportedFaceDetectMode == 1) {
        available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
        available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
    } else if ((supportedFaceDetectMode == 2) ||
            (supportedFaceDetectMode == 3)) {
        available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
        available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
    }
    staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
            available_result_keys.array(), available_result_keys.size());
   6855 
    // ---- ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS: the static tags
    // published by this function; OPTICAL_BLACK_REGIONS is appended only when
    // the sensor actually reports black regions. ----
    int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
       ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
       ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
       ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
       ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
       ANDROID_SCALER_CROPPING_TYPE,
       ANDROID_SYNC_MAX_LATENCY,
       ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
       ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
       ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
       ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
       ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
       ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
       ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
       ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
       ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
       ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
       ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
       ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
       ANDROID_LENS_FACING,
       ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
       ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
       ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
       ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
       ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
       ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
       /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
       ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
       ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
       ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
       ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
       ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
       ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
       ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
       ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
       ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
       ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
       ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
       ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
       ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
       ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
       ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
       ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
       ANDROID_EDGE_AVAILABLE_EDGE_MODES,
       ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
       ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
       ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
       ANDROID_TONEMAP_MAX_CURVE_POINTS,
       ANDROID_CONTROL_AVAILABLE_MODES,
       ANDROID_CONTROL_AE_LOCK_AVAILABLE,
       ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
       ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
       ANDROID_SHADING_AVAILABLE_MODES,
       ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL };

    Vector<int32_t> available_characteristics_keys;
    available_characteristics_keys.appendArray(characteristics_keys_basic,
            sizeof(characteristics_keys_basic)/sizeof(int32_t));
    if (hasBlackRegions) {
        available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
    }
    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
                      available_characteristics_keys.array(),
                      available_characteristics_keys.size());
   6921 
   6922     /*available stall durations depend on the hw + sw and will be different for different devices */
   6923     /*have to add for raw after implementation*/
   6924     int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
   6925     size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
   6926 
   6927     count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
   6928     size_t raw_count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt,
   6929             MAX_SIZES_CNT);
   6930     size_t available_stall_size = count * 4;
   6931     int64_t available_stall_durations[available_stall_size];
   6932     idx = 0;
   6933     for (uint32_t j = 0; j < stall_formats_count; j++) {
   6934        if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
   6935           for (uint32_t i = 0; i < count; i++) {
   6936              available_stall_durations[idx]   = stall_formats[j];
   6937              available_stall_durations[idx+1] = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
   6938              available_stall_durations[idx+2] = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
   6939              available_stall_durations[idx+3] = gCamCapability[cameraId]->jpeg_stall_durations[i];
   6940              idx+=4;
   6941           }
   6942        } else {
   6943           for (uint32_t i = 0; i < raw_count; i++) {
   6944              available_stall_durations[idx]   = stall_formats[j];
   6945              available_stall_durations[idx+1] = gCamCapability[cameraId]->raw_dim[i].width;
   6946              available_stall_durations[idx+2] = gCamCapability[cameraId]->raw_dim[i].height;
   6947              available_stall_durations[idx+3] = gCamCapability[cameraId]->raw16_stall_durations[i];
   6948              idx+=4;
   6949           }
   6950        }
   6951     }
   6952     staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
   6953                       available_stall_durations,
   6954                       idx);
   6955     //QCAMERA3_OPAQUE_RAW
   6956     uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
   6957     cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
   6958     switch (gCamCapability[cameraId]->opaque_raw_fmt) {
   6959     case LEGACY_RAW:
   6960         if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
   6961             fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
   6962         else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
   6963             fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
   6964         else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
   6965             fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
   6966         raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
   6967         break;
   6968     case MIPI_RAW:
   6969         if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
   6970             fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
   6971         else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
   6972             fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
   6973         else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
   6974             fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
   6975         raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
   6976         break;
   6977     default:
   6978         ALOGE("%s: unknown opaque_raw_format %d", __func__,
   6979                 gCamCapability[cameraId]->opaque_raw_fmt);
   6980         break;
   6981     }
   6982     staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
   6983 
   6984     int32_t strides[3*raw_count];
   6985     for (size_t i = 0; i < raw_count; i++) {
   6986         cam_stream_buf_plane_info_t buf_planes;
   6987         strides[i*3] = gCamCapability[cameraId]->raw_dim[i].width;
   6988         strides[i*3+1] = gCamCapability[cameraId]->raw_dim[i].height;
   6989         mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
   6990             &gCamCapability[cameraId]->padding_info, &buf_planes);
   6991         strides[i*3+2] = buf_planes.plane_info.mp[0].stride;
   6992     }
   6993     staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides,
   6994             3*raw_count);
   6995 
   6996     gStaticMetadata[cameraId] = staticInfo.release();
   6997     return rc;
   6998 }
   6999 
   7000 /*===========================================================================
   7001  * FUNCTION   : makeTable
   7002  *
   7003  * DESCRIPTION: make a table of sizes
   7004  *
   7005  * PARAMETERS :
   7006  *
   7007  *
   7008  *==========================================================================*/
   7009 void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
   7010         size_t max_size, int32_t *sizeTable)
   7011 {
   7012     size_t j = 0;
   7013     if (size > max_size) {
   7014        size = max_size;
   7015     }
   7016     for (size_t i = 0; i < size; i++) {
   7017         sizeTable[j] = dimTable[i].width;
   7018         sizeTable[j+1] = dimTable[i].height;
   7019         j+=2;
   7020     }
   7021 }
   7022 
   7023 /*===========================================================================
   7024  * FUNCTION   : makeFPSTable
   7025  *
   7026  * DESCRIPTION: make a table of fps ranges
   7027  *
   7028  * PARAMETERS :
   7029  *
   7030  *==========================================================================*/
   7031 void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
   7032         size_t max_size, int32_t *fpsRangesTable)
   7033 {
   7034     size_t j = 0;
   7035     if (size > max_size) {
   7036        size = max_size;
   7037     }
   7038     for (size_t i = 0; i < size; i++) {
   7039         fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
   7040         fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
   7041         j+=2;
   7042     }
   7043 }
   7044 
   7045 /*===========================================================================
   7046  * FUNCTION   : makeOverridesList
   7047  *
   7048  * DESCRIPTION: make a list of scene mode overrides
   7049  *
   7050  * PARAMETERS :
   7051  *
   7052  *
   7053  *==========================================================================*/
void QCamera3HardwareInterface::makeOverridesList(
        cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
        uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
{
    /*daemon will give a list of overrides for all scene modes.
      However we should send the fwk only the overrides for the scene modes
      supported by the framework*/
    size_t j = 0;
    // Clamp to the caller-provided capacity (3 output bytes per scene mode).
    if (size > max_size) {
       size = max_size;
    }
    // Only focus modes this sensor actually supports may be forwarded as
    // AF overrides; cap the supported-modes count at CAM_FOCUS_MODE_MAX.
    size_t focus_count = CAM_FOCUS_MODE_MAX;
    focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
            focus_count);
    for (size_t i = 0; i < size; i++) {
        bool supt = false;
        // supported_indexes[i] selects which daemon-side override entry maps
        // to the i-th framework-visible scene mode.
        size_t index = supported_indexes[i];
        // Output triple layout per scene mode: [AE, AWB, AF].
        // The AE override depends only on whether the unit has a flash.
        overridesList[j] = gCamCapability[camera_id]->flash_available ?
                ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
        // Translate the HAL AWB mode to its framework enum value.
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
                overridesTable[index].awb_mode);
        if (NAME_NOT_FOUND != val) {
            overridesList[j+1] = (uint8_t)val;
        }
        // NOTE(review): when the AWB lookup fails, overridesList[j+1] keeps
        // whatever was already in the buffer -- confirm callers pass a
        // zero-initialized array.
        uint8_t focus_override = overridesTable[index].af_mode;
        // Check whether the daemon's AF override is in the sensor's
        // supported-focus-modes list.
        for (size_t k = 0; k < focus_count; k++) {
           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
              supt = true;
              break;
           }
        }
        if (supt) {
            // Supported: translate to the framework AF enum.
            val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
                    focus_override);
            if (NAME_NOT_FOUND != val) {
                overridesList[j+2] = (uint8_t)val;
            }
        } else {
           // Unsupported focus mode: report AF off for this scene mode.
           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
        }
        j+=3;
    }
}
   7098 
   7099 /*===========================================================================
   7100  * FUNCTION   : filterJpegSizes
   7101  *
   7102  * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
   7103  *              could be downscaled to
   7104  *
   7105  * PARAMETERS :
   7106  *
   7107  * RETURN     : length of jpegSizes array
   7108  *==========================================================================*/
   7109 
   7110 size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
   7111         size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
   7112         uint8_t downscale_factor)
   7113 {
   7114     if (0 == downscale_factor) {
   7115         downscale_factor = 1;
   7116     }
   7117 
   7118     int32_t min_width = active_array_size.width / downscale_factor;
   7119     int32_t min_height = active_array_size.height / downscale_factor;
   7120     size_t jpegSizesCnt = 0;
   7121     if (processedSizesCnt > maxCount) {
   7122         processedSizesCnt = maxCount;
   7123     }
   7124     for (size_t i = 0; i < processedSizesCnt; i+=2) {
   7125         if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
   7126             jpegSizes[jpegSizesCnt] = processedSizes[i];
   7127             jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
   7128             jpegSizesCnt += 2;
   7129         }
   7130     }
   7131     return jpegSizesCnt;
   7132 }
   7133 
   7134 /*===========================================================================
   7135  * FUNCTION   : getPreviewHalPixelFormat
   7136  *
   7137  * DESCRIPTION: convert the format to type recognized by framework
   7138  *
   7139  * PARAMETERS : format : the format from backend
   7140  *
   7141  ** RETURN    : format recognized by framework
   7142  *
   7143  *==========================================================================*/
   7144 int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
   7145 {
   7146     int32_t halPixelFormat;
   7147 
   7148     switch (format) {
   7149     case CAM_FORMAT_YUV_420_NV12:
   7150         halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
   7151         break;
   7152     case CAM_FORMAT_YUV_420_NV21:
   7153         halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
   7154         break;
   7155     case CAM_FORMAT_YUV_420_NV21_ADRENO:
   7156         halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
   7157         break;
   7158     case CAM_FORMAT_YUV_420_YV12:
   7159         halPixelFormat = HAL_PIXEL_FORMAT_YV12;
   7160         break;
   7161     case CAM_FORMAT_YUV_422_NV16:
   7162     case CAM_FORMAT_YUV_422_NV61:
   7163     default:
   7164         halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
   7165         break;
   7166     }
   7167     return halPixelFormat;
   7168 }
   7169 
   7170 /*===========================================================================
   7171  * FUNCTION   : computeNoiseModelEntryS
   7172  *
   7173  * DESCRIPTION: function to map a given sensitivity to the S noise
   7174  *              model parameters in the DNG noise model.
   7175  *
   7176  * PARAMETERS : sens : the sensor sensitivity
   7177  *
   7178  ** RETURN    : S (sensor amplification) noise
   7179  *
   7180  *==========================================================================*/
   7181 double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
   7182     double s = gCamCapability[mCameraId]->gradient_S * sens +
   7183             gCamCapability[mCameraId]->offset_S;
   7184     return ((s < 0.0) ? 0.0 : s);
   7185 }
   7186 
   7187 /*===========================================================================
   7188  * FUNCTION   : computeNoiseModelEntryO
   7189  *
   7190  * DESCRIPTION: function to map a given sensitivity to the O noise
   7191  *              model parameters in the DNG noise model.
   7192  *
   7193  * PARAMETERS : sens : the sensor sensitivity
   7194  *
   7195  ** RETURN    : O (sensor readout) noise
   7196  *
   7197  *==========================================================================*/
   7198 double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
   7199     int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
   7200     double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
   7201             1.0 : (1.0 * sens / max_analog_sens);
   7202     double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
   7203             gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
   7204     return ((o < 0.0) ? 0.0 : o);
   7205 }
   7206 
   7207 /*===========================================================================
   7208  * FUNCTION   : getSensorSensitivity
   7209  *
   7210  * DESCRIPTION: convert iso_mode to an integer value
   7211  *
   7212  * PARAMETERS : iso_mode : the iso_mode supported by sensor
   7213  *
   7214  ** RETURN    : sensitivity supported by sensor
   7215  *
   7216  *==========================================================================*/
   7217 int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
   7218 {
   7219     int32_t sensitivity;
   7220 
   7221     switch (iso_mode) {
   7222     case CAM_ISO_MODE_100:
   7223         sensitivity = 100;
   7224         break;
   7225     case CAM_ISO_MODE_200:
   7226         sensitivity = 200;
   7227         break;
   7228     case CAM_ISO_MODE_400:
   7229         sensitivity = 400;
   7230         break;
   7231     case CAM_ISO_MODE_800:
   7232         sensitivity = 800;
   7233         break;
   7234     case CAM_ISO_MODE_1600:
   7235         sensitivity = 1600;
   7236         break;
   7237     default:
   7238         sensitivity = -1;
   7239         break;
   7240     }
   7241     return sensitivity;
   7242 }
   7243 
   7244 /*===========================================================================
   7245  * FUNCTION   : getCamInfo
   7246  *
   7247  * DESCRIPTION: query camera capabilities
   7248  *
   7249  * PARAMETERS :
   7250  *   @cameraId  : camera Id
   7251  *   @info      : camera info struct to be filled in with camera capabilities
   7252  *
   7253  * RETURN     : int type of status
   7254  *              NO_ERROR  -- success
   7255  *              none-zero failure code
   7256  *==========================================================================*/
int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
        struct camera_info *info)
{
    ATRACE_CALL();
    int rc = 0;

    // gCamLock guards the lazily-initialized per-camera capability and
    // static-metadata caches; it is held for the whole query so the caches
    // are populated at most once.
    pthread_mutex_lock(&gCamLock);
    if (NULL == gCamCapability[cameraId]) {
        rc = initCapabilities(cameraId);
        if (rc < 0) {
            // Unlock before every early return to avoid leaking the mutex.
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    if (NULL == gStaticMetadata[cameraId]) {
        rc = initStaticMetadata(cameraId);
        if (rc < 0) {
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    // Map the sensor's physical position to the framework's facing enum.
    switch(gCamCapability[cameraId]->position) {
    case CAM_POSITION_BACK:
        info->facing = CAMERA_FACING_BACK;
        break;

    case CAM_POSITION_FRONT:
        info->facing = CAMERA_FACING_FRONT;
        break;

    default:
        // Unknown mount position: report failure but still fill in the
        // remaining fields below.
        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
        rc = -1;
        break;
    }


    info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
    info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
    info->static_camera_characteristics = gStaticMetadata[cameraId];

    //For now assume both cameras can operate independently.
    info->conflicting_devices = NULL;
    info->conflicting_devices_length = 0;

    //resource cost is 100 * MIN(1.0, m/M),
    //where m is throughput requirement with maximum stream configuration
    //and M is CPP maximum throughput.
    // First find the highest max_fps across all supported fps ranges.
    float max_fps = 0.0;
    for (uint32_t i = 0;
            i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
        if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
            max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
    }
    // m = MAX_PROCESSED_STREAMS * active-array pixels * max fps;
    // M = max_pixel_bandwidth.
    float ratio = 1.0 * MAX_PROCESSED_STREAMS *
            gCamCapability[cameraId]->active_array_size.width *
            gCamCapability[cameraId]->active_array_size.height * max_fps /
            gCamCapability[cameraId]->max_pixel_bandwidth;
    info->resource_cost = 100 * MIN(1.0, ratio);
    ALOGI("%s: camera %d resource cost is %d", __func__, cameraId,
            info->resource_cost);

    pthread_mutex_unlock(&gCamLock);
    return rc;
}
   7324 
   7325 /*===========================================================================
   7326  * FUNCTION   : translateCapabilityToMetadata
   7327  *
   7328  * DESCRIPTION: translate the capability into camera_metadata_t
   7329  *
   7330  * PARAMETERS : type of the request
   7331  *
   7332  *
   7333  * RETURN     : success: camera_metadata_t*
   7334  *              failure: NULL
   7335  *
   7336  *==========================================================================*/
   7337 camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
   7338 {
   7339     if (mDefaultMetadata[type] != NULL) {
   7340         return mDefaultMetadata[type];
   7341     }
   7342     //first time we are handling this request
   7343     //fill up the metadata structure using the wrapper class
   7344     CameraMetadata settings;
   7345     //translate from cam_capability_t to camera_metadata_tag_t
   7346     static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
   7347     settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
   7348     int32_t defaultRequestID = 0;
   7349     settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
   7350 
   7351     /* OIS disable */
   7352     char ois_prop[PROPERTY_VALUE_MAX];
   7353     memset(ois_prop, 0, sizeof(ois_prop));
   7354     property_get("persist.camera.ois.disable", ois_prop, "0");
   7355     uint8_t ois_disable = (uint8_t)atoi(ois_prop);
   7356 
   7357     /* Force video to use OIS */
   7358     char videoOisProp[PROPERTY_VALUE_MAX];
   7359     memset(videoOisProp, 0, sizeof(videoOisProp));
   7360     property_get("persist.camera.ois.video", videoOisProp, "1");
   7361     uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
   7362 
   7363     // EIS enable/disable
   7364     char eis_prop[PROPERTY_VALUE_MAX];
   7365     memset(eis_prop, 0, sizeof(eis_prop));
   7366     property_get("persist.camera.eis.enable", eis_prop, "0");
   7367     const uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
   7368 
   7369     // Hybrid AE enable/disable
   7370     char hybrid_ae_prop[PROPERTY_VALUE_MAX];
   7371     memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
   7372     property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
   7373     const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
   7374 
   7375     const bool facingBack = gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
   7376     // This is a bit hacky. EIS is enabled only when the above setprop
   7377     // is set to non-zero value and on back camera (for 2015 Nexus).
   7378     // Ideally, we should rely on m_bEisEnable, but we cannot guarantee
   7379     // configureStream is called before this function. In other words,
   7380     // we cannot guarantee the app will call configureStream before
   7381     // calling createDefaultRequest.
   7382     const bool eisEnabled = facingBack && eis_prop_set;
   7383 
   7384     uint8_t controlIntent = 0;
   7385     uint8_t focusMode;
   7386     uint8_t vsMode;
   7387     uint8_t optStabMode;
   7388     uint8_t cacMode;
   7389     uint8_t edge_mode;
   7390     uint8_t noise_red_mode;
   7391     uint8_t tonemap_mode;
   7392     vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
   7393     optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
   7394     switch (type) {
   7395       case CAMERA3_TEMPLATE_PREVIEW:
   7396         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
   7397         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
   7398         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
   7399         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
   7400         edge_mode = ANDROID_EDGE_MODE_FAST;
   7401         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
   7402         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
   7403         break;
   7404       case CAMERA3_TEMPLATE_STILL_CAPTURE:
   7405         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
   7406         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
   7407         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
   7408         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
   7409         edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
   7410         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
   7411         tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
   7412         break;
   7413       case CAMERA3_TEMPLATE_VIDEO_RECORD:
   7414         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
   7415         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
   7416         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
   7417         if (eisEnabled) {
   7418             vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON;
   7419         }
   7420         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
   7421         edge_mode = ANDROID_EDGE_MODE_FAST;
   7422         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
   7423         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
   7424         if (forceVideoOis)
   7425             optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
   7426         break;
   7427       case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
   7428         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
   7429         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
   7430         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
   7431         if (eisEnabled) {
   7432             vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON;
   7433         }
   7434         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
   7435         edge_mode = ANDROID_EDGE_MODE_FAST;
   7436         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
   7437         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
   7438         if (forceVideoOis)
   7439             optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
   7440         break;
   7441       case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
   7442         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
   7443         focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
   7444         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
   7445         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
   7446         edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
   7447         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
   7448         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
   7449         break;
   7450       case CAMERA3_TEMPLATE_MANUAL:
   7451         edge_mode = ANDROID_EDGE_MODE_FAST;
   7452         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
   7453         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
   7454         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
   7455         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
   7456         focusMode = ANDROID_CONTROL_AF_MODE_OFF;
   7457         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
   7458         break;
   7459       default:
   7460         edge_mode = ANDROID_EDGE_MODE_FAST;
   7461         noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
   7462         tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
   7463         cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
   7464         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
   7465         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
   7466         break;
   7467     }
   7468     settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
   7469     settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
   7470     settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
   7471     if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
   7472         focusMode = ANDROID_CONTROL_AF_MODE_OFF;
   7473     }
   7474     settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
   7475 
   7476     if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
   7477             gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
   7478         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
   7479     else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
   7480             gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
   7481             || ois_disable)
   7482         optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
   7483     settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
   7484 
   7485     settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
   7486             &gCamCapability[mCameraId]->exposure_compensation_default, 1);
   7487 
   7488     static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
   7489     settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
   7490 
   7491     static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
   7492     settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
   7493 
   7494     static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
   7495     settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
   7496 
   7497     static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
   7498     settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
   7499 
   7500     static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
   7501     settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
   7502 
   7503     static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
   7504     settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
   7505 
   7506     static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
   7507     settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
   7508 
   7509     /*flash*/
   7510     static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
   7511     settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
   7512 
   7513     static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
   7514     settings.update(ANDROID_FLASH_FIRING_POWER,
   7515             &flashFiringLevel, 1);
   7516 
   7517     /* lens */
   7518     float default_aperture = gCamCapability[mCameraId]->apertures[0];
   7519     settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
   7520 
   7521     if (gCamCapability[mCameraId]->filter_densities_count) {
   7522         float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
   7523         settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
   7524                         gCamCapability[mCameraId]->filter_densities_count);
   7525     }
   7526 
   7527     float default_focal_length = gCamCapability[mCameraId]->focal_length;
   7528     settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
   7529 
   7530     float default_focus_distance = 0;
   7531     settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
   7532 
   7533     static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
   7534     settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
   7535 
   7536     static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
   7537     settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
   7538 
   7539     static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
   7540     settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
   7541 
   7542     /* face detection (default to OFF) */
   7543     static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
   7544     settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
   7545 
   7546     static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
   7547     settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
   7548 
   7549     static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
   7550     settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
   7551 
   7552     static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
   7553     settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
   7554 
   7555     static const uint8_t lensShadingMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
   7556     settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMode, 1);
   7557 
   7558     static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
   7559     settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
   7560 
   7561     /* Exposure time(Update the Min Exposure Time)*/
   7562     int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
   7563     settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
   7564 
   7565     /* frame duration */
   7566     static const int64_t default_frame_duration = NSEC_PER_33MSEC;
   7567     settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
   7568 
   7569     /* sensitivity */
   7570     static const int32_t default_sensitivity = 100;
   7571     settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
   7572 
   7573     /*edge mode*/
   7574     settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
   7575 
   7576     /*noise reduction mode*/
   7577     settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
   7578 
   7579     /*color correction mode*/
   7580     static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
   7581     settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
   7582 
   7583     /*transform matrix mode*/
   7584     settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
   7585 
   7586     int32_t scaler_crop_region[4];
   7587     scaler_crop_region[0] = 0;
   7588     scaler_crop_region[1] = 0;
   7589     scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
   7590     scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
   7591     settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
   7592 
   7593     static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
   7594     settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
   7595 
   7596     /*focus distance*/
   7597     float focus_distance = 0.0;
   7598     settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
   7599 
   7600     /*target fps range: use maximum range for picture, and maximum fixed range for video*/
   7601     float max_range = 0.0;
   7602     float max_fixed_fps = 0.0;
   7603     int32_t fps_range[2] = {0, 0};
   7604     for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
   7605             i++) {
   7606         float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
   7607             gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
   7608         if (type == CAMERA3_TEMPLATE_PREVIEW ||
   7609                 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
   7610                 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
   7611             if (range > max_range) {
   7612                 fps_range[0] =
   7613                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
   7614                 fps_range[1] =
   7615                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
   7616                 max_range = range;
   7617             }
   7618         } else {
   7619             if (range < 0.01 && max_fixed_fps <
   7620                     gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
   7621                 fps_range[0] =
   7622                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
   7623                 fps_range[1] =
   7624                     (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
   7625                 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
   7626             }
   7627         }
   7628     }
   7629     settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
   7630 
   7631     /*precapture trigger*/
   7632     uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
   7633     settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
   7634 
   7635     /*af trigger*/
   7636     uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
   7637     settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
   7638 
   7639     /* ae & af regions */
   7640     int32_t active_region[] = {
   7641             gCamCapability[mCameraId]->active_array_size.left,
   7642             gCamCapability[mCameraId]->active_array_size.top,
   7643             gCamCapability[mCameraId]->active_array_size.left +
   7644                     gCamCapability[mCameraId]->active_array_size.width,
   7645             gCamCapability[mCameraId]->active_array_size.top +
   7646                     gCamCapability[mCameraId]->active_array_size.height,
   7647             0};
   7648     settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
   7649             sizeof(active_region) / sizeof(active_region[0]));
   7650     settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
   7651             sizeof(active_region) / sizeof(active_region[0]));
   7652 
   7653     /* black level lock */
   7654     uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
   7655     settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
   7656 
   7657     /* lens shading map mode */
   7658     uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
   7659     if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
   7660         shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
   7661     }
   7662     settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
   7663 
   7664     //special defaults for manual template
   7665     if (type == CAMERA3_TEMPLATE_MANUAL) {
   7666         static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
   7667         settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
   7668 
   7669         static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
   7670         settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
   7671 
   7672         static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
   7673         settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
   7674 
   7675         static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
   7676         settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
   7677 
   7678         static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
   7679         settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
   7680 
   7681         static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
   7682         settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
   7683     }
   7684 
   7685 
   7686     /* TNR
   7687      * We'll use this location to determine which modes TNR will be set.
   7688      * We will enable TNR to be on if either of the Preview/Video stream requires TNR
   7689      * This is not to be confused with linking on a per stream basis that decision
   7690      * is still on per-session basis and will be handled as part of config stream
   7691      */
   7692     uint8_t tnr_enable = 0;
   7693 
   7694     if (m_bTnrPreview || m_bTnrVideo) {
   7695 
   7696         switch (type) {
   7697             case CAMERA3_TEMPLATE_VIDEO_RECORD:
   7698             case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
   7699                     tnr_enable = 1;
   7700                     break;
   7701 
   7702             default:
   7703                     tnr_enable = 0;
   7704                     break;
   7705         }
   7706 
   7707         int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
   7708         settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
   7709         settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
   7710 
   7711         CDBG("%s: TNR:%d with process plate %d for template:%d",
   7712                             __func__, tnr_enable, tnr_process_type, type);
   7713     }
   7714 
   7715     /* CDS default */
   7716     char prop[PROPERTY_VALUE_MAX];
   7717     memset(prop, 0, sizeof(prop));
   7718     property_get("persist.camera.CDS", prop, "Auto");
   7719     cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
   7720     cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
   7721     if (CAM_CDS_MODE_MAX == cds_mode) {
   7722         cds_mode = CAM_CDS_MODE_AUTO;
   7723     }
   7724     m_CdsPreference = cds_mode;
   7725 
   7726     /* Disabling CDS in templates which have TNR enabled*/
   7727     if (tnr_enable)
   7728         cds_mode = CAM_CDS_MODE_OFF;
   7729 
   7730     int32_t mode = cds_mode;
   7731     settings.update(QCAMERA3_CDS_MODE, &mode, 1);
   7732 
   7733     /* hybrid ae */
   7734     settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
   7735 
   7736     mDefaultMetadata[type] = settings.release();
   7737 
   7738     return mDefaultMetadata[type];
   7739 }
   7740 
   7741 /*===========================================================================
   7742  * FUNCTION   : setFrameParameters
   7743  *
   7744  * DESCRIPTION: set parameters per frame as requested in the metadata from
   7745  *              framework
   7746  *
   7747  * PARAMETERS :
   7748  *   @request   : request that needs to be serviced
   7749  *   @streamID : Stream ID of all the requested streams
   7750  *   @blob_request: Whether this request is a blob request or not
   7751  *
   7752  * RETURN     : success: NO_ERROR
   7753  *              failure:
   7754  *==========================================================================*/
   7755 int QCamera3HardwareInterface::setFrameParameters(
   7756                     camera3_capture_request_t *request,
   7757                     cam_stream_ID_t streamID,
   7758                     int blob_request,
   7759                     uint32_t snapshotStreamId)
   7760 {
   7761     /*translate from camera_metadata_t type to parm_type_t*/
   7762     int rc = 0;
   7763     int32_t hal_version = CAM_HAL_V3;
   7764 
   7765     clear_metadata_buffer(mParameters);
   7766     if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
   7767         ALOGE("%s: Failed to set hal version in the parameters", __func__);
   7768         return BAD_VALUE;
   7769     }
   7770 
   7771     /*we need to update the frame number in the parameters*/
   7772     if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
   7773             request->frame_number)) {
   7774         ALOGE("%s: Failed to set the frame number in the parameters", __func__);
   7775         return BAD_VALUE;
   7776     }
   7777 
   7778     /* Update stream id of all the requested buffers */
   7779     if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamID)) {
   7780         ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
   7781         return BAD_VALUE;
   7782     }
   7783 
   7784     if (mUpdateDebugLevel) {
   7785         uint32_t dummyDebugLevel = 0;
   7786         /* The value of dummyDebugLevel is irrelavent. On
   7787          * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
   7788         if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
   7789                 dummyDebugLevel)) {
   7790             ALOGE("%s: Failed to set UPDATE_DEBUG_LEVEL", __func__);
   7791             return BAD_VALUE;
   7792         }
   7793         mUpdateDebugLevel = false;
   7794     }
   7795 
   7796     if(request->settings != NULL){
   7797         rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
   7798         if (blob_request)
   7799             memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
   7800     }
   7801 
   7802     return rc;
   7803 }
   7804 
   7805 /*===========================================================================
   7806  * FUNCTION   : setReprocParameters
   7807  *
   7808  * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
   7809  *              return it.
   7810  *
   7811  * PARAMETERS :
   7812  *   @request   : request that needs to be serviced
   7813  *
   7814  * RETURN     : success: NO_ERROR
   7815  *              failure:
   7816  *==========================================================================*/
   7817 int32_t QCamera3HardwareInterface::setReprocParameters(
   7818         camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
   7819         uint32_t snapshotStreamId)
   7820 {
   7821     /*translate from camera_metadata_t type to parm_type_t*/
   7822     int rc = 0;
   7823 
   7824     if (NULL == request->settings){
   7825         ALOGE("%s: Reprocess settings cannot be NULL", __func__);
   7826         return BAD_VALUE;
   7827     }
   7828 
   7829     if (NULL == reprocParam) {
   7830         ALOGE("%s: Invalid reprocessing metadata buffer", __func__);
   7831         return BAD_VALUE;
   7832     }
   7833     clear_metadata_buffer(reprocParam);
   7834 
   7835     /*we need to update the frame number in the parameters*/
   7836     if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
   7837             request->frame_number)) {
   7838         ALOGE("%s: Failed to set the frame number in the parameters", __func__);
   7839         return BAD_VALUE;
   7840     }
   7841 
   7842     rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
   7843     if (rc < 0) {
   7844         ALOGE("%s: Failed to translate reproc request", __func__);
   7845         return rc;
   7846     }
   7847 
   7848     CameraMetadata frame_settings;
   7849     frame_settings = request->settings;
   7850     if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
   7851             frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
   7852         int32_t *crop_count =
   7853                 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
   7854         int32_t *crop_data =
   7855                 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
   7856         int32_t *roi_map =
   7857                 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
   7858         if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
   7859             cam_crop_data_t crop_meta;
   7860             memset(&crop_meta, 0, sizeof(cam_crop_data_t));
   7861             crop_meta.num_of_streams = 1;
   7862             crop_meta.crop_info[0].crop.left   = crop_data[0];
   7863             crop_meta.crop_info[0].crop.top    = crop_data[1];
   7864             crop_meta.crop_info[0].crop.width  = crop_data[2];
   7865             crop_meta.crop_info[0].crop.height = crop_data[3];
   7866 
   7867             crop_meta.crop_info[0].roi_map.left =
   7868                     roi_map[0];
   7869             crop_meta.crop_info[0].roi_map.top =
   7870                     roi_map[1];
   7871             crop_meta.crop_info[0].roi_map.width =
   7872                     roi_map[2];
   7873             crop_meta.crop_info[0].roi_map.height =
   7874                     roi_map[3];
   7875 
   7876             if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
   7877                 rc = BAD_VALUE;
   7878             }
   7879             CDBG("%s: Found reprocess crop data for stream %p %dx%d, %dx%d",
   7880                     __func__,
   7881                     request->input_buffer->stream,
   7882                     crop_meta.crop_info[0].crop.left,
   7883                     crop_meta.crop_info[0].crop.top,
   7884                     crop_meta.crop_info[0].crop.width,
   7885                     crop_meta.crop_info[0].crop.height);
   7886             CDBG("%s: Found reprocess roi map data for stream %p %dx%d, %dx%d",
   7887                     __func__,
   7888                     request->input_buffer->stream,
   7889                     crop_meta.crop_info[0].roi_map.left,
   7890                     crop_meta.crop_info[0].roi_map.top,
   7891                     crop_meta.crop_info[0].roi_map.width,
   7892                     crop_meta.crop_info[0].roi_map.height);
   7893             } else {
   7894                 ALOGE("%s: Invalid reprocess crop count %d!", __func__, *crop_count);
   7895             }
   7896     } else {
   7897         ALOGE("%s: No crop data from matching output stream", __func__);
   7898     }
   7899 
   7900     /* These settings are not needed for regular requests so handle them specially for
   7901        reprocess requests; information needed for EXIF tags */
   7902     if (frame_settings.exists(ANDROID_FLASH_MODE)) {
   7903         int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
   7904                     (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
   7905         if (NAME_NOT_FOUND != val) {
   7906             uint32_t flashMode = (uint32_t)val;
   7907             if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
   7908                 rc = BAD_VALUE;
   7909             }
   7910         } else {
   7911             ALOGE("%s: Could not map fwk flash mode %d to correct hal flash mode", __func__,
   7912                     frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
   7913         }
   7914     } else {
   7915         CDBG_HIGH("%s: No flash mode in reprocess settings", __func__);
   7916     }
   7917 
   7918     if (frame_settings.exists(ANDROID_FLASH_STATE)) {
   7919         int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
   7920         if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
   7921             rc = BAD_VALUE;
   7922         }
   7923     } else {
   7924         CDBG_HIGH("%s: No flash state in reprocess settings", __func__);
   7925     }
   7926 
   7927     return rc;
   7928 }
   7929 
   7930 /*===========================================================================
   7931  * FUNCTION   : saveRequestSettings
   7932  *
   7933  * DESCRIPTION: Add any settings that might have changed to the request settings
   7934  *              and save the settings to be applied on the frame
   7935  *
   7936  * PARAMETERS :
   7937  *   @jpegMetadata : the extracted and/or modified jpeg metadata
   7938  *   @request      : request with initial settings
   7939  *
   7940  * RETURN     :
   7941  * camera_metadata_t* : pointer to the saved request settings
   7942  *==========================================================================*/
   7943 camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
   7944         const CameraMetadata &jpegMetadata,
   7945         camera3_capture_request_t *request)
   7946 {
   7947     camera_metadata_t *resultMetadata;
   7948     CameraMetadata camMetadata;
   7949     camMetadata = request->settings;
   7950 
   7951     if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
   7952         int32_t thumbnail_size[2];
   7953         thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
   7954         thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
   7955         camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
   7956                 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
   7957     }
   7958 
   7959     resultMetadata = camMetadata.release();
   7960     return resultMetadata;
   7961 }
   7962 
   7963 /*===========================================================================
   7964  * FUNCTION   : setHalFpsRange
   7965  *
   7966  * DESCRIPTION: set FPS range parameter
   7967  *
   7968  *
   7969  * PARAMETERS :
   7970  *   @settings    : Metadata from framework
   7971  *   @hal_metadata: Metadata buffer
   7972  *
   7973  *
   7974  * RETURN     : success: NO_ERROR
   7975  *              failure:
   7976  *==========================================================================*/
int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
        metadata_buffer_t *hal_metadata)
{
    int32_t rc = NO_ERROR;
    cam_fps_range_t fps_range;
    // NOTE(review): this assumes ANDROID_CONTROL_AE_TARGET_FPS_RANGE exists
    // in 'settings' -- callers check exists() before invoking this helper;
    // confirm no other call sites skip that check.
    fps_range.min_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
    fps_range.max_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
    // Start with the video range mirroring the AE target range; the HFR
    // branch below may override min values.
    fps_range.video_min_fps = fps_range.min_fps;
    fps_range.video_max_fps = fps_range.max_fps;

    CDBG("%s: aeTargetFpsRange fps: [%f %f]", __func__,
            fps_range.min_fps, fps_range.max_fps);
    /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
     * follows:
     * ---------------------------------------------------------------|
     *      Video stream is absent in configure_streams               |
     *    (Camcorder preview before the first video record            |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     *     Video stream is present in configure_streams               |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     * (camcorder prev   |-------------|-------------|----------------|
     *  after video rec  |  [240, 240] |     240     |  [240, 240]    |
     *  is stopped)      |             |             |                |
     * ---------------------------------------------------------------|
     *       YES         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     * When Video stream is absent in configure_streams,
     * preview fps = sensor_fps / batchsize
     * Eg: for 240fps at batchSize 4, preview = 60fps
     *     for 120fps at batchSize 4, preview = 30fps
     *
     * When video stream is present in configure_streams, preview fps is as per
     * the ratio of preview buffers to video buffers requested in process
     * capture request
     */
    // Reset batching each time; it is only re-enabled in constrained HFR mode.
    mBatchSize = 0;
    if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
        // In constrained high-speed mode, pin both min fps values to the max
        // so the sensor runs at a fixed high frame rate (see table above).
        fps_range.min_fps = fps_range.video_max_fps;
        fps_range.video_min_fps = fps_range.video_max_fps;
        // Map the (float) max fps to a discrete HFR mode enum; the float is
        // implicitly narrowed to the map's integer key type here.
        int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
                fps_range.max_fps);
        if (NAME_NOT_FOUND != val) {
            cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
                return BAD_VALUE;
            }

            if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
                /* If batchmode is currently in progress and the fps changes,
                 * set the flag to restart the sensor */
                if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
                        (mHFRVideoFps != fps_range.max_fps)) {
                    mNeedSensorRestart = true;
                }
                mHFRVideoFps = fps_range.max_fps;
                // Batch enough frames to bring preview down to its target
                // fps, capped at the hardware's max batch size.
                mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
                if (mBatchSize > MAX_HFR_BATCH_SIZE) {
                    mBatchSize = MAX_HFR_BATCH_SIZE;
                }
             }
            CDBG("%s: hfrMode: %d batchSize: %d", __func__, hfrMode, mBatchSize);

         }
    } else {
        /* HFR mode is session param in backend/ISP. This should be reset when
         * in non-HFR mode  */
        cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
            return BAD_VALUE;
        }
    }
    // Push the (possibly HFR-adjusted) fps range into the HAL batch.
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
        return BAD_VALUE;
    }
    CDBG("%s: fps: [%f %f] vid_fps: [%f %f]", __func__, fps_range.min_fps,
            fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
    return rc;
}
   8070 
   8071 /*===========================================================================
   8072  * FUNCTION   : translateToHalMetadata
   8073  *
   8074  * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
   8075  *
   8076  *
   8077  * PARAMETERS :
   8078  *   @request  : request sent from framework
   8079  *
   8080  *
   8081  * RETURN     : success: NO_ERROR
   8082  *              failure:
   8083  *==========================================================================*/
   8084 int QCamera3HardwareInterface::translateToHalMetadata
   8085                                   (const camera3_capture_request_t *request,
   8086                                    metadata_buffer_t *hal_metadata,
   8087                                    uint32_t snapshotStreamId)
   8088 {
   8089     int rc = 0;
   8090     CameraMetadata frame_settings;
   8091     frame_settings = request->settings;
   8092 
   8093     /* Do not change the order of the following list unless you know what you are
   8094      * doing.
   8095      * The order is laid out in such a way that parameters in the front of the table
   8096      * may be used to override the parameters later in the table. Examples are:
   8097      * 1. META_MODE should precede AEC/AWB/AF MODE
   8098      * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
   8099      * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
   8100      * 4. Any mode should precede it's corresponding settings
   8101      */
   8102     if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
   8103         uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
   8104         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
   8105             rc = BAD_VALUE;
   8106         }
   8107         rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
   8108         if (rc != NO_ERROR) {
   8109             ALOGE("%s: extractSceneMode failed", __func__);
   8110         }
   8111     }
   8112 
   8113     if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
   8114         uint8_t fwk_aeMode =
   8115             frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
   8116         uint8_t aeMode;
   8117         int32_t redeye;
   8118 
   8119         if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
   8120             aeMode = CAM_AE_MODE_OFF;
   8121         } else {
   8122             aeMode = CAM_AE_MODE_ON;
   8123         }
   8124         if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
   8125             redeye = 1;
   8126         } else {
   8127             redeye = 0;
   8128         }
   8129 
   8130         int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
   8131                 fwk_aeMode);
   8132         if (NAME_NOT_FOUND != val) {
   8133             int32_t flashMode = (int32_t)val;
   8134             ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
   8135         }
   8136 
   8137         ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
   8138         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
   8139             rc = BAD_VALUE;
   8140         }
   8141     }
   8142 
   8143     if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
   8144         uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
   8145         int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
   8146                 fwk_whiteLevel);
   8147         if (NAME_NOT_FOUND != val) {
   8148             uint8_t whiteLevel = (uint8_t)val;
   8149             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
   8150                 rc = BAD_VALUE;
   8151             }
   8152         }
   8153     }
   8154 
   8155     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
   8156         uint8_t fwk_cacMode =
   8157                 frame_settings.find(
   8158                         ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
   8159         int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
   8160                 fwk_cacMode);
   8161         if (NAME_NOT_FOUND != val) {
   8162             cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
   8163             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
   8164                 rc = BAD_VALUE;
   8165             }
   8166         } else {
   8167             ALOGE("%s: Invalid framework CAC mode: %d", __func__, fwk_cacMode);
   8168         }
   8169     }
   8170 
   8171     if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
   8172         uint8_t fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
   8173         int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
   8174                 fwk_focusMode);
   8175         if (NAME_NOT_FOUND != val) {
   8176             uint8_t focusMode = (uint8_t)val;
   8177             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
   8178                 rc = BAD_VALUE;
   8179             }
   8180         }
   8181     }
   8182 
   8183     if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
   8184         float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
   8185         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
   8186                 focalDistance)) {
   8187             rc = BAD_VALUE;
   8188         }
   8189     }
   8190 
   8191     if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
   8192         uint8_t fwk_antibandingMode =
   8193                 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
   8194         int val = lookupHalName(ANTIBANDING_MODES_MAP,
   8195                 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
   8196         if (NAME_NOT_FOUND != val) {
   8197             uint32_t hal_antibandingMode = (uint32_t)val;
   8198             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
   8199                     hal_antibandingMode)) {
   8200                 rc = BAD_VALUE;
   8201             }
   8202         }
   8203     }
   8204 
   8205     if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
   8206         int32_t expCompensation = frame_settings.find(
   8207                 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
   8208         if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
   8209             expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
   8210         if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
   8211             expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
   8212         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
   8213                 expCompensation)) {
   8214             rc = BAD_VALUE;
   8215         }
   8216     }
   8217 
   8218     if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
   8219         uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
   8220         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
   8221             rc = BAD_VALUE;
   8222         }
   8223     }
   8224     if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
   8225         rc = setHalFpsRange(frame_settings, hal_metadata);
   8226         if (rc != NO_ERROR) {
   8227             ALOGE("%s: setHalFpsRange failed", __func__);
   8228         }
   8229     }
   8230 
   8231     if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
   8232         uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
   8233         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
   8234             rc = BAD_VALUE;
   8235         }
   8236     }
   8237 
   8238     if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
   8239         uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
   8240         int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
   8241                 fwk_effectMode);
   8242         if (NAME_NOT_FOUND != val) {
   8243             uint8_t effectMode = (uint8_t)val;
   8244             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
   8245                 rc = BAD_VALUE;
   8246             }
   8247         }
   8248     }
   8249 
   8250     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
   8251         uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
   8252         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
   8253                 colorCorrectMode)) {
   8254             rc = BAD_VALUE;
   8255         }
   8256     }
   8257 
   8258     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
   8259         cam_color_correct_gains_t colorCorrectGains;
   8260         for (size_t i = 0; i < CC_GAINS_COUNT; i++) {
   8261             colorCorrectGains.gains[i] =
   8262                     frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
   8263         }
   8264         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
   8265                 colorCorrectGains)) {
   8266             rc = BAD_VALUE;
   8267         }
   8268     }
   8269 
   8270     if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
   8271         cam_color_correct_matrix_t colorCorrectTransform;
   8272         cam_rational_type_t transform_elem;
   8273         size_t num = 0;
   8274         for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
   8275            for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
   8276               transform_elem.numerator =
   8277                  frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
   8278               transform_elem.denominator =
   8279                  frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
   8280               colorCorrectTransform.transform_matrix[i][j] = transform_elem;
   8281               num++;
   8282            }
   8283         }
   8284         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
   8285                 colorCorrectTransform)) {
   8286             rc = BAD_VALUE;
   8287         }
   8288     }
   8289 
   8290     cam_trigger_t aecTrigger;
   8291     aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
   8292     aecTrigger.trigger_id = -1;
   8293     if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
   8294         frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
   8295         aecTrigger.trigger =
   8296             frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
   8297         aecTrigger.trigger_id =
   8298             frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
   8299         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
   8300                 aecTrigger)) {
   8301             rc = BAD_VALUE;
   8302         }
   8303         CDBG("%s: precaptureTrigger: %d precaptureTriggerID: %d", __func__,
   8304                 aecTrigger.trigger, aecTrigger.trigger_id);
   8305     }
   8306 
   8307     /*af_trigger must come with a trigger id*/
   8308     if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
   8309         frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
   8310         cam_trigger_t af_trigger;
   8311         af_trigger.trigger =
   8312             frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
   8313         af_trigger.trigger_id =
   8314             frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
   8315         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
   8316             rc = BAD_VALUE;
   8317         }
   8318         CDBG("%s: AfTrigger: %d AfTriggerID: %d", __func__,
   8319                 af_trigger.trigger, af_trigger.trigger_id);
   8320     }
   8321 
   8322     if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
   8323         int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
   8324         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
   8325             rc = BAD_VALUE;
   8326         }
   8327     }
   8328     if (frame_settings.exists(ANDROID_EDGE_MODE)) {
   8329         cam_edge_application_t edge_application;
   8330         edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
   8331         if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
   8332             edge_application.sharpness = 0;
   8333         } else {
   8334             edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
   8335         }
   8336         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
   8337             rc = BAD_VALUE;
   8338         }
   8339     }
   8340 
   8341     if (frame_settings.exists(ANDROID_FLASH_MODE)) {
   8342         int32_t respectFlashMode = 1;
   8343         if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
   8344             uint8_t fwk_aeMode =
   8345                 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
   8346             if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
   8347                 respectFlashMode = 0;
   8348                 CDBG_HIGH("%s: AE Mode controls flash, ignore android.flash.mode",
   8349                     __func__);
   8350             }
   8351         }
   8352         if (respectFlashMode) {
   8353             int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
   8354                     (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
   8355             CDBG_HIGH("%s: flash mode after mapping %d", __func__, val);
   8356             // To check: CAM_INTF_META_FLASH_MODE usage
   8357             if (NAME_NOT_FOUND != val) {
   8358                 uint8_t flashMode = (uint8_t)val;
   8359                 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
   8360                     rc = BAD_VALUE;
   8361                 }
   8362             }
   8363         }
   8364     }
   8365 
   8366     if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
   8367         uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
   8368         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
   8369             rc = BAD_VALUE;
   8370         }
   8371     }
   8372 
   8373     if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
   8374         int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
   8375         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
   8376                 flashFiringTime)) {
   8377             rc = BAD_VALUE;
   8378         }
   8379     }
   8380 
   8381     if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
   8382         uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
   8383         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
   8384                 hotPixelMode)) {
   8385             rc = BAD_VALUE;
   8386         }
   8387     }
   8388 
   8389     if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
   8390         float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
   8391         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
   8392                 lensAperture)) {
   8393             rc = BAD_VALUE;
   8394         }
   8395     }
   8396 
   8397     if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
   8398         float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
   8399         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
   8400                 filterDensity)) {
   8401             rc = BAD_VALUE;
   8402         }
   8403     }
   8404 
   8405     if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
   8406         float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
   8407         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
   8408                 focalLength)) {
   8409             rc = BAD_VALUE;
   8410         }
   8411     }
   8412 
   8413     if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
   8414         uint8_t optStabMode =
   8415                 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
   8416         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
   8417                 optStabMode)) {
   8418             rc = BAD_VALUE;
   8419         }
   8420     }
   8421 
   8422     if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
   8423         uint8_t videoStabMode =
   8424                 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
   8425         if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
   8426                 videoStabMode)) {
   8427             rc = BAD_VALUE;
   8428         }
   8429     }
   8430 
   8431 
   8432     if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
   8433         uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
   8434         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
   8435                 noiseRedMode)) {
   8436             rc = BAD_VALUE;
   8437         }
   8438     }
   8439 
   8440     if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
   8441         float reprocessEffectiveExposureFactor =
   8442             frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
   8443         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
   8444                 reprocessEffectiveExposureFactor)) {
   8445             rc = BAD_VALUE;
   8446         }
   8447     }
   8448 
   8449     cam_crop_region_t scalerCropRegion;
   8450     bool scalerCropSet = false;
   8451     if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
   8452         scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
   8453         scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
   8454         scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
   8455         scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
   8456 
   8457         // Map coordinate system from active array to sensor output.
   8458         mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
   8459                 scalerCropRegion.width, scalerCropRegion.height);
   8460 
   8461         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
   8462                 scalerCropRegion)) {
   8463             rc = BAD_VALUE;
   8464         }
   8465         scalerCropSet = true;
   8466     }
   8467 
   8468     if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
   8469         int64_t sensorExpTime =
   8470                 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
   8471         CDBG("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
   8472         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
   8473                 sensorExpTime)) {
   8474             rc = BAD_VALUE;
   8475         }
   8476     }
   8477 
   8478     if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
   8479         int64_t sensorFrameDuration =
   8480                 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
   8481         int64_t minFrameDuration = getMinFrameDuration(request);
   8482         sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
   8483         if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
   8484             sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
   8485         CDBG("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
   8486         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
   8487                 sensorFrameDuration)) {
   8488             rc = BAD_VALUE;
   8489         }
   8490     }
   8491 
   8492     if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
   8493         int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
   8494         if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
   8495                 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
   8496         if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
   8497                 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
   8498         CDBG("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
   8499         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
   8500                 sensorSensitivity)) {
   8501             rc = BAD_VALUE;
   8502         }
   8503     }
   8504 
   8505     if (frame_settings.exists(ANDROID_SHADING_MODE)) {
   8506         uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
   8507         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
   8508             rc = BAD_VALUE;
   8509         }
   8510     }
   8511 
   8512     if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
   8513         uint8_t fwk_facedetectMode =
   8514                 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
   8515 
   8516         int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
   8517                 fwk_facedetectMode);
   8518 
   8519         if (NAME_NOT_FOUND != val) {
   8520             uint8_t facedetectMode = (uint8_t)val;
   8521             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
   8522                     facedetectMode)) {
   8523                 rc = BAD_VALUE;
   8524             }
   8525         }
   8526     }
   8527 
   8528     if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
   8529         uint8_t histogramMode =
   8530                 frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
   8531         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
   8532                 histogramMode)) {
   8533             rc = BAD_VALUE;
   8534         }
   8535     }
   8536 
   8537     if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
   8538         uint8_t sharpnessMapMode =
   8539                 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
   8540         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
   8541                 sharpnessMapMode)) {
   8542             rc = BAD_VALUE;
   8543         }
   8544     }
   8545 
   8546     if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
   8547         uint8_t tonemapMode =
   8548                 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
   8549         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
   8550             rc = BAD_VALUE;
   8551         }
   8552     }
   8553     /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
   8554     /*All tonemap channels will have the same number of points*/
   8555     if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
   8556         frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
   8557         frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
   8558         cam_rgb_tonemap_curves tonemapCurves;
   8559         tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
   8560         if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
   8561             ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d",
   8562                     __func__, tonemapCurves.tonemap_points_cnt,
   8563                     CAM_MAX_TONEMAP_CURVE_SIZE);
   8564             tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
   8565         }
   8566 
   8567         /* ch0 = G*/
   8568         size_t point = 0;
   8569         cam_tonemap_curve_t tonemapCurveGreen;
   8570         for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
   8571             for (size_t j = 0; j < 2; j++) {
   8572                tonemapCurveGreen.tonemap_points[i][j] =
   8573                   frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
   8574                point++;
   8575             }
   8576         }
   8577         tonemapCurves.curves[0] = tonemapCurveGreen;
   8578 
   8579         /* ch 1 = B */
   8580         point = 0;
   8581         cam_tonemap_curve_t tonemapCurveBlue;
   8582         for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
   8583             for (size_t j = 0; j < 2; j++) {
   8584                tonemapCurveBlue.tonemap_points[i][j] =
   8585                   frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
   8586                point++;
   8587             }
   8588         }
   8589         tonemapCurves.curves[1] = tonemapCurveBlue;
   8590 
   8591         /* ch 2 = R */
   8592         point = 0;
   8593         cam_tonemap_curve_t tonemapCurveRed;
   8594         for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
   8595             for (size_t j = 0; j < 2; j++) {
   8596                tonemapCurveRed.tonemap_points[i][j] =
   8597                   frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
   8598                point++;
   8599             }
   8600         }
   8601         tonemapCurves.curves[2] = tonemapCurveRed;
   8602 
   8603         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
   8604                 tonemapCurves)) {
   8605             rc = BAD_VALUE;
   8606         }
   8607     }
   8608 
   8609     if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
   8610         uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
   8611         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
   8612                 captureIntent)) {
   8613             rc = BAD_VALUE;
   8614         }
   8615     }
   8616 
   8617     if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
   8618         uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
   8619         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
   8620                 blackLevelLock)) {
   8621             rc = BAD_VALUE;
   8622         }
   8623     }
   8624 
   8625     if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
   8626         uint8_t lensShadingMapMode =
   8627                 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
   8628         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
   8629                 lensShadingMapMode)) {
   8630             rc = BAD_VALUE;
   8631         }
   8632     }
   8633 
   8634     if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
   8635         cam_area_t roi;
   8636         bool reset = true;
   8637         convertFromRegions(roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
   8638 
   8639         // Map coordinate system from active array to sensor output.
   8640         mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
   8641                 roi.rect.height);
   8642 
   8643         if (scalerCropSet) {
   8644             reset = resetIfNeededROI(&roi, &scalerCropRegion);
   8645         }
   8646         if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
   8647             rc = BAD_VALUE;
   8648         }
   8649     }
   8650 
   8651     if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
   8652         cam_area_t roi;
   8653         bool reset = true;
   8654         convertFromRegions(roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
   8655 
   8656         // Map coordinate system from active array to sensor output.
   8657         mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
   8658                 roi.rect.height);
   8659 
   8660         if (scalerCropSet) {
   8661             reset = resetIfNeededROI(&roi, &scalerCropRegion);
   8662         }
   8663         if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
   8664             rc = BAD_VALUE;
   8665         }
   8666     }
   8667 
   8668     if (m_bIs4KVideo) {
   8669         /* Override needed for Video template in case of 4K video */
   8670         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
   8671                 CAM_INTF_PARM_CDS_MODE, m_CdsPreference)) {
   8672             rc = BAD_VALUE;
   8673         }
   8674     } else if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
   8675             frame_settings.exists(QCAMERA3_CDS_MODE)) {
   8676         int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
   8677         if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
   8678             ALOGE("%s: Invalid CDS mode %d!", __func__, *fwk_cds);
   8679         } else {
   8680             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
   8681                     CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
   8682                 rc = BAD_VALUE;
   8683             }
   8684         }
   8685     }
   8686 
   8687     // TNR
   8688     if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
   8689         frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
   8690         uint8_t b_TnrRequested = 0;
   8691         cam_denoise_param_t tnr;
   8692         tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
   8693         tnr.process_plates =
   8694             (cam_denoise_process_type_t)frame_settings.find(
   8695             QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
   8696         b_TnrRequested = tnr.denoise_enable;
   8697         if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
   8698             rc = BAD_VALUE;
   8699         }
   8700     }
   8701 
   8702     if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
   8703         int32_t fwk_testPatternMode =
   8704                 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
   8705         int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
   8706                 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
   8707 
   8708         if (NAME_NOT_FOUND != testPatternMode) {
   8709             cam_test_pattern_data_t testPatternData;
   8710             memset(&testPatternData, 0, sizeof(testPatternData));
   8711             testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
   8712             if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
   8713                     frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
   8714                 int32_t *fwk_testPatternData =
   8715                         frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
   8716                 testPatternData.r = fwk_testPatternData[0];
   8717                 testPatternData.b = fwk_testPatternData[3];
   8718                 switch (gCamCapability[mCameraId]->color_arrangement) {
   8719                     case CAM_FILTER_ARRANGEMENT_RGGB:
   8720                     case CAM_FILTER_ARRANGEMENT_GRBG:
   8721                         testPatternData.gr = fwk_testPatternData[1];
   8722                         testPatternData.gb = fwk_testPatternData[2];
   8723                         break;
   8724                     case CAM_FILTER_ARRANGEMENT_GBRG:
   8725                     case CAM_FILTER_ARRANGEMENT_BGGR:
   8726                         testPatternData.gr = fwk_testPatternData[2];
   8727                         testPatternData.gb = fwk_testPatternData[1];
   8728                         break;
   8729                     default:
   8730                         ALOGE("%s: color arrangement %d is not supported", __func__,
   8731                                 gCamCapability[mCameraId]->color_arrangement);
   8732                         break;
   8733                 }
   8734             }
   8735             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
   8736                     testPatternData)) {
   8737                 rc = BAD_VALUE;
   8738             }
   8739         } else {
   8740             ALOGE("%s: Invalid framework sensor test pattern mode %d", __func__,
   8741                     fwk_testPatternMode);
   8742         }
   8743     }
   8744 
   8745     if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
   8746         size_t count = 0;
   8747         camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
   8748         ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
   8749                 gps_coords.data.d, gps_coords.count, count);
   8750         if (gps_coords.count != count) {
   8751             rc = BAD_VALUE;
   8752         }
   8753     }
   8754 
   8755     if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
   8756         char gps_methods[GPS_PROCESSING_METHOD_SIZE];
   8757         size_t count = 0;
   8758         const char *gps_methods_src = (const char *)
   8759                 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
   8760         memset(gps_methods, '\0', sizeof(gps_methods));
   8761         strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
   8762         ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
   8763                 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
   8764         if (GPS_PROCESSING_METHOD_SIZE != count) {
   8765             rc = BAD_VALUE;
   8766         }
   8767     }
   8768 
   8769     if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
   8770         int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
   8771         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
   8772                 gps_timestamp)) {
   8773             rc = BAD_VALUE;
   8774         }
   8775     }
   8776 
   8777     if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
   8778         int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
   8779         cam_rotation_info_t rotation_info;
   8780         if (orientation == 0) {
   8781            rotation_info.rotation = ROTATE_0;
   8782         } else if (orientation == 90) {
   8783            rotation_info.rotation = ROTATE_90;
   8784         } else if (orientation == 180) {
   8785            rotation_info.rotation = ROTATE_180;
   8786         } else if (orientation == 270) {
   8787            rotation_info.rotation = ROTATE_270;
   8788         }
   8789         rotation_info.streamId = snapshotStreamId;
   8790         ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
   8791         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
   8792             rc = BAD_VALUE;
   8793         }
   8794     }
   8795 
   8796     if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
   8797         uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
   8798         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
   8799             rc = BAD_VALUE;
   8800         }
   8801     }
   8802 
   8803     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
   8804         uint32_t thumb_quality = (uint32_t)
   8805                 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
   8806         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
   8807                 thumb_quality)) {
   8808             rc = BAD_VALUE;
   8809         }
   8810     }
   8811 
   8812     if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
   8813         cam_dimension_t dim;
   8814         dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
   8815         dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
   8816         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
   8817             rc = BAD_VALUE;
   8818         }
   8819     }
   8820 
   8821     // Internal metadata
   8822     if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
   8823         size_t count = 0;
   8824         camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
   8825         ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
   8826                 privatedata.data.i32, privatedata.count, count);
   8827         if (privatedata.count != count) {
   8828             rc = BAD_VALUE;
   8829         }
   8830     }
   8831 
   8832     if (frame_settings.exists(QCAMERA3_USE_AV_TIMER)) {
   8833         uint8_t* use_av_timer =
   8834                 frame_settings.find(QCAMERA3_USE_AV_TIMER).data.u8;
   8835         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
   8836             rc = BAD_VALUE;
   8837         }
   8838     }
   8839 
   8840     // EV step
   8841     if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
   8842             gCamCapability[mCameraId]->exp_compensation_step)) {
   8843         rc = BAD_VALUE;
   8844     }
   8845 
   8846     // CDS info
   8847     if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
   8848         cam_cds_data_t *cdsData = (cam_cds_data_t *)
   8849                 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
   8850 
   8851         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
   8852                 CAM_INTF_META_CDS_DATA, *cdsData)) {
   8853             rc = BAD_VALUE;
   8854         }
   8855     }
   8856 
   8857     // Hybrid AE
   8858     if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
   8859         uint8_t *hybrid_ae = (uint8_t *)
   8860                 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
   8861 
   8862         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
   8863                 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
   8864             rc = BAD_VALUE;
   8865         }
   8866     }
   8867 
   8868     return rc;
   8869 }
   8870 
   8871 /*===========================================================================
   8872  * FUNCTION   : captureResultCb
   8873  *
   8874  * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
   8875  *
   8876  * PARAMETERS :
   8877  *   @frame  : frame information from mm-camera-interface
   8878  *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
   8879  *   @userdata: userdata
   8880  *
   8881  * RETURN     : NONE
   8882  *==========================================================================*/
   8883 void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
   8884                 camera3_stream_buffer_t *buffer,
   8885                 uint32_t frame_number, bool isInputBuffer, void *userdata)
   8886 {
   8887     QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
   8888     if (hw == NULL) {
   8889         ALOGE("%s: Invalid hw %p", __func__, hw);
   8890         return;
   8891     }
   8892 
   8893     hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
   8894     return;
   8895 }
   8896 
   8897 
   8898 /*===========================================================================
   8899  * FUNCTION   : initialize
   8900  *
   8901  * DESCRIPTION: Pass framework callback pointers to HAL
   8902  *
   8903  * PARAMETERS :
   8904  *
   8905  *
   8906  * RETURN     : Success : 0
   8907  *              Failure: -ENODEV
   8908  *==========================================================================*/
   8909 
   8910 int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
   8911                                   const camera3_callback_ops_t *callback_ops)
   8912 {
   8913     CDBG("%s: E", __func__);
   8914     QCamera3HardwareInterface *hw =
   8915         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   8916     if (!hw) {
   8917         ALOGE("%s: NULL camera device", __func__);
   8918         return -ENODEV;
   8919     }
   8920 
   8921     int rc = hw->initialize(callback_ops);
   8922     CDBG("%s: X", __func__);
   8923     return rc;
   8924 }
   8925 
   8926 /*===========================================================================
   8927  * FUNCTION   : configure_streams
   8928  *
   8929  * DESCRIPTION:
   8930  *
   8931  * PARAMETERS :
   8932  *
   8933  *
   8934  * RETURN     : Success: 0
   8935  *              Failure: -EINVAL (if stream configuration is invalid)
   8936  *                       -ENODEV (fatal error)
   8937  *==========================================================================*/
   8938 
   8939 int QCamera3HardwareInterface::configure_streams(
   8940         const struct camera3_device *device,
   8941         camera3_stream_configuration_t *stream_list)
   8942 {
   8943     CDBG("%s: E", __func__);
   8944     QCamera3HardwareInterface *hw =
   8945         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   8946     if (!hw) {
   8947         ALOGE("%s: NULL camera device", __func__);
   8948         return -ENODEV;
   8949     }
   8950     int rc = hw->configureStreams(stream_list);
   8951     CDBG("%s: X", __func__);
   8952     return rc;
   8953 }
   8954 
   8955 /*===========================================================================
   8956  * FUNCTION   : construct_default_request_settings
   8957  *
   8958  * DESCRIPTION: Configure a settings buffer to meet the required use case
   8959  *
   8960  * PARAMETERS :
   8961  *
   8962  *
   8963  * RETURN     : Success: Return valid metadata
   8964  *              Failure: Return NULL
   8965  *==========================================================================*/
   8966 const camera_metadata_t* QCamera3HardwareInterface::
   8967     construct_default_request_settings(const struct camera3_device *device,
   8968                                         int type)
   8969 {
   8970 
   8971     CDBG("%s: E", __func__);
   8972     camera_metadata_t* fwk_metadata = NULL;
   8973     QCamera3HardwareInterface *hw =
   8974         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   8975     if (!hw) {
   8976         ALOGE("%s: NULL camera device", __func__);
   8977         return NULL;
   8978     }
   8979 
   8980     fwk_metadata = hw->translateCapabilityToMetadata(type);
   8981 
   8982     CDBG("%s: X", __func__);
   8983     return fwk_metadata;
   8984 }
   8985 
   8986 /*===========================================================================
   8987  * FUNCTION   : process_capture_request
   8988  *
   8989  * DESCRIPTION:
   8990  *
   8991  * PARAMETERS :
   8992  *
   8993  *
   8994  * RETURN     :
   8995  *==========================================================================*/
   8996 int QCamera3HardwareInterface::process_capture_request(
   8997                     const struct camera3_device *device,
   8998                     camera3_capture_request_t *request)
   8999 {
   9000     CDBG("%s: E", __func__);
   9001     QCamera3HardwareInterface *hw =
   9002         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   9003     if (!hw) {
   9004         ALOGE("%s: NULL camera device", __func__);
   9005         return -EINVAL;
   9006     }
   9007 
   9008     int rc = hw->processCaptureRequest(request);
   9009     CDBG("%s: X", __func__);
   9010     return rc;
   9011 }
   9012 
   9013 /*===========================================================================
   9014  * FUNCTION   : dump
   9015  *
   9016  * DESCRIPTION:
   9017  *
   9018  * PARAMETERS :
   9019  *
   9020  *
   9021  * RETURN     :
   9022  *==========================================================================*/
   9023 
   9024 void QCamera3HardwareInterface::dump(
   9025                 const struct camera3_device *device, int fd)
   9026 {
   9027     /* Log level property is read when "adb shell dumpsys media.camera" is
   9028        called so that the log level can be controlled without restarting
   9029        the media server */
   9030     getLogLevel();
   9031 
   9032     CDBG("%s: E", __func__);
   9033     QCamera3HardwareInterface *hw =
   9034         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   9035     if (!hw) {
   9036         ALOGE("%s: NULL camera device", __func__);
   9037         return;
   9038     }
   9039 
   9040     hw->dump(fd);
   9041     CDBG("%s: X", __func__);
   9042     return;
   9043 }
   9044 
   9045 /*===========================================================================
   9046  * FUNCTION   : flush
   9047  *
   9048  * DESCRIPTION:
   9049  *
   9050  * PARAMETERS :
   9051  *
   9052  *
   9053  * RETURN     :
   9054  *==========================================================================*/
   9055 
   9056 int QCamera3HardwareInterface::flush(
   9057                 const struct camera3_device *device)
   9058 {
   9059     int rc;
   9060     CDBG("%s: E", __func__);
   9061     QCamera3HardwareInterface *hw =
   9062         reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
   9063     if (!hw) {
   9064         ALOGE("%s: NULL camera device", __func__);
   9065         return -EINVAL;
   9066     }
   9067 
   9068     rc = hw->flush();
   9069     CDBG("%s: X", __func__);
   9070     return rc;
   9071 }
   9072 
   9073 /*===========================================================================
   9074  * FUNCTION   : close_camera_device
   9075  *
   9076  * DESCRIPTION:
   9077  *
   9078  * PARAMETERS :
   9079  *
   9080  *
   9081  * RETURN     :
   9082  *==========================================================================*/
   9083 int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
   9084 {
   9085     CDBG("%s: E", __func__);
   9086     int ret = NO_ERROR;
   9087     QCamera3HardwareInterface *hw =
   9088         reinterpret_cast<QCamera3HardwareInterface *>(
   9089             reinterpret_cast<camera3_device_t *>(device)->priv);
   9090     if (!hw) {
   9091         ALOGE("NULL camera device");
   9092         return BAD_VALUE;
   9093     }
   9094     delete hw;
   9095 
   9096     CDBG("%s: X", __func__);
   9097     return ret;
   9098 }
   9099 
   9100 /*===========================================================================
   9101  * FUNCTION   : getWaveletDenoiseProcessPlate
   9102  *
   9103  * DESCRIPTION: query wavelet denoise process plate
   9104  *
   9105  * PARAMETERS : None
   9106  *
   9107  * RETURN     : WNR prcocess plate value
   9108  *==========================================================================*/
   9109 cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
   9110 {
   9111     char prop[PROPERTY_VALUE_MAX];
   9112     memset(prop, 0, sizeof(prop));
   9113     property_get("persist.denoise.process.plates", prop, "0");
   9114     int processPlate = atoi(prop);
   9115     switch(processPlate) {
   9116     case 0:
   9117         return CAM_WAVELET_DENOISE_YCBCR_PLANE;
   9118     case 1:
   9119         return CAM_WAVELET_DENOISE_CBCR_ONLY;
   9120     case 2:
   9121         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
   9122     case 3:
   9123         return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
   9124     default:
   9125         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
   9126     }
   9127 }
   9128 
   9129 
   9130 /*===========================================================================
   9131  * FUNCTION   : getTemporalDenoiseProcessPlate
   9132  *
   9133  * DESCRIPTION: query temporal denoise process plate
   9134  *
   9135  * PARAMETERS : None
   9136  *
   9137  * RETURN     : TNR prcocess plate value
   9138  *==========================================================================*/
   9139 cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
   9140 {
   9141     char prop[PROPERTY_VALUE_MAX];
   9142     memset(prop, 0, sizeof(prop));
   9143     property_get("persist.tnr.process.plates", prop, "0");
   9144     int processPlate = atoi(prop);
   9145     switch(processPlate) {
   9146     case 0:
   9147         return CAM_WAVELET_DENOISE_YCBCR_PLANE;
   9148     case 1:
   9149         return CAM_WAVELET_DENOISE_CBCR_ONLY;
   9150     case 2:
   9151         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
   9152     case 3:
   9153         return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
   9154     default:
   9155         return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
   9156     }
   9157 }
   9158 
   9159 
   9160 /*===========================================================================
   9161  * FUNCTION   : extractSceneMode
   9162  *
   9163  * DESCRIPTION: Extract scene mode from frameworks set metadata
   9164  *
   9165  * PARAMETERS :
   9166  *      @frame_settings: CameraMetadata reference
   9167  *      @metaMode: ANDROID_CONTORL_MODE
   9168  *      @hal_metadata: hal metadata structure
   9169  *
   9170  * RETURN     : None
   9171  *==========================================================================*/
   9172 int32_t QCamera3HardwareInterface::extractSceneMode(
   9173         const CameraMetadata &frame_settings, uint8_t metaMode,
   9174         metadata_buffer_t *hal_metadata)
   9175 {
   9176     int32_t rc = NO_ERROR;
   9177 
   9178     if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
   9179         camera_metadata_ro_entry entry =
   9180                 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
   9181         if (0 == entry.count)
   9182             return rc;
   9183 
   9184         uint8_t fwk_sceneMode = entry.data.u8[0];
   9185 
   9186         int val = lookupHalName(SCENE_MODES_MAP,
   9187                 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
   9188                 fwk_sceneMode);
   9189         if (NAME_NOT_FOUND != val) {
   9190             uint8_t sceneMode = (uint8_t)val;
   9191             CDBG("%s: sceneMode: %d", __func__, sceneMode);
   9192             if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
   9193                     CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
   9194                 rc = BAD_VALUE;
   9195             }
   9196         }
   9197     } else if ((ANDROID_CONTROL_MODE_OFF == metaMode) ||
   9198             (ANDROID_CONTROL_MODE_AUTO == metaMode)) {
   9199         uint8_t sceneMode = CAM_SCENE_MODE_OFF;
   9200         CDBG("%s: sceneMode: %d", __func__, sceneMode);
   9201         if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
   9202                 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
   9203             rc = BAD_VALUE;
   9204         }
   9205     }
   9206     return rc;
   9207 }
   9208 
   9209 /*===========================================================================
   9210  * FUNCTION   : needRotationReprocess
   9211  *
   9212  * DESCRIPTION: if rotation needs to be done by reprocess in pp
   9213  *
   9214  * PARAMETERS : none
   9215  *
   9216  * RETURN     : true: needed
   9217  *              false: no need
   9218  *==========================================================================*/
   9219 bool QCamera3HardwareInterface::needRotationReprocess()
   9220 {
   9221     if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
   9222         // current rotation is not zero, and pp has the capability to process rotation
   9223         CDBG_HIGH("%s: need do reprocess for rotation", __func__);
   9224         return true;
   9225     }
   9226 
   9227     return false;
   9228 }
   9229 
   9230 /*===========================================================================
   9231  * FUNCTION   : needReprocess
   9232  *
   9233  * DESCRIPTION: if reprocess in needed
   9234  *
   9235  * PARAMETERS : none
   9236  *
   9237  * RETURN     : true: needed
   9238  *              false: no need
   9239  *==========================================================================*/
   9240 bool QCamera3HardwareInterface::needReprocess(uint32_t postprocess_mask)
   9241 {
   9242     if (gCamCapability[mCameraId]->min_required_pp_mask > 0) {
   9243         // TODO: add for ZSL HDR later
   9244         // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
   9245         if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
   9246             CDBG_HIGH("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
   9247             return true;
   9248         } else {
   9249             CDBG_HIGH("%s: already post processed frame", __func__);
   9250             return false;
   9251         }
   9252     }
   9253     return needRotationReprocess();
   9254 }
   9255 
   9256 /*===========================================================================
   9257  * FUNCTION   : needJpegRotation
   9258  *
   9259  * DESCRIPTION: if rotation from jpeg is needed
   9260  *
   9261  * PARAMETERS : none
   9262  *
   9263  * RETURN     : true: needed
   9264  *              false: no need
   9265  *==========================================================================*/
   9266 bool QCamera3HardwareInterface::needJpegRotation()
   9267 {
   9268    /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
   9269     if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
   9270        CDBG("%s: Need Jpeg to do the rotation", __func__);
   9271        return true;
   9272     }
   9273     return false;
   9274 }
   9275 
   9276 /*===========================================================================
   9277  * FUNCTION   : addOfflineReprocChannel
   9278  *
   9279  * DESCRIPTION: add a reprocess channel that will do reprocess on frames
   9280  *              coming from input channel
   9281  *
   9282  * PARAMETERS :
   9283  *   @config  : reprocess configuration
   9284  *   @inputChHandle : pointer to the input (source) channel
   9285  *
   9286  *
   9287  * RETURN     : Ptr to the newly created channel obj. NULL if failed.
   9288  *==========================================================================*/
   9289 QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
   9290         const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
   9291 {
   9292     int32_t rc = NO_ERROR;
   9293     QCamera3ReprocessChannel *pChannel = NULL;
   9294 
   9295     pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
   9296             mChannelHandle, mCameraHandle->ops, captureResultCb, config.padding,
   9297             CAM_QCOM_FEATURE_NONE, this, inputChHandle);
   9298     if (NULL == pChannel) {
   9299         ALOGE("%s: no mem for reprocess channel", __func__);
   9300         return NULL;
   9301     }
   9302 
   9303     rc = pChannel->initialize(IS_TYPE_NONE);
   9304     if (rc != NO_ERROR) {
   9305         ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
   9306         delete pChannel;
   9307         return NULL;
   9308     }
   9309 
   9310     // pp feature config
   9311     cam_pp_feature_config_t pp_config;
   9312     memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
   9313 
   9314     pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
   9315 
   9316     rc = pChannel->addReprocStreamsFromSource(pp_config,
   9317             config,
   9318             IS_TYPE_NONE,
   9319             mMetadataChannel);
   9320 
   9321     if (rc != NO_ERROR) {
   9322         delete pChannel;
   9323         return NULL;
   9324     }
   9325     return pChannel;
   9326 }
   9327 
   9328 /*===========================================================================
   9329  * FUNCTION   : getMobicatMask
   9330  *
   9331  * DESCRIPTION: returns mobicat mask
   9332  *
   9333  * PARAMETERS : none
   9334  *
   9335  * RETURN     : mobicat mask
   9336  *
   9337  *==========================================================================*/
   9338 uint8_t QCamera3HardwareInterface::getMobicatMask()
   9339 {
   9340     return m_MobicatMask;
   9341 }
   9342 
   9343 /*===========================================================================
   9344  * FUNCTION   : setMobicat
   9345  *
   9346  * DESCRIPTION: set Mobicat on/off.
   9347  *
   9348  * PARAMETERS :
   9349  *   @params  : none
   9350  *
   9351  * RETURN     : int32_t type of status
   9352  *              NO_ERROR  -- success
   9353  *              none-zero failure code
   9354  *==========================================================================*/
   9355 int32_t QCamera3HardwareInterface::setMobicat()
   9356 {
   9357     char value [PROPERTY_VALUE_MAX];
   9358     property_get("persist.camera.mobicat", value, "0");
   9359     int32_t ret = NO_ERROR;
   9360     uint8_t enableMobi = (uint8_t)atoi(value);
   9361 
   9362     if (enableMobi) {
   9363         tune_cmd_t tune_cmd;
   9364         tune_cmd.type = SET_RELOAD_CHROMATIX;
   9365         tune_cmd.module = MODULE_ALL;
   9366         tune_cmd.value = TRUE;
   9367         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
   9368                 CAM_INTF_PARM_SET_VFE_COMMAND,
   9369                 tune_cmd);
   9370 
   9371         ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
   9372                 CAM_INTF_PARM_SET_PP_COMMAND,
   9373                 tune_cmd);
   9374     }
   9375     m_MobicatMask = enableMobi;
   9376 
   9377     return ret;
   9378 }
   9379 
   9380 /*===========================================================================
   9381 * FUNCTION   : getLogLevel
   9382 *
   9383 * DESCRIPTION: Reads the log level property into a variable
   9384 *
   9385 * PARAMETERS :
   9386 *   None
   9387 *
   9388 * RETURN     :
   9389 *   None
   9390 *==========================================================================*/
   9391 void QCamera3HardwareInterface::getLogLevel()
   9392 {
   9393     char prop[PROPERTY_VALUE_MAX];
   9394     uint32_t globalLogLevel = 0;
   9395 
   9396     property_get("persist.camera.hal.debug", prop, "0");
   9397     int val = atoi(prop);
   9398     if (0 <= val) {
   9399         gCamHal3LogLevel = (uint32_t)val;
   9400     }
   9401     property_get("persist.camera.global.debug", prop, "0");
   9402     val = atoi(prop);
   9403     if (0 <= val) {
   9404         globalLogLevel = (uint32_t)val;
   9405     }
   9406 
   9407     /* Highest log level among hal.logs and global.logs is selected */
   9408     if (gCamHal3LogLevel < globalLogLevel)
   9409         gCamHal3LogLevel = globalLogLevel;
   9410 
   9411     return;
   9412 }
   9413 
   9414 /*===========================================================================
   9415  * FUNCTION   : validateStreamRotations
   9416  *
   9417  * DESCRIPTION: Check if the rotations requested are supported
   9418  *
   9419  * PARAMETERS :
   9420  *   @stream_list : streams to be configured
   9421  *
   9422  * RETURN     : NO_ERROR on success
   9423  *              -EINVAL on failure
   9424  *
   9425  *==========================================================================*/
   9426 int QCamera3HardwareInterface::validateStreamRotations(
   9427         camera3_stream_configuration_t *streamList)
   9428 {
   9429     int rc = NO_ERROR;
   9430 
   9431     /*
   9432     * Loop through all streams requested in configuration
   9433     * Check if unsupported rotations have been requested on any of them
   9434     */
   9435     for (size_t j = 0; j < streamList->num_streams; j++){
   9436         camera3_stream_t *newStream = streamList->streams[j];
   9437 
   9438         switch(newStream->rotation) {
   9439             case CAMERA3_STREAM_ROTATION_0:
   9440             case CAMERA3_STREAM_ROTATION_90:
   9441             case CAMERA3_STREAM_ROTATION_180:
   9442             case CAMERA3_STREAM_ROTATION_270:
   9443                 //Expected values
   9444                 break;
   9445             default:
   9446                 ALOGE("%s: Error: Unsupported rotation of %d requested for stream"
   9447                         "type:%d and stream format:%d", __func__,
   9448                         newStream->rotation, newStream->stream_type,
   9449                         newStream->format);
   9450                 return -EINVAL;
   9451         }
   9452 
   9453         bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
   9454         bool isImplDef = (newStream->format ==
   9455                 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
   9456         bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
   9457                 isImplDef);
   9458 
   9459         if (isRotated && (!isImplDef || isZsl)) {
   9460             ALOGE("%s: Error: Unsupported rotation of %d requested for stream"
   9461                     "type:%d and stream format:%d", __func__,
   9462                     newStream->rotation, newStream->stream_type,
   9463                     newStream->format);
   9464             rc = -EINVAL;
   9465             break;
   9466         }
   9467     }
   9468     return rc;
   9469 }
   9470 
   9471 /*===========================================================================
   9472 * FUNCTION   : getFlashInfo
   9473 *
   9474 * DESCRIPTION: Retrieve information about whether the device has a flash.
   9475 *
   9476 * PARAMETERS :
   9477 *   @cameraId  : Camera id to query
   9478 *   @hasFlash  : Boolean indicating whether there is a flash device
   9479 *                associated with given camera
   9480 *   @flashNode : If a flash device exists, this will be its device node.
   9481 *
   9482 * RETURN     :
   9483 *   None
   9484 *==========================================================================*/
   9485 void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
   9486         bool& hasFlash,
   9487         char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
   9488 {
   9489     cam_capability_t* camCapability = gCamCapability[cameraId];
   9490     if (NULL == camCapability) {
   9491         hasFlash = false;
   9492         flashNode[0] = '\0';
   9493     } else {
   9494         hasFlash = camCapability->flash_available;
   9495         strlcpy(flashNode,
   9496                 (char*)camCapability->flash_dev_name,
   9497                 QCAMERA_MAX_FILEPATH_LENGTH);
   9498     }
   9499 }
   9500 
   9501 /*===========================================================================
   9502 * FUNCTION   : getEepromVersionInfo
   9503 *
   9504 * DESCRIPTION: Retrieve version info of the sensor EEPROM data
   9505 *
   9506 * PARAMETERS : None
   9507 *
   9508 * RETURN     : string describing EEPROM version
   9509 *              "\0" if no such info available
   9510 *==========================================================================*/
   9511 const char *QCamera3HardwareInterface::getEepromVersionInfo()
   9512 {
   9513     return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
   9514 }
   9515 
   9516 /*===========================================================================
   9517 * FUNCTION   : getLdafCalib
   9518 *
   9519 * DESCRIPTION: Retrieve Laser AF calibration data
   9520 *
   9521 * PARAMETERS : None
   9522 *
   9523 * RETURN     : Two uint32_t describing laser AF calibration data
   9524 *              NULL if none is available.
   9525 *==========================================================================*/
   9526 const uint32_t *QCamera3HardwareInterface::getLdafCalib()
   9527 {
   9528     if (mLdafCalibExist) {
   9529         return &mLdafCalib[0];
   9530     } else {
   9531         return NULL;
   9532     }
   9533 }
   9534 
   9535 /*===========================================================================
   9536  * FUNCTION   : dynamicUpdateMetaStreamInfo
   9537  *
   9538  * DESCRIPTION: This function:
   9539  *             (1) stops all the channels
   9540  *             (2) returns error on pending requests and buffers
   9541  *             (3) sends metastream_info in setparams
   9542  *             (4) starts all channels
   9543  *             This is useful when sensor has to be restarted to apply any
   9544  *             settings such as frame rate from a different sensor mode
   9545  *
   9546  * PARAMETERS : None
   9547  *
   9548  * RETURN     : NO_ERROR on success
   9549  *              Error codes on failure
   9550  *
   9551  *==========================================================================*/
   9552 int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
   9553 {
   9554     ATRACE_CALL();
   9555     int rc = NO_ERROR;
   9556 
   9557     CDBG("%s: E", __func__);
   9558 
   9559     rc = stopAllChannels();
   9560     if (rc < 0) {
   9561         ALOGE("%s: stopAllChannels failed", __func__);
   9562         return rc;
   9563     }
   9564 
   9565     rc = notifyErrorForPendingRequests();
   9566     if (rc < 0) {
   9567         ALOGE("%s: notifyErrorForPendingRequests failed", __func__);
   9568         return rc;
   9569     }
   9570 
   9571     /* Send meta stream info once again so that ISP can start */
   9572     ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
   9573             CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
   9574     CDBG("%s: set_parms META_STREAM_INFO with new settings ", __func__ );
   9575     rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
   9576             mParameters);
   9577     if (rc < 0) {
   9578         ALOGE("%s: set Metastreaminfo failed. Sensor mode does not change",
   9579                 __func__);
   9580     }
   9581 
   9582     rc = startAllChannels();
   9583     if (rc < 0) {
   9584         ALOGE("%s: startAllChannels failed", __func__);
   9585         return rc;
   9586     }
   9587 
   9588     CDBG("%s:%d X", __func__, __LINE__);
   9589     return rc;
   9590 }
   9591 
   9592 /*===========================================================================
   9593  * FUNCTION   : stopAllChannels
   9594  *
   9595  * DESCRIPTION: This function stops (equivalent to stream-off) all channels
   9596  *
   9597  * PARAMETERS : None
   9598  *
   9599  * RETURN     : NO_ERROR on success
   9600  *              Error codes on failure
   9601  *
   9602  *==========================================================================*/
   9603 int32_t QCamera3HardwareInterface::stopAllChannels()
   9604 {
   9605     int32_t rc = NO_ERROR;
   9606 
   9607     // Stop the Streams/Channels
   9608     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   9609         it != mStreamInfo.end(); it++) {
   9610         QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
   9611         if (channel != nullptr) {
   9612             channel->stop();
   9613         }
   9614         (*it)->status = INVALID;
   9615     }
   9616 
   9617     if (mSupportChannel) {
   9618         mSupportChannel->stop();
   9619     }
   9620     if (mAnalysisChannel) {
   9621         mAnalysisChannel->stop();
   9622     }
   9623     if (mRawDumpChannel) {
   9624         mRawDumpChannel->stop();
   9625     }
   9626     if (mMetadataChannel) {
   9627         /* If content of mStreamInfo is not 0, there is metadata stream */
   9628         mMetadataChannel->stop();
   9629     }
   9630 
   9631     CDBG("%s:%d All channels stopped", __func__, __LINE__);
   9632     return rc;
   9633 }
   9634 
   9635 /*===========================================================================
   9636  * FUNCTION   : startAllChannels
   9637  *
   9638  * DESCRIPTION: This function starts (equivalent to stream-on) all channels
   9639  *
   9640  * PARAMETERS : None
   9641  *
   9642  * RETURN     : NO_ERROR on success
   9643  *              Error codes on failure
   9644  *
   9645  *==========================================================================*/
   9646 int32_t QCamera3HardwareInterface::startAllChannels()
   9647 {
   9648     int32_t rc = NO_ERROR;
   9649 
   9650     CDBG("%s: Start all channels ", __func__);
   9651     // Start the Streams/Channels
   9652     if (mMetadataChannel) {
   9653         /* If content of mStreamInfo is not 0, there is metadata stream */
   9654         rc = mMetadataChannel->start();
   9655         if (rc < 0) {
   9656             ALOGE("%s: META channel start failed", __func__);
   9657             return rc;
   9658         }
   9659     }
   9660     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   9661         it != mStreamInfo.end(); it++) {
   9662         QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
   9663         rc = channel->start();
   9664         if (rc < 0) {
   9665             ALOGE("%s: channel start failed", __func__);
   9666             return rc;
   9667         }
   9668     }
   9669     if (mAnalysisChannel) {
   9670         mAnalysisChannel->start();
   9671     }
   9672     if (mSupportChannel) {
   9673         rc = mSupportChannel->start();
   9674         if (rc < 0) {
   9675             ALOGE("%s: Support channel start failed", __func__);
   9676             return rc;
   9677         }
   9678     }
   9679     if (mRawDumpChannel) {
   9680         rc = mRawDumpChannel->start();
   9681         if (rc < 0) {
   9682             ALOGE("%s: RAW dump channel start failed", __func__);
   9683             return rc;
   9684         }
   9685     }
   9686 
   9687     CDBG("%s:%d All channels started", __func__, __LINE__);
   9688     return rc;
   9689 }
   9690 
/*===========================================================================
 * FUNCTION   : notifyErrorForPendingRequests
 *
 * DESCRIPTION: This function sends error for all the pending requests/buffers
 *
 * PARAMETERS : None
 *
 * RETURN     : Error codes
 *              NO_ERROR on success
 *
 *==========================================================================*/
int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
{
    int32_t rc = NO_ERROR;
    unsigned int frameNum = 0;
    camera3_capture_result_t result;
    camera3_stream_buffer_t *pStream_Buf = NULL;
    // Buckets pending buffers by frame number so that one notify +
    // process_capture_result pair can be issued per frame.
    FlushMap flushMap;

    memset(&result, 0, sizeof(camera3_capture_result_t));

    // Determine the oldest outstanding request. Buffers with a smaller
    // frame number have already had their result metadata delivered, so
    // they only need CAMERA3_MSG_ERROR_BUFFER; buffers at or beyond it are
    // failed below with CAMERA3_MSG_ERROR_REQUEST instead.
    if (mPendingRequestsList.size() > 0) {
        pendingRequestIterator i = mPendingRequestsList.begin();
        frameNum = i->frame_number;
    } else {
        /* There might still be pending buffers even though there are
         no pending requests. Setting the frameNum to MAX so that
         all the buffers with smaller frame numbers are returned */
        frameNum = UINT_MAX;
    }

    CDBG_HIGH("%s: Oldest frame num on  mPendingRequestsList = %d",
      __func__, frameNum);

    // Go through the pending buffers and group them depending
    // on frame number
    for (List<PendingBufferInfo>::iterator k =
            mPendingBuffersMap.mPendingBufferList.begin();
            k != mPendingBuffersMap.mPendingBufferList.end();) {

        if (k->frame_number < frameNum) {
            ssize_t idx = flushMap.indexOfKey(k->frame_number);
            if (idx == NAME_NOT_FOUND) {
                // First buffer seen for this frame: start a new bucket.
                Vector<PendingBufferInfo> pending;
                pending.add(*k);
                flushMap.add(k->frame_number, pending);
            } else {
                // Frame already has a bucket: append to it.
                Vector<PendingBufferInfo> &pending =
                        flushMap.editValueFor(k->frame_number);
                pending.add(*k);
            }

            // The buffer now lives in flushMap; remove it from the pending
            // map so the bookkeeping count stays consistent.
            mPendingBuffersMap.num_buffers--;
            k = mPendingBuffersMap.mPendingBufferList.erase(k);
        } else {
            k++;
        }
    }

    // Phase 1: for frames whose metadata was already sent, return every
    // buffer with an ERROR_BUFFER notify followed by a metadata-less
    // capture result marking each buffer CAMERA3_BUFFER_STATUS_ERROR.
    for (size_t iFlush = 0; iFlush < flushMap.size(); iFlush++) {
        uint32_t frame_number = flushMap.keyAt(iFlush);
        const Vector<PendingBufferInfo> &pending = flushMap.valueAt(iFlush);

        // Send Error notify to frameworks for each buffer for which
        // metadata buffer is already sent
        CDBG_HIGH("%s: Sending ERROR BUFFER for frame %d number of buffer %d",
          __func__, frame_number, pending.size());

        pStream_Buf = new camera3_stream_buffer_t[pending.size()];
        // NOTE(review): on Android, operator new[] aborts or throws rather
        // than returning NULL, so this check is effectively dead code.
        if (NULL == pStream_Buf) {
            ALOGE("%s: No memory for pending buffers array", __func__);
            return NO_MEMORY;
        }
        memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*pending.size());

        for (size_t j = 0; j < pending.size(); j++) {
            const PendingBufferInfo &info = pending.itemAt(j);
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
            notify_msg.message.error.error_stream = info.stream;
            notify_msg.message.error.frame_number = frame_number;
            // -1 means "no fence": the buffer is handed back immediately.
            pStream_Buf[j].acquire_fence = -1;
            pStream_Buf[j].release_fence = -1;
            pStream_Buf[j].buffer = info.buffer;
            pStream_Buf[j].status = CAMERA3_BUFFER_STATUS_ERROR;
            pStream_Buf[j].stream = info.stream;
            mCallbackOps->notify(mCallbackOps, &notify_msg);
            CDBG_HIGH("%s: notify frame_number = %d stream %p", __func__,
                    frame_number, info.stream);
        }

        // result.result == NULL: metadata was already delivered for these
        // frames, only the errored buffers go back here.
        result.result = NULL;
        result.frame_number = frame_number;
        result.num_output_buffers = (uint32_t)pending.size();
        result.output_buffers = pStream_Buf;
        mCallbackOps->process_capture_result(mCallbackOps, &result);

        delete [] pStream_Buf;
    }

    CDBG_HIGH("%s:Sending ERROR REQUEST for all pending requests", __func__);

    // Phase 2: re-bucket all remaining pending buffers (frame numbers at or
    // beyond the oldest pending request) so each frame can be failed as a
    // whole request.
    flushMap.clear();
    for (List<PendingBufferInfo>::iterator k =
            mPendingBuffersMap.mPendingBufferList.begin();
            k != mPendingBuffersMap.mPendingBufferList.end();) {
        ssize_t idx = flushMap.indexOfKey(k->frame_number);
        if (idx == NAME_NOT_FOUND) {
            Vector<PendingBufferInfo> pending;
            pending.add(*k);
            flushMap.add(k->frame_number, pending);
        } else {
            Vector<PendingBufferInfo> &pending =
                    flushMap.editValueFor(k->frame_number);
            pending.add(*k);
        }

        mPendingBuffersMap.num_buffers--;
        k = mPendingBuffersMap.mPendingBufferList.erase(k);
    }

    pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning

    // Go through the pending requests info and send error request to framework
    for (size_t iFlush = 0; iFlush < flushMap.size(); iFlush++) {
        uint32_t frame_number = flushMap.keyAt(iFlush);
        const Vector<PendingBufferInfo> &pending = flushMap.valueAt(iFlush);
        CDBG_HIGH("%s:Sending ERROR REQUEST for frame %d",
              __func__, frame_number);

        // Notify the framework that the entire request failed
        // (ERROR_REQUEST => no metadata will ever arrive for this frame).
        camera3_notify_msg_t notify_msg;
        memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
        notify_msg.type = CAMERA3_MSG_ERROR;
        notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
        notify_msg.message.error.error_stream = NULL;
        notify_msg.message.error.frame_number = frame_number;
        mCallbackOps->notify(mCallbackOps, &notify_msg);

        pStream_Buf = new camera3_stream_buffer_t[pending.size()];
        // NOTE(review): same dead NULL check as above; also note the phase-1
        // results already sent are not undone if we bail out here.
        if (NULL == pStream_Buf) {
            ALOGE("%s: No memory for pending buffers array", __func__);
            return NO_MEMORY;
        }
        memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*pending.size());

        for (size_t j = 0; j < pending.size(); j++) {
            const PendingBufferInfo &info = pending.itemAt(j);
            pStream_Buf[j].acquire_fence = -1;
            pStream_Buf[j].release_fence = -1;
            pStream_Buf[j].buffer = info.buffer;
            pStream_Buf[j].status = CAMERA3_BUFFER_STATUS_ERROR;
            pStream_Buf[j].stream = info.stream;
        }

        // NOTE(review): 'i' is advanced in lockstep with the flushMap
        // entries and dereferenced without an end() check. This assumes
        // every bucket here has a matching entry in mPendingRequestsList
        // (and that their frame numbers line up) -- verify against callers.
        result.input_buffer = i->input_buffer;
        result.num_output_buffers = (uint32_t)pending.size();
        result.output_buffers = pStream_Buf;
        result.result = NULL;
        result.frame_number = frame_number;
        mCallbackOps->process_capture_result(mCallbackOps, &result);
        delete [] pStream_Buf;
        i = erasePendingRequest(i);
    }

    /* Reset pending frame Drop list and requests list */
    mPendingFrameDropList.clear();

    // Both tracking structures were fully drained above; clear the residue.
    flushMap.clear();
    mPendingBuffersMap.num_buffers = 0;
    mPendingBuffersMap.mPendingBufferList.clear();
    mPendingReprocessResultList.clear();
    CDBG_HIGH("%s: Cleared all the pending buffers ", __func__);

    return rc;
}
   9869 
   9870 bool QCamera3HardwareInterface::isOnEncoder(
   9871         const cam_dimension_t max_viewfinder_size,
   9872         uint32_t width, uint32_t height)
   9873 {
   9874     return (width > (uint32_t)max_viewfinder_size.width ||
   9875             height > (uint32_t)max_viewfinder_size.height);
   9876 }
   9877 
   9878 /*===========================================================================
   9879  * FUNCTION   : setBundleInfo
   9880  *
   9881  * DESCRIPTION: Set bundle info for all streams that are bundle.
   9882  *
   9883  * PARAMETERS : None
   9884  *
   9885  * RETURN     : NO_ERROR on success
   9886  *              Error codes on failure
   9887  *==========================================================================*/
   9888 int32_t QCamera3HardwareInterface::setBundleInfo()
   9889 {
   9890     int32_t rc = NO_ERROR;
   9891 
   9892     if (mChannelHandle) {
   9893         cam_bundle_config_t bundleInfo;
   9894         memset(&bundleInfo, 0, sizeof(bundleInfo));
   9895         rc = mCameraHandle->ops->get_bundle_info(
   9896                 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
   9897         if (rc != NO_ERROR) {
   9898             ALOGE("%s: get_bundle_info failed", __func__);
   9899             return rc;
   9900         }
   9901         if (mAnalysisChannel) {
   9902             mAnalysisChannel->setBundleInfo(bundleInfo);
   9903         }
   9904         if (mSupportChannel) {
   9905             mSupportChannel->setBundleInfo(bundleInfo);
   9906         }
   9907         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
   9908                 it != mStreamInfo.end(); it++) {
   9909             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
   9910             channel->setBundleInfo(bundleInfo);
   9911         }
   9912         if (mRawDumpChannel) {
   9913             mRawDumpChannel->setBundleInfo(bundleInfo);
   9914         }
   9915     }
   9916 
   9917     return rc;
   9918 }
   9919 
   9920 }; //end namespace qcamera
   9921