      1 /*
      2 **
      3 ** Copyright 2008, The Android Open Source Project
      4 ** Copyright 2012, Samsung Electronics Co. LTD
      5 **
      6 ** Licensed under the Apache License, Version 2.0 (the "License");
      7 ** you may not use this file except in compliance with the License.
      8 ** You may obtain a copy of the License at
      9 **
     10 **     http://www.apache.org/licenses/LICENSE-2.0
     11 **
     12 ** Unless required by applicable law or agreed to in writing, software
     13 ** distributed under the License is distributed on an "AS IS" BASIS,
     14 ** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     15 ** See the License for the specific language governing permissions and
     16 ** limitations under the License.
     17 */
     18 
     19 /*!
     20  * \file      ExynosCameraHWInterface2.cpp
     21  * \brief     source file for Android Camera API 2.0 HAL
      22  * \author    Sungjoong Kang(sj3.kang@samsung.com)
     23  * \date      2012/07/10
     24  *
     25  * <b>Revision History: </b>
      26  * - 2012/05/31 : Sungjoong Kang(sj3.kang@samsung.com) \n
     27  *   Initial Release
     28  *
      29  * - 2012/07/10 : Sungjoong Kang(sj3.kang@samsung.com) \n
     30  *   2nd Release
     31  *
     32  */
     33 
     34 //#define LOG_NDEBUG 0
     35 #define LOG_TAG "ExynosCameraHAL2"
     36 #include <utils/Log.h>
     37 #include <math.h>
     38 
     39 #include "ExynosCameraHWInterface2.h"
     40 #include "exynos_format.h"
     41 
     42 namespace android {
     43 
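         /* Debug helper: dumps 'size' bytes from 'buf' into the file 'fname', retrying
          * write() until everything is written or an error occurs.
          * Example (hypothetical path): m_savePostView("/data/postview.yuv", buf, size); */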
     44 void m_savePostView(const char *fname, uint8_t *buf, uint32_t size)
     45 {
     46     int nw;
     47     int cnt = 0;
     48     uint32_t written = 0;
     49 
      50     ALOGV("opening file [%s], address[%p], size(%d)", fname, (void *)buf, size);
     51     int fd = open(fname, O_RDWR | O_CREAT, 0644);
     52     if (fd < 0) {
     53         ALOGE("failed to create file [%s]: %s", fname, strerror(errno));
     54         return;
     55     }
     56 
     57     ALOGV("writing %d bytes to file [%s]", size, fname);
     58     while (written < size) {
     59         nw = ::write(fd, buf + written, size - written);
     60         if (nw < 0) {
      61             ALOGE("failed to write to file [%s] after %d bytes: %s", fname, written, strerror(errno));
     62             break;
     63         }
     64         written += nw;
     65         cnt++;
     66     }
     67     ALOGV("done writing %d bytes to file [%s] in %d passes",size, fname, cnt);
     68     ::close(fd);
     69 }
     70 
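         /* Returns the number of bits per pixel for a V4L2 pixel format, e.g.
          * get_pixel_depth(V4L2_PIX_FMT_NV12) == 12, so one NV12 frame occupies
          * width * height * 12 / 8 bytes. */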
     71 int get_pixel_depth(uint32_t fmt)
     72 {
     73     int depth = 0;
     74 
     75     switch (fmt) {
     76     case V4L2_PIX_FMT_JPEG:
     77         depth = 8;
     78         break;
     79 
     80     case V4L2_PIX_FMT_NV12:
     81     case V4L2_PIX_FMT_NV21:
     82     case V4L2_PIX_FMT_YUV420:
     83     case V4L2_PIX_FMT_YVU420M:
     84     case V4L2_PIX_FMT_NV12M:
     85     case V4L2_PIX_FMT_NV12MT:
     86         depth = 12;
     87         break;
     88 
     89     case V4L2_PIX_FMT_RGB565:
     90     case V4L2_PIX_FMT_YUYV:
     91     case V4L2_PIX_FMT_YVYU:
     92     case V4L2_PIX_FMT_UYVY:
     93     case V4L2_PIX_FMT_VYUY:
     94     case V4L2_PIX_FMT_NV16:
     95     case V4L2_PIX_FMT_NV61:
     96     case V4L2_PIX_FMT_YUV422P:
     97     case V4L2_PIX_FMT_SBGGR10:
     98     case V4L2_PIX_FMT_SBGGR12:
     99     case V4L2_PIX_FMT_SBGGR16:
    100         depth = 16;
    101         break;
    102 
    103     case V4L2_PIX_FMT_RGB32:
    104         depth = 32;
    105         break;
    106     default:
    107         ALOGE("Get depth failed(format : %d)", fmt);
    108         break;
    109     }
    110 
    111     return depth;
    112 }
    113 
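         /* Applies the node's width/height/format to its video node via VIDIOC_S_FMT
          * (multi-plane API). Returns the driver's result, negative on failure. */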
    114 int cam_int_s_fmt(node_info_t *node)
    115 {
    116     struct v4l2_format v4l2_fmt;
    117     unsigned int framesize;
    118     int ret;
    119 
    120     memset(&v4l2_fmt, 0, sizeof(struct v4l2_format));
    121 
    122     v4l2_fmt.type = node->type;
    123     framesize = (node->width * node->height * get_pixel_depth(node->format)) / 8;
    124 
    125     if (node->planes >= 1) {
    126         v4l2_fmt.fmt.pix_mp.width       = node->width;
    127         v4l2_fmt.fmt.pix_mp.height      = node->height;
    128         v4l2_fmt.fmt.pix_mp.pixelformat = node->format;
    129         v4l2_fmt.fmt.pix_mp.field       = V4L2_FIELD_ANY;
    130     } else {
     131         ALOGE("%s: S_FMT, invalid number of planes (%d)", __FUNCTION__, node->planes);
    132     }
    133 
    134     /* Set up for capture */
    135     ret = exynos_v4l2_s_fmt(node->fd, &v4l2_fmt);
    136 
    137     if (ret < 0)
    138         ALOGE("%s: exynos_v4l2_s_fmt fail (%d)",__FUNCTION__, ret);
    139 
    140 
    141     return ret;
    142 }
    143 
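         /* Asks the driver for 'node->buffers' buffers via VIDIOC_REQBUFS and returns
          * the buffer count the driver actually granted (req.count), which may be smaller. */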
    144 int cam_int_reqbufs(node_info_t *node)
    145 {
    146     struct v4l2_requestbuffers req;
    147     int ret;
    148 
             memset(&req, 0, sizeof(req)); /* zero reserved fields before VIDIOC_REQBUFS */
     149     req.count = node->buffers;
    150     req.type = node->type;
    151     req.memory = node->memory;
    152 
    153     ret = exynos_v4l2_reqbufs(node->fd, &req);
    154 
    155     if (ret < 0)
    156         ALOGE("%s: VIDIOC_REQBUFS (fd:%d) failed (%d)",__FUNCTION__,node->fd, ret);
    157 
    158     return req.count;
    159 }
    160 
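         /* Queues buffer 'index' of the node to the driver (VIDIOC_QBUF), handing over
          * the dma-buf fd and length of every plane. */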
    161 int cam_int_qbuf(node_info_t *node, int index)
    162 {
    163     struct v4l2_buffer v4l2_buf;
    164     struct v4l2_plane planes[VIDEO_MAX_PLANES];
    165     int i;
    166     int ret = 0;
    167 
             memset(&v4l2_buf, 0, sizeof(v4l2_buf)); /* clear flags/reserved before VIDIOC_QBUF */
             memset(planes, 0, sizeof(planes));
     168     v4l2_buf.m.planes   = planes;
    169     v4l2_buf.type       = node->type;
    170     v4l2_buf.memory     = node->memory;
    171     v4l2_buf.index      = index;
    172     v4l2_buf.length     = node->planes;
    173 
    174     for(i = 0; i < node->planes; i++){
    175         v4l2_buf.m.planes[i].m.fd = (int)(node->buffer[index].fd.extFd[i]);
    176         v4l2_buf.m.planes[i].length  = (unsigned long)(node->buffer[index].size.extS[i]);
    177     }
    178 
    179     ret = exynos_v4l2_qbuf(node->fd, &v4l2_buf);
    180 
    181     if (ret < 0)
    182         ALOGE("%s: cam_int_qbuf failed (index:%d)(ret:%d)",__FUNCTION__, index, ret);
    183 
    184     return ret;
    185 }
    186 
    187 int cam_int_streamon(node_info_t *node)
    188 {
    189     enum v4l2_buf_type type = node->type;
    190     int ret;
    191 
    192 
    193     ret = exynos_v4l2_streamon(node->fd, type);
    194 
    195     if (ret < 0)
    196         ALOGE("%s: VIDIOC_STREAMON failed [%d] (%d)",__FUNCTION__, node->fd,ret);
    197 
    198     ALOGV("On streaming I/O... ... fd(%d)", node->fd);
    199 
    200     return ret;
    201 }
    202 
    203 int cam_int_streamoff(node_info_t *node)
    204 {
    205     enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    206     int ret;
    207 
    208 
    209     ALOGV("Off streaming I/O... fd(%d)", node->fd);
    210     ret = exynos_v4l2_streamoff(node->fd, type);
    211 
    212     if (ret < 0)
    213         ALOGE("%s: VIDIOC_STREAMOFF failed (%d)",__FUNCTION__, ret);
    214 
    215     return ret;
    216 }
    217 
    218 int isp_int_streamoff(node_info_t *node)
    219 {
    220     enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    221     int ret;
    222 
    223     ALOGV("Off streaming I/O... fd(%d)", node->fd);
    224     ret = exynos_v4l2_streamoff(node->fd, type);
    225 
    226     if (ret < 0)
    227         ALOGE("%s: VIDIOC_STREAMOFF failed (%d)",__FUNCTION__, ret);
    228 
    229     return ret;
    230 }
    231 
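         /* Dequeues a buffer from the node (VIDIOC_DQBUF) and returns its index;
          * the overload below lets the caller override the plane count. */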
    232 int cam_int_dqbuf(node_info_t *node)
    233 {
    234     struct v4l2_buffer v4l2_buf;
    235     struct v4l2_plane planes[VIDEO_MAX_PLANES];
    236     int ret;
    237 
             memset(&v4l2_buf, 0, sizeof(v4l2_buf)); /* clear flags/reserved before VIDIOC_DQBUF */
             memset(planes, 0, sizeof(planes));
     238     v4l2_buf.type       = node->type;
    239     v4l2_buf.memory     = node->memory;
    240     v4l2_buf.m.planes   = planes;
    241     v4l2_buf.length     = node->planes;
    242 
    243     ret = exynos_v4l2_dqbuf(node->fd, &v4l2_buf);
    244     if (ret < 0)
    245         ALOGE("%s: VIDIOC_DQBUF failed (%d)",__FUNCTION__, ret);
    246 
    247     return v4l2_buf.index;
    248 }
    249 
    250 int cam_int_dqbuf(node_info_t *node, int num_plane)
    251 {
    252     struct v4l2_buffer v4l2_buf;
    253     struct v4l2_plane planes[VIDEO_MAX_PLANES];
    254     int ret;
    255 
             memset(&v4l2_buf, 0, sizeof(v4l2_buf)); /* clear flags/reserved before VIDIOC_DQBUF */
             memset(planes, 0, sizeof(planes));
     256     v4l2_buf.type       = node->type;
    257     v4l2_buf.memory     = node->memory;
    258     v4l2_buf.m.planes   = planes;
    259     v4l2_buf.length     = num_plane;
    260 
    261     ret = exynos_v4l2_dqbuf(node->fd, &v4l2_buf);
    262     if (ret < 0)
    263         ALOGE("%s: VIDIOC_DQBUF failed (%d)",__FUNCTION__, ret);
    264 
    265     return v4l2_buf.index;
    266 }
    267 
    268 int cam_int_s_input(node_info_t *node, int index)
    269 {
    270     int ret;
    271 
    272     ret = exynos_v4l2_s_input(node->fd, index);
    273     if (ret < 0)
    274         ALOGE("%s: VIDIOC_S_INPUT failed (%d)",__FUNCTION__, ret);
    275 
    276     return ret;
    277 }
    278 
    279 
    280 gralloc_module_t const* ExynosCameraHWInterface2::m_grallocHal;
    281 
    282 RequestManager::RequestManager(SignalDrivenThread* main_thread):
    283     m_lastAeMode(0),
    284     m_lastAaMode(0),
    285     m_lastAwbMode(0),
    286     m_vdisBubbleEn(false),
    287     m_lastAeComp(0),
    288     m_lastCompletedFrameCnt(-1)
    289 {
    290     m_metadataConverter = new MetadataConverter;
    291     m_mainThread = main_thread;
    292     ResetEntry();
    293     m_sensorPipelineSkipCnt = 0;
    294     return;
    295 }
    296 
    297 RequestManager::~RequestManager()
    298 {
    299     ALOGV("%s", __FUNCTION__);
    300     if (m_metadataConverter != NULL) {
    301         delete m_metadataConverter;
    302         m_metadataConverter = NULL;
    303     }
    304 
    305     releaseSensorQ();
    306     return;
    307 }
    308 
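         /* Clears every slot of the circular request table and resets the insertion,
          * processing and frame-output indices. */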
    309 void RequestManager::ResetEntry()
    310 {
    311     Mutex::Autolock lock(m_requestMutex);
    312     Mutex::Autolock lock2(m_numOfEntriesLock);
    313     for (int i=0 ; i<NUM_MAX_REQUEST_MGR_ENTRY; i++) {
    314         memset(&(entries[i]), 0x00, sizeof(request_manager_entry_t));
    315         entries[i].internal_shot.shot.ctl.request.frameCount = -1;
    316     }
    317     m_numOfEntries = 0;
    318     m_entryInsertionIndex = -1;
    319     m_entryProcessingIndex = -1;
    320     m_entryFrameOutputIndex = -1;
    321 }
    322 
    323 int RequestManager::GetNumEntries()
    324 {
    325     Mutex::Autolock lock(m_numOfEntriesLock);
    326     return m_numOfEntries;
    327 }
    328 
    329 void RequestManager::SetDefaultParameters(int cropX)
    330 {
    331     m_cropX = cropX;
    332 }
    333 
    334 bool RequestManager::IsRequestQueueFull()
    335 {
    336     Mutex::Autolock lock(m_requestMutex);
    337     Mutex::Autolock lock2(m_numOfEntriesLock);
    338     if (m_numOfEntries>=NUM_MAX_REQUEST_MGR_ENTRY)
    339         return true;
    340     else
    341         return false;
    342 }
    343 
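         /* Inserts a service request into the circular table: converts the incoming
          * camera_metadata_t to an internal camera2_shot_ext, counts the requested
          * SCP/SCC output streams, and reports the AF mode and regions to the caller. */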
    344 void RequestManager::RegisterRequest(camera_metadata_t * new_request, int * afMode, uint32_t * afRegion)
    345 {
    346     ALOGV("DEBUG(%s):", __FUNCTION__);
    347 
    348     Mutex::Autolock lock(m_requestMutex);
    349     Mutex::Autolock lock2(m_numOfEntriesLock);
    350 
    351     request_manager_entry * newEntry = NULL;
    352     int newInsertionIndex = GetNextIndex(m_entryInsertionIndex);
    353     ALOGV("DEBUG(%s): got lock, new insertIndex(%d), cnt before reg(%d)", __FUNCTION__,newInsertionIndex, m_numOfEntries );
    354 
    355 
    356     newEntry = &(entries[newInsertionIndex]);
    357 
    358     if (newEntry->status!=EMPTY) {
    359         ALOGV("DEBUG(%s): Circular buffer abnormal ", __FUNCTION__);
    360         return;
    361     }
    362     newEntry->status = REGISTERED;
    363     newEntry->original_request = new_request;
    364     memset(&(newEntry->internal_shot), 0, sizeof(struct camera2_shot_ext));
    365     m_metadataConverter->ToInternalShot(new_request, &(newEntry->internal_shot));
    366     newEntry->output_stream_count = 0;
    367     if (newEntry->internal_shot.shot.ctl.request.outputStreams[0] & MASK_OUTPUT_SCP)
    368         newEntry->output_stream_count++;
    369 
    370     if (newEntry->internal_shot.shot.ctl.request.outputStreams[0] & MASK_OUTPUT_SCC)
    371         newEntry->output_stream_count++;
    372 
    373     m_numOfEntries++;
    374     m_entryInsertionIndex = newInsertionIndex;
    375 
    376 
    377     *afMode = (int)(newEntry->internal_shot.shot.ctl.aa.afMode);
    378     afRegion[0] = newEntry->internal_shot.shot.ctl.aa.afRegions[0];
    379     afRegion[1] = newEntry->internal_shot.shot.ctl.aa.afRegions[1];
    380     afRegion[2] = newEntry->internal_shot.shot.ctl.aa.afRegions[2];
    381     afRegion[3] = newEntry->internal_shot.shot.ctl.aa.afRegions[3];
    382     ALOGV("## RegisterReq DONE num(%d), insert(%d), processing(%d), frame(%d), (frameCnt(%d))",
    383     m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex, newEntry->internal_shot.shot.ctl.request.frameCount);
    384 }
    385 
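         /* Retires the oldest COMPLETED entry: returns the original camera_metadata_t
          * to the caller, clears the slot, and checks whether the following entry has
          * already completed as well. */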
    386 void RequestManager::DeregisterRequest(camera_metadata_t ** deregistered_request)
    387 {
    388     ALOGV("DEBUG(%s):", __FUNCTION__);
    389     int frame_index;
    390     request_manager_entry * currentEntry;
    391 
    392     Mutex::Autolock lock(m_requestMutex);
    393     Mutex::Autolock lock2(m_numOfEntriesLock);
    394 
    395     frame_index = GetCompletedIndex();
    396     currentEntry =  &(entries[frame_index]);
    397     if (currentEntry->status != COMPLETED) {
    398         CAM_LOGD("DBG(%s): Circular buffer abnormal. processing(%d), frame(%d), status(%d) ", __FUNCTION__,
    399                        m_entryProcessingIndex, frame_index,(int)(currentEntry->status));
    400         return;
    401     }
    402     if (deregistered_request)  *deregistered_request = currentEntry->original_request;
    403 
    404     m_lastCompletedFrameCnt = currentEntry->internal_shot.shot.ctl.request.frameCount;
    405 
    406     currentEntry->status = EMPTY;
    407     currentEntry->original_request = NULL;
    408     memset(&(currentEntry->internal_shot), 0, sizeof(struct camera2_shot_ext));
    409     currentEntry->internal_shot.shot.ctl.request.frameCount = -1;
    410     currentEntry->output_stream_count = 0;
    411     m_numOfEntries--;
    412     ALOGV("## DeRegistReq DONE num(%d), insert(%d), processing(%d), frame(%d)",
    413      m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);
    414 
    415     CheckCompleted(GetNextIndex(frame_index));
    416     return;
    417 }
    418 
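         /* Builds the result metadata for the most recently completed entry, injecting
          * the current AF state. Returns false if that entry is not yet COMPLETED. */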
    419 bool RequestManager::PrepareFrame(size_t* num_entries, size_t* frame_size,
    420                 camera_metadata_t ** prepared_frame, int afState)
    421 {
    422     ALOGV("DEBUG(%s):", __FUNCTION__);
    423     Mutex::Autolock lock(m_requestMutex);
    424     status_t res = NO_ERROR;
    425     int tempFrameOutputIndex = GetCompletedIndex();
    426     request_manager_entry * currentEntry =  &(entries[tempFrameOutputIndex]);
    427     ALOGV("DEBUG(%s): processing(%d), frameOut(%d), insert(%d) recentlycompleted(%d)", __FUNCTION__,
    428         m_entryProcessingIndex, m_entryFrameOutputIndex, m_entryInsertionIndex, m_completedIndex);
    429 
    430     if (currentEntry->status != COMPLETED) {
    431         ALOGV("DBG(%s): Circular buffer abnormal status(%d)", __FUNCTION__, (int)(currentEntry->status));
    432 
    433         return false;
    434     }
    435     m_entryFrameOutputIndex = tempFrameOutputIndex;
    436     m_tempFrameMetadata = place_camera_metadata(m_tempFrameMetadataBuf, 2000, 35, 500); //estimated
    437     add_camera_metadata_entry(m_tempFrameMetadata, ANDROID_CONTROL_AF_STATE, &afState, 1);
    438     res = m_metadataConverter->ToDynamicMetadata(&(currentEntry->internal_shot),
    439                 m_tempFrameMetadata);
    440     if (res!=NO_ERROR) {
    441         ALOGE("ERROR(%s): ToDynamicMetadata (%d) ", __FUNCTION__, res);
    442         return false;
    443     }
    444     *num_entries = get_camera_metadata_entry_count(m_tempFrameMetadata);
    445     *frame_size = get_camera_metadata_size(m_tempFrameMetadata);
    446     *prepared_frame = m_tempFrameMetadata;
    447     ALOGV("## PrepareFrame DONE: frameOut(%d) frameCnt-req(%d) timestamp(%lld)", m_entryFrameOutputIndex,
    448         currentEntry->internal_shot.shot.ctl.request.frameCount, currentEntry->internal_shot.shot.dm.sensor.timeStamp);
    449     // Dump();
    450     return true;
    451 }
    452 
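         /* Advances the processing index to the next REGISTERED entry and fills the
          * shot extension embedded in the sensor buffer (frame count, bypass flags,
          * requested output streams, crop region). Returns the new index, or -1. */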
    453 int RequestManager::MarkProcessingRequest(ExynosBuffer* buf)
    454 {
    455     struct camera2_shot_ext * shot_ext;
    456     struct camera2_shot_ext * request_shot;
    457     int targetStreamIndex = 0;
    458     request_manager_entry * newEntry = NULL;
    459     static int count = 0;
    460 
    461     Mutex::Autolock lock(m_requestMutex);
    462     Mutex::Autolock lock2(m_numOfEntriesLock);
    463     if (m_numOfEntries == 0)  {
    464         CAM_LOGD("DEBUG(%s): Request Manager Empty ", __FUNCTION__);
    465         return -1;
    466     }
    467 
    468     if ((m_entryProcessingIndex == m_entryInsertionIndex)
    469         && (entries[m_entryProcessingIndex].status == REQUESTED || entries[m_entryProcessingIndex].status == CAPTURED)) {
    470         ALOGV("## MarkProcReq skipping(request underrun) -  num(%d), insert(%d), processing(%d), frame(%d)",
    471          m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);
    472         return -1;
    473     }
    474 
    475     int newProcessingIndex = GetNextIndex(m_entryProcessingIndex);
    476     ALOGV("DEBUG(%s): index(%d)", __FUNCTION__, newProcessingIndex);
    477 
    478     newEntry = &(entries[newProcessingIndex]);
    479     request_shot = &(newEntry->internal_shot);
    480     if (newEntry->status != REGISTERED) {
    481         CAM_LOGD("DEBUG(%s)(%d): Circular buffer abnormal, numOfEntries(%d), status(%d)", __FUNCTION__, newProcessingIndex, m_numOfEntries, newEntry->status);
    482         for (int i = 0; i < NUM_MAX_REQUEST_MGR_ENTRY; i++) {
     483                 CAM_LOGD("DBG: entries[%d].stream output cnt = %d, framecnt(%d)", i, entries[i].output_stream_count, entries[i].internal_shot.shot.ctl.request.frameCount);
    484         }
    485         return -1;
    486     }
    487 
    488     newEntry->status = REQUESTED;
    489 
    490     shot_ext = (struct camera2_shot_ext *)buf->virt.extP[1];
    491 
    492     memset(shot_ext, 0x00, sizeof(struct camera2_shot_ext));
    493     shot_ext->shot.ctl.request.frameCount = request_shot->shot.ctl.request.frameCount;
    494     shot_ext->request_sensor = 1;
    495     shot_ext->dis_bypass = 1;
    496     shot_ext->dnr_bypass = 1;
    497     shot_ext->fd_bypass = 1;
    498     shot_ext->setfile = 0;
    499 
    500     targetStreamIndex = newEntry->internal_shot.shot.ctl.request.outputStreams[0];
    501     shot_ext->shot.ctl.request.outputStreams[0] = targetStreamIndex;
    502     if (targetStreamIndex & MASK_OUTPUT_SCP)
    503         shot_ext->request_scp = 1;
    504 
    505     if (targetStreamIndex & MASK_OUTPUT_SCC)
    506         shot_ext->request_scc = 1;
    507 
    508     if (shot_ext->shot.ctl.stats.faceDetectMode != FACEDETECT_MODE_OFF)
    509         shot_ext->fd_bypass = 0;
    510 
    511     if (count == 0){
    512         shot_ext->shot.ctl.aa.mode = AA_CONTROL_AUTO;
    513     } else
    514         shot_ext->shot.ctl.aa.mode = AA_CONTROL_NONE;
    515 
    516     count++;
    517     shot_ext->shot.ctl.request.metadataMode = METADATA_MODE_FULL;
    518     shot_ext->shot.ctl.stats.faceDetectMode = FACEDETECT_MODE_FULL;
    519     shot_ext->shot.magicNumber = 0x23456789;
    520     shot_ext->shot.ctl.sensor.exposureTime = 0;
    521     shot_ext->shot.ctl.sensor.frameDuration = 33*1000*1000;
    522     shot_ext->shot.ctl.sensor.sensitivity = 0;
    523 
    524 
    525     shot_ext->shot.ctl.scaler.cropRegion[0] = newEntry->internal_shot.shot.ctl.scaler.cropRegion[0];
    526     shot_ext->shot.ctl.scaler.cropRegion[1] = newEntry->internal_shot.shot.ctl.scaler.cropRegion[1];
    527     shot_ext->shot.ctl.scaler.cropRegion[2] = newEntry->internal_shot.shot.ctl.scaler.cropRegion[2];
    528 
    529     m_entryProcessingIndex = newProcessingIndex;
    530     return newProcessingIndex;
    531 }
    532 
    533 void RequestManager::NotifyStreamOutput(int frameCnt)
    534 {
    535     int index;
    536 
    537     Mutex::Autolock lock(m_requestMutex);
    538     ALOGV("DEBUG(%s): frameCnt(%d)", __FUNCTION__, frameCnt);
    539 
    540     index = FindEntryIndexByFrameCnt(frameCnt);
    541     if (index == -1) {
    542         ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
    543         return;
    544     }
    545     ALOGV("DEBUG(%s): frameCnt(%d), last cnt (%d)", __FUNCTION__, frameCnt,   entries[index].output_stream_count);
    546 
    547     entries[index].output_stream_count--;  //TODO : match stream id also
    548     CheckCompleted(index);
    549 }
    550 
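         /* Marks an entry COMPLETED once its metadata is done and all of its output
          * streams were delivered, then wakes the main thread if it is the next frame
          * in sequence. */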
    551 void RequestManager::CheckCompleted(int index)
    552 {
    553     if ((entries[index].status == METADONE || entries[index].status == COMPLETED)
    554         && (entries[index].output_stream_count <= 0)){
    555         ALOGV("(%s): Completed(index:%d)(frameCnt:%d)", __FUNCTION__,
    556                 index, entries[index].internal_shot.shot.ctl.request.frameCount );
    557         entries[index].status = COMPLETED;
    558         if (m_lastCompletedFrameCnt + 1 == entries[index].internal_shot.shot.ctl.request.frameCount)
    559             m_mainThread->SetSignal(SIGNAL_MAIN_STREAM_OUTPUT_DONE);
    560     }
    561 }
    562 
    563 int RequestManager::GetCompletedIndex()
    564 {
    565     return FindEntryIndexByFrameCnt(m_lastCompletedFrameCnt + 1);
    566 }
    567 
    568 void  RequestManager::pushSensorQ(int index)
    569 {
    570     Mutex::Autolock lock(m_requestMutex);
    571     m_sensorQ.push_back(index);
    572 }
    573 
    574 int RequestManager::popSensorQ()
    575 {
    576    List<int>::iterator sensor_token;
    577    int index;
    578 
    579     Mutex::Autolock lock(m_requestMutex);
    580 
    581     if(m_sensorQ.size() == 0)
    582         return -1;
    583 
     584     sensor_token = m_sensorQ.begin(); /* first element; the postfix ++ on the temporary iterator was a no-op */
    585     index = *sensor_token;
    586     m_sensorQ.erase(sensor_token);
    587 
    588     return (index);
    589 }
    590 
    591 void RequestManager::releaseSensorQ()
    592 {
    593     List<int>::iterator r;
    594 
    595     Mutex::Autolock lock(m_requestMutex);
    596     ALOGV("(%s)m_sensorQ.size : %d", __FUNCTION__, m_sensorQ.size());
    597 
    598     while(m_sensorQ.size() > 0){
     599         r = m_sensorQ.begin();
    600         m_sensorQ.erase(r);
    601     }
    602     return;
    603 }
    604 
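         /* Copies the dynamic metadata (camera2_dm) reported back by the ISP into the
          * matching CAPTURED entry while preserving the registered sensor timestamp. */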
    605 void RequestManager::ApplyDynamicMetadata(struct camera2_shot_ext *shot_ext)
    606 {
    607     int index;
    608     struct camera2_shot_ext * request_shot;
    609     nsecs_t timeStamp;
    610     int i;
    611 
    612     Mutex::Autolock lock(m_requestMutex);
    613     ALOGV("DEBUG(%s): frameCnt(%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
    614 
    615     for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
    616         if((entries[i].internal_shot.shot.ctl.request.frameCount == shot_ext->shot.ctl.request.frameCount)
    617             && (entries[i].status == CAPTURED)){
    618             entries[i].status = METADONE;
    619             break;
    620         }
    621     }
    622 
    623     if (i == NUM_MAX_REQUEST_MGR_ENTRY){
    624         ALOGE("[%s] no entry found(framecount:%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
    625         return;
    626     }
    627 
    628     request_manager_entry * newEntry = &(entries[i]);
    629     request_shot = &(newEntry->internal_shot);
    630 
    631     timeStamp = request_shot->shot.dm.sensor.timeStamp;
    632     memcpy(&(request_shot->shot.dm), &(shot_ext->shot.dm), sizeof(struct camera2_dm));
    633     request_shot->shot.dm.sensor.timeStamp = timeStamp;
    634     m_lastTimeStamp = timeStamp;
    635     CheckCompleted(i);
    636 }
    637 
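         /* Prepares the shot extension sent to the ISP for 'frameCnt': copies the
          * request controls, applies AE/AWB lock and flash-mode mapping, and forwards
          * 3A modes only when they changed since the previous frame. */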
    638 void    RequestManager::UpdateIspParameters(struct camera2_shot_ext *shot_ext, int frameCnt, ctl_request_info_t *ctl_info)
    639 {
    640     int index, targetStreamIndex;
    641     struct camera2_shot_ext * request_shot;
    642 
    643     ALOGV("DEBUG(%s): updating info with frameCnt(%d)", __FUNCTION__, frameCnt);
    644     if (frameCnt < 0)
    645         return;
    646 
    647     index = FindEntryIndexByFrameCnt(frameCnt);
    648     if (index == -1) {
    649         ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
    650         return;
    651     }
    652 
    653     request_manager_entry * newEntry = &(entries[index]);
    654     request_shot = &(newEntry->internal_shot);
    655     memcpy(&(shot_ext->shot.ctl), &(request_shot->shot.ctl), sizeof(struct camera2_ctl));
    656     shot_ext->shot.ctl.request.frameCount = frameCnt;
    657     shot_ext->request_sensor = 1;
    658     shot_ext->dis_bypass = 1;
    659     shot_ext->dnr_bypass = 1;
    660     shot_ext->fd_bypass = 1;
    661     shot_ext->drc_bypass = 1;
    662     shot_ext->setfile = 0;
    663 
    664     shot_ext->request_scc = 0;
    665     shot_ext->request_scp = 0;
    666 
    667     shot_ext->isReprocessing = request_shot->isReprocessing;
    668     shot_ext->reprocessInput = request_shot->reprocessInput;
    669     shot_ext->shot.ctl.request.outputStreams[0] = 0;
    670 
    671     shot_ext->awb_mode_dm = request_shot->awb_mode_dm;
    672 
    673     shot_ext->shot.ctl.scaler.cropRegion[0] = request_shot->shot.ctl.scaler.cropRegion[0];
    674     shot_ext->shot.ctl.scaler.cropRegion[1] = request_shot->shot.ctl.scaler.cropRegion[1];
    675     shot_ext->shot.ctl.scaler.cropRegion[2] = request_shot->shot.ctl.scaler.cropRegion[2];
    676 
    677     // mapping flash UI mode from aeMode
    678     if (request_shot->shot.ctl.aa.aeMode >= AA_AEMODE_ON) {
    679         if (request_shot->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_PREVIEW)
    680             ctl_info->flash.i_flashMode = request_shot->shot.ctl.aa.aeMode;
    681         else if (request_shot->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_VIDEO_RECORD)
    682             ctl_info->flash.i_flashMode = request_shot->shot.ctl.aa.aeMode;
    683         request_shot->shot.ctl.aa.aeMode = AA_AEMODE_ON;
    684     }
    685 
    686     // Apply ae/awb lock or unlock
    687     if (request_shot->ae_lock == AEMODE_LOCK_ON)
    688             request_shot->shot.ctl.aa.aeMode = AA_AEMODE_LOCKED;
    689     if (request_shot->awb_lock == AWBMODE_LOCK_ON)
    690             request_shot->shot.ctl.aa.awbMode = AA_AWBMODE_LOCKED;
    691 
    692     if (m_lastAaMode == request_shot->shot.ctl.aa.mode) {
    693         shot_ext->shot.ctl.aa.mode = (enum aa_mode)(0);
    694     }
    695     else {
    696         shot_ext->shot.ctl.aa.mode = request_shot->shot.ctl.aa.mode;
    697         m_lastAaMode = (int)(shot_ext->shot.ctl.aa.mode);
    698     }
    699     if (m_lastAeMode == request_shot->shot.ctl.aa.aeMode) {
    700         shot_ext->shot.ctl.aa.aeMode = (enum aa_aemode)(0);
    701     }
    702     else {
    703         shot_ext->shot.ctl.aa.aeMode = request_shot->shot.ctl.aa.aeMode;
    704         m_lastAeMode = (int)(shot_ext->shot.ctl.aa.aeMode);
    705     }
    706     if (m_lastAwbMode == request_shot->shot.ctl.aa.awbMode) {
    707         shot_ext->shot.ctl.aa.awbMode = (enum aa_awbmode)(0);
    708     }
    709     else {
    710         shot_ext->shot.ctl.aa.awbMode = request_shot->shot.ctl.aa.awbMode;
    711         m_lastAwbMode = (int)(shot_ext->shot.ctl.aa.awbMode);
    712     }
    713     if (m_lastAeComp == request_shot->shot.ctl.aa.aeExpCompensation) {
    714         shot_ext->shot.ctl.aa.aeExpCompensation = 0;
    715     }
    716     else {
    717         shot_ext->shot.ctl.aa.aeExpCompensation = request_shot->shot.ctl.aa.aeExpCompensation;
    718         m_lastAeComp = (int)(shot_ext->shot.ctl.aa.aeExpCompensation);
    719     }
    720 
    721     if (request_shot->shot.ctl.aa.videoStabilizationMode) {
    722         m_vdisBubbleEn = true;
    723         shot_ext->dis_bypass = 0;
    724         shot_ext->dnr_bypass = 0;
    725     } else {
    726         m_vdisBubbleEn = false;
    727         shot_ext->dis_bypass = 1;
    728         shot_ext->dnr_bypass = 1;
    729     }
    730 
    731     shot_ext->shot.ctl.aa.afTrigger = 0;
    732 
    733     targetStreamIndex = newEntry->internal_shot.shot.ctl.request.outputStreams[0];
    734     shot_ext->shot.ctl.request.outputStreams[0] = targetStreamIndex;
    735     if (targetStreamIndex & MASK_OUTPUT_SCP)
    736         shot_ext->request_scp = 1;
    737 
    738     if (targetStreamIndex & MASK_OUTPUT_SCC)
    739         shot_ext->request_scc = 1;
    740 
    741     if (shot_ext->shot.ctl.stats.faceDetectMode != FACEDETECT_MODE_OFF)
    742         shot_ext->fd_bypass = 0;
    743 
    744     shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = request_shot->shot.ctl.aa.aeTargetFpsRange[0];
    745     shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = request_shot->shot.ctl.aa.aeTargetFpsRange[1];
    746 
    747     ALOGV("(%s): applied aa(%d) aemode(%d) expComp(%d), awb(%d) afmode(%d), ", __FUNCTION__,
    748     (int)(shot_ext->shot.ctl.aa.mode), (int)(shot_ext->shot.ctl.aa.aeMode),
    749     (int)(shot_ext->shot.ctl.aa.aeExpCompensation), (int)(shot_ext->shot.ctl.aa.awbMode),
    750     (int)(shot_ext->shot.ctl.aa.afMode));
    751 }
    752 
    753 bool    RequestManager::IsVdisEnable(void)
    754 {
    755         return m_vdisBubbleEn;
    756 }
    757 
    758 int     RequestManager::FindEntryIndexByFrameCnt(int frameCnt)
    759 {
    760     for (int i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
    761         if (entries[i].internal_shot.shot.ctl.request.frameCount == frameCnt)
    762             return i;
    763     }
    764     return -1;
    765 }
    766 
    767 void    RequestManager::RegisterTimestamp(int frameCnt, nsecs_t * frameTime)
    768 {
    769     int index = FindEntryIndexByFrameCnt(frameCnt);
    770     if (index == -1) {
    771         ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
    772         return;
    773     }
    774 
    775     request_manager_entry * currentEntry = &(entries[index]);
    776     if (currentEntry->internal_shot.isReprocessing == 1) {
    777         ALOGV("DEBUG(%s): REPROCESSING : preserving timestamp for reqIndex(%d) frameCnt(%d) (%lld)", __FUNCTION__,
    778         index, frameCnt, currentEntry->internal_shot.shot.dm.sensor.timeStamp);
    779     } else {
    780         currentEntry->internal_shot.shot.dm.sensor.timeStamp = *((uint64_t*)frameTime);
    781         ALOGV("DEBUG(%s): applied timestamp for reqIndex(%d) frameCnt(%d) (%lld)", __FUNCTION__,
    782             index, frameCnt, currentEntry->internal_shot.shot.dm.sensor.timeStamp);
    783     }
    784 }
    785 
    786 
    787 nsecs_t  RequestManager::GetTimestampByFrameCnt(int frameCnt)
    788 {
    789     int index = FindEntryIndexByFrameCnt(frameCnt);
    790     if (index == -1) {
    791         ALOGE("ERR(%s): Cannot find entry for frameCnt(%d) returning saved time(%lld)", __FUNCTION__, frameCnt, m_lastTimeStamp);
    792         return m_lastTimeStamp;
    793     }
    794     else
    795         return GetTimestamp(index);
    796 }
    797 
    798 nsecs_t  RequestManager::GetTimestamp(int index)
    799 {
    800     Mutex::Autolock lock(m_requestMutex);
    801     if (index < 0 || index >= NUM_MAX_REQUEST_MGR_ENTRY) {
    802         ALOGE("ERR(%s): Request entry outside of bounds (%d)", __FUNCTION__, index);
    803         return 0;
    804     }
    805 
    806     request_manager_entry * currentEntry = &(entries[index]);
    807     nsecs_t frameTime = currentEntry->internal_shot.shot.dm.sensor.timeStamp;
    808     if (frameTime == 0) {
    809         ALOGV("DEBUG(%s): timestamp null,  returning saved value", __FUNCTION__);
    810         frameTime = m_lastTimeStamp;
    811     }
    812     ALOGV("DEBUG(%s): Returning timestamp for reqIndex(%d) (%lld)", __FUNCTION__, index, frameTime);
    813     return frameTime;
    814 }
    815 
    816 uint8_t  RequestManager::GetOutputStreamByFrameCnt(int frameCnt)
    817 {
    818     int index = FindEntryIndexByFrameCnt(frameCnt);
    819     if (index == -1) {
    820         ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
    821         return 0;
    822     }
    823     else
    824         return GetOutputStream(index);
    825 }
    826 
    827 uint8_t  RequestManager::GetOutputStream(int index)
    828 {
    829     Mutex::Autolock lock(m_requestMutex);
    830     if (index < 0 || index >= NUM_MAX_REQUEST_MGR_ENTRY) {
    831         ALOGE("ERR(%s): Request entry outside of bounds (%d)", __FUNCTION__, index);
    832         return 0;
    833     }
    834 
    835     request_manager_entry * currentEntry = &(entries[index]);
    836     return currentEntry->internal_shot.shot.ctl.request.outputStreams[0];
    837 }
    838 
    839 camera2_shot_ext *  RequestManager::GetInternalShotExtByFrameCnt(int frameCnt)
    840 {
    841     int index = FindEntryIndexByFrameCnt(frameCnt);
    842     if (index == -1) {
    843         ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
    844         return 0;
    845     }
    846     else
    847         return GetInternalShotExt(index);
    848 }
    849 
    850 camera2_shot_ext *  RequestManager::GetInternalShotExt(int index)
    851 {
    852     Mutex::Autolock lock(m_requestMutex);
    853     if (index < 0 || index >= NUM_MAX_REQUEST_MGR_ENTRY) {
    854         ALOGE("ERR(%s): Request entry outside of bounds (%d)", __FUNCTION__, index);
    855         return 0;
    856     }
    857 
    858     request_manager_entry * currentEntry = &(entries[index]);
    859     return &currentEntry->internal_shot;
    860 }
    861 
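         /* Finds the entry matching the frame count reported by the driver and moves
          * it from REQUESTED to CAPTURED; returns -1 if no such entry exists. */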
    862 int     RequestManager::FindFrameCnt(struct camera2_shot_ext * shot_ext)
    863 {
    864     Mutex::Autolock lock(m_requestMutex);
    865     int i;
    866 
    867     if (m_numOfEntries == 0) {
    868         CAM_LOGD("DBG(%s): No Entry found", __FUNCTION__);
    869         return -1;
    870     }
    871 
    872     for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
    873         if(entries[i].internal_shot.shot.ctl.request.frameCount != shot_ext->shot.ctl.request.frameCount)
    874             continue;
    875 
    876         if (entries[i].status == REQUESTED) {
    877             entries[i].status = CAPTURED;
    878             return entries[i].internal_shot.shot.ctl.request.frameCount;
    879         }
    880         CAM_LOGE("ERR(%s): frameCount(%d), index(%d), status(%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount, i, entries[i].status);
    881 
    882     }
    883     CAM_LOGD("(%s): No Entry found frame count(%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
    884 
    885     return -1;
    886 }
    887 
    888 void     RequestManager::SetInitialSkip(int count)
    889 {
    890     ALOGV("(%s): Pipeline Restarting. setting cnt(%d) - current(%d)", __FUNCTION__, count, m_sensorPipelineSkipCnt);
    891     if (count > m_sensorPipelineSkipCnt)
    892         m_sensorPipelineSkipCnt = count;
    893 }
    894 
    895 int     RequestManager::GetSkipCnt()
    896 {
    897     ALOGV("(%s): skip cnt(%d)", __FUNCTION__, m_sensorPipelineSkipCnt);
    898     if (m_sensorPipelineSkipCnt == 0)
    899         return m_sensorPipelineSkipCnt;
    900     else
    901         return --m_sensorPipelineSkipCnt;
    902 }
    903 
    904 void RequestManager::Dump(void)
    905 {
    906     int i = 0;
    907     request_manager_entry * currentEntry;
    908     Mutex::Autolock lock(m_numOfEntriesLock);
    909     ALOGD("## Dump  totalentry(%d), insert(%d), processing(%d), frame(%d)",
    910     m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);
    911 
    912     for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
    913         currentEntry =  &(entries[i]);
    914         ALOGD("[%2d] status[%d] frameCnt[%3d] numOutput[%d] outstream[0]-%x ", i,
    915         currentEntry->status, currentEntry->internal_shot.shot.ctl.request.frameCount,
    916             currentEntry->output_stream_count,
    917             currentEntry->internal_shot.shot.ctl.request.outputStreams[0]);
    918     }
    919 }
    920 
    921 int     RequestManager::GetNextIndex(int index)
    922 {
    923     index++;
    924     if (index >= NUM_MAX_REQUEST_MGR_ENTRY)
    925         index = 0;
    926 
    927     return index;
    928 }
    929 
    930 int     RequestManager::GetPrevIndex(int index)
    931 {
    932     index--;
    933     if (index < 0)
    934         index = NUM_MAX_REQUEST_MGR_ENTRY-1;
    935 
    936     return index;
    937 }
    938 
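         /* Loads the gralloc module, creates the ion client, the main thread and the
          * RequestManager, then initializes the ISP chain. On success it starts the
          * main/sensor threads, sets up the picture/video CSC engines and clears the
          * flash/AE/AF/scene control state; on failure it closes the opened video nodes. */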
    939 ExynosCameraHWInterface2::ExynosCameraHWInterface2(int cameraId, camera2_device_t *dev, ExynosCamera2 * camera, int *openInvalid):
    940             m_requestQueueOps(NULL),
    941             m_frameQueueOps(NULL),
    942             m_callbackCookie(NULL),
    943             m_numOfRemainingReqInSvc(0),
    944             m_isRequestQueuePending(false),
    945             m_isRequestQueueNull(true),
    946             m_isIspStarted(false),
    947             m_ionCameraClient(0),
    948             m_zoomRatio(1),
    949             m_scp_closing(false),
    950             m_scp_closed(false),
    951             m_afState(HAL_AFSTATE_INACTIVE),
    952             m_afMode(NO_CHANGE),
    953             m_afMode2(NO_CHANGE),
    954             m_vdisBubbleCnt(0),
    955             m_vdisDupFrame(0),
    956             m_IsAfModeUpdateRequired(false),
    957             m_IsAfTriggerRequired(false),
    958             m_IsAfLockRequired(false),
    959             m_serviceAfState(ANDROID_CONTROL_AF_STATE_INACTIVE),
    960             m_sccLocalBufferValid(false),
    961             m_wideAspect(false),
    962             m_scpOutputSignalCnt(0),
    963             m_scpOutputImageCnt(0),
    964             m_afTriggerId(0),
    965             m_afPendingTriggerId(0),
    966             m_afModeWaitingCnt(0),
    967             m_jpegEncodingCount(0),
    968             m_scpForceSuspended(false),
    969             m_halDevice(dev),
    970             m_nightCaptureCnt(0),
    971             m_nightCaptureFrameCnt(0),
    972             m_lastSceneMode(0),
    973             m_cameraId(cameraId),
    974             m_thumbNailW(160),
    975             m_thumbNailH(120)
    976 {
    977     ALOGD("(%s): ENTER", __FUNCTION__);
    978     int ret = 0;
    979     int res = 0;
    980 
    981     m_exynosPictureCSC = NULL;
    982     m_exynosVideoCSC = NULL;
    983 
    984     if (!m_grallocHal) {
    985         ret = hw_get_module(GRALLOC_HARDWARE_MODULE_ID, (const hw_module_t **)&m_grallocHal);
    986         if (ret)
    987             ALOGE("ERR(%s):Fail on loading gralloc HAL", __FUNCTION__);
    988     }
    989 
    990     m_camera2 = camera;
    991     m_ionCameraClient = createIonClient(m_ionCameraClient);
    992     if(m_ionCameraClient == 0)
    993         ALOGE("ERR(%s):Fail on ion_client_create", __FUNCTION__);
    994 
    995 
    996     m_BayerManager = new BayerBufManager();
    997     m_mainThread    = new MainThread(this);
    998     m_requestManager = new RequestManager((SignalDrivenThread*)(m_mainThread.get()));
    999     *openInvalid = InitializeISPChain();
   1000     if (*openInvalid < 0) {
   1001         ALOGD("(%s): ISP chain init failed. exiting", __FUNCTION__);
   1002         // clean process
   1003         // 1. close video nodes
   1004         // SCP
   1005         res = exynos_v4l2_close(m_camera_info.scp.fd);
   1006         if (res != NO_ERROR ) {
   1007             ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
   1008         }
   1009         // SCC
   1010         res = exynos_v4l2_close(m_camera_info.capture.fd);
   1011         if (res != NO_ERROR ) {
   1012             ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
   1013         }
   1014         // Sensor
   1015         res = exynos_v4l2_close(m_camera_info.sensor.fd);
   1016         if (res != NO_ERROR ) {
   1017             ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
   1018         }
   1019         // ISP
   1020         res = exynos_v4l2_close(m_camera_info.isp.fd);
   1021         if (res != NO_ERROR ) {
   1022             ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
   1023         }
   1024     } else {
   1025         m_sensorThread  = new SensorThread(this);
   1026         m_mainThread->Start("MainThread", PRIORITY_DEFAULT, 0);
   1027         m_sensorThread->Start("SensorThread", PRIORITY_DEFAULT, 0);
   1028         ALOGV("DEBUG(%s): created sensorthread ", __FUNCTION__);
   1029 
   1030         for (int i = 0 ; i < STREAM_ID_LAST+1 ; i++)
   1031             m_subStreams[i].type =  SUBSTREAM_TYPE_NONE;
   1032         CSC_METHOD cscMethod = CSC_METHOD_HW;
   1033         m_exynosPictureCSC = csc_init(cscMethod);
   1034         if (m_exynosPictureCSC == NULL)
   1035             ALOGE("ERR(%s): csc_init() fail", __FUNCTION__);
   1036         csc_set_hw_property(m_exynosPictureCSC, CSC_HW_PROPERTY_FIXED_NODE, PICTURE_GSC_NODE_NUM);
   1037         csc_set_hw_property(m_exynosPictureCSC, CSC_HW_PROPERTY_HW_TYPE, CSC_HW_TYPE_GSCALER);
   1038 
   1039         m_exynosVideoCSC = csc_init(cscMethod);
   1040         if (m_exynosVideoCSC == NULL)
   1041             ALOGE("ERR(%s): csc_init() fail", __FUNCTION__);
   1042         csc_set_hw_property(m_exynosVideoCSC, CSC_HW_PROPERTY_FIXED_NODE, VIDEO_GSC_NODE_NUM);
   1043         csc_set_hw_property(m_exynosVideoCSC, CSC_HW_PROPERTY_HW_TYPE, CSC_HW_TYPE_GSCALER);
   1044 
   1045         m_setExifFixedAttribute();
   1046 
    1047         // control information clear
   1048         // flash
   1049         m_ctlInfo.flash.i_flashMode = AA_AEMODE_ON;
   1050         m_ctlInfo.flash.m_afFlashDoneFlg= false;
   1051         m_ctlInfo.flash.m_flashEnableFlg = false;
   1052         m_ctlInfo.flash.m_flashFrameCount = 0;
   1053         m_ctlInfo.flash.m_flashCnt = 0;
   1054         m_ctlInfo.flash.m_flashTimeOut = 0;
   1055         m_ctlInfo.flash.m_flashDecisionResult = false;
   1056         m_ctlInfo.flash.m_flashTorchMode = false;
   1057         m_ctlInfo.flash.m_precaptureState = 0;
   1058         m_ctlInfo.flash.m_precaptureTriggerId = 0;
   1059         // ae
   1060         m_ctlInfo.ae.aeStateNoti = AE_STATE_INACTIVE;
   1061         // af
   1062         m_ctlInfo.af.m_afTriggerTimeOut = 0;
   1063         // scene
   1064         m_ctlInfo.scene.prevSceneMode = AA_SCENE_MODE_MAX;
   1065     }
   1066     ALOGD("(%s): EXIT", __FUNCTION__);
   1067 }
   1068 
   1069 ExynosCameraHWInterface2::~ExynosCameraHWInterface2()
   1070 {
   1071     ALOGD("(%s): ENTER", __FUNCTION__);
   1072     this->release();
   1073     ALOGD("(%s): EXIT", __FUNCTION__);
   1074 }
   1075 
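         /* Shuts the HAL down: signals the stream/sensor/main threads to terminate,
          * releases the CSC handles, waits for the threads, frees the bayer and SCC
          * buffers, closes the sensor/ISP/capture/SCP nodes and destroys the ion client. */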
   1076 void ExynosCameraHWInterface2::release()
   1077 {
   1078     int i, res;
   1079     ALOGD("(HAL2::release): ENTER");
   1080 
   1081     if (m_streamThreads[1] != NULL) {
   1082         m_streamThreads[1]->release();
   1083         m_streamThreads[1]->SetSignal(SIGNAL_THREAD_TERMINATE);
   1084     }
   1085 
   1086     if (m_streamThreads[0] != NULL) {
   1087         m_streamThreads[0]->release();
   1088         m_streamThreads[0]->SetSignal(SIGNAL_THREAD_TERMINATE);
   1089     }
   1090 
   1091     if (m_sensorThread != NULL) {
   1092         m_sensorThread->release();
   1093     }
   1094 
   1095     if (m_mainThread != NULL) {
   1096         m_mainThread->release();
   1097     }
   1098 
   1099     if (m_exynosPictureCSC)
   1100         csc_deinit(m_exynosPictureCSC);
   1101     m_exynosPictureCSC = NULL;
   1102 
   1103     if (m_exynosVideoCSC)
   1104         csc_deinit(m_exynosVideoCSC);
   1105     m_exynosVideoCSC = NULL;
   1106 
   1107     if (m_streamThreads[1] != NULL) {
   1108         ALOGD("(HAL2::release): START Waiting for (indirect) stream thread 1 termination");
   1109         while (!m_streamThreads[1]->IsTerminated())
   1110             usleep(SIG_WAITING_TICK);
   1111         ALOGD("(HAL2::release): END   Waiting for (indirect) stream thread 1 termination");
   1112         m_streamThreads[1] = NULL;
   1113     }
   1114 
   1115     if (m_streamThreads[0] != NULL) {
   1116         ALOGD("(HAL2::release): START Waiting for (indirect) stream thread 0 termination");
   1117         while (!m_streamThreads[0]->IsTerminated())
   1118             usleep(SIG_WAITING_TICK);
   1119         ALOGD("(HAL2::release): END   Waiting for (indirect) stream thread 0 termination");
   1120         m_streamThreads[0] = NULL;
   1121     }
   1122 
   1123     if (m_sensorThread != NULL) {
   1124         ALOGD("(HAL2::release): START Waiting for (indirect) sensor thread termination");
   1125         while (!m_sensorThread->IsTerminated())
   1126             usleep(SIG_WAITING_TICK);
   1127         ALOGD("(HAL2::release): END   Waiting for (indirect) sensor thread termination");
   1128         m_sensorThread = NULL;
   1129     }
   1130 
   1131     if (m_mainThread != NULL) {
   1132         ALOGD("(HAL2::release): START Waiting for (indirect) main thread termination");
   1133         while (!m_mainThread->IsTerminated())
   1134             usleep(SIG_WAITING_TICK);
   1135         ALOGD("(HAL2::release): END   Waiting for (indirect) main thread termination");
   1136         m_mainThread = NULL;
   1137     }
   1138 
   1139     if (m_requestManager != NULL) {
   1140         delete m_requestManager;
   1141         m_requestManager = NULL;
   1142     }
   1143 
   1144     if (m_BayerManager != NULL) {
   1145         delete m_BayerManager;
   1146         m_BayerManager = NULL;
   1147     }
   1148     for (i = 0; i < NUM_BAYER_BUFFERS; i++)
   1149         freeCameraMemory(&m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);
   1150 
   1151     if (m_sccLocalBufferValid) {
   1152         for (i = 0; i < NUM_SCC_BUFFERS; i++)
   1153 #ifdef ENABLE_FRAME_SYNC
   1154             freeCameraMemory(&m_sccLocalBuffer[i], 2);
   1155 #else
   1156             freeCameraMemory(&m_sccLocalBuffer[i], 1);
   1157 #endif
   1158     }
   1159     else {
   1160         for (i = 0; i < NUM_SCC_BUFFERS; i++)
   1161             freeCameraMemory(&m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
   1162     }
   1163 
   1164     ALOGV("DEBUG(%s): calling exynos_v4l2_close - sensor", __FUNCTION__);
   1165     res = exynos_v4l2_close(m_camera_info.sensor.fd);
   1166     if (res != NO_ERROR ) {
   1167         ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
   1168     }
   1169 
   1170     ALOGV("DEBUG(%s): calling exynos_v4l2_close - isp", __FUNCTION__);
   1171     res = exynos_v4l2_close(m_camera_info.isp.fd);
   1172     if (res != NO_ERROR ) {
   1173         ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
   1174     }
   1175 
   1176     ALOGV("DEBUG(%s): calling exynos_v4l2_close - capture", __FUNCTION__);
   1177     res = exynos_v4l2_close(m_camera_info.capture.fd);
   1178     if (res != NO_ERROR ) {
   1179         ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
   1180     }
   1181 
   1182     ALOGV("DEBUG(%s): calling exynos_v4l2_close - scp", __FUNCTION__);
   1183     res = exynos_v4l2_close(m_camera_info.scp.fd);
   1184     if (res != NO_ERROR ) {
   1185         ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
   1186     }
   1187     ALOGV("DEBUG(%s): calling deleteIonClient", __FUNCTION__);
   1188     deleteIonClient(m_ionCameraClient);
   1189 
   1190     ALOGD("(HAL2::release): EXIT");
   1191 }
   1192 
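         /* Opens the sensor, ISP, capture (SCC) and preview (SCP) video nodes, allocates
          * the bayer buffers and shares them with the ISP output node, queues the initial
          * sensor buffers and starts sensor streaming. */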
   1193 int ExynosCameraHWInterface2::InitializeISPChain()
   1194 {
   1195     char node_name[30];
   1196     int fd = 0;
   1197     int i;
   1198     int ret = 0;
   1199 
   1200     /* Open Sensor */
   1201     memset(&node_name, 0x00, sizeof(char[30]));
   1202     sprintf(node_name, "%s%d", NODE_PREFIX, 40);
   1203     fd = exynos_v4l2_open(node_name, O_RDWR, 0);
   1204 
   1205     if (fd < 0) {
   1206         ALOGE("ERR(%s): failed to open sensor video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
   1207     }
   1208     else {
   1209         ALOGV("DEBUG(%s): sensor video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
   1210     }
   1211     m_camera_info.sensor.fd = fd;
   1212 
   1213     /* Open ISP */
   1214     memset(&node_name, 0x00, sizeof(char[30]));
   1215     sprintf(node_name, "%s%d", NODE_PREFIX, 41);
   1216     fd = exynos_v4l2_open(node_name, O_RDWR, 0);
   1217 
   1218     if (fd < 0) {
   1219         ALOGE("ERR(%s): failed to open isp video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
   1220     }
   1221     else {
   1222         ALOGV("DEBUG(%s): isp video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
   1223     }
   1224     m_camera_info.isp.fd = fd;
   1225 
   1226     /* Open ScalerC */
   1227     memset(&node_name, 0x00, sizeof(char[30]));
   1228     sprintf(node_name, "%s%d", NODE_PREFIX, 42);
   1229     fd = exynos_v4l2_open(node_name, O_RDWR, 0);
   1230 
   1231     if (fd < 0) {
   1232         ALOGE("ERR(%s): failed to open capture video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
   1233     }
   1234     else {
   1235         ALOGV("DEBUG(%s): capture video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
   1236     }
   1237     m_camera_info.capture.fd = fd;
   1238 
   1239     /* Open ScalerP */
   1240     memset(&node_name, 0x00, sizeof(char[30]));
   1241     sprintf(node_name, "%s%d", NODE_PREFIX, 44);
   1242     fd = exynos_v4l2_open(node_name, O_RDWR, 0);
   1243     if (fd < 0) {
   1244         ALOGE("DEBUG(%s): failed to open preview video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
   1245     }
   1246     else {
   1247         ALOGV("DEBUG(%s): preview video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
   1248     }
   1249     m_camera_info.scp.fd = fd;
   1250 
   1251     if(m_cameraId == 0)
   1252         m_camera_info.sensor_id = SENSOR_NAME_S5K4E5;
   1253     else
   1254         m_camera_info.sensor_id = SENSOR_NAME_S5K6A3;
   1255 
   1256     memset(&m_camera_info.dummy_shot, 0x00, sizeof(struct camera2_shot_ext));
   1257     m_camera_info.dummy_shot.shot.ctl.request.metadataMode = METADATA_MODE_FULL;
   1258     m_camera_info.dummy_shot.shot.magicNumber = 0x23456789;
   1259 
   1260     m_camera_info.dummy_shot.dis_bypass = 1;
   1261     m_camera_info.dummy_shot.dnr_bypass = 1;
   1262     m_camera_info.dummy_shot.fd_bypass = 1;
   1263 
   1264     /*sensor setting*/
   1265     m_camera_info.dummy_shot.shot.ctl.sensor.exposureTime = 0;
   1266     m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 0;
   1267     m_camera_info.dummy_shot.shot.ctl.sensor.sensitivity = 0;
   1268 
   1269     m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[0] = 0;
   1270     m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[1] = 0;
   1271 
   1272     /*request setting*/
   1273     m_camera_info.dummy_shot.request_sensor = 1;
   1274     m_camera_info.dummy_shot.request_scc = 0;
   1275     m_camera_info.dummy_shot.request_scp = 0;
   1276     m_camera_info.dummy_shot.shot.ctl.request.outputStreams[0] = 0;
   1277 
   1278     m_camera_info.sensor.width = m_camera2->getSensorRawW();
   1279     m_camera_info.sensor.height = m_camera2->getSensorRawH();
   1280 
   1281     m_camera_info.sensor.format = V4L2_PIX_FMT_SBGGR16;
   1282     m_camera_info.sensor.planes = 2;
   1283     m_camera_info.sensor.buffers = NUM_BAYER_BUFFERS;
   1284     m_camera_info.sensor.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
   1285     m_camera_info.sensor.memory = V4L2_MEMORY_DMABUF;
   1286 
   1287     for(i = 0; i < m_camera_info.sensor.buffers; i++){
   1288         int res;
   1289         initCameraMemory(&m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);
   1290         m_camera_info.sensor.buffer[i].size.extS[0] = m_camera_info.sensor.width*m_camera_info.sensor.height*2;
    1291         m_camera_info.sensor.buffer[i].size.extS[1] = 8*1024; // HACK: driver uses 8*1024; should use a predefined value
   1292         res = allocCameraMemory(m_ionCameraClient, &m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes, 1<<1);
   1293         if (res) {
   1294             ALOGE("ERROR(%s): failed to allocateCameraMemory for sensor buffer %d", __FUNCTION__, i);
   1295             // Free allocated sensor buffers
   1296             for (int j = 0; j < i; j++) {
   1297                 freeCameraMemory(&m_camera_info.sensor.buffer[j], m_camera_info.sensor.planes);
   1298             }
    1299             return -1; /* negative so the (*openInvalid < 0) check in the constructor triggers */
   1300         }
   1301     }
   1302 
   1303     m_camera_info.isp.width = m_camera_info.sensor.width;
   1304     m_camera_info.isp.height = m_camera_info.sensor.height;
   1305     m_camera_info.isp.format = m_camera_info.sensor.format;
   1306     m_camera_info.isp.planes = m_camera_info.sensor.planes;
   1307     m_camera_info.isp.buffers = m_camera_info.sensor.buffers;
   1308     m_camera_info.isp.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
   1309     m_camera_info.isp.memory = V4L2_MEMORY_DMABUF;
   1310 
   1311     for(i = 0; i < m_camera_info.isp.buffers; i++){
   1312         initCameraMemory(&m_camera_info.isp.buffer[i], m_camera_info.isp.planes);
   1313         m_camera_info.isp.buffer[i].size.extS[0]    = m_camera_info.sensor.buffer[i].size.extS[0];
   1314         m_camera_info.isp.buffer[i].size.extS[1]    = m_camera_info.sensor.buffer[i].size.extS[1];
   1315         m_camera_info.isp.buffer[i].fd.extFd[0]     = m_camera_info.sensor.buffer[i].fd.extFd[0];
   1316         m_camera_info.isp.buffer[i].fd.extFd[1]     = m_camera_info.sensor.buffer[i].fd.extFd[1];
   1317         m_camera_info.isp.buffer[i].virt.extP[0]    = m_camera_info.sensor.buffer[i].virt.extP[0];
   1318         m_camera_info.isp.buffer[i].virt.extP[1]    = m_camera_info.sensor.buffer[i].virt.extP[1];
   1319     };
   1320 
   1321     /* init ISP */
   1322     ret = cam_int_s_input(&(m_camera_info.isp), m_camera_info.sensor_id);
   1323     if (ret < 0) {
   1324         ALOGE("ERR(%s): cam_int_s_input(%d) failed!!!! ",  __FUNCTION__, m_camera_info.sensor_id);
    1325         return -1; /* negative so the (*openInvalid < 0) check in the constructor triggers */
   1326     }
   1327     cam_int_s_fmt(&(m_camera_info.isp));
   1328     ALOGV("DEBUG(%s): isp calling reqbuf", __FUNCTION__);
   1329     cam_int_reqbufs(&(m_camera_info.isp));
   1330     ALOGV("DEBUG(%s): isp calling querybuf", __FUNCTION__);
   1331     ALOGV("DEBUG(%s): isp mem alloc done",  __FUNCTION__);
   1332 
   1333     /* init Sensor */
   1334     cam_int_s_input(&(m_camera_info.sensor), m_camera_info.sensor_id);
   1335     ALOGV("DEBUG(%s): sensor s_input done",  __FUNCTION__);
   1336     if (cam_int_s_fmt(&(m_camera_info.sensor))< 0) {
   1337         ALOGE("ERR(%s): sensor s_fmt fail",  __FUNCTION__);
   1338     }
   1339     ALOGV("DEBUG(%s): sensor s_fmt done",  __FUNCTION__);
   1340     cam_int_reqbufs(&(m_camera_info.sensor));
   1341     ALOGV("DEBUG(%s): sensor reqbuf done",  __FUNCTION__);
   1342     for (i = 0; i < m_camera_info.sensor.buffers; i++) {
   1343         ALOGV("DEBUG(%s): sensor initial QBUF [%d]",  __FUNCTION__, i);
   1344         m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 33*1000*1000; // apply from frame #1
   1345         m_camera_info.dummy_shot.shot.ctl.request.frameCount = -1;
   1346         memcpy( m_camera_info.sensor.buffer[i].virt.extP[1], &(m_camera_info.dummy_shot),
   1347                 sizeof(struct camera2_shot_ext));
   1348     }
   1349 
   1350     for (i = 0; i < NUM_MIN_SENSOR_QBUF; i++)
   1351         cam_int_qbuf(&(m_camera_info.sensor), i);
   1352 
   1353     for (i = NUM_MIN_SENSOR_QBUF; i < m_camera_info.sensor.buffers; i++)
   1354         m_requestManager->pushSensorQ(i);
   1355 
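             /* Only the first NUM_MIN_SENSOR_QBUF buffers were queued to the driver
              * above; the remaining indices sit in the request manager's sensor
              * queue so they can be queued later as capture requests arrive. */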
   1356     ALOGV("== stream_on :: sensor");
   1357     cam_int_streamon(&(m_camera_info.sensor));
   1358     m_camera_info.sensor.status = true;
   1359 
   1360     /* init Capture */
   1361     m_camera_info.capture.width = m_camera2->getSensorW();
   1362     m_camera_info.capture.height = m_camera2->getSensorH();
   1363     m_camera_info.capture.format = V4L2_PIX_FMT_YUYV;
   1364 #ifdef ENABLE_FRAME_SYNC
   1365     m_camera_info.capture.planes = 2;
   1366 #else
   1367     m_camera_info.capture.planes = 1;
   1368 #endif
   1369     m_camera_info.capture.buffers = NUM_SCC_BUFFERS;
   1370     m_camera_info.capture.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
   1371     m_camera_info.capture.memory = V4L2_MEMORY_DMABUF;
   1372 
   1373     m_camera_info.capture.status = false;
   1374 
   1375     return true;
   1376 }
   1377 
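         /*
          * StartSCCThread() (re)creates stream thread 1 for the SCC/capture node
          * and, on first start, allocates its local YUYV buffers. A minimal sketch
          * of the sizing used below (illustrative only; the helper name is
          * hypothetical):
          *
          *   // HAL_PIXEL_FORMAT_YCbCr_422_I (YUYV) packs 2 bytes per pixel.
          *   static inline size_t sccPayloadBytes(int w, int h) { return (size_t)w * h * 2; }
          *   // With ENABLE_FRAME_SYNC a second 4 KiB plane is appended for the
          *   // frame-sync metadata.
          */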
   1378 void ExynosCameraHWInterface2::StartSCCThread(bool threadExists)
   1379 {
   1380     ALOGV("(%s)", __FUNCTION__);
   1381     StreamThread *AllocatedStream;
   1382     stream_parameters_t newParameters;
   1383     uint32_t format_actual;
   1384 
   1385 
   1386     if (!threadExists) {
   1387         m_streamThreads[1]  = new StreamThread(this, 1);
   1388     }
   1389     AllocatedStream = (StreamThread*)(m_streamThreads[1].get());
   1390     if (!threadExists) {
   1391         AllocatedStream->Start("StreamThread", PRIORITY_DEFAULT, 0);
   1392         m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream);
   1393         AllocatedStream->m_numRegisteredStream = 1;
   1394     }
   1395     AllocatedStream->m_index        = 1;
   1396 
   1397     format_actual                   = HAL_PIXEL_FORMAT_YCbCr_422_I; // YUYV
   1398 
   1399     newParameters.width             = m_camera2->getSensorW();
   1400     newParameters.height            = m_camera2->getSensorH();
   1401     newParameters.format            = format_actual;
   1402     newParameters.streamOps         = NULL;
   1403     newParameters.numHwBuffers      = NUM_SCC_BUFFERS;
   1404 #ifdef ENABLE_FRAME_SYNC
   1405     newParameters.planes            = 2;
   1406 #else
   1407     newParameters.planes            = 1;
   1408 #endif
   1409 
   1410     newParameters.numSvcBufsInHal   = 0;
   1411 
   1412     newParameters.node              = &m_camera_info.capture;
   1413 
   1414     AllocatedStream->streamType     = STREAM_TYPE_INDIRECT;
   1415     ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream);
   1416 
   1417     if (!threadExists) {
   1418         if (!m_sccLocalBufferValid) {
   1419             for (int i = 0; i < m_camera_info.capture.buffers; i++){
   1420                 initCameraMemory(&m_camera_info.capture.buffer[i], newParameters.node->planes);
   1421                 m_camera_info.capture.buffer[i].size.extS[0] = m_camera_info.capture.width*m_camera_info.capture.height*2;
   1422 #ifdef ENABLE_FRAME_SYNC
    1423                 m_camera_info.capture.buffer[i].size.extS[1] = 4*1024; // HACK: driver uses 4*1024; this should come from a predefined constant
   1424                 allocCameraMemory(m_ionCameraClient, &m_camera_info.capture.buffer[i], m_camera_info.capture.planes, 1<<1);
   1425 #else
   1426                 allocCameraMemory(m_ionCameraClient, &m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
   1427 #endif
   1428                 m_sccLocalBuffer[i] = m_camera_info.capture.buffer[i];
   1429             }
   1430             m_sccLocalBufferValid = true;
   1431         }
   1432     } else {
   1433         if (m_sccLocalBufferValid) {
   1434              for (int i = 0; i < m_camera_info.capture.buffers; i++)
   1435                 m_camera_info.capture.buffer[i] = m_sccLocalBuffer[i];
   1436         } else {
   1437             ALOGE("(%s): SCC Thread starting with no buffer", __FUNCTION__);
   1438         }
   1439     }
   1440     cam_int_s_input(newParameters.node, m_camera_info.sensor_id);
   1441     m_camera_info.capture.buffers = NUM_SCC_BUFFERS;
   1442     cam_int_s_fmt(newParameters.node);
   1443     ALOGV("DEBUG(%s): capture calling reqbuf", __FUNCTION__);
   1444     cam_int_reqbufs(newParameters.node);
   1445     ALOGV("DEBUG(%s): capture calling querybuf", __FUNCTION__);
   1446 
   1447     for (int i = 0; i < newParameters.node->buffers; i++) {
   1448         ALOGV("DEBUG(%s): capture initial QBUF [%d]",  __FUNCTION__, i);
   1449         cam_int_qbuf(newParameters.node, i);
   1450         newParameters.svcBufStatus[i] = ON_DRIVER;
   1451     }
   1452 
   1453     ALOGV("== stream_on :: capture");
   1454     if (cam_int_streamon(newParameters.node) < 0) {
   1455         ALOGE("ERR(%s): capture stream on fail", __FUNCTION__);
   1456     } else {
   1457         m_camera_info.capture.status = true;
   1458     }
   1459 
   1460     AllocatedStream->setParameter(&newParameters);
   1461     AllocatedStream->m_activated    = true;
   1462     AllocatedStream->m_isBufferInit = true;
   1463 }
   1464 
   1465 void ExynosCameraHWInterface2::StartISP()
   1466 {
   1467     ALOGV("== stream_on :: isp");
   1468     cam_int_streamon(&(m_camera_info.isp));
   1469     exynos_v4l2_s_ctrl(m_camera_info.sensor.fd, V4L2_CID_IS_S_STREAM, IS_ENABLE_STREAM);
   1470 }
   1471 
   1472 int ExynosCameraHWInterface2::getCameraId() const
   1473 {
   1474     return m_cameraId;
   1475 }
   1476 
   1477 int ExynosCameraHWInterface2::setRequestQueueSrcOps(const camera2_request_queue_src_ops_t *request_src_ops)
   1478 {
   1479     ALOGV("DEBUG(%s):", __FUNCTION__);
   1480     if ((NULL != request_src_ops) && (NULL != request_src_ops->dequeue_request)
   1481             && (NULL != request_src_ops->free_request) && (NULL != request_src_ops->request_count)) {
   1482         m_requestQueueOps = (camera2_request_queue_src_ops_t*)request_src_ops;
   1483         return 0;
   1484     }
   1485     else {
    1486         ALOGE("ERR(%s): setRequestQueueSrcOps: NULL arguments", __FUNCTION__);
   1487         return 1;
   1488     }
   1489 }
   1490 
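         /*
          * notifyRequestQueueNotEmpty() is called by the framework when new capture
          * requests are available. If the pipeline is idle it re-arms the nodes in
          * order -- sensor (s_fmt / reqbufs / qbuf / streamon), then the SCC capture
          * stream thread, then the ISP via StartISP() -- and finally wakes the main
          * thread with SIGNAL_MAIN_REQ_Q_NOT_EMPTY. SetInitialSkip(6) skips the first
          * few frames after a (re)start, presumably to let the pipeline settle.
          */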
   1491 int ExynosCameraHWInterface2::notifyRequestQueueNotEmpty()
   1492 {
   1493     int i = 0;
   1494 
   1495     ALOGV("DEBUG(%s):setting [SIGNAL_MAIN_REQ_Q_NOT_EMPTY] current(%d)", __FUNCTION__, m_requestManager->GetNumEntries());
    1496     if ((NULL == m_frameQueueOps) || (NULL == m_requestQueueOps)) {
    1497         ALOGE("ERR(%s): queue ops are NULL; ignoring request", __FUNCTION__);
   1498         return 0;
   1499     }
   1500     m_isRequestQueueNull = false;
   1501     if (m_requestManager->GetNumEntries() == 0)
   1502         m_requestManager->SetInitialSkip(0);
   1503 
   1504     if (m_isIspStarted == false) {
   1505         /* isp */
   1506         m_camera_info.sensor.buffers = NUM_BAYER_BUFFERS;
   1507         m_camera_info.isp.buffers = m_camera_info.sensor.buffers;
   1508         cam_int_s_fmt(&(m_camera_info.isp));
   1509         cam_int_reqbufs(&(m_camera_info.isp));
   1510 
   1511         /* sensor */
   1512         if (m_camera_info.sensor.status == false) {
   1513             cam_int_s_fmt(&(m_camera_info.sensor));
   1514             cam_int_reqbufs(&(m_camera_info.sensor));
   1515 
   1516             for (i = 0; i < m_camera_info.sensor.buffers; i++) {
   1517                 ALOGV("DEBUG(%s): sensor initial QBUF [%d]",  __FUNCTION__, i);
   1518                 m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 33*1000*1000; // apply from frame #1
   1519                 m_camera_info.dummy_shot.shot.ctl.request.frameCount = -1;
   1520                 memcpy( m_camera_info.sensor.buffer[i].virt.extP[1], &(m_camera_info.dummy_shot),
   1521                         sizeof(struct camera2_shot_ext));
   1522             }
   1523             for (i = 0; i < NUM_MIN_SENSOR_QBUF; i++)
   1524                 cam_int_qbuf(&(m_camera_info.sensor), i);
   1525 
   1526             for (i = NUM_MIN_SENSOR_QBUF; i < m_camera_info.sensor.buffers; i++)
   1527                 m_requestManager->pushSensorQ(i);
   1528             ALOGV("DEBUG(%s): calling sensor streamon", __FUNCTION__);
   1529             cam_int_streamon(&(m_camera_info.sensor));
   1530             m_camera_info.sensor.status = true;
   1531         }
   1532     }
   1533     if (!(m_streamThreads[1].get())) {
    1534         ALOGV("DEBUG(%s): stream thread 1 does not exist; starting without a stream", __FUNCTION__);
   1535         StartSCCThread(false);
   1536     } else {
   1537         if (m_streamThreads[1]->m_activated ==  false) {
   1538             ALOGV("DEBUG(%s): stream thread 1 suspended. restarting", __FUNCTION__);
   1539             StartSCCThread(true);
   1540         } else {
   1541             if (m_camera_info.capture.status == false) {
   1542                 m_camera_info.capture.buffers = NUM_SCC_BUFFERS;
   1543                 cam_int_s_fmt(&(m_camera_info.capture));
   1544                 ALOGV("DEBUG(%s): capture calling reqbuf", __FUNCTION__);
   1545                 cam_int_reqbufs(&(m_camera_info.capture));
   1546                 ALOGV("DEBUG(%s): capture calling querybuf", __FUNCTION__);
   1547 
   1548                 if (m_streamThreads[1]->streamType == STREAM_TYPE_DIRECT) {
   1549                     StreamThread *          targetStream = m_streamThreads[1].get();
   1550                     stream_parameters_t     *targetStreamParms = &(targetStream->m_parameters);
   1551                     node_info_t             *currentNode = targetStreamParms->node;
   1552 
   1553                     struct v4l2_buffer v4l2_buf;
   1554                     struct v4l2_plane  planes[VIDEO_MAX_PLANES];
   1555 
   1556                     for (i = 0 ; i < targetStreamParms->numSvcBuffers ; i++) {
   1557                         v4l2_buf.m.planes   = planes;
   1558                         v4l2_buf.type       = currentNode->type;
   1559                         v4l2_buf.memory     = currentNode->memory;
   1560 
   1561                         v4l2_buf.length     = currentNode->planes;
   1562                         v4l2_buf.index      = i;
   1563                         ExynosBuffer metaBuf = targetStreamParms->metaBuffers[i];
   1564 
   1565                         if (i < currentNode->buffers) {
   1566 #ifdef ENABLE_FRAME_SYNC
   1567                             v4l2_buf.m.planes[0].m.fd = targetStreamParms->svcBuffers[i].fd.extFd[0];
   1568                             v4l2_buf.m.planes[2].m.fd = targetStreamParms->svcBuffers[i].fd.extFd[1];
   1569                             v4l2_buf.m.planes[1].m.fd = targetStreamParms->svcBuffers[i].fd.extFd[2];
   1570                             v4l2_buf.length += targetStreamParms->metaPlanes;
   1571                             v4l2_buf.m.planes[v4l2_buf.length-1].m.fd = metaBuf.fd.extFd[0];
   1572                             v4l2_buf.m.planes[v4l2_buf.length-1].length = metaBuf.size.extS[0];
   1573 
   1574                             ALOGV("Qbuf metaBuf: fd(%d), length(%d) plane(%d)", metaBuf.fd.extFd[0], metaBuf.size.extS[0], v4l2_buf.length);
   1575 #endif
   1576                             if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) {
   1577                                 ALOGE("ERR(%s): exynos_v4l2_qbuf() fail fd(%d)", __FUNCTION__, currentNode->fd);
   1578                             }
   1579                             ALOGV("DEBUG(%s): exynos_v4l2_qbuf() success fd(%d)", __FUNCTION__, currentNode->fd);
   1580                             targetStreamParms->svcBufStatus[i]  = REQUIRES_DQ_FROM_SVC;
   1581                         }
   1582                         else {
   1583                             targetStreamParms->svcBufStatus[i]  = ON_SERVICE;
   1584                         }
   1585 
   1586                     }
   1587 
   1588                 } else {
   1589                     for (int i = 0; i < m_camera_info.capture.buffers; i++) {
   1590                         ALOGV("DEBUG(%s): capture initial QBUF [%d]",  __FUNCTION__, i);
   1591                         cam_int_qbuf(&(m_camera_info.capture), i);
   1592                     }
   1593                 }
   1594                 ALOGV("== stream_on :: capture");
   1595                 if (cam_int_streamon(&(m_camera_info.capture)) < 0) {
   1596                     ALOGE("ERR(%s): capture stream on fail", __FUNCTION__);
   1597                 } else {
   1598                     m_camera_info.capture.status = true;
   1599                 }
   1600             }
   1601             if (m_scpForceSuspended) {
   1602                 m_scpForceSuspended = false;
   1603             }
   1604         }
   1605     }
   1606     if (m_isIspStarted == false) {
   1607         StartISP();
   1608         ALOGV("DEBUG(%s):starting sensor thread", __FUNCTION__);
   1609         m_requestManager->SetInitialSkip(6);
   1610         m_sensorThread->Start("SensorThread", PRIORITY_DEFAULT, 0);
   1611         m_isIspStarted = true;
   1612     }
   1613     m_mainThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
   1614     return 0;
   1615 }
   1616 
   1617 int ExynosCameraHWInterface2::setFrameQueueDstOps(const camera2_frame_queue_dst_ops_t *frame_dst_ops)
   1618 {
   1619     ALOGV("DEBUG(%s):", __FUNCTION__);
   1620     if ((NULL != frame_dst_ops) && (NULL != frame_dst_ops->dequeue_frame)
   1621             && (NULL != frame_dst_ops->cancel_frame) && (NULL !=frame_dst_ops->enqueue_frame)) {
   1622         m_frameQueueOps = (camera2_frame_queue_dst_ops_t *)frame_dst_ops;
   1623         return 0;
   1624     }
   1625     else {
    1626         ALOGE("ERR(%s): setFrameQueueDstOps: NULL arguments", __FUNCTION__);
   1627         return 1;
   1628     }
   1629 }
   1630 
   1631 int ExynosCameraHWInterface2::getInProgressCount()
   1632 {
   1633     int inProgressJpeg;
   1634     int inProgressCount;
   1635 
   1636     {
   1637         Mutex::Autolock lock(m_jpegEncoderLock);
   1638         inProgressJpeg = m_jpegEncodingCount;
   1639         inProgressCount = m_requestManager->GetNumEntries();
   1640     }
   1641     ALOGV("DEBUG(%s): # of dequeued req (%d) jpeg(%d) = (%d)", __FUNCTION__,
   1642         inProgressCount, inProgressJpeg, (inProgressCount + inProgressJpeg));
   1643     return (inProgressCount + inProgressJpeg);
   1644 }
   1645 
   1646 int ExynosCameraHWInterface2::flushCapturesInProgress()
   1647 {
   1648     return 0;
   1649 }
   1650 
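         /*
          * constructDefaultRequest() follows the usual two-pass camera_metadata
          * pattern: the first call (third argument true) only sizes and allocates
          * the buffer, the second fills it in. A hypothetical caller (not part of
          * this HAL) might use it roughly like this:
          *
          *   camera_metadata_t *req = NULL;
          *   if (hw->constructDefaultRequest(CAMERA2_TEMPLATE_PREVIEW, &req) == OK) {
          *       // ... hand req to the request queue, then free_camera_metadata(req)
          *   }
          */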
   1651 int ExynosCameraHWInterface2::constructDefaultRequest(int request_template, camera_metadata_t **request)
   1652 {
   1653     ALOGV("DEBUG(%s): making template (%d) ", __FUNCTION__, request_template);
   1654 
   1655     if (request == NULL) return BAD_VALUE;
   1656     if (request_template < 0 || request_template >= CAMERA2_TEMPLATE_COUNT) {
   1657         return BAD_VALUE;
   1658     }
   1659     status_t res;
   1660     // Pass 1, calculate size and allocate
   1661     res = m_camera2->constructDefaultRequest(request_template,
   1662             request,
   1663             true);
   1664     if (res != OK) {
   1665         return res;
   1666     }
   1667     // Pass 2, build request
   1668     res = m_camera2->constructDefaultRequest(request_template,
   1669             request,
   1670             false);
   1671     if (res != OK) {
   1672         ALOGE("Unable to populate new request for template %d",
   1673                 request_template);
   1674     }
   1675 
   1676     return res;
   1677 }
   1678 
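         /*
          * allocateStream() maps the requested format onto the HAL's fixed stream
          * slots: IMPLEMENTATION_DEFINED / OPAQUE becomes STREAM_ID_PREVIEW (or the
          * STREAM_ID_RECORD substream when preview is already active), the ZSL
          * format becomes STREAM_ID_ZSL on the capture node, BLOB becomes the
          * STREAM_ID_JPEG substream, and YCrCb_420_SP / YV12 become the
          * STREAM_ID_PRVCB preview-callback substream.
          */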
   1679 int ExynosCameraHWInterface2::allocateStream(uint32_t width, uint32_t height, int format, const camera2_stream_ops_t *stream_ops,
   1680                                     uint32_t *stream_id, uint32_t *format_actual, uint32_t *usage, uint32_t *max_buffers)
   1681 {
   1682     ALOGD("(%s): stream width(%d) height(%d) format(%x)", __FUNCTION__,  width, height, format);
   1683     bool useDirectOutput = false;
   1684     StreamThread *AllocatedStream;
   1685     stream_parameters_t newParameters;
   1686     substream_parameters_t *subParameters;
   1687     StreamThread *parentStream;
   1688     status_t res;
   1689     int allocCase = 0;
   1690 
   1691     if ((format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED || format == CAMERA2_HAL_PIXEL_FORMAT_OPAQUE)  &&
   1692             m_camera2->isSupportedResolution(width, height)) {
   1693         if (!(m_streamThreads[0].get())) {
    1694             ALOGV("DEBUG(%s): stream 0 does not exist", __FUNCTION__);
   1695             allocCase = 0;
   1696         }
   1697         else {
   1698             if ((m_streamThreads[0].get())->m_activated == true) {
   1699                 ALOGV("DEBUG(%s): stream 0 exists and activated.", __FUNCTION__);
   1700                 allocCase = 1;
   1701             }
   1702             else {
   1703                 ALOGV("DEBUG(%s): stream 0 exists and deactivated.", __FUNCTION__);
   1704                 allocCase = 2;
   1705             }
   1706         }
   1707 
    1708         // TODO: compute the aspect ratio instead of matching fixed resolutions, and select based on the calculated ratio.
   1709         if ((width == 1920 && height == 1080) || (width == 1280 && height == 720)
   1710                     || (width == 720 && height == 480) || (width == 1440 && height == 960)
   1711                     || (width == 1344 && height == 896)) {
   1712             m_wideAspect = true;
   1713         } else {
   1714             m_wideAspect = false;
   1715         }
   1716         ALOGV("DEBUG(%s): m_wideAspect (%d)", __FUNCTION__, m_wideAspect);
   1717 
   1718         if (allocCase == 0 || allocCase == 2) {
   1719             *stream_id = STREAM_ID_PREVIEW;
   1720 
   1721             m_streamThreads[0]  = new StreamThread(this, *stream_id);
   1722 
   1723             AllocatedStream = (StreamThread*)(m_streamThreads[0].get());
   1724             AllocatedStream->Start("StreamThread", PRIORITY_DEFAULT, 0);
   1725             m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream);
   1726 
   1727             *format_actual                      = HAL_PIXEL_FORMAT_EXYNOS_YV12;
   1728             *usage                              = GRALLOC_USAGE_SW_WRITE_OFTEN;
   1729             if (m_wideAspect)
   1730                 *usage                         |= GRALLOC_USAGE_PRIVATE_CHROMA;
   1731             *max_buffers                        = 6;
   1732 
   1733             newParameters.width                 = width;
   1734             newParameters.height                = height;
   1735             newParameters.format                = *format_actual;
   1736             newParameters.streamOps             = stream_ops;
   1737             newParameters.usage                 = *usage;
   1738             newParameters.numHwBuffers          = NUM_SCP_BUFFERS;
   1739             newParameters.numOwnSvcBuffers      = *max_buffers;
   1740             newParameters.planes                = NUM_PLANES(*format_actual);
   1741             newParameters.metaPlanes            = 1;
   1742             newParameters.numSvcBufsInHal       = 0;
   1743             newParameters.minUndequedBuffer     = 3;
   1744             newParameters.needsIonMap           = true;
   1745 
   1746             newParameters.node                  = &m_camera_info.scp;
   1747             newParameters.node->type            = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
   1748             newParameters.node->memory          = V4L2_MEMORY_DMABUF;
   1749 
   1750             AllocatedStream->streamType         = STREAM_TYPE_DIRECT;
   1751             AllocatedStream->m_index            = 0;
   1752             AllocatedStream->setParameter(&newParameters);
   1753             AllocatedStream->m_activated = true;
   1754             AllocatedStream->m_numRegisteredStream = 1;
   1755             ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream);
   1756             m_requestManager->SetDefaultParameters(m_camera2->getSensorW());
   1757             m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[2] = m_camera2->getSensorW();
   1758             if (m_subStreams[STREAM_ID_RECORD].type != SUBSTREAM_TYPE_NONE)
   1759                 AllocatedStream->attachSubStream(STREAM_ID_RECORD, 10);
   1760             if (m_subStreams[STREAM_ID_PRVCB].type != SUBSTREAM_TYPE_NONE)
   1761                 AllocatedStream->attachSubStream(STREAM_ID_PRVCB, 70);
   1762             return 0;
   1763         } else if (allocCase == 1) {
   1764             *stream_id = STREAM_ID_RECORD;
   1765 
   1766             subParameters = &m_subStreams[STREAM_ID_RECORD];
   1767             memset(subParameters, 0, sizeof(substream_parameters_t));
   1768 
   1769             parentStream = (StreamThread*)(m_streamThreads[0].get());
   1770             if (!parentStream) {
   1771                 return 1;
   1772             }
   1773 
   1774             *format_actual = HAL_PIXEL_FORMAT_YCbCr_420_SP; // NV12M
   1775             *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
   1776             if (m_wideAspect)
   1777                 *usage |= GRALLOC_USAGE_PRIVATE_CHROMA;
   1778             *max_buffers = 6;
   1779 
   1780             subParameters->type         = SUBSTREAM_TYPE_RECORD;
   1781             subParameters->width        = width;
   1782             subParameters->height       = height;
   1783             subParameters->format       = *format_actual;
   1784             subParameters->svcPlanes     = NUM_PLANES(*format_actual);
   1785             subParameters->streamOps     = stream_ops;
   1786             subParameters->usage         = *usage;
   1787             subParameters->numOwnSvcBuffers = *max_buffers;
   1788             subParameters->numSvcBufsInHal  = 0;
   1789             subParameters->needBufferInit    = false;
   1790             subParameters->minUndequedBuffer = 2;
   1791 
   1792             res = parentStream->attachSubStream(STREAM_ID_RECORD, 20);
   1793             if (res != NO_ERROR) {
   1794                 ALOGE("(%s): substream attach failed. res(%d)", __FUNCTION__, res);
   1795                 return 1;
   1796             }
   1797             ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, parentStream->m_numRegisteredStream);
   1798             ALOGV("(%s): Enabling Record", __FUNCTION__);
   1799             return 0;
   1800         }
   1801     }
   1802     else if ((format == CAMERA2_HAL_PIXEL_FORMAT_ZSL)
   1803             && (width == m_camera2->getSensorW()) && (height == m_camera2->getSensorH())) {
   1804 
   1805         if (!(m_streamThreads[1].get())) {
    1806             ALOGV("DEBUG(%s): stream thread 1 does not exist", __FUNCTION__);
   1807             useDirectOutput = true;
   1808         }
   1809         else {
   1810             ALOGV("DEBUG(%s): stream thread 1 exists and deactivated.", __FUNCTION__);
   1811             useDirectOutput = false;
   1812         }
   1813         if (useDirectOutput) {
   1814             *stream_id = STREAM_ID_ZSL;
   1815 
   1816             m_streamThreads[1]  = new StreamThread(this, *stream_id);
   1817             AllocatedStream = (StreamThread*)(m_streamThreads[1].get());
   1818             AllocatedStream->Start("StreamThread", PRIORITY_DEFAULT, 0);
   1819             m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream);
   1820 
   1821             *format_actual                      = HAL_PIXEL_FORMAT_EXYNOS_YV12;
   1822             *max_buffers                        = 6;
   1823 
   1824             *format_actual = HAL_PIXEL_FORMAT_YCbCr_422_I; // YUYV
   1825             *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
   1826             if (m_wideAspect)
   1827                 *usage |= GRALLOC_USAGE_PRIVATE_CHROMA;
   1828             *max_buffers = 6;
   1829 
   1830             newParameters.width                 = width;
   1831             newParameters.height                = height;
   1832             newParameters.format                = *format_actual;
   1833             newParameters.streamOps             = stream_ops;
   1834             newParameters.usage                 = *usage;
   1835             newParameters.numHwBuffers          = NUM_SCC_BUFFERS;
   1836             newParameters.numOwnSvcBuffers      = *max_buffers;
   1837             newParameters.planes                = NUM_PLANES(*format_actual);
   1838             newParameters.metaPlanes            = 1;
   1839 
   1840             newParameters.numSvcBufsInHal       = 0;
   1841             newParameters.minUndequedBuffer     = 2;
   1842             newParameters.needsIonMap           = false;
   1843 
   1844             newParameters.node                  = &m_camera_info.capture;
   1845             newParameters.node->type            = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
   1846             newParameters.node->memory          = V4L2_MEMORY_DMABUF;
   1847 
   1848             AllocatedStream->streamType         = STREAM_TYPE_DIRECT;
   1849             AllocatedStream->m_index            = 1;
   1850             AllocatedStream->setParameter(&newParameters);
   1851             AllocatedStream->m_activated = true;
   1852             AllocatedStream->m_numRegisteredStream = 1;
   1853             ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream);
   1854             return 0;
   1855         } else {
   1856             bool bJpegExists = false;
   1857             AllocatedStream = (StreamThread*)(m_streamThreads[1].get());
   1858             subParameters = &m_subStreams[STREAM_ID_JPEG];
   1859             if (subParameters->type == SUBSTREAM_TYPE_JPEG) {
   1860                 ALOGD("(%s): jpeg stream exists", __FUNCTION__);
   1861                 bJpegExists = true;
   1862                 AllocatedStream->detachSubStream(STREAM_ID_JPEG);
   1863             }
   1864             AllocatedStream->m_releasing = true;
   1865             ALOGD("START stream thread 1 release %d", __LINE__);
   1866             do {
   1867                 AllocatedStream->release();
   1868                 usleep(SIG_WAITING_TICK);
   1869             } while (AllocatedStream->m_releasing);
   1870             ALOGD("END   stream thread 1 release %d", __LINE__);
   1871 
   1872             *stream_id = STREAM_ID_ZSL;
   1873 
   1874             m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream);
   1875 
   1876             *format_actual                      = HAL_PIXEL_FORMAT_EXYNOS_YV12;
   1877             *max_buffers                        = 6;
   1878 
   1879             *format_actual = HAL_PIXEL_FORMAT_YCbCr_422_I; // YUYV
   1880             *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
   1881             if (m_wideAspect)
   1882                 *usage |= GRALLOC_USAGE_PRIVATE_CHROMA;
   1883             *max_buffers = 6;
   1884 
   1885             newParameters.width                 = width;
   1886             newParameters.height                = height;
   1887             newParameters.format                = *format_actual;
   1888             newParameters.streamOps             = stream_ops;
   1889             newParameters.usage                 = *usage;
   1890             newParameters.numHwBuffers          = NUM_SCC_BUFFERS;
   1891             newParameters.numOwnSvcBuffers      = *max_buffers;
   1892             newParameters.planes                = NUM_PLANES(*format_actual);
   1893             newParameters.metaPlanes            = 1;
   1894 
   1895             newParameters.numSvcBufsInHal       = 0;
   1896             newParameters.minUndequedBuffer     = 2;
   1897             newParameters.needsIonMap           = false;
   1898 
   1899             newParameters.node                  = &m_camera_info.capture;
   1900             newParameters.node->type            = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
   1901             newParameters.node->memory          = V4L2_MEMORY_DMABUF;
   1902 
   1903             AllocatedStream->streamType         = STREAM_TYPE_DIRECT;
   1904             AllocatedStream->m_index            = 1;
   1905             AllocatedStream->setParameter(&newParameters);
   1906             AllocatedStream->m_activated = true;
   1907             AllocatedStream->m_numRegisteredStream = 1;
   1908             if (bJpegExists) {
   1909                 AllocatedStream->attachSubStream(STREAM_ID_JPEG, 10);
   1910             }
   1911             ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream);
   1912             return 0;
   1913 
   1914         }
   1915     }
   1916     else if (format == HAL_PIXEL_FORMAT_BLOB
   1917             && m_camera2->isSupportedJpegResolution(width, height)) {
   1918         *stream_id = STREAM_ID_JPEG;
   1919 
   1920         subParameters = &m_subStreams[*stream_id];
   1921         memset(subParameters, 0, sizeof(substream_parameters_t));
   1922 
   1923         if (!(m_streamThreads[1].get())) {
    1924             ALOGV("DEBUG(%s): stream thread 1 does not exist", __FUNCTION__);
   1925             StartSCCThread(false);
   1926         }
   1927         else if (m_streamThreads[1]->m_activated ==  false) {
   1928             ALOGV("DEBUG(%s): stream thread 1 suspended. restarting", __FUNCTION__);
   1929             StartSCCThread(true);
   1930         }
   1931         parentStream = (StreamThread*)(m_streamThreads[1].get());
   1932 
   1933         *format_actual = HAL_PIXEL_FORMAT_BLOB;
   1934         *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
   1935         if (m_wideAspect)
   1936             *usage |= GRALLOC_USAGE_PRIVATE_CHROMA;
   1937         *max_buffers = 4;
   1938 
   1939         subParameters->type          = SUBSTREAM_TYPE_JPEG;
   1940         subParameters->width         = width;
   1941         subParameters->height        = height;
   1942         subParameters->format        = *format_actual;
   1943         subParameters->svcPlanes     = 1;
   1944         subParameters->streamOps     = stream_ops;
   1945         subParameters->usage         = *usage;
   1946         subParameters->numOwnSvcBuffers = *max_buffers;
   1947         subParameters->numSvcBufsInHal  = 0;
   1948         subParameters->needBufferInit    = false;
   1949         subParameters->minUndequedBuffer = 2;
   1950 
   1951         res = parentStream->attachSubStream(STREAM_ID_JPEG, 10);
   1952         if (res != NO_ERROR) {
   1953             ALOGE("(%s): substream attach failed. res(%d)", __FUNCTION__, res);
   1954             return 1;
   1955         }
   1956         ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, parentStream->m_numRegisteredStream);
   1957         ALOGV("(%s): Enabling Jpeg", __FUNCTION__);
   1958         return 0;
   1959     }
   1960     else if (format == HAL_PIXEL_FORMAT_YCrCb_420_SP || format == HAL_PIXEL_FORMAT_YV12) {
   1961         *stream_id = STREAM_ID_PRVCB;
   1962 
   1963         subParameters = &m_subStreams[STREAM_ID_PRVCB];
   1964         memset(subParameters, 0, sizeof(substream_parameters_t));
   1965 
   1966         parentStream = (StreamThread*)(m_streamThreads[0].get());
   1967         if (!parentStream) {
   1968             return 1;
   1969         }
   1970 
   1971         *format_actual = format;
   1972         *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
   1973         if (m_wideAspect)
   1974             *usage |= GRALLOC_USAGE_PRIVATE_CHROMA;
   1975         *max_buffers = 6;
   1976 
   1977         subParameters->type         = SUBSTREAM_TYPE_PRVCB;
   1978         subParameters->width        = width;
   1979         subParameters->height       = height;
   1980         subParameters->format       = *format_actual;
   1981         subParameters->svcPlanes     = NUM_PLANES(*format_actual);
   1982         subParameters->streamOps     = stream_ops;
   1983         subParameters->usage         = *usage;
   1984         subParameters->numOwnSvcBuffers = *max_buffers;
   1985         subParameters->numSvcBufsInHal  = 0;
   1986         subParameters->needBufferInit    = false;
   1987         subParameters->minUndequedBuffer = 2;
   1988 
   1989         if (format == HAL_PIXEL_FORMAT_YCrCb_420_SP) {
   1990             subParameters->internalFormat = HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP;
   1991             subParameters->internalPlanes = NUM_PLANES(HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP);
   1992         }
   1993         else {
   1994             subParameters->internalFormat = HAL_PIXEL_FORMAT_EXYNOS_YV12;
   1995             subParameters->internalPlanes = NUM_PLANES(HAL_PIXEL_FORMAT_EXYNOS_YV12);
   1996         }
   1997 
   1998         res = parentStream->attachSubStream(STREAM_ID_PRVCB, 20);
   1999         if (res != NO_ERROR) {
   2000             ALOGE("(%s): substream attach failed. res(%d)", __FUNCTION__, res);
   2001             return 1;
   2002         }
   2003         ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, parentStream->m_numRegisteredStream);
   2004         ALOGV("(%s): Enabling previewcb", __FUNCTION__);
   2005         return 0;
   2006     }
   2007     ALOGE("(%s): Unsupported Pixel Format", __FUNCTION__);
   2008     return 1;
   2009 }
   2010 
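         /*
          * registerStreamBuffers() imports the gralloc buffers handed over by the
          * service, maps them with ion_map() when the stream needs CPU access, and
          * pre-queues the first numHwBuffers of them to the V4L2 node as DMABUF
          * planes. The underlying multi-planar QBUF pattern, reduced to a sketch
          * (illustrative only, error handling omitted):
          *
          *   struct v4l2_plane  planes[VIDEO_MAX_PLANES] = {};
          *   struct v4l2_buffer buf = {};
          *   buf.type     = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
          *   buf.memory   = V4L2_MEMORY_DMABUF;
          *   buf.index    = i;
          *   buf.length   = nPlanes;
          *   buf.m.planes = planes;
          *   planes[p].m.fd   = dmabufFd[p];    // fds taken from the gralloc handle
          *   planes[p].length = planeSize[p];
          *   ioctl(videoFd, VIDIOC_QBUF, &buf); // essentially what exynos_v4l2_qbuf() performs
          */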
   2011 int ExynosCameraHWInterface2::registerStreamBuffers(uint32_t stream_id,
   2012         int num_buffers, buffer_handle_t *registeringBuffers)
   2013 {
   2014     int                     i,j;
   2015     void                    *virtAddr[3];
   2016     int                     plane_index = 0;
   2017     StreamThread *          targetStream;
   2018     stream_parameters_t     *targetStreamParms;
   2019     node_info_t             *currentNode;
   2020 
   2021     struct v4l2_buffer v4l2_buf;
   2022     struct v4l2_plane  planes[VIDEO_MAX_PLANES];
   2023 
   2024     ALOGD("(%s): stream_id(%d), num_buff(%d), handle(%x) ", __FUNCTION__,
   2025         stream_id, num_buffers, (uint32_t)registeringBuffers);
   2026 
   2027     if (stream_id == STREAM_ID_PREVIEW && m_streamThreads[0].get()) {
   2028         targetStream = m_streamThreads[0].get();
   2029         targetStreamParms = &(m_streamThreads[0]->m_parameters);
   2030 
   2031     }
   2032     else if (stream_id == STREAM_ID_JPEG || stream_id == STREAM_ID_RECORD || stream_id == STREAM_ID_PRVCB) {
   2033         substream_parameters_t  *targetParms;
   2034         targetParms = &m_subStreams[stream_id];
   2035 
   2036         targetParms->numSvcBuffers = num_buffers;
   2037 
   2038         for (i = 0 ; i < targetParms->numSvcBuffers ; i++) {
    2039             ALOGV("(%s): registering substream(%d) Buffers[%d] (%x) ", __FUNCTION__,
    2040                 stream_id, i, (uint32_t)(registeringBuffers[i]));
   2041             if (m_grallocHal) {
   2042                 if (m_grallocHal->lock(m_grallocHal, registeringBuffers[i],
   2043                        targetParms->usage, 0, 0,
   2044                        targetParms->width, targetParms->height, virtAddr) != 0) {
   2045                     ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__);
   2046                 }
   2047                 else {
   2048                     ExynosBuffer currentBuf;
   2049                     const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(registeringBuffers[i]);
   2050                     if (targetParms->svcPlanes == 1) {
   2051                         currentBuf.fd.extFd[0] = priv_handle->fd;
   2052                         currentBuf.size.extS[0] = priv_handle->size;
   2053                         currentBuf.size.extS[1] = 0;
   2054                         currentBuf.size.extS[2] = 0;
   2055                     } else if (targetParms->svcPlanes == 2) {
   2056                         currentBuf.fd.extFd[0] = priv_handle->fd;
   2057                         currentBuf.fd.extFd[1] = priv_handle->fd1;
   2058 
   2059                     } else if (targetParms->svcPlanes == 3) {
   2060                         currentBuf.fd.extFd[0] = priv_handle->fd;
   2061                         currentBuf.fd.extFd[1] = priv_handle->fd1;
   2062                         currentBuf.fd.extFd[2] = priv_handle->fd2;
   2063                     }
   2064                     for (plane_index = 0 ; plane_index < targetParms->svcPlanes ; plane_index++) {
   2065                         currentBuf.virt.extP[plane_index] = (char *)virtAddr[plane_index];
   2066                         CAM_LOGV("DEBUG(%s): plane(%d): fd(%d) addr(%x) size(%d)",
   2067                              __FUNCTION__, plane_index, currentBuf.fd.extFd[plane_index],
   2068                              (unsigned int)currentBuf.virt.extP[plane_index], currentBuf.size.extS[plane_index]);
   2069                     }
   2070                     targetParms->svcBufStatus[i]  = ON_SERVICE;
   2071                     targetParms->svcBuffers[i]    = currentBuf;
   2072                     targetParms->svcBufHandle[i]  = registeringBuffers[i];
   2073                 }
   2074             }
   2075         }
   2076         targetParms->needBufferInit = true;
   2077         return 0;
   2078     }
   2079     else if (stream_id == STREAM_ID_ZSL && m_streamThreads[1].get()) {
   2080         targetStream = m_streamThreads[1].get();
   2081         targetStreamParms = &(m_streamThreads[1]->m_parameters);
   2082     }
   2083     else {
   2084         ALOGE("(%s): unregistered stream id (%d)", __FUNCTION__, stream_id);
   2085         return 1;
   2086     }
   2087 
   2088     if (targetStream->streamType == STREAM_TYPE_DIRECT) {
   2089         if (num_buffers < targetStreamParms->numHwBuffers) {
   2090             ALOGE("ERR(%s) registering insufficient num of buffers (%d) < (%d)",
   2091                 __FUNCTION__, num_buffers, targetStreamParms->numHwBuffers);
   2092             return 1;
   2093         }
   2094     }
    2095     CAM_LOGV("DEBUG(%s): format(%x) width(%d), height(%d) planes(%d)",
    2096             __FUNCTION__, targetStreamParms->format, targetStreamParms->width,
    2097             targetStreamParms->height, targetStreamParms->planes);
   2098     targetStreamParms->numSvcBuffers = num_buffers;
   2099     currentNode = targetStreamParms->node;
   2100     currentNode->width      = targetStreamParms->width;
   2101     currentNode->height     = targetStreamParms->height;
   2102     currentNode->format     = HAL_PIXEL_FORMAT_2_V4L2_PIX(targetStreamParms->format);
   2103     currentNode->planes     = targetStreamParms->planes;
   2104     currentNode->buffers    = targetStreamParms->numHwBuffers;
   2105     cam_int_s_input(currentNode, m_camera_info.sensor_id);
   2106     cam_int_s_fmt(currentNode);
   2107     cam_int_reqbufs(currentNode);
   2108     for (i = 0 ; i < targetStreamParms->numSvcBuffers ; i++) {
   2109         ALOGV("DEBUG(%s): registering Stream Buffers[%d] (%x) ", __FUNCTION__,
   2110             i, (uint32_t)(registeringBuffers[i]));
    2111         v4l2_buf.m.planes   = planes;
    2112         v4l2_buf.type       = currentNode->type;
    2113         v4l2_buf.memory     = currentNode->memory;
    2114         v4l2_buf.index      = i;
    2115         v4l2_buf.length     = currentNode->planes;
    2116 
    2117         ExynosBuffer currentBuf;
    2118         ExynosBuffer metaBuf;
    2119         const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(registeringBuffers[i]);
    2120 
    2121         m_getAlignedYUVSize(currentNode->format,
    2122             currentNode->width, currentNode->height, &currentBuf);
    2123 
    2124         ALOGV("DEBUG(%s):  ion_size(%d), stride(%d), ", __FUNCTION__, priv_handle->size, priv_handle->stride);
    2125         if (currentNode->planes == 1) {
    2126             v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
    2127             currentBuf.fd.extFd[0] = priv_handle->fd;
    2128             currentBuf.size.extS[0] = priv_handle->size;
    2129             currentBuf.size.extS[1] = 0;
    2130             currentBuf.size.extS[2] = 0;
    2131         } else if (currentNode->planes == 2) {
    2132             v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
    2133             v4l2_buf.m.planes[1].m.fd = priv_handle->fd1;
    2134             currentBuf.fd.extFd[0] = priv_handle->fd;
    2135             currentBuf.fd.extFd[1] = priv_handle->fd1;
    2136 
    2137         } else if (currentNode->planes == 3) {
    2138             v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
    2139             v4l2_buf.m.planes[2].m.fd = priv_handle->fd1;
    2140             v4l2_buf.m.planes[1].m.fd = priv_handle->fd2;
    2141             currentBuf.fd.extFd[0] = priv_handle->fd;
    2142             currentBuf.fd.extFd[2] = priv_handle->fd1;
    2143             currentBuf.fd.extFd[1] = priv_handle->fd2;
    2144         }
    2145 
    2146         for (plane_index = 0 ; plane_index < (int)v4l2_buf.length ; plane_index++) {
    2147             if (targetStreamParms->needsIonMap)
    2148                 currentBuf.virt.extP[plane_index] = (char *)ion_map(currentBuf.fd.extFd[plane_index], currentBuf.size.extS[plane_index], 0);
    2149             v4l2_buf.m.planes[plane_index].length  = currentBuf.size.extS[plane_index];
    2150             ALOGV("(%s): MAPPING plane(%d): fd(%d) addr(%x), length(%d)",
    2151                  __FUNCTION__, plane_index, v4l2_buf.m.planes[plane_index].m.fd,
    2152                  (unsigned int)currentBuf.virt.extP[plane_index],
    2153                  v4l2_buf.m.planes[plane_index].length);
    2154         }
    2155 
    2156         if (i < currentNode->buffers) {
    2157 
    2158 
    2159 #ifdef ENABLE_FRAME_SYNC
    2160             /* add plane for metadata */
    2161             metaBuf.size.extS[0] = 4*1024;
    2162             allocCameraMemory(m_ionCameraClient, &metaBuf, 1, 1<<0);
    2163 
    2164             v4l2_buf.length += targetStreamParms->metaPlanes;
    2165             v4l2_buf.m.planes[v4l2_buf.length-1].m.fd = metaBuf.fd.extFd[0];
    2166             v4l2_buf.m.planes[v4l2_buf.length-1].length = metaBuf.size.extS[0];
    2167 
    2168             ALOGV("Qbuf metaBuf: fd(%d), length(%d) plane(%d)", metaBuf.fd.extFd[0], metaBuf.size.extS[0], v4l2_buf.length);
    2169 #endif
    2170             if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) {
    2171                 ALOGE("ERR(%s): stream id(%d) exynos_v4l2_qbuf() fail fd(%d)",
    2172                     __FUNCTION__, stream_id, currentNode->fd);
    2173             }
    2174             ALOGV("DEBUG(%s): stream id(%d) exynos_v4l2_qbuf() success fd(%d)",
    2175                     __FUNCTION__, stream_id, currentNode->fd);
    2176             targetStreamParms->svcBufStatus[i]  = REQUIRES_DQ_FROM_SVC;
    2177         }
    2178         else {
    2179             targetStreamParms->svcBufStatus[i]  = ON_SERVICE;
    2180         }
    2181 
    2182         targetStreamParms->svcBuffers[i]       = currentBuf;
    2183         targetStreamParms->metaBuffers[i]      = metaBuf;
    2184         targetStreamParms->svcBufHandle[i]     = registeringBuffers[i];
    2185     }
   2186 
   2187     ALOGV("DEBUG(%s): calling  streamon stream id = %d", __FUNCTION__, stream_id);
   2188     cam_int_streamon(targetStreamParms->node);
   2189     ALOGV("DEBUG(%s): calling  streamon END", __FUNCTION__);
   2190     currentNode->status = true;
   2191     ALOGV("DEBUG(%s): END registerStreamBuffers", __FUNCTION__);
   2192 
   2193     return 0;
   2194 }
   2195 
   2196 int ExynosCameraHWInterface2::releaseStream(uint32_t stream_id)
   2197 {
   2198     StreamThread *targetStream;
   2199     status_t res = NO_ERROR;
   2200     ALOGD("(%s): stream_id(%d)", __FUNCTION__, stream_id);
   2201     bool releasingScpMain = false;
   2202 
   2203     if (stream_id == STREAM_ID_PREVIEW) {
   2204         targetStream = (StreamThread*)(m_streamThreads[0].get());
   2205         if (!targetStream) {
    2206             ALOGW("(%s): Stream Does Not Exist", __FUNCTION__);
   2207             return NO_ERROR;
   2208         }
   2209         targetStream->m_numRegisteredStream--;
   2210         ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, targetStream->m_numRegisteredStream);
   2211         releasingScpMain = true;
   2212         if (targetStream->m_parameters.needsIonMap) {
   2213             for (int i = 0; i < targetStream->m_parameters.numSvcBuffers; i++) {
   2214                 for (int j = 0; j < targetStream->m_parameters.planes; j++) {
   2215                     ion_unmap(targetStream->m_parameters.svcBuffers[i].virt.extP[j],
   2216                                     targetStream->m_parameters.svcBuffers[i].size.extS[j]);
    2217                     ALOGV("(%s) unmap stream buffer[%d], plane(%d), fd %d vaddr %x", __FUNCTION__, i, j,
   2218                                   targetStream->m_parameters.svcBuffers[i].fd.extFd[j], (unsigned int)(targetStream->m_parameters.svcBuffers[i].virt.extP[j]));
   2219                 }
   2220             }
   2221         }
   2222     } else if (stream_id == STREAM_ID_JPEG) {
   2223         if (m_resizeBuf.size.s != 0) {
   2224             freeCameraMemory(&m_resizeBuf, 1);
   2225         }
   2226         memset(&m_subStreams[stream_id], 0, sizeof(substream_parameters_t));
   2227 
   2228         targetStream = (StreamThread*)(m_streamThreads[1].get());
   2229         if (!targetStream) {
    2230             ALOGW("(%s): Stream Does Not Exist", __FUNCTION__);
   2231             return NO_ERROR;
   2232         }
   2233 
    2234         if ((res = targetStream->detachSubStream(stream_id)) != NO_ERROR) {
   2235             ALOGE("(%s): substream detach failed. res(%d)", __FUNCTION__, res);
   2236             return 1;
   2237         }
   2238         ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, targetStream->m_numRegisteredStream);
   2239         return 0;
   2240     } else if (stream_id == STREAM_ID_RECORD) {
   2241         memset(&m_subStreams[stream_id], 0, sizeof(substream_parameters_t));
   2242 
   2243         targetStream = (StreamThread*)(m_streamThreads[0].get());
   2244         if (!targetStream) {
    2245             ALOGW("(%s): Stream Does Not Exist", __FUNCTION__);
   2246             return NO_ERROR;
   2247         }
   2248 
    2249         if ((res = targetStream->detachSubStream(stream_id)) != NO_ERROR) {
   2250             ALOGE("(%s): substream detach failed. res(%d)", __FUNCTION__, res);
   2251             return 1;
   2252         }
   2253 
   2254         if (targetStream->m_numRegisteredStream != 0)
   2255             return 0;
   2256     } else if (stream_id == STREAM_ID_PRVCB) {
   2257         if (m_previewCbBuf.size.s != 0) {
   2258             freeCameraMemory(&m_previewCbBuf, m_subStreams[stream_id].internalPlanes);
   2259         }
   2260         memset(&m_subStreams[stream_id], 0, sizeof(substream_parameters_t));
   2261 
   2262         targetStream = (StreamThread*)(m_streamThreads[0].get());
   2263         if (!targetStream) {
    2264             ALOGW("(%s): Stream Does Not Exist", __FUNCTION__);
   2265             return NO_ERROR;
   2266         }
   2267 
    2268         if ((res = targetStream->detachSubStream(stream_id)) != NO_ERROR) {
   2269             ALOGE("(%s): substream detach failed. res(%d)", __FUNCTION__, res);
   2270             return 1;
   2271         }
   2272 
   2273         if (targetStream->m_numRegisteredStream != 0)
   2274             return 0;
   2275     } else if (stream_id == STREAM_ID_ZSL) {
   2276         targetStream = (StreamThread*)(m_streamThreads[1].get());
   2277         if (!targetStream) {
    2278             ALOGW("(%s): Stream Does Not Exist", __FUNCTION__);
   2279             return NO_ERROR;
   2280         }
   2281 
   2282         targetStream->m_numRegisteredStream--;
   2283         ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, targetStream->m_numRegisteredStream);
   2284         if (targetStream->m_parameters.needsIonMap) {
   2285             for (int i = 0; i < targetStream->m_parameters.numSvcBuffers; i++) {
   2286                 for (int j = 0; j < targetStream->m_parameters.planes; j++) {
   2287                     ion_unmap(targetStream->m_parameters.svcBuffers[i].virt.extP[j],
   2288                                     targetStream->m_parameters.svcBuffers[i].size.extS[j]);
    2289                     ALOGV("(%s) unmap stream buffer[%d], plane(%d), fd %d vaddr %x", __FUNCTION__, i, j,
   2290                                   targetStream->m_parameters.svcBuffers[i].fd.extFd[j], (unsigned int)(targetStream->m_parameters.svcBuffers[i].virt.extP[j]));
   2291                 }
   2292             }
   2293         }
   2294     } else {
    2295         ALOGE("ERR(%s): wrong stream id (%d)", __FUNCTION__, stream_id);
   2296         return 1;
   2297     }
   2298 
   2299     if (m_sensorThread != NULL && releasingScpMain) {
   2300         m_sensorThread->release();
   2301         ALOGD("(%s): START Waiting for (indirect) sensor thread termination", __FUNCTION__);
   2302         while (!m_sensorThread->IsTerminated())
   2303             usleep(SIG_WAITING_TICK);
   2304         ALOGD("(%s): END   Waiting for (indirect) sensor thread termination", __FUNCTION__);
   2305     }
   2306 
    2307     if (m_streamThreads[1].get() != NULL && m_streamThreads[1]->m_numRegisteredStream == 0 && m_streamThreads[1]->m_activated) {
   2308         ALOGV("(%s): deactivating stream thread 1 ", __FUNCTION__);
   2309         targetStream = (StreamThread*)(m_streamThreads[1].get());
   2310         targetStream->m_releasing = true;
   2311         ALOGD("START stream thread release %d", __LINE__);
   2312         do {
   2313             targetStream->release();
   2314             usleep(SIG_WAITING_TICK);
   2315         } while (targetStream->m_releasing);
   2316         m_camera_info.capture.status = false;
   2317         ALOGD("END   stream thread release %d", __LINE__);
   2318     }
   2319 
   2320     if (releasingScpMain || (m_streamThreads[0].get() != NULL && m_streamThreads[0]->m_numRegisteredStream == 0 && m_streamThreads[0]->m_activated)) {
   2321         ALOGV("(%s): deactivating stream thread 0", __FUNCTION__);
   2322         targetStream = (StreamThread*)(m_streamThreads[0].get());
   2323         targetStream->m_releasing = true;
   2324         ALOGD("(%s): START Waiting for (indirect) stream thread release - line(%d)", __FUNCTION__, __LINE__);
   2325         do {
   2326             targetStream->release();
   2327             usleep(SIG_WAITING_TICK);
   2328         } while (targetStream->m_releasing);
   2329         ALOGD("(%s): END   Waiting for (indirect) stream thread release - line(%d)", __FUNCTION__, __LINE__);
   2330         targetStream->SetSignal(SIGNAL_THREAD_TERMINATE);
   2331 
   2332         if (targetStream != NULL) {
   2333             ALOGD("(%s): START Waiting for (indirect) stream thread termination", __FUNCTION__);
   2334             while (!targetStream->IsTerminated())
   2335                 usleep(SIG_WAITING_TICK);
   2336             ALOGD("(%s): END   Waiting for (indirect) stream thread termination", __FUNCTION__);
   2337             m_streamThreads[0] = NULL;
   2338         }
   2339         if (m_camera_info.capture.status == true) {
   2340             m_scpForceSuspended = true;
   2341         }
   2342         m_isIspStarted = false;
   2343     }
   2344     ALOGV("(%s): END", __FUNCTION__);
   2345     return 0;
   2346 }
   2347 
   2348 int ExynosCameraHWInterface2::allocateReprocessStream(
   2349     uint32_t width, uint32_t height, uint32_t format,
   2350     const camera2_stream_in_ops_t *reprocess_stream_ops,
   2351     uint32_t *stream_id, uint32_t *consumer_usage, uint32_t *max_buffers)
   2352 {
   2353     ALOGV("DEBUG(%s):", __FUNCTION__);
   2354     return 0;
   2355 }
   2356 
   2357 int ExynosCameraHWInterface2::allocateReprocessStreamFromStream(
   2358             uint32_t output_stream_id,
   2359             const camera2_stream_in_ops_t *reprocess_stream_ops,
   2360             // outputs
   2361             uint32_t *stream_id)
   2362 {
   2363     ALOGD("(%s): output_stream_id(%d)", __FUNCTION__, output_stream_id);
   2364     *stream_id = STREAM_ID_JPEG_REPROCESS;
   2365 
   2366     m_reprocessStreamId = *stream_id;
   2367     m_reprocessOps = reprocess_stream_ops;
   2368     m_reprocessOutputStreamId = output_stream_id;
   2369     return 0;
   2370 }
   2371 
   2372 int ExynosCameraHWInterface2::releaseReprocessStream(uint32_t stream_id)
   2373 {
   2374     ALOGD("(%s): stream_id(%d)", __FUNCTION__, stream_id);
   2375     if (stream_id == STREAM_ID_JPEG_REPROCESS) {
   2376         m_reprocessStreamId = 0;
   2377         m_reprocessOps = NULL;
   2378         m_reprocessOutputStreamId = 0;
   2379         return 0;
   2380     }
   2381     return 1;
   2382 }
   2383 
   2384 int ExynosCameraHWInterface2::triggerAction(uint32_t trigger_id, int ext1, int ext2)
   2385 {
   2386     Mutex::Autolock lock(m_afModeTriggerLock);
   2387     ALOGV("DEBUG(%s): id(%x), %d, %d", __FUNCTION__, trigger_id, ext1, ext2);
   2388 
   2389     switch (trigger_id) {
   2390     case CAMERA2_TRIGGER_AUTOFOCUS:
   2391         ALOGV("DEBUG(%s):TRIGGER_AUTOFOCUS id(%d)", __FUNCTION__, ext1);
   2392         OnAfTrigger(ext1);
   2393         break;
   2394 
   2395     case CAMERA2_TRIGGER_CANCEL_AUTOFOCUS:
   2396         ALOGV("DEBUG(%s):CANCEL_AUTOFOCUS id(%d)", __FUNCTION__, ext1);
   2397         OnAfCancel(ext1);
   2398         break;
   2399     case CAMERA2_TRIGGER_PRECAPTURE_METERING:
   2400         ALOGV("DEBUG(%s):CAMERA2_TRIGGER_PRECAPTURE_METERING id(%d)", __FUNCTION__, ext1);
   2401         OnPrecaptureMeteringTriggerStart(ext1);
   2402         break;
   2403     default:
   2404         break;
   2405     }
   2406     return 0;
   2407 }
   2408 
   2409 int ExynosCameraHWInterface2::setNotifyCallback(camera2_notify_callback notify_cb, void *user)
   2410 {
   2411     ALOGV("DEBUG(%s): cb_addr(%x)", __FUNCTION__, (unsigned int)notify_cb);
   2412     m_notifyCb = notify_cb;
   2413     m_callbackCookie = user;
   2414     return 0;
   2415 }
   2416 
   2417 int ExynosCameraHWInterface2::getMetadataVendorTagOps(vendor_tag_query_ops_t **ops)
   2418 {
   2419     ALOGV("DEBUG(%s):", __FUNCTION__);
   2420     return 0;
   2421 }
   2422 
   2423 int ExynosCameraHWInterface2::dump(int fd)
   2424 {
   2425     ALOGV("DEBUG(%s):", __FUNCTION__);
   2426     return 0;
   2427 }
   2428 
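         /*
          * m_getAlignedYUVSize() fills ExynosBuffer::size.extS[] with per-plane byte
          * counts for the given V4L2 color format. Worked example for NV12M at
          * 1920x1080, using the expressions below:
          *
          *   extS[0] = ALIGN(1920, 16) * ALIGN(1080, 16) = 1920 * 1088 = 2088960
          *   extS[1] = ALIGN(2088960 / 2, 256)           = 1044480
          *   extS[2] = 0
          */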
   2429 void ExynosCameraHWInterface2::m_getAlignedYUVSize(int colorFormat, int w, int h, ExynosBuffer *buf)
   2430 {
   2431     switch (colorFormat) {
   2432     // 1p
   2433     case V4L2_PIX_FMT_RGB565 :
   2434     case V4L2_PIX_FMT_YUYV :
   2435     case V4L2_PIX_FMT_UYVY :
   2436     case V4L2_PIX_FMT_VYUY :
   2437     case V4L2_PIX_FMT_YVYU :
   2438         buf->size.extS[0] = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(colorFormat), w, h);
   2439         buf->size.extS[1] = 0;
   2440         buf->size.extS[2] = 0;
   2441         break;
   2442     // 2p
   2443     case V4L2_PIX_FMT_NV12 :
   2444     case V4L2_PIX_FMT_NV12T :
   2445     case V4L2_PIX_FMT_NV21 :
   2446         buf->size.extS[0] = ALIGN(w,   16) * ALIGN(h,   16);
   2447         buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 16);
   2448         buf->size.extS[2] = 0;
   2449         break;
   2450     case V4L2_PIX_FMT_NV12M :
   2451     case V4L2_PIX_FMT_NV12MT_16X16 :
   2452     case V4L2_PIX_FMT_NV21M:
   2453         buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h,     16);
   2454         buf->size.extS[1] = ALIGN(buf->size.extS[0] / 2, 256);
   2455         buf->size.extS[2] = 0;
   2456         break;
   2457     case V4L2_PIX_FMT_NV16 :
   2458     case V4L2_PIX_FMT_NV61 :
   2459         buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h, 16);
   2460         buf->size.extS[1] = ALIGN(w, 16) * ALIGN(h,  16);
   2461         buf->size.extS[2] = 0;
   2462         break;
   2463      // 3p
   2464     case V4L2_PIX_FMT_YUV420 :
   2465     case V4L2_PIX_FMT_YVU420 :
   2466         buf->size.extS[0] = (w * h);
   2467         buf->size.extS[1] = (w * h) >> 2;
   2468         buf->size.extS[2] = (w * h) >> 2;
   2469         break;
   2470     case V4L2_PIX_FMT_YUV420M:
   2471     case V4L2_PIX_FMT_YVU420M :
   2472         buf->size.extS[0] = ALIGN(w,  32) * ALIGN(h,  16);
   2473         buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 8);
   2474         buf->size.extS[2] = ALIGN(w/2, 16) * ALIGN(h/2, 8);
   2475         break;
   2476     case V4L2_PIX_FMT_YUV422P :
   2477         buf->size.extS[0] = ALIGN(w,  16) * ALIGN(h,  16);
   2478         buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 8);
   2479         buf->size.extS[2] = ALIGN(w/2, 16) * ALIGN(h/2, 8);
   2480         break;
   2481     default:
   2482         ALOGE("ERR(%s):unmatched colorFormat(%d)", __FUNCTION__, colorFormat);
   2483         return;
   2484         break;
   2485     }
   2486 }
   2487 
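         /*
          * Computes a centered crop of the source that matches the destination
          * aspect ratio, shrinks it further for the requested zoom level, and
          * then adjusts the crop size and offsets so they stay 2-pixel aligned.
          * Always returns true.
          */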
   2488 bool ExynosCameraHWInterface2::m_getRatioSize(int  src_w,  int   src_h,
   2489                                              int  dst_w,  int   dst_h,
   2490                                              int *crop_x, int *crop_y,
   2491                                              int *crop_w, int *crop_h,
   2492                                              int zoom)
   2493 {
   2494     *crop_w = src_w;
   2495     *crop_h = src_h;
   2496 
   2497     if (   src_w != dst_w
   2498         || src_h != dst_h) {
   2499         float src_ratio = 1.0f;
   2500         float dst_ratio = 1.0f;
   2501 
   2502         // ex : 1024 / 768
   2503         src_ratio = (float)src_w / (float)src_h;
   2504 
   2505         // ex : 352  / 288
   2506         dst_ratio = (float)dst_w / (float)dst_h;
   2507 
   2508         if (dst_w * dst_h < src_w * src_h) {
   2509             if (dst_ratio <= src_ratio) {
   2510                 // shrink w
   2511                 *crop_w = src_h * dst_ratio;
   2512                 *crop_h = src_h;
   2513             } else {
   2514                 // shrink h
   2515                 *crop_w = src_w;
   2516                 *crop_h = src_w / dst_ratio;
   2517             }
   2518         } else {
   2519             if (dst_ratio <= src_ratio) {
   2520                 // shrink w
   2521                 *crop_w = src_h * dst_ratio;
   2522                 *crop_h = src_h;
   2523             } else {
   2524                 // shrink h
   2525                 *crop_w = src_w;
   2526                 *crop_h = src_w / dst_ratio;
   2527             }
   2528         }
   2529     }
   2530 
   2531     if (zoom != 0) {
   2532         float zoomLevel = ((float)zoom + 10.0) / 10.0;
   2533         *crop_w = (int)((float)*crop_w / zoomLevel);
   2534         *crop_h = (int)((float)*crop_h / zoomLevel);
   2535     }
   2536 
   2537     #define CAMERA_CROP_WIDTH_RESTRAIN_NUM  (0x2)
   2538     unsigned int w_align = (*crop_w & (CAMERA_CROP_WIDTH_RESTRAIN_NUM - 1));
   2539     if (w_align != 0) {
   2540         if (  (CAMERA_CROP_WIDTH_RESTRAIN_NUM >> 1) <= w_align
   2541             && *crop_w + (CAMERA_CROP_WIDTH_RESTRAIN_NUM - w_align) <= dst_w) {
   2542             *crop_w += (CAMERA_CROP_WIDTH_RESTRAIN_NUM - w_align);
   2543         }
   2544         else
   2545             *crop_w -= w_align;
   2546     }
   2547 
   2548     #define CAMERA_CROP_HEIGHT_RESTRAIN_NUM  (0x2)
   2549     unsigned int h_align = (*crop_h & (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - 1));
   2550     if (h_align != 0) {
   2551         if (  (CAMERA_CROP_HEIGHT_RESTRAIN_NUM >> 1) <= h_align
   2552             && *crop_h + (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - h_align) <= dst_h) {
   2553             *crop_h += (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - h_align);
   2554         }
   2555         else
   2556             *crop_h -= h_align;
   2557     }
   2558 
   2559     *crop_x = (src_w - *crop_w) >> 1;
   2560     *crop_y = (src_h - *crop_h) >> 1;
   2561 
   2562     if (*crop_x & (CAMERA_CROP_WIDTH_RESTRAIN_NUM >> 1))
   2563         *crop_x -= 1;
   2564 
   2565     if (*crop_y & (CAMERA_CROP_HEIGHT_RESTRAIN_NUM >> 1))
   2566         *crop_y -= 1;
   2567 
   2568     return true;
   2569 }
   2570 
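         /*
          * BayerBufManager tracks ownership of the raw Bayer buffers as they
          * circulate between the HAL, the sensor node and the ISP node:
          *
          *   BAYER_ON_HAL_EMPTY  --MarkSensorEnqueue-->  BAYER_ON_SENSOR
          *   BAYER_ON_SENSOR     --MarkSensorDequeue-->  BAYER_ON_HAL_FILLED
          *   BAYER_ON_HAL_FILLED --MarkIspEnqueue----->  BAYER_ON_ISP
          *   BAYER_ON_ISP        --MarkIspDequeue----->  BAYER_ON_HAL_EMPTY
          *
          * The GetIndexFor* helpers return -1 when no buffer is in the required
          * state; otherwise they return the head index of the corresponding ring.
          */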
   2571 BayerBufManager::BayerBufManager()
   2572 {
   2573     ALOGV("DEBUG(%s): ", __FUNCTION__);
   2574     for (int i = 0; i < NUM_BAYER_BUFFERS ; i++) {
   2575         entries[i].status = BAYER_ON_HAL_EMPTY;
   2576         entries[i].reqFrameCnt = 0;
   2577     }
   2578     sensorEnqueueHead = 0;
   2579     sensorDequeueHead = 0;
   2580     ispEnqueueHead = 0;
   2581     ispDequeueHead = 0;
   2582     numOnSensor = 0;
   2583     numOnIsp = 0;
   2584     numOnHalFilled = 0;
   2585     numOnHalEmpty = NUM_BAYER_BUFFERS;
   2586 }
   2587 
   2588 BayerBufManager::~BayerBufManager()
   2589 {
   2590     ALOGV("%s", __FUNCTION__);
   2591 }
   2592 
   2593 int     BayerBufManager::GetIndexForSensorEnqueue()
   2594 {
   2595     int ret = 0;
   2596     if (numOnHalEmpty == 0)
   2597         ret = -1;
   2598     else
   2599         ret = sensorEnqueueHead;
   2600     ALOGV("DEBUG(%s): returning (%d)", __FUNCTION__, ret);
   2601     return ret;
   2602 }
   2603 
   2604 int    BayerBufManager::MarkSensorEnqueue(int index)
   2605 {
   2606     ALOGV("DEBUG(%s)    : BayerIndex[%d] ", __FUNCTION__, index);
   2607 
   2608     // sanity check
   2609     if (index != sensorEnqueueHead) {
   2610         ALOGV("DEBUG(%s)    : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, sensorEnqueueHead);
   2611         return -1;
   2612     }
   2613     if (entries[index].status != BAYER_ON_HAL_EMPTY) {
   2614         ALOGV("DEBUG(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
   2615             index, entries[index].status, BAYER_ON_HAL_EMPTY);
   2616         return -1;
   2617     }
   2618 
   2619     entries[index].status = BAYER_ON_SENSOR;
   2620     entries[index].reqFrameCnt = 0;
   2621     numOnHalEmpty--;
   2622     numOnSensor++;
   2623     sensorEnqueueHead = GetNextIndex(index);
   2624     ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
   2625         __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
   2626     return 0;
   2627 }
   2628 
   2629 int    BayerBufManager::MarkSensorDequeue(int index, int reqFrameCnt, nsecs_t *timeStamp)
   2630 {
   2631     ALOGV("DEBUG(%s)    : BayerIndex[%d] reqFrameCnt(%d)", __FUNCTION__, index, reqFrameCnt);
   2632 
   2633     if (entries[index].status != BAYER_ON_SENSOR) {
    2634         ALOGE("ERR(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
   2635             index, entries[index].status, BAYER_ON_SENSOR);
   2636         return -1;
   2637     }
   2638 
   2639     entries[index].status = BAYER_ON_HAL_FILLED;
   2640     numOnHalFilled++;
   2641     numOnSensor--;
   2642 
   2643     return 0;
   2644 }
   2645 
   2646 int     BayerBufManager::GetIndexForIspEnqueue(int *reqFrameCnt)
   2647 {
   2648     int ret = 0;
   2649     if (numOnHalFilled == 0)
   2650         ret = -1;
   2651     else {
   2652         *reqFrameCnt = entries[ispEnqueueHead].reqFrameCnt;
   2653         ret = ispEnqueueHead;
   2654     }
   2655     ALOGV("DEBUG(%s): returning BayerIndex[%d]", __FUNCTION__, ret);
   2656     return ret;
   2657 }
   2658 
   2659 int     BayerBufManager::GetIndexForIspDequeue(int *reqFrameCnt)
   2660 {
   2661     int ret = 0;
   2662     if (numOnIsp == 0)
   2663         ret = -1;
   2664     else {
   2665         *reqFrameCnt = entries[ispDequeueHead].reqFrameCnt;
   2666         ret = ispDequeueHead;
   2667     }
   2668     ALOGV("DEBUG(%s): returning BayerIndex[%d]", __FUNCTION__, ret);
   2669     return ret;
   2670 }
   2671 
   2672 int    BayerBufManager::MarkIspEnqueue(int index)
   2673 {
   2674     ALOGV("DEBUG(%s)    : BayerIndex[%d] ", __FUNCTION__, index);
   2675 
   2676     // sanity check
   2677     if (index != ispEnqueueHead) {
   2678         ALOGV("DEBUG(%s)    : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, ispEnqueueHead);
   2679         return -1;
   2680     }
   2681     if (entries[index].status != BAYER_ON_HAL_FILLED) {
   2682         ALOGV("DEBUG(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
   2683             index, entries[index].status, BAYER_ON_HAL_FILLED);
   2684         return -1;
   2685     }
   2686 
   2687     entries[index].status = BAYER_ON_ISP;
   2688     numOnHalFilled--;
   2689     numOnIsp++;
   2690     ispEnqueueHead = GetNextIndex(index);
   2691     ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
   2692         __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
   2693     return 0;
   2694 }
   2695 
   2696 int    BayerBufManager::MarkIspDequeue(int index)
   2697 {
   2698     ALOGV("DEBUG(%s)    : BayerIndex[%d]", __FUNCTION__, index);
   2699 
   2700     // sanity check
   2701     if (index != ispDequeueHead) {
   2702         ALOGV("DEBUG(%s)    : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, ispDequeueHead);
   2703         return -1;
   2704     }
   2705     if (entries[index].status != BAYER_ON_ISP) {
   2706         ALOGV("DEBUG(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
   2707             index, entries[index].status, BAYER_ON_ISP);
   2708         return -1;
   2709     }
   2710 
   2711     entries[index].status = BAYER_ON_HAL_EMPTY;
   2712     entries[index].reqFrameCnt = 0;
   2713     numOnHalEmpty++;
   2714     numOnIsp--;
   2715     ispDequeueHead = GetNextIndex(index);
   2716     ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
   2717         __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
   2718     return 0;
   2719 }
   2720 
   2721 int BayerBufManager::GetNumOnSensor()
   2722 {
   2723     return numOnSensor;
   2724 }
   2725 
   2726 int BayerBufManager::GetNumOnHalFilled()
   2727 {
   2728     return numOnHalFilled;
   2729 }
   2730 
   2731 int BayerBufManager::GetNumOnIsp()
   2732 {
   2733     return numOnIsp;
   2734 }
   2735 
   2736 int     BayerBufManager::GetNextIndex(int index)
   2737 {
   2738     index++;
   2739     if (index >= NUM_BAYER_BUFFERS)
   2740         index = 0;
   2741 
   2742     return index;
   2743 }
   2744 
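         /*
          * Main thread handler. On SIGNAL_MAIN_REQ_Q_NOT_EMPTY it dequeues service
          * requests and registers them with the request manager (latching the AF
          * mode/region they carry) and kicks the sensor thread; on
          * SIGNAL_MAIN_STREAM_OUTPUT_DONE it assembles the completed frame
          * metadata and enqueues it back to the framework frame queue.
          */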
   2745 void ExynosCameraHWInterface2::m_mainThreadFunc(SignalDrivenThread * self)
   2746 {
   2747     camera_metadata_t *currentRequest = NULL;
   2748     camera_metadata_t *currentFrame = NULL;
   2749     size_t numEntries = 0;
   2750     size_t frameSize = 0;
   2751     camera_metadata_t * preparedFrame = NULL;
   2752     camera_metadata_t *deregisteredRequest = NULL;
   2753     uint32_t currentSignal = self->GetProcessingSignal();
   2754     MainThread *  selfThread      = ((MainThread*)self);
   2755     int res = 0;
   2756 
   2757     int ret;
   2758     int afMode;
   2759     uint32_t afRegion[4];
   2760 
   2761     ALOGV("DEBUG(%s): m_mainThreadFunc (%x)", __FUNCTION__, currentSignal);
   2762 
   2763     if (currentSignal & SIGNAL_THREAD_RELEASE) {
   2764         ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
   2765 
   2766         ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE DONE", __FUNCTION__);
   2767         selfThread->SetSignal(SIGNAL_THREAD_TERMINATE);
   2768         return;
   2769     }
   2770 
   2771     if (currentSignal & SIGNAL_MAIN_REQ_Q_NOT_EMPTY) {
   2772         ALOGV("DEBUG(%s): MainThread processing SIGNAL_MAIN_REQ_Q_NOT_EMPTY", __FUNCTION__);
   2773         if (m_requestManager->IsRequestQueueFull()==false) {
   2774             Mutex::Autolock lock(m_afModeTriggerLock);
   2775             m_requestQueueOps->dequeue_request(m_requestQueueOps, &currentRequest);
   2776             if (NULL == currentRequest) {
   2777                 ALOGD("DEBUG(%s)(0x%x): No more service requests left in the queue ", __FUNCTION__, currentSignal);
   2778                 m_isRequestQueueNull = true;
   2779                 if (m_requestManager->IsVdisEnable())
   2780                     m_vdisBubbleCnt = 1;
   2781             }
   2782             else {
   2783                 m_requestManager->RegisterRequest(currentRequest, &afMode, afRegion);
   2784 
   2785                 SetAfMode((enum aa_afmode)afMode);
   2786                 SetAfRegion(afRegion);
   2787 
   2788                 m_numOfRemainingReqInSvc = m_requestQueueOps->request_count(m_requestQueueOps);
   2789                 ALOGV("DEBUG(%s): remaining req cnt (%d)", __FUNCTION__, m_numOfRemainingReqInSvc);
   2790                 if (m_requestManager->IsRequestQueueFull()==false)
   2791                     selfThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY); // dequeue repeatedly
   2792 
   2793                 m_sensorThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
   2794             }
   2795         }
   2796         else {
   2797             m_isRequestQueuePending = true;
   2798         }
   2799     }
   2800 
   2801     if (currentSignal & SIGNAL_MAIN_STREAM_OUTPUT_DONE) {
   2802         ALOGV("DEBUG(%s): MainThread processing SIGNAL_MAIN_STREAM_OUTPUT_DONE", __FUNCTION__);
   2803         /*while (1)*/ {
   2804             ret = m_requestManager->PrepareFrame(&numEntries, &frameSize, &preparedFrame, GetAfStateForService());
   2805             if (ret == false)
   2806                 CAM_LOGE("ERR(%s): PrepareFrame ret = %d", __FUNCTION__, ret);
   2807 
   2808             m_requestManager->DeregisterRequest(&deregisteredRequest);
   2809 
   2810             ret = m_requestQueueOps->free_request(m_requestQueueOps, deregisteredRequest);
   2811             if (ret < 0)
   2812                 CAM_LOGE("ERR(%s): free_request ret = %d", __FUNCTION__, ret);
   2813 
   2814             ret = m_frameQueueOps->dequeue_frame(m_frameQueueOps, numEntries, frameSize, &currentFrame);
   2815             if (ret < 0)
   2816                 CAM_LOGE("ERR(%s): dequeue_frame ret = %d", __FUNCTION__, ret);
   2817 
   2818             if (currentFrame==NULL) {
   2819                 ALOGV("DBG(%s): frame dequeue returned NULL",__FUNCTION__ );
   2820             }
   2821             else {
   2822                 ALOGV("DEBUG(%s): frame dequeue done. numEntries(%d) frameSize(%d)",__FUNCTION__ , numEntries, frameSize);
   2823             }
   2824             res = append_camera_metadata(currentFrame, preparedFrame);
   2825             if (res==0) {
   2826                 ALOGV("DEBUG(%s): frame metadata append success",__FUNCTION__);
   2827                 m_frameQueueOps->enqueue_frame(m_frameQueueOps, currentFrame);
   2828             }
   2829             else {
   2830                 ALOGE("ERR(%s): frame metadata append fail (%d)",__FUNCTION__, res);
   2831             }
   2832         }
   2833         if (!m_isRequestQueueNull) {
   2834             selfThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
   2835         }
   2836 
   2837         if (getInProgressCount()>0) {
   2838             ALOGV("DEBUG(%s): STREAM_OUTPUT_DONE and signalling REQ_PROCESSING",__FUNCTION__);
   2839             m_sensorThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
   2840         }
   2841     }
   2842     ALOGV("DEBUG(%s): MainThread Exit", __FUNCTION__);
   2843     return;
   2844 }
   2845 
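         // Debug helper: dumps the control and dynamic-metadata sections of a
         // camera2_shot_ext (exposure, aperture, ISO, AWB, timestamp, frame count).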
   2846 void ExynosCameraHWInterface2::DumpInfoWithShot(struct camera2_shot_ext * shot_ext)
   2847 {
   2848     ALOGD("####  common Section");
   2849     ALOGD("####                 magic(%x) ",
   2850         shot_ext->shot.magicNumber);
   2851     ALOGD("####  ctl Section");
   2852     ALOGD("####     meta(%d) aper(%f) exp(%lld) duration(%lld) ISO(%d) AWB(%d)",
   2853         shot_ext->shot.ctl.request.metadataMode,
   2854         shot_ext->shot.ctl.lens.aperture,
   2855         shot_ext->shot.ctl.sensor.exposureTime,
   2856         shot_ext->shot.ctl.sensor.frameDuration,
   2857         shot_ext->shot.ctl.sensor.sensitivity,
   2858         shot_ext->shot.ctl.aa.awbMode);
   2859 
   2860     ALOGD("####                 OutputStream Sensor(%d) SCP(%d) SCC(%d) streams(%x)",
   2861         shot_ext->request_sensor, shot_ext->request_scp, shot_ext->request_scc,
   2862         shot_ext->shot.ctl.request.outputStreams[0]);
   2863 
   2864     ALOGD("####  DM Section");
   2865     ALOGD("####     meta(%d) aper(%f) exp(%lld) duration(%lld) ISO(%d) timestamp(%lld) AWB(%d) cnt(%d)",
   2866         shot_ext->shot.dm.request.metadataMode,
   2867         shot_ext->shot.dm.lens.aperture,
   2868         shot_ext->shot.dm.sensor.exposureTime,
   2869         shot_ext->shot.dm.sensor.frameDuration,
   2870         shot_ext->shot.dm.sensor.sensitivity,
   2871         shot_ext->shot.dm.sensor.timeStamp,
   2872         shot_ext->shot.dm.aa.awbMode,
   2873         shot_ext->shot.dm.request.frameCount );
   2874 }
   2875 
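         /*
          * Flash state machine, setter side: for the current m_flashCnt state this
          * programs the outgoing shot controls (aeflashMode, AE/AWB lock,
          * request_scc/request_scp) and performs the transitions that do not
          * depend on ISP feedback.
          */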
   2876 void ExynosCameraHWInterface2::m_preCaptureSetter(struct camera2_shot_ext * shot_ext)
   2877 {
   2878     // Flash
   2879     switch (m_ctlInfo.flash.m_flashCnt) {
   2880     case IS_FLASH_STATE_ON:
   2881         ALOGV("(%s): [Flash] Flash ON for Capture (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
   2882         // check AF locked
   2883         if (m_ctlInfo.flash.m_precaptureTriggerId > 0) {
   2884             if (m_ctlInfo.flash.m_flashTimeOut == 0) {
   2885                 if (m_ctlInfo.flash.i_flashMode == AA_AEMODE_ON_ALWAYS_FLASH) {
   2886                     shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON_ALWAYS;
   2887                     m_ctlInfo.flash.m_flashTimeOut = 5;
   2888                 } else
   2889                     shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON;
   2890                 m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_WAIT;
   2891             } else {
   2892                 m_ctlInfo.flash.m_flashTimeOut--;
   2893             }
   2894         } else {
   2895             if (m_ctlInfo.flash.i_flashMode == AA_AEMODE_ON_ALWAYS_FLASH) {
   2896                 shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON_ALWAYS;
   2897                 m_ctlInfo.flash.m_flashTimeOut = 5;
   2898             } else
   2899                 shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON;
   2900             m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_WAIT;
   2901         }
   2902         break;
   2903     case IS_FLASH_STATE_ON_WAIT:
   2904         break;
   2905     case IS_FLASH_STATE_ON_DONE:
   2906         if (!m_ctlInfo.flash.m_afFlashDoneFlg)
   2907             // auto transition at pre-capture trigger
   2908             m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
   2909         break;
   2910     case IS_FLASH_STATE_AUTO_AE_AWB_LOCK:
   2911         ALOGV("(%s): [Flash] IS_FLASH_AF_AUTO_AE_AWB_LOCK (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
   2912         shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_AUTO;
   2913         //shot_ext->shot.ctl.aa.aeMode = AA_AEMODE_LOCKED;
   2914         shot_ext->shot.ctl.aa.awbMode = AA_AWBMODE_LOCKED;
   2915         m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AE_AWB_LOCK_WAIT;
   2916         break;
   2917     case IS_FLASH_STATE_AE_AWB_LOCK_WAIT:
   2918     case IS_FLASH_STATE_AUTO_WAIT:
    2919         shot_ext->shot.ctl.aa.aeMode = (enum aa_aemode)0;
   2920         shot_ext->shot.ctl.aa.awbMode = (enum aa_awbmode)0;
   2921         break;
   2922     case IS_FLASH_STATE_AUTO_DONE:
   2923         ALOGV("(%s): [Flash] IS_FLASH_AF_AUTO DONE (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
   2924         shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
   2925         break;
   2926     case IS_FLASH_STATE_AUTO_OFF:
   2927         ALOGV("(%s): [Flash] IS_FLASH_AF_AUTO Clear (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
   2928         shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
   2929         m_ctlInfo.flash.m_flashEnableFlg = false;
   2930         break;
   2931     case IS_FLASH_STATE_CAPTURE:
   2932         ALOGV("(%s): [Flash] IS_FLASH_CAPTURE (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
   2933         m_ctlInfo.flash.m_flashTimeOut = FLASH_STABLE_WAIT_TIMEOUT;
   2934         shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_CAPTURE;
   2935         shot_ext->request_scc = 0;
   2936         shot_ext->request_scp = 0;
   2937         m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_WAIT; // auto transition
   2938         break;
   2939     case IS_FLASH_STATE_CAPTURE_WAIT:
   2940         shot_ext->request_scc = 0;
   2941         shot_ext->request_scp = 0;
   2942         break;
   2943     case IS_FLASH_STATE_CAPTURE_JPEG:
    2944         ALOGV("(%s): [Flash] Flash Capture (%d)", __FUNCTION__, (FLASH_STABLE_WAIT_TIMEOUT - m_ctlInfo.flash.m_flashTimeOut));
   2945         shot_ext->request_scc = 1;
   2946         shot_ext->request_scp = 1;
   2947         m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_END;  // auto transition
   2948         break;
   2949     case IS_FLASH_STATE_CAPTURE_END:
   2950         ALOGV("(%s): [Flash] Flash Capture END (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
   2951         shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
   2952         shot_ext->request_scc = 0;
   2953         shot_ext->request_scp = 0;
   2954         m_ctlInfo.flash.m_flashEnableFlg = false;
   2955         m_ctlInfo.flash.m_flashCnt = 0;
   2956         m_ctlInfo.flash.m_afFlashDoneFlg= false;
   2957         break;
   2958     case IS_FLASH_STATE_NONE:
   2959         break;
   2960     default:
   2961         ALOGE("(%s): [Flash] flash state error!! (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
   2962     }
   2963 }
   2964 
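         // Flash state machine, sensor-listener side: advances IS_FLASH_STATE_AUTO_WAIT
         // based on the flash mode reported back in the sensor dynamic metadata.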
   2965 void ExynosCameraHWInterface2::m_preCaptureListenerSensor(struct camera2_shot_ext * shot_ext)
   2966 {
   2967     // Flash
   2968     switch (m_ctlInfo.flash.m_flashCnt) {
   2969     case IS_FLASH_STATE_AUTO_WAIT:
   2970         if (m_ctlInfo.flash.m_flashDecisionResult) {
   2971             if (shot_ext->shot.dm.flash.flashMode == CAM2_FLASH_MODE_OFF) {
   2972                 m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_DONE;
   2973                 ALOGV("(%s): [Flash] Lis :  AUTO -> OFF (%d)", __FUNCTION__, shot_ext->shot.dm.flash.flashMode);
   2974             } else {
   2975                 ALOGV("(%s): [Flash] Waiting : AUTO -> OFF", __FUNCTION__);
   2976             }
   2977         } else {
    2978             // If the flash wasn't activated in flash auto mode, skip flash auto control
   2979             m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_DONE;
   2980             ALOGV("(%s): [Flash] Skip :  AUTO -> OFF", __FUNCTION__);
   2981         }
   2982         break;
   2983     }
   2984 }
   2985 
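         /*
          * Flash state machine, ISP-listener side: consumes the dynamic metadata
          * returned by the ISP (flash decision, AWB lock, firingStable) to advance
          * the states that must wait for hardware feedback, with time-outs so the
          * sequence cannot stall.
          */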
   2986 void ExynosCameraHWInterface2::m_preCaptureListenerISP(struct camera2_shot_ext * shot_ext)
   2987 {
   2988     // Flash
   2989     switch (m_ctlInfo.flash.m_flashCnt) {
   2990     case IS_FLASH_STATE_ON_WAIT:
   2991         if (shot_ext->shot.dm.flash.decision > 0) {
    2992             // store decision result to skip capture sequence
   2993             ALOGV("(%s): [Flash] IS_FLASH_ON, decision - %d", __FUNCTION__, shot_ext->shot.dm.flash.decision);
   2994             if (shot_ext->shot.dm.flash.decision == 2)
   2995                 m_ctlInfo.flash.m_flashDecisionResult = false;
   2996             else
   2997                 m_ctlInfo.flash.m_flashDecisionResult = true;
   2998             m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_DONE;
   2999         } else {
   3000             if (m_ctlInfo.flash.m_flashTimeOut == 0) {
   3001                 ALOGV("(%s): [Flash] Timeout IS_FLASH_ON, decision is false setting", __FUNCTION__);
   3002                 m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_DONE;
   3003                 m_ctlInfo.flash.m_flashDecisionResult = false;
   3004             } else {
   3005                 m_ctlInfo.flash.m_flashTimeOut--;
   3006             }
   3007         }
   3008         break;
   3009     case IS_FLASH_STATE_AE_AWB_LOCK_WAIT:
   3010         if (shot_ext->shot.dm.aa.awbMode == AA_AWBMODE_LOCKED) {
   3011             ALOGV("(%s): [Flash] FLASH_AUTO_AE_AWB_LOCK_WAIT - %d", __FUNCTION__, shot_ext->shot.dm.aa.awbMode);
   3012             m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_WAIT;
   3013         } else {
   3014             ALOGV("(%s):  [Flash] Waiting : AA_AWBMODE_LOCKED", __FUNCTION__);
   3015         }
   3016         break;
   3017     case IS_FLASH_STATE_CAPTURE_WAIT:
   3018         if (m_ctlInfo.flash.m_flashDecisionResult) {
   3019             if (shot_ext->shot.dm.flash.firingStable) {
   3020                 m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_JPEG;
   3021             } else {
   3022                 if (m_ctlInfo.flash.m_flashTimeOut == 0) {
   3023                     ALOGE("(%s): [Flash] Wait firingStable time-out!!", __FUNCTION__);
   3024                     m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_JPEG;
   3025                 } else {
   3026                     ALOGV("(%s): [Flash] Wait firingStable - %d", __FUNCTION__, m_ctlInfo.flash.m_flashTimeOut);
   3027                     m_ctlInfo.flash.m_flashTimeOut--;
   3028                 }
   3029             }
   3030         } else {
   3031             m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_JPEG;
   3032         }
   3033         break;
   3034     }
   3035 }
   3036 
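         // Adjusts the reported aeState during pre-capture: with flash off (AA_AEMODE_ON)
         // the capture is reported as converged, and in auto-flash mode it is reported as
         // FLASH_REQUIRED once the pre-flash has run and flash was judged necessary.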
   3037 void ExynosCameraHWInterface2::m_preCaptureAeState(struct camera2_shot_ext * shot_ext)
   3038 {
   3039     switch (m_ctlInfo.flash.i_flashMode) {
   3040     case AA_AEMODE_ON:
    3041         // In flash off mode, capture can be done as a ZSL capture
   3042         shot_ext->shot.dm.aa.aeState = AE_STATE_CONVERGED;
   3043         break;
   3044     case AA_AEMODE_ON_AUTO_FLASH:
    3045         // In flash auto mode, the main flash has to fire if the pre-flash was done.
   3046         if (m_ctlInfo.flash.m_flashDecisionResult && m_ctlInfo.flash.m_afFlashDoneFlg)
   3047             shot_ext->shot.dm.aa.aeState = AE_STATE_FLASH_REQUIRED;
   3048         break;
   3049     }
   3050 }
   3051 
   3052 void ExynosCameraHWInterface2::m_updateAfRegion(struct camera2_shot_ext * shot_ext)
   3053 {
   3054     shot_ext->shot.ctl.aa.afRegions[0] = currentAfRegion[0];
   3055     shot_ext->shot.ctl.aa.afRegions[1] = currentAfRegion[1];
   3056     shot_ext->shot.ctl.aa.afRegions[2] = currentAfRegion[2];
   3057     shot_ext->shot.ctl.aa.afRegions[3] = currentAfRegion[3];
   3058 }
   3059 
   3060 void ExynosCameraHWInterface2::SetAfRegion(uint32_t * afRegion)
   3061 {
   3062     currentAfRegion[0] = afRegion[0];
   3063     currentAfRegion[1] = afRegion[1];
   3064     currentAfRegion[2] = afRegion[2];
   3065     currentAfRegion[3] = afRegion[3];
   3066 }
   3067 
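         // Issues an AF trigger towards the ISP: checks the HAL AF state (a restart is
         // allowed while scanning; otherwise a non-flash trigger expects
         // HAL_AFSTATE_NEEDS_COMMAND), then sets afTrigger/afMode in the shot.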
   3068 void ExynosCameraHWInterface2::m_afTrigger(struct camera2_shot_ext * shot_ext, int mode)
   3069 {
   3070     if (m_afState == HAL_AFSTATE_SCANNING) {
   3071         ALOGD("(%s): restarting trigger ", __FUNCTION__);
   3072     } else if (!mode) {
   3073         if (m_afState != HAL_AFSTATE_NEEDS_COMMAND)
   3074             ALOGD("(%s): wrong trigger state %d", __FUNCTION__, m_afState);
   3075         else
   3076             m_afState = HAL_AFSTATE_STARTED;
   3077     }
   3078     ALOGD("### AF Triggering with mode (%d) (%d)", m_afMode, m_afState);
   3079     shot_ext->shot.ctl.aa.afTrigger = 1;
   3080     shot_ext->shot.ctl.aa.afMode = m_afMode;
   3081     m_IsAfTriggerRequired = false;
   3082 }
   3083 
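         /*
          * Sensor thread handler. For each SIGNAL_SENSOR_START_REQ_PROCESSING it
          * dequeues one Bayer buffer from the sensor node, matches it to a pending
          * request, patches the per-frame shot controls (zoom/crop region, AF mode
          * and trigger, flash/torch handling, night capture, frame duration),
          * round-trips the buffer through the ISP, signals the stream threads that
          * output data is coming, and finally requeues the buffer to the sensor.
          * When no request matches, a "bubble" shot based on the dummy controls is
          * sent instead so the sensor/ISP pipeline keeps running.
          */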
   3084 void ExynosCameraHWInterface2::m_sensorThreadFunc(SignalDrivenThread * self)
   3085 {
   3086     uint32_t        currentSignal = self->GetProcessingSignal();
   3087     SensorThread *  selfThread      = ((SensorThread*)self);
   3088     int index;
   3089     int index_isp;
   3090     status_t res;
   3091     nsecs_t frameTime;
   3092     int bayersOnSensor = 0, bayersOnIsp = 0;
   3093     int j = 0;
   3094     bool isCapture = false;
   3095     ALOGV("DEBUG(%s): m_sensorThreadFunc (%x)", __FUNCTION__, currentSignal);
   3096 
   3097     if (currentSignal & SIGNAL_THREAD_RELEASE) {
   3098         CAM_LOGD("(%s): ENTER processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
   3099 
   3100         ALOGV("(%s): calling sensor streamoff", __FUNCTION__);
   3101         cam_int_streamoff(&(m_camera_info.sensor));
   3102         ALOGV("(%s): calling sensor streamoff done", __FUNCTION__);
   3103 
   3104         m_camera_info.sensor.buffers = 0;
   3105         ALOGV("DEBUG(%s): sensor calling reqbuf 0 ", __FUNCTION__);
   3106         cam_int_reqbufs(&(m_camera_info.sensor));
   3107         ALOGV("DEBUG(%s): sensor calling reqbuf 0 done", __FUNCTION__);
   3108         m_camera_info.sensor.status = false;
   3109 
   3110         ALOGV("(%s): calling ISP streamoff", __FUNCTION__);
   3111         isp_int_streamoff(&(m_camera_info.isp));
   3112         ALOGV("(%s): calling ISP streamoff done", __FUNCTION__);
   3113 
   3114         m_camera_info.isp.buffers = 0;
   3115         ALOGV("DEBUG(%s): isp calling reqbuf 0 ", __FUNCTION__);
   3116         cam_int_reqbufs(&(m_camera_info.isp));
   3117         ALOGV("DEBUG(%s): isp calling reqbuf 0 done", __FUNCTION__);
   3118 
   3119         exynos_v4l2_s_ctrl(m_camera_info.sensor.fd, V4L2_CID_IS_S_STREAM, IS_DISABLE_STREAM);
   3120 
   3121         m_requestManager->releaseSensorQ();
   3122         m_requestManager->ResetEntry();
   3123         ALOGV("(%s): EXIT processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
   3124         selfThread->SetSignal(SIGNAL_THREAD_TERMINATE);
   3125         return;
   3126     }
   3127 
   3128     if (currentSignal & SIGNAL_SENSOR_START_REQ_PROCESSING)
   3129     {
   3130         ALOGV("DEBUG(%s): SensorThread processing SIGNAL_SENSOR_START_REQ_PROCESSING", __FUNCTION__);
   3131         int targetStreamIndex = 0, i=0;
   3132         int matchedFrameCnt = -1, processingReqIndex;
   3133         struct camera2_shot_ext *shot_ext;
   3134         struct camera2_shot_ext *shot_ext_capture;
   3135         bool triggered = false;
   3136 
   3137         /* dqbuf from sensor */
   3138         ALOGV("Sensor DQbuf start");
   3139         index = cam_int_dqbuf(&(m_camera_info.sensor));
   3140         m_requestManager->pushSensorQ(index);
   3141         ALOGV("Sensor DQbuf done(%d)", index);
   3142         shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]);
   3143 
   3144         if (m_nightCaptureCnt != 0) {
   3145             matchedFrameCnt = m_nightCaptureFrameCnt;
   3146         } else if (m_ctlInfo.flash.m_flashCnt >= IS_FLASH_STATE_CAPTURE) {
   3147             matchedFrameCnt = m_ctlInfo.flash.m_flashFrameCount;
   3148             ALOGV("Skip frame, request is fixed at %d", matchedFrameCnt);
   3149         } else {
   3150             matchedFrameCnt = m_requestManager->FindFrameCnt(shot_ext);
   3151         }
   3152 
   3153         if (matchedFrameCnt == -1 && m_vdisBubbleCnt > 0) {
   3154             matchedFrameCnt = m_vdisDupFrame;
   3155         }
   3156 
   3157         if (matchedFrameCnt != -1) {
   3158             if (m_vdisBubbleCnt == 0 || m_vdisDupFrame != matchedFrameCnt) {
   3159                 frameTime = systemTime();
   3160                 m_requestManager->RegisterTimestamp(matchedFrameCnt, &frameTime);
   3161                 m_requestManager->UpdateIspParameters(shot_ext, matchedFrameCnt, &m_ctlInfo);
   3162             } else {
    3163                 ALOGV("bubble for vdis: m_vdisDupFrame %d, matchedFrameCnt %d", m_vdisDupFrame, matchedFrameCnt);
   3164             }
   3165 
    3166             // track scene mode changes; prevSceneMode selects face AF mode below in face-priority scene mode
   3167             if (m_ctlInfo.scene.prevSceneMode != shot_ext->shot.ctl.aa.sceneMode) {
   3168                 ALOGV("(%s): Scene mode changed (%d)", __FUNCTION__, shot_ext->shot.ctl.aa.sceneMode);
   3169                 m_ctlInfo.scene.prevSceneMode = shot_ext->shot.ctl.aa.sceneMode;
   3170             }
   3171 
   3172             m_zoomRatio = (float)m_camera2->getSensorW() / (float)shot_ext->shot.ctl.scaler.cropRegion[2];
   3173             float zoomLeft, zoomTop, zoomWidth, zoomHeight;
   3174             int crop_x = 0, crop_y = 0, crop_w = 0, crop_h = 0;
   3175 
   3176             m_getRatioSize(m_camera2->getSensorW(), m_camera2->getSensorH(),
   3177                            m_streamThreads[0]->m_parameters.width, m_streamThreads[0]->m_parameters.height,
   3178                            &crop_x, &crop_y,
   3179                            &crop_w, &crop_h,
   3180                            0);
   3181 
   3182             if (m_streamThreads[0]->m_parameters.width >= m_streamThreads[0]->m_parameters.height) {
   3183                 zoomWidth =  m_camera2->getSensorW() / m_zoomRatio;
   3184                 zoomHeight = zoomWidth *
   3185                         m_streamThreads[0]->m_parameters.height / m_streamThreads[0]->m_parameters.width;
   3186             } else {
   3187                 zoomHeight = m_camera2->getSensorH() / m_zoomRatio;
   3188                 zoomWidth = zoomHeight *
   3189                         m_streamThreads[0]->m_parameters.width / m_streamThreads[0]->m_parameters.height;
   3190             }
   3191             zoomLeft = (crop_w - zoomWidth) / 2;
   3192             zoomTop = (crop_h - zoomHeight) / 2;
   3193 
    3194             int32_t new_cropRegion[3] = { (int32_t)zoomLeft, (int32_t)zoomTop, (int32_t)zoomWidth };
   3195 
   3196             int cropCompensation = (new_cropRegion[0] * 2 + new_cropRegion[2]) - ALIGN(crop_w, 4);
   3197             if (cropCompensation)
   3198                 new_cropRegion[2] -= cropCompensation;
   3199 
   3200             shot_ext->shot.ctl.scaler.cropRegion[0] = new_cropRegion[0];
   3201             shot_ext->shot.ctl.scaler.cropRegion[1] = new_cropRegion[1];
   3202             shot_ext->shot.ctl.scaler.cropRegion[2] = new_cropRegion[2];
   3203             if (m_IsAfModeUpdateRequired && (m_ctlInfo.flash.m_precaptureTriggerId == 0)) {
   3204                 ALOGD("### Applying AF Mode change(Mode %d) ", m_afMode);
   3205                 shot_ext->shot.ctl.aa.afMode = m_afMode;
   3206                 if (m_afMode == AA_AFMODE_CONTINUOUS_VIDEO || m_afMode == AA_AFMODE_CONTINUOUS_PICTURE) {
    3207                     ALOGD("### With automatic trigger for continuous modes");
   3208                     m_afState = HAL_AFSTATE_STARTED;
   3209                     shot_ext->shot.ctl.aa.afTrigger = 1;
   3210                     triggered = true;
   3211                     if ((m_ctlInfo.scene.prevSceneMode == AA_SCENE_MODE_UNSUPPORTED) ||
   3212                             (m_ctlInfo.scene.prevSceneMode == AA_SCENE_MODE_FACE_PRIORITY)) {
   3213                         switch (m_afMode) {
   3214                         case AA_AFMODE_CONTINUOUS_PICTURE:
   3215                             shot_ext->shot.ctl.aa.afMode = AA_AFMODE_CONTINUOUS_PICTURE_FACE;
   3216                             ALOGD("### Face AF Mode change (Mode %d) ", shot_ext->shot.ctl.aa.afMode);
   3217                             break;
   3218                         }
   3219                     }
   3220                     // reset flash result
   3221                     if (m_ctlInfo.flash.m_afFlashDoneFlg) {
   3222                         m_ctlInfo.flash.m_flashEnableFlg = false;
   3223                         m_ctlInfo.flash.m_afFlashDoneFlg = false;
   3224                         m_ctlInfo.flash.m_flashDecisionResult = false;
   3225                         m_ctlInfo.flash.m_flashCnt = 0;
   3226                     }
   3227                     m_ctlInfo.af.m_afTriggerTimeOut = 1;
   3228                 }
   3229 
   3230                 m_IsAfModeUpdateRequired = false;
    3231                 // support infinity focus mode
   3232                 if ((m_afMode == AA_AFMODE_MANUAL) && ( shot_ext->shot.ctl.lens.focusDistance == 0)) {
   3233                     shot_ext->shot.ctl.aa.afMode = AA_AFMODE_INFINITY;
   3234                     shot_ext->shot.ctl.aa.afTrigger = 1;
   3235                     triggered = true;
   3236                 }
   3237                 if (m_afMode2 != NO_CHANGE) {
   3238                     enum aa_afmode tempAfMode = m_afMode2;
   3239                     m_afMode2 = NO_CHANGE;
   3240                     SetAfMode(tempAfMode);
   3241                 }
   3242             }
   3243             else {
   3244                 shot_ext->shot.ctl.aa.afMode = NO_CHANGE;
   3245             }
   3246             if (m_IsAfTriggerRequired) {
   3247                 if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
   3248                     // flash case
   3249                     if (m_ctlInfo.flash.m_flashCnt == IS_FLASH_STATE_ON_DONE) {
   3250                         if ((m_afMode != AA_AFMODE_AUTO) && (m_afMode != AA_AFMODE_MACRO)) {
   3251                             // Flash is enabled and start AF
   3252                             m_afTrigger(shot_ext, 1);
   3253                         } else {
   3254                             m_afTrigger(shot_ext, 0);
   3255                         }
   3256                     }
   3257                 } else {
   3258                     // non-flash case
   3259                     m_afTrigger(shot_ext, 0);
   3260                 }
   3261             } else {
   3262                 shot_ext->shot.ctl.aa.afTrigger = 0;
   3263             }
   3264 
   3265             if (m_wideAspect) {
   3266                 shot_ext->setfile = ISS_SUB_SCENARIO_VIDEO;
   3267             } else {
   3268                 shot_ext->setfile = ISS_SUB_SCENARIO_STILL;
   3269             }
   3270             if (triggered)
   3271                 shot_ext->shot.ctl.aa.afTrigger = 1;
   3272 
   3273             // TODO : check collision with AFMode Update
   3274             if (m_IsAfLockRequired) {
   3275                 shot_ext->shot.ctl.aa.afMode = AA_AFMODE_OFF;
   3276                 m_IsAfLockRequired = false;
   3277             }
   3278             ALOGV("### Isp Qbuf start(%d) count (%d), SCP(%d) SCC(%d) DIS(%d) shot_size(%d)",
   3279                 index,
   3280                 shot_ext->shot.ctl.request.frameCount,
   3281                 shot_ext->request_scp,
   3282                 shot_ext->request_scc,
   3283                 shot_ext->dis_bypass, sizeof(camera2_shot));
   3284 
   3285             // update AF region
   3286             m_updateAfRegion(shot_ext);
   3287 
   3288             m_lastSceneMode = shot_ext->shot.ctl.aa.sceneMode;
   3289             if (shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_NIGHT
   3290                     && shot_ext->shot.ctl.aa.aeMode == AA_AEMODE_LOCKED)
   3291                 shot_ext->shot.ctl.aa.aeMode = AA_AEMODE_ON;
   3292             if (m_nightCaptureCnt == 0) {
   3293                 if (shot_ext->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_STILL_CAPTURE
   3294                         && shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_NIGHT) {
   3295                     shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE;
   3296                     shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2;
   3297                     shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
   3298                     m_nightCaptureCnt = 4;
   3299                     m_nightCaptureFrameCnt = matchedFrameCnt;
   3300                     shot_ext->request_scc = 0;
   3301                 }
   3302             }
   3303             else if (m_nightCaptureCnt == 1) {
   3304                 shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE;
   3305                 shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 30;
   3306                 shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
   3307                 m_nightCaptureCnt--;
   3308                 m_nightCaptureFrameCnt = 0;
   3309                 shot_ext->request_scc = 1;
   3310             }
   3311             else if (m_nightCaptureCnt == 2) {
   3312                 shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE;
   3313                 shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2;
   3314                 shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
   3315                 m_nightCaptureCnt--;
   3316                 shot_ext->request_scc = 0;
   3317             }
   3318             else if (m_nightCaptureCnt == 3) {
   3319                 shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE;
   3320                 shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2;
   3321                 shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
   3322                 m_nightCaptureCnt--;
   3323                 shot_ext->request_scc = 0;
   3324             }
   3325             else if (m_nightCaptureCnt == 4) {
   3326                 shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE;
   3327                 shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2;
   3328                 shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
   3329                 m_nightCaptureCnt--;
   3330                 shot_ext->request_scc = 0;
   3331             }
   3332 
   3333             switch (shot_ext->shot.ctl.aa.aeTargetFpsRange[1]) {
   3334             case 15:
   3335                 shot_ext->shot.ctl.sensor.frameDuration = (66666 * 1000);
   3336                 break;
   3337 
   3338             case 24:
   3339                 shot_ext->shot.ctl.sensor.frameDuration = (41666 * 1000);
   3340                 break;
   3341 
   3342             case 25:
   3343                 shot_ext->shot.ctl.sensor.frameDuration = (40000 * 1000);
   3344                 break;
   3345 
   3346             case 30:
   3347             default:
   3348                 shot_ext->shot.ctl.sensor.frameDuration = (33333 * 1000);
   3349                 break;
   3350             }
   3351             shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
   3352 
   3353             // Flash mode
    3354             // While the flash capture sequence runs, request_scc = 1 is held back (skipped)
    3355             // and re-issued by the flash state machine once the flash is stable
   3355             if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH)
   3356                     && (shot_ext->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_STILL_CAPTURE)
   3357                     && (m_cameraId == 0)) {
   3358                 if (!m_ctlInfo.flash.m_flashDecisionResult) {
   3359                     m_ctlInfo.flash.m_flashEnableFlg = false;
   3360                     m_ctlInfo.flash.m_afFlashDoneFlg = false;
   3361                     m_ctlInfo.flash.m_flashCnt = 0;
   3362                 } else if ((m_ctlInfo.flash.m_flashCnt == IS_FLASH_STATE_AUTO_DONE) ||
   3363                                           (m_ctlInfo.flash.m_flashCnt == IS_FLASH_STATE_AUTO_OFF)) {
   3364                     ALOGD("(%s): [Flash] Flash capture start : skip request scc 1#####", __FUNCTION__);
   3365                     shot_ext->request_scc = 0;
   3366                     m_ctlInfo.flash.m_flashFrameCount = matchedFrameCnt;
   3367                     m_ctlInfo.flash.m_flashEnableFlg = true;
   3368                     m_ctlInfo.flash.m_afFlashDoneFlg = false;
   3369                     m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE;
   3370                 } else if (m_ctlInfo.flash.m_flashCnt < IS_FLASH_STATE_AUTO_DONE) {
    3371                     ALOGE("(%s): [Flash] Flash capture error - wrong state (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
   3372                     shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
   3373                     m_ctlInfo.flash.m_flashEnableFlg = false;
   3374                     m_ctlInfo.flash.m_afFlashDoneFlg= false;
   3375                     m_ctlInfo.flash.m_flashCnt = 0;
   3376                 }
   3377             } else if (shot_ext->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_STILL_CAPTURE) {
   3378                 m_ctlInfo.flash.m_flashDecisionResult = false;
   3379             }
   3380 
   3381             if (shot_ext->shot.ctl.flash.flashMode == CAM2_FLASH_MODE_TORCH) {
   3382                 if (m_ctlInfo.flash.m_flashTorchMode == false) {
   3383                     m_ctlInfo.flash.m_flashTorchMode = true;
   3384                 }
   3385             } else {
   3386                 if (m_ctlInfo.flash.m_flashTorchMode == true) {
   3387                     shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_OFF;
   3388                     shot_ext->shot.ctl.flash.firingPower = 0;
   3389                     m_ctlInfo.flash.m_flashTorchMode = false;
   3390                 } else {
   3391                     shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NOP;
   3392                 }
   3393             }
   3394 
   3395             if (shot_ext->isReprocessing) {
   3396                 ALOGV("(%s): Sending signal for Reprocess request", __FUNCTION__);
   3397                 m_currentReprocessOutStreams = shot_ext->shot.ctl.request.outputStreams[0];
   3398                 shot_ext->request_scp = 0;
   3399                 shot_ext->request_scc = 0;
   3400                 m_reprocessingFrameCnt = shot_ext->shot.ctl.request.frameCount;
   3401                 m_ctlInfo.flash.m_flashDecisionResult = false;
   3402                 memcpy(&m_jpegMetadata, (void*)(m_requestManager->GetInternalShotExtByFrameCnt(m_reprocessingFrameCnt)),
   3403                     sizeof(struct camera2_shot_ext));
   3404                 m_streamThreads[1]->SetSignal(SIGNAL_STREAM_REPROCESSING_START);
   3405                 m_ctlInfo.flash.m_flashEnableFlg = false;
   3406             }
   3407 
   3408             if (m_ctlInfo.flash.m_flashEnableFlg) {
   3409                 m_preCaptureListenerSensor(shot_ext);
   3410                 m_preCaptureSetter(shot_ext);
   3411             }
   3412 
    3413             ALOGV("(%s): queued aa(%d) aemode(%d) awb(%d) afmode(%d) trigger(%d)", __FUNCTION__,
    3414                 (int)(shot_ext->shot.ctl.aa.mode), (int)(shot_ext->shot.ctl.aa.aeMode),
    3415                 (int)(shot_ext->shot.ctl.aa.awbMode), (int)(shot_ext->shot.ctl.aa.afMode),
    3416                 (int)(shot_ext->shot.ctl.aa.afTrigger));
   3417 
   3418             if (m_vdisBubbleCnt > 0 && m_vdisDupFrame == matchedFrameCnt) {
   3419                 shot_ext->dis_bypass = 1;
   3420                 shot_ext->dnr_bypass = 1;
   3421                 shot_ext->request_scp = 0;
   3422                 shot_ext->request_scc = 0;
   3423                 m_vdisBubbleCnt--;
   3424                 matchedFrameCnt = -1;
   3425             } else {
   3426                 m_vdisDupFrame = matchedFrameCnt;
   3427             }
   3428             if (m_scpForceSuspended)
   3429                 shot_ext->request_scc = 0;
   3430 
   3431             uint32_t current_scp = shot_ext->request_scp;
   3432             uint32_t current_scc = shot_ext->request_scc;
   3433 
   3434             if (shot_ext->shot.dm.request.frameCount == 0) {
   3435                 CAM_LOGE("ERR(%s): dm.request.frameCount = %d", __FUNCTION__, shot_ext->shot.dm.request.frameCount);
   3436             }
   3437 
   3438             cam_int_qbuf(&(m_camera_info.isp), index);
   3439 
   3440             ALOGV("### isp DQBUF start");
   3441             index_isp = cam_int_dqbuf(&(m_camera_info.isp));
   3442 
   3443             shot_ext = (struct camera2_shot_ext *)(m_camera_info.isp.buffer[index_isp].virt.extP[1]);
   3444 
   3445             if (m_ctlInfo.flash.m_flashEnableFlg)
   3446                 m_preCaptureListenerISP(shot_ext);
   3447 
   3448             ALOGV("### Isp DQbuf done(%d) count (%d), SCP(%d) SCC(%d) dis_bypass(%d) dnr_bypass(%d) shot_size(%d)",
   3449                 index,
   3450                 shot_ext->shot.ctl.request.frameCount,
   3451                 shot_ext->request_scp,
   3452                 shot_ext->request_scc,
   3453                 shot_ext->dis_bypass,
   3454                 shot_ext->dnr_bypass, sizeof(camera2_shot));
   3455 
   3456             ALOGV("(%s): DM aa(%d) aemode(%d) awb(%d) afmode(%d)", __FUNCTION__,
   3457                 (int)(shot_ext->shot.dm.aa.mode), (int)(shot_ext->shot.dm.aa.aeMode),
   3458                 (int)(shot_ext->shot.dm.aa.awbMode),
   3459                 (int)(shot_ext->shot.dm.aa.afMode));
   3460 
   3461 #ifndef ENABLE_FRAME_SYNC
   3462             m_currentOutputStreams = shot_ext->shot.ctl.request.outputStreams[0];
   3463 #endif
   3464 
   3465             if (!shot_ext->fd_bypass) {
   3466                 /* FD orientation axis transformation */
   3467                 for (int i=0; i < CAMERA2_MAX_FACES; i++) {
    3468                     if (shot_ext->shot.dm.stats.faceRectangles[i][0] > 0)
    3469                         shot_ext->shot.dm.stats.faceRectangles[i][0] =
    3470                             (m_camera2->m_curCameraInfo->sensorW * shot_ext->shot.dm.stats.faceRectangles[i][0])
    3471                                 / m_streamThreads[0].get()->m_parameters.width;
    3472                     if (shot_ext->shot.dm.stats.faceRectangles[i][1] > 0)
    3473                         shot_ext->shot.dm.stats.faceRectangles[i][1] =
    3474                             (m_camera2->m_curCameraInfo->sensorH * shot_ext->shot.dm.stats.faceRectangles[i][1])
    3475                                 / m_streamThreads[0].get()->m_parameters.height;
    3476                     if (shot_ext->shot.dm.stats.faceRectangles[i][2] > 0)
    3477                         shot_ext->shot.dm.stats.faceRectangles[i][2] =
    3478                             (m_camera2->m_curCameraInfo->sensorW * shot_ext->shot.dm.stats.faceRectangles[i][2])
    3479                                 / m_streamThreads[0].get()->m_parameters.width;
    3480                     if (shot_ext->shot.dm.stats.faceRectangles[i][3] > 0)
    3481                         shot_ext->shot.dm.stats.faceRectangles[i][3] =
    3482                             (m_camera2->m_curCameraInfo->sensorH * shot_ext->shot.dm.stats.faceRectangles[i][3])
    3483                                 / m_streamThreads[0].get()->m_parameters.height;
   3484                 }
   3485             }
   3486             // aeState control
   3487             if (shot_ext->shot.ctl.aa.sceneMode != AA_SCENE_MODE_NIGHT)
   3488                 m_preCaptureAeState(shot_ext);
   3489 
   3490             // At scene mode face priority
   3491             if (shot_ext->shot.dm.aa.afMode == AA_AFMODE_CONTINUOUS_PICTURE_FACE)
   3492                 shot_ext->shot.dm.aa.afMode = AA_AFMODE_CONTINUOUS_PICTURE;
   3493 
   3494             if (matchedFrameCnt != -1 && m_nightCaptureCnt == 0 && (m_ctlInfo.flash.m_flashCnt < IS_FLASH_STATE_CAPTURE)) {
   3495                 m_requestManager->ApplyDynamicMetadata(shot_ext);
   3496             }
   3497 
   3498             if (current_scc != shot_ext->request_scc) {
   3499                 ALOGD("(%s): scc frame drop1 request_scc(%d to %d)",
   3500                                 __FUNCTION__, current_scc, shot_ext->request_scc);
   3501                 m_requestManager->NotifyStreamOutput(shot_ext->shot.ctl.request.frameCount);
   3502             }
   3503             if (shot_ext->request_scc) {
   3504                 ALOGV("send SIGNAL_STREAM_DATA_COMING (SCC)");
   3505                 if (shot_ext->shot.ctl.request.outputStreams[0] & STREAM_MASK_JPEG) {
   3506                     if (m_ctlInfo.flash.m_flashCnt < IS_FLASH_STATE_CAPTURE)
   3507                         memcpy(&m_jpegMetadata, (void*)(m_requestManager->GetInternalShotExtByFrameCnt(shot_ext->shot.ctl.request.frameCount)),
   3508                             sizeof(struct camera2_shot_ext));
   3509                     else
   3510                         memcpy(&m_jpegMetadata, (void*)shot_ext, sizeof(struct camera2_shot_ext));
   3511                 }
   3512                 m_streamThreads[1]->SetSignal(SIGNAL_STREAM_DATA_COMING);
   3513             }
   3514             if (current_scp != shot_ext->request_scp) {
   3515                 ALOGD("(%s): scp frame drop1 request_scp(%d to %d)",
   3516                                 __FUNCTION__, current_scp, shot_ext->request_scp);
   3517                 m_requestManager->NotifyStreamOutput(shot_ext->shot.ctl.request.frameCount);
   3518             }
   3519             if (shot_ext->request_scp) {
   3520                 ALOGV("send SIGNAL_STREAM_DATA_COMING (SCP)");
   3521                 m_streamThreads[0]->SetSignal(SIGNAL_STREAM_DATA_COMING);
   3522             }
   3523 
   3524             ALOGV("(%s): SCP_CLOSING check sensor(%d) scc(%d) scp(%d) ", __FUNCTION__,
   3525                shot_ext->request_sensor, shot_ext->request_scc, shot_ext->request_scp);
   3526             if (shot_ext->request_scc + shot_ext->request_scp + shot_ext->request_sensor == 0) {
   3527                 ALOGV("(%s): SCP_CLOSING check OK ", __FUNCTION__);
   3528                 m_scp_closed = true;
   3529             }
   3530             else
   3531                 m_scp_closed = false;
   3532 
   3533             OnAfNotification(shot_ext->shot.dm.aa.afState);
   3534             OnPrecaptureMeteringNotificationISP();
   3535         }   else {
   3536             memcpy(&shot_ext->shot.ctl, &m_camera_info.dummy_shot.shot.ctl, sizeof(struct camera2_ctl));
   3537             shot_ext->shot.ctl.request.frameCount = 0xfffffffe;
   3538             shot_ext->request_sensor = 1;
   3539             shot_ext->dis_bypass = 1;
   3540             shot_ext->dnr_bypass = 1;
   3541             shot_ext->fd_bypass = 1;
   3542             shot_ext->drc_bypass = 1;
   3543             shot_ext->request_scc = 0;
   3544             shot_ext->request_scp = 0;
   3545             if (m_wideAspect) {
   3546                 shot_ext->setfile = ISS_SUB_SCENARIO_VIDEO;
   3547             } else {
   3548                 shot_ext->setfile = ISS_SUB_SCENARIO_STILL;
   3549             }
   3550             shot_ext->shot.ctl.aa.sceneMode = (enum aa_scene_mode)m_lastSceneMode;
   3551             if (shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_NIGHT_CAPTURE || shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_NIGHT) {
   3552                 shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 8;
   3553                 shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
   3554             }
   3555             shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
   3556             shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_OFF;
   3557             ALOGV("### isp QBUF start (bubble)");
   3558             ALOGV("bubble: queued  aa(%d) aemode(%d) awb(%d) afmode(%d) trigger(%d)",
   3559                 (int)(shot_ext->shot.ctl.aa.mode), (int)(shot_ext->shot.ctl.aa.aeMode),
   3560                 (int)(shot_ext->shot.ctl.aa.awbMode), (int)(shot_ext->shot.ctl.aa.afMode),
   3561                 (int)(shot_ext->shot.ctl.aa.afTrigger));
   3562 
   3563             cam_int_qbuf(&(m_camera_info.isp), index);
   3564             ALOGV("### isp DQBUF start (bubble)");
   3565             index_isp = cam_int_dqbuf(&(m_camera_info.isp));
   3566             shot_ext = (struct camera2_shot_ext *)(m_camera_info.isp.buffer[index_isp].virt.extP[1]);
   3567             ALOGV("bubble: DM aa(%d) aemode(%d) awb(%d) afmode(%d)",
   3568                 (int)(shot_ext->shot.dm.aa.mode), (int)(shot_ext->shot.dm.aa.aeMode),
   3569                 (int)(shot_ext->shot.dm.aa.awbMode),
   3570                 (int)(shot_ext->shot.dm.aa.afMode));
   3571 
   3572             OnAfNotification(shot_ext->shot.dm.aa.afState);
   3573         }
   3574 
   3575         index = m_requestManager->popSensorQ();
   3576         if(index < 0){
   3577             ALOGE("sensorQ is empty");
   3578             return;
   3579         }
   3580 
   3581         processingReqIndex = m_requestManager->MarkProcessingRequest(&(m_camera_info.sensor.buffer[index]));
   3582         shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]);
   3583         if (m_scp_closing || m_scp_closed) {
   3584             ALOGD("(%s): SCP_CLOSING(%d) SCP_CLOSED(%d)", __FUNCTION__, m_scp_closing, m_scp_closed);
   3585             shot_ext->request_scc = 0;
   3586             shot_ext->request_scp = 0;
   3587             shot_ext->request_sensor = 0;
   3588         }
   3589         cam_int_qbuf(&(m_camera_info.sensor), index);
   3590         ALOGV("Sensor Qbuf done(%d)", index);
   3591 
   3592         if (!m_scp_closing
   3593             && ((matchedFrameCnt == -1) || (processingReqIndex == -1))){
    3594             ALOGV("make bubble shot: matchedFrameCnt(%d) processingReqIndex(%d)",
   3595                                     matchedFrameCnt, processingReqIndex);
   3596             selfThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
   3597         }
   3598     }
   3599     return;
   3600 }
   3601 
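/*
 * m_streamBufferInit:
 * Lazily dequeues the service (window) buffers for a stream thread the first
 * time data arrives and reconciles each buffer's tracked status
 * (REQUIRES_DQ_FROM_SVC/ON_SERVICE -> ON_DRIVER/ON_HAL). It then performs the
 * same one-time dequeue for every attached substream; for the JPEG and
 * preview-callback substreams it also allocates the intermediate ION buffers
 * (m_resizeBuf / m_previewCbBuf) used for color conversion.
 */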
   3602 void ExynosCameraHWInterface2::m_streamBufferInit(SignalDrivenThread *self)
   3603 {
   3604     uint32_t                currentSignal   = self->GetProcessingSignal();
   3605     StreamThread *          selfThread      = ((StreamThread*)self);
   3606     stream_parameters_t     *selfStreamParms =  &(selfThread->m_parameters);
   3607     node_info_t             *currentNode    = selfStreamParms->node;
   3608     substream_parameters_t  *subParms;
   3609     buffer_handle_t * buf = NULL;
   3610     status_t res;
   3611     void *virtAddr[3];
   3612     int i, j;
   3613     int index;
   3614     nsecs_t timestamp;
   3615 
   3616     if (!(selfThread->m_isBufferInit))
   3617     {
   3618         for ( i=0 ; i < selfStreamParms->numSvcBuffers; i++) {
   3619             res = selfStreamParms->streamOps->dequeue_buffer(selfStreamParms->streamOps, &buf);
   3620             if (res != NO_ERROR || buf == NULL) {
   3621                 ALOGE("ERR(%s): Init: unable to dequeue buffer : %d",__FUNCTION__ , res);
   3622                 return;
   3623             }
   3624             ALOGV("DEBUG(%s): got buf(%x) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
   3625                ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
   3626 
   3627             index = selfThread->findBufferIndex(buf);
   3628             if (index == -1) {
   3629                 ALOGE("ERR(%s): could not find buffer index", __FUNCTION__);
   3630             }
   3631             else {
   3632                 ALOGV("DEBUG(%s): found buffer index[%d] - status(%d)",
   3633                     __FUNCTION__, index, selfStreamParms->svcBufStatus[index]);
   3634                 if (selfStreamParms->svcBufStatus[index]== REQUIRES_DQ_FROM_SVC)
   3635                     selfStreamParms->svcBufStatus[index] = ON_DRIVER;
   3636                 else if (selfStreamParms->svcBufStatus[index]== ON_SERVICE)
   3637                     selfStreamParms->svcBufStatus[index] = ON_HAL;
   3638                 else {
   3639                     ALOGV("DBG(%s): buffer status abnormal (%d) "
   3640                         , __FUNCTION__, selfStreamParms->svcBufStatus[index]);
   3641                 }
   3642                 selfStreamParms->numSvcBufsInHal++;
   3643             }
   3644             selfStreamParms->bufIndex = 0;
   3645         }
   3646         selfThread->m_isBufferInit = true;
   3647     }
   3648     for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
   3649         if (selfThread->m_attachedSubStreams[i].streamId == -1)
   3650             continue;
   3651 
   3652         subParms = &m_subStreams[selfThread->m_attachedSubStreams[i].streamId];
   3653         if (subParms->type && subParms->needBufferInit) {
   3654             ALOGV("(%s): [subStream] (id:%d) Buffer Initialization numsvcbuf(%d)",
   3655                 __FUNCTION__, selfThread->m_attachedSubStreams[i].streamId, subParms->numSvcBuffers);
   3656             int checkingIndex = 0;
   3657             bool found = false;
    3658             for (int bufCnt = 0 ; bufCnt < subParms->numSvcBuffers; bufCnt++) {  // separate counter: do not clobber the outer substream index 'i'
   3659                 res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf);
   3660                 if (res != NO_ERROR || buf == NULL) {
   3661                     ALOGE("ERR(%s): Init: unable to dequeue buffer : %d",__FUNCTION__ , res);
   3662                     return;
   3663                 }
   3664                 subParms->numSvcBufsInHal++;
   3665                 ALOGV("DEBUG(%s): [subStream] got buf(%x) bufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
   3666                    subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
   3667 
   3668                 if (m_grallocHal->lock(m_grallocHal, *buf,
   3669                        subParms->usage, 0, 0,
   3670                        subParms->width, subParms->height, virtAddr) != 0) {
   3671                     ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__);
   3672                 }
   3673                 else {
   3674                       ALOGV("DEBUG(%s): [subStream] locked img buf plane0(%x) plane1(%x) plane2(%x)",
   3675                         __FUNCTION__, (unsigned int)virtAddr[0], (unsigned int)virtAddr[1], (unsigned int)virtAddr[2]);
   3676                 }
   3677                 found = false;
   3678                 for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) {
   3679                     if (subParms->svcBufHandle[checkingIndex] == *buf ) {
   3680                         found = true;
   3681                         break;
   3682                     }
   3683                 }
   3684                 ALOGV("DEBUG(%s): [subStream] found(%d) - index[%d]", __FUNCTION__, found, checkingIndex);
   3685                 if (!found) break;
   3686 
   3687                 index = checkingIndex;
   3688 
   3689                 if (index == -1) {
   3690                     ALOGV("ERR(%s): could not find buffer index", __FUNCTION__);
   3691                 }
   3692                 else {
   3693                     ALOGV("DEBUG(%s): found buffer index[%d] - status(%d)",
   3694                         __FUNCTION__, index, subParms->svcBufStatus[index]);
   3695                     if (subParms->svcBufStatus[index]== ON_SERVICE)
   3696                         subParms->svcBufStatus[index] = ON_HAL;
   3697                     else {
   3698                         ALOGV("DBG(%s): buffer status abnormal (%d) "
   3699                             , __FUNCTION__, subParms->svcBufStatus[index]);
   3700                     }
   3701                     if (*buf != subParms->svcBufHandle[index])
   3702                         ALOGV("DBG(%s): different buf_handle index ", __FUNCTION__);
   3703                     else
   3704                         ALOGV("DEBUG(%s): same buf_handle index", __FUNCTION__);
   3705                 }
   3706                 subParms->svcBufIndex = 0;
   3707             }
   3708             if (subParms->type == SUBSTREAM_TYPE_JPEG) {
   3709                 m_resizeBuf.size.extS[0] = ALIGN(subParms->width, 16) * ALIGN(subParms->height, 16) * 2;
   3710                 m_resizeBuf.size.extS[1] = 0;
   3711                 m_resizeBuf.size.extS[2] = 0;
   3712 
   3713                 if (allocCameraMemory(m_ionCameraClient, &m_resizeBuf, 1) == -1) {
   3714                     ALOGE("ERR(%s): Failed to allocate resize buf", __FUNCTION__);
   3715                 }
   3716             }
   3717             if (subParms->type == SUBSTREAM_TYPE_PRVCB) {
   3718                 m_getAlignedYUVSize(HAL_PIXEL_FORMAT_2_V4L2_PIX(subParms->internalFormat), subParms->width,
   3719                 subParms->height, &m_previewCbBuf);
   3720 
   3721                 if (allocCameraMemory(m_ionCameraClient, &m_previewCbBuf, subParms->internalPlanes) == -1) {
   3722                     ALOGE("ERR(%s): Failed to allocate prvcb buf", __FUNCTION__);
   3723                 }
   3724             }
   3725             subParms->needBufferInit= false;
   3726         }
   3727     }
   3728 }
   3729 
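/*
 * m_streamThreadInitialize:
 * Resets a stream thread to a clean state: clears its stream parameters,
 * marks its buffers as uninitialized and detaches all substream slots.
 */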
   3730 void ExynosCameraHWInterface2::m_streamThreadInitialize(SignalDrivenThread * self)
   3731 {
   3732     StreamThread *          selfThread      = ((StreamThread*)self);
   3733     ALOGV("DEBUG(%s): ", __FUNCTION__ );
   3734     memset(&(selfThread->m_parameters), 0, sizeof(stream_parameters_t));
   3735     selfThread->m_isBufferInit = false;
   3736     for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
   3737         selfThread->m_attachedSubStreams[i].streamId    = -1;
   3738         selfThread->m_attachedSubStreams[i].priority    = 0;
   3739     }
   3740     return;
   3741 }
   3742 
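/*
 * m_runSubStreamFunc:
 * Fans a captured source buffer out to the handler for the given substream id
 * (JPEG, RECORD or PRVCB); unknown stream ids are ignored.
 */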
   3743 int ExynosCameraHWInterface2::m_runSubStreamFunc(StreamThread *selfThread, ExynosBuffer *srcImageBuf,
   3744     int stream_id, nsecs_t frameTimeStamp)
   3745 {
   3746     substream_parameters_t  *subParms = &m_subStreams[stream_id];
   3747 
   3748     switch (stream_id) {
   3749 
   3750     case STREAM_ID_JPEG:
   3751         return m_jpegCreator(selfThread, srcImageBuf, frameTimeStamp);
   3752 
   3753     case STREAM_ID_RECORD:
   3754         return m_recordCreator(selfThread, srcImageBuf, frameTimeStamp);
   3755 
   3756     case STREAM_ID_PRVCB:
   3757         return m_prvcbCreator(selfThread, srcImageBuf, frameTimeStamp);
   3758 
   3759     default:
   3760         return 0;
   3761     }
   3762 }
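
/*
 * m_streamFunc_direct:
 * Handler for STREAM_TYPE_DIRECT threads (preview on index 0, ZSL on index 1).
 * On SIGNAL_THREAD_RELEASE it streams off the V4L2 node, drops its buffers and
 * (with ENABLE_FRAME_SYNC) frees the per-buffer metadata planes. On
 * SIGNAL_STREAM_REPROCESSING_START it feeds an acquired reprocess buffer to the
 * attached substreams. On SIGNAL_STREAM_DATA_COMING it dequeues a frame from
 * the driver, runs the attached substreams, enqueues (or cancels) the buffer to
 * the service, and refills the driver queue from freshly dequeued service
 * buffers.
 */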
   3763 void ExynosCameraHWInterface2::m_streamFunc_direct(SignalDrivenThread *self)
   3764 {
   3765     uint32_t                currentSignal   = self->GetProcessingSignal();
   3766     StreamThread *          selfThread      = ((StreamThread*)self);
   3767     stream_parameters_t     *selfStreamParms =  &(selfThread->m_parameters);
   3768     node_info_t             *currentNode    = selfStreamParms->node;
   3769     int i = 0;
   3770     nsecs_t frameTimeStamp;
   3771 
   3772     if (currentSignal & SIGNAL_THREAD_RELEASE) {
   3773         CAM_LOGD("(%s): [%d] START SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index);
   3774 
   3775         if (selfThread->m_isBufferInit) {
   3776             if (!(currentNode->fd == m_camera_info.capture.fd && m_camera_info.capture.status == false)) {
   3777                 ALOGV("(%s): [%d] calling streamoff (fd:%d)", __FUNCTION__,
   3778                     selfThread->m_index, currentNode->fd);
   3779                 if (cam_int_streamoff(currentNode) < 0 ) {
   3780                     ALOGE("ERR(%s): stream off fail", __FUNCTION__);
   3781                 }
   3782                 ALOGV("(%s): [%d] streamoff done and calling reqbuf 0 (fd:%d)", __FUNCTION__,
   3783                         selfThread->m_index, currentNode->fd);
   3784                 currentNode->buffers = 0;
   3785                 cam_int_reqbufs(currentNode);
   3786                 ALOGV("(%s): [%d] reqbuf 0 DONE (fd:%d)", __FUNCTION__,
   3787                         selfThread->m_index, currentNode->fd);
   3788             }
   3789         }
   3790 #ifdef ENABLE_FRAME_SYNC
   3791         // free metabuffers
   3792         for (i = 0; i < NUM_MAX_CAMERA_BUFFERS; i++)
   3793             if (selfStreamParms->metaBuffers[i].fd.extFd[0] != 0) {
   3794                 freeCameraMemory(&(selfStreamParms->metaBuffers[i]), 1);
   3795                 selfStreamParms->metaBuffers[i].fd.extFd[0] = 0;
   3796                 selfStreamParms->metaBuffers[i].size.extS[0] = 0;
   3797             }
   3798 #endif
   3799         selfThread->m_isBufferInit = false;
   3800         selfThread->m_releasing = false;
   3801         selfThread->m_activated = false;
   3802         ALOGV("(%s): [%d] END  SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index);
   3803         return;
   3804     }
   3805     if (currentSignal & SIGNAL_STREAM_REPROCESSING_START) {
   3806         status_t    res;
   3807         buffer_handle_t * buf = NULL;
   3808         bool found = false;
   3809         ALOGV("(%s): streamthread[%d] START SIGNAL_STREAM_REPROCESSING_START",
   3810             __FUNCTION__, selfThread->m_index);
   3811         res = m_reprocessOps->acquire_buffer(m_reprocessOps, &buf);
   3812         if (res != NO_ERROR || buf == NULL) {
   3813             ALOGE("ERR(%s): [reprocess] unable to acquire_buffer : %d",__FUNCTION__ , res);
   3814             return;
   3815         }
   3816         const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
   3817         int checkingIndex = 0;
   3818         for (checkingIndex = 0; checkingIndex < selfStreamParms->numSvcBuffers ; checkingIndex++) {
   3819             if (priv_handle->fd == selfStreamParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
   3820                 found = true;
   3821                 break;
   3822             }
   3823         }
   3824         ALOGV("DEBUG(%s): dequeued buf %x => found(%d) index(%d) ",
   3825             __FUNCTION__, (unsigned int)buf, found, checkingIndex);
   3826 
   3827         if (!found) return;
   3828 
   3829         for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
   3830             if (selfThread->m_attachedSubStreams[i].streamId == -1)
   3831                 continue;
   3832 
   3833 #ifdef ENABLE_FRAME_SYNC
   3834             frameTimeStamp = m_requestManager->GetTimestampByFrameCnt(m_reprocessingFrameCnt);
   3835             m_requestManager->NotifyStreamOutput(m_reprocessingFrameCnt);
   3836 #else
   3837             frameTimeStamp = m_requestManager->GetTimestamp(m_requestManager->GetFrameIndex());
   3838 #endif
   3839             if (m_currentReprocessOutStreams & (1<<selfThread->m_attachedSubStreams[i].streamId))
   3840                 m_runSubStreamFunc(selfThread, &(selfStreamParms->svcBuffers[checkingIndex]),
   3841                     selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp);
   3842         }
   3843 
   3844         res = m_reprocessOps->release_buffer(m_reprocessOps, buf);
   3845         if (res != NO_ERROR) {
   3846             ALOGE("ERR(%s): [reprocess] unable to release_buffer : %d",__FUNCTION__ , res);
   3847             return;
   3848         }
   3849         ALOGV("(%s): streamthread[%d] END   SIGNAL_STREAM_REPROCESSING_START",
   3850             __FUNCTION__,selfThread->m_index);
   3851 
   3852         return;
   3853     }
   3854     if (currentSignal & SIGNAL_STREAM_DATA_COMING) {
   3855         buffer_handle_t * buf = NULL;
   3856         status_t res = 0;
   3857         int i, j;
   3858         int index;
   3859         nsecs_t timestamp;
   3860 #ifdef ENABLE_FRAME_SYNC
   3861         camera2_stream *frame;
   3862         uint8_t currentOutputStreams;
   3863         bool directOutputEnabled = false;
   3864 #endif
   3865         int numOfUndqbuf = 0;
   3866 
   3867         ALOGV("(%s): streamthread[%d] START SIGNAL_STREAM_DATA_COMING", __FUNCTION__,selfThread->m_index);
   3868 
   3869         m_streamBufferInit(self);
   3870 
   3871         do {
   3872             ALOGV("DEBUG(%s): streamthread[%d] type(%d) DQBUF START ",__FUNCTION__,
   3873                 selfThread->m_index, selfThread->streamType);
   3874 
   3875 #ifdef ENABLE_FRAME_SYNC
   3876             selfStreamParms->bufIndex = cam_int_dqbuf(currentNode, selfStreamParms->planes + selfStreamParms->metaPlanes);
   3877             frame = (struct camera2_stream *)(selfStreamParms->metaBuffers[selfStreamParms->bufIndex].virt.extP[0]);
   3878             frameTimeStamp = m_requestManager->GetTimestampByFrameCnt(frame->rcount);
   3879             currentOutputStreams = m_requestManager->GetOutputStreamByFrameCnt(frame->rcount);
   3880             ALOGV("frame count streamthread[%d] : %d, outputStream(%x)", selfThread->m_index, frame->rcount, currentOutputStreams);
   3881             if (((currentOutputStreams & STREAM_MASK_PREVIEW) && selfThread->m_index == 0)||
   3882                  ((currentOutputStreams & STREAM_MASK_ZSL) && selfThread->m_index == 1)) {
   3883                 directOutputEnabled = true;
   3884             }
   3885             if (!directOutputEnabled) {
   3886                 if (!m_nightCaptureFrameCnt)
   3887                     m_requestManager->NotifyStreamOutput(frame->rcount);
   3888             }
   3889 #else
   3890             selfStreamParms->bufIndex = cam_int_dqbuf(currentNode);
    3891             frameTimeStamp = m_requestManager->GetTimestamp(m_requestManager->GetFrameIndex());
   3892 #endif
   3893             ALOGV("DEBUG(%s): streamthread[%d] DQBUF done index(%d)  sigcnt(%d)",__FUNCTION__,
   3894                 selfThread->m_index, selfStreamParms->bufIndex, m_scpOutputSignalCnt);
   3895 
   3896             if (selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] !=  ON_DRIVER)
   3897                 ALOGV("DBG(%s): DQed buffer status abnormal (%d) ",
   3898                        __FUNCTION__, selfStreamParms->svcBufStatus[selfStreamParms->bufIndex]);
   3899             selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_HAL;
   3900 
   3901             for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
   3902                 if (selfThread->m_attachedSubStreams[i].streamId == -1)
   3903                     continue;
   3904 #ifdef ENABLE_FRAME_SYNC
   3905                 if (currentOutputStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) {
   3906                     m_runSubStreamFunc(selfThread, &(selfStreamParms->svcBuffers[selfStreamParms->bufIndex]),
   3907                         selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp);
   3908                 }
   3909 #else
   3910                 if (m_currentOutputStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) {
   3911                     m_runSubStreamFunc(selfThread, &(selfStreamParms->svcBuffers[selfStreamParms->bufIndex]),
   3912                         selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp);
   3913                 }
   3914 #endif
   3915             }
   3916 
   3917             if (m_requestManager->GetSkipCnt() <= 0) {
   3918 #ifdef ENABLE_FRAME_SYNC
   3919                 if ((currentOutputStreams & STREAM_MASK_PREVIEW) && selfThread->m_index == 0) {
   3920                     ALOGV("** Display Preview(frameCnt:%d)", frame->rcount);
   3921                     res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps,
   3922                             frameTimeStamp,
   3923                             &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
   3924                 }
   3925                 else if ((currentOutputStreams & STREAM_MASK_ZSL) && selfThread->m_index == 1) {
   3926                     ALOGV("** SCC output (frameCnt:%d)", frame->rcount);
   3927                     res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps,
   3928                                 frameTimeStamp,
   3929                                 &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
   3930                 }
   3931                 else {
   3932                     res = selfStreamParms->streamOps->cancel_buffer(selfStreamParms->streamOps,
   3933                             &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
   3934                     ALOGV("DEBUG(%s): streamthread[%d] cancel_buffer to svc done res(%d)", __FUNCTION__, selfThread->m_index, res);
   3935                 }
   3936 #else
   3937                 if ((m_currentOutputStreams & STREAM_MASK_PREVIEW) && selfThread->m_index == 0) {
   3938                     ALOGV("** Display Preview(frameCnt:%d)", m_requestManager->GetFrameIndex());
   3939                     res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps,
   3940                             frameTimeStamp,
   3941                             &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
   3942                 }
   3943                 else if ((m_currentOutputStreams & STREAM_MASK_ZSL) && selfThread->m_index == 1) {
    3944                     ALOGV("** SCC output (frameCnt:%d)", m_requestManager->GetFrameIndex());
   3945                     res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps,
   3946                                 frameTimeStamp,
   3947                                 &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
   3948                 }
   3949 #endif
   3950                 ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer to svc done res(%d)", __FUNCTION__, selfThread->m_index, res);
   3951             }
   3952             else {
   3953                 res = selfStreamParms->streamOps->cancel_buffer(selfStreamParms->streamOps,
   3954                         &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
   3955                 ALOGV("DEBUG(%s): streamthread[%d] cancel_buffer to svc done res(%d)", __FUNCTION__, selfThread->m_index, res);
   3956             }
   3957 #ifdef ENABLE_FRAME_SYNC
   3958             if (directOutputEnabled) {
   3959                 if (!m_nightCaptureFrameCnt)
   3960                      m_requestManager->NotifyStreamOutput(frame->rcount);
   3961             }
   3962 #endif
   3963             if (res == 0) {
   3964                 selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_SERVICE;
   3965                 selfStreamParms->numSvcBufsInHal--;
   3966             }
   3967             else {
   3968                 selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_HAL;
   3969             }
   3970 
   3971         }
   3972         while(0);
   3973 
   3974         while ((selfStreamParms->numSvcBufsInHal - (selfStreamParms->numSvcBuffers - NUM_SCP_BUFFERS))
   3975                     < selfStreamParms->minUndequedBuffer) {
   3976             res = selfStreamParms->streamOps->dequeue_buffer(selfStreamParms->streamOps, &buf);
   3977             if (res != NO_ERROR || buf == NULL) {
   3978                 ALOGV("DEBUG(%s): streamthread[%d] dequeue_buffer fail res(%d) numInHal(%d)",__FUNCTION__ , selfThread->m_index,  res, selfStreamParms->numSvcBufsInHal);
   3979                 break;
   3980             }
   3981             selfStreamParms->numSvcBufsInHal++;
   3982             ALOGV("DEBUG(%s): streamthread[%d] got buf(%x) numInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__,
   3983                 selfThread->m_index, (uint32_t)(*buf), selfStreamParms->numSvcBufsInHal,
   3984                ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
   3985             const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
   3986 
   3987             bool found = false;
   3988             int checkingIndex = 0;
   3989             for (checkingIndex = 0; checkingIndex < selfStreamParms->numSvcBuffers ; checkingIndex++) {
   3990                 if (priv_handle->fd == selfStreamParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
   3991                     found = true;
   3992                     break;
   3993                 }
   3994             }
   3995             if (!found) break;
   3996             selfStreamParms->bufIndex = checkingIndex;
   3997             if (selfStreamParms->bufIndex < selfStreamParms->numHwBuffers) {
   3998                 uint32_t    plane_index = 0;
   3999                 ExynosBuffer*  currentBuf = &(selfStreamParms->svcBuffers[selfStreamParms->bufIndex]);
   4000                 struct v4l2_buffer v4l2_buf;
   4001                 struct v4l2_plane  planes[VIDEO_MAX_PLANES];
   4002 
   4003                 v4l2_buf.m.planes   = planes;
   4004                 v4l2_buf.type       = currentNode->type;
   4005                 v4l2_buf.memory     = currentNode->memory;
   4006                 v4l2_buf.index      = selfStreamParms->bufIndex;
   4007                 v4l2_buf.length     = currentNode->planes;
   4008 
   4009                 v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
   4010                 v4l2_buf.m.planes[2].m.fd = priv_handle->fd1;
   4011                 v4l2_buf.m.planes[1].m.fd = priv_handle->fd2;
   4012                 for (plane_index=0 ; plane_index < v4l2_buf.length ; plane_index++) {
   4013                     v4l2_buf.m.planes[plane_index].length  = currentBuf->size.extS[plane_index];
   4014                 }
   4015 #ifdef ENABLE_FRAME_SYNC
   4016                 /* add plane for metadata*/
   4017                 v4l2_buf.length += selfStreamParms->metaPlanes;
   4018                 v4l2_buf.m.planes[v4l2_buf.length-1].m.fd = selfStreamParms->metaBuffers[selfStreamParms->bufIndex].fd.extFd[0];
   4019                 v4l2_buf.m.planes[v4l2_buf.length-1].length = selfStreamParms->metaBuffers[selfStreamParms->bufIndex].size.extS[0];
   4020 #endif
   4021                 if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) {
   4022                     ALOGE("ERR(%s): streamthread[%d] exynos_v4l2_qbuf() fail",
   4023                         __FUNCTION__, selfThread->m_index);
   4024                     return;
   4025                 }
   4026                 selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_DRIVER;
   4027                 ALOGV("DEBUG(%s): streamthread[%d] QBUF done index(%d)",
   4028                     __FUNCTION__, selfThread->m_index, selfStreamParms->bufIndex);
   4029             }
   4030         }
   4031 
   4032         ALOGV("(%s): streamthread[%d] END SIGNAL_STREAM_DATA_COMING", __FUNCTION__,selfThread->m_index);
   4033     }
   4034     return;
   4035 }
   4036 
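/*
 * m_streamFunc_indirect:
 * Handler for STREAM_TYPE_INDIRECT threads (the capture/SCC node). Frames are
 * not handed to the service directly: each dequeued driver buffer is only
 * passed to the attached substreams (JPEG/RECORD/PRVCB) and then immediately
 * queued back to the V4L2 node.
 */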
   4037 void ExynosCameraHWInterface2::m_streamFunc_indirect(SignalDrivenThread *self)
   4038 {
   4039     uint32_t                currentSignal   = self->GetProcessingSignal();
   4040     StreamThread *          selfThread      = ((StreamThread*)self);
   4041     stream_parameters_t     *selfStreamParms =  &(selfThread->m_parameters);
   4042     node_info_t             *currentNode    = selfStreamParms->node;
   4043 
   4044 
   4045     if (currentSignal & SIGNAL_THREAD_RELEASE) {
   4046         CAM_LOGV("(%s): [%d] START SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index);
   4047 
   4048         if (selfThread->m_isBufferInit) {
   4049             if (currentNode->fd == m_camera_info.capture.fd) {
   4050                 if (m_camera_info.capture.status == true) {
   4051                     ALOGV("DEBUG(%s): calling streamthread[%d] streamoff (fd:%d)", __FUNCTION__,
   4052                     selfThread->m_index, currentNode->fd);
   4053                     if (cam_int_streamoff(currentNode) < 0 ){
   4054                         ALOGE("ERR(%s): stream off fail", __FUNCTION__);
   4055                     } else {
   4056                         m_camera_info.capture.status = false;
   4057                     }
   4058                 }
   4059             } else {
   4060                 ALOGV("DEBUG(%s): calling streamthread[%d] streamoff (fd:%d)", __FUNCTION__,
   4061                 selfThread->m_index, currentNode->fd);
   4062                 if (cam_int_streamoff(currentNode) < 0 ){
   4063                     ALOGE("ERR(%s): stream off fail", __FUNCTION__);
   4064                 }
   4065             }
   4066             ALOGV("DEBUG(%s): calling streamthread[%d] streamoff done", __FUNCTION__, selfThread->m_index);
   4067             ALOGV("DEBUG(%s): calling streamthread[%d] reqbuf 0 (fd:%d)", __FUNCTION__,
   4068                     selfThread->m_index, currentNode->fd);
   4069             currentNode->buffers = 0;
   4070             cam_int_reqbufs(currentNode);
   4071             ALOGV("DEBUG(%s): calling streamthread[%d] reqbuf 0 DONE(fd:%d)", __FUNCTION__,
   4072                     selfThread->m_index, currentNode->fd);
   4073         }
   4074 
   4075         selfThread->m_isBufferInit = false;
   4076         selfThread->m_releasing = false;
   4077         selfThread->m_activated = false;
   4078         ALOGV("(%s): [%d] END SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index);
   4079         return;
   4080     }
   4081 
   4082     if (currentSignal & SIGNAL_STREAM_DATA_COMING) {
   4083 #ifdef ENABLE_FRAME_SYNC
   4084         camera2_stream *frame;
   4085         uint8_t currentOutputStreams;
   4086 #endif
   4087         nsecs_t frameTimeStamp;
   4088 
   4089         ALOGV("DEBUG(%s): streamthread[%d] processing SIGNAL_STREAM_DATA_COMING",
   4090             __FUNCTION__,selfThread->m_index);
   4091 
   4092         m_streamBufferInit(self);
   4093 
   4094         ALOGV("DEBUG(%s): streamthread[%d] DQBUF START", __FUNCTION__, selfThread->m_index);
   4095         selfStreamParms->bufIndex = cam_int_dqbuf(currentNode);
   4096         ALOGV("DEBUG(%s): streamthread[%d] DQBUF done index(%d)",__FUNCTION__,
   4097             selfThread->m_index, selfStreamParms->bufIndex);
   4098 
   4099 #ifdef ENABLE_FRAME_SYNC
   4100         frame = (struct camera2_stream *)(currentNode->buffer[selfStreamParms->bufIndex].virt.extP[selfStreamParms->planes -1]);
   4101         frameTimeStamp = m_requestManager->GetTimestampByFrameCnt(frame->rcount);
   4102         currentOutputStreams = m_requestManager->GetOutputStreamByFrameCnt(frame->rcount);
   4103         ALOGV("frame count(SCC) : %d outputStream(%x)",  frame->rcount, currentOutputStreams);
   4104 #else
   4105         frameTimeStamp = m_requestManager->GetTimestamp(m_requestManager->GetFrameIndex());
   4106 #endif
   4107 
   4108         for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
   4109             if (selfThread->m_attachedSubStreams[i].streamId == -1)
   4110                 continue;
   4111 #ifdef ENABLE_FRAME_SYNC
   4112             if (currentOutputStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) {
   4113                 m_requestManager->NotifyStreamOutput(frame->rcount);
   4114                 m_runSubStreamFunc(selfThread, &(currentNode->buffer[selfStreamParms->bufIndex]),
   4115                     selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp);
   4116             }
   4117 #else
   4118             if (m_currentOutputStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) {
   4119                 m_runSubStreamFunc(selfThread, &(currentNode->buffer[selfStreamParms->bufIndex]),
   4120                     selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp);
   4121             }
   4122 #endif
   4123         }
   4124         cam_int_qbuf(currentNode, selfStreamParms->bufIndex);
   4125         ALOGV("DEBUG(%s): streamthread[%d] QBUF DONE", __FUNCTION__, selfThread->m_index);
   4126 
   4127 
   4128 
   4129         ALOGV("DEBUG(%s): streamthread[%d] processing SIGNAL_STREAM_DATA_COMING DONE",
   4130             __FUNCTION__, selfThread->m_index);
   4131     }
   4132 
   4133 
   4134     return;
   4135 }
   4136 
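/*
 * m_streamThreadFunc:
 * Entry point for every stream thread; routes the pending signal to the direct
 * or indirect handler according to the thread's streamType.
 */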
   4137 void ExynosCameraHWInterface2::m_streamThreadFunc(SignalDrivenThread * self)
   4138 {
   4139     uint32_t                currentSignal   = self->GetProcessingSignal();
   4140     StreamThread *          selfThread      = ((StreamThread*)self);
   4141     stream_parameters_t     *selfStreamParms =  &(selfThread->m_parameters);
   4142     node_info_t             *currentNode    = selfStreamParms->node;
   4143 
   4144     ALOGV("DEBUG(%s): m_streamThreadFunc[%d] (%x)", __FUNCTION__, selfThread->m_index, currentSignal);
   4145 
    4146     // Dispatch to the handler for this stream type in the child thread context.
    4147     // TODO: move this dispatch into classes derived from StreamThread to support dynamic stream allocation.
   4148     if (selfThread->streamType == STREAM_TYPE_DIRECT) {
   4149         m_streamFunc_direct(self);
   4150     } else if (selfThread->streamType == STREAM_TYPE_INDIRECT) {
   4151         m_streamFunc_indirect(self);
   4152     }
   4153 
   4154     return;
   4155 }
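
/*
 * m_jpegCreator:
 * Handles the JPEG substream: picks a free (ON_HAL) service buffer, crops and
 * scales the source image into m_resizeBuf with the picture CSC according to
 * the current zoom ratio, encodes it with yuv2Jpeg(), writes the jpeg blob
 * trailer (encoded size + CAMERA2_JPEG_BLOB_ID) at the end of the buffer,
 * enqueues the result to the service, and then re-dequeues service buffers
 * until more than minUndequedBuffer are held by the HAL.
 */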
   4156 int ExynosCameraHWInterface2::m_jpegCreator(StreamThread *selfThread, ExynosBuffer *srcImageBuf, nsecs_t frameTimeStamp)
   4157 {
   4158     stream_parameters_t     *selfStreamParms = &(selfThread->m_parameters);
   4159     substream_parameters_t  *subParms        = &m_subStreams[STREAM_ID_JPEG];
   4160     status_t    res;
   4161     ExynosRect jpegRect;
   4162     bool found = false;
   4163     int srcW, srcH, srcCropX, srcCropY;
   4164     int pictureW, pictureH, pictureFramesize = 0;
   4165     int pictureFormat;
   4166     int cropX, cropY, cropW, cropH = 0;
   4167     ExynosBuffer resizeBufInfo;
   4168     ExynosRect   m_jpegPictureRect;
   4169     buffer_handle_t * buf = NULL;
   4170     camera2_jpeg_blob * jpegBlob = NULL;
   4171     int jpegBufSize = 0;
   4172 
   4173     ALOGV("DEBUG(%s): index(%d)",__FUNCTION__, subParms->svcBufIndex);
    4174     for (int i = 0 ; i < subParms->numSvcBuffers ; i++) {
   4175         if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_HAL) {
   4176             found = true;
   4177             break;
   4178         }
   4179         subParms->svcBufIndex++;
   4180         if (subParms->svcBufIndex >= subParms->numSvcBuffers)
   4181             subParms->svcBufIndex = 0;
   4182     }
   4183     if (!found) {
   4184         ALOGE("(%s): cannot find free svc buffer", __FUNCTION__);
   4185         subParms->svcBufIndex++;
   4186         return 1;
   4187     }
   4188 
   4189     {
   4190         Mutex::Autolock lock(m_jpegEncoderLock);
   4191         m_jpegEncodingCount++;
   4192     }
   4193 
   4194     m_getRatioSize(selfStreamParms->width, selfStreamParms->height,
   4195                     m_streamThreads[0]->m_parameters.width, m_streamThreads[0]->m_parameters.height,
   4196                     &srcCropX, &srcCropY,
   4197                     &srcW, &srcH,
   4198                     0);
   4199 
   4200     m_jpegPictureRect.w = subParms->width;
   4201     m_jpegPictureRect.h = subParms->height;
   4202 
    4203     ALOGV("DEBUG(%s): stream w = %d, h = %d / jpeg w = %d, h = %d",
   4204               __FUNCTION__, selfStreamParms->width, selfStreamParms->height,
   4205                    m_jpegPictureRect.w, m_jpegPictureRect.h);
   4206 
   4207     m_getRatioSize(srcW, srcH,
   4208                    m_jpegPictureRect.w, m_jpegPictureRect.h,
   4209                    &cropX, &cropY,
   4210                    &pictureW, &pictureH,
   4211                    0);
   4212     pictureFormat = V4L2_PIX_FMT_YUYV;
   4213     pictureFramesize = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(pictureFormat), pictureW, pictureH);
   4214 
   4215     if (m_exynosPictureCSC) {
   4216         float zoom_w = 0, zoom_h = 0;
   4217         if (m_zoomRatio == 0)
   4218             m_zoomRatio = 1;
   4219 
   4220         if (m_jpegPictureRect.w >= m_jpegPictureRect.h) {
   4221             zoom_w =  pictureW / m_zoomRatio;
   4222             zoom_h = zoom_w * m_jpegPictureRect.h / m_jpegPictureRect.w;
   4223         } else {
   4224             zoom_h = pictureH / m_zoomRatio;
   4225             zoom_w = zoom_h * m_jpegPictureRect.w / m_jpegPictureRect.h;
   4226         }
   4227         cropX = (srcW - zoom_w) / 2;
   4228         cropY = (srcH - zoom_h) / 2;
   4229         cropW = zoom_w;
   4230         cropH = zoom_h;
   4231 
   4232         ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d",
   4233               __FUNCTION__, cropX, cropY, cropW, cropH);
   4234 
   4235         csc_set_src_format(m_exynosPictureCSC,
   4236                            ALIGN(srcW, 16), ALIGN(srcH, 16),
   4237                            cropX, cropY, cropW, cropH,
   4238                            V4L2_PIX_2_HAL_PIXEL_FORMAT(pictureFormat),
   4239                            0);
   4240 
   4241         csc_set_dst_format(m_exynosPictureCSC,
   4242                            m_jpegPictureRect.w, m_jpegPictureRect.h,
   4243                            0, 0, m_jpegPictureRect.w, m_jpegPictureRect.h,
   4244                            V4L2_PIX_2_HAL_PIXEL_FORMAT(V4L2_PIX_FMT_NV16),
   4245                            0);
   4246         for (int i = 0 ; i < 3 ; i++)
    4247             ALOGV("DEBUG(%s): srcImageBuf->fd.extFd[%d]=%d ",
   4248                 __FUNCTION__, i, srcImageBuf->fd.extFd[i]);
   4249         csc_set_src_buffer(m_exynosPictureCSC,
   4250                            (void **)&srcImageBuf->fd.fd);
   4251 
   4252         csc_set_dst_buffer(m_exynosPictureCSC,
   4253                            (void **)&m_resizeBuf.fd.fd);
   4254         for (int i = 0 ; i < 3 ; i++)
    4255             ALOGV("DEBUG(%s): m_resizeBuf.fd.extFd[%d]=%d m_resizeBuf.size.extS[%d]=%d",
   4256                 __FUNCTION__, i, m_resizeBuf.fd.extFd[i], i, m_resizeBuf.size.extS[i]);
   4257 
   4258         if (csc_convert(m_exynosPictureCSC) != 0)
   4259             ALOGE("ERR(%s): csc_convert() fail", __FUNCTION__);
   4260 
   4261     }
   4262     else {
   4263         ALOGE("ERR(%s): m_exynosPictureCSC == NULL", __FUNCTION__);
   4264     }
   4265 
   4266     resizeBufInfo = m_resizeBuf;
   4267 
   4268     m_getAlignedYUVSize(V4L2_PIX_FMT_NV16, m_jpegPictureRect.w, m_jpegPictureRect.h, &m_resizeBuf);
   4269 
   4270     for (int i = 1; i < 3; i++) {
   4271         if (m_resizeBuf.size.extS[i] != 0)
   4272             m_resizeBuf.fd.extFd[i] = m_resizeBuf.fd.extFd[i-1] + m_resizeBuf.size.extS[i-1];
   4273 
   4274         ALOGV("(%s): m_resizeBuf.size.extS[%d] = %d", __FUNCTION__, i, m_resizeBuf.size.extS[i]);
   4275     }
   4276 
   4277     jpegRect.w = m_jpegPictureRect.w;
   4278     jpegRect.h = m_jpegPictureRect.h;
   4279     jpegRect.colorFormat = V4L2_PIX_FMT_NV16;
   4280 
   4281     for (int j = 0 ; j < 3 ; j++)
   4282         ALOGV("DEBUG(%s): dest buf node  fd.extFd[%d]=%d size=%d virt=%x ",
   4283             __FUNCTION__, j, subParms->svcBuffers[subParms->svcBufIndex].fd.extFd[j],
   4284             (unsigned int)subParms->svcBuffers[subParms->svcBufIndex].size.extS[j],
   4285             (unsigned int)subParms->svcBuffers[subParms->svcBufIndex].virt.extP[j]);
   4286 
   4287     jpegBufSize = subParms->svcBuffers[subParms->svcBufIndex].size.extS[0];
   4288     if (yuv2Jpeg(&m_resizeBuf, &subParms->svcBuffers[subParms->svcBufIndex], &jpegRect) == false) {
   4289         ALOGE("ERR(%s):yuv2Jpeg() fail", __FUNCTION__);
   4290     } else {
   4291         m_resizeBuf = resizeBufInfo;
   4292 
   4293         int jpegSize = subParms->svcBuffers[subParms->svcBufIndex].size.s;
   4294         ALOGD("(%s): (%d x %d) jpegbuf size(%d) encoded size(%d)", __FUNCTION__,
   4295             m_jpegPictureRect.w, m_jpegPictureRect.h, jpegBufSize, jpegSize);
   4296         char * jpegBuffer = (char*)(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0]);
   4297         jpegBlob = (camera2_jpeg_blob*)(&jpegBuffer[jpegBufSize - sizeof(camera2_jpeg_blob)]);
   4298 
   4299         if (jpegBuffer[jpegSize-1] == 0)
   4300             jpegSize--;
   4301         jpegBlob->jpeg_size = jpegSize;
   4302         jpegBlob->jpeg_blob_id = CAMERA2_JPEG_BLOB_ID;
   4303     }
   4304     subParms->svcBuffers[subParms->svcBufIndex].size.extS[0] = jpegBufSize;
   4305     res = subParms->streamOps->enqueue_buffer(subParms->streamOps, frameTimeStamp, &(subParms->svcBufHandle[subParms->svcBufIndex]));
   4306 
   4307     ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer index(%d) to svc done res(%d)",
   4308             __FUNCTION__, selfThread->m_index, subParms->svcBufIndex, res);
   4309     if (res == 0) {
   4310         subParms->svcBufStatus[subParms->svcBufIndex] = ON_SERVICE;
   4311         subParms->numSvcBufsInHal--;
   4312     }
   4313     else {
   4314         subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
   4315     }
   4316 
   4317     while (subParms->numSvcBufsInHal <= subParms->minUndequedBuffer)
   4318     {
   4319         bool found = false;
   4320         int checkingIndex = 0;
   4321 
   4322         ALOGV("DEBUG(%s): jpeg currentBuf#(%d)", __FUNCTION__ , subParms->numSvcBufsInHal);
   4323 
   4324         res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf);
   4325         if (res != NO_ERROR || buf == NULL) {
   4326             ALOGV("DEBUG(%s): jpeg stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index,  res);
   4327             break;
   4328         }
   4329         const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
   4330         subParms->numSvcBufsInHal ++;
   4331         ALOGV("DEBUG(%s): jpeg got buf(%x) numBufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
   4332            subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
   4333 
   4334 
   4335         for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) {
   4336             if (priv_handle->fd == subParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
   4337                 found = true;
   4338                 break;
   4339             }
   4340         }
    4341         ALOGV("DEBUG(%s): jpeg dequeued_buffer found(%d) index(%d)", __FUNCTION__, found, checkingIndex);
   4342 
   4343         if (!found) {
   4344              break;
   4345         }
   4346 
   4347         subParms->svcBufIndex = checkingIndex;
   4348         if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_SERVICE) {
   4349             subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
   4350         }
   4351         else {
   4352             ALOGV("DEBUG(%s): jpeg bufstatus abnormal [%d]  status = %d", __FUNCTION__,
   4353                 subParms->svcBufIndex,  subParms->svcBufStatus[subParms->svcBufIndex]);
   4354         }
   4355     }
   4356     {
   4357         Mutex::Autolock lock(m_jpegEncoderLock);
   4358         m_jpegEncodingCount--;
   4359     }
   4360     return 0;
   4361 }
   4362 
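/*
 * m_recordCreator:
 * Handles the RECORD substream: converts the preview-sized source buffer to
 * the video stream's size and format with the video CSC, enqueues the result
 * to the recording stream, and then tops the HAL-held service buffers back up
 * to minUndequedBuffer.
 */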
   4363 int ExynosCameraHWInterface2::m_recordCreator(StreamThread *selfThread, ExynosBuffer *srcImageBuf, nsecs_t frameTimeStamp)
   4364 {
   4365     stream_parameters_t     *selfStreamParms = &(selfThread->m_parameters);
   4366     substream_parameters_t  *subParms        = &m_subStreams[STREAM_ID_RECORD];
   4367     status_t    res;
   4368     ExynosRect jpegRect;
   4369     bool found = false;
   4370     int cropX, cropY, cropW, cropH = 0;
   4371     buffer_handle_t * buf = NULL;
   4372 
   4373     ALOGV("DEBUG(%s): index(%d)",__FUNCTION__, subParms->svcBufIndex);
    4374     for (int i = 0 ; i < subParms->numSvcBuffers ; i++) {
   4375         if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_HAL) {
   4376             found = true;
   4377             break;
   4378         }
   4379         subParms->svcBufIndex++;
   4380         if (subParms->svcBufIndex >= subParms->numSvcBuffers)
   4381             subParms->svcBufIndex = 0;
   4382     }
   4383     if (!found) {
   4384         ALOGE("(%s): cannot find free svc buffer", __FUNCTION__);
   4385         subParms->svcBufIndex++;
   4386         return 1;
   4387     }
   4388 
   4389     if (m_exynosVideoCSC) {
   4390         int videoW = subParms->width, videoH = subParms->height;
   4391         int cropX, cropY, cropW, cropH = 0;
   4392         int previewW = selfStreamParms->width, previewH = selfStreamParms->height;
   4393         m_getRatioSize(previewW, previewH,
   4394                        videoW, videoH,
   4395                        &cropX, &cropY,
   4396                        &cropW, &cropH,
   4397                        0);
   4398 
   4399         ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d",
   4400                  __FUNCTION__, cropX, cropY, cropW, cropH);
   4401 
   4402         csc_set_src_format(m_exynosVideoCSC,
   4403                            ALIGN(previewW, 32), previewH,
   4404                            cropX, cropY, cropW, cropH,
   4405                            selfStreamParms->format,
   4406                            0);
   4407 
   4408         csc_set_dst_format(m_exynosVideoCSC,
   4409                            videoW, videoH,
   4410                            0, 0, videoW, videoH,
   4411                            subParms->format,
   4412                            1);
   4413 
   4414         csc_set_src_buffer(m_exynosVideoCSC,
   4415                         (void **)&srcImageBuf->fd.fd);
   4416 
   4417         csc_set_dst_buffer(m_exynosVideoCSC,
   4418             (void **)(&(subParms->svcBuffers[subParms->svcBufIndex].fd.fd)));
   4419 
   4420         if (csc_convert(m_exynosVideoCSC) != 0) {
   4421             ALOGE("ERR(%s):csc_convert() fail", __FUNCTION__);
   4422         }
   4423         else {
   4424             ALOGV("(%s):csc_convert() SUCCESS", __FUNCTION__);
   4425         }
   4426     }
   4427     else {
   4428         ALOGE("ERR(%s):m_exynosVideoCSC == NULL", __FUNCTION__);
   4429     }
   4430 
   4431     res = subParms->streamOps->enqueue_buffer(subParms->streamOps, frameTimeStamp, &(subParms->svcBufHandle[subParms->svcBufIndex]));
   4432 
   4433     ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer index(%d) to svc done res(%d)",
   4434             __FUNCTION__, selfThread->m_index, subParms->svcBufIndex, res);
   4435     if (res == 0) {
   4436         subParms->svcBufStatus[subParms->svcBufIndex] = ON_SERVICE;
   4437         subParms->numSvcBufsInHal--;
   4438     }
   4439     else {
   4440         subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
   4441     }
   4442 
   4443     while (subParms->numSvcBufsInHal <= subParms->minUndequedBuffer)
   4444     {
   4445         bool found = false;
   4446         int checkingIndex = 0;
   4447 
   4448         ALOGV("DEBUG(%s): record currentBuf#(%d)", __FUNCTION__ , subParms->numSvcBufsInHal);
   4449 
   4450         res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf);
   4451         if (res != NO_ERROR || buf == NULL) {
   4452             ALOGV("DEBUG(%s): record stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index,  res);
   4453             break;
   4454         }
   4455         const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
   4456         subParms->numSvcBufsInHal ++;
   4457         ALOGV("DEBUG(%s): record got buf(%x) numBufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
   4458            subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
   4459 
   4460         for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) {
   4461             if (priv_handle->fd == subParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
   4462                 found = true;
   4463                 break;
   4464             }
   4465         }
    4466         ALOGV("DEBUG(%s): record dequeued_buffer found(%d) index = %d", __FUNCTION__, found, checkingIndex);
   4467 
   4468         if (!found) {
   4469              break;
   4470         }
   4471 
   4472         subParms->svcBufIndex = checkingIndex;
   4473         if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_SERVICE) {
   4474             subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
   4475         }
   4476         else {
   4477             ALOGV("DEBUG(%s): record bufstatus abnormal [%d]  status = %d", __FUNCTION__,
   4478                 subParms->svcBufIndex,  subParms->svcBufStatus[subParms->svcBufIndex]);
   4479         }
   4480     }
   4481     return 0;
   4482 }
   4483 
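/*
 * m_prvcbCreator:
 * Handles the preview-callback substream. For YCrCb_420_SP output the frame is
 * converted with the video CSC into m_previewCbBuf and copied into the service
 * buffer; for YV12 the planes are copied directly (line by line when the width
 * is not 32-byte aligned). The result is enqueued to the callback stream and
 * the HAL-held service buffers are topped back up.
 */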
   4484 int ExynosCameraHWInterface2::m_prvcbCreator(StreamThread *selfThread, ExynosBuffer *srcImageBuf, nsecs_t frameTimeStamp)
   4485 {
   4486     stream_parameters_t     *selfStreamParms = &(selfThread->m_parameters);
   4487     substream_parameters_t  *subParms        = &m_subStreams[STREAM_ID_PRVCB];
   4488     status_t    res;
   4489     bool found = false;
   4490     int cropX, cropY, cropW, cropH = 0;
   4491     buffer_handle_t * buf = NULL;
   4492 
   4493     ALOGV("DEBUG(%s): index(%d)",__FUNCTION__, subParms->svcBufIndex);
    4494     for (int i = 0 ; i < subParms->numSvcBuffers ; i++) {
   4495         if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_HAL) {
   4496             found = true;
   4497             break;
   4498         }
   4499         subParms->svcBufIndex++;
   4500         if (subParms->svcBufIndex >= subParms->numSvcBuffers)
   4501             subParms->svcBufIndex = 0;
   4502     }
   4503     if (!found) {
   4504         ALOGE("(%s): cannot find free svc buffer", __FUNCTION__);
   4505         subParms->svcBufIndex++;
   4506         return 1;
   4507     }
   4508 
   4509     if (subParms->format == HAL_PIXEL_FORMAT_YCrCb_420_SP) {
   4510         if (m_exynosVideoCSC) {
   4511             int previewCbW = subParms->width, previewCbH = subParms->height;
   4512             int cropX, cropY, cropW, cropH = 0;
   4513             int previewW = selfStreamParms->width, previewH = selfStreamParms->height;
   4514             m_getRatioSize(previewW, previewH,
   4515                            previewCbW, previewCbH,
   4516                            &cropX, &cropY,
   4517                            &cropW, &cropH,
   4518                            0);
   4519 
   4520             ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d",
   4521                      __FUNCTION__, cropX, cropY, cropW, cropH);
   4522             csc_set_src_format(m_exynosVideoCSC,
   4523                                ALIGN(previewW, 32), previewH,
   4524                                cropX, cropY, cropW, cropH,
   4525                                selfStreamParms->format,
   4526                                0);
   4527 
   4528             csc_set_dst_format(m_exynosVideoCSC,
   4529                                previewCbW, previewCbH,
   4530                                0, 0, previewCbW, previewCbH,
   4531                                subParms->internalFormat,
   4532                                1);
   4533 
   4534             csc_set_src_buffer(m_exynosVideoCSC,
   4535                         (void **)&srcImageBuf->fd.fd);
   4536 
   4537             csc_set_dst_buffer(m_exynosVideoCSC,
   4538                 (void **)(&(m_previewCbBuf.fd.fd)));
   4539 
   4540             if (csc_convert(m_exynosVideoCSC) != 0) {
   4541                 ALOGE("ERR(%s):previewcb csc_convert() fail", __FUNCTION__);
   4542             }
   4543             else {
   4544                 ALOGV("(%s):previewcb csc_convert() SUCCESS", __FUNCTION__);
   4545             }
   4546             if (previewCbW == ALIGN(previewCbW, 16)) {
   4547                 memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0],
   4548                     m_previewCbBuf.virt.extP[0], previewCbW * previewCbH);
   4549                 memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0] + previewCbW * previewCbH,
   4550                     m_previewCbBuf.virt.extP[1], previewCbW * previewCbH / 2 );
   4551             }
   4552             else {
   4553                 // TODO : copy line by line ?
   4554             }
   4555         }
   4556         else {
   4557             ALOGE("ERR(%s):m_exynosVideoCSC == NULL", __FUNCTION__);
   4558         }
   4559     }
   4560     else if (subParms->format == HAL_PIXEL_FORMAT_YV12) {
   4561         int previewCbW = subParms->width, previewCbH = subParms->height;
   4562         int stride = ALIGN(previewCbW, 16);
   4563         int uv_stride = ALIGN(previewCbW/2, 16);
   4564         int c_stride = ALIGN(stride / 2, 16);
   4565 
   4566         if (previewCbW == ALIGN(previewCbW, 32)) {
   4567             memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0],
   4568                 srcImageBuf->virt.extP[0], stride * previewCbH);
   4569             memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0] + stride * previewCbH,
   4570                 srcImageBuf->virt.extP[1], c_stride * previewCbH / 2 );
   4571             memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0] + (stride * previewCbH) + (c_stride * previewCbH / 2),
   4572                 srcImageBuf->virt.extP[2], c_stride * previewCbH / 2 );
   4573         } else {
   4574             char * dstAddr = (char *)(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0]);
   4575             char * srcAddr = (char *)(srcImageBuf->virt.extP[0]);
   4576             for (int i = 0 ; i < previewCbH ; i++) {
   4577                 memcpy(dstAddr, srcAddr, previewCbW);
   4578                 dstAddr += stride;
   4579                 srcAddr += ALIGN(stride, 32);
   4580             }
   4581             dstAddr = (char *)(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0] + stride * previewCbH);
   4582             srcAddr = (char *)(srcImageBuf->virt.extP[1]);
   4583             for (int i = 0 ; i < previewCbH/2 ; i++) {
   4584                 memcpy(dstAddr, srcAddr, previewCbW/2);
   4585                 dstAddr += c_stride;
   4586                 srcAddr += uv_stride;
   4587             }
   4588             srcAddr = (char *)(srcImageBuf->virt.extP[2]);
   4589             for (int i = 0 ; i < previewCbH/2 ; i++) {
   4590                 memcpy(dstAddr, srcAddr, previewCbW/2);
   4591                 dstAddr += c_stride;
   4592                 srcAddr += uv_stride;
   4593             }
   4594         }
   4595     }
   4596     res = subParms->streamOps->enqueue_buffer(subParms->streamOps, frameTimeStamp, &(subParms->svcBufHandle[subParms->svcBufIndex]));
   4597 
   4598     ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer index(%d) to svc done res(%d)",
   4599             __FUNCTION__, selfThread->m_index, subParms->svcBufIndex, res);
   4600     if (res == 0) {
   4601         subParms->svcBufStatus[subParms->svcBufIndex] = ON_SERVICE;
   4602         subParms->numSvcBufsInHal--;
   4603     }
   4604     else {
   4605         subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
   4606     }
   4607 
   4608     while (subParms->numSvcBufsInHal <= subParms->minUndequedBuffer)
   4609     {
   4610         bool found = false;
   4611         int checkingIndex = 0;
   4612 
   4613         ALOGV("DEBUG(%s): prvcb currentBuf#(%d)", __FUNCTION__ , subParms->numSvcBufsInHal);
   4614 
   4615         res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf);
   4616         if (res != NO_ERROR || buf == NULL) {
   4617             ALOGV("DEBUG(%s): prvcb stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index,  res);
   4618             break;
   4619         }
   4620         const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
   4621         subParms->numSvcBufsInHal ++;
   4622         ALOGV("DEBUG(%s): prvcb got buf(%x) numBufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
   4623            subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
   4624 
   4625 
   4626         for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) {
   4627             if (priv_handle->fd == subParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
   4628                 found = true;
   4629                 break;
   4630             }
   4631         }
    4632         ALOGV("DEBUG(%s): prvcb dequeued_buffer found(%d) index = %d", __FUNCTION__, found, checkingIndex);
   4633 
   4634         if (!found) {
   4635              break;
   4636         }
   4637 
   4638         subParms->svcBufIndex = checkingIndex;
   4639         if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_SERVICE) {
   4640             subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
   4641         }
   4642         else {
   4643             ALOGV("DEBUG(%s): prvcb bufstatus abnormal [%d]  status = %d", __FUNCTION__,
   4644                 subParms->svcBufIndex,  subParms->svcBufStatus[subParms->svcBufIndex]);
   4645         }
   4646     }
   4647     return 0;
   4648 }
   4649 
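         // Checks whether the requested thumbnail size (w, h) is in the per-camera
         // support list (SUPPORT_THUMBNAIL_REAR_SIZE for camera 0, otherwise
         // SUPPORT_THUMBNAIL_FRONT_SIZE). Returns false for unsupported sizes so the
         // caller can disable the EXIF thumbnail.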
   4650 bool ExynosCameraHWInterface2::m_checkThumbnailSize(int w, int h)
   4651 {
   4652     int sizeOfSupportList;
   4653 
   4654     //REAR Camera
   4655     if(this->getCameraId() == 0) {
   4656         sizeOfSupportList = sizeof(SUPPORT_THUMBNAIL_REAR_SIZE) / (sizeof(int)*2);
   4657 
   4658         for(int i = 0; i < sizeOfSupportList; i++) {
   4659             if((SUPPORT_THUMBNAIL_REAR_SIZE[i][0] == w) &&(SUPPORT_THUMBNAIL_REAR_SIZE[i][1] == h))
   4660                 return true;
   4661         }
   4662 
   4663     }
   4664     else {
   4665         sizeOfSupportList = sizeof(SUPPORT_THUMBNAIL_FRONT_SIZE) / (sizeof(int)*2);
   4666 
   4667         for(int i = 0; i < sizeOfSupportList; i++) {
   4668             if((SUPPORT_THUMBNAIL_FRONT_SIZE[i][0] == w) &&(SUPPORT_THUMBNAIL_FRONT_SIZE[i][1] == h))
   4669                 return true;
   4670         }
   4671     }
   4672 
   4673     return false;
   4674 }
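
         // Encodes one YUV frame into jpegBuf using ExynosJpegEncoderForCamera.
         // Quality, size, color format and thumbnail settings are taken from
         // m_jpegMetadata / mExifInfo; returns true on success, false on any
         // encoder setup or encode failure.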
   4675 bool ExynosCameraHWInterface2::yuv2Jpeg(ExynosBuffer *yuvBuf,
   4676                             ExynosBuffer *jpegBuf,
   4677                             ExynosRect *rect)
   4678 {
   4679     unsigned char *addr;
   4680 
   4681     ExynosJpegEncoderForCamera jpegEnc;
   4682     bool ret = false;
   4683     int res = 0;
   4684 
   4685     unsigned int *yuvSize = yuvBuf->size.extS;
   4686 
   4687     if (jpegEnc.create()) {
   4688         ALOGE("ERR(%s):jpegEnc.create() fail", __FUNCTION__);
   4689         goto jpeg_encode_done;
   4690     }
   4691 
   4692     if (jpegEnc.setQuality(m_jpegMetadata.shot.ctl.jpeg.quality)) {
   4693         ALOGE("ERR(%s):jpegEnc.setQuality() fail", __FUNCTION__);
   4694         goto jpeg_encode_done;
   4695     }
   4696 
   4697     if (jpegEnc.setSize(rect->w, rect->h)) {
   4698         ALOGE("ERR(%s):jpegEnc.setSize() fail", __FUNCTION__);
   4699         goto jpeg_encode_done;
   4700     }
   4701     ALOGV("%s : width = %d , height = %d\n", __FUNCTION__, rect->w, rect->h);
   4702 
   4703     if (jpegEnc.setColorFormat(rect->colorFormat)) {
   4704         ALOGE("ERR(%s):jpegEnc.setColorFormat() fail", __FUNCTION__);
   4705         goto jpeg_encode_done;
   4706     }
   4707 
   4708     if (jpegEnc.setJpegFormat(V4L2_PIX_FMT_JPEG_422)) {
   4709         ALOGE("ERR(%s):jpegEnc.setJpegFormat() fail", __FUNCTION__);
   4710         goto jpeg_encode_done;
   4711     }
   4712 
   4713     if((m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[0] != 0) && (m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[1] != 0)) {
   4714         mExifInfo.enableThumb = true;
   4715         if(!m_checkThumbnailSize(m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[0], m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[1])) {
   4716             // in the case of unsupported parameter, disable thumbnail
   4717             mExifInfo.enableThumb = false;
   4718         } else {
   4719             m_thumbNailW = m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[0];
   4720             m_thumbNailH = m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[1];
   4721         }
   4722 
   4723         ALOGV("(%s) m_thumbNailW = %d, m_thumbNailH = %d", __FUNCTION__, m_thumbNailW, m_thumbNailH);
   4724 
   4725     } else {
   4726         mExifInfo.enableThumb = false;
   4727     }
   4728 
   4729     if (jpegEnc.setThumbnailSize(m_thumbNailW, m_thumbNailH)) {
    4730         ALOGE("ERR(%s):jpegEnc.setThumbnailSize(%d, %d) fail", __FUNCTION__, m_thumbNailW, m_thumbNailH);
   4731         goto jpeg_encode_done;
   4732     }
   4733 
    4734     ALOGV("(%s):jpegEnc.setThumbnailSize(%d, %d)", __FUNCTION__, m_thumbNailW, m_thumbNailH);
   4735     if (jpegEnc.setThumbnailQuality(m_jpegMetadata.shot.ctl.jpeg.thumbnailQuality)) {
   4736         ALOGE("ERR(%s):jpegEnc.setThumbnailQuality fail", __FUNCTION__);
   4737         goto jpeg_encode_done;
   4738     }
   4739 
   4740     m_setExifChangedAttribute(&mExifInfo, rect, &m_jpegMetadata);
   4741     ALOGV("DEBUG(%s):calling jpegEnc.setInBuf() yuvSize(%d)", __FUNCTION__, *yuvSize);
   4742     if (jpegEnc.setInBuf((int *)&(yuvBuf->fd.fd), &(yuvBuf->virt.p), (int *)yuvSize)) {
   4743         ALOGE("ERR(%s):jpegEnc.setInBuf() fail", __FUNCTION__);
   4744         goto jpeg_encode_done;
   4745     }
   4746     if (jpegEnc.setOutBuf(jpegBuf->fd.fd, jpegBuf->virt.p, jpegBuf->size.extS[0] + jpegBuf->size.extS[1] + jpegBuf->size.extS[2])) {
   4747         ALOGE("ERR(%s):jpegEnc.setOutBuf() fail", __FUNCTION__);
   4748         goto jpeg_encode_done;
   4749     }
   4750 
   4751     if (jpegEnc.updateConfig()) {
   4752         ALOGE("ERR(%s):jpegEnc.updateConfig() fail", __FUNCTION__);
   4753         goto jpeg_encode_done;
   4754     }
   4755 
    4756     if ((res = jpegEnc.encode((int *)&jpegBuf->size.s, &mExifInfo)) != 0) {
   4757         ALOGE("ERR(%s):jpegEnc.encode() fail ret(%d)", __FUNCTION__, res);
   4758         goto jpeg_encode_done;
   4759     }
   4760 
   4761     ret = true;
   4762 
   4763 jpeg_encode_done:
   4764 
   4765     if (jpegEnc.flagCreate() == true)
   4766         jpegEnc.destroy();
   4767 
   4768     return ret;
   4769 }
   4770 
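         // Precapture metering trigger from the framework. For the rear camera with
         // an auto/on flash AE mode this starts the full flash sequence; otherwise
         // pre-capture is skipped and the flash state machine is reset.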
   4771 void ExynosCameraHWInterface2::OnPrecaptureMeteringTriggerStart(int id)
   4772 {
   4773     m_ctlInfo.flash.m_precaptureTriggerId = id;
   4774     m_ctlInfo.ae.aeStateNoti = AE_STATE_INACTIVE;
   4775     if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH) && (m_cameraId == 0)) {
   4776         // flash is required
   4777         switch (m_ctlInfo.flash.m_flashCnt) {
   4778         case IS_FLASH_STATE_AUTO_DONE:
   4779         case IS_FLASH_STATE_AUTO_OFF:
    4780             // Flash capture sequence; AF flash was already executed
   4781             break;
   4782         default:
   4783             // Full flash sequence
   4784             m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON;
   4785             m_ctlInfo.flash.m_flashEnableFlg = true;
   4786             m_ctlInfo.flash.m_flashTimeOut = 0;
   4787         }
   4788     } else {
   4789         // Skip pre-capture in case of non-flash.
   4790         ALOGV("[PreCap] Flash OFF mode ");
   4791         m_ctlInfo.flash.m_flashEnableFlg = false;
   4792         m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_NONE;
   4793     }
   4794     ALOGV("[PreCap] OnPrecaptureMeteringTriggerStart (ID %d) (flag : %d) (cnt : %d)", id, m_ctlInfo.flash.m_flashEnableFlg, m_ctlInfo.flash.m_flashCnt);
   4795     OnPrecaptureMeteringNotificationSensor();
   4796 }
   4797 
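         // AF trigger entry point: dispatches to the AUTO/MACRO, CAF-video or
         // CAF-picture handler according to m_afMode, starting the AF flash
         // sequence first when a flash AE mode is active on the rear camera.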
   4798 void ExynosCameraHWInterface2::OnAfTrigger(int id)
   4799 {
   4800     m_afTriggerId = id;
   4801 
   4802     switch (m_afMode) {
   4803     case AA_AFMODE_AUTO:
   4804     case AA_AFMODE_MACRO:
   4805     case AA_AFMODE_MANUAL:
   4806         ALOGV("[AF] OnAfTrigger - AUTO,MACRO,OFF (Mode %d) ", m_afMode);
    4807         // If flash is enabled, the flash operation is executed before triggering AF
   4808         if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH)
   4809                 && (m_ctlInfo.flash.m_flashEnableFlg == false)
   4810                 && (m_cameraId == 0)) {
   4811             ALOGV("[Flash] AF Flash start with Mode (%d)", m_afMode);
   4812             m_ctlInfo.flash.m_flashEnableFlg = true;
   4813             m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON;
   4814             m_ctlInfo.flash.m_flashDecisionResult = false;
   4815             m_ctlInfo.flash.m_afFlashDoneFlg = true;
   4816         }
   4817         OnAfTriggerAutoMacro(id);
   4818         break;
   4819     case AA_AFMODE_CONTINUOUS_VIDEO:
   4820         ALOGV("[AF] OnAfTrigger - AA_AFMODE_CONTINUOUS_VIDEO (Mode %d) ", m_afMode);
   4821         OnAfTriggerCAFVideo(id);
   4822         break;
   4823     case AA_AFMODE_CONTINUOUS_PICTURE:
   4824         ALOGV("[AF] OnAfTrigger - AA_AFMODE_CONTINUOUS_PICTURE (Mode %d) ", m_afMode);
   4825         OnAfTriggerCAFPicture(id);
   4826         break;
   4827 
   4828     case AA_AFMODE_OFF:
   4829     default:
   4830         break;
   4831     }
   4832 }
   4833 
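         // AF trigger handling for AUTO/MACRO/MANUAL modes: from INACTIVE, SCANNING,
         // PASSIVE_FOCUSED, LOCKED or FAILED the state moves to NEEDS_COMMAND and an
         // AF start command is requested via m_IsAfTriggerRequired.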
   4834 void ExynosCameraHWInterface2::OnAfTriggerAutoMacro(int id)
   4835 {
   4836     int nextState = NO_TRANSITION;
   4837 
   4838     switch (m_afState) {
   4839     case HAL_AFSTATE_INACTIVE:
   4840     case HAL_AFSTATE_PASSIVE_FOCUSED:
   4841     case HAL_AFSTATE_SCANNING:
   4842         nextState = HAL_AFSTATE_NEEDS_COMMAND;
   4843         m_IsAfTriggerRequired = true;
   4844         break;
   4845     case HAL_AFSTATE_NEEDS_COMMAND:
   4846         nextState = NO_TRANSITION;
   4847         break;
   4848     case HAL_AFSTATE_STARTED:
   4849         nextState = NO_TRANSITION;
   4850         break;
   4851     case HAL_AFSTATE_LOCKED:
   4852         nextState = HAL_AFSTATE_NEEDS_COMMAND;
   4853         m_IsAfTriggerRequired = true;
   4854         break;
   4855     case HAL_AFSTATE_FAILED:
   4856         nextState = HAL_AFSTATE_NEEDS_COMMAND;
   4857         m_IsAfTriggerRequired = true;
   4858         break;
   4859     default:
   4860         break;
   4861     }
   4862     ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
   4863     if (nextState != NO_TRANSITION)
   4864         m_afState = nextState;
   4865 }
   4866 
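         // AF trigger handling for continuous-picture mode: a trigger while started
         // or scanning asks for a determination; a trigger on a passively focused
         // frame locks (or fails) immediately based on m_AfHwStateFailed.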
   4867 void ExynosCameraHWInterface2::OnAfTriggerCAFPicture(int id)
   4868 {
   4869     int nextState = NO_TRANSITION;
   4870 
   4871     switch (m_afState) {
   4872     case HAL_AFSTATE_INACTIVE:
   4873         nextState = HAL_AFSTATE_FAILED;
   4874         SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
   4875         break;
   4876     case HAL_AFSTATE_NEEDS_COMMAND:
   4877         // not used
   4878         break;
   4879     case HAL_AFSTATE_STARTED:
   4880         nextState = HAL_AFSTATE_NEEDS_DETERMINATION;
   4881         m_AfHwStateFailed = false;
   4882         break;
   4883     case HAL_AFSTATE_SCANNING:
   4884         nextState = HAL_AFSTATE_NEEDS_DETERMINATION;
   4885         m_AfHwStateFailed = false;
    4886         // If flash is enabled, the flash operation is executed before triggering AF
   4887         if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH)
   4888                 && (m_ctlInfo.flash.m_flashEnableFlg == false)
   4889                 && (m_cameraId == 0)) {
   4890             ALOGV("[AF Flash] AF Flash start with Mode (%d) state (%d) id (%d)", m_afMode, m_afState, id);
   4891             m_ctlInfo.flash.m_flashEnableFlg = true;
   4892             m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON;
   4893             m_ctlInfo.flash.m_flashDecisionResult = false;
   4894             m_ctlInfo.flash.m_afFlashDoneFlg = true;
   4895         }
   4896         break;
   4897     case HAL_AFSTATE_NEEDS_DETERMINATION:
   4898         nextState = NO_TRANSITION;
   4899         break;
   4900     case HAL_AFSTATE_PASSIVE_FOCUSED:
   4901         m_IsAfLockRequired = true;
   4902         if (m_AfHwStateFailed) {
   4903             ALOGE("(%s): [CAF] LAST : fail", __FUNCTION__);
   4904             SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
   4905             nextState = HAL_AFSTATE_FAILED;
   4906         }
   4907         else {
   4908             ALOGV("(%s): [CAF] LAST : success", __FUNCTION__);
   4909             SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
   4910             nextState = HAL_AFSTATE_LOCKED;
   4911         }
   4912         m_AfHwStateFailed = false;
   4913         break;
   4914     case HAL_AFSTATE_LOCKED:
   4915         nextState = NO_TRANSITION;
   4916         break;
   4917     case HAL_AFSTATE_FAILED:
   4918         nextState = NO_TRANSITION;
   4919         break;
   4920     default:
   4921         break;
   4922     }
   4923     ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
   4924     if (nextState != NO_TRANSITION)
   4925         m_afState = nextState;
   4926 }
   4927 
   4928 
   4929 void ExynosCameraHWInterface2::OnAfTriggerCAFVideo(int id)
   4930 {
   4931     int nextState = NO_TRANSITION;
   4932 
   4933     switch (m_afState) {
   4934     case HAL_AFSTATE_INACTIVE:
   4935         nextState = HAL_AFSTATE_FAILED;
   4936         SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
   4937         break;
   4938     case HAL_AFSTATE_NEEDS_COMMAND:
   4939         // not used
   4940         break;
   4941     case HAL_AFSTATE_STARTED:
   4942         m_IsAfLockRequired = true;
   4943         nextState = HAL_AFSTATE_FAILED;
   4944         SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
   4945         break;
   4946     case HAL_AFSTATE_SCANNING:
   4947         m_IsAfLockRequired = true;
   4948         nextState = HAL_AFSTATE_FAILED;
   4949         SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
   4950         break;
   4951     case HAL_AFSTATE_NEEDS_DETERMINATION:
   4952         // not used
   4953         break;
   4954     case HAL_AFSTATE_PASSIVE_FOCUSED:
   4955         m_IsAfLockRequired = true;
   4956         SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
   4957         nextState = HAL_AFSTATE_LOCKED;
   4958         break;
   4959     case HAL_AFSTATE_LOCKED:
   4960         nextState = NO_TRANSITION;
   4961         break;
   4962     case HAL_AFSTATE_FAILED:
   4963         nextState = NO_TRANSITION;
   4964         break;
   4965     default:
   4966         break;
   4967     }
   4968     ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
   4969     if (nextState != NO_TRANSITION)
   4970         m_afState = nextState;
   4971 }
   4972 
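         // Sensor-side precapture notification: sends the one-time
         // AE_STATE_PRECAPTURE / AWB_STATE_CONVERGED callbacks while a precapture
         // trigger is pending and precapture has not been announced yet.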
   4973 void ExynosCameraHWInterface2::OnPrecaptureMeteringNotificationSensor()
   4974 {
   4975     if (m_ctlInfo.flash.m_precaptureTriggerId > 0) {
    4976         // Just notify the start of pre-capture
   4977         if (m_ctlInfo.ae.aeStateNoti != AE_STATE_PRECAPTURE) {
   4978             m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
   4979                         ANDROID_CONTROL_AE_STATE_PRECAPTURE,
   4980                         m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
   4981             ALOGV("(%s) ANDROID_CONTROL_AE_STATE_PRECAPTURE (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
   4982             m_notifyCb(CAMERA2_MSG_AUTOWB,
   4983                         ANDROID_CONTROL_AWB_STATE_CONVERGED,
   4984                         m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
   4985             m_ctlInfo.ae.aeStateNoti = AE_STATE_PRECAPTURE;
   4986         }
   4987     }
   4988 }
   4989 
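         // ISP-side precapture notification: in the flash case it waits for the
         // flash state machine to reach AUTO_DONE/AUTO_OFF before reporting
         // AE_STATE_CONVERGED and clearing the trigger; in the non-flash case it
         // converges as soon as precapture has been announced.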
   4990 void ExynosCameraHWInterface2::OnPrecaptureMeteringNotificationISP()
   4991 {
   4992     if (m_ctlInfo.flash.m_precaptureTriggerId > 0) {
   4993         if (m_ctlInfo.flash.m_flashEnableFlg) {
   4994             // flash case
   4995             switch (m_ctlInfo.flash.m_flashCnt) {
   4996             case IS_FLASH_STATE_AUTO_DONE:
   4997             case IS_FLASH_STATE_AUTO_OFF:
   4998                 if (m_ctlInfo.ae.aeStateNoti == AE_STATE_PRECAPTURE) {
   4999                     // End notification
   5000                     m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
   5001                                     ANDROID_CONTROL_AE_STATE_CONVERGED,
   5002                                     m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
   5003                     ALOGV("(%s) ANDROID_CONTROL_AE_STATE_CONVERGED (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
   5004                     m_notifyCb(CAMERA2_MSG_AUTOWB,
   5005                                     ANDROID_CONTROL_AWB_STATE_CONVERGED,
   5006                                     m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
   5007                     m_ctlInfo.flash.m_precaptureTriggerId = 0;
   5008                 } else {
   5009                     m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
   5010                                     ANDROID_CONTROL_AE_STATE_PRECAPTURE,
   5011                                     m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
   5012                     ALOGV("(%s) ANDROID_CONTROL_AE_STATE_PRECAPTURE (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
   5013                     m_notifyCb(CAMERA2_MSG_AUTOWB,
   5014                                     ANDROID_CONTROL_AWB_STATE_CONVERGED,
   5015                                     m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
   5016                     m_ctlInfo.ae.aeStateNoti = AE_STATE_PRECAPTURE;
   5017                 }
   5018                 break;
   5019             case IS_FLASH_STATE_CAPTURE:
   5020             case IS_FLASH_STATE_CAPTURE_WAIT:
   5021             case IS_FLASH_STATE_CAPTURE_JPEG:
   5022             case IS_FLASH_STATE_CAPTURE_END:
   5023                 ALOGV("(%s) INVALID flash state count. (%d)", __FUNCTION__, (int)m_ctlInfo.flash.m_flashCnt);
   5024                 m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_DONE;
   5025                 m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
   5026                         ANDROID_CONTROL_AE_STATE_CONVERGED,
   5027                         m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
   5028                 m_notifyCb(CAMERA2_MSG_AUTOWB,
   5029                         ANDROID_CONTROL_AWB_STATE_CONVERGED,
   5030                         m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
   5031                 m_ctlInfo.flash.m_precaptureTriggerId = 0;
   5032                 break;
   5033             }
   5034         } else {
   5035             // non-flash case
   5036             if (m_ctlInfo.ae.aeStateNoti == AE_STATE_PRECAPTURE) {
   5037                 m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
   5038                                 ANDROID_CONTROL_AE_STATE_CONVERGED,
   5039                                 m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
   5040                 ALOGV("(%s) ANDROID_CONTROL_AE_STATE_CONVERGED (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
   5041                 m_notifyCb(CAMERA2_MSG_AUTOWB,
   5042                                 ANDROID_CONTROL_AWB_STATE_CONVERGED,
   5043                                 m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
   5044                 m_ctlInfo.flash.m_precaptureTriggerId = 0;
   5045             }
   5046         }
   5047     }
   5048 }
   5049 
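         // AF state notification from the ISP, dispatched per AF mode to the
         // AUTO/MACRO, CAF-picture or CAF-video state-machine handlers below.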
   5050 void ExynosCameraHWInterface2::OnAfNotification(enum aa_afstate noti)
   5051 {
   5052     switch (m_afMode) {
   5053     case AA_AFMODE_AUTO:
   5054     case AA_AFMODE_MACRO:
   5055         OnAfNotificationAutoMacro(noti);
   5056         break;
   5057     case AA_AFMODE_CONTINUOUS_VIDEO:
   5058         OnAfNotificationCAFVideo(noti);
   5059         break;
   5060     case AA_AFMODE_CONTINUOUS_PICTURE:
   5061         OnAfNotificationCAFPicture(noti);
   5062         break;
   5063     case AA_AFMODE_OFF:
   5064     default:
   5065         break;
   5066     }
   5067 }
   5068 
   5069 void ExynosCameraHWInterface2::OnAfNotificationAutoMacro(enum aa_afstate noti)
   5070 {
   5071     int nextState = NO_TRANSITION;
   5072     bool bWrongTransition = false;
   5073 
   5074     if (m_afState == HAL_AFSTATE_INACTIVE || m_afState == HAL_AFSTATE_NEEDS_COMMAND) {
   5075         switch (noti) {
   5076         case AA_AFSTATE_INACTIVE:
   5077         case AA_AFSTATE_ACTIVE_SCAN:
   5078         case AA_AFSTATE_AF_ACQUIRED_FOCUS:
   5079         case AA_AFSTATE_AF_FAILED_FOCUS:
   5080         default:
   5081             nextState = NO_TRANSITION;
   5082             break;
   5083         }
   5084     }
   5085     else if (m_afState == HAL_AFSTATE_STARTED) {
   5086         switch (noti) {
   5087         case AA_AFSTATE_INACTIVE:
   5088             nextState = NO_TRANSITION;
   5089             break;
   5090         case AA_AFSTATE_ACTIVE_SCAN:
   5091             nextState = HAL_AFSTATE_SCANNING;
   5092             SetAfStateForService(ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN);
   5093             break;
   5094         case AA_AFSTATE_AF_ACQUIRED_FOCUS:
   5095             nextState = NO_TRANSITION;
   5096             break;
   5097         case AA_AFSTATE_AF_FAILED_FOCUS:
   5098             nextState = NO_TRANSITION;
   5099             break;
   5100         default:
   5101             bWrongTransition = true;
   5102             break;
   5103         }
   5104     }
   5105     else if (m_afState == HAL_AFSTATE_SCANNING) {
   5106         switch (noti) {
   5107         case AA_AFSTATE_INACTIVE:
   5108             bWrongTransition = true;
   5109             break;
   5110         case AA_AFSTATE_ACTIVE_SCAN:
   5111             nextState = NO_TRANSITION;
   5112             break;
   5113         case AA_AFSTATE_AF_ACQUIRED_FOCUS:
    5114             // If flash mode is enabled, pre-capture metering is executed after AF
   5115             if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
   5116                 switch (m_ctlInfo.flash.m_flashCnt) {
   5117                 case IS_FLASH_STATE_ON_DONE:
   5118                     m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
   5119                     nextState = NO_TRANSITION;
   5120                     break;
   5121                 case IS_FLASH_STATE_AUTO_DONE:
   5122                     m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
   5123                     nextState = HAL_AFSTATE_LOCKED;
   5124                     SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
   5125                     break;
   5126                 default:
   5127                     nextState = NO_TRANSITION;
   5128                 }
   5129             } else {
   5130                 nextState = HAL_AFSTATE_LOCKED;
   5131                 SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
   5132             }
   5133             break;
   5134         case AA_AFSTATE_AF_FAILED_FOCUS:
    5135             // If flash mode is enabled, pre-capture metering is executed after AF
   5136             if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
   5137                 switch (m_ctlInfo.flash.m_flashCnt) {
   5138                 case IS_FLASH_STATE_ON_DONE:
   5139                     m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
   5140                     nextState = NO_TRANSITION;
   5141                     break;
   5142                 case IS_FLASH_STATE_AUTO_DONE:
   5143                     m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
   5144                     nextState = HAL_AFSTATE_FAILED;
   5145                     SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
   5146                     break;
   5147                 default:
   5148                     nextState = NO_TRANSITION;
   5149                 }
   5150             } else {
   5151                 nextState = HAL_AFSTATE_FAILED;
   5152                 SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
   5153             }
   5154             break;
   5155         default:
   5156             bWrongTransition = true;
   5157             break;
   5158         }
   5159     }
   5160     else if (m_afState == HAL_AFSTATE_LOCKED) {
   5161         switch (noti) {
   5162             case AA_AFSTATE_INACTIVE:
   5163             case AA_AFSTATE_ACTIVE_SCAN:
   5164                 bWrongTransition = true;
   5165                 break;
   5166             case AA_AFSTATE_AF_ACQUIRED_FOCUS:
   5167                 nextState = NO_TRANSITION;
   5168                 break;
   5169             case AA_AFSTATE_AF_FAILED_FOCUS:
   5170             default:
   5171                 bWrongTransition = true;
   5172                 break;
   5173         }
   5174     }
   5175     else if (m_afState == HAL_AFSTATE_FAILED) {
   5176         switch (noti) {
   5177             case AA_AFSTATE_INACTIVE:
   5178             case AA_AFSTATE_ACTIVE_SCAN:
   5179             case AA_AFSTATE_AF_ACQUIRED_FOCUS:
   5180                 bWrongTransition = true;
   5181                 break;
   5182             case AA_AFSTATE_AF_FAILED_FOCUS:
   5183                 nextState = NO_TRANSITION;
   5184                 break;
   5185             default:
   5186                 bWrongTransition = true;
   5187                 break;
   5188         }
   5189     }
   5190     if (bWrongTransition) {
   5191         ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__, m_afState, noti);
   5192         return;
   5193     }
   5194     ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__, m_afState, nextState, noti);
   5195     if (nextState != NO_TRANSITION)
   5196         m_afState = nextState;
   5197 }
   5198 
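         // CAF-picture AF state machine. Besides the normal transitions it
         // re-triggers AF (mode off, then back on) if notifications keep arriving
         // while INACTIVE for more than 5 polls, and defers determination results
         // until the AF flash sequence has at least reached IS_FLASH_STATE_ON_DONE.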
   5199 void ExynosCameraHWInterface2::OnAfNotificationCAFPicture(enum aa_afstate noti)
   5200 {
   5201     int nextState = NO_TRANSITION;
   5202     bool bWrongTransition = false;
   5203 
   5204     if (m_afState == HAL_AFSTATE_INACTIVE) {
   5205         switch (noti) {
   5206         case AA_AFSTATE_INACTIVE:
   5207         case AA_AFSTATE_ACTIVE_SCAN:
   5208         case AA_AFSTATE_AF_ACQUIRED_FOCUS:
   5209         case AA_AFSTATE_AF_FAILED_FOCUS:
   5210         default:
   5211             nextState = NO_TRANSITION;
   5212             break;
   5213         }
   5214         // Check AF notification after triggering
   5215         if (m_ctlInfo.af.m_afTriggerTimeOut > 0) {
   5216             if (m_ctlInfo.af.m_afTriggerTimeOut > 5) {
    5217                 ALOGE("(%s) AF notification error - try to re-trigger mode (%d)", __FUNCTION__, m_afMode);
                         enum aa_afmode retriggerMode = m_afMode;  // remember the mode to restore after forcing AF off
    5218                 SetAfMode(AA_AFMODE_OFF);
    5219                 SetAfMode(retriggerMode);
   5220                 m_ctlInfo.af.m_afTriggerTimeOut = 0;
   5221             } else {
   5222                 m_ctlInfo.af.m_afTriggerTimeOut++;
   5223             }
   5224         }
   5225     }
   5226     else if (m_afState == HAL_AFSTATE_STARTED) {
   5227         switch (noti) {
   5228         case AA_AFSTATE_INACTIVE:
   5229             nextState = NO_TRANSITION;
   5230             break;
   5231         case AA_AFSTATE_ACTIVE_SCAN:
   5232             nextState = HAL_AFSTATE_SCANNING;
   5233             SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
   5234             m_ctlInfo.af.m_afTriggerTimeOut = 0;
   5235             break;
   5236         case AA_AFSTATE_AF_ACQUIRED_FOCUS:
   5237             nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
   5238             SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
   5239             m_ctlInfo.af.m_afTriggerTimeOut = 0;
   5240             break;
   5241         case AA_AFSTATE_AF_FAILED_FOCUS:
   5242             //nextState = HAL_AFSTATE_FAILED;
   5243             //SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
   5244             nextState = NO_TRANSITION;
   5245             break;
   5246         default:
   5247             bWrongTransition = true;
   5248             break;
   5249         }
   5250     }
   5251     else if (m_afState == HAL_AFSTATE_SCANNING) {
   5252         switch (noti) {
   5253         case AA_AFSTATE_INACTIVE:
   5254             nextState = NO_TRANSITION;
   5255             break;
   5256         case AA_AFSTATE_ACTIVE_SCAN:
   5257             nextState = NO_TRANSITION;
   5258             m_AfHwStateFailed = false;
   5259             break;
   5260         case AA_AFSTATE_AF_ACQUIRED_FOCUS:
   5261             nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
   5262             m_AfHwStateFailed = false;
   5263             SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
   5264             break;
   5265         case AA_AFSTATE_AF_FAILED_FOCUS:
   5266             nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
   5267             m_AfHwStateFailed = true;
   5268             SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
   5269             break;
   5270         default:
   5271             bWrongTransition = true;
   5272             break;
   5273         }
   5274     }
   5275     else if (m_afState == HAL_AFSTATE_PASSIVE_FOCUSED) {
   5276         switch (noti) {
   5277         case AA_AFSTATE_INACTIVE:
   5278             nextState = NO_TRANSITION;
   5279             break;
   5280         case AA_AFSTATE_ACTIVE_SCAN:
   5281             nextState = HAL_AFSTATE_SCANNING;
   5282             m_AfHwStateFailed = false;
   5283             SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
   5284             break;
   5285         case AA_AFSTATE_AF_ACQUIRED_FOCUS:
   5286             nextState = NO_TRANSITION;
   5287             m_AfHwStateFailed = false;
   5288             break;
   5289         case AA_AFSTATE_AF_FAILED_FOCUS:
   5290             nextState = NO_TRANSITION;
   5291             m_AfHwStateFailed = true;
   5292             break;
   5293         default:
   5294             bWrongTransition = true;
   5295             break;
   5296         }
   5297     }
   5298     else if (m_afState == HAL_AFSTATE_NEEDS_DETERMINATION) {
    5299         // Skip notification in the flash case; wait for the flash-on sequence to finish
   5300         if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
   5301             if (m_ctlInfo.flash.m_flashCnt < IS_FLASH_STATE_ON_DONE)
   5302                 return;
   5303         }
   5304         switch (noti) {
   5305         case AA_AFSTATE_INACTIVE:
   5306             nextState = NO_TRANSITION;
   5307             break;
   5308         case AA_AFSTATE_ACTIVE_SCAN:
   5309             nextState = NO_TRANSITION;
   5310             break;
   5311         case AA_AFSTATE_AF_ACQUIRED_FOCUS:
    5312             // If flash mode is enabled, pre-capture metering is executed after AF
   5313             if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
   5314                 switch (m_ctlInfo.flash.m_flashCnt) {
   5315                 case IS_FLASH_STATE_ON_DONE:
   5316                     ALOGV("[AF Flash] AUTO start with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti);
   5317                     m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
   5318                     nextState = NO_TRANSITION;
   5319                     break;
   5320                 case IS_FLASH_STATE_AUTO_DONE:
   5321                     ALOGV("[AF Flash] AUTO end with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti);
   5322                     m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
   5323                     m_IsAfLockRequired = true;
   5324                     nextState = HAL_AFSTATE_LOCKED;
   5325                     SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
   5326                     break;
   5327                 default:
   5328                     nextState = NO_TRANSITION;
   5329                 }
   5330             } else {
   5331                 m_IsAfLockRequired = true;
   5332                 nextState = HAL_AFSTATE_LOCKED;
   5333                 SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
   5334             }
   5335             break;
   5336         case AA_AFSTATE_AF_FAILED_FOCUS:
    5337             // If flash mode is enabled, pre-capture metering is executed after AF
   5338             if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
   5339                 switch (m_ctlInfo.flash.m_flashCnt) {
   5340                 case IS_FLASH_STATE_ON_DONE:
   5341                     ALOGV("[AF Flash] AUTO start with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti);
   5342                     m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
   5343                     nextState = NO_TRANSITION;
   5344                     break;
   5345                 case IS_FLASH_STATE_AUTO_DONE:
   5346                     ALOGV("[AF Flash] AUTO end with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti);
   5347                     m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
   5348                     m_IsAfLockRequired = true;
   5349                     nextState = HAL_AFSTATE_FAILED;
   5350                     SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
   5351                     break;
   5352                 default:
   5353                     nextState = NO_TRANSITION;
   5354                 }
   5355             } else {
   5356                 m_IsAfLockRequired = true;
   5357                 nextState = HAL_AFSTATE_FAILED;
   5358                 SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
   5359             }
   5360             break;
   5361         default:
   5362             bWrongTransition = true;
   5363             break;
   5364         }
   5365     }
   5366     else if (m_afState == HAL_AFSTATE_LOCKED) {
   5367         switch (noti) {
   5368             case AA_AFSTATE_INACTIVE:
   5369                 nextState = NO_TRANSITION;
   5370                 break;
   5371             case AA_AFSTATE_ACTIVE_SCAN:
   5372                 bWrongTransition = true;
   5373                 break;
   5374             case AA_AFSTATE_AF_ACQUIRED_FOCUS:
   5375                 nextState = NO_TRANSITION;
   5376                 break;
   5377             case AA_AFSTATE_AF_FAILED_FOCUS:
   5378             default:
   5379                 bWrongTransition = true;
   5380                 break;
   5381         }
   5382     }
   5383     else if (m_afState == HAL_AFSTATE_FAILED) {
   5384         switch (noti) {
   5385             case AA_AFSTATE_INACTIVE:
   5386                 bWrongTransition = true;
   5387                 break;
   5388             case AA_AFSTATE_ACTIVE_SCAN:
   5389                 nextState = HAL_AFSTATE_SCANNING;
   5390                 break;
   5391             case AA_AFSTATE_AF_ACQUIRED_FOCUS:
   5392                 bWrongTransition = true;
   5393                 break;
   5394             case AA_AFSTATE_AF_FAILED_FOCUS:
   5395                 nextState = NO_TRANSITION;
   5396                 break;
   5397             default:
   5398                 bWrongTransition = true;
   5399                 break;
   5400         }
   5401     }
   5402     if (bWrongTransition) {
   5403         ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__, m_afState, noti);
   5404         return;
   5405     }
   5406     ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__, m_afState, nextState, noti);
   5407     if (nextState != NO_TRANSITION)
   5408         m_afState = nextState;
   5409 }
   5410 
   5411 void ExynosCameraHWInterface2::OnAfNotificationCAFVideo(enum aa_afstate noti)
   5412 {
   5413     int nextState = NO_TRANSITION;
   5414     bool bWrongTransition = false;
   5415 
   5416     if (m_afState == HAL_AFSTATE_INACTIVE) {
   5417         switch (noti) {
   5418         case AA_AFSTATE_INACTIVE:
   5419         case AA_AFSTATE_ACTIVE_SCAN:
   5420         case AA_AFSTATE_AF_ACQUIRED_FOCUS:
   5421         case AA_AFSTATE_AF_FAILED_FOCUS:
   5422         default:
   5423             nextState = NO_TRANSITION;
   5424             break;
   5425         }
   5426     }
   5427     else if (m_afState == HAL_AFSTATE_STARTED) {
   5428         switch (noti) {
   5429         case AA_AFSTATE_INACTIVE:
   5430             nextState = NO_TRANSITION;
   5431             break;
   5432         case AA_AFSTATE_ACTIVE_SCAN:
   5433             nextState = HAL_AFSTATE_SCANNING;
   5434             SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
   5435             break;
   5436         case AA_AFSTATE_AF_ACQUIRED_FOCUS:
   5437             nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
   5438             SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
   5439             break;
   5440         case AA_AFSTATE_AF_FAILED_FOCUS:
   5441             nextState = HAL_AFSTATE_FAILED;
   5442             SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
   5443             break;
   5444         default:
   5445             bWrongTransition = true;
   5446             break;
   5447         }
   5448     }
   5449     else if (m_afState == HAL_AFSTATE_SCANNING) {
   5450         switch (noti) {
   5451         case AA_AFSTATE_INACTIVE:
   5452             bWrongTransition = true;
   5453             break;
   5454         case AA_AFSTATE_ACTIVE_SCAN:
   5455             nextState = NO_TRANSITION;
   5456             break;
   5457         case AA_AFSTATE_AF_ACQUIRED_FOCUS:
   5458             nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
   5459             SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
   5460             break;
   5461         case AA_AFSTATE_AF_FAILED_FOCUS:
   5462             nextState = NO_TRANSITION;
   5463             break;
   5464         default:
   5465             bWrongTransition = true;
   5466             break;
   5467         }
   5468     }
   5469     else if (m_afState == HAL_AFSTATE_PASSIVE_FOCUSED) {
   5470         switch (noti) {
   5471         case AA_AFSTATE_INACTIVE:
   5472             bWrongTransition = true;
   5473             break;
   5474         case AA_AFSTATE_ACTIVE_SCAN:
   5475             nextState = HAL_AFSTATE_SCANNING;
   5476             SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
   5477             break;
   5478         case AA_AFSTATE_AF_ACQUIRED_FOCUS:
   5479             nextState = NO_TRANSITION;
   5480             break;
   5481         case AA_AFSTATE_AF_FAILED_FOCUS:
   5482             nextState = HAL_AFSTATE_FAILED;
   5483             SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
   5484             // TODO : needs NO_TRANSITION ?
   5485             break;
   5486         default:
   5487             bWrongTransition = true;
   5488             break;
   5489         }
   5490     }
   5491     else if (m_afState == HAL_AFSTATE_NEEDS_DETERMINATION) {
   5492         switch (noti) {
   5493         case AA_AFSTATE_INACTIVE:
   5494             bWrongTransition = true;
   5495             break;
   5496         case AA_AFSTATE_ACTIVE_SCAN:
   5497             nextState = NO_TRANSITION;
   5498             break;
   5499         case AA_AFSTATE_AF_ACQUIRED_FOCUS:
   5500             m_IsAfLockRequired = true;
   5501             nextState = HAL_AFSTATE_LOCKED;
   5502             SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
   5503             break;
   5504         case AA_AFSTATE_AF_FAILED_FOCUS:
   5505             nextState = HAL_AFSTATE_FAILED;
   5506             SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
   5507             break;
   5508         default:
   5509             bWrongTransition = true;
   5510             break;
   5511         }
   5512     }
   5513     else if (m_afState == HAL_AFSTATE_LOCKED) {
   5514         switch (noti) {
   5515             case AA_AFSTATE_INACTIVE:
   5516                 nextState = NO_TRANSITION;
   5517                 break;
   5518             case AA_AFSTATE_ACTIVE_SCAN:
   5519                 bWrongTransition = true;
   5520                 break;
   5521             case AA_AFSTATE_AF_ACQUIRED_FOCUS:
   5522                 nextState = NO_TRANSITION;
   5523                 break;
   5524             case AA_AFSTATE_AF_FAILED_FOCUS:
   5525             default:
   5526                 bWrongTransition = true;
   5527                 break;
   5528         }
   5529     }
   5530     else if (m_afState == HAL_AFSTATE_FAILED) {
   5531         switch (noti) {
   5532             case AA_AFSTATE_INACTIVE:
   5533             case AA_AFSTATE_ACTIVE_SCAN:
   5534             case AA_AFSTATE_AF_ACQUIRED_FOCUS:
   5535                 bWrongTransition = true;
   5536                 break;
   5537             case AA_AFSTATE_AF_FAILED_FOCUS:
   5538                 nextState = NO_TRANSITION;
   5539                 break;
   5540             default:
   5541                 bWrongTransition = true;
   5542                 break;
   5543         }
   5544     }
   5545     if (bWrongTransition) {
   5546         ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__, m_afState, noti);
   5547         return;
   5548     }
   5549     ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__, m_afState, nextState, noti);
   5550     if (nextState != NO_TRANSITION)
   5551         m_afState = nextState;
   5552 }
   5553 
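         // AF cancel entry point: dispatches to the mode-specific cancel handler,
         // which resets the service AF state to INACTIVE and, for the continuous
         // modes, restarts the corresponding CAF mode.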
   5554 void ExynosCameraHWInterface2::OnAfCancel(int id)
   5555 {
   5556     m_afTriggerId = id;
   5557 
   5558     switch (m_afMode) {
   5559     case AA_AFMODE_AUTO:
   5560     case AA_AFMODE_MACRO:
   5561     case AA_AFMODE_OFF:
   5562     case AA_AFMODE_MANUAL:
   5563         OnAfCancelAutoMacro(id);
   5564         break;
   5565     case AA_AFMODE_CONTINUOUS_VIDEO:
   5566         OnAfCancelCAFVideo(id);
   5567         break;
   5568     case AA_AFMODE_CONTINUOUS_PICTURE:
   5569         OnAfCancelCAFPicture(id);
   5570         break;
   5571     default:
   5572         break;
   5573     }
   5574 }
   5575 
   5576 void ExynosCameraHWInterface2::OnAfCancelAutoMacro(int id)
   5577 {
   5578     int nextState = NO_TRANSITION;
   5579 
   5580     if (m_ctlInfo.flash.m_flashEnableFlg  && m_ctlInfo.flash.m_afFlashDoneFlg) {
   5581         m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
   5582     }
   5583     switch (m_afState) {
   5584     case HAL_AFSTATE_INACTIVE:
   5585         nextState = NO_TRANSITION;
   5586         SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
   5587         break;
   5588     case HAL_AFSTATE_NEEDS_COMMAND:
   5589     case HAL_AFSTATE_STARTED:
   5590     case HAL_AFSTATE_SCANNING:
   5591     case HAL_AFSTATE_LOCKED:
   5592     case HAL_AFSTATE_FAILED:
   5593         SetAfMode(AA_AFMODE_OFF);
   5594         SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
   5595         nextState = HAL_AFSTATE_INACTIVE;
   5596         break;
   5597     default:
   5598         break;
   5599     }
   5600     ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
   5601     if (nextState != NO_TRANSITION)
   5602         m_afState = nextState;
   5603 }
   5604 
   5605 void ExynosCameraHWInterface2::OnAfCancelCAFPicture(int id)
   5606 {
   5607     int nextState = NO_TRANSITION;
   5608 
   5609     switch (m_afState) {
   5610     case HAL_AFSTATE_INACTIVE:
   5611         nextState = NO_TRANSITION;
   5612         break;
   5613     case HAL_AFSTATE_NEEDS_COMMAND:
   5614     case HAL_AFSTATE_STARTED:
   5615     case HAL_AFSTATE_SCANNING:
   5616     case HAL_AFSTATE_LOCKED:
   5617     case HAL_AFSTATE_FAILED:
   5618     case HAL_AFSTATE_NEEDS_DETERMINATION:
   5619     case HAL_AFSTATE_PASSIVE_FOCUSED:
   5620         SetAfMode(AA_AFMODE_OFF);
   5621         SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
   5622         SetAfMode(AA_AFMODE_CONTINUOUS_PICTURE);
   5623         nextState = HAL_AFSTATE_INACTIVE;
   5624         break;
   5625     default:
   5626         break;
   5627     }
   5628     ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
   5629     if (nextState != NO_TRANSITION)
   5630         m_afState = nextState;
   5631 }
   5632 
   5633 void ExynosCameraHWInterface2::OnAfCancelCAFVideo(int id)
   5634 {
   5635     int nextState = NO_TRANSITION;
   5636 
   5637     switch (m_afState) {
   5638     case HAL_AFSTATE_INACTIVE:
   5639         nextState = NO_TRANSITION;
   5640         break;
   5641     case HAL_AFSTATE_NEEDS_COMMAND:
   5642     case HAL_AFSTATE_STARTED:
   5643     case HAL_AFSTATE_SCANNING:
   5644     case HAL_AFSTATE_LOCKED:
   5645     case HAL_AFSTATE_FAILED:
   5646     case HAL_AFSTATE_NEEDS_DETERMINATION:
   5647     case HAL_AFSTATE_PASSIVE_FOCUSED:
   5648         SetAfMode(AA_AFMODE_OFF);
   5649         SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
   5650         SetAfMode(AA_AFMODE_CONTINUOUS_VIDEO);
   5651         nextState = HAL_AFSTATE_INACTIVE;
   5652         break;
   5653     default:
   5654         break;
   5655     }
   5656     ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
   5657     if (nextState != NO_TRANSITION)
   5658         m_afState = nextState;
   5659 }
   5660 
   5661 void ExynosCameraHWInterface2::SetAfStateForService(int newState)
   5662 {
   5663     if (m_serviceAfState != newState || newState == 0)
   5664         m_notifyCb(CAMERA2_MSG_AUTOFOCUS, newState, m_afTriggerId, 0, m_callbackCookie);
   5665     m_serviceAfState = newState;
   5666 }
   5667 
   5668 int ExynosCameraHWInterface2::GetAfStateForService()
   5669 {
   5670    return m_serviceAfState;
   5671 }
   5672 
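         // Applies a new AF mode. If a previous mode change is still pending in the
         // ISP (m_IsAfModeUpdateRequired) the request is queued in m_afMode2;
         // otherwise the mode is switched immediately and the AF state is reset.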
   5673 void ExynosCameraHWInterface2::SetAfMode(enum aa_afmode afMode)
   5674 {
   5675     if (m_afMode != afMode) {
   5676         if (m_IsAfModeUpdateRequired && m_afMode != AA_AFMODE_OFF) {
   5677             m_afMode2 = afMode;
   5678             ALOGV("(%s): pending(%d) and new(%d)", __FUNCTION__, m_afMode, afMode);
   5679         }
   5680         else {
   5681             ALOGV("(%s): current(%d) new(%d)", __FUNCTION__, m_afMode, afMode);
   5682             m_IsAfModeUpdateRequired = true;
   5683             m_afMode = afMode;
   5684             SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
   5685             m_afState = HAL_AFSTATE_INACTIVE;
   5686         }
   5687     }
   5688 }
   5689 
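         // Fills the EXIF fields that do not change per capture: maker/model/software
         // from system properties, f-number, aperture, focal length and the default
         // resolution, compression and GPS-version tags.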
   5690 void ExynosCameraHWInterface2::m_setExifFixedAttribute(void)
   5691 {
   5692     char property[PROPERTY_VALUE_MAX];
   5693 
   5694     //2 0th IFD TIFF Tags
   5695     //3 Maker
   5696     property_get("ro.product.brand", property, EXIF_DEF_MAKER);
   5697     strncpy((char *)mExifInfo.maker, property,
   5698                 sizeof(mExifInfo.maker) - 1);
   5699     mExifInfo.maker[sizeof(mExifInfo.maker) - 1] = '\0';
   5700     //3 Model
   5701     property_get("ro.product.model", property, EXIF_DEF_MODEL);
   5702     strncpy((char *)mExifInfo.model, property,
   5703                 sizeof(mExifInfo.model) - 1);
   5704     mExifInfo.model[sizeof(mExifInfo.model) - 1] = '\0';
   5705     //3 Software
   5706     property_get("ro.build.id", property, EXIF_DEF_SOFTWARE);
   5707     strncpy((char *)mExifInfo.software, property,
   5708                 sizeof(mExifInfo.software) - 1);
   5709     mExifInfo.software[sizeof(mExifInfo.software) - 1] = '\0';
   5710 
   5711     //3 YCbCr Positioning
   5712     mExifInfo.ycbcr_positioning = EXIF_DEF_YCBCR_POSITIONING;
   5713 
   5714     //2 0th IFD Exif Private Tags
   5715     //3 F Number
   5716     mExifInfo.fnumber.num = (uint32_t)(m_camera2->m_curCameraInfo->fnumber * EXIF_DEF_FNUMBER_DEN);
   5717     mExifInfo.fnumber.den = EXIF_DEF_FNUMBER_DEN;
   5718     //3 Exposure Program
   5719     mExifInfo.exposure_program = EXIF_DEF_EXPOSURE_PROGRAM;
   5720     //3 Exif Version
   5721     memcpy(mExifInfo.exif_version, EXIF_DEF_EXIF_VERSION, sizeof(mExifInfo.exif_version));
   5722     //3 Aperture
   5723     double av = APEX_FNUM_TO_APERTURE((double)mExifInfo.fnumber.num/mExifInfo.fnumber.den);
   5724     mExifInfo.aperture.num = (uint32_t)(av*EXIF_DEF_APEX_DEN);
   5725     mExifInfo.aperture.den = EXIF_DEF_APEX_DEN;
   5726     //3 Maximum lens aperture
   5727     mExifInfo.max_aperture.num = mExifInfo.aperture.num;
   5728     mExifInfo.max_aperture.den = mExifInfo.aperture.den;
   5729     //3 Lens Focal Length
   5730     mExifInfo.focal_length.num = (uint32_t)(m_camera2->m_curCameraInfo->focalLength * 100);
   5731 
   5732     mExifInfo.focal_length.den = EXIF_DEF_FOCAL_LEN_DEN;
   5733     //3 User Comments
   5734     strcpy((char *)mExifInfo.user_comment, EXIF_DEF_USERCOMMENTS);
   5735     //3 Color Space information
   5736     mExifInfo.color_space = EXIF_DEF_COLOR_SPACE;
   5737     //3 Exposure Mode
   5738     mExifInfo.exposure_mode = EXIF_DEF_EXPOSURE_MODE;
   5739 
   5740     //2 0th IFD GPS Info Tags
   5741     unsigned char gps_version[4] = { 0x02, 0x02, 0x00, 0x00 };
   5742     memcpy(mExifInfo.gps_version_id, gps_version, sizeof(gps_version));
   5743 
    5744     //2 1st IFD TIFF Tags
   5745     mExifInfo.compression_scheme = EXIF_DEF_COMPRESSION;
   5746     mExifInfo.x_resolution.num = EXIF_DEF_RESOLUTION_NUM;
   5747     mExifInfo.x_resolution.den = EXIF_DEF_RESOLUTION_DEN;
   5748     mExifInfo.y_resolution.num = EXIF_DEF_RESOLUTION_NUM;
   5749     mExifInfo.y_resolution.den = EXIF_DEF_RESOLUTION_DEN;
   5750     mExifInfo.resolution_unit = EXIF_DEF_RESOLUTION_UNIT;
   5751 }
   5752 
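         // Fills the per-capture EXIF fields (dimensions, orientation, timestamp,
         // exposure/ISO and the derived APEX values, flash, white balance, scene
         // type, GPS tags and thumbnail size) from the shot metadata of the current
         // request.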
   5753 void ExynosCameraHWInterface2::m_setExifChangedAttribute(exif_attribute_t *exifInfo, ExynosRect *rect,
   5754 	camera2_shot_ext *currentEntry)
   5755 {
   5756     camera2_dm *dm = &(currentEntry->shot.dm);
   5757     camera2_ctl *ctl = &(currentEntry->shot.ctl);
   5758 
    5759     ALOGV("(%s): framecnt(%d) exp(%lld) iso(%d)", __FUNCTION__, ctl->request.frameCount, dm->sensor.exposureTime, dm->aa.isoValue);
   5760     if (!ctl->request.frameCount)
   5761        return;
   5762     //2 0th IFD TIFF Tags
   5763     //3 Width
   5764     exifInfo->width = rect->w;
   5765     //3 Height
   5766     exifInfo->height = rect->h;
   5767     //3 Orientation
   5768     switch (ctl->jpeg.orientation) {
   5769     case 90:
   5770         exifInfo->orientation = EXIF_ORIENTATION_90;
   5771         break;
   5772     case 180:
   5773         exifInfo->orientation = EXIF_ORIENTATION_180;
   5774         break;
   5775     case 270:
   5776         exifInfo->orientation = EXIF_ORIENTATION_270;
   5777         break;
   5778     case 0:
   5779     default:
   5780         exifInfo->orientation = EXIF_ORIENTATION_UP;
   5781         break;
   5782     }
   5783 
   5784     //3 Date time
   5785     time_t rawtime;
   5786     struct tm *timeinfo;
   5787     time(&rawtime);
   5788     timeinfo = localtime(&rawtime);
   5789     strftime((char *)exifInfo->date_time, 20, "%Y:%m:%d %H:%M:%S", timeinfo);
   5790 
   5791     //2 0th IFD Exif Private Tags
   5792     //3 Exposure Time
   5793     int shutterSpeed = (dm->sensor.exposureTime/1000);
   5794 
    5795     // Display an exposure time just above 500 ms as 1/2 s rather than 1 s.
    5796     if (shutterSpeed > 500000)
    5797         shutterSpeed -= 100000;
   5798 
   5799     if (shutterSpeed < 0) {
   5800         shutterSpeed = 100;
   5801     }
   5802 
   5803     exifInfo->exposure_time.num = 1;
    5804     // x us -> 1/x s
   5805     //exifInfo->exposure_time.den = (uint32_t)(1000000 / shutterSpeed);
   5806     exifInfo->exposure_time.den = (uint32_t)((double)1000000 / shutterSpeed);
   5807 
   5808     //3 ISO Speed Rating
   5809     exifInfo->iso_speed_rating = dm->aa.isoValue;
   5810 
   5811     uint32_t av, tv, bv, sv, ev;
   5812     av = APEX_FNUM_TO_APERTURE((double)exifInfo->fnumber.num / exifInfo->fnumber.den);
   5813     tv = APEX_EXPOSURE_TO_SHUTTER((double)exifInfo->exposure_time.num / exifInfo->exposure_time.den);
   5814     sv = APEX_ISO_TO_FILMSENSITIVITY(exifInfo->iso_speed_rating);
   5815     bv = av + tv - sv;
   5816     ev = av + tv;
   5817     //ALOGD("Shutter speed=%d us, iso=%d", shutterSpeed, exifInfo->iso_speed_rating);
   5818     ALOGV("AV=%d, TV=%d, SV=%d", av, tv, sv);
   5819 
   5820     //3 Shutter Speed
   5821     exifInfo->shutter_speed.num = tv * EXIF_DEF_APEX_DEN;
   5822     exifInfo->shutter_speed.den = EXIF_DEF_APEX_DEN;
   5823     //3 Brightness
   5824     exifInfo->brightness.num = bv*EXIF_DEF_APEX_DEN;
   5825     exifInfo->brightness.den = EXIF_DEF_APEX_DEN;
   5826     //3 Exposure Bias
    5827     if (ctl->aa.sceneMode == AA_SCENE_MODE_BEACH ||
    5828         ctl->aa.sceneMode == AA_SCENE_MODE_SNOW) {
   5829         exifInfo->exposure_bias.num = EXIF_DEF_APEX_DEN;
   5830         exifInfo->exposure_bias.den = EXIF_DEF_APEX_DEN;
   5831     } else {
   5832         exifInfo->exposure_bias.num = 0;
   5833         exifInfo->exposure_bias.den = 0;
   5834     }
   5835     //3 Metering Mode
   5836     /*switch (m_curCameraInfo->metering) {
   5837     case METERING_MODE_CENTER:
   5838         exifInfo->metering_mode = EXIF_METERING_CENTER;
   5839         break;
   5840     case METERING_MODE_MATRIX:
   5841         exifInfo->metering_mode = EXIF_METERING_MULTISPOT;
   5842         break;
   5843     case METERING_MODE_SPOT:
   5844         exifInfo->metering_mode = EXIF_METERING_SPOT;
   5845         break;
   5846     case METERING_MODE_AVERAGE:
   5847     default:
   5848         exifInfo->metering_mode = EXIF_METERING_AVERAGE;
   5849         break;
   5850     }*/
   5851     exifInfo->metering_mode = EXIF_METERING_CENTER;
   5852 
   5853     //3 Flash
   5854     if (m_ctlInfo.flash.m_flashDecisionResult)
   5855         exifInfo->flash = 1;
   5856     else
   5857         exifInfo->flash = EXIF_DEF_FLASH;
   5858 
   5859     //3 White Balance
   5860     if (currentEntry->awb_mode_dm == AA_AWBMODE_WB_AUTO)
   5861         exifInfo->white_balance = EXIF_WB_AUTO;
   5862     else
   5863         exifInfo->white_balance = EXIF_WB_MANUAL;
   5864 
   5865     //3 Scene Capture Type
   5866     switch (ctl->aa.sceneMode) {
   5867     case AA_SCENE_MODE_PORTRAIT:
   5868         exifInfo->scene_capture_type = EXIF_SCENE_PORTRAIT;
   5869         break;
   5870     case AA_SCENE_MODE_LANDSCAPE:
   5871         exifInfo->scene_capture_type = EXIF_SCENE_LANDSCAPE;
   5872         break;
   5873     case AA_SCENE_MODE_NIGHT_PORTRAIT:
   5874         exifInfo->scene_capture_type = EXIF_SCENE_NIGHT;
   5875         break;
   5876     default:
   5877         exifInfo->scene_capture_type = EXIF_SCENE_STANDARD;
   5878         break;
   5879     }
   5880 
   5881     //2 0th IFD GPS Info Tags
   5882     if (ctl->jpeg.gpsCoordinates[0] != 0 && ctl->jpeg.gpsCoordinates[1] != 0) {
   5883 
   5884         if (ctl->jpeg.gpsCoordinates[0] > 0)
   5885             strcpy((char *)exifInfo->gps_latitude_ref, "N");
   5886         else
   5887             strcpy((char *)exifInfo->gps_latitude_ref, "S");
   5888 
   5889         if (ctl->jpeg.gpsCoordinates[1] > 0)
   5890             strcpy((char *)exifInfo->gps_longitude_ref, "E");
   5891         else
   5892             strcpy((char *)exifInfo->gps_longitude_ref, "W");
   5893 
   5894         if (ctl->jpeg.gpsCoordinates[2] > 0)
   5895             exifInfo->gps_altitude_ref = 0;
   5896         else
   5897             exifInfo->gps_altitude_ref = 1;
   5898 
   5899         double latitude = fabs(ctl->jpeg.gpsCoordinates[0]);
   5900         double longitude = fabs(ctl->jpeg.gpsCoordinates[1]);
   5901         double altitude = fabs(ctl->jpeg.gpsCoordinates[2]);
   5902 
   5903         exifInfo->gps_latitude[0].num = (uint32_t)latitude;
   5904         exifInfo->gps_latitude[0].den = 1;
   5905         exifInfo->gps_latitude[1].num = (uint32_t)((latitude - exifInfo->gps_latitude[0].num) * 60);
   5906         exifInfo->gps_latitude[1].den = 1;
   5907         exifInfo->gps_latitude[2].num = (uint32_t)round((((latitude - exifInfo->gps_latitude[0].num) * 60)
   5908                                         - exifInfo->gps_latitude[1].num) * 60);
   5909         exifInfo->gps_latitude[2].den = 1;
   5910 
   5911         exifInfo->gps_longitude[0].num = (uint32_t)longitude;
   5912         exifInfo->gps_longitude[0].den = 1;
   5913         exifInfo->gps_longitude[1].num = (uint32_t)((longitude - exifInfo->gps_longitude[0].num) * 60);
   5914         exifInfo->gps_longitude[1].den = 1;
   5915         exifInfo->gps_longitude[2].num = (uint32_t)round((((longitude - exifInfo->gps_longitude[0].num) * 60)
   5916                                         - exifInfo->gps_longitude[1].num) * 60);
   5917         exifInfo->gps_longitude[2].den = 1;
   5918 
   5919         exifInfo->gps_altitude.num = (uint32_t)round(altitude);
   5920         exifInfo->gps_altitude.den = 1;
   5921 
   5922         struct tm tm_data;
   5923         long timestamp;
   5924         timestamp = (long)ctl->jpeg.gpsTimestamp;
   5925         gmtime_r(&timestamp, &tm_data);
   5926         exifInfo->gps_timestamp[0].num = tm_data.tm_hour;
   5927         exifInfo->gps_timestamp[0].den = 1;
   5928         exifInfo->gps_timestamp[1].num = tm_data.tm_min;
   5929         exifInfo->gps_timestamp[1].den = 1;
   5930         exifInfo->gps_timestamp[2].num = tm_data.tm_sec;
   5931         exifInfo->gps_timestamp[2].den = 1;
   5932         snprintf((char*)exifInfo->gps_datestamp, sizeof(exifInfo->gps_datestamp),
   5933                 "%04d:%02d:%02d", tm_data.tm_year + 1900, tm_data.tm_mon + 1, tm_data.tm_mday);
   5934 
   5935         memset(exifInfo->gps_processing_method, 0, 100);
   5936         memcpy(exifInfo->gps_processing_method, currentEntry->gpsProcessingMethod, 32);
   5937         exifInfo->enableGps = true;
   5938     } else {
   5939         exifInfo->enableGps = false;
   5940     }
   5941 
    5942     //2 1st IFD TIFF Tags
   5943     exifInfo->widthThumb = ctl->jpeg.thumbnailSize[0];
   5944     exifInfo->heightThumb = ctl->jpeg.thumbnailSize[1];
   5945 }
   5946 
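         /*
          * Thread lifecycle boilerplate.  The destructors below only log, and each
          * release() raises SIGNAL_THREAD_RELEASE on the SignalDrivenThread; the
          * thread's main loop is expected to notice the signal and exit on its own.
          */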
   5947 ExynosCameraHWInterface2::MainThread::~MainThread()
   5948 {
   5949     ALOGV("(%s):", __FUNCTION__);
   5950 }
   5951 
   5952 void ExynosCameraHWInterface2::MainThread::release()
   5953 {
   5954     ALOGV("(%s):", __func__);
   5955     SetSignal(SIGNAL_THREAD_RELEASE);
   5956 }
   5957 
   5958 ExynosCameraHWInterface2::SensorThread::~SensorThread()
   5959 {
   5960     ALOGV("(%s):", __FUNCTION__);
   5961 }
   5962 
   5963 void ExynosCameraHWInterface2::SensorThread::release()
   5964 {
   5965     ALOGV("(%s):", __func__);
   5966     SetSignal(SIGNAL_THREAD_RELEASE);
   5967 }
   5968 
   5969 ExynosCameraHWInterface2::StreamThread::~StreamThread()
   5970 {
   5971     ALOGV("(%s):", __FUNCTION__);
   5972 }
   5973 
   5974 void ExynosCameraHWInterface2::StreamThread::setParameter(stream_parameters_t * new_parameters)
   5975 {
   5976     ALOGV("DEBUG(%s):", __FUNCTION__);
   5977     memcpy(&m_parameters, new_parameters, sizeof(stream_parameters_t));
   5978 }
   5979 
   5980 void ExynosCameraHWInterface2::StreamThread::release()
   5981 {
   5982     ALOGV("(%s):", __func__);
   5983     SetSignal(SIGNAL_THREAD_RELEASE);
   5984 }
   5985 
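         /*
          * Buffer lookup helpers: map a service (gralloc) buffer back to its slot
          * in m_parameters, either by the mapped virtual address of plane 0 or by
          * its buffer_handle_t.  Both overloads return -1 when the buffer is not
          * registered with this stream.
          */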
   5986 int ExynosCameraHWInterface2::StreamThread::findBufferIndex(void * bufAddr)
   5987 {
   5988     int index;
   5989     for (index = 0 ; index < m_parameters.numSvcBuffers ; index++) {
   5990         if (m_parameters.svcBuffers[index].virt.extP[0] == bufAddr)
   5991             return index;
   5992     }
   5993     return -1;
   5994 }
   5995 
   5996 int ExynosCameraHWInterface2::StreamThread::findBufferIndex(buffer_handle_t * bufHandle)
   5997 {
   5998     int index;
   5999     for (index = 0 ; index < m_parameters.numSvcBuffers ; index++) {
   6000         if (m_parameters.svcBufHandle[index] == *bufHandle)
   6001             return index;
   6002     }
   6003     return -1;
   6004 }
   6005 
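         /*
          * Substream bookkeeping.  m_attachedSubStreams is a fixed table of
          * NUM_MAX_SUBSTREAM entries in which streamId == -1 marks a free slot;
          * attachSubStream() returns BAD_VALUE for an id that is already attached
          * and NO_MEMORY when the table is full, detachSubStream() returns
          * BAD_VALUE for an unknown id, and m_numRegisteredStream tracks how many
          * streams are currently attached.
          */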
   6006 status_t ExynosCameraHWInterface2::StreamThread::attachSubStream(int stream_id, int priority)
   6007 {
   6008     ALOGV("(%s): substream_id(%d)", __FUNCTION__, stream_id);
    6009     int index, vacantIndex = -1;
   6010     bool vacancy = false;
   6011 
   6012     for (index = 0 ; index < NUM_MAX_SUBSTREAM ; index++) {
   6013         if (!vacancy && m_attachedSubStreams[index].streamId == -1) {
   6014             vacancy = true;
   6015             vacantIndex = index;
   6016         } else if (m_attachedSubStreams[index].streamId == stream_id) {
   6017             return BAD_VALUE;
   6018         }
   6019     }
   6020     if (!vacancy)
   6021         return NO_MEMORY;
   6022     m_attachedSubStreams[vacantIndex].streamId = stream_id;
   6023     m_attachedSubStreams[vacantIndex].priority = priority;
   6024     m_numRegisteredStream++;
   6025     return NO_ERROR;
   6026 }
   6027 
   6028 status_t ExynosCameraHWInterface2::StreamThread::detachSubStream(int stream_id)
   6029 {
   6030     ALOGV("(%s): substream_id(%d)", __FUNCTION__, stream_id);
   6031     int index;
   6032     bool found = false;
   6033 
   6034     for (index = 0 ; index < NUM_MAX_SUBSTREAM ; index++) {
   6035         if (m_attachedSubStreams[index].streamId == stream_id) {
   6036             found = true;
   6037             break;
   6038         }
   6039     }
   6040     if (!found)
   6041         return BAD_VALUE;
   6042     m_attachedSubStreams[index].streamId = -1;
   6043     m_attachedSubStreams[index].priority = 0;
   6044     m_numRegisteredStream--;
   6045     return NO_ERROR;
   6046 }
   6047 
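         /*
          * ION client lifetime helpers: createIonClient() lazily creates a client
          * via ion_client_create() and returns 0 on failure so callers can test
          * for "no client"; deleteIonClient() destroys a valid client and returns
          * 0 as the new handle value.
          */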
   6048 int ExynosCameraHWInterface2::createIonClient(ion_client ionClient)
   6049 {
   6050     if (ionClient == 0) {
   6051         ionClient = ion_client_create();
   6052         if (ionClient < 0) {
   6053             ALOGE("[%s]src ion client create failed, value = %d\n", __FUNCTION__, ionClient);
   6054             return 0;
   6055         }
   6056     }
   6057     return ionClient;
   6058 }
   6059 
   6060 int ExynosCameraHWInterface2::deleteIonClient(ion_client ionClient)
   6061 {
   6062     if (ionClient != 0) {
   6063         if (ionClient > 0) {
   6064             ion_client_destroy(ionClient);
   6065         }
   6066         ionClient = 0;
   6067     }
   6068     return ionClient;
   6069 }
   6070 
   6071 int ExynosCameraHWInterface2::allocCameraMemory(ion_client ionClient, ExynosBuffer *buf, int iMemoryNum)
   6072 {
   6073     return allocCameraMemory(ionClient, buf, iMemoryNum, 0);
   6074 }
   6075 
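         /*
          * Allocates and maps up to iMemoryNum planes of an ExynosBuffer: every
          * plane with a non-zero size.extS[i] is allocated from the ION system
          * heap (cached when the matching bit of cacheFlag is set) and mapped with
          * ion_map().  On any failure the planes allocated so far are released via
          * freeCameraMemory() and -1 is returned; otherwise 0 is returned.
          *
          * Rough caller-side sketch (illustrative only, names are placeholders):
          *     ExynosBuffer buf;
          *     initCameraMemory(&buf, 2);
          *     buf.size.extS[0] = lumaPlaneSize;
          *     buf.size.extS[1] = chromaPlaneSize;
          *     allocCameraMemory(ionClient, &buf, 2, 1 << 0);   // plane 0 cached
          */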
   6076 int ExynosCameraHWInterface2::allocCameraMemory(ion_client ionClient, ExynosBuffer *buf, int iMemoryNum, int cacheFlag)
   6077 {
   6078     int ret = 0;
   6079     int i = 0;
   6080     int flag = 0;
   6081 
   6082     if (ionClient == 0) {
   6083         ALOGE("[%s] ionClient is zero (%d)\n", __FUNCTION__, ionClient);
   6084         return -1;
   6085     }
   6086 
   6087     for (i = 0 ; i < iMemoryNum ; i++) {
   6088         if (buf->size.extS[i] == 0) {
   6089             break;
   6090         }
    6091         if ((1 << i) & cacheFlag)
    6092             flag = ION_FLAG_CACHED | ION_FLAG_CACHED_NEEDS_SYNC;
    6093         else
    6094             flag = 0;
    6095         buf->fd.extFd[i] = ion_alloc(ionClient,
    6096                                       buf->size.extS[i], 0, ION_HEAP_SYSTEM_MASK, flag);
    6097         if ((buf->fd.extFd[i] == -1) || (buf->fd.extFd[i] == 0)) {
   6098             ALOGE("[%s]ion_alloc(%d) failed\n", __FUNCTION__, buf->size.extS[i]);
   6099             buf->fd.extFd[i] = -1;
   6100             freeCameraMemory(buf, iMemoryNum);
   6101             return -1;
   6102         }
   6103 
    6104         buf->virt.extP[i] = (char *)ion_map(buf->fd.extFd[i],
    6105                                         buf->size.extS[i], 0);
   6106         if ((buf->virt.extP[i] == (char *)MAP_FAILED) || (buf->virt.extP[i] == NULL)) {
   6107             ALOGE("[%s]src ion map failed(%d)\n", __FUNCTION__, buf->size.extS[i]);
   6108             buf->virt.extP[i] = (char *)MAP_FAILED;
   6109             freeCameraMemory(buf, iMemoryNum);
   6110             return -1;
   6111         }
   6112         ALOGV("allocCameraMem : [%d][0x%08x] size(%d) flag(%d)", i, (unsigned int)(buf->virt.extP[i]), buf->size.extS[i], flag);
   6113     }
   6114 
   6115     return ret;
   6116 }
   6117 
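         /*
          * Undoes allocCameraMemory(): unmaps and frees every plane that still has
          * a valid fd, then resets each descriptor to its empty state (fd -1,
          * virt MAP_FAILED, size 0) so the ExynosBuffer can be reused.
          */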
   6118 void ExynosCameraHWInterface2::freeCameraMemory(ExynosBuffer *buf, int iMemoryNum)
   6119 {
   6120 
    6121     int i = 0;
    6122     int ret = 0;
    6123 
    6124     for (i = 0; i < iMemoryNum; i++) {
   6125         if (buf->fd.extFd[i] != -1) {
   6126             if (buf->virt.extP[i] != (char *)MAP_FAILED) {
   6127                 ret = ion_unmap(buf->virt.extP[i], buf->size.extS[i]);
   6128                 if (ret < 0)
    6129                     ALOGE("ERR(%s): ion_unmap failed", __FUNCTION__);
   6130             }
   6131             ion_free(buf->fd.extFd[i]);
   6132         ALOGV("freeCameraMemory : [%d][0x%08x] size(%d)", i, (unsigned int)(buf->virt.extP[i]), buf->size.extS[i]);
   6133         }
   6134         buf->fd.extFd[i] = -1;
   6135         buf->virt.extP[i] = (char *)MAP_FAILED;
   6136         buf->size.extS[i] = 0;
   6137     }
   6138 }
   6139 
   6140 void ExynosCameraHWInterface2::initCameraMemory(ExynosBuffer *buf, int iMemoryNum)
   6141 {
    6142     int i = 0;
    6143     for (i = 0; i < iMemoryNum; i++) {
   6144         buf->virt.extP[i] = (char *)MAP_FAILED;
   6145         buf->fd.extFd[i] = -1;
   6146         buf->size.extS[i] = 0;
   6147     }
   6148 }
   6149 
   6150 
   6151 
   6152 
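         /*
          * HAL module glue.  Only one camera2_device_t instance is supported at a
          * time: g_camera_mutex serializes open/close, g_cam2_device points at the
          * currently opened device, and g_camera_vaild gates entry points that
          * must not run while the device is being torn down.
          */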
   6153 static camera2_device_t *g_cam2_device = NULL;
   6154 static bool g_camera_vaild = false;
   6155 static Mutex g_camera_mutex;
   6156 ExynosCamera2 * g_camera2[2] = { NULL, NULL };
   6157 
   6158 static int HAL2_camera_device_close(struct hw_device_t* device)
   6159 {
   6160     Mutex::Autolock lock(g_camera_mutex);
   6161     ALOGD("(%s): ENTER", __FUNCTION__);
   6162     if (device) {
   6163 
   6164         camera2_device_t *cam_device = (camera2_device_t *)device;
   6165         ALOGV("cam_device(0x%08x):", (unsigned int)cam_device);
   6166         ALOGV("g_cam2_device(0x%08x):", (unsigned int)g_cam2_device);
   6167         delete static_cast<ExynosCameraHWInterface2 *>(cam_device->priv);
   6168         free(cam_device);
   6169         g_camera_vaild = false;
   6170         g_cam2_device = NULL;
   6171     }
   6172 
   6173     ALOGD("(%s): EXIT", __FUNCTION__);
   6174     return 0;
   6175 }
   6176 
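         /*
          * The camera2_device_ops entry points below are thin C trampolines: each
          * one recovers the ExynosCameraHWInterface2 instance stored in
          * camera2_device_t::priv (via obj()) and forwards the call to the
          * corresponding member function.
          */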
   6177 static inline ExynosCameraHWInterface2 *obj(const struct camera2_device *dev)
   6178 {
   6179     return reinterpret_cast<ExynosCameraHWInterface2 *>(dev->priv);
   6180 }
   6181 
   6182 static int HAL2_device_set_request_queue_src_ops(const struct camera2_device *dev,
   6183             const camera2_request_queue_src_ops_t *request_src_ops)
   6184 {
   6185     ALOGV("DEBUG(%s):", __FUNCTION__);
   6186     return obj(dev)->setRequestQueueSrcOps(request_src_ops);
   6187 }
   6188 
   6189 static int HAL2_device_notify_request_queue_not_empty(const struct camera2_device *dev)
   6190 {
   6191     ALOGV("DEBUG(%s):", __FUNCTION__);
   6192     return obj(dev)->notifyRequestQueueNotEmpty();
   6193 }
   6194 
   6195 static int HAL2_device_set_frame_queue_dst_ops(const struct camera2_device *dev,
   6196             const camera2_frame_queue_dst_ops_t *frame_dst_ops)
   6197 {
   6198     ALOGV("DEBUG(%s):", __FUNCTION__);
   6199     return obj(dev)->setFrameQueueDstOps(frame_dst_ops);
   6200 }
   6201 
   6202 static int HAL2_device_get_in_progress_count(const struct camera2_device *dev)
   6203 {
   6204     ALOGV("DEBUG(%s):", __FUNCTION__);
   6205     return obj(dev)->getInProgressCount();
   6206 }
   6207 
   6208 static int HAL2_device_flush_captures_in_progress(const struct camera2_device *dev)
   6209 {
   6210     ALOGV("DEBUG(%s):", __FUNCTION__);
   6211     return obj(dev)->flushCapturesInProgress();
   6212 }
   6213 
   6214 static int HAL2_device_construct_default_request(const struct camera2_device *dev,
   6215             int request_template, camera_metadata_t **request)
   6216 {
   6217     ALOGV("DEBUG(%s):", __FUNCTION__);
   6218     return obj(dev)->constructDefaultRequest(request_template, request);
   6219 }
   6220 
   6221 static int HAL2_device_allocate_stream(
   6222             const struct camera2_device *dev,
   6223             // inputs
   6224             uint32_t width,
   6225             uint32_t height,
   6226             int      format,
   6227             const camera2_stream_ops_t *stream_ops,
   6228             // outputs
   6229             uint32_t *stream_id,
   6230             uint32_t *format_actual,
   6231             uint32_t *usage,
   6232             uint32_t *max_buffers)
   6233 {
   6234     ALOGV("(%s): ", __FUNCTION__);
   6235     return obj(dev)->allocateStream(width, height, format, stream_ops,
   6236                                     stream_id, format_actual, usage, max_buffers);
   6237 }
   6238 
   6239 static int HAL2_device_register_stream_buffers(const struct camera2_device *dev,
   6240             uint32_t stream_id,
   6241             int num_buffers,
   6242             buffer_handle_t *buffers)
   6243 {
   6244     ALOGV("DEBUG(%s):", __FUNCTION__);
   6245     return obj(dev)->registerStreamBuffers(stream_id, num_buffers, buffers);
   6246 }
   6247 
   6248 static int HAL2_device_release_stream(
    6249             const struct camera2_device *dev,
    6250             uint32_t stream_id)
   6251 {
   6252     ALOGV("DEBUG(%s)(id: %d):", __FUNCTION__, stream_id);
   6253     if (!g_camera_vaild)
   6254         return 0;
   6255     return obj(dev)->releaseStream(stream_id);
   6256 }
   6257 
   6258 static int HAL2_device_allocate_reprocess_stream(
   6259            const struct camera2_device *dev,
   6260             uint32_t width,
   6261             uint32_t height,
   6262             uint32_t format,
   6263             const camera2_stream_in_ops_t *reprocess_stream_ops,
   6264             // outputs
   6265             uint32_t *stream_id,
   6266             uint32_t *consumer_usage,
   6267             uint32_t *max_buffers)
   6268 {
   6269     ALOGV("DEBUG(%s):", __FUNCTION__);
   6270     return obj(dev)->allocateReprocessStream(width, height, format, reprocess_stream_ops,
   6271                                     stream_id, consumer_usage, max_buffers);
   6272 }
   6273 
   6274 static int HAL2_device_allocate_reprocess_stream_from_stream(
   6275            const struct camera2_device *dev,
   6276             uint32_t output_stream_id,
   6277             const camera2_stream_in_ops_t *reprocess_stream_ops,
   6278             // outputs
   6279             uint32_t *stream_id)
   6280 {
   6281     ALOGV("DEBUG(%s):", __FUNCTION__);
   6282     return obj(dev)->allocateReprocessStreamFromStream(output_stream_id,
   6283                                     reprocess_stream_ops, stream_id);
   6284 }
   6285 
   6286 static int HAL2_device_release_reprocess_stream(
    6287             const struct camera2_device *dev,
   6288             uint32_t stream_id)
   6289 {
   6290     ALOGV("DEBUG(%s):", __FUNCTION__);
   6291     return obj(dev)->releaseReprocessStream(stream_id);
   6292 }
   6293 
   6294 static int HAL2_device_trigger_action(const struct camera2_device *dev,
   6295            uint32_t trigger_id,
   6296             int ext1,
   6297             int ext2)
   6298 {
   6299     ALOGV("DEBUG(%s):", __FUNCTION__);
   6300     if (!g_camera_vaild)
   6301         return 0;
   6302     return obj(dev)->triggerAction(trigger_id, ext1, ext2);
   6303 }
   6304 
   6305 static int HAL2_device_set_notify_callback(const struct camera2_device *dev,
   6306             camera2_notify_callback notify_cb,
   6307             void *user)
   6308 {
   6309     ALOGV("DEBUG(%s):", __FUNCTION__);
   6310     return obj(dev)->setNotifyCallback(notify_cb, user);
   6311 }
   6312 
   6313 static int HAL2_device_get_metadata_vendor_tag_ops(const struct camera2_device*dev,
   6314             vendor_tag_query_ops_t **ops)
   6315 {
   6316     ALOGV("DEBUG(%s):", __FUNCTION__);
   6317     return obj(dev)->getMetadataVendorTagOps(ops);
   6318 }
   6319 
   6320 static int HAL2_device_dump(const struct camera2_device *dev, int fd)
   6321 {
   6322     ALOGV("DEBUG(%s):", __FUNCTION__);
   6323     return obj(dev)->dump(fd);
   6324 }
   6325 
   6326 
   6327 
   6328 
   6329 
   6330 static int HAL2_getNumberOfCameras()
   6331 {
   6332     ALOGV("(%s): returning 2", __FUNCTION__);
   6333     return 2;
   6334 }
   6335 
   6336 
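         /*
          * Camera id 0 is the back-facing sensor and id 1 the front-facing one.
          * The static metadata for each camera is built once through two passes of
          * ExynosCamera2::constructStaticInfo() (allocate, then fill) and cached
          * in mCameraInfo[] for later queries.
          */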
   6337 static int HAL2_getCameraInfo(int cameraId, struct camera_info *info)
   6338 {
   6339     ALOGV("DEBUG(%s): cameraID: %d", __FUNCTION__, cameraId);
   6340     static camera_metadata_t * mCameraInfo[2] = {NULL, NULL};
   6341 
   6342     status_t res;
   6343 
   6344     if (cameraId == 0) {
   6345         info->facing = CAMERA_FACING_BACK;
   6346         if (!g_camera2[0])
   6347             g_camera2[0] = new ExynosCamera2(0);
   6348     }
   6349     else if (cameraId == 1) {
   6350         info->facing = CAMERA_FACING_FRONT;
   6351         if (!g_camera2[1])
   6352             g_camera2[1] = new ExynosCamera2(1);
   6353     }
   6354     else
   6355         return BAD_VALUE;
   6356 
   6357     info->orientation = 0;
   6358     info->device_version = HARDWARE_DEVICE_API_VERSION(2, 0);
   6359     if (mCameraInfo[cameraId] == NULL) {
   6360         res = g_camera2[cameraId]->constructStaticInfo(&(mCameraInfo[cameraId]), cameraId, true);
   6361         if (res != OK) {
   6362             ALOGE("%s: Unable to allocate static info: %s (%d)",
   6363                     __FUNCTION__, strerror(-res), res);
   6364             return res;
   6365         }
   6366         res = g_camera2[cameraId]->constructStaticInfo(&(mCameraInfo[cameraId]), cameraId, false);
   6367         if (res != OK) {
   6368             ALOGE("%s: Unable to fill in static info: %s (%d)",
   6369                     __FUNCTION__, strerror(-res), res);
   6370             return res;
   6371         }
   6372     }
   6373     info->static_camera_characteristics = mCameraInfo[cameraId];
   6374     return NO_ERROR;
   6375 }
   6376 
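         /*
          * camera2_device_ops wiring: SET_METHOD(m) expands to the labeled
          * initializer "m : HAL2_device_m", so every ops slot points at the
          * matching trampoline defined above.
          */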
   6377 #define SET_METHOD(m) m : HAL2_device_##m
   6378 
   6379 static camera2_device_ops_t camera2_device_ops = {
   6380         SET_METHOD(set_request_queue_src_ops),
   6381         SET_METHOD(notify_request_queue_not_empty),
   6382         SET_METHOD(set_frame_queue_dst_ops),
   6383         SET_METHOD(get_in_progress_count),
   6384         SET_METHOD(flush_captures_in_progress),
   6385         SET_METHOD(construct_default_request),
   6386         SET_METHOD(allocate_stream),
   6387         SET_METHOD(register_stream_buffers),
   6388         SET_METHOD(release_stream),
   6389         SET_METHOD(allocate_reprocess_stream),
   6390         SET_METHOD(allocate_reprocess_stream_from_stream),
   6391         SET_METHOD(release_reprocess_stream),
   6392         SET_METHOD(trigger_action),
   6393         SET_METHOD(set_notify_callback),
   6394         SET_METHOD(get_metadata_vendor_tag_ops),
   6395         SET_METHOD(dump),
   6396 };
   6397 
   6398 #undef SET_METHOD
   6399 
   6400 
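         /*
          * Open entry point.  Only one camera device may be open at a time: an
          * open while the current device is still valid fails with -EBUSY, an
          * existing (not yet freed) device for the same id is returned as-is, and
          * an open against a different id waits for the previous device to be
          * freed before allocating a fresh camera2_device_t and
          * ExynosCameraHWInterface2.
          */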
   6401 static int HAL2_camera_device_open(const struct hw_module_t* module,
   6402                                   const char *id,
   6403                                   struct hw_device_t** device)
   6404 {
   6405     int cameraId = atoi(id);
   6406     int openInvalid = 0;
   6407 
   6408     Mutex::Autolock lock(g_camera_mutex);
   6409     if (g_camera_vaild) {
   6410         ALOGE("ERR(%s): Can't open, other camera is in use", __FUNCTION__);
   6411         return -EBUSY;
   6412     }
   6413     g_camera_vaild = false;
   6414     ALOGD("\n\n>>> I'm Samsung's CameraHAL_2(ID:%d) <<<\n\n", cameraId);
   6415     if (cameraId < 0 || cameraId >= HAL2_getNumberOfCameras()) {
   6416         ALOGE("ERR(%s):Invalid camera ID %s", __FUNCTION__, id);
   6417         return -EINVAL;
   6418     }
   6419 
   6420     ALOGD("g_cam2_device : 0x%08x", (unsigned int)g_cam2_device);
   6421     if (g_cam2_device) {
   6422         if (obj(g_cam2_device)->getCameraId() == cameraId) {
   6423             ALOGD("DEBUG(%s):returning existing camera ID %s", __FUNCTION__, id);
   6424             goto done;
   6425         } else {
   6426             ALOGD("(%s): START waiting for cam device free", __FUNCTION__);
   6427             while (g_cam2_device)
   6428                 usleep(SIG_WAITING_TICK);
   6429             ALOGD("(%s): END   waiting for cam device free", __FUNCTION__);
   6430         }
   6431     }
   6432 
   6433     g_cam2_device = (camera2_device_t *)malloc(sizeof(camera2_device_t));
   6434     ALOGV("g_cam2_device : 0x%08x", (unsigned int)g_cam2_device);
   6435 
   6436     if (!g_cam2_device)
   6437         return -ENOMEM;
   6438 
   6439     g_cam2_device->common.tag     = HARDWARE_DEVICE_TAG;
   6440     g_cam2_device->common.version = CAMERA_DEVICE_API_VERSION_2_0;
   6441     g_cam2_device->common.module  = const_cast<hw_module_t *>(module);
   6442     g_cam2_device->common.close   = HAL2_camera_device_close;
   6443 
   6444     g_cam2_device->ops = &camera2_device_ops;
   6445 
   6446     ALOGV("DEBUG(%s):open camera2 %s", __FUNCTION__, id);
   6447 
   6448     g_cam2_device->priv = new ExynosCameraHWInterface2(cameraId, g_cam2_device, g_camera2[cameraId], &openInvalid);
   6449     if (!openInvalid) {
   6450         ALOGE("DEBUG(%s): ExynosCameraHWInterface2 creation failed", __FUNCTION__);
   6451         return -ENODEV;
   6452     }
   6453 done:
   6454     *device = (hw_device_t *)g_cam2_device;
   6455     ALOGV("DEBUG(%s):opened camera2 %s (%p)", __FUNCTION__, id, *device);
   6456     g_camera_vaild = true;
   6457 
   6458     return 0;
   6459 }
   6460 
   6461 
   6462 static hw_module_methods_t camera_module_methods = {
   6463             open : HAL2_camera_device_open
   6464 };
   6465 
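         /*
          * HAL_MODULE_INFO_SYM is the symbol the Android hardware module loader
          * (hw_get_module) resolves in this shared library; it exposes the open()
          * hook above plus the camera-count and camera-info callbacks.
          */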
   6466 extern "C" {
   6467     struct camera_module HAL_MODULE_INFO_SYM = {
   6468       common : {
   6469           tag                : HARDWARE_MODULE_TAG,
   6470           module_api_version : CAMERA_MODULE_API_VERSION_2_0,
   6471           hal_api_version    : HARDWARE_HAL_API_VERSION,
   6472           id                 : CAMERA_HARDWARE_MODULE_ID,
   6473           name               : "Exynos Camera HAL2",
   6474           author             : "Samsung Corporation",
   6475           methods            : &camera_module_methods,
    6476           dso                : NULL,
    6477           reserved           : {0},
   6478       },
   6479       get_number_of_cameras : HAL2_getNumberOfCameras,
   6480       get_camera_info       : HAL2_getCameraInfo
   6481     };
   6482 }
   6483 
   6484 }; // namespace android
   6485