1 /* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved. 2 * 3 * Redistribution and use in source and binary forms, with or without 4 * modification, are permitted provided that the following conditions are 5 * met: 6 * * Redistributions of source code must retain the above copyright 7 * notice, this list of conditions and the following disclaimer. 8 * * Redistributions in binary form must reproduce the above 9 * copyright notice, this list of conditions and the following 10 * disclaimer in the documentation and/or other materials provided 11 * with the distribution. 12 * * Neither the name of The Linux Foundation nor the names of its 13 * contributors may be used to endorse or promote products derived 14 * from this software without specific prior written permission. 15 * 16 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED 17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF 18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT 19 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS 20 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR 21 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF 22 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR 23 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, 24 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE 25 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN 26 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
 *
 */

#define LOG_TAG "QCamera2HWI"

// System dependencies
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#define STAT_H <SYSTEM_HEADER_PREFIX/stat.h>
#include STAT_H
#include <utils/Errors.h>

// Camera dependencies
#include "QCamera2HWI.h"
#include "QCameraTrace.h"

extern "C" {
#include "mm_camera_dbg.h"
}

namespace qcamera {

/*===========================================================================
 * FUNCTION   : zsl_channel_cb
 *
 * DESCRIPTION: helper function to handle ZSL superbuf callback directly from
 *              mm-camera-interface
 *
 * PARAMETERS :
 *   @recvd_frame : received super buffer
 *   @userdata    : user data ptr
 *
 * RETURN     : None
 *
 * NOTE       : recvd_frame will be released after this call by caller, so if
 *              async operation needed for recvd_frame, it's our responsibility
 *              to save a copy for this variable to be used later.
 *==========================================================================*/
void QCamera2HardwareInterface::zsl_channel_cb(mm_camera_super_buf_t *recvd_frame,
        void *userdata)
{
    ATRACE_CALL();
    LOGH("[KPI Perf]: E");
    char value[PROPERTY_VALUE_MAX];
    bool dump_raw = false;
    bool log_matching = false;
    QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;

    // Validate the HWI object and that this superbuf belongs to our camera.
    // validate_handle() is used (not raw ==) so composite handles also match.
    if (pme == NULL ||
        pme->mCameraHandle == 0 ||
        !validate_handle(pme->mCameraHandle->camera_handle,
        recvd_frame->camera_handle)) {
        LOGE("camera obj not valid");
        return;
    }

    // The superbuf must have come from the ZSL channel.
    QCameraChannel *pChannel = pme->m_channels[QCAMERA_CH_TYPE_ZSL];
    if (pChannel == NULL ||
        !validate_handle(pChannel->getMyHandle(),
        recvd_frame->ch_id)) {
        LOGE("ZSL channel doesn't exist, return here");
        return;
    }

    // Scene-selection mode (outside of an active capture): inspect the frame's
    // metadata for the selected scene, return the buffers, and stop here —
    // no snapshot processing is done for these frames.
    if(pme->mParameters.isSceneSelectionEnabled() &&
            !pme->m_stateMachine.isCaptureRunning()) {
        pme->selectScene(pChannel, recvd_frame);
        pChannel->bufDone(recvd_frame);
        return;
    }

    LOGD("Frame CB Unlock : %d, is AEC Locked: %d",
            recvd_frame->bUnlockAEC, pme->m_bLedAfAecLock);
    // Retro-active (LED flash) capture: if the interface asks us to unlock AEC
    // and we currently hold the LED AF/AEC lock, post an internal unlock event
    // to the state machine. Payload ownership passes to processEvt on success;
    // on failure we must free it here.
    if(recvd_frame->bUnlockAEC && pme->m_bLedAfAecLock) {
        qcamera_sm_internal_evt_payload_t *payload =
                (qcamera_sm_internal_evt_payload_t *)malloc(
                        sizeof(qcamera_sm_internal_evt_payload_t));
        if (NULL != payload) {
            memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
            payload->evt_type = QCAMERA_INTERNAL_EVT_RETRO_AEC_UNLOCK;
            int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
            if (rc != NO_ERROR) {
                LOGE("processEvt for retro AEC unlock failed");
                free(payload);
                payload = NULL;
            }
        } else {
            LOGE("No memory for retro AEC event");
        }
    }

    // Check if retro-active frames are completed and camera is
    // ready to go ahead with LED estimation for regular frames
    if (recvd_frame->bReadyForPrepareSnapshot) {
        // Send an event
        LOGD("Ready for Prepare Snapshot, signal ");
        qcamera_sm_internal_evt_payload_t *payload =
                (qcamera_sm_internal_evt_payload_t *)malloc(
                        sizeof(qcamera_sm_internal_evt_payload_t));
        if (NULL != payload) {
            memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
            payload->evt_type = QCAMERA_INTERNAL_EVT_READY_FOR_SNAPSHOT;
            int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
            if (rc != NO_ERROR) {
                LOGW("processEvt Ready for Snaphot failed");
                free(payload);
                payload = NULL;
            }
        } else {
            LOGE("No memory for prepare signal event detect"
                 " qcamera_sm_internal_evt_payload_t");
        }
    }

    /* indicate the parent that capture is done */
    pme->captureDone();

    // save a copy for the superbuf -- the caller releases recvd_frame after
    // this callback returns, so a heap copy is needed for async postprocessing.
    mm_camera_super_buf_t* frame =
        (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
    if (frame == NULL) {
        LOGE("Error allocating memory to save received_frame structure.");
        pChannel->bufDone(recvd_frame);
        return;
    }
    *frame = *recvd_frame;

    if (recvd_frame->num_bufs > 0) {
        LOGI("[KPI Perf]: superbuf frame_idx %d",
                recvd_frame->bufs[0]->frame_idx);
    }

    // DUMP RAW if available -- gated by persist.camera.zsl_raw property.
    property_get("persist.camera.zsl_raw", value, "0");
    dump_raw = atoi(value) > 0 ? true : false;
    if (dump_raw) {
        for (uint32_t i = 0; i < recvd_frame->num_bufs; i++) {
            if (recvd_frame->bufs[i]->stream_type == CAM_STREAM_TYPE_RAW) {
                mm_camera_buf_def_t * raw_frame = recvd_frame->bufs[i];
                QCameraStream *pStream = pChannel->getStreamByHandle(raw_frame->stream_id);
                if (NULL != pStream) {
                    pme->dumpFrameToFile(pStream, raw_frame, QCAMERA_DUMP_FRM_RAW);
                }
                break;
            }
        }
    }

    // Dump the (first) YUV snapshot buffer for reprocess-input debugging.
    for (uint32_t i = 0; i < recvd_frame->num_bufs; i++) {
        if (recvd_frame->bufs[i]->stream_type == CAM_STREAM_TYPE_SNAPSHOT) {
            mm_camera_buf_def_t * yuv_frame = recvd_frame->bufs[i];
            QCameraStream *pStream = pChannel->getStreamByHandle(yuv_frame->stream_id);
            if (NULL != pStream) {
                pme->dumpFrameToFile(pStream, yuv_frame, QCAMERA_DUMP_FRM_INPUT_REPROCESS);
            }
            break;
        }
    }

    // whether need FD Metadata along with Snapshot frame in ZSL mode
    if(pme->needFDMetadata(QCAMERA_CH_TYPE_ZSL)){
        //Need Face Detection result for snapshot frames
        //Get the Meta Data frames
        mm_camera_buf_def_t *pMetaFrame = NULL;
        for (uint32_t i = 0; i < frame->num_bufs; i++) {
            QCameraStream *pStream = pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
            if (pStream != NULL) {
                if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
                    pMetaFrame = frame->bufs[i]; //find the metadata
                    break;
                }
            }
        }

        if(pMetaFrame != NULL){
            metadata_buffer_t *pMetaData = (metadata_buffer_t *)pMetaFrame->buffer;
            //send the face detection info
            cam_faces_data_t faces_data;
            pme->fillFacesData(faces_data, pMetaData);
            //HARD CODE here before MCT can support
            faces_data.detection_data.fd_type = QCAMERA_FD_SNAPSHOT;

            // Forward the FD result to the state machine; payload ownership
            // transfers on success, freed here on failure (same pattern as above).
            qcamera_sm_internal_evt_payload_t *payload =
                    (qcamera_sm_internal_evt_payload_t *)malloc(sizeof(qcamera_sm_internal_evt_payload_t));
            if (NULL != payload) {
                memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
                payload->evt_type = QCAMERA_INTERNAL_EVT_FACE_DETECT_RESULT;
                payload->faces_data = faces_data;
                int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
                if (rc != NO_ERROR) {
                    LOGW("processEvt face_detection_result failed");
                    free(payload);
                    payload = NULL;
                }
            } else {
                LOGE("No memory for face_detection_result qcamera_sm_internal_evt_payload_t");
            }
        }
    }

    // Optional tuning-metadata dump, gated by persist.camera.dumpmetadata.
    property_get("persist.camera.dumpmetadata", value, "0");
    int32_t enabled = atoi(value);
    if (enabled) {
        mm_camera_buf_def_t *pMetaFrame = NULL;
        QCameraStream *pStream = NULL;
        for (uint32_t i = 0; i < frame->num_bufs; i++) {
            pStream = pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
            if (pStream != NULL) {
                if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
                    pMetaFrame = frame->bufs[i];
                    if (pMetaFrame != NULL &&
                            ((metadata_buffer_t *)pMetaFrame->buffer)->is_tuning_params_valid) {
                        pme->dumpMetadataToFile(pStream, pMetaFrame, (char *) "ZSL_Snapshot");
                    }
                    break;
                }
            }
        }
    }

    // Optional per-buffer frame-matching log, gated by persist.camera.zsl_matching.
    property_get("persist.camera.zsl_matching", value, "0");
    log_matching = atoi(value) > 0 ? true : false;
    if (log_matching) {
        LOGH("ZSL super buffer contains:");
        QCameraStream *pStream = NULL;
        for (uint32_t i = 0; i < frame->num_bufs; i++) {
            pStream = pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
            if (pStream != NULL ) {
                LOGH("Buffer with V4L index %d frame index %d of type %d Timestamp: %ld %ld ",
                        frame->bufs[i]->buf_idx,
                        frame->bufs[i]->frame_idx,
                        pStream->getMyType(),
                        frame->bufs[i]->ts.tv_sec,
                        frame->bufs[i]->ts.tv_nsec);
            }
        }
    }

    // Wait on Postproc initialization if needed
    // then send to postprocessor. On failure the frame copy is freed and the
    // interface buffers are returned; on success processData owns the copy.
    if ((NO_ERROR != pme->waitDeferredWork(pme->mReprocJob)) ||
            (NO_ERROR != pme->m_postprocessor.processData(frame))) {
        LOGE("Failed to trigger process data");
        pChannel->bufDone(recvd_frame);
        free(frame);
        frame = NULL;
        return;
    }

    LOGH("[KPI Perf]: X");
}

/*===========================================================================
 * FUNCTION   : selectScene
 *
 * DESCRIPTION: send a preview callback when a specific selected scene is applied
 *
 * PARAMETERS :
 *   @pChannel: Camera channel
 *   @frame   : Bundled super buffer
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int32_t QCamera2HardwareInterface::selectScene(QCameraChannel *pChannel,
        mm_camera_super_buf_t *frame)
{
    mm_camera_buf_def_t *pMetaFrame = NULL;
    QCameraStream *pStream = NULL;
    int32_t rc = NO_ERROR;

    if ((NULL == frame) || (NULL == pChannel)) {
        LOGE("Invalid scene select input");
        return BAD_VALUE;
    }

    // CAM_SCENE_MODE_MAX acts as the "no scene selected" sentinel.
    cam_scene_mode_type selectedScene = mParameters.getSelectedScene();
    if (CAM_SCENE_MODE_MAX == selectedScene) {
        LOGL("No selected scene");
        return NO_ERROR;
    }

    // Locate the metadata buffer within the bundled superbuf.
    for (uint32_t i = 0; i < frame->num_bufs; i++) {
        pStream = pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
        if (pStream != NULL) {
            if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
                pMetaFrame = frame->bufs[i];
                break;
            }
        }
    }

    if (NULL == pMetaFrame) {
        LOGE("No metadata buffer found in scene select super buffer");
        return NO_INIT;
    }

    metadata_buffer_t *pMetaData = (metadata_buffer_t *)pMetaFrame->buffer;

    IF_META_AVAILABLE(cam_scene_mode_type, scene, CAM_INTF_META_CURRENT_SCENE, pMetaData) {
        // Fire the preview callback only when the detected scene matches the
        // requested one and the app has a preview-frame callback enabled.
        if ((*scene == selectedScene) &&
                (mDataCb != NULL) &&
                (msgTypeEnabledWithLock(CAMERA_MSG_PREVIEW_FRAME) > 0)) {
            mm_camera_buf_def_t *preview_frame = NULL;
            for (uint32_t i = 0; i < frame->num_bufs; i++) {
                pStream = pChannel->getStreamByHandle(frame->bufs[i]->stream_id);
                if (pStream != NULL) {
                    if (pStream->isTypeOf(CAM_STREAM_TYPE_PREVIEW)) {
                        preview_frame = frame->bufs[i];
                        break;
                    }
                }
            }
            if (preview_frame) {
                QCameraGrallocMemory *memory = (QCameraGrallocMemory *)preview_frame->mem_info;
                uint32_t idx = preview_frame->buf_idx;
                rc = sendPreviewCallback(pStream, memory, idx);
                if (NO_ERROR != rc) {
                    LOGE("Error triggering scene select preview callback");
                } else {
                    // Reset selection back to the sentinel so we only notify once.
                    mParameters.setSelectedScene(CAM_SCENE_MODE_MAX);
                }
            } else {
                LOGE("No preview buffer found in scene select super buffer");
                return NO_INIT;
            }
        }
    } else {
        LOGE("No current scene metadata!");
        rc = NO_INIT;
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : capture_channel_cb_routine
 *
 * DESCRIPTION: helper function to handle snapshot superbuf callback directly from
 *              mm-camera-interface
 *
 * PARAMETERS :
 *   @recvd_frame : received super buffer
 *   @userdata    : user data ptr
 *
 * RETURN     : None
 *
 * NOTE       : recvd_frame will be released after this call by caller, so if
 *              async operation
needed for recvd_frame, it's our responsibility 378 * to save a copy for this variable to be used later. 379 *==========================================================================*/ 380 void QCamera2HardwareInterface::capture_channel_cb_routine(mm_camera_super_buf_t *recvd_frame, 381 void *userdata) 382 { 383 KPI_ATRACE_CALL(); 384 char value[PROPERTY_VALUE_MAX]; 385 LOGH("[KPI Perf]: E PROFILE_YUV_CB_TO_HAL"); 386 QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata; 387 if (pme == NULL || 388 pme->mCameraHandle == NULL || 389 pme->mCameraHandle->camera_handle != recvd_frame->camera_handle){ 390 LOGE("camera obj not valid"); 391 return; 392 } 393 394 QCameraChannel *pChannel = pme->m_channels[QCAMERA_CH_TYPE_CAPTURE]; 395 if (pChannel == NULL || 396 !validate_handle(pChannel->getMyHandle(), 397 recvd_frame->ch_id)) { 398 LOGE("Capture channel doesn't exist, return here"); 399 return; 400 } 401 402 // save a copy for the superbuf 403 mm_camera_super_buf_t* frame = 404 (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t)); 405 if (frame == NULL) { 406 LOGE("Error allocating memory to save received_frame structure."); 407 pChannel->bufDone(recvd_frame); 408 return; 409 } 410 *frame = *recvd_frame; 411 412 if (recvd_frame->num_bufs > 0) { 413 LOGI("[KPI Perf]: superbuf frame_idx %d", 414 recvd_frame->bufs[0]->frame_idx); 415 } 416 417 for ( uint32_t i= 0 ; i < recvd_frame->num_bufs ; i++ ) { 418 if ( recvd_frame->bufs[i]->stream_type == CAM_STREAM_TYPE_SNAPSHOT ) { 419 mm_camera_buf_def_t * yuv_frame = recvd_frame->bufs[i]; 420 QCameraStream *pStream = pChannel->getStreamByHandle(yuv_frame->stream_id); 421 if ( NULL != pStream ) { 422 pme->dumpFrameToFile(pStream, yuv_frame, QCAMERA_DUMP_FRM_INPUT_REPROCESS); 423 } 424 break; 425 } 426 } 427 428 property_get("persist.camera.dumpmetadata", value, "0"); 429 int32_t enabled = atoi(value); 430 if (enabled) { 431 mm_camera_buf_def_t *pMetaFrame = NULL; 432 QCameraStream *pStream = NULL; 433 
for (uint32_t i = 0; i < frame->num_bufs; i++) { 434 pStream = pChannel->getStreamByHandle(frame->bufs[i]->stream_id); 435 if (pStream != NULL) { 436 if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) { 437 pMetaFrame = frame->bufs[i]; //find the metadata 438 if (pMetaFrame != NULL && 439 ((metadata_buffer_t *)pMetaFrame->buffer)->is_tuning_params_valid) { 440 pme->dumpMetadataToFile(pStream, pMetaFrame, (char *) "Snapshot"); 441 } 442 break; 443 } 444 } 445 } 446 } 447 448 // Wait on Postproc initialization if needed 449 // then send to postprocessor 450 if ((NO_ERROR != pme->waitDeferredWork(pme->mReprocJob)) || 451 (NO_ERROR != pme->m_postprocessor.processData(frame))) { 452 LOGE("Failed to trigger process data"); 453 pChannel->bufDone(recvd_frame); 454 free(frame); 455 frame = NULL; 456 return; 457 } 458 459 /* START of test register face image for face authentication */ 460 #ifdef QCOM_TEST_FACE_REGISTER_FACE 461 static uint8_t bRunFaceReg = 1; 462 463 if (bRunFaceReg > 0) { 464 // find snapshot frame 465 QCameraStream *main_stream = NULL; 466 mm_camera_buf_def_t *main_frame = NULL; 467 for (int i = 0; i < recvd_frame->num_bufs; i++) { 468 QCameraStream *pStream = 469 pChannel->getStreamByHandle(recvd_frame->bufs[i]->stream_id); 470 if (pStream != NULL) { 471 if (pStream->isTypeOf(CAM_STREAM_TYPE_SNAPSHOT)) { 472 main_stream = pStream; 473 main_frame = recvd_frame->bufs[i]; 474 break; 475 } 476 } 477 } 478 if (main_stream != NULL && main_frame != NULL) { 479 int32_t faceId = -1; 480 cam_pp_offline_src_config_t config; 481 memset(&config, 0, sizeof(cam_pp_offline_src_config_t)); 482 config.num_of_bufs = 1; 483 main_stream->getFormat(config.input_fmt); 484 main_stream->getFrameDimension(config.input_dim); 485 main_stream->getFrameOffset(config.input_buf_planes.plane_info); 486 LOGH("DEBUG: registerFaceImage E"); 487 int32_t rc = pme->registerFaceImage(main_frame->buffer, &config, faceId); 488 LOGH("DEBUG: registerFaceImage X, ret=%d, faceId=%d", rc, faceId); 489 
bRunFaceReg = 0; 490 } 491 } 492 493 #endif 494 /* END of test register face image for face authentication */ 495 496 LOGH("[KPI Perf]: X"); 497 } 498 #ifdef TARGET_TS_MAKEUP 499 bool QCamera2HardwareInterface::TsMakeupProcess_Preview(mm_camera_buf_def_t *pFrame, 500 QCameraStream * pStream) { 501 LOGD("begin"); 502 bool bRet = false; 503 if (pStream == NULL || pFrame == NULL) { 504 bRet = false; 505 LOGH("pStream == NULL || pFrame == NULL"); 506 } else { 507 bRet = TsMakeupProcess(pFrame, pStream, mFaceRect); 508 } 509 LOGD("end bRet = %d ",bRet); 510 return bRet; 511 } 512 513 bool QCamera2HardwareInterface::TsMakeupProcess_Snapshot(mm_camera_buf_def_t *pFrame, 514 QCameraStream * pStream) { 515 LOGD("begin"); 516 bool bRet = false; 517 if (pStream == NULL || pFrame == NULL) { 518 bRet = false; 519 LOGH("pStream == NULL || pFrame == NULL"); 520 } else { 521 cam_frame_len_offset_t offset; 522 memset(&offset, 0, sizeof(cam_frame_len_offset_t)); 523 pStream->getFrameOffset(offset); 524 525 cam_dimension_t dim; 526 pStream->getFrameDimension(dim); 527 528 unsigned char *yBuf = (unsigned char*)pFrame->buffer; 529 unsigned char *uvBuf = yBuf + offset.mp[0].len; 530 TSMakeupDataEx inMakeupData; 531 inMakeupData.frameWidth = dim.width; 532 inMakeupData.frameHeight = dim.height; 533 inMakeupData.yBuf = yBuf; 534 inMakeupData.uvBuf = uvBuf; 535 inMakeupData.yStride = offset.mp[0].stride; 536 inMakeupData.uvStride = offset.mp[1].stride; 537 LOGD("detect begin"); 538 TSHandle fd_handle = ts_detectface_create_context(); 539 if (fd_handle != NULL) { 540 cam_format_t fmt; 541 pStream->getFormat(fmt); 542 int iret = ts_detectface_detectEx(fd_handle, &inMakeupData); 543 LOGD("ts_detectface_detect iret = %d",iret); 544 if (iret <= 0) { 545 bRet = false; 546 } else { 547 TSRect faceRect; 548 memset(&faceRect,-1,sizeof(TSRect)); 549 iret = ts_detectface_get_face_info(fd_handle, 0, &faceRect, NULL,NULL,NULL); 550 LOGD("ts_detectface_get_face_info iret=%d,faceRect.left=%ld," 551 
"faceRect.top=%ld,faceRect.right=%ld,faceRect.bottom=%ld" 552 ,iret,faceRect.left,faceRect.top,faceRect.right,faceRect.bottom); 553 bRet = TsMakeupProcess(pFrame,pStream,faceRect); 554 } 555 ts_detectface_destroy_context(&fd_handle); 556 fd_handle = NULL; 557 } else { 558 LOGH("fd_handle == NULL"); 559 } 560 LOGD("detect end"); 561 } 562 LOGD("end bRet = %d ",bRet); 563 return bRet; 564 } 565 566 bool QCamera2HardwareInterface::TsMakeupProcess(mm_camera_buf_def_t *pFrame, 567 QCameraStream * pStream,TSRect& faceRect) { 568 bool bRet = false; 569 LOGD("begin"); 570 if (pStream == NULL || pFrame == NULL) { 571 LOGH("pStream == NULL || pFrame == NULL "); 572 return false; 573 } 574 575 int whiteLevel, cleanLevel; 576 bool enableMakeup = (faceRect.left > -1) && 577 (mParameters.getTsMakeupInfo(whiteLevel, cleanLevel)); 578 if (enableMakeup) { 579 cam_dimension_t dim; 580 cam_frame_len_offset_t offset; 581 pStream->getFrameDimension(dim); 582 pStream->getFrameOffset(offset); 583 unsigned char *tempOriBuf = NULL; 584 585 tempOriBuf = (unsigned char*)pFrame->buffer; 586 unsigned char *yBuf = tempOriBuf; 587 unsigned char *uvBuf = tempOriBuf + offset.mp[0].len; 588 unsigned char *tmpBuf = new unsigned char[offset.frame_len]; 589 if (tmpBuf == NULL) { 590 LOGH("tmpBuf == NULL "); 591 return false; 592 } 593 TSMakeupDataEx inMakeupData, outMakeupData; 594 whiteLevel = whiteLevel <= 0 ? 0 : (whiteLevel >= 100 ? 100 : whiteLevel); 595 cleanLevel = cleanLevel <= 0 ? 0 : (cleanLevel >= 100 ? 
100 : cleanLevel); 596 inMakeupData.frameWidth = dim.width; // NV21 Frame width > 0 597 inMakeupData.frameHeight = dim.height; // NV21 Frame height > 0 598 inMakeupData.yBuf = yBuf; // Y buffer pointer 599 inMakeupData.uvBuf = uvBuf; // VU buffer pointer 600 inMakeupData.yStride = offset.mp[0].stride; 601 inMakeupData.uvStride = offset.mp[1].stride; 602 outMakeupData.frameWidth = dim.width; // NV21 Frame width > 0 603 outMakeupData.frameHeight = dim.height; // NV21 Frame height > 0 604 outMakeupData.yBuf = tmpBuf; // Y buffer pointer 605 outMakeupData.uvBuf = tmpBuf + offset.mp[0].len; // VU buffer pointer 606 outMakeupData.yStride = offset.mp[0].stride; 607 outMakeupData.uvStride = offset.mp[1].stride; 608 LOGD("faceRect:left 2:%ld,,right:%ld,,top:%ld,,bottom:%ld,,Level:%dx%d", 609 faceRect.left,faceRect.right,faceRect.top,faceRect.bottom,cleanLevel,whiteLevel); 610 ts_makeup_skin_beautyEx(&inMakeupData, &outMakeupData, &(faceRect),cleanLevel,whiteLevel); 611 memcpy((unsigned char*)pFrame->buffer, tmpBuf, offset.frame_len); 612 QCameraMemory *memory = (QCameraMemory *)pFrame->mem_info; 613 memory->cleanCache(pFrame->buf_idx); 614 if (tmpBuf != NULL) { 615 delete[] tmpBuf; 616 tmpBuf = NULL; 617 } 618 } 619 LOGD("end bRet = %d ",bRet); 620 return bRet; 621 } 622 #endif 623 /*=========================================================================== 624 * FUNCTION : postproc_channel_cb_routine 625 * 626 * DESCRIPTION: helper function to handle postprocess superbuf callback directly from 627 * mm-camera-interface 628 * 629 * PARAMETERS : 630 * @recvd_frame : received super buffer 631 * @userdata : user data ptr 632 * 633 * RETURN : None 634 * 635 * NOTE : recvd_frame will be released after this call by caller, so if 636 * async operation needed for recvd_frame, it's our responsibility 637 * to save a copy for this variable to be used later. 
638 *==========================================================================*/ 639 void QCamera2HardwareInterface::postproc_channel_cb_routine(mm_camera_super_buf_t *recvd_frame, 640 void *userdata) 641 { 642 ATRACE_CALL(); 643 LOGH("[KPI Perf]: E"); 644 QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata; 645 646 if (pme == NULL || 647 pme->mCameraHandle == 0 || 648 !validate_handle(pme->mCameraHandle->camera_handle, 649 recvd_frame->camera_handle)) { 650 LOGE("camera obj not valid"); 651 return; 652 } 653 654 // save a copy for the superbuf 655 mm_camera_super_buf_t* frame = 656 (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t)); 657 if (frame == NULL) { 658 LOGE("Error allocating memory to save received_frame structure."); 659 return; 660 } 661 *frame = *recvd_frame; 662 663 if (recvd_frame->num_bufs > 0) { 664 LOGI("[KPI Perf]: frame_idx %d", recvd_frame->bufs[0]->frame_idx); 665 } 666 // Wait on JPEG create session 667 pme->waitDeferredWork(pme->mJpegJob); 668 669 // send to postprocessor 670 pme->m_postprocessor.processPPData(frame); 671 672 ATRACE_INT("Camera:Reprocess", 0); 673 LOGH("[KPI Perf]: X"); 674 } 675 676 /*=========================================================================== 677 * FUNCTION : synchronous_stream_cb_routine 678 * 679 * DESCRIPTION: Function to handle STREAM SYNC CALLBACKS 680 * 681 * PARAMETERS : 682 * @super_frame : received super buffer 683 * @stream : stream object 684 * @userdata : user data ptr 685 * 686 * RETURN : None 687 * 688 * NOTE : This Function is excecuted in mm-interface context. 689 * Avoid adding latency on this thread. 
690 *==========================================================================*/ 691 void QCamera2HardwareInterface::synchronous_stream_cb_routine( 692 mm_camera_super_buf_t *super_frame, QCameraStream * stream, 693 void *userdata) 694 { 695 nsecs_t frameTime = 0, mPreviewTimestamp = 0; 696 int err = NO_ERROR; 697 698 ATRACE_CALL(); 699 LOGH("[KPI Perf] : BEGIN"); 700 QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata; 701 702 if (pme == NULL) { 703 LOGE("Invalid hardware object"); 704 return; 705 } 706 if (super_frame == NULL) { 707 LOGE("Invalid super buffer"); 708 return; 709 } 710 mm_camera_buf_def_t *frame = super_frame->bufs[0]; 711 if (NULL == frame) { 712 LOGE("Frame is NULL"); 713 return; 714 } 715 716 if (stream->getMyType() != CAM_STREAM_TYPE_PREVIEW) { 717 LOGE("This is only for PREVIEW stream for now"); 718 return; 719 } 720 721 if(pme->m_bPreviewStarted) { 722 LOGI("[KPI Perf] : PROFILE_FIRST_PREVIEW_FRAME"); 723 pme->m_bPreviewStarted = false; 724 } 725 726 QCameraGrallocMemory *memory = (QCameraGrallocMemory *) frame->mem_info; 727 if (!pme->needProcessPreviewFrame(frame->frame_idx)) { 728 pthread_mutex_lock(&pme->mGrallocLock); 729 pme->mLastPreviewFrameID = frame->frame_idx; 730 memory->setBufferStatus(frame->buf_idx, STATUS_SKIPPED); 731 pthread_mutex_unlock(&pme->mGrallocLock); 732 LOGH("preview is not running, no need to process"); 733 return; 734 } 735 736 if (pme->needDebugFps()) { 737 pme->debugShowPreviewFPS(); 738 } 739 740 frameTime = nsecs_t(frame->ts.tv_sec) * 1000000000LL + frame->ts.tv_nsec; 741 // Convert Boottime from camera to Monotime for display if needed. 742 // Otherwise, mBootToMonoTimestampOffset value will be 0. 
743 frameTime = frameTime - pme->mBootToMonoTimestampOffset; 744 // Calculate the future presentation time stamp for displaying frames at regular interval 745 mPreviewTimestamp = pme->mCameraDisplay.computePresentationTimeStamp(frameTime); 746 stream->mStreamTimestamp = frameTime; 747 748 #ifdef TARGET_TS_MAKEUP 749 pme->TsMakeupProcess_Preview(frame,stream); 750 #endif 751 752 // Enqueue buffer to gralloc. 753 uint32_t idx = frame->buf_idx; 754 LOGD("%p Enqueue Buffer to display %d frame Time = %lld Display Time = %lld", 755 pme, idx, frameTime, mPreviewTimestamp); 756 err = memory->enqueueBuffer(idx, mPreviewTimestamp); 757 758 if (err == NO_ERROR) { 759 pthread_mutex_lock(&pme->mGrallocLock); 760 pme->mLastPreviewFrameID = frame->frame_idx; 761 pme->mEnqueuedBuffers++; 762 pthread_mutex_unlock(&pme->mGrallocLock); 763 } else { 764 LOGE("Enqueue Buffer failed"); 765 } 766 767 LOGH("[KPI Perf] : END"); 768 return; 769 } 770 771 /*=========================================================================== 772 * FUNCTION : preview_stream_cb_routine 773 * 774 * DESCRIPTION: helper function to handle preview frame from preview stream in 775 * normal case with display. 776 * 777 * PARAMETERS : 778 * @super_frame : received super buffer 779 * @stream : stream object 780 * @userdata : user data ptr 781 * 782 * RETURN : None 783 * 784 * NOTE : caller passes the ownership of super_frame, it's our 785 * responsibility to free super_frame once it's done. The new 786 * preview frame will be sent to display, and an older frame 787 * will be dequeued from display and needs to be returned back 788 * to kernel for future use. 
789 *==========================================================================*/ 790 void QCamera2HardwareInterface::preview_stream_cb_routine(mm_camera_super_buf_t *super_frame, 791 QCameraStream * stream, 792 void *userdata) 793 { 794 KPI_ATRACE_CALL(); 795 LOGH("[KPI Perf] : BEGIN"); 796 int err = NO_ERROR; 797 QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata; 798 QCameraGrallocMemory *memory = (QCameraGrallocMemory *)super_frame->bufs[0]->mem_info; 799 uint8_t dequeueCnt = 0; 800 801 if (pme == NULL) { 802 LOGE("Invalid hardware object"); 803 free(super_frame); 804 return; 805 } 806 if (memory == NULL) { 807 LOGE("Invalid memory object"); 808 free(super_frame); 809 return; 810 } 811 812 mm_camera_buf_def_t *frame = super_frame->bufs[0]; 813 if (NULL == frame) { 814 LOGE("preview frame is NLUL"); 815 free(super_frame); 816 return; 817 } 818 819 // For instant capture and for instant AEC, keep track of the frame counter. 820 // This count will be used to check against the corresponding bound values. 
821 if (pme->mParameters.isInstantAECEnabled() || 822 pme->mParameters.isInstantCaptureEnabled()) { 823 pme->mInstantAecFrameCount++; 824 } 825 826 pthread_mutex_lock(&pme->mGrallocLock); 827 if (!stream->isSyncCBEnabled()) { 828 pme->mLastPreviewFrameID = frame->frame_idx; 829 } 830 bool discardFrame = false; 831 if (!stream->isSyncCBEnabled() && 832 !pme->needProcessPreviewFrame(frame->frame_idx)) 833 { 834 discardFrame = true; 835 } else if (stream->isSyncCBEnabled() && 836 memory->isBufSkipped(frame->buf_idx)) { 837 discardFrame = true; 838 memory->setBufferStatus(frame->buf_idx, STATUS_IDLE); 839 } 840 pthread_mutex_unlock(&pme->mGrallocLock); 841 842 if (discardFrame) { 843 LOGH("preview is not running, no need to process"); 844 stream->bufDone(frame->buf_idx); 845 free(super_frame); 846 return; 847 } 848 849 uint32_t idx = frame->buf_idx; 850 851 pme->dumpFrameToFile(stream, frame, QCAMERA_DUMP_FRM_PREVIEW); 852 853 if(pme->m_bPreviewStarted) { 854 LOGI("[KPI Perf] : PROFILE_FIRST_PREVIEW_FRAME"); 855 pme->m_bPreviewStarted = false ; 856 } 857 858 if (!stream->isSyncCBEnabled()) { 859 860 if (pme->needDebugFps()) { 861 pme->debugShowPreviewFPS(); 862 } 863 864 LOGD("Enqueue Buffer to display %d", idx); 865 #ifdef TARGET_TS_MAKEUP 866 pme->TsMakeupProcess_Preview(frame,stream); 867 #endif 868 err = memory->enqueueBuffer(idx); 869 870 if (err == NO_ERROR) { 871 pthread_mutex_lock(&pme->mGrallocLock); 872 pme->mEnqueuedBuffers++; 873 dequeueCnt = pme->mEnqueuedBuffers; 874 pthread_mutex_unlock(&pme->mGrallocLock); 875 } else { 876 LOGE("Enqueue Buffer failed"); 877 } 878 } else { 879 pthread_mutex_lock(&pme->mGrallocLock); 880 dequeueCnt = pme->mEnqueuedBuffers; 881 pthread_mutex_unlock(&pme->mGrallocLock); 882 } 883 884 // Display the buffer. 
885 LOGD("%p displayBuffer %d E", pme, idx); 886 uint8_t numMapped = memory->getMappable(); 887 888 for (uint8_t i = 0; i < dequeueCnt; i++) { 889 int dequeuedIdx = memory->dequeueBuffer(); 890 if (dequeuedIdx < 0 || dequeuedIdx >= memory->getCnt()) { 891 LOGE("Invalid dequeued buffer index %d from display", 892 dequeuedIdx); 893 break; 894 } else { 895 pthread_mutex_lock(&pme->mGrallocLock); 896 pme->mEnqueuedBuffers--; 897 pthread_mutex_unlock(&pme->mGrallocLock); 898 if (dequeuedIdx >= numMapped) { 899 // This buffer has not yet been mapped to the backend 900 err = stream->mapNewBuffer((uint32_t)dequeuedIdx); 901 if (memory->checkIfAllBuffersMapped()) { 902 // check if mapping is done for all the buffers 903 // Signal the condition for create jpeg session 904 Mutex::Autolock l(pme->mMapLock); 905 pme->mMapCond.signal(); 906 LOGH("Mapping done for all bufs"); 907 } else { 908 LOGH("All buffers are not yet mapped"); 909 } 910 } 911 } 912 913 if (err < 0) { 914 LOGE("buffer mapping failed %d", err); 915 } else { 916 // Return dequeued buffer back to driver 917 err = stream->bufDone((uint32_t)dequeuedIdx); 918 if ( err < 0) { 919 LOGW("stream bufDone failed %d", err); 920 } 921 } 922 } 923 924 // Handle preview data callback 925 if (pme->m_channels[QCAMERA_CH_TYPE_CALLBACK] == NULL) { 926 if (pme->needSendPreviewCallback() && 927 (!pme->mParameters.isSceneSelectionEnabled())) { 928 int32_t rc = pme->sendPreviewCallback(stream, memory, idx); 929 if (NO_ERROR != rc) { 930 LOGW("Preview callback was not sent succesfully"); 931 } 932 } 933 } 934 935 free(super_frame); 936 LOGH("[KPI Perf] : END"); 937 return; 938 } 939 940 /*=========================================================================== 941 * FUNCTION : sendPreviewCallback 942 * 943 * DESCRIPTION: helper function for triggering preview callbacks 944 * 945 * PARAMETERS : 946 * @stream : stream object 947 * @memory : Stream memory allocator 948 * @idx : buffer index 949 * 950 * RETURN : int32_t type of status 
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int32_t QCamera2HardwareInterface::sendPreviewCallback(QCameraStream *stream,
        QCameraMemory *memory, uint32_t idx)
{
    // Two delivery paths exist below:
    //  - zero-copy: wrap the stream buffer's fd via mGetMemory (previewMem/data)
    //  - re-pack:   allocate a fresh buffer (dataToApp) and memcpy the frame
    //               into the tightly-packed layout the app expects
    camera_memory_t *previewMem = NULL;
    camera_memory_t *data = NULL;
    camera_memory_t *dataToApp = NULL;
    size_t previewBufSize = 0;              // size the app expects (tight WxH*3/2 packing)
    size_t previewBufSizeFromCallback = 0;  // size implied by the stream's stride/scanline
    cam_dimension_t preview_dim;
    cam_format_t previewFmt;
    int32_t rc = NO_ERROR;
    int32_t yStride = 0;
    int32_t yScanline = 0;
    int32_t uvStride = 0;
    int32_t uvScanline = 0;
    int32_t uStride = 0;
    int32_t uScanline = 0;
    int32_t vStride = 0;
    int32_t vScanline = 0;
    int32_t yStrideToApp = 0;
    int32_t uvStrideToApp = 0;
    int32_t yScanlineToApp = 0;
    int32_t uvScanlineToApp = 0;
    int32_t srcOffset = 0;
    int32_t dstOffset = 0;
    int32_t srcBaseOffset = 0;
    int32_t dstBaseOffset = 0;
    int i;

    if ((NULL == stream) || (NULL == memory)) {
        LOGE("Invalid preview callback input");
        return BAD_VALUE;
    }

    cam_stream_info_t *streamInfo =
            reinterpret_cast<cam_stream_info_t *>(stream->getStreamInfoBuf()->getPtr(0));
    if (NULL == streamInfo) {
        LOGE("Invalid streamInfo");
        return BAD_VALUE;
    }

    stream->getFrameDimension(preview_dim);
    stream->getFormat(previewFmt);

    // App-facing layout: tightly packed, stride == width, no padding rows.
    yStrideToApp = preview_dim.width;
    yScanlineToApp = preview_dim.height;
    uvStrideToApp = yStrideToApp;
    uvScanlineToApp = yScanlineToApp / 2;

    /* The preview buffer size in the callback should be
     * (width*height*bytes_per_pixel). As all preview formats we support,
     * use 12 bits per pixel, buffer size = previewWidth * previewHeight * 3/2.
     * We need to put a check if some other formats are supported in future. */
    if ((previewFmt == CAM_FORMAT_YUV_420_NV21) ||
        (previewFmt == CAM_FORMAT_YUV_420_NV12) ||
        (previewFmt == CAM_FORMAT_YUV_420_YV12) ||
        (previewFmt == CAM_FORMAT_YUV_420_NV12_VENUS) ||
        (previewFmt == CAM_FORMAT_YUV_420_NV21_VENUS) ||
        (previewFmt == CAM_FORMAT_YUV_420_NV21_ADRENO)) {
        if(previewFmt == CAM_FORMAT_YUV_420_YV12) {
            // YV12 is 3-planar (Y, U, V): size follows the per-plane strides.
            yStride = streamInfo->buf_planes.plane_info.mp[0].stride;
            yScanline = streamInfo->buf_planes.plane_info.mp[0].scanline;
            uStride = streamInfo->buf_planes.plane_info.mp[1].stride;
            uScanline = streamInfo->buf_planes.plane_info.mp[1].scanline;
            vStride = streamInfo->buf_planes.plane_info.mp[2].stride;
            vScanline = streamInfo->buf_planes.plane_info.mp[2].scanline;

            previewBufSize = (size_t)
                    (yStride * yScanline + uStride * uScanline + vStride * vScanline);
            // For YV12 both sizes are taken from the stream layout, so the
            // zero-copy path below is always chosen.
            previewBufSizeFromCallback = previewBufSize;
        } else {
            // Semi-planar NV12/NV21 variants: Y plane + interleaved UV plane.
            yStride = streamInfo->buf_planes.plane_info.mp[0].stride;
            yScanline = streamInfo->buf_planes.plane_info.mp[0].scanline;
            uvStride = streamInfo->buf_planes.plane_info.mp[1].stride;
            uvScanline = streamInfo->buf_planes.plane_info.mp[1].scanline;

            previewBufSize = (size_t)
                    ((yStrideToApp * yScanlineToApp) + (uvStrideToApp * uvScanlineToApp));

            previewBufSizeFromCallback = (size_t)
                    ((yStride * yScanline) + (uvStride * uvScanline));
        }
        if(previewBufSize == previewBufSizeFromCallback) {
            // Stream layout already matches what the app expects: share the
            // stream buffer's fd directly, no copy.
            previewMem = mGetMemory(memory->getFd(idx),
                       previewBufSize, 1, mCallbackCookie);
            if (!previewMem || !previewMem->data) {
                LOGE("mGetMemory failed.\n");
                return NO_MEMORY;
            } else {
                data = previewMem;
            }
        } else {
            // Stride/scanline padding differs: copy row by row into a freshly
            // allocated, tightly packed buffer for the app.
            data = memory->getMemory(idx, false);
            dataToApp = mGetMemory(-1, previewBufSize, 1, mCallbackCookie);
            if (!dataToApp || !dataToApp->data) {
                LOGE("mGetMemory failed.\n");
                return NO_MEMORY;
            }

            // Y plane: drop per-row padding (yStride -> yStrideToApp).
            for (i = 0; i < preview_dim.height; i++) {
                srcOffset = i * yStride;
                dstOffset = i * yStrideToApp;

                memcpy((unsigned char *) dataToApp->data + dstOffset,
                        (unsigned char *) data->data + srcOffset,
                        (size_t)yStrideToApp);
            }

            // UV plane starts after the (padded) Y plane in the source and
            // after the packed Y plane in the destination.
            srcBaseOffset = yStride * yScanline;
            dstBaseOffset = yStrideToApp * yScanlineToApp;

            // NOTE(review): the copy length uses yStrideToApp, which equals
            // uvStrideToApp here (set from yStrideToApp above).
            for (i = 0; i < preview_dim.height/2; i++) {
                srcOffset = i * uvStride + srcBaseOffset;
                dstOffset = i * uvStrideToApp + dstBaseOffset;

                memcpy((unsigned char *) dataToApp->data + dstOffset,
                        (unsigned char *) data->data + srcOffset,
                        (size_t)yStrideToApp);
            }
        }
    } else {
        /*Invalid Buffer content. But can be used as a first preview frame trigger in
        framework/app */
        previewBufSize = (size_t)
                ((yStrideToApp * yScanlineToApp) +
                (uvStrideToApp * uvScanlineToApp));
        previewBufSizeFromCallback = 0;
        // NOTE(review): previewBufSize is size_t but formatted with %d — should
        // be %zu; left untouched here (comment-only change).
        LOGW("Invalid preview format. Buffer content cannot be processed size = %d",
                previewBufSize);
        dataToApp = mGetMemory(-1, previewBufSize, 1, mCallbackCookie);
        if (!dataToApp || !dataToApp->data) {
            LOGE("mGetMemory failed.\n");
            return NO_MEMORY;
        }
    }
    qcamera_callback_argm_t cbArg;
    memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
    cbArg.cb_type = QCAMERA_DATA_CALLBACK;
    cbArg.msg_type = CAMERA_MSG_PREVIEW_FRAME;
    // Zero-copy path sends 'data' (== previewMem); every other path sends the
    // allocated copy 'dataToApp'.
    if (previewBufSize != 0 && previewBufSizeFromCallback != 0 &&
            previewBufSize == previewBufSizeFromCallback) {
        cbArg.data = data;
    } else {
        cbArg.data = dataToApp;
    }
    // Whichever camera_memory_t we own gets released by releaseCameraMemory
    // once the notifier is done with it.
    if ( previewMem ) {
        cbArg.user_data = previewMem;
        cbArg.release_cb = releaseCameraMemory;
    } else if (dataToApp) {
        cbArg.user_data = dataToApp;
        cbArg.release_cb = releaseCameraMemory;
    }
    cbArg.cookie = this;
    rc = m_cbNotifier.notifyCallback(cbArg);
    if (rc != NO_ERROR) {
        LOGW("fail sending notification");
        // Notification never queued: release_cb will not fire, so release the
        // owned memory here to avoid a leak.
        if (previewMem) {
            previewMem->release(previewMem);
        } else if (dataToApp) {
            dataToApp->release(dataToApp);
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : nodisplay_preview_stream_cb_routine
 *
 * DESCRIPTION: helper function to handle preview frame from preview stream in
 *              no-display case
 *
 * PARAMETERS :
 *   @super_frame : received super buffer
 *   @stream      : stream object
 *   @userdata    : user data ptr
 *
 * RETURN     : None
 *
 * NOTE       : caller passes the ownership of super_frame, it's our
 *              responsibility to free super_frame once it's done.
 *==========================================================================*/
void QCamera2HardwareInterface::nodisplay_preview_stream_cb_routine(
        mm_camera_super_buf_t *super_frame,
        QCameraStream *stream,
        void * userdata)
{
    ATRACE_CALL();
    LOGH("[KPI Perf] E");
    QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
    // Validate the HWI object and that the superbuf belongs to this camera.
    if (pme == NULL ||
        pme->mCameraHandle == NULL ||
        pme->mCameraHandle->camera_handle != super_frame->camera_handle){
        LOGE("camera obj not valid");
        // simply free super frame
        free(super_frame);
        return;
    }
    mm_camera_buf_def_t *frame = super_frame->bufs[0];
    if (NULL == frame) {
        LOGE("preview frame is NULL");
        free(super_frame);
        return;
    }

    if (!pme->needProcessPreviewFrame(frame->frame_idx)) {
        LOGH("preview is not running, no need to process");
        // Return the buffer to the stream before dropping the superbuf.
        stream->bufDone(frame->buf_idx);
        free(super_frame);
        return;
    }

    if (pme->needDebugFps()) {
        pme->debugShowPreviewFPS();
    }

    QCameraMemory *previewMemObj = (QCameraMemory *)frame->mem_info;
    camera_memory_t *preview_mem = NULL;
    if (previewMemObj != NULL) {
        preview_mem = previewMemObj->getMemory(frame->buf_idx, false);
    }
    if (NULL != previewMemObj && NULL != preview_mem) {
        pme->dumpFrameToFile(stream, frame, QCAMERA_DUMP_FRM_PREVIEW);

        // Only the primary camera in a related-camera pair delivers preview
        // callbacks to the app.
        if ((pme->needProcessPreviewFrame(frame->frame_idx)) &&
                pme->needSendPreviewCallback() &&
                (pme->getRelatedCamSyncInfo()->mode != CAM_MODE_SECONDARY)) {
            qcamera_callback_argm_t cbArg;
            memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
            cbArg.cb_type = QCAMERA_DATA_CALLBACK;
            cbArg.msg_type = CAMERA_MSG_PREVIEW_FRAME;
            cbArg.data = preview_mem;
            cbArg.user_data = (void *) &frame->buf_idx;
            cbArg.cookie = stream;
            // returnStreamBuffer gives the buffer back once the notifier is done.
            cbArg.release_cb = returnStreamBuffer;
            int32_t rc = pme->m_cbNotifier.notifyCallback(cbArg);
            if (rc != NO_ERROR) {
                LOGE ("fail sending data notify");
                // Notification not queued -> release_cb never fires; return
                // the buffer ourselves.
                stream->bufDone(frame->buf_idx);
            }
        } else {
            stream->bufDone(frame->buf_idx);
        }
    }
    // NOTE(review): if previewMemObj/preview_mem were NULL the buffer is not
    // returned via bufDone here — confirm against buffer accounting upstream.
    free(super_frame);
    LOGH("[KPI Perf] X");
}

/*===========================================================================
 * FUNCTION   : rdi_mode_stream_cb_routine
 *
 * DESCRIPTION: helper function to handle RDI frame from preview stream in
 *              rdi mode case
 *
 * PARAMETERS :
 *   @super_frame : received super buffer
 *   @stream      : stream object
 *   @userdata    : user data ptr
 *
 * RETURN     : None
 *
 * NOTE       : caller passes the ownership of super_frame, it's our
 *              responsibility to free super_frame once it's done.
1217 *==========================================================================*/ 1218 void QCamera2HardwareInterface::rdi_mode_stream_cb_routine( 1219 mm_camera_super_buf_t *super_frame, 1220 QCameraStream *stream, 1221 void * userdata) 1222 { 1223 ATRACE_CALL(); 1224 LOGH("RDI_DEBUG Enter"); 1225 QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata; 1226 if (pme == NULL || 1227 pme->mCameraHandle == NULL || 1228 pme->mCameraHandle->camera_handle != super_frame->camera_handle){ 1229 LOGE("camera obj not valid"); 1230 free(super_frame); 1231 return; 1232 } 1233 mm_camera_buf_def_t *frame = super_frame->bufs[0]; 1234 if (NULL == frame) { 1235 LOGE("preview frame is NLUL"); 1236 goto end; 1237 } 1238 if (!pme->needProcessPreviewFrame(frame->frame_idx)) { 1239 LOGE("preview is not running, no need to process"); 1240 stream->bufDone(frame->buf_idx); 1241 goto end; 1242 } 1243 if (pme->needDebugFps()) { 1244 pme->debugShowPreviewFPS(); 1245 } 1246 // Non-secure Mode 1247 if (!pme->isSecureMode()) { 1248 QCameraMemory *previewMemObj = (QCameraMemory *)frame->mem_info; 1249 if (NULL == previewMemObj) { 1250 LOGE("previewMemObj is NULL"); 1251 stream->bufDone(frame->buf_idx); 1252 goto end; 1253 } 1254 1255 camera_memory_t *preview_mem = previewMemObj->getMemory(frame->buf_idx, false); 1256 if (NULL != preview_mem) { 1257 previewMemObj->cleanCache(frame->buf_idx); 1258 // Dump RAW frame 1259 pme->dumpFrameToFile(stream, frame, QCAMERA_DUMP_FRM_RAW); 1260 // Notify Preview callback frame 1261 if (pme->needProcessPreviewFrame(frame->frame_idx) && 1262 pme->mDataCb != NULL && 1263 pme->msgTypeEnabledWithLock(CAMERA_MSG_PREVIEW_FRAME) > 0) { 1264 qcamera_callback_argm_t cbArg; 1265 memset(&cbArg, 0, sizeof(qcamera_callback_argm_t)); 1266 cbArg.cb_type = QCAMERA_DATA_CALLBACK; 1267 cbArg.msg_type = CAMERA_MSG_PREVIEW_FRAME; 1268 cbArg.data = preview_mem; 1269 cbArg.user_data = (void *) &frame->buf_idx; 1270 cbArg.cookie = stream; 1271 cbArg.release_cb = 
returnStreamBuffer; 1272 pme->m_cbNotifier.notifyCallback(cbArg); 1273 } else { 1274 LOGE("preview_mem is NULL"); 1275 stream->bufDone(frame->buf_idx); 1276 } 1277 } 1278 else { 1279 LOGE("preview_mem is NULL"); 1280 stream->bufDone(frame->buf_idx); 1281 } 1282 } else { 1283 // Secure Mode 1284 // We will do QCAMERA_NOTIFY_CALLBACK and share FD in case of secure mode 1285 QCameraMemory *previewMemObj = (QCameraMemory *)frame->mem_info; 1286 if (NULL == previewMemObj) { 1287 LOGE("previewMemObj is NULL"); 1288 stream->bufDone(frame->buf_idx); 1289 goto end; 1290 } 1291 1292 int fd = previewMemObj->getFd(frame->buf_idx); 1293 LOGD("Preview frame fd =%d for index = %d ", fd, frame->buf_idx); 1294 if (pme->needProcessPreviewFrame(frame->frame_idx) && 1295 pme->mDataCb != NULL && 1296 pme->msgTypeEnabledWithLock(CAMERA_MSG_PREVIEW_FRAME) > 0) { 1297 // Prepare Callback structure 1298 qcamera_callback_argm_t cbArg; 1299 memset(&cbArg, 0, sizeof(qcamera_callback_argm_t)); 1300 cbArg.cb_type = QCAMERA_NOTIFY_CALLBACK; 1301 cbArg.msg_type = CAMERA_MSG_PREVIEW_FRAME; 1302 #ifndef VANILLA_HAL 1303 cbArg.ext1 = CAMERA_FRAME_DATA_FD; 1304 cbArg.ext2 = fd; 1305 #endif 1306 cbArg.user_data = (void *) &frame->buf_idx; 1307 cbArg.cookie = stream; 1308 cbArg.release_cb = returnStreamBuffer; 1309 pme->m_cbNotifier.notifyCallback(cbArg); 1310 } else { 1311 LOGH("No need to process preview frame, return buffer"); 1312 stream->bufDone(frame->buf_idx); 1313 } 1314 } 1315 end: 1316 free(super_frame); 1317 LOGH("RDI_DEBUG Exit"); 1318 return; 1319 } 1320 1321 /*=========================================================================== 1322 * FUNCTION : postview_stream_cb_routine 1323 * 1324 * DESCRIPTION: helper function to handle post frame from postview stream 1325 * 1326 * PARAMETERS : 1327 * @super_frame : received super buffer 1328 * @stream : stream object 1329 * @userdata : user data ptr 1330 * 1331 * RETURN : None 1332 * 1333 * NOTE : caller passes the ownership of super_frame, 
it's our 1334 * responsibility to free super_frame once it's done. 1335 *==========================================================================*/ 1336 void QCamera2HardwareInterface::postview_stream_cb_routine(mm_camera_super_buf_t *super_frame, 1337 QCameraStream *stream, 1338 void *userdata) 1339 { 1340 ATRACE_CALL(); 1341 int err = NO_ERROR; 1342 QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata; 1343 QCameraGrallocMemory *memory = (QCameraGrallocMemory *)super_frame->bufs[0]->mem_info; 1344 1345 if (pme == NULL) { 1346 LOGE("Invalid hardware object"); 1347 free(super_frame); 1348 return; 1349 } 1350 if (memory == NULL) { 1351 LOGE("Invalid memory object"); 1352 free(super_frame); 1353 return; 1354 } 1355 1356 LOGH("[KPI Perf] : BEGIN"); 1357 1358 mm_camera_buf_def_t *frame = super_frame->bufs[0]; 1359 if (NULL == frame) { 1360 LOGE("preview frame is NULL"); 1361 free(super_frame); 1362 return; 1363 } 1364 1365 QCameraMemory *memObj = (QCameraMemory *)frame->mem_info; 1366 if (NULL != memObj) { 1367 pme->dumpFrameToFile(stream, frame, QCAMERA_DUMP_FRM_THUMBNAIL); 1368 } 1369 1370 // Return buffer back to driver 1371 err = stream->bufDone(frame->buf_idx); 1372 if ( err < 0) { 1373 LOGE("stream bufDone failed %d", err); 1374 } 1375 1376 free(super_frame); 1377 LOGH("[KPI Perf] : END"); 1378 return; 1379 } 1380 1381 /*=========================================================================== 1382 * FUNCTION : video_stream_cb_routine 1383 * 1384 * DESCRIPTION: helper function to handle video frame from video stream 1385 * 1386 * PARAMETERS : 1387 * @super_frame : received super buffer 1388 * @stream : stream object 1389 * @userdata : user data ptr 1390 * 1391 * RETURN : None 1392 * 1393 * NOTE : caller passes the ownership of super_frame, it's our 1394 * responsibility to free super_frame once it's done. video 1395 * frame will be sent to video encoder. 
 *              Once video encoder is done with the video frame, it will call
 *              another API (release_recording_frame) to return the frame back
 *==========================================================================*/
void QCamera2HardwareInterface::video_stream_cb_routine(mm_camera_super_buf_t *super_frame,
                                                        QCameraStream *stream,
                                                        void *userdata)
{
    ATRACE_CALL();
    QCameraVideoMemory *videoMemObj = NULL;
    camera_memory_t *video_mem = NULL;   // buffer (or batch meta buffer) delivered to the encoder
    nsecs_t timeStamp = 0;               // timestamp sent with the data callback
    bool triggerTCB = FALSE;             // set once video_mem is ready to be sent out

    LOGD("[KPI Perf] : BEGIN");
    QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
    // Drop the superbuf if the HWI object or camera handle is not valid.
    if (pme == NULL ||
        pme->mCameraHandle == 0 ||
        !validate_handle(pme->mCameraHandle->camera_handle,
        super_frame->camera_handle)) {
        // simply free super frame
        free(super_frame);
        return;
    }

    mm_camera_buf_def_t *frame = super_frame->bufs[0];

    if (pme->needDebugFps()) {
        pme->debugShowVideoFPS();
    }
    if(pme->m_bRecordStarted) {
        LOGI("[KPI Perf] : PROFILE_FIRST_RECORD_FRAME");
        pme->m_bRecordStarted = false ;
    }
    LOGD("Stream(%d), Timestamp: %ld %ld",
          frame->stream_id,
          frame->ts.tv_sec,
          frame->ts.tv_nsec);

    if (frame->buf_type == CAM_STREAM_BUF_TYPE_MPLANE) {
        if (pme->mParameters.getVideoBatchSize() == 0) {
            // Non-batched path: hand out this single frame's memory directly.
            timeStamp = nsecs_t(frame->ts.tv_sec) * 1000000000LL
                    + frame->ts.tv_nsec;
            pme->dumpFrameToFile(stream, frame, QCAMERA_DUMP_FRM_VIDEO);
            videoMemObj = (QCameraVideoMemory *)frame->mem_info;
            video_mem = NULL;
            if (NULL != videoMemObj) {
                video_mem = videoMemObj->getMemory(frame->buf_idx,
                        (pme->mStoreMetaDataInFrame > 0)? true : false);
                triggerTCB = TRUE;
                LOGH("Video frame TimeStamp : %lld batch = 0 index = %d",
                        timeStamp, frame->buf_idx);
            }
        } else {
            //Handle video batch callback
            // Batched path: accumulate getVideoBatchSize() frames into one
            // metadata native handle, then send it out as a single callback.
            native_handle_t *nh = NULL;
            pme->dumpFrameToFile(stream, frame, QCAMERA_DUMP_FRM_VIDEO);
            QCameraVideoMemory *videoMemObj = (QCameraVideoMemory *)frame->mem_info;
            if ((stream->mCurMetaMemory == NULL)
                    || (stream->mCurBufIndex == -1)) {
                //get Free metadata available
                // Pick the first meta buffer not currently owned by the consumer.
                for (int i = 0; i < CAMERA_MIN_VIDEO_BATCH_BUFFERS; i++) {
                    if (stream->mStreamMetaMemory[i].consumerOwned == 0) {
                        stream->mCurMetaMemory = videoMemObj->getMemory(i,true);
                        stream->mCurBufIndex = 0;
                        stream->mCurMetaIndex = i;
                        stream->mStreamMetaMemory[i].numBuffers = 0;
                        break;
                    }
                }
            }
            video_mem = stream->mCurMetaMemory;
            nh = videoMemObj->getNativeHandle(stream->mCurMetaIndex);
            if (video_mem == NULL || nh == NULL) {
                LOGE("No Free metadata. Drop this frame");
                stream->mCurBufIndex = -1;
                stream->bufDone(frame->buf_idx);
                free(super_frame);
                return;
            }

            int index = stream->mCurBufIndex;
            int fd_cnt = pme->mParameters.getVideoBatchSize();
            nsecs_t frame_ts = nsecs_t(frame->ts.tv_sec) * 1000000000LL
                    + frame->ts.tv_nsec;
            if (index == 0) {
                // First frame of the batch sets the batch timestamp baseline.
                stream->mFirstTimeStamp = frame_ts;
            }

            // Record which stream buffer landed in this batch slot so it can be
            // returned when the consumer releases the batch.
            stream->mStreamMetaMemory[stream->mCurMetaIndex].buf_index[index]
                    = (uint8_t)frame->buf_idx;
            stream->mStreamMetaMemory[stream->mCurMetaIndex].numBuffers++;
            stream->mStreamMetaMemory[stream->mCurMetaIndex].consumerOwned
                    = TRUE;
            /*
             * data[0] => FD
             * data[mNumFDs + 1] => OFFSET
             * data[mNumFDs + 2] => SIZE
             * data[mNumFDs + 3] => Usage Flag (Color format/Compression)
             * data[mNumFDs + 4] => TIMESTAMP
             * data[mNumFDs + 5] => FORMAT
             */
            nh->data[index] = videoMemObj->getFd(frame->buf_idx);
            nh->data[index + fd_cnt] = 0;
            nh->data[index + (fd_cnt * 2)] = (int)videoMemObj->getSize(frame->buf_idx);
            nh->data[index + (fd_cnt * 3)] = videoMemObj->getUsage();
            // Per-frame timestamps are stored relative to the batch baseline.
            nh->data[index + (fd_cnt * 4)] = (int)(frame_ts - stream->mFirstTimeStamp);
            nh->data[index + (fd_cnt * 5)] = videoMemObj->getFormat();
            stream->mCurBufIndex++;
            if (stream->mCurBufIndex == fd_cnt) {
                // Batch is full: send it out and reset the accumulation state.
                timeStamp = stream->mFirstTimeStamp;
                LOGH("Video frame to encoder TimeStamp : %lld batch = %d Buffer idx = %d",
                        timeStamp, fd_cnt,
                        nh->data[nh->numFds + nh->numInts - VIDEO_METADATA_NUM_COMMON_INTS]);
                stream->mCurBufIndex = -1;
                stream->mCurMetaIndex = -1;
                stream->mCurMetaMemory = NULL;
                triggerTCB = TRUE;
            }
        }
    } else {
        // User-buffer path: one container frame holds several plane buffers;
        // pack all of them into the frame's native handle.
        videoMemObj = (QCameraVideoMemory *)frame->mem_info;
        video_mem = NULL;
        native_handle_t *nh = NULL;
        int fd_cnt = frame->user_buf.bufs_used;
        if (NULL != videoMemObj) {
            video_mem = videoMemObj->getMemory(frame->buf_idx, true);
            nh = videoMemObj->getNativeHandle(frame->buf_idx);
        } else {
            LOGE("videoMemObj NULL");
        }

        if (nh != NULL) {
            timeStamp = nsecs_t(frame->ts.tv_sec) * 1000000000LL
                    + frame->ts.tv_nsec;

            for (int i = 0; i < fd_cnt; i++) {
                if (frame->user_buf.buf_idx[i] >= 0) {
                    mm_camera_buf_def_t *plane_frame =
                            &frame->user_buf.plane_buf[frame->user_buf.buf_idx[i]];
                    QCameraVideoMemory *frameobj =
                            (QCameraVideoMemory *)plane_frame->mem_info;
                    int usage = frameobj->getUsage();
                    nsecs_t frame_ts = nsecs_t(plane_frame->ts.tv_sec) * 1000000000LL
                            + plane_frame->ts.tv_nsec;
                    /*
                       data[0] => FD
                       data[mNumFDs + 1] => OFFSET
                       data[mNumFDs + 2] => SIZE
                       data[mNumFDs + 3] => Usage Flag (Color format/Compression)
                       data[mNumFDs + 4] => TIMESTAMP
                       data[mNumFDs + 5] => FORMAT
                    */
                    nh->data[i] = frameobj->getFd(plane_frame->buf_idx);
                    nh->data[fd_cnt + i] = 0;
                    nh->data[(2 * fd_cnt) + i] = (int)frameobj->getSize(plane_frame->buf_idx);
                    nh->data[(3 * fd_cnt) + i] = usage;
                    // Timestamp delta relative to the container frame's timestamp.
                    nh->data[(4 * fd_cnt) + i] = (int)(frame_ts - timeStamp);
                    nh->data[(5 * fd_cnt) + i] = frameobj->getFormat();
                    LOGD("Send Video frames to services/encoder delta : %lld FD = %d index = %d",
                            (frame_ts - timeStamp), plane_frame->fd, plane_frame->buf_idx);
                    pme->dumpFrameToFile(stream, plane_frame, QCAMERA_DUMP_FRM_VIDEO);
                }
            }
            triggerTCB = TRUE;
            LOGH("Batch buffer TimeStamp : %lld FD = %d index = %d fd_cnt = %d",
                    timeStamp, frame->fd, frame->buf_idx, fd_cnt);
        } else {
            LOGE("No Video Meta Available. Return Buffer");
            stream->bufDone(super_frame->bufs[0]->buf_idx);
        }
    }

    if ((NULL != video_mem) && (triggerTCB == TRUE)) {
        if ((pme->mDataCbTimestamp != NULL) &&
            pme->msgTypeEnabledWithLock(CAMERA_MSG_VIDEO_FRAME) > 0) {
            qcamera_callback_argm_t cbArg;
            memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
            cbArg.cb_type = QCAMERA_DATA_TIMESTAMP_CALLBACK;
            cbArg.msg_type = CAMERA_MSG_VIDEO_FRAME;
            cbArg.data = video_mem;

            // For VT usecase, ISP uses AVtimer not CLOCK_BOOTTIME as time source.
            // So do not change video timestamp.
            if (!pme->mParameters.isAVTimerEnabled()) {
                // Convert Boottime from camera to Monotime for video if needed.
                // Otherwise, mBootToMonoTimestampOffset value will be 0.
                timeStamp = timeStamp - pme->mBootToMonoTimestampOffset;
            }
            LOGD("Final video buffer TimeStamp : %lld ", timeStamp);
            cbArg.timestamp = timeStamp;
            int32_t rc = pme->m_cbNotifier.notifyCallback(cbArg);
            if (rc != NO_ERROR) {
                LOGE("fail sending data notify");
                stream->bufDone(frame->buf_idx);
            }
        }
    }

    free(super_frame);
    LOGD("[KPI Perf] : END");
}

/*===========================================================================
 * FUNCTION   : snapshot_channel_cb_routine
 *
 * DESCRIPTION: helper function to handle snapshot frame from snapshot channel
 *
 * PARAMETERS :
 *   @super_frame : received super buffer
 *   @userdata    : user data ptr
 *
 * RETURN     : None
 *
 * NOTE       : recvd_frame will be released after this call by caller, so if
 *              async operation needed for recvd_frame, it's our responsibility
 *              to save a copy for this variable to be used later.
 *==========================================================================*/
void QCamera2HardwareInterface::snapshot_channel_cb_routine(mm_camera_super_buf_t *super_frame,
        void *userdata)
{
    ATRACE_CALL();
    char value[PROPERTY_VALUE_MAX];
    QCameraChannel *pChannel = NULL;

    LOGH("[KPI Perf]: E");
    QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
    if (pme == NULL ||
        pme->mCameraHandle == NULL ||
        pme->mCameraHandle->camera_handle != super_frame->camera_handle){
        LOGE("camera obj not valid");
        // simply free super frame
        free(super_frame);
        return;
    }

    // In low-power mode the snapshot is taken through the video channel.
    if (pme->isLowPowerMode()) {
        pChannel = pme->m_channels[QCAMERA_CH_TYPE_VIDEO];
    } else {
        pChannel = pme->m_channels[QCAMERA_CH_TYPE_SNAPSHOT];
    }

    if ((pChannel == NULL)
        || (!validate_handle(pChannel->getMyHandle(),
        super_frame->ch_id))) {
        LOGE("Snapshot channel doesn't exist, return here");
        // Per the NOTE above, the caller releases recvd_frame after this call.
        return;
    }

    // Optional tuning-metadata dump, gated by a debug system property.
    property_get("persist.camera.dumpmetadata", value, "0");
    int32_t enabled = atoi(value);
    if (enabled) {
        mm_camera_buf_def_t *pMetaFrame = NULL;
        QCameraStream *pStream = NULL;
        for (uint32_t i = 0; i < super_frame->num_bufs; i++) {
            pStream = pChannel->getStreamByHandle(super_frame->bufs[i]->stream_id);
            if (pStream != NULL) {
                if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
                    pMetaFrame = super_frame->bufs[i]; //find the metadata
                    if (pMetaFrame != NULL &&
                            ((metadata_buffer_t *)pMetaFrame->buffer)->is_tuning_params_valid) {
                        pme->dumpMetadataToFile(pStream, pMetaFrame, (char *) "Snapshot");
                    }
                    break;
                }
            }
        }
    }

    // save a copy for the superbuf
    // (shallow copy: postprocessor works asynchronously after this call returns)
    mm_camera_super_buf_t* frame = (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
    if (frame == NULL) {
        LOGE("Error allocating memory to save received_frame structure.");
        pChannel->bufDone(super_frame);
        return;
    }
    *frame = *super_frame;

    if (frame->num_bufs > 0) {
        LOGI("[KPI Perf]: superbuf frame_idx %d",
                frame->bufs[0]->frame_idx);
    }

    // Wait for deferred reprocess setup, then hand the copy to the
    // postprocessor (which takes ownership of 'frame' on success).
    if ((NO_ERROR != pme->waitDeferredWork(pme->mReprocJob)) ||
        (NO_ERROR != pme->m_postprocessor.processData(frame))) {
        LOGE("Failed to trigger process data");
        pChannel->bufDone(super_frame);
        free(frame);
        frame = NULL;
        return;
    }

    LOGH("[KPI Perf]: X");
}

/*===========================================================================
 * FUNCTION   : raw_stream_cb_routine
 *
 * DESCRIPTION: helper function to handle raw dump frame from raw stream
 *
 * PARAMETERS :
 *   @super_frame : received super buffer
 *   @stream      : stream object
 *   @userdata    : user data ptr
 *
 * RETURN     : None
 *
 * NOTE       : caller passes the ownership of super_frame, it's our
 *              responsibility to free super_frame once it's done.
For raw 1704 * frame, there is no need to send to postprocessor for jpeg 1705 * encoding. this function will play shutter and send the data 1706 * callback to upper layer. Raw frame buffer will be returned 1707 * back to kernel, and frame will be free after use. 1708 *==========================================================================*/ 1709 void QCamera2HardwareInterface::raw_stream_cb_routine(mm_camera_super_buf_t * super_frame, 1710 QCameraStream * /*stream*/, 1711 void * userdata) 1712 { 1713 ATRACE_CALL(); 1714 LOGH("[KPI Perf] : BEGIN"); 1715 QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata; 1716 if (pme == NULL || 1717 pme->mCameraHandle == NULL || 1718 pme->mCameraHandle->camera_handle != super_frame->camera_handle){ 1719 LOGE("camera obj not valid"); 1720 // simply free super frame 1721 free(super_frame); 1722 return; 1723 } 1724 1725 pme->m_postprocessor.processRawData(super_frame); 1726 LOGH("[KPI Perf] : END"); 1727 } 1728 1729 /*=========================================================================== 1730 * FUNCTION : raw_channel_cb_routine 1731 * 1732 * DESCRIPTION: helper function to handle RAW superbuf callback directly from 1733 * mm-camera-interface 1734 * 1735 * PARAMETERS : 1736 * @super_frame : received super buffer 1737 * @userdata : user data ptr 1738 * 1739 * RETURN : None 1740 * 1741 * NOTE : recvd_frame will be released after this call by caller, so if 1742 * async operation needed for recvd_frame, it's our responsibility 1743 * to save a copy for this variable to be used later. 
 *==========================================================================*/
void QCamera2HardwareInterface::raw_channel_cb_routine(mm_camera_super_buf_t *super_frame,
        void *userdata)

{
    ATRACE_CALL();
    char value[PROPERTY_VALUE_MAX];

    LOGH("[KPI Perf]: E");
    QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
    // Validate the HWI object and that the superbuf belongs to this camera.
    if (pme == NULL ||
        pme->mCameraHandle == NULL ||
        pme->mCameraHandle->camera_handle != super_frame->camera_handle){
        LOGE("camera obj not valid");
        // simply free super frame
        free(super_frame);
        return;
    }

    QCameraChannel *pChannel = pme->m_channels[QCAMERA_CH_TYPE_RAW];
    if (pChannel == NULL) {
        LOGE("RAW channel doesn't exist, return here");
        // Per the NOTE above, recvd_frame is released by the caller.
        return;
    }

    if (!validate_handle(pChannel->getMyHandle(), super_frame->ch_id)) {
        LOGE("Invalid Input super buffer");
        pChannel->bufDone(super_frame);
        return;
    }

    // Optional tuning-metadata dump, gated by a debug system property.
    property_get("persist.camera.dumpmetadata", value, "0");
    int32_t enabled = atoi(value);
    if (enabled) {
        mm_camera_buf_def_t *pMetaFrame = NULL;
        QCameraStream *pStream = NULL;
        for (uint32_t i = 0; i < super_frame->num_bufs; i++) {
            pStream = pChannel->getStreamByHandle(super_frame->bufs[i]->stream_id);
            if (pStream != NULL) {
                if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
                    pMetaFrame = super_frame->bufs[i]; //find the metadata
                    if (pMetaFrame != NULL &&
                            ((metadata_buffer_t *)pMetaFrame->buffer)->is_tuning_params_valid) {
                        pme->dumpMetadataToFile(pStream, pMetaFrame, (char *) "raw");
                    }
                    break;
                }
            }
        }
    }

    // save a copy for the superbuf
    // (shallow copy: the postprocessor consumes it after this call returns)
    mm_camera_super_buf_t* frame = (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
    if (frame == NULL) {
        LOGE("Error allocating memory to save received_frame structure.");
        pChannel->bufDone(super_frame);
        return;
    }
    *frame = *super_frame;

    if (frame->num_bufs > 0) {
        LOGI("[KPI Perf]: superbuf frame_idx %d",
                frame->bufs[0]->frame_idx);
    }

    // Wait on Postproc initialization if needed
    // then send to postprocessor
    if ((NO_ERROR != pme->waitDeferredWork(pme->mReprocJob)) ||
        (NO_ERROR != pme->m_postprocessor.processData(frame))) {
        LOGE("Failed to trigger process data");
        pChannel->bufDone(super_frame);
        free(frame);
        frame = NULL;
        return;
    }

    LOGH("[KPI Perf]: X");

}

/*===========================================================================
 * FUNCTION   : preview_raw_stream_cb_routine
 *
 * DESCRIPTION: helper function to handle raw frame during standard preview
 *
 * PARAMETERS :
 *   @super_frame : received super buffer
 *   @stream      : stream object
 *   @userdata    : user data ptr
 *
 * RETURN     : None
 *
 * NOTE       : caller passes the ownership of super_frame, it's our
 *              responsibility to free super_frame once it's done.
 *==========================================================================*/
void QCamera2HardwareInterface::preview_raw_stream_cb_routine(mm_camera_super_buf_t * super_frame,
                                                              QCameraStream * stream,
                                                              void * userdata)
{
    ATRACE_CALL();
    LOGH("[KPI Perf] : BEGIN");
    char value[PROPERTY_VALUE_MAX];
    bool dump_preview_raw = false, dump_video_raw = false;

    QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
    if (pme == NULL ||
        pme->mCameraHandle == NULL ||
        pme->mCameraHandle->camera_handle != super_frame->camera_handle){
        LOGE("camera obj not valid");
        // simply free super frame
        free(super_frame);
        return;
    }

    mm_camera_buf_def_t *raw_frame = super_frame->bufs[0];

    if (raw_frame != NULL) {
        // Frame dumping is gated by debug system properties; in recording
        // mode the video_raw property also enables the dump.
        property_get("persist.camera.preview_raw", value, "0");
        dump_preview_raw = atoi(value) > 0 ? true : false;
        property_get("persist.camera.video_raw", value, "0");
        dump_video_raw = atoi(value) > 0 ? true : false;
        if (dump_preview_raw || (pme->mParameters.getRecordingHintValue()
                && dump_video_raw)) {
            pme->dumpFrameToFile(stream, raw_frame, QCAMERA_DUMP_FRM_RAW);
        }
        // Raw frame is only dumped, never forwarded: return it right away.
        stream->bufDone(raw_frame->buf_idx);
    }
    free(super_frame);

    LOGH("[KPI Perf] : END");
}

/*===========================================================================
 * FUNCTION   : snapshot_raw_stream_cb_routine
 *
 * DESCRIPTION: helper function to handle raw frame during standard capture
 *
 * PARAMETERS :
 *   @super_frame : received super buffer
 *   @stream      : stream object
 *   @userdata    : user data ptr
 *
 * RETURN     : None
 *
 * NOTE       : caller passes the ownership of super_frame, it's our
 *              responsibility to free super_frame once it's done.
 *==========================================================================*/
void QCamera2HardwareInterface::snapshot_raw_stream_cb_routine(mm_camera_super_buf_t * super_frame,
        QCameraStream * stream,
        void * userdata)
{
    ATRACE_CALL();
    LOGH("[KPI Perf] : BEGIN");
    char value[PROPERTY_VALUE_MAX];
    bool dump_raw = false;

    QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
    if (pme == NULL ||
        pme->mCameraHandle == NULL ||
        pme->mCameraHandle->camera_handle != super_frame->camera_handle){
        LOGE("camera obj not valid");
        // simply free super frame
        free(super_frame);
        return;
    }

    // Dumping is gated by a debug system property.
    property_get("persist.camera.snapshot_raw", value, "0");
    dump_raw = atoi(value) > 0 ? true : false;

    // Find the first RAW buffer in the superbuf, optionally dump it, and
    // return it to the stream.
    for (uint32_t i = 0; i < super_frame->num_bufs; i++) {
        if (super_frame->bufs[i]->stream_type == CAM_STREAM_TYPE_RAW) {
            mm_camera_buf_def_t * raw_frame = super_frame->bufs[i];
            if (NULL != stream) {
                if (dump_raw) {
                    pme->dumpFrameToFile(stream, raw_frame, QCAMERA_DUMP_FRM_RAW);
                }
                stream->bufDone(super_frame->bufs[i]->buf_idx);
            }
            break;
        }
    }

    free(super_frame);

    LOGH("[KPI Perf] : END");
}

/*===========================================================================
 * FUNCTION   : updateMetadata
 *
 * DESCRIPTION: Frame related parameter can be updated here
 *
 * PARAMETERS :
 *   @pMetaData : pointer to metadata buffer
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int32_t QCamera2HardwareInterface::updateMetadata(metadata_buffer_t *pMetaData)
{
    int32_t rc = NO_ERROR;

    if (pMetaData == NULL) {
        LOGE("Null Metadata buffer");
        // NOTE(review): returns NO_ERROR on a NULL buffer — deliberate
        // best-effort behavior kept as-is.
        return rc;
    }

    // Sharpness
    // Edge enhancement is enabled only when a non-zero sharpness is requested.
    cam_edge_application_t edge_application;
    memset(&edge_application, 0x00, sizeof(cam_edge_application_t));
    edge_application.sharpness = mParameters.getSharpness();
    if (edge_application.sharpness != 0) {
        edge_application.edge_mode = CAM_EDGE_MODE_FAST;
    } else {
        edge_application.edge_mode = CAM_EDGE_MODE_OFF;
    }
    ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData,
            CAM_INTF_META_EDGE_MODE, edge_application);

    //Effect
    int32_t prmEffect = mParameters.getEffect();
    ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData, CAM_INTF_PARM_EFFECT, prmEffect);

    //flip
    int32_t prmFlip = mParameters.getFlipMode(CAM_STREAM_TYPE_SNAPSHOT);
    ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData, CAM_INTF_PARM_FLIP, prmFlip);

    //denoise
    uint8_t prmDenoise = (uint8_t)mParameters.isWNREnabled();
    ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData,
            CAM_INTF_META_NOISE_REDUCTION_MODE, prmDenoise);

    //rotation & device rotation
    // Map the 0/90/180/270 degree values to their enum counterparts; any
    // other jpeg rotation leaves rotation_info.rotation at its memset value.
    uint32_t prmRotation = mParameters.getJpegRotation();
    cam_rotation_info_t rotation_info;
    memset(&rotation_info, 0, sizeof(cam_rotation_info_t));
    if (prmRotation == 0) {
        rotation_info.rotation = ROTATE_0;
    } else if (prmRotation == 90) {
        rotation_info.rotation = ROTATE_90;
    } else if (prmRotation == 180) {
        rotation_info.rotation = ROTATE_180;
    } else if (prmRotation == 270) {
        rotation_info.rotation = ROTATE_270;
    }

    uint32_t device_rotation = mParameters.getDeviceRotation();
    if (device_rotation == 0) {
        rotation_info.device_rotation = ROTATE_0;
    } else if (device_rotation == 90) {
        rotation_info.device_rotation = ROTATE_90;
    } else if (device_rotation == 180) {
        rotation_info.device_rotation = ROTATE_180;
    } else if (device_rotation == 270) {
        rotation_info.device_rotation = ROTATE_270;
    } else {
        // Unlike jpeg rotation, an unexpected device rotation falls back to 0.
        rotation_info.device_rotation = ROTATE_0;
    }

    ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData, CAM_INTF_PARM_ROTATION, rotation_info);

    // Imglib Dynamic Scene Data
    cam_dyn_img_data_t dyn_img_data = mParameters.getDynamicImgData();
    if (mParameters.isStillMoreEnabled()) {
        // StillMore overrides the input frame count with its burst count.
        cam_still_more_t stillmore_cap = mParameters.getStillMoreSettings();
        dyn_img_data.input_count = stillmore_cap.burst_count;
    }
    ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData,
            CAM_INTF_META_IMG_DYN_FEAT, dyn_img_data);

    //CPP CDS
    int32_t prmCDSMode = mParameters.getCDSMode();
    ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData,
            CAM_INTF_PARM_CDS_MODE, prmCDSMode);

    return rc;
}

/*===========================================================================
 * FUNCTION   : metadata_stream_cb_routine
 *
 * DESCRIPTION: helper function to handle metadata
 *              frame from metadata stream
 *
 * PARAMETERS :
 *   @super_frame : received super buffer
 *   @stream      : stream object
 *   @userdata    : user data ptr
 *
 * RETURN     : None
 *
 * NOTE       : caller passes the ownership of super_frame, it's our
 *              responsibility to free super_frame once it's done. Metadata
 *              could have valid entries for face detection result or
 *              histogram statistics information.
 *==========================================================================*/
void QCamera2HardwareInterface::metadata_stream_cb_routine(mm_camera_super_buf_t * super_frame,
                                                           QCameraStream * stream,
                                                           void * userdata)
{
    ATRACE_CALL();
    LOGD("[KPI Perf] : BEGIN");
    QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;

    // Validate the HWI object and that the frame belongs to this camera.
    if (pme == NULL ||
        pme->mCameraHandle == 0 ||
        !validate_handle(pme->mCameraHandle->camera_handle,
        super_frame->camera_handle)) {
        // simply free super frame
        free(super_frame);
        return;
    }

    // First buffer of the metadata super buffer carries the metadata payload.
    mm_camera_buf_def_t *frame = super_frame->bufs[0];
    metadata_buffer_t *pMetaData = (metadata_buffer_t *)frame->buffer;
    if(pme->m_stateMachine.isNonZSLCaptureRunning()&&
        !pme->mLongshotEnabled) {
        //Make shutter call back in non ZSL mode once raw frame is received from VFE.
        pme->playShutter();
    }

    if (pMetaData->is_tuning_params_valid && pme->mParameters.getRecordingHintValue() == true) {
        //Dump Tuning data for video
        // NOTE(review): 'stream' is dereferenced here and at the end of this
        // routine without a NULL check — presumably guaranteed non-NULL by the
        // caller; confirm against mm-camera-interface.
        pme->dumpMetadataToFile(stream,frame,(char *)"Video");
    }

    // Each IF_META_AVAILABLE section below extracts one metadata entry and,
    // where the state machine must react, forwards it as an internal event.
    // Ownership convention for every event: 'payload' is heap-allocated here
    // and, on successful processEvt(), freed by the state machine; on failure
    // it is freed here.
    IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, pMetaData) {
        // process histogram statistics info
        qcamera_sm_internal_evt_payload_t *payload =
            (qcamera_sm_internal_evt_payload_t *)
            malloc(sizeof(qcamera_sm_internal_evt_payload_t));
        if (NULL != payload) {
            memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
            payload->evt_type = QCAMERA_INTERNAL_EVT_HISTOGRAM_STATS;
            payload->stats_data = *stats_data;
            int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
            if (rc != NO_ERROR) {
                LOGW("processEvt histogram failed");
                free(payload);
                payload = NULL;

            }
        } else {
            LOGE("No memory for histogram qcamera_sm_internal_evt_payload_t");
        }
    }

    IF_META_AVAILABLE(cam_face_detection_data_t, detection_data,
            CAM_INTF_META_FACE_DETECTION, pMetaData) {

        cam_faces_data_t faces_data;
        pme->fillFacesData(faces_data, pMetaData);
        faces_data.detection_data.fd_type = QCAMERA_FD_PREVIEW; //HARD CODE here before MCT can support

        qcamera_sm_internal_evt_payload_t *payload = (qcamera_sm_internal_evt_payload_t *)
            malloc(sizeof(qcamera_sm_internal_evt_payload_t));
        if (NULL != payload) {
            memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
            payload->evt_type = QCAMERA_INTERNAL_EVT_FACE_DETECT_RESULT;
            payload->faces_data = faces_data;
            int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
            if (rc != NO_ERROR) {
                LOGW("processEvt face detection failed");
                free(payload);
                payload = NULL;
            }
        } else {
            LOGE("No memory for face detect qcamera_sm_internal_evt_payload_t");
        }
    }

    IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, pMetaData) {
        uint8_t forceAFUpdate = FALSE;
        //1. Earlier HAL used to rely on AF done flags set in metadata to generate callbacks to
        //upper layers. But in scenarios where metadata drops especially which contain important
        //AF information, APP will wait indefinitely for focus result resulting in capture hang.
        //2. HAL can check for AF state transitions to generate AF state callbacks to upper layers.
        //This will help overcome metadata drop issue with the earlier approach.
        //3. But sometimes AF state transitions can happen so fast within same metadata due to
        //which HAL will receive only the final AF state. HAL may perceive this as no change in AF
        //state depending on the state transitions happened (for example state A -> B -> A).
        //4. To overcome the drawbacks of both the approaches, we go for a hybrid model in which
        //we check state transition at both HAL level and AF module level. We rely on
        //'state transition' meta field set by AF module for the state transition detected by it.
        IF_META_AVAILABLE(uint8_t, stateChange, CAM_INTF_AF_STATE_TRANSITION, pMetaData) {
            forceAFUpdate = *stateChange;
        }
        //This is a special scenario in which when scene modes like landscape are selected, AF mode
        //gets changed to INFINITY at backend, but HAL will not be aware of it. Also, AF state in
        //such cases will be set to CAM_AF_STATE_INACTIVE by backend. So, detect the AF mode
        //change here and trigger AF callback @ processAutoFocusEvent().
        IF_META_AVAILABLE(uint32_t, afFocusMode, CAM_INTF_PARM_FOCUS_MODE, pMetaData) {
            if (((cam_focus_mode_type)(*afFocusMode) == CAM_FOCUS_MODE_INFINITY) &&
                    pme->mActiveAF){
                forceAFUpdate = TRUE;
            }
        }
        if ((pme->m_currentFocusState != (*afState)) || forceAFUpdate) {
            cam_af_state_t prevFocusState = pme->m_currentFocusState;
            pme->m_currentFocusState = (cam_af_state_t)(*afState);
            qcamera_sm_internal_evt_payload_t *payload = (qcamera_sm_internal_evt_payload_t *)
                    malloc(sizeof(qcamera_sm_internal_evt_payload_t));
            if (NULL != payload) {
                memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
                payload->evt_type = QCAMERA_INTERNAL_EVT_FOCUS_UPDATE;
                payload->focus_data.focus_state = (cam_af_state_t)(*afState);
                //Need to flush ZSL Q only if we are transitioning from scanning state
                //to focused/not focused state.
                payload->focus_data.flush_info.needFlush =
                        ((prevFocusState == CAM_AF_STATE_PASSIVE_SCAN) ||
                        (prevFocusState == CAM_AF_STATE_ACTIVE_SCAN)) &&
                        ((pme->m_currentFocusState == CAM_AF_STATE_FOCUSED_LOCKED) ||
                        (pme->m_currentFocusState == CAM_AF_STATE_NOT_FOCUSED_LOCKED));
                payload->focus_data.flush_info.focused_frame_idx = frame->frame_idx;

                IF_META_AVAILABLE(float, focusDistance,
                        CAM_INTF_META_LENS_FOCUS_DISTANCE, pMetaData) {
                    payload->focus_data.focus_dist.
                            focus_distance[CAM_FOCUS_DISTANCE_OPTIMAL_INDEX] = *focusDistance;
                }
                IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, pMetaData) {
                    payload->focus_data.focus_dist.
                            focus_distance[CAM_FOCUS_DISTANCE_NEAR_INDEX] = focusRange[0];
                    payload->focus_data.focus_dist.
                            focus_distance[CAM_FOCUS_DISTANCE_FAR_INDEX] = focusRange[1];
                }
                IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, pMetaData) {
                    payload->focus_data.focus_mode = (cam_focus_mode_type)(*focusMode);
                }
                IF_META_AVAILABLE(uint8_t, isDepthFocus,
                        CAM_INTF_META_FOCUS_DEPTH_INFO, pMetaData) {
                    payload->focus_data.isDepth = *isDepthFocus;
                }
                int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
                if (rc != NO_ERROR) {
                    LOGW("processEvt focus failed");
                    free(payload);
                    payload = NULL;
                }
            } else {
                LOGE("No memory for focus qcamera_sm_internal_evt_payload_t");
            }
        }
    }

    IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, pMetaData) {
        if (crop_data->num_of_streams > MAX_NUM_STREAMS) {
            LOGE("Invalid num_of_streams %d in crop_data",
                crop_data->num_of_streams);
        } else {
            qcamera_sm_internal_evt_payload_t *payload =
                (qcamera_sm_internal_evt_payload_t *)
                malloc(sizeof(qcamera_sm_internal_evt_payload_t));
            if (NULL != payload) {
                memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
                payload->evt_type = QCAMERA_INTERNAL_EVT_CROP_INFO;
                payload->crop_data = *crop_data;
                int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
                if (rc != NO_ERROR) {
                    LOGE("processEvt crop info failed");
                    free(payload);
                    payload = NULL;
                }
            } else {
                LOGE("No memory for prep_snapshot qcamera_sm_internal_evt_payload_t");
            }
        }
    }

    IF_META_AVAILABLE(int32_t, prep_snapshot_done_state,
            CAM_INTF_META_PREP_SNAPSHOT_DONE, pMetaData) {
        qcamera_sm_internal_evt_payload_t *payload =
            (qcamera_sm_internal_evt_payload_t *)malloc(sizeof(qcamera_sm_internal_evt_payload_t));
        if (NULL != payload) {
            memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
            payload->evt_type = QCAMERA_INTERNAL_EVT_PREP_SNAPSHOT_DONE;
            payload->prep_snapshot_state = (cam_prep_snapshot_state_t)*prep_snapshot_done_state;
            int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
            if (rc != NO_ERROR) {
                LOGW("processEvt prep_snapshot failed");
                free(payload);
                payload = NULL;
            }
        } else {
            LOGE("No memory for prep_snapshot qcamera_sm_internal_evt_payload_t");
        }
    }

    IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
            CAM_INTF_META_ASD_HDR_SCENE_DATA, pMetaData) {
        LOGH("hdr_scene_data: %d %f\n",
            hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
        //Handle this HDR meta data only if capture is not in process
        if (!pme->m_stateMachine.isCaptureRunning()) {
            qcamera_sm_internal_evt_payload_t *payload =
                (qcamera_sm_internal_evt_payload_t *)
                malloc(sizeof(qcamera_sm_internal_evt_payload_t));
            if (NULL != payload) {
                memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
                payload->evt_type = QCAMERA_INTERNAL_EVT_HDR_UPDATE;
                payload->hdr_data = *hdr_scene_data;
                int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
                if (rc != NO_ERROR) {
                    LOGW("processEvt hdr update failed");
                    free(payload);
                    payload = NULL;
                }
            } else {
                LOGE("No memory for hdr update qcamera_sm_internal_evt_payload_t");
            }
        }
    }

    IF_META_AVAILABLE(cam_asd_decision_t, cam_asd_info,
            CAM_INTF_META_ASD_SCENE_INFO, pMetaData) {
        qcamera_sm_internal_evt_payload_t *payload =
            (qcamera_sm_internal_evt_payload_t *)malloc(sizeof(qcamera_sm_internal_evt_payload_t));
        if (NULL != payload) {
            memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
            payload->evt_type = QCAMERA_INTERNAL_EVT_ASD_UPDATE;
            payload->asd_data = (cam_asd_decision_t)*cam_asd_info;
            int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
            if (rc != NO_ERROR) {
                LOGW("processEvt asd_update failed");
                free(payload);
                payload = NULL;
            }
        } else {
            LOGE("No memory for asd_update qcamera_sm_internal_evt_payload_t");
        }
    }

    IF_META_AVAILABLE(cam_awb_params_t, awb_params, CAM_INTF_META_AWB_INFO, pMetaData) {
        LOGH(", metadata for awb params.");
        qcamera_sm_internal_evt_payload_t *payload =
            (qcamera_sm_internal_evt_payload_t *)
            malloc(sizeof(qcamera_sm_internal_evt_payload_t));
        if (NULL != payload) {
            memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
            payload->evt_type = QCAMERA_INTERNAL_EVT_AWB_UPDATE;
            payload->awb_data = *awb_params;
            int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
            if (rc != NO_ERROR) {
                LOGW("processEvt awb_update failed");
                free(payload);
                payload = NULL;
            }
        } else {
            LOGE("No memory for awb_update qcamera_sm_internal_evt_payload_t");
        }
    }

    // The sections below only cache values into mExifParams / mParameters;
    // no events are generated for them.
    IF_META_AVAILABLE(uint32_t, flash_mode, CAM_INTF_META_FLASH_MODE, pMetaData) {
        pme->mExifParams.sensor_params.flash_mode = (cam_flash_mode_t)*flash_mode;
    }

    IF_META_AVAILABLE(int32_t, flash_state, CAM_INTF_META_FLASH_STATE, pMetaData) {
        pme->mExifParams.sensor_params.flash_state = (cam_flash_state_t) *flash_state;
    }

    IF_META_AVAILABLE(float, aperture_value, CAM_INTF_META_LENS_APERTURE, pMetaData) {
        pme->mExifParams.sensor_params.aperture_value = *aperture_value;
    }

    IF_META_AVAILABLE(cam_3a_params_t, ae_params, CAM_INTF_META_AEC_INFO, pMetaData) {
        pme->mExifParams.cam_3a_params = *ae_params;
        pme->mExifParams.cam_3a_params_valid = TRUE;
        pme->mFlashNeeded = ae_params->flash_needed;
        pme->mExifParams.cam_3a_params.brightness = (float) pme->mParameters.getBrightness();
        qcamera_sm_internal_evt_payload_t *payload =
            (qcamera_sm_internal_evt_payload_t *)
            malloc(sizeof(qcamera_sm_internal_evt_payload_t));
        if (NULL != payload) {
            memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
            payload->evt_type = QCAMERA_INTERNAL_EVT_AE_UPDATE;
            payload->ae_data = *ae_params;
            int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
            if (rc != NO_ERROR) {
                LOGW("processEvt ae_update failed");
                free(payload);
                payload = NULL;
            }
        } else {
            LOGE("No memory for ae_update qcamera_sm_internal_evt_payload_t");
        }
    }

    IF_META_AVAILABLE(int32_t, wb_mode, CAM_INTF_PARM_WHITE_BALANCE, pMetaData) {
        pme->mExifParams.cam_3a_params.wb_mode = (cam_wb_mode_type) *wb_mode;
    }

    IF_META_AVAILABLE(cam_sensor_params_t, sensor_params, CAM_INTF_META_SENSOR_INFO, pMetaData) {
        pme->mExifParams.sensor_params = *sensor_params;
    }

    // EXIF debug blocks: copied only when the debug_params buffer was
    // allocated elsewhere (may legitimately be NULL).
    IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AE, pMetaData) {
        if (pme->mExifParams.debug_params) {
            pme->mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
            pme->mExifParams.debug_params->ae_debug_params_valid = TRUE;
        }
    }

    IF_META_AVAILABLE(cam_awb_exif_debug_t, awb_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AWB, pMetaData) {
        if (pme->mExifParams.debug_params) {
            pme->mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
            pme->mExifParams.debug_params->awb_debug_params_valid = TRUE;
        }
    }

    IF_META_AVAILABLE(cam_af_exif_debug_t, af_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AF, pMetaData) {
        if (pme->mExifParams.debug_params) {
            pme->mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
            pme->mExifParams.debug_params->af_debug_params_valid = TRUE;
        }
    }

    IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_ASD, pMetaData) {
        if (pme->mExifParams.debug_params) {
            pme->mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
            pme->mExifParams.debug_params->asd_debug_params_valid = TRUE;
        }
    }

    IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t, stats_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_STATS, pMetaData) {
        if (pme->mExifParams.debug_params) {
            pme->mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
            pme->mExifParams.debug_params->stats_debug_params_valid = TRUE;
        }
    }

    IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t, bestats_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_BESTATS, pMetaData) {
        if (pme->mExifParams.debug_params) {
            pme->mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
            pme->mExifParams.debug_params->bestats_debug_params_valid = TRUE;
        }
    }

    IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_BHIST, pMetaData) {
        if (pme->mExifParams.debug_params) {
            pme->mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
            pme->mExifParams.debug_params->bhist_debug_params_valid = TRUE;
        }
    }

    IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_3A_TUNING, pMetaData) {
        if (pme->mExifParams.debug_params) {
            pme->mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
            pme->mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
        }
    }

    IF_META_AVAILABLE(uint32_t, led_mode, CAM_INTF_META_LED_MODE_OVERRIDE, pMetaData) {
        qcamera_sm_internal_evt_payload_t *payload =
            (qcamera_sm_internal_evt_payload_t *)
            malloc(sizeof(qcamera_sm_internal_evt_payload_t));
        if (NULL != payload) {
            memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
            payload->evt_type = QCAMERA_INTERNAL_EVT_LED_MODE_OVERRIDE;
            payload->led_data = (cam_flash_mode_t)*led_mode;
            int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
            if (rc != NO_ERROR) {
                LOGW("processEvt led mode override failed");
                free(payload);
                payload = NULL;
            }
        } else {
            LOGE("No memory for focus qcamera_sm_internal_evt_payload_t");
        }
    }

    // Re-publish the current sharpness/edge mode into this metadata buffer
    // (mirrors the logic in updateMetadata()).
    cam_edge_application_t edge_application;
    memset(&edge_application, 0x00, sizeof(cam_edge_application_t));
    edge_application.sharpness = pme->mParameters.getSharpness();
    if (edge_application.sharpness != 0) {
        edge_application.edge_mode = CAM_EDGE_MODE_FAST;
    } else {
        edge_application.edge_mode = CAM_EDGE_MODE_OFF;
    }
    ADD_SET_PARAM_ENTRY_TO_BATCH(pMetaData, CAM_INTF_META_EDGE_MODE, edge_application);

    IF_META_AVAILABLE(cam_focus_pos_info_t, cur_pos_info,
            CAM_INTF_META_FOCUS_POSITION, pMetaData) {
        qcamera_sm_internal_evt_payload_t *payload =
            (qcamera_sm_internal_evt_payload_t *)malloc(sizeof(qcamera_sm_internal_evt_payload_t));
        if (NULL != payload) {
            memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
            payload->evt_type = QCAMERA_INTERNAL_EVT_FOCUS_POS_UPDATE;
            payload->focus_pos = *cur_pos_info;
            int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
            if (rc != NO_ERROR) {
                LOGW("processEvt focus_pos_update failed");
                free(payload);
                payload = NULL;
            }
        } else {
            LOGE("No memory for focus_pos_update qcamera_sm_internal_evt_payload_t");
        }
    }

    if (pme->mParameters.getLowLightCapture()) {
        IF_META_AVAILABLE(cam_low_light_mode_t, low_light_level,
                CAM_INTF_META_LOW_LIGHT, pMetaData) {
            pme->mParameters.setLowLightLevel(*low_light_level);
        }
    }

    IF_META_AVAILABLE(cam_dyn_img_data_t, dyn_img_data,
            CAM_INTF_META_IMG_DYN_FEAT, pMetaData) {
        pme->mParameters.setDynamicImgData(*dyn_img_data);
    }

    IF_META_AVAILABLE(int32_t, touch_ae_status, CAM_INTF_META_TOUCH_AE_RESULT, pMetaData) {
        LOGD("touch_ae_status: %d", *touch_ae_status);
    }

    if (pme->isDualCamera()) {
        pme->fillDualCameraFOVControl();
    }

    // NOTE(review): unlike the other callbacks which call
    // bufDone(buf_idx), this passes the whole super buffer — presumably a
    // QCameraStream::bufDone overload; verify against QCameraStream.h.
    stream->bufDone(super_frame);
    free(super_frame);

    LOGD("[KPI Perf] : END");
}

/*===========================================================================
 * FUNCTION   : reprocess_stream_cb_routine
 *
 * DESCRIPTION: helper function to handle reprocess frame from reprocess stream
 *              (after reprocess, e.g., ZSL snapshot frame after WNR if
 *              WNR is enabled)
 *
 * PARAMETERS :
 *   @super_frame : received super buffer
 *   @stream      : stream object
 *   @userdata    : user data ptr
 *
 * RETURN     : None
 *
 * NOTE       : caller passes the ownership of super_frame, it's our
 *              responsibility to free super_frame once it's done. In this
 *              case, reprocessed frame need to be passed to postprocessor
 *              for jpeg encoding.
 *==========================================================================*/
void QCamera2HardwareInterface::reprocess_stream_cb_routine(mm_camera_super_buf_t * super_frame,
                                                            QCameraStream * /*stream*/,
                                                            void * userdata)
{
    ATRACE_CALL();
    LOGH("[KPI Perf]: E");
    QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
    if (pme == NULL ||
        pme->mCameraHandle == NULL ||
        pme->mCameraHandle->camera_handle != super_frame->camera_handle){
        LOGE("camera obj not valid");
        // simply free super frame
        free(super_frame);
        return;
    }

    // Ownership of super_frame transfers to the postprocessor here.
    pme->m_postprocessor.processPPData(super_frame);

    LOGH("[KPI Perf]: X");
}

/*===========================================================================
 * FUNCTION   : callback_stream_cb_routine
 *
 * DESCRIPTION: function to process CALLBACK stream data
 *              Frame will processed and sent to framework
 *
 * PARAMETERS :
 *   @super_frame : received super buffer
 *   @stream      : stream object
 *   @userdata    : user data ptr
 *
* RETURN : None 2529 *==========================================================================*/ 2530 void QCamera2HardwareInterface::callback_stream_cb_routine(mm_camera_super_buf_t *super_frame, 2531 QCameraStream *stream, void *userdata) 2532 { 2533 ATRACE_CALL(); 2534 LOGH("[KPI Perf]: E"); 2535 QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata; 2536 2537 if (pme == NULL || 2538 pme->mCameraHandle == 0 || 2539 !validate_handle(pme->mCameraHandle->camera_handle, 2540 super_frame->camera_handle)) { 2541 // simply free super frame 2542 free(super_frame); 2543 return; 2544 } 2545 2546 mm_camera_buf_def_t *frame = super_frame->bufs[0]; 2547 if (NULL == frame) { 2548 LOGE("preview callback frame is NULL"); 2549 free(super_frame); 2550 return; 2551 } 2552 2553 if (!pme->needProcessPreviewFrame(frame->frame_idx)) { 2554 LOGH("preview is not running, no need to process"); 2555 stream->bufDone(frame->buf_idx); 2556 free(super_frame); 2557 return; 2558 } 2559 2560 QCameraMemory *previewMemObj = (QCameraMemory *)frame->mem_info; 2561 // Handle preview data callback 2562 if (pme->mDataCb != NULL && 2563 (pme->msgTypeEnabledWithLock(CAMERA_MSG_PREVIEW_FRAME) > 0) && 2564 (!pme->mParameters.isSceneSelectionEnabled())) { 2565 int32_t rc = pme->sendPreviewCallback(stream, previewMemObj, frame->buf_idx); 2566 if (NO_ERROR != rc) { 2567 LOGE("Preview callback was not sent succesfully"); 2568 } 2569 } 2570 stream->bufDone(frame->buf_idx); 2571 free(super_frame); 2572 LOGH("[KPI Perf]: X"); 2573 } 2574 2575 /*=========================================================================== 2576 * FUNCTION : dumpFrameToFile 2577 * 2578 * DESCRIPTION: helper function to dump jpeg into file for debug purpose. 
2579 * 2580 * PARAMETERS : 2581 * @data : data ptr 2582 * @size : length of data buffer 2583 * @index : identifier for data 2584 * 2585 * RETURN : None 2586 *==========================================================================*/ 2587 void QCamera2HardwareInterface::dumpJpegToFile(const void *data, 2588 size_t size, uint32_t index) 2589 { 2590 char value[PROPERTY_VALUE_MAX]; 2591 property_get("persist.camera.dumpimg", value, "0"); 2592 uint32_t enabled = (uint32_t) atoi(value); 2593 uint32_t frm_num = 0; 2594 uint32_t skip_mode = 0; 2595 2596 char buf[32]; 2597 cam_dimension_t dim; 2598 memset(buf, 0, sizeof(buf)); 2599 memset(&dim, 0, sizeof(dim)); 2600 2601 if(((enabled & QCAMERA_DUMP_FRM_OUTPUT_JPEG) && data) || 2602 ((true == m_bIntJpegEvtPending) && data)) { 2603 frm_num = ((enabled & 0xffff0000) >> 16); 2604 if(frm_num == 0) { 2605 frm_num = 10; //default 10 frames 2606 } 2607 if(frm_num > 256) { 2608 frm_num = 256; //256 buffers cycle around 2609 } 2610 skip_mode = ((enabled & 0x0000ff00) >> 8); 2611 if(skip_mode == 0) { 2612 skip_mode = 1; //no-skip 2613 } 2614 2615 if( mDumpSkipCnt % skip_mode == 0) { 2616 if((frm_num == 256) && (mDumpFrmCnt >= frm_num)) { 2617 // reset frame count if cycling 2618 mDumpFrmCnt = 0; 2619 } 2620 if (mDumpFrmCnt <= frm_num) { 2621 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION "%d_%d.jpg", 2622 mDumpFrmCnt, index); 2623 if (true == m_bIntJpegEvtPending) { 2624 strlcpy(m_BackendFileName, buf, QCAMERA_MAX_FILEPATH_LENGTH); 2625 mBackendFileSize = size; 2626 } 2627 2628 int file_fd = open(buf, O_RDWR | O_CREAT, 0777); 2629 if (file_fd >= 0) { 2630 ssize_t written_len = write(file_fd, data, size); 2631 fchmod(file_fd, S_IRUSR | S_IWUSR | S_IRGRP | S_IROTH); 2632 LOGH("written number of bytes %zd\n", 2633 written_len); 2634 close(file_fd); 2635 } else { 2636 LOGE("fail to open file for image dumping"); 2637 } 2638 if (false == m_bIntJpegEvtPending) { 2639 mDumpFrmCnt++; 2640 } 2641 } 2642 } 2643 mDumpSkipCnt++; 2644 } 
2645 } 2646 2647 2648 void QCamera2HardwareInterface::dumpMetadataToFile(QCameraStream *stream, 2649 mm_camera_buf_def_t *frame,char *type) 2650 { 2651 char value[PROPERTY_VALUE_MAX]; 2652 uint32_t frm_num = 0; 2653 metadata_buffer_t *metadata = (metadata_buffer_t *)frame->buffer; 2654 property_get("persist.camera.dumpmetadata", value, "0"); 2655 uint32_t enabled = (uint32_t) atoi(value); 2656 if (stream == NULL) { 2657 LOGH("No op"); 2658 return; 2659 } 2660 2661 uint32_t dumpFrmCnt = stream->mDumpMetaFrame; 2662 if(enabled){ 2663 frm_num = ((enabled & 0xffff0000) >> 16); 2664 if (frm_num == 0) { 2665 frm_num = 10; //default 10 frames 2666 } 2667 if (frm_num > 256) { 2668 frm_num = 256; //256 buffers cycle around 2669 } 2670 if ((frm_num == 256) && (dumpFrmCnt >= frm_num)) { 2671 // reset frame count if cycling 2672 dumpFrmCnt = 0; 2673 } 2674 LOGH("dumpFrmCnt= %u, frm_num = %u", dumpFrmCnt, frm_num); 2675 if (dumpFrmCnt < frm_num) { 2676 char timeBuf[128]; 2677 char buf[32]; 2678 memset(buf, 0, sizeof(buf)); 2679 memset(timeBuf, 0, sizeof(timeBuf)); 2680 time_t current_time; 2681 struct tm * timeinfo; 2682 time (¤t_time); 2683 timeinfo = localtime (¤t_time); 2684 if (NULL != timeinfo) { 2685 strftime(timeBuf, sizeof(timeBuf), 2686 QCAMERA_DUMP_FRM_LOCATION "%Y%m%d%H%M%S", timeinfo); 2687 } 2688 String8 filePath(timeBuf); 2689 snprintf(buf, sizeof(buf), "%um_%s_%d.bin", dumpFrmCnt, type, frame->frame_idx); 2690 filePath.append(buf); 2691 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777); 2692 if (file_fd >= 0) { 2693 ssize_t written_len = 0; 2694 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION; 2695 void *data = (void *)((uint8_t *)&metadata->tuning_params.tuning_data_version); 2696 written_len += write(file_fd, data, sizeof(uint32_t)); 2697 data = (void *)((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size); 2698 LOGH("tuning_sensor_data_size %d",(int)(*(int *)data)); 2699 written_len += write(file_fd, data, 
sizeof(uint32_t)); 2700 data = (void *)((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size); 2701 LOGH("tuning_vfe_data_size %d",(int)(*(int *)data)); 2702 written_len += write(file_fd, data, sizeof(uint32_t)); 2703 data = (void *)((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size); 2704 LOGH("tuning_cpp_data_size %d",(int)(*(int *)data)); 2705 written_len += write(file_fd, data, sizeof(uint32_t)); 2706 data = (void *)((uint8_t *)&metadata->tuning_params.tuning_cac_data_size); 2707 LOGH("tuning_cac_data_size %d",(int)(*(int *)data)); 2708 written_len += write(file_fd, data, sizeof(uint32_t)); 2709 data = (void *)((uint8_t *)&metadata->tuning_params.tuning_cac_data_size2); 2710 LOGH("< skrajago >tuning_cac_data_size %d",(int)(*(int *)data)); 2711 written_len += write(file_fd, data, sizeof(uint32_t)); 2712 size_t total_size = metadata->tuning_params.tuning_sensor_data_size; 2713 data = (void *)((uint8_t *)&metadata->tuning_params.data); 2714 written_len += write(file_fd, data, total_size); 2715 total_size = metadata->tuning_params.tuning_vfe_data_size; 2716 data = (void *)((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]); 2717 written_len += write(file_fd, data, total_size); 2718 total_size = metadata->tuning_params.tuning_cpp_data_size; 2719 data = (void *)((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]); 2720 written_len += write(file_fd, data, total_size); 2721 total_size = metadata->tuning_params.tuning_cac_data_size; 2722 data = (void *)((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]); 2723 written_len += write(file_fd, data, total_size); 2724 close(file_fd); 2725 }else { 2726 LOGE("fail t open file for image dumping"); 2727 } 2728 dumpFrmCnt++; 2729 } 2730 } 2731 stream->mDumpMetaFrame = dumpFrmCnt; 2732 } 2733 /*=========================================================================== 2734 * FUNCTION : dumpFrameToFile 2735 * 2736 * DESCRIPTION: helper function to dump frame into file for debug 
purpose. 2737 * 2738 * PARAMETERS : 2739 * @data : data ptr 2740 * @size : length of data buffer 2741 * @index : identifier for data 2742 * @dump_type : type of the frame to be dumped. Only such 2743 * dump type is enabled, the frame will be 2744 * dumped into a file. 2745 * 2746 * RETURN : None 2747 *==========================================================================*/ 2748 void QCamera2HardwareInterface::dumpFrameToFile(QCameraStream *stream, 2749 mm_camera_buf_def_t *frame, uint32_t dump_type, const char *misc) 2750 { 2751 char value[PROPERTY_VALUE_MAX]; 2752 property_get("persist.camera.dumpimg", value, "0"); 2753 uint32_t enabled = (uint32_t) atoi(value); 2754 uint32_t frm_num = 0; 2755 uint32_t skip_mode = 0; 2756 2757 if (NULL == stream) { 2758 LOGE("stream object is null"); 2759 return; 2760 } 2761 2762 uint32_t dumpFrmCnt = stream->mDumpFrame; 2763 2764 if (true == m_bIntRawEvtPending) { 2765 enabled = QCAMERA_DUMP_FRM_RAW; 2766 } 2767 2768 if((enabled & QCAMERA_DUMP_FRM_MASK_ALL)) { 2769 if((enabled & dump_type) && stream && frame) { 2770 frm_num = ((enabled & 0xffff0000) >> 16); 2771 if(frm_num == 0) { 2772 frm_num = 10; //default 10 frames 2773 } 2774 if(frm_num > 256) { 2775 frm_num = 256; //256 buffers cycle around 2776 } 2777 skip_mode = ((enabled & 0x0000ff00) >> 8); 2778 if(skip_mode == 0) { 2779 skip_mode = 1; //no-skip 2780 } 2781 if(stream->mDumpSkipCnt == 0) 2782 stream->mDumpSkipCnt = 1; 2783 2784 if( stream->mDumpSkipCnt % skip_mode == 0) { 2785 if((frm_num == 256) && (dumpFrmCnt >= frm_num)) { 2786 // reset frame count if cycling 2787 dumpFrmCnt = 0; 2788 } 2789 if (dumpFrmCnt <= frm_num) { 2790 char buf[32]; 2791 char timeBuf[128]; 2792 time_t current_time; 2793 struct tm * timeinfo; 2794 2795 memset(timeBuf, 0, sizeof(timeBuf)); 2796 2797 time (¤t_time); 2798 timeinfo = localtime (¤t_time); 2799 memset(buf, 0, sizeof(buf)); 2800 2801 cam_dimension_t dim; 2802 memset(&dim, 0, sizeof(dim)); 2803 stream->getFrameDimension(dim); 2804 
2805 cam_frame_len_offset_t offset; 2806 memset(&offset, 0, sizeof(cam_frame_len_offset_t)); 2807 stream->getFrameOffset(offset); 2808 2809 if (NULL != timeinfo) { 2810 strftime(timeBuf, sizeof(timeBuf), 2811 QCAMERA_DUMP_FRM_LOCATION "%Y%m%d%H%M%S", timeinfo); 2812 } 2813 String8 filePath(timeBuf); 2814 switch (dump_type) { 2815 case QCAMERA_DUMP_FRM_PREVIEW: 2816 { 2817 snprintf(buf, sizeof(buf), "%dp_%dx%d_%d.yuv", 2818 dumpFrmCnt, dim.width, dim.height, frame->frame_idx); 2819 } 2820 break; 2821 case QCAMERA_DUMP_FRM_THUMBNAIL: 2822 { 2823 snprintf(buf, sizeof(buf), "%dt_%dx%d_%d.yuv", 2824 dumpFrmCnt, dim.width, dim.height, frame->frame_idx); 2825 } 2826 break; 2827 case QCAMERA_DUMP_FRM_INPUT_JPEG: 2828 { 2829 if (!mParameters.isPostProcScaling()) { 2830 mParameters.getStreamDimension(CAM_STREAM_TYPE_SNAPSHOT, dim); 2831 } else { 2832 stream->getFrameDimension(dim); 2833 } 2834 if (misc != NULL) { 2835 snprintf(buf, sizeof(buf), "%ds_%dx%d_%d_%s.yuv", 2836 dumpFrmCnt, dim.width, dim.height, frame->frame_idx, misc); 2837 } else { 2838 snprintf(buf, sizeof(buf), "%ds_%dx%d_%d.yuv", 2839 dumpFrmCnt, dim.width, dim.height, frame->frame_idx); 2840 } 2841 } 2842 break; 2843 case QCAMERA_DUMP_FRM_INPUT_REPROCESS: 2844 { 2845 stream->getFrameDimension(dim); 2846 if (misc != NULL) { 2847 snprintf(buf, sizeof(buf), "%dir_%dx%d_%d_%s.yuv", 2848 dumpFrmCnt, dim.width, dim.height, frame->frame_idx, misc); 2849 } else { 2850 snprintf(buf, sizeof(buf), "%dir_%dx%d_%d.yuv", 2851 dumpFrmCnt, dim.width, dim.height, frame->frame_idx); 2852 } 2853 } 2854 break; 2855 case QCAMERA_DUMP_FRM_VIDEO: 2856 { 2857 snprintf(buf, sizeof(buf), "%dv_%dx%d_%d.yuv", 2858 dumpFrmCnt, dim.width, dim.height, frame->frame_idx); 2859 } 2860 break; 2861 case QCAMERA_DUMP_FRM_RAW: 2862 { 2863 mParameters.getStreamDimension(CAM_STREAM_TYPE_RAW, dim); 2864 snprintf(buf, sizeof(buf), "%dr_%dx%d_%d.raw", 2865 dumpFrmCnt, dim.width, dim.height, frame->frame_idx); 2866 } 2867 break; 2868 case 
QCAMERA_DUMP_FRM_OUTPUT_JPEG: 2869 { 2870 mParameters.getStreamDimension(CAM_STREAM_TYPE_SNAPSHOT, dim); 2871 snprintf(buf, sizeof(buf), "%dj_%dx%d_%d.yuv", 2872 dumpFrmCnt, dim.width, dim.height, frame->frame_idx); 2873 } 2874 break; 2875 default: 2876 LOGE("Not supported for dumping stream type %d", 2877 dump_type); 2878 return; 2879 } 2880 2881 filePath.append(buf); 2882 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777); 2883 ssize_t written_len = 0; 2884 if (file_fd >= 0) { 2885 void *data = NULL; 2886 2887 fchmod(file_fd, S_IRUSR | S_IWUSR | S_IRGRP | S_IROTH); 2888 for (uint32_t i = 0; i < offset.num_planes; i++) { 2889 uint32_t index = offset.mp[i].offset; 2890 if (i > 0) { 2891 index += offset.mp[i-1].len; 2892 } 2893 2894 if (offset.mp[i].meta_len != 0) { 2895 data = (void *)((uint8_t *)frame->buffer + index); 2896 written_len += write(file_fd, data, 2897 (size_t)offset.mp[i].meta_len); 2898 index += (uint32_t)offset.mp[i].meta_len; 2899 } 2900 2901 for (int j = 0; j < offset.mp[i].height; j++) { 2902 data = (void *)((uint8_t *)frame->buffer + index); 2903 written_len += write(file_fd, data, 2904 (size_t)offset.mp[i].width); 2905 index += (uint32_t)offset.mp[i].stride; 2906 } 2907 } 2908 2909 LOGH("written number of bytes %ld\n", 2910 written_len); 2911 close(file_fd); 2912 } else { 2913 LOGE("fail to open file for image dumping"); 2914 } 2915 if (true == m_bIntRawEvtPending) { 2916 strlcpy(m_BackendFileName, filePath.string(), QCAMERA_MAX_FILEPATH_LENGTH); 2917 mBackendFileSize = (size_t)written_len; 2918 } else { 2919 dumpFrmCnt++; 2920 } 2921 } 2922 } 2923 stream->mDumpSkipCnt++; 2924 } 2925 } else { 2926 dumpFrmCnt = 0; 2927 } 2928 stream->mDumpFrame = dumpFrmCnt; 2929 } 2930 2931 /*=========================================================================== 2932 * FUNCTION : debugShowVideoFPS 2933 * 2934 * DESCRIPTION: helper function to log video frame FPS for debug purpose. 
2935 * 2936 * PARAMETERS : None 2937 * 2938 * RETURN : None 2939 *==========================================================================*/ 2940 void QCamera2HardwareInterface::debugShowVideoFPS() 2941 { 2942 mVFrameCount++; 2943 nsecs_t now = systemTime(); 2944 nsecs_t diff = now - mVLastFpsTime; 2945 if (diff > ms2ns(250)) { 2946 mVFps = (((double)(mVFrameCount - mVLastFrameCount)) * 2947 (double)(s2ns(1))) / (double)diff; 2948 LOGI("[KPI Perf]: PROFILE_VIDEO_FRAMES_PER_SECOND: %.4f Cam ID = %d", 2949 mVFps, mCameraId); 2950 mVLastFpsTime = now; 2951 mVLastFrameCount = mVFrameCount; 2952 } 2953 } 2954 2955 /*=========================================================================== 2956 * FUNCTION : debugShowPreviewFPS 2957 * 2958 * DESCRIPTION: helper function to log preview frame FPS for debug purpose. 2959 * 2960 * PARAMETERS : None 2961 * 2962 * RETURN : None 2963 *==========================================================================*/ 2964 void QCamera2HardwareInterface::debugShowPreviewFPS() 2965 { 2966 mPFrameCount++; 2967 nsecs_t now = systemTime(); 2968 nsecs_t diff = now - mPLastFpsTime; 2969 if (diff > ms2ns(250)) { 2970 mPFps = (((double)(mPFrameCount - mPLastFrameCount)) * 2971 (double)(s2ns(1))) / (double)diff; 2972 LOGI("[KPI Perf]: PROFILE_PREVIEW_FRAMES_PER_SECOND : %.4f Cam ID = %d", 2973 mPFps, mCameraId); 2974 mPLastFpsTime = now; 2975 mPLastFrameCount = mPFrameCount; 2976 } 2977 } 2978 2979 /*=========================================================================== 2980 * FUNCTION : fillFacesData 2981 * 2982 * DESCRIPTION: helper function to fill in face related metadata into a struct. 
 *
 * PARAMETERS :
 *   @faces_data : face features data to be filled
 *   @metadata   : metadata structure to read face features from
 *
 * RETURN     : None
 *==========================================================================*/
void QCamera2HardwareInterface::fillFacesData(cam_faces_data_t &faces_data,
        metadata_buffer_t *metadata)
{
    // Zero the whole struct first so every *_valid flag starts false and
    // absent features read as empty.
    memset(&faces_data, 0, sizeof(cam_faces_data_t));

    IF_META_AVAILABLE(cam_face_detection_data_t, p_detection_data,
            CAM_INTF_META_FACE_DETECTION, metadata) {
        faces_data.detection_data = *p_detection_data;
        // Clamp the reported face count to the maximum ROI capacity.
        if (faces_data.detection_data.num_faces_detected > MAX_ROI) {
            faces_data.detection_data.num_faces_detected = MAX_ROI;
        }

        LOGH("[KPI Perf] PROFILE_NUMBER_OF_FACES_DETECTED %d",
                faces_data.detection_data.num_faces_detected);

        // The feature blobs below are optional; each is copied out of the
        // metadata only when present, and its matching *_valid flag is set.
        IF_META_AVAILABLE(cam_face_recog_data_t, p_recog_data,
                CAM_INTF_META_FACE_RECOG, metadata) {
            faces_data.recog_valid = true;
            faces_data.recog_data = *p_recog_data;
        }

        IF_META_AVAILABLE(cam_face_blink_data_t, p_blink_data,
                CAM_INTF_META_FACE_BLINK, metadata) {
            faces_data.blink_valid = true;
            faces_data.blink_data = *p_blink_data;
        }

        IF_META_AVAILABLE(cam_face_gaze_data_t, p_gaze_data,
                CAM_INTF_META_FACE_GAZE, metadata) {
            faces_data.gaze_valid = true;
            faces_data.gaze_data = *p_gaze_data;
        }

        IF_META_AVAILABLE(cam_face_smile_data_t, p_smile_data,
                CAM_INTF_META_FACE_SMILE, metadata) {
            faces_data.smile_valid = true;
            faces_data.smile_data = *p_smile_data;
        }

        IF_META_AVAILABLE(cam_face_landmarks_data_t, p_landmarks,
                CAM_INTF_META_FACE_LANDMARK, metadata) {
            faces_data.landmark_valid = true;
            faces_data.landmark_data = *p_landmarks;
        }

        IF_META_AVAILABLE(cam_face_contour_data_t, p_contour,
                CAM_INTF_META_FACE_CONTOUR, metadata) {
            faces_data.contour_valid = true;
            faces_data.contour_data = *p_contour;
        }
    }
}

/*===========================================================================
 * FUNCTION   : ~QCameraCbNotifier
 *
 * DESCRIPTION: Destructor for exiting the callback context.
 *
 * PARAMETERS : None
 *
 * RETURN     : None
 *==========================================================================*/
QCameraCbNotifier::~QCameraCbNotifier()
{
    // Intentionally empty: thread shutdown is handled by exit(), not here.
}

/*===========================================================================
 * FUNCTION   : exit
 *
 * DESCRIPTION: exit notify thread.
 *
 * PARAMETERS : None
 *
 * RETURN     : None
 *==========================================================================*/
void QCameraCbNotifier::exit()
{
    // Mark the notifier inactive so notifyCallback() rejects new work,
    // then ask the command thread to terminate.
    mActive = false;
    mProcTh.exit();
}

/*===========================================================================
 * FUNCTION   : releaseNotifications
 *
 * DESCRIPTION: callback for releasing data stored in the callback queue.
3075 * 3076 * PARAMETERS : 3077 * @data : data to be released 3078 * @user_data : context data 3079 * 3080 * RETURN : None 3081 *==========================================================================*/ 3082 void QCameraCbNotifier::releaseNotifications(void *data, void *user_data) 3083 { 3084 qcamera_callback_argm_t *arg = ( qcamera_callback_argm_t * ) data; 3085 3086 if ( ( NULL != arg ) && ( NULL != user_data ) ) { 3087 if ( arg->release_cb ) { 3088 arg->release_cb(arg->user_data, arg->cookie, FAILED_TRANSACTION); 3089 } 3090 } 3091 } 3092 3093 /*=========================================================================== 3094 * FUNCTION : matchSnapshotNotifications 3095 * 3096 * DESCRIPTION: matches snapshot data callbacks 3097 * 3098 * PARAMETERS : 3099 * @data : data to match 3100 * @user_data : context data 3101 * 3102 * RETURN : bool match 3103 * true - match found 3104 * false- match not found 3105 *==========================================================================*/ 3106 bool QCameraCbNotifier::matchSnapshotNotifications(void *data, 3107 void */*user_data*/) 3108 { 3109 qcamera_callback_argm_t *arg = ( qcamera_callback_argm_t * ) data; 3110 if ( NULL != arg ) { 3111 if ( QCAMERA_DATA_SNAPSHOT_CALLBACK == arg->cb_type ) { 3112 return true; 3113 } 3114 } 3115 3116 return false; 3117 } 3118 3119 /*=========================================================================== 3120 * FUNCTION : matchPreviewNotifications 3121 * 3122 * DESCRIPTION: matches preview data callbacks 3123 * 3124 * PARAMETERS : 3125 * @data : data to match 3126 * @user_data : context data 3127 * 3128 * RETURN : bool match 3129 * true - match found 3130 * false- match not found 3131 *==========================================================================*/ 3132 bool QCameraCbNotifier::matchPreviewNotifications(void *data, 3133 void */*user_data*/) 3134 { 3135 qcamera_callback_argm_t *arg = ( qcamera_callback_argm_t * ) data; 3136 if (NULL != arg) { 3137 if 
((QCAMERA_DATA_CALLBACK == arg->cb_type) && 3138 (CAMERA_MSG_PREVIEW_FRAME == arg->msg_type)) { 3139 return true; 3140 } 3141 } 3142 3143 return false; 3144 } 3145 3146 /*=========================================================================== 3147 * FUNCTION : matchTimestampNotifications 3148 * 3149 * DESCRIPTION: matches timestamp data callbacks 3150 * 3151 * PARAMETERS : 3152 * @data : data to match 3153 * @user_data : context data 3154 * 3155 * RETURN : bool match 3156 * true - match found 3157 * false- match not found 3158 *==========================================================================*/ 3159 bool QCameraCbNotifier::matchTimestampNotifications(void *data, 3160 void */*user_data*/) 3161 { 3162 qcamera_callback_argm_t *arg = ( qcamera_callback_argm_t * ) data; 3163 if (NULL != arg) { 3164 if ((QCAMERA_DATA_TIMESTAMP_CALLBACK == arg->cb_type) && 3165 (CAMERA_MSG_VIDEO_FRAME == arg->msg_type)) { 3166 return true; 3167 } 3168 } 3169 3170 return false; 3171 } 3172 3173 /*=========================================================================== 3174 * FUNCTION : cbNotifyRoutine 3175 * 3176 * DESCRIPTION: callback thread which interfaces with the upper layers 3177 * given input commands. 
3178 * 3179 * PARAMETERS : 3180 * @data : context data 3181 * 3182 * RETURN : None 3183 *==========================================================================*/ 3184 void * QCameraCbNotifier::cbNotifyRoutine(void * data) 3185 { 3186 int running = 1; 3187 int ret; 3188 QCameraCbNotifier *pme = (QCameraCbNotifier *)data; 3189 QCameraCmdThread *cmdThread = &pme->mProcTh; 3190 cmdThread->setName("CAM_cbNotify"); 3191 uint8_t isSnapshotActive = FALSE; 3192 bool longShotEnabled = false; 3193 uint32_t numOfSnapshotExpected = 0; 3194 uint32_t numOfSnapshotRcvd = 0; 3195 int32_t cbStatus = NO_ERROR; 3196 3197 LOGD("E"); 3198 do { 3199 do { 3200 ret = cam_sem_wait(&cmdThread->cmd_sem); 3201 if (ret != 0 && errno != EINVAL) { 3202 LOGD("cam_sem_wait error (%s)", 3203 strerror(errno)); 3204 return NULL; 3205 } 3206 } while (ret != 0); 3207 3208 camera_cmd_type_t cmd = cmdThread->getCmd(); 3209 LOGD("get cmd %d", cmd); 3210 switch (cmd) { 3211 case CAMERA_CMD_TYPE_START_DATA_PROC: 3212 { 3213 isSnapshotActive = TRUE; 3214 numOfSnapshotExpected = pme->mParent->numOfSnapshotsExpected(); 3215 longShotEnabled = pme->mParent->isLongshotEnabled(); 3216 LOGD("Num Snapshots Expected = %d", 3217 numOfSnapshotExpected); 3218 numOfSnapshotRcvd = 0; 3219 } 3220 break; 3221 case CAMERA_CMD_TYPE_STOP_DATA_PROC: 3222 { 3223 pme->mDataQ.flushNodes(matchSnapshotNotifications); 3224 isSnapshotActive = FALSE; 3225 3226 numOfSnapshotExpected = 0; 3227 numOfSnapshotRcvd = 0; 3228 } 3229 break; 3230 case CAMERA_CMD_TYPE_DO_NEXT_JOB: 3231 { 3232 qcamera_callback_argm_t *cb = 3233 (qcamera_callback_argm_t *)pme->mDataQ.dequeue(); 3234 cbStatus = NO_ERROR; 3235 if (NULL != cb) { 3236 LOGD("cb type %d received", 3237 cb->cb_type); 3238 3239 if (pme->mParent->msgTypeEnabledWithLock(cb->msg_type)) { 3240 switch (cb->cb_type) { 3241 case QCAMERA_NOTIFY_CALLBACK: 3242 { 3243 if (cb->msg_type == CAMERA_MSG_FOCUS) { 3244 KPI_ATRACE_INT("Camera:AutoFocus", 0); 3245 LOGH("[KPI Perf] : 
PROFILE_SENDING_FOCUS_EVT_TO APP"); 3246 } 3247 if (pme->mNotifyCb) { 3248 pme->mNotifyCb(cb->msg_type, 3249 cb->ext1, 3250 cb->ext2, 3251 pme->mCallbackCookie); 3252 } else { 3253 LOGW("notify callback not set!"); 3254 } 3255 if (cb->release_cb) { 3256 cb->release_cb(cb->user_data, cb->cookie, 3257 cbStatus); 3258 } 3259 } 3260 break; 3261 case QCAMERA_DATA_CALLBACK: 3262 { 3263 if (pme->mDataCb) { 3264 pme->mDataCb(cb->msg_type, 3265 cb->data, 3266 cb->index, 3267 cb->metadata, 3268 pme->mCallbackCookie); 3269 } else { 3270 LOGW("data callback not set!"); 3271 } 3272 if (cb->release_cb) { 3273 cb->release_cb(cb->user_data, cb->cookie, 3274 cbStatus); 3275 } 3276 } 3277 break; 3278 case QCAMERA_DATA_TIMESTAMP_CALLBACK: 3279 { 3280 if(pme->mDataCbTimestamp) { 3281 pme->mDataCbTimestamp(cb->timestamp, 3282 cb->msg_type, 3283 cb->data, 3284 cb->index, 3285 pme->mCallbackCookie); 3286 } else { 3287 LOGE("Timestamp data callback not set!"); 3288 } 3289 if (cb->release_cb) { 3290 cb->release_cb(cb->user_data, cb->cookie, 3291 cbStatus); 3292 } 3293 } 3294 break; 3295 case QCAMERA_DATA_SNAPSHOT_CALLBACK: 3296 { 3297 if (TRUE == isSnapshotActive && pme->mDataCb ) { 3298 if (!longShotEnabled) { 3299 numOfSnapshotRcvd++; 3300 LOGI("Num Snapshots Received = %d Expected = %d", 3301 numOfSnapshotRcvd, numOfSnapshotExpected); 3302 if (numOfSnapshotExpected > 0 && 3303 (numOfSnapshotExpected == numOfSnapshotRcvd)) { 3304 LOGI("Received all snapshots"); 3305 // notify HWI that snapshot is done 3306 pme->mParent->processSyncEvt(QCAMERA_SM_EVT_SNAPSHOT_DONE, 3307 NULL); 3308 } 3309 } 3310 if (pme->mJpegCb) { 3311 LOGI("Calling JPEG Callback!! 
for camera %d" 3312 "release_data %p", 3313 "frame_idx %d", 3314 pme->mParent->getCameraId(), 3315 cb->user_data, 3316 cb->frame_index); 3317 pme->mJpegCb(cb->msg_type, cb->data, 3318 cb->index, cb->metadata, 3319 pme->mJpegCallbackCookie, 3320 cb->frame_index, cb->release_cb, 3321 cb->cookie, cb->user_data); 3322 // incase of non-null Jpeg cb we transfer 3323 // ownership of buffer to muxer. hence 3324 // release_cb should not be called 3325 // muxer will release after its done with 3326 // processing the buffer 3327 } else if(pme->mDataCb){ 3328 pme->mDataCb(cb->msg_type, cb->data, cb->index, 3329 cb->metadata, pme->mCallbackCookie); 3330 if (cb->release_cb) { 3331 cb->release_cb(cb->user_data, cb->cookie, 3332 cbStatus); 3333 } 3334 } 3335 } 3336 } 3337 break; 3338 default: 3339 { 3340 LOGE("invalid cb type %d", 3341 cb->cb_type); 3342 cbStatus = BAD_VALUE; 3343 if (cb->release_cb) { 3344 cb->release_cb(cb->user_data, cb->cookie, 3345 cbStatus); 3346 } 3347 } 3348 break; 3349 }; 3350 } else { 3351 LOGW("cb message type %d not enabled!", 3352 cb->msg_type); 3353 cbStatus = INVALID_OPERATION; 3354 if (cb->release_cb) { 3355 cb->release_cb(cb->user_data, cb->cookie, cbStatus); 3356 } 3357 } 3358 delete cb; 3359 } else { 3360 LOGW("invalid cb type passed"); 3361 } 3362 } 3363 break; 3364 case CAMERA_CMD_TYPE_EXIT: 3365 { 3366 running = 0; 3367 pme->mDataQ.flush(); 3368 } 3369 break; 3370 default: 3371 break; 3372 } 3373 } while (running); 3374 LOGD("X"); 3375 3376 return NULL; 3377 } 3378 3379 /*=========================================================================== 3380 * FUNCTION : notifyCallback 3381 * 3382 * DESCRIPTION: Enqueus pending callback notifications for the upper layers. 
 *
 * PARAMETERS :
 *   @cbArgs : callback arguments
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int32_t QCameraCbNotifier::notifyCallback(qcamera_callback_argm_t &cbArgs)
{
    if (!mActive) {
        LOGE("notify thread is not active");
        return UNKNOWN_ERROR;
    }

    // Heap-copy the arguments so they outlive the caller's stack frame;
    // the node is owned by mDataQ until cbNotifyRoutine consumes it.
    qcamera_callback_argm_t *cbArg = new qcamera_callback_argm_t();
    // NOTE(review): plain operator new normally throws (or aborts under
    // -fno-exceptions) instead of returning NULL, so this check is likely
    // dead code -- confirm the build flags before relying on it.
    if (NULL == cbArg) {
        LOGE("no mem for qcamera_callback_argm_t");
        return NO_MEMORY;
    }
    memset(cbArg, 0, sizeof(qcamera_callback_argm_t));
    *cbArg = cbArgs;

    if (mDataQ.enqueue((void *)cbArg)) {
        // Wake the notify thread; do not block waiting for completion.
        return mProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
    } else {
        // Enqueue failed: we still own the node, so free it here.
        LOGE("Error adding cb data into queue");
        delete cbArg;
        return UNKNOWN_ERROR;
    }
}

/*===========================================================================
 * FUNCTION   : setCallbacks
 *
 * DESCRIPTION: Initializes the callback functions, which would be used for
 *              communication with the upper layers and launches the callback
 *              context in which the callbacks will occur.
 *
 * PARAMETERS :
 *   @notifyCb        : notification callback
 *   @dataCb          : data callback
 *   @dataCbTimestamp : data with timestamp callback
 *   @callbackCookie  : callback context data
 *
 * RETURN     : None
 *==========================================================================*/
void QCameraCbNotifier::setCallbacks(camera_notify_callback notifyCb,
                                     camera_data_callback dataCb,
                                     camera_data_timestamp_callback dataCbTimestamp,
                                     void *callbackCookie)
{
    // One-shot initialization: only install the callbacks and launch the
    // notify thread if none have been set yet.
    if ( ( NULL == mNotifyCb ) &&
         ( NULL == mDataCb ) &&
         ( NULL == mDataCbTimestamp ) &&
         ( NULL == mCallbackCookie ) ) {
        mNotifyCb = notifyCb;
        mDataCb = dataCb;
        mDataCbTimestamp = dataCbTimestamp;
        mCallbackCookie = callbackCookie;
        mActive = true;
        mProcTh.launch(cbNotifyRoutine, this);
    } else {
        LOGE("Camera callback notifier already initialized!");
    }
}

/*===========================================================================
 * FUNCTION   : setJpegCallBacks
 *
 * DESCRIPTION: Initializes the JPEG callback function, which would be used for
 *              communication with the upper layers and launches the callback
 *              context in which the callbacks will occur.
3456 * 3457 * PARAMETERS : 3458 * @jpegCb : notification callback 3459 * @callbackCookie : callback context data 3460 * 3461 * RETURN : None 3462 *==========================================================================*/ 3463 void QCameraCbNotifier::setJpegCallBacks( 3464 jpeg_data_callback jpegCb, void *callbackCookie) 3465 { 3466 LOGH("Setting JPEG Callback notifier"); 3467 mJpegCb = jpegCb; 3468 mJpegCallbackCookie = callbackCookie; 3469 } 3470 3471 /*=========================================================================== 3472 * FUNCTION : flushPreviewNotifications 3473 * 3474 * DESCRIPTION: flush all pending preview notifications 3475 * from the notifier queue 3476 * 3477 * PARAMETERS : None 3478 * 3479 * RETURN : int32_t type of status 3480 * NO_ERROR -- success 3481 * none-zero failure code 3482 *==========================================================================*/ 3483 int32_t QCameraCbNotifier::flushPreviewNotifications() 3484 { 3485 if (!mActive) { 3486 LOGE("notify thread is not active"); 3487 return UNKNOWN_ERROR; 3488 } 3489 mDataQ.flushNodes(matchPreviewNotifications); 3490 return NO_ERROR; 3491 } 3492 3493 /*=========================================================================== 3494 * FUNCTION : flushVideoNotifications 3495 * 3496 * DESCRIPTION: flush all pending video notifications 3497 * from the notifier queue 3498 * 3499 * PARAMETERS : None 3500 * 3501 * RETURN : int32_t type of status 3502 * NO_ERROR -- success 3503 * none-zero failure code 3504 *==========================================================================*/ 3505 int32_t QCameraCbNotifier::flushVideoNotifications() 3506 { 3507 if (!mActive) { 3508 LOGE("notify thread is not active"); 3509 return UNKNOWN_ERROR; 3510 } 3511 mDataQ.flushNodes(matchTimestampNotifications); 3512 return NO_ERROR; 3513 } 3514 3515 /*=========================================================================== 3516 * FUNCTION : startSnapshots 3517 * 3518 * DESCRIPTION: Enables snapshot mode 
3519 * 3520 * PARAMETERS : None 3521 * 3522 * RETURN : int32_t type of status 3523 * NO_ERROR -- success 3524 * none-zero failure code 3525 *==========================================================================*/ 3526 int32_t QCameraCbNotifier::startSnapshots() 3527 { 3528 return mProcTh.sendCmd(CAMERA_CMD_TYPE_START_DATA_PROC, FALSE, TRUE); 3529 } 3530 3531 /*=========================================================================== 3532 * FUNCTION : stopSnapshots 3533 * 3534 * DESCRIPTION: Disables snapshot processing mode 3535 * 3536 * PARAMETERS : None 3537 * 3538 * RETURN : None 3539 *==========================================================================*/ 3540 void QCameraCbNotifier::stopSnapshots() 3541 { 3542 mProcTh.sendCmd(CAMERA_CMD_TYPE_STOP_DATA_PROC, FALSE, TRUE); 3543 } 3544 3545 }; // namespace qcamera 3546