1 /* Copyright (c) 2012-2014, The Linux Foundataion. All rights reserved. 2 * 3 * Redistribution and use in source and binary forms, with or without 4 * modification, are permitted provided that the following conditions are 5 * met: 6 * * Redistributions of source code must retain the above copyright 7 * notice, this list of conditions and the following disclaimer. 8 * * Redistributions in binary form must reproduce the above 9 * copyright notice, this list of conditions and the following 10 * disclaimer in the documentation and/or other materials provided 11 * with the distribution. 12 * * Neither the name of The Linux Foundation nor the names of its 13 * contributors may be used to endorse or promote products derived 14 * from this software without specific prior written permission. 15 * 16 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED 17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF 18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT 19 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS 20 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR 21 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF 22 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR 23 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, 24 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE 25 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN 26 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
27 * 28 */ 29 30 #define LOG_TAG "QCamera3Channel" 31 //#define LOG_NDEBUG 0 32 #include <fcntl.h> 33 #include <stdlib.h> 34 #include <cstdlib> 35 #include <stdio.h> 36 #include <string.h> 37 #include <hardware/camera3.h> 38 #include <system/camera_metadata.h> 39 #include <gralloc_priv.h> 40 #include <utils/Log.h> 41 #include <utils/Errors.h> 42 #include <cutils/properties.h> 43 #include "QCamera3Channel.h" 44 45 using namespace android; 46 47 #define MIN_STREAMING_BUFFER_NUM 7+11 48 49 namespace qcamera { 50 static const char ExifAsciiPrefix[] = 51 { 0x41, 0x53, 0x43, 0x49, 0x49, 0x0, 0x0, 0x0 }; // "ASCII\0\0\0" 52 static const char ExifUndefinedPrefix[] = 53 { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }; // "\0\0\0\0\0\0\0\0" 54 55 #define EXIF_ASCII_PREFIX_SIZE 8 //(sizeof(ExifAsciiPrefix)) 56 #define FOCAL_LENGTH_DECIMAL_PRECISION 1000 57 58 /*=========================================================================== 59 * FUNCTION : QCamera3Channel 60 * 61 * DESCRIPTION: constrcutor of QCamera3Channel 62 * 63 * PARAMETERS : 64 * @cam_handle : camera handle 65 * @cam_ops : ptr to camera ops table 66 * 67 * RETURN : none 68 *==========================================================================*/ 69 QCamera3Channel::QCamera3Channel(uint32_t cam_handle, 70 mm_camera_ops_t *cam_ops, 71 channel_cb_routine cb_routine, 72 cam_padding_info_t *paddingInfo, 73 void *userData) 74 { 75 m_camHandle = cam_handle; 76 m_camOps = cam_ops; 77 m_bIsActive = false; 78 79 m_handle = 0; 80 m_numStreams = 0; 81 memset(mStreams, 0, sizeof(mStreams)); 82 mUserData = userData; 83 84 mStreamInfoBuf = NULL; 85 mChannelCB = cb_routine; 86 mPaddingInfo = paddingInfo; 87 } 88 89 /*=========================================================================== 90 * FUNCTION : QCamera3Channel 91 * 92 * DESCRIPTION: default constrcutor of QCamera3Channel 93 * 94 * PARAMETERS : none 95 * 96 * RETURN : none 97 *==========================================================================*/ 98 
QCamera3Channel::QCamera3Channel() 99 { 100 m_camHandle = 0; 101 m_camOps = NULL; 102 m_bIsActive = false; 103 104 m_handle = 0; 105 m_numStreams = 0; 106 memset(mStreams, 0, sizeof(mStreams)); 107 mUserData = NULL; 108 109 mStreamInfoBuf = NULL; 110 mChannelCB = NULL; 111 mPaddingInfo = NULL; 112 } 113 114 /*=========================================================================== 115 * FUNCTION : ~QCamera3Channel 116 * 117 * DESCRIPTION: destructor of QCamera3Channel 118 * 119 * PARAMETERS : none 120 * 121 * RETURN : none 122 *==========================================================================*/ 123 QCamera3Channel::~QCamera3Channel() 124 { 125 if (m_bIsActive) 126 stop(); 127 128 for (int i = 0; i < m_numStreams; i++) { 129 if (mStreams[i] != NULL) { 130 delete mStreams[i]; 131 mStreams[i] = 0; 132 } 133 } 134 if (m_handle) { 135 m_camOps->delete_channel(m_camHandle, m_handle); 136 ALOGE("%s: deleting channel %d", __func__, m_handle); 137 m_handle = 0; 138 } 139 m_numStreams = 0; 140 } 141 142 /*=========================================================================== 143 * FUNCTION : init 144 * 145 * DESCRIPTION: initialization of channel 146 * 147 * PARAMETERS : 148 * @attr : channel bundle attribute setting 149 * @dataCB : data notify callback 150 * @userData: user data ptr 151 * 152 * RETURN : int32_t type of status 153 * NO_ERROR -- success 154 * none-zero failure code 155 *==========================================================================*/ 156 int32_t QCamera3Channel::init(mm_camera_channel_attr_t *attr, 157 mm_camera_buf_notify_t dataCB) 158 { 159 m_handle = m_camOps->add_channel(m_camHandle, 160 attr, 161 dataCB, 162 this); 163 if (m_handle == 0) { 164 ALOGE("%s: Add channel failed", __func__); 165 return UNKNOWN_ERROR; 166 } 167 return NO_ERROR; 168 } 169 170 /*=========================================================================== 171 * FUNCTION : addStream 172 * 173 * DESCRIPTION: add a stream into channel 174 * 175 * 
PARAMETERS :
 *   @streamType     : type of the stream to add
 *   @streamFormat   : pixel format of the stream
 *   @streamDim      : dimensions of the stream
 *   @minStreamBufNum: number of stream buffers needed
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int32_t QCamera3Channel::addStream(cam_stream_type_t streamType,
                                  cam_format_t streamFormat,
                                  cam_dimension_t streamDim,
                                  uint8_t minStreamBufNum)
{
    int32_t rc = NO_ERROR;

    if (m_numStreams >= 1) {
        ALOGE("%s: Only one stream per channel supported in v3 Hal", __func__);
        return BAD_VALUE;
    }

    // NOTE(review): unreachable while the one-stream check above rejects
    // m_numStreams >= 1; kept as a defensive bounds check on mStreams[].
    if (m_numStreams >= MAX_STREAM_NUM_IN_BUNDLE) {
        ALOGE("%s: stream number (%d) exceeds max limit (%d)",
              __func__, m_numStreams, MAX_STREAM_NUM_IN_BUNDLE);
        return BAD_VALUE;
    }
    QCamera3Stream *pStream = new QCamera3Stream(m_camHandle,
                                               m_handle,
                                               m_camOps,
                                               mPaddingInfo,
                                               this);
    if (pStream == NULL) {
        ALOGE("%s: No mem for Stream", __func__);
        return NO_MEMORY;
    }

    // The stream registers the static streamCbRoutine trampoline with
    // `this` as userdata; frames come back via the virtual handler.
    rc = pStream->init(streamType, streamFormat, streamDim, NULL, minStreamBufNum,
                       streamCbRoutine, this);
    if (rc == 0) {
        // Channel takes ownership; stream is deleted in ~QCamera3Channel.
        mStreams[m_numStreams] = pStream;
        m_numStreams++;
    } else {
        delete pStream;
    }
    return rc;
}

/*===========================================================================
 * FUNCTION   : start
 *
 * DESCRIPTION: start channel, which will start all streams belong to this channel
 *
 * PARAMETERS :
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int32_t QCamera3Channel::start()
{
    int32_t rc = NO_ERROR;

    if (m_numStreams > 1) {
        ALOGE("%s: bundle not supported", __func__);
    } else if (m_numStreams == 0) {
        return NO_INIT;
    }

    // Starting an already-active channel is treated as a benign no-op.
    if(m_bIsActive) {
        ALOGD("%s: Attempt to start active channel", __func__);
        return rc;
    }

    // Streams must be started before the channel itself is started.
    for (int i = 0; i < m_numStreams; i++) {
        if (mStreams[i] != NULL) {
            mStreams[i]->start();
        }
    }
    rc = m_camOps->start_channel(m_camHandle, m_handle);

    if (rc != NO_ERROR) {
        // Roll back the stream starts if the channel failed to start.
        for (int i = 0; i < m_numStreams; i++) {
            if (mStreams[i] != NULL) {
                mStreams[i]->stop();
            }
        }
    } else {
        m_bIsActive = true;
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : stop
 *
 * DESCRIPTION: stop a channel, which will stop all streams belong to this channel
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int32_t QCamera3Channel::stop()
{
    int32_t rc = NO_ERROR;
    if(!m_bIsActive) {
        ALOGE("%s: Attempt to stop inactive channel",__func__);
        return rc;
    }

    // Mirror of start(): stop streams first, then the channel.
    for (int i = 0; i < m_numStreams; i++) {
        if (mStreams[i] != NULL) {
            mStreams[i]->stop();
        }
    }

    rc = m_camOps->stop_channel(m_camHandle, m_handle);

    // Marked inactive even if stop_channel reported an error.
    m_bIsActive = false;
    return rc;
}

/*===========================================================================
 * FUNCTION   : bufDone
 *
 * DESCRIPTION: return a stream buf back to kernel
 *
 * PARAMETERS :
 *   @recvd_frame  : stream buf frame to be returned
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int32_t QCamera3Channel::bufDone(mm_camera_super_buf_t *recvd_frame)
{
    int32_t rc = NO_ERROR;
    // For each buffer in the superbuf, find the owning stream by handle
    // and hand the buffer index back to it.
    for (int i = 0; i < recvd_frame->num_bufs; i++)
    {
        if (recvd_frame->bufs[i] != NULL) {
           for (int j = 0; j < m_numStreams; j++) {
               if (mStreams[j] != NULL &&
                   mStreams[j]->getMyHandle() == recvd_frame->bufs[i]->stream_id) {
                   rc = mStreams[j]->bufDone(recvd_frame->bufs[i]->buf_idx);
                   break; // break loop j
               }
           }
        }
    }

    // NOTE(review): rc reflects only the last bufDone() call; earlier
    // failures in the loop are overwritten.
    return rc;
}

/*===========================================================================
 * FUNCTION   : getStreamTypeMask
 *
 * DESCRIPTION: Get bit mask of all stream types in this channel
 *
 * PARAMETERS : None
 *
 * RETURN     : Bit mask of all stream types in this channel
 *==========================================================================*/
uint32_t QCamera3Channel::getStreamTypeMask()
{
    uint32_t mask = 0;
    for (int i = 0; i < m_numStreams; i++) {
       mask |= (0x1 << mStreams[i]->getMyType());
    }
    return mask;
}

/*===========================================================================
 * FUNCTION   : getStreamID
 *
 * DESCRIPTION: Get StreamID of requested stream type
 *
 * PARAMETERS : streamMask
 *
 * RETURN     : Stream ID
 *==========================================================================*/
uint32_t QCamera3Channel::getStreamID(uint32_t streamMask)
{
    uint32_t streamID = 0;
    // streamMask is expected to be a single-bit mask; returns the server
    // ID of the first stream whose type bit matches, 0 if none.
    for (int i = 0; i < m_numStreams; i++) {
        if (streamMask == (uint32_t )(0x1 << mStreams[i]->getMyType())) {
            streamID = mStreams[i]->getMyServerID();
            break;
        }
    }
    return streamID;
}

/*===========================================================================
 * FUNCTION   : getStreamByHandle
 *
 * DESCRIPTION: return stream object by stream handle
 *
 * PARAMETERS :
 *   @streamHandle : stream handle
 *
 * RETURN     : stream object.
NULL if not found 380 *==========================================================================*/ 381 QCamera3Stream *QCamera3Channel::getStreamByHandle(uint32_t streamHandle) 382 { 383 for (int i = 0; i < m_numStreams; i++) { 384 if (mStreams[i] != NULL && mStreams[i]->getMyHandle() == streamHandle) { 385 return mStreams[i]; 386 } 387 } 388 return NULL; 389 } 390 391 /*=========================================================================== 392 * FUNCTION : getStreamByIndex 393 * 394 * DESCRIPTION: return stream object by index 395 * 396 * PARAMETERS : 397 * @streamHandle : stream handle 398 * 399 * RETURN : stream object. NULL if not found 400 *==========================================================================*/ 401 QCamera3Stream *QCamera3Channel::getStreamByIndex(uint8_t index) 402 { 403 if (index < m_numStreams) { 404 return mStreams[index]; 405 } 406 return NULL; 407 } 408 409 /*=========================================================================== 410 * FUNCTION : streamCbRoutine 411 * 412 * DESCRIPTION: callback routine for stream 413 * 414 * PARAMETERS : 415 * @streamHandle : stream handle 416 * 417 * RETURN : stream object. 
NULL if not found 418 *==========================================================================*/ 419 void QCamera3Channel::streamCbRoutine(mm_camera_super_buf_t *super_frame, 420 QCamera3Stream *stream, void *userdata) 421 { 422 QCamera3Channel *channel = (QCamera3Channel *)userdata; 423 if (channel == NULL) { 424 ALOGE("%s: invalid channel pointer", __func__); 425 return; 426 } 427 channel->streamCbRoutine(super_frame, stream); 428 } 429 430 /*=========================================================================== 431 * FUNCTION : QCamera3RegularChannel 432 * 433 * DESCRIPTION: constrcutor of QCamera3RegularChannel 434 * 435 * PARAMETERS : 436 * @cam_handle : camera handle 437 * @cam_ops : ptr to camera ops table 438 * @cb_routine : callback routine to frame aggregator 439 * @stream : camera3_stream_t structure 440 * @stream_type: Channel stream type 441 * 442 * RETURN : none 443 *==========================================================================*/ 444 QCamera3RegularChannel::QCamera3RegularChannel(uint32_t cam_handle, 445 mm_camera_ops_t *cam_ops, 446 channel_cb_routine cb_routine, 447 cam_padding_info_t *paddingInfo, 448 void *userData, 449 camera3_stream_t *stream, 450 cam_stream_type_t stream_type) : 451 QCamera3Channel(cam_handle, cam_ops, cb_routine, 452 paddingInfo, userData), 453 mCamera3Stream(stream), 454 mNumBufs(0), 455 mStreamType(stream_type) 456 { 457 } 458 459 /*=========================================================================== 460 * FUNCTION : ~QCamera3RegularChannel 461 * 462 * DESCRIPTION: destructor of QCamera3RegularChannel 463 * 464 * PARAMETERS : none 465 * 466 * RETURN : none 467 *==========================================================================*/ 468 QCamera3RegularChannel::~QCamera3RegularChannel() 469 { 470 mMemory.unregisterBuffers(); 471 } 472 473 /*=========================================================================== 474 * FUNCTION : initialize 475 * 476 * DESCRIPTION: Initialize and add camera 
channel & stream
 *
 * PARAMETERS :
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/

int32_t QCamera3RegularChannel::initialize()
{
    int32_t rc = NO_ERROR;
    cam_format_t streamFormat;
    cam_dimension_t streamDim;

    if (NULL == mCamera3Stream) {
        ALOGE("%s: Camera stream uninitialized", __func__);
        return NO_INIT;
    }

    if (1 <= m_numStreams) {
        // Only one stream per channel supported in v3 Hal
        return NO_ERROR;
    }

    rc = init(NULL, NULL);
    if (rc < 0) {
        ALOGE("%s: init failed", __func__);
        return rc;
    }

    mNumBufs = CAM_MAX_NUM_BUFS_PER_STREAM;

    // Map the HAL pixel format requested by the framework onto the
    // backend cam_format_t, taking the stream type into account for
    // the opaque IMPLEMENTATION_DEFINED case.
    if (mCamera3Stream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
        if (mStreamType == CAM_STREAM_TYPE_VIDEO) {
            streamFormat = CAM_FORMAT_YUV_420_NV12;
        } else if (mStreamType == CAM_STREAM_TYPE_PREVIEW) {
            streamFormat = CAM_FORMAT_YUV_420_NV21;
        } else {
            //TODO: Add a new flag in libgralloc for ZSL buffers, and its size needs
            // to be properly aligned and padded.
            streamFormat = CAM_FORMAT_YUV_420_NV21;
        }
    } else if(mCamera3Stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
        streamFormat = CAM_FORMAT_YUV_420_NV21;
    } else if (mCamera3Stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
            mCamera3Stream->format == HAL_PIXEL_FORMAT_RAW16) {
        // Bayer pattern doesn't matter here.
        // All CAMIF raw format uses 10bit.
        streamFormat = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
    } else {
        //TODO: Fail for other types of streams for now
        ALOGE("%s: format is not IMPLEMENTATION_DEFINED or flexible", __func__);
        return -EINVAL;
    }

    streamDim.width = mCamera3Stream->width;
    streamDim.height = mCamera3Stream->height;

    rc = QCamera3Channel::addStream(mStreamType,
            streamFormat,
            streamDim,
            mNumBufs);

    return rc;
}

/*===========================================================================
 * FUNCTION   : start
 *
 * DESCRIPTION: start a regular channel
 *
 * PARAMETERS :
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int32_t QCamera3RegularChannel::start()
{
    int32_t rc = NO_ERROR;

    // Only start once at least one buffer has been registered; otherwise
    // silently succeed and let a later request() trigger the start.
    if (0 < mMemory.getCnt()) {
        rc = QCamera3Channel::start();
    }

    return rc;
}
/*===========================================================================
 * FUNCTION   : getInternalFormatBuffer
 *
 * DESCRIPTION: return buffer in the internal format structure
 *
 * PARAMETERS :
 *   @buffer : gralloc buffer handle previously registered with this channel
 *
 * RETURN     : stream object. NULL if not found
 *==========================================================================*/
mm_camera_buf_def_t* QCamera3RegularChannel::getInternalFormatBuffer(
                                            buffer_handle_t * buffer)
{
    int32_t index;
    if(buffer == NULL)
        return NULL;
    index = mMemory.getMatchBufIndex((void*)buffer);
    if(index < 0) {
        ALOGE("%s: Could not find object among registered buffers",__func__);
        return NULL;
    }
    return mStreams[0]->getInternalFormatBuffer(index);
}

/*===========================================================================
 * FUNCTION   : request
 *
 * DESCRIPTION: process a request from camera service. Stream on if necessary.
 *
 * PARAMETERS :
 *   @buffer      : buffer to be filled for this request
 *   @frameNumber : framework frame number associated with this buffer
 *
 * RETURN     : 0 on a success start of capture
 *              -EINVAL on invalid input
 *              -ENODEV on serious error
 *==========================================================================*/
int32_t QCamera3RegularChannel::request(buffer_handle_t *buffer, uint32_t frameNumber)
{
    //FIX ME: Return buffer back in case of failures below.

    int32_t rc = NO_ERROR;
    int index;

    if (NULL == buffer) {
        ALOGE("%s: Invalid buffer in channel request", __func__);
        return BAD_VALUE;
    }

    // First request on an inactive channel: register the buffer
    // on the fly and stream on.
    if(!m_bIsActive) {
        rc = registerBuffer(buffer);
        if (NO_ERROR != rc) {
            ALOGE("%s: On-the-fly buffer registration failed %d",
                    __func__, rc);
            return rc;
        }

        rc = start();
        if (NO_ERROR != rc) {
            return rc;
        }
    } else {
        ALOGV("%s: Request on an existing stream",__func__);
    }

    // Unknown buffer handle on an active channel: register it now and
    // look it up again.
    index = mMemory.getMatchBufIndex((void*)buffer);
    if(index < 0) {
        rc = registerBuffer(buffer);
        if (NO_ERROR != rc) {
            ALOGE("%s: On-the-fly buffer registration failed %d",
                    __func__, rc);
            return rc;
        }

        index = mMemory.getMatchBufIndex((void*)buffer);
        if (index < 0) {
            ALOGE("%s: Could not find object among registered buffers",
                    __func__);
            return DEAD_OBJECT;
        }
    }

    // Queue the (empty) buffer to the stream to be filled.
    rc = mStreams[0]->bufDone(index);
    if(rc != NO_ERROR) {
        ALOGE("%s: Failed to Q new buffer to stream",__func__);
        return rc;
    }

    // Remember which framework frame number this buffer belongs to so the
    // stream callback can report it back.
    rc = mMemory.markFrameNumber(index, frameNumber);
    return rc;
}

/*===========================================================================
 * FUNCTION   : registerBuffer
 *
 * DESCRIPTION: register streaming buffer to the channel object
 *
 * PARAMETERS :
 *   @buffer : buffer to be registered
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
*==========================================================================*/ 668 int32_t QCamera3RegularChannel::registerBuffer(buffer_handle_t *buffer) 669 { 670 int rc = 0; 671 672 if ((uint32_t)mMemory.getCnt() > (mNumBufs - 1)) { 673 ALOGE("%s: Trying to register more buffers than initially requested", 674 __func__); 675 return BAD_VALUE; 676 } 677 678 if (0 == m_numStreams) { 679 rc = initialize(); 680 if (rc != NO_ERROR) { 681 ALOGE("%s: Couldn't initialize camera stream %d", 682 __func__, rc); 683 return rc; 684 } 685 } 686 687 rc = mMemory.registerBuffer(buffer); 688 if (ALREADY_EXISTS == rc) { 689 return NO_ERROR; 690 } else if (NO_ERROR != rc) { 691 ALOGE("%s: Buffer %p couldn't be registered %d", __func__, buffer, rc); 692 return rc; 693 } 694 695 return rc; 696 } 697 698 void QCamera3RegularChannel::streamCbRoutine( 699 mm_camera_super_buf_t *super_frame, 700 QCamera3Stream *stream) 701 { 702 //FIXME Q Buf back in case of error? 703 uint8_t frameIndex; 704 buffer_handle_t *resultBuffer; 705 int32_t resultFrameNumber; 706 camera3_stream_buffer_t result; 707 708 if(!super_frame) { 709 ALOGE("%s: Invalid Super buffer",__func__); 710 return; 711 } 712 713 if(super_frame->num_bufs != 1) { 714 ALOGE("%s: Multiple streams are not supported",__func__); 715 return; 716 } 717 if(super_frame->bufs[0] == NULL ) { 718 ALOGE("%s: Error, Super buffer frame does not contain valid buffer", 719 __func__); 720 return; 721 } 722 723 frameIndex = (uint8_t)super_frame->bufs[0]->buf_idx; 724 if(frameIndex >= mNumBufs) { 725 ALOGE("%s: Error, Invalid index for buffer",__func__); 726 if(stream) { 727 stream->bufDone(frameIndex); 728 } 729 return; 730 } 731 732 ////Use below data to issue framework callback 733 resultBuffer = (buffer_handle_t *)mMemory.getBufferHandle(frameIndex); 734 resultFrameNumber = mMemory.getFrameNumber(frameIndex); 735 736 result.stream = mCamera3Stream; 737 result.buffer = resultBuffer; 738 result.status = CAMERA3_BUFFER_STATUS_OK; 739 
result.acquire_fence = -1; 740 result.release_fence = -1; 741 742 mChannelCB(NULL, &result, resultFrameNumber, mUserData); 743 free(super_frame); 744 return; 745 } 746 747 QCamera3Memory* QCamera3RegularChannel::getStreamBufs(uint32_t /*len*/) 748 { 749 return &mMemory; 750 } 751 752 int QCamera3RegularChannel::kMaxBuffers = 7; 753 754 QCamera3MetadataChannel::QCamera3MetadataChannel(uint32_t cam_handle, 755 mm_camera_ops_t *cam_ops, 756 channel_cb_routine cb_routine, 757 cam_padding_info_t *paddingInfo, 758 void *userData) : 759 QCamera3Channel(cam_handle, cam_ops, 760 cb_routine, paddingInfo, userData), 761 mMemory(NULL) 762 { 763 } 764 765 QCamera3MetadataChannel::~QCamera3MetadataChannel() 766 { 767 if (m_bIsActive) 768 stop(); 769 770 if (mMemory) { 771 mMemory->deallocate(); 772 delete mMemory; 773 mMemory = NULL; 774 } 775 } 776 777 int32_t QCamera3MetadataChannel::initialize() 778 { 779 int32_t rc; 780 cam_dimension_t streamDim; 781 782 if (mMemory || m_numStreams > 0) { 783 ALOGE("%s: metadata channel already initialized", __func__); 784 return -EINVAL; 785 } 786 787 rc = init(NULL, NULL); 788 if (rc < 0) { 789 ALOGE("%s: init failed", __func__); 790 return rc; 791 } 792 793 streamDim.width = sizeof(metadata_buffer_t), 794 streamDim.height = 1; 795 rc = QCamera3Channel::addStream(CAM_STREAM_TYPE_METADATA, CAM_FORMAT_MAX, 796 streamDim, MIN_STREAMING_BUFFER_NUM); 797 if (rc < 0) { 798 ALOGE("%s: addStream failed", __func__); 799 } 800 return rc; 801 } 802 803 int32_t QCamera3MetadataChannel::request(buffer_handle_t * /*buffer*/, 804 uint32_t /*frameNumber*/) 805 { 806 if (!m_bIsActive) { 807 return start(); 808 } 809 else 810 return 0; 811 } 812 813 void QCamera3MetadataChannel::streamCbRoutine( 814 mm_camera_super_buf_t *super_frame, 815 QCamera3Stream * /*stream*/) 816 { 817 uint32_t requestNumber = 0; 818 if (super_frame == NULL || super_frame->num_bufs != 1) { 819 ALOGE("%s: super_frame is not valid", __func__); 820 return; 821 } 822 
mChannelCB(super_frame, NULL, requestNumber, mUserData); 823 } 824 825 QCamera3Memory* QCamera3MetadataChannel::getStreamBufs(uint32_t len) 826 { 827 int rc; 828 if (len < sizeof(metadata_buffer_t)) { 829 ALOGE("%s: size doesn't match %d vs %d", __func__, 830 len, sizeof(metadata_buffer_t)); 831 return NULL; 832 } 833 mMemory = new QCamera3HeapMemory(); 834 if (!mMemory) { 835 ALOGE("%s: unable to create metadata memory", __func__); 836 return NULL; 837 } 838 rc = mMemory->allocate(MIN_STREAMING_BUFFER_NUM, len, true); 839 if (rc < 0) { 840 ALOGE("%s: unable to allocate metadata memory", __func__); 841 delete mMemory; 842 mMemory = NULL; 843 return NULL; 844 } 845 memset(mMemory->getPtr(0), 0, sizeof(metadata_buffer_t)); 846 return mMemory; 847 } 848 849 void QCamera3MetadataChannel::putStreamBufs() 850 { 851 mMemory->deallocate(); 852 delete mMemory; 853 mMemory = NULL; 854 } 855 /*************************************************************************************/ 856 // RAW Channel related functions 857 int QCamera3RawChannel::kMaxBuffers = 7; 858 859 QCamera3RawChannel::QCamera3RawChannel(uint32_t cam_handle, 860 mm_camera_ops_t *cam_ops, 861 channel_cb_routine cb_routine, 862 cam_padding_info_t *paddingInfo, 863 void *userData, 864 camera3_stream_t *stream, 865 bool raw_16) : 866 QCamera3RegularChannel(cam_handle, cam_ops, 867 cb_routine, paddingInfo, userData, stream, 868 CAM_STREAM_TYPE_RAW), 869 mIsRaw16(raw_16) 870 { 871 char prop[PROPERTY_VALUE_MAX]; 872 property_get("persist.camera.raw.dump", prop, "0"); 873 mRawDump = atoi(prop); 874 } 875 876 QCamera3RawChannel::~QCamera3RawChannel() 877 { 878 } 879 880 void QCamera3RawChannel::streamCbRoutine( 881 mm_camera_super_buf_t *super_frame, 882 QCamera3Stream * stream) 883 { 884 /* Move this back down once verified */ 885 if (mRawDump) 886 dumpRawSnapshot(super_frame->bufs[0]); 887 888 if (mIsRaw16) 889 convertToRaw16(super_frame->bufs[0]); 890 891 //Make sure cache coherence because extra processing is done 
892 mMemory.cleanInvalidateCache(super_frame->bufs[0]->buf_idx); 893 894 QCamera3RegularChannel::streamCbRoutine(super_frame, stream); 895 return; 896 } 897 898 void QCamera3RawChannel::dumpRawSnapshot(mm_camera_buf_def_t *frame) 899 { 900 QCamera3Stream *stream = getStreamByIndex(0); 901 char buf[32]; 902 memset(buf, 0, sizeof(buf)); 903 cam_dimension_t dim; 904 memset(&dim, 0, sizeof(dim)); 905 stream->getFrameDimension(dim); 906 907 cam_frame_len_offset_t offset; 908 memset(&offset, 0, sizeof(cam_frame_len_offset_t)); 909 stream->getFrameOffset(offset); 910 snprintf(buf, sizeof(buf), "/data/r_%d_%dx%d.raw", 911 frame->frame_idx, dim.width, dim.height); 912 913 int file_fd = open(buf, O_RDWR| O_CREAT, 0777); 914 if (file_fd >= 0) { 915 int written_len = write(file_fd, frame->buffer, offset.frame_len); 916 ALOGE("%s: written number of bytes %d", __func__, written_len); 917 close(file_fd); 918 } else { 919 ALOGE("%s: failed to open file to dump image", __func__); 920 } 921 922 } 923 924 void QCamera3RawChannel::convertToRaw16(mm_camera_buf_def_t *frame) 925 { 926 // Convert image buffer from Opaque raw format to RAW16 format 927 // 10bit Opaque raw is stored in the format of: 928 // 0000 - p5 - p4 - p3 - p2 - p1 - p0 929 // where p0 to p5 are 6 pixels (each is 10bit)_and most significant 930 // 4 bits are 0s. Each 64bit word contains 6 pixels. 931 932 QCamera3Stream *stream = getStreamByIndex(0); 933 cam_dimension_t dim; 934 memset(&dim, 0, sizeof(dim)); 935 stream->getFrameDimension(dim); 936 937 cam_frame_len_offset_t offset; 938 memset(&offset, 0, sizeof(cam_frame_len_offset_t)); 939 stream->getFrameOffset(offset); 940 941 uint32_t raw16_stride = (dim.width + 15) & ~15; 942 uint16_t* raw16_buffer = (uint16_t *)frame->buffer; 943 944 // In-place format conversion. 945 // Raw16 format always occupy more memory than opaque raw10. 946 // Convert to Raw16 by iterating through all pixels from bottom-right 947 // to top-left of the image. 
948 // One special notes: 949 // 1. Cross-platform raw16's stride is 16 pixels. 950 // 2. Opaque raw10's stride is 6 pixels, and aligned to 16 bytes. 951 for (int y = dim.height-1; y >= 0; y--) { 952 uint64_t* row_start = (uint64_t *)frame->buffer + 953 y * offset.mp[0].stride / 8; 954 for (int x = dim.width-1; x >= 0; x--) { 955 uint16_t raw16_pixel = 0x3FF & (row_start[x/6] >> (10*(x%6))); 956 raw16_buffer[y*raw16_stride+x] = raw16_pixel; 957 } 958 } 959 } 960 961 /*************************************************************************************/ 962 963 /*=========================================================================== 964 * FUNCTION : jpegEvtHandle 965 * 966 * DESCRIPTION: Function registerd to mm-jpeg-interface to handle jpeg events. 967 Construct result payload and call mChannelCb to deliver buffer 968 to framework. 969 * 970 * PARAMETERS : 971 * @status : status of jpeg job 972 * @client_hdl: jpeg client handle 973 * @jobId : jpeg job Id 974 * @p_ouput : ptr to jpeg output result struct 975 * @userdata : user data ptr 976 * 977 * RETURN : none 978 *==========================================================================*/ 979 void QCamera3PicChannel::jpegEvtHandle(jpeg_job_status_t status, 980 uint32_t /*client_hdl*/, 981 uint32_t jobId, 982 mm_jpeg_output_t *p_output, 983 void *userdata) 984 { 985 buffer_handle_t *resultBuffer, *jpegBufferHandle; 986 int32_t resultFrameNumber; 987 int resultStatus = CAMERA3_BUFFER_STATUS_OK; 988 camera3_stream_buffer_t result; 989 camera3_jpeg_blob_t jpegHeader; 990 char* jpeg_eof = 0; 991 int maxJpegSize; 992 QCamera3PicChannel *obj = (QCamera3PicChannel *)userdata; 993 if (obj) { 994 995 //Release any cached metabuffer information 996 if (obj->mMetaFrame != NULL && obj->m_pMetaChannel != NULL) { 997 ((QCamera3MetadataChannel*)(obj->m_pMetaChannel))->bufDone(obj->mMetaFrame); 998 obj->mMetaFrame = NULL; 999 obj->m_pMetaChannel = NULL; 1000 } else { 1001 ALOGE("%s: Meta frame was NULL", __func__); 1002 } 
1003 //Construct payload for process_capture_result. Call mChannelCb 1004 1005 qcamera_jpeg_data_t *job = obj->m_postprocessor.findJpegJobByJobId(jobId); 1006 1007 if ((job == NULL) || (status == JPEG_JOB_STATUS_ERROR)) { 1008 ALOGE("%s: Error in jobId: (%d) with status: %d", __func__, jobId, status); 1009 resultStatus = CAMERA3_BUFFER_STATUS_ERROR; 1010 } 1011 1012 //Construct jpeg transient header of type camera3_jpeg_blob_t 1013 //Append at the end of jpeg image of buf_filled_len size 1014 1015 jpegHeader.jpeg_blob_id = CAMERA3_JPEG_BLOB_ID; 1016 jpegHeader.jpeg_size = p_output->buf_filled_len; 1017 1018 1019 char* jpeg_buf = (char *)p_output->buf_vaddr; 1020 1021 // Gralloc buffer may have additional padding for 4K page size 1022 // Follow size guidelines based on spec since framework relies 1023 // on that to reach end of buffer and with it the header 1024 1025 //Handle same as resultBuffer, but for readablity 1026 jpegBufferHandle = 1027 (buffer_handle_t *)obj->mMemory.getBufferHandle(obj->mCurrentBufIndex); 1028 1029 maxJpegSize = ((private_handle_t*)(*jpegBufferHandle))->width; 1030 if (maxJpegSize > obj->mMemory.getSize(obj->mCurrentBufIndex)) { 1031 maxJpegSize = obj->mMemory.getSize(obj->mCurrentBufIndex); 1032 } 1033 1034 jpeg_eof = &jpeg_buf[maxJpegSize-sizeof(jpegHeader)]; 1035 memcpy(jpeg_eof, &jpegHeader, sizeof(jpegHeader)); 1036 obj->mMemory.cleanInvalidateCache(obj->mCurrentBufIndex); 1037 1038 ////Use below data to issue framework callback 1039 resultBuffer = (buffer_handle_t *)obj->mMemory.getBufferHandle(obj->mCurrentBufIndex); 1040 resultFrameNumber = obj->mMemory.getFrameNumber(obj->mCurrentBufIndex); 1041 1042 result.stream = obj->mCamera3Stream; 1043 result.buffer = resultBuffer; 1044 result.status = resultStatus; 1045 result.acquire_fence = -1; 1046 result.release_fence = -1; 1047 1048 ALOGV("%s: Issue Callback", __func__); 1049 obj->mChannelCB(NULL, &result, resultFrameNumber, obj->mUserData); 1050 1051 // release internal data for jpeg 
job 1052 if (job != NULL) { 1053 obj->m_postprocessor.releaseJpegJobData(job); 1054 free(job); 1055 } 1056 return; 1057 // } 1058 } else { 1059 ALOGE("%s: Null userdata in jpeg callback", __func__); 1060 } 1061 } 1062 1063 QCamera3PicChannel::QCamera3PicChannel(uint32_t cam_handle, 1064 mm_camera_ops_t *cam_ops, 1065 channel_cb_routine cb_routine, 1066 cam_padding_info_t *paddingInfo, 1067 void *userData, 1068 camera3_stream_t *stream) : 1069 QCamera3Channel(cam_handle, cam_ops, cb_routine, 1070 paddingInfo, userData), 1071 m_postprocessor(this), 1072 mCamera3Stream(stream), 1073 mNumBufs(0), 1074 mCurrentBufIndex(-1), 1075 mYuvMemory(NULL), 1076 mMetaFrame(NULL) 1077 { 1078 mYuvWidth = stream->width; 1079 mYuvHeight = stream->height; 1080 int32_t rc = m_postprocessor.init(&mMemory, jpegEvtHandle, this); 1081 if (rc != 0) { 1082 ALOGE("Init Postprocessor failed"); 1083 } 1084 } 1085 1086 /*=========================================================================== 1087 * FUNCTION : stop 1088 * 1089 * DESCRIPTION: stop pic channel, which will stop all streams within, including 1090 * the reprocessing channel in postprocessor and YUV stream. 
1091 * 1092 * PARAMETERS : none 1093 * 1094 * RETURN : int32_t type of status 1095 * NO_ERROR -- success 1096 * none-zero failure code 1097 *==========================================================================*/ 1098 int32_t QCamera3PicChannel::stop() 1099 { 1100 int32_t rc = NO_ERROR; 1101 if(!m_bIsActive) { 1102 ALOGE("%s: Attempt to stop inactive channel",__func__); 1103 return rc; 1104 } 1105 1106 m_postprocessor.stop(); 1107 1108 rc |= QCamera3Channel::stop(); 1109 return rc; 1110 } 1111 1112 QCamera3PicChannel::~QCamera3PicChannel() 1113 { 1114 stop(); 1115 1116 int32_t rc = m_postprocessor.deinit(); 1117 if (rc != 0) { 1118 ALOGE("De-init Postprocessor failed"); 1119 } 1120 } 1121 1122 int32_t QCamera3PicChannel::initialize() 1123 { 1124 int32_t rc = NO_ERROR; 1125 cam_dimension_t streamDim; 1126 cam_stream_type_t streamType; 1127 cam_format_t streamFormat; 1128 mm_camera_channel_attr_t attr; 1129 1130 if (NULL == mCamera3Stream) { 1131 ALOGE("%s: Camera stream uninitialized", __func__); 1132 return NO_INIT; 1133 } 1134 1135 if (1 <= m_numStreams) { 1136 // Only one stream per channel supported in v3 Hal 1137 return NO_ERROR; 1138 } 1139 1140 memset(&attr, 0, sizeof(mm_camera_channel_attr_t)); 1141 attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_BURST; 1142 attr.look_back = 1; 1143 attr.post_frame_skip = 1; 1144 attr.water_mark = 1; 1145 attr.max_unmatched_frames = 1; 1146 1147 rc = init(&attr, NULL); 1148 if (rc < 0) { 1149 ALOGE("%s: init failed", __func__); 1150 return rc; 1151 } 1152 1153 streamType = CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT; 1154 streamFormat = CAM_FORMAT_YUV_420_NV21; 1155 streamDim.width = mYuvWidth; 1156 streamDim.height = mYuvHeight; 1157 1158 int num_buffers = 1; 1159 mNumBufs = CAM_MAX_NUM_BUFS_PER_STREAM; 1160 rc = QCamera3Channel::addStream(streamType, streamFormat, streamDim, 1161 num_buffers); 1162 1163 return rc; 1164 } 1165 1166 int32_t QCamera3PicChannel::request(buffer_handle_t *buffer, 1167 uint32_t frameNumber, 1168 
mm_camera_buf_def_t *pInputBuffer, 1169 metadata_buffer_t *metadata) 1170 { 1171 //FIX ME: Return buffer back in case of failures below. 1172 1173 int32_t rc = NO_ERROR; 1174 int index; 1175 // Picture stream has already been started before any request comes in 1176 if (!m_bIsActive) { 1177 ALOGE("%s: Channel not started!!", __func__); 1178 return NO_INIT; 1179 } 1180 1181 index = mMemory.getMatchBufIndex((void*)buffer); 1182 if(index < 0) { 1183 rc = registerBuffer(buffer); 1184 if (NO_ERROR != rc) { 1185 ALOGE("%s: On-the-fly buffer registration failed %d", 1186 __func__, rc); 1187 return rc; 1188 } 1189 1190 index = mMemory.getMatchBufIndex((void*)buffer); 1191 if (index < 0) { 1192 ALOGE("%s: Could not find object among registered buffers",__func__); 1193 return DEAD_OBJECT; 1194 } 1195 } 1196 rc = mMemory.markFrameNumber(index, frameNumber); 1197 1198 //Start the postprocessor for jpeg encoding. Pass mMemory as destination buffer 1199 mCurrentBufIndex = index; 1200 1201 // Start postprocessor 1202 m_postprocessor.start(this, metadata); 1203 1204 // Queue jpeg settings 1205 rc = queueJpegSetting(index, metadata); 1206 1207 if (pInputBuffer == NULL) 1208 mStreams[0]->bufDone(0); 1209 else { 1210 mm_camera_super_buf_t *src_frame = NULL; 1211 src_frame = (mm_camera_super_buf_t *)malloc( 1212 sizeof(mm_camera_super_buf_t)); 1213 if (src_frame == NULL) { 1214 ALOGE("%s: No memory for src frame", __func__); 1215 return NO_MEMORY; 1216 } 1217 memset(src_frame, 0, sizeof(mm_camera_super_buf_t)); 1218 src_frame->num_bufs = 1; 1219 src_frame->bufs[0] = pInputBuffer; 1220 1221 ALOGD("%s: Post-process started", __func__); 1222 ALOGD("%s: Issue call to reprocess", __func__); 1223 1224 m_postprocessor.processPPMetadata(metadata); 1225 m_postprocessor.processData(src_frame); 1226 } 1227 return rc; 1228 } 1229 1230 /*=========================================================================== 1231 * FUNCTION : dataNotifyCB 1232 * 1233 * DESCRIPTION: Channel Level callback used 
for super buffer data notify. 1234 * This function is registered with mm-camera-interface to handle 1235 * data notify 1236 * 1237 * PARAMETERS : 1238 * @recvd_frame : stream frame received 1239 * userdata : user data ptr 1240 * 1241 * RETURN : none 1242 *==========================================================================*/ 1243 void QCamera3PicChannel::dataNotifyCB(mm_camera_super_buf_t *recvd_frame, 1244 void *userdata) 1245 { 1246 ALOGV("%s: E\n", __func__); 1247 QCamera3PicChannel *channel = (QCamera3PicChannel *)userdata; 1248 1249 if (channel == NULL) { 1250 ALOGE("%s: invalid channel pointer", __func__); 1251 return; 1252 } 1253 1254 if(channel->m_numStreams != 1) { 1255 ALOGE("%s: Error: Bug: This callback assumes one stream per channel",__func__); 1256 return; 1257 } 1258 1259 1260 if(channel->mStreams[0] == NULL) { 1261 ALOGE("%s: Error: Invalid Stream object",__func__); 1262 return; 1263 } 1264 1265 channel->QCamera3PicChannel::streamCbRoutine(recvd_frame, channel->mStreams[0]); 1266 1267 ALOGV("%s: X\n", __func__); 1268 return; 1269 } 1270 1271 /*=========================================================================== 1272 * FUNCTION : registerBuffer 1273 * 1274 * DESCRIPTION: register streaming buffer to the channel object 1275 * 1276 * PARAMETERS : 1277 * @buffer : buffer to be registered 1278 * 1279 * RETURN : int32_t type of status 1280 * NO_ERROR -- success 1281 * none-zero failure code 1282 *==========================================================================*/ 1283 int32_t QCamera3PicChannel::registerBuffer(buffer_handle_t *buffer) 1284 { 1285 int rc = 0; 1286 1287 if ((uint32_t)mMemory.getCnt() > (mNumBufs - 1)) { 1288 ALOGE("%s: Trying to register more buffers than initially requested", 1289 __func__); 1290 return BAD_VALUE; 1291 } 1292 1293 if (0 == m_numStreams) { 1294 rc = initialize(); 1295 if (rc != NO_ERROR) { 1296 ALOGE("%s: Couldn't initialize camera stream %d", 1297 __func__, rc); 1298 return rc; 1299 } 1300 } 1301 rc = 
mMemory.registerBuffer(buffer); 1302 if (ALREADY_EXISTS == rc) { 1303 return NO_ERROR; 1304 } else if (NO_ERROR != rc) { 1305 ALOGE("%s: Buffer %p couldn't be registered %d", __func__, buffer, rc); 1306 return rc; 1307 } 1308 1309 return rc; 1310 } 1311 1312 void QCamera3PicChannel::streamCbRoutine(mm_camera_super_buf_t *super_frame, 1313 QCamera3Stream *stream) 1314 { 1315 //TODO 1316 //Used only for getting YUV. Jpeg callback will be sent back from channel 1317 //directly to HWI. Refer to func jpegEvtHandle 1318 1319 //Got the yuv callback. Calling yuv callback handler in PostProc 1320 uint8_t frameIndex; 1321 mm_camera_super_buf_t* frame = NULL; 1322 if(!super_frame) { 1323 ALOGE("%s: Invalid Super buffer",__func__); 1324 return; 1325 } 1326 1327 if(super_frame->num_bufs != 1) { 1328 ALOGE("%s: Multiple streams are not supported",__func__); 1329 return; 1330 } 1331 if(super_frame->bufs[0] == NULL ) { 1332 ALOGE("%s: Error, Super buffer frame does not contain valid buffer", 1333 __func__); 1334 return; 1335 } 1336 1337 frameIndex = (uint8_t)super_frame->bufs[0]->buf_idx; 1338 if(frameIndex >= mNumBufs) { 1339 ALOGE("%s: Error, Invalid index for buffer",__func__); 1340 if(stream) { 1341 stream->bufDone(frameIndex); 1342 } 1343 return; 1344 } 1345 1346 frame = (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t)); 1347 if (frame == NULL) { 1348 ALOGE("%s: Error allocating memory to save received_frame structure.", 1349 __func__); 1350 if(stream) { 1351 stream->bufDone(frameIndex); 1352 } 1353 return; 1354 } 1355 *frame = *super_frame; 1356 m_postprocessor.processData(frame); 1357 free(super_frame); 1358 return; 1359 } 1360 1361 QCamera3Memory* QCamera3PicChannel::getStreamBufs(uint32_t len) 1362 { 1363 int rc = 0; 1364 1365 mYuvMemory = new QCamera3HeapMemory(); 1366 if (!mYuvMemory) { 1367 ALOGE("%s: unable to create metadata memory", __func__); 1368 return NULL; 1369 } 1370 1371 //Queue YUV buffers in the beginning mQueueAll = true 1372 rc = 
mYuvMemory->allocate(1, len, false); 1373 if (rc < 0) { 1374 ALOGE("%s: unable to allocate metadata memory", __func__); 1375 delete mYuvMemory; 1376 mYuvMemory = NULL; 1377 return NULL; 1378 } 1379 return mYuvMemory; 1380 } 1381 1382 void QCamera3PicChannel::putStreamBufs() 1383 { 1384 mMemory.unregisterBuffers(); 1385 1386 mYuvMemory->deallocate(); 1387 delete mYuvMemory; 1388 mYuvMemory = NULL; 1389 } 1390 1391 int32_t QCamera3PicChannel::queueReprocMetadata(metadata_buffer_t *metadata) 1392 { 1393 return m_postprocessor.processPPMetadata(metadata); 1394 } 1395 1396 int32_t QCamera3PicChannel::queueJpegSetting(int32_t index, metadata_buffer_t *metadata) 1397 { 1398 jpeg_settings_t *settings = 1399 (jpeg_settings_t *)malloc(sizeof(jpeg_settings_t)); 1400 1401 if (!settings) { 1402 ALOGE("%s: out of memory allocating jpeg_settings", __func__); 1403 return -ENOMEM; 1404 } 1405 1406 memset(settings, 0, sizeof(jpeg_settings_t)); 1407 1408 settings->out_buf_index = index; 1409 1410 settings->jpeg_orientation = 0; 1411 if (IS_PARM_VALID(CAM_INTF_META_JPEG_ORIENTATION, metadata)) { 1412 int32_t *orientation = (int32_t *)POINTER_OF( 1413 CAM_INTF_META_JPEG_ORIENTATION, metadata); 1414 settings->jpeg_orientation = *orientation; 1415 } 1416 1417 settings->jpeg_quality = 85; 1418 if (IS_PARM_VALID(CAM_INTF_META_JPEG_QUALITY, metadata)) { 1419 uint8_t *quality = (uint8_t *)POINTER_OF( 1420 CAM_INTF_META_JPEG_QUALITY, metadata); 1421 settings->jpeg_quality = *quality; 1422 } 1423 1424 if (IS_PARM_VALID(CAM_INTF_META_JPEG_THUMB_QUALITY, metadata)) { 1425 uint8_t *quality = (uint8_t *)POINTER_OF( 1426 CAM_INTF_META_JPEG_THUMB_QUALITY, metadata); 1427 settings->jpeg_thumb_quality = *quality; 1428 } 1429 1430 if (IS_PARM_VALID(CAM_INTF_META_JPEG_THUMB_SIZE, metadata)) { 1431 cam_dimension_t *dimension = (cam_dimension_t *)POINTER_OF( 1432 CAM_INTF_META_JPEG_THUMB_SIZE, metadata); 1433 settings->thumbnail_size = *dimension; 1434 } 1435 1436 settings->gps_timestamp_valid = 0; 1437 
if (IS_PARM_VALID(CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata)) { 1438 int64_t *timestamp = (int64_t *)POINTER_OF( 1439 CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata); 1440 settings->gps_timestamp = *timestamp; 1441 settings->gps_timestamp_valid = 1; 1442 } 1443 1444 settings->gps_coordinates_valid = 0; 1445 if (IS_PARM_VALID(CAM_INTF_META_JPEG_GPS_COORDINATES, metadata)) { 1446 double *coordinates = (double *)POINTER_OF( 1447 CAM_INTF_META_JPEG_GPS_COORDINATES, metadata); 1448 memcpy(settings->gps_coordinates, coordinates, 3*sizeof(double)); 1449 settings->gps_coordinates_valid = 1; 1450 } 1451 1452 if (IS_PARM_VALID(CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata)) { 1453 char *proc_methods = (char *)POINTER_OF( 1454 CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata); 1455 memset(settings->gps_processing_method, 0, 1456 sizeof(settings->gps_processing_method)); 1457 strncpy(settings->gps_processing_method, proc_methods, 1458 sizeof(settings->gps_processing_method)); 1459 } 1460 1461 return m_postprocessor.processJpegSettingData(settings); 1462 } 1463 1464 /*=========================================================================== 1465 * FUNCTION : getRational 1466 * 1467 * DESCRIPTION: compose rational struct 1468 * 1469 * PARAMETERS : 1470 * @rat : ptr to struct to store rational info 1471 * @num :num of the rational 1472 * @denom : denom of the rational 1473 * 1474 * RETURN : int32_t type of status 1475 * NO_ERROR -- success 1476 * none-zero failure code 1477 *==========================================================================*/ 1478 int32_t getRational(rat_t *rat, int num, int denom) 1479 { 1480 if (NULL == rat) { 1481 ALOGE("%s: NULL rat input", __func__); 1482 return BAD_VALUE; 1483 } 1484 rat->num = num; 1485 rat->denom = denom; 1486 return NO_ERROR; 1487 } 1488 1489 /*=========================================================================== 1490 * FUNCTION : parseGPSCoordinate 1491 * 1492 * DESCRIPTION: parse GPS coordinate string 1493 * 1494 * PARAMETERS 
: 1495 * @coord_str : [input] coordinate string 1496 * @coord : [output] ptr to struct to store coordinate 1497 * 1498 * RETURN : int32_t type of status 1499 * NO_ERROR -- success 1500 * none-zero failure code 1501 *==========================================================================*/ 1502 int parseGPSCoordinate(const char *coord_str, rat_t* coord) 1503 { 1504 if(coord == NULL) { 1505 ALOGE("%s: error, invalid argument coord == NULL", __func__); 1506 return BAD_VALUE; 1507 } 1508 float degF = atof(coord_str); 1509 if (degF < 0) { 1510 degF = -degF; 1511 } 1512 float minF = (degF - (int) degF) * 60; 1513 float secF = (minF - (int) minF) * 60; 1514 1515 getRational(&coord[0], (int)degF, 1); 1516 getRational(&coord[1], (int)minF, 1); 1517 getRational(&coord[2], (int)(secF * 10000), 10000); 1518 return NO_ERROR; 1519 } 1520 1521 /*=========================================================================== 1522 * FUNCTION : getExifDateTime 1523 * 1524 * DESCRIPTION: query exif date time 1525 * 1526 * PARAMETERS : 1527 * @dateTime : string to store exif date time 1528 * @subsecTime : string to store exif subsec time 1529 * @count : length of the dateTime string 1530 * @subsecCount: length of the subsecTime string 1531 * 1532 * RETURN : int32_t type of status 1533 * NO_ERROR -- success 1534 * none-zero failure code 1535 *==========================================================================*/ 1536 int32_t getExifDateTime(char *dateTime, char *subsecTime, 1537 uint32_t &count, uint32_t &subsecCount) 1538 { 1539 //get time and date from system 1540 struct timeval tv; 1541 struct tm *timeinfo; 1542 1543 gettimeofday(&tv, NULL); 1544 timeinfo = localtime(&tv.tv_sec); 1545 //Write datetime according to EXIF Spec 1546 //"YYYY:MM:DD HH:MM:SS" (20 chars including \0) 1547 snprintf(dateTime, 20, "%04d:%02d:%02d %02d:%02d:%02d", 1548 timeinfo->tm_year + 1900, timeinfo->tm_mon + 1, 1549 timeinfo->tm_mday, timeinfo->tm_hour, 1550 timeinfo->tm_min, timeinfo->tm_sec); 1551 
count = 20; 1552 1553 //Write subsec according to EXIF Sepc 1554 snprintf(subsecTime, 7, "%06ld", tv.tv_usec); 1555 subsecCount = 7; 1556 return NO_ERROR; 1557 } 1558 1559 /*=========================================================================== 1560 * FUNCTION : getExifFocalLength 1561 * 1562 * DESCRIPTION: get exif focal lenght 1563 * 1564 * PARAMETERS : 1565 * @focalLength : ptr to rational strcut to store focal lenght 1566 * 1567 * RETURN : int32_t type of status 1568 * NO_ERROR -- success 1569 * none-zero failure code 1570 *==========================================================================*/ 1571 int32_t getExifFocalLength(rat_t *focalLength, float value) 1572 { 1573 int focalLengthValue = 1574 (int)(value * FOCAL_LENGTH_DECIMAL_PRECISION); 1575 return getRational(focalLength, focalLengthValue, FOCAL_LENGTH_DECIMAL_PRECISION); 1576 } 1577 1578 /*=========================================================================== 1579 * FUNCTION : getExifExpTimeInfo 1580 * 1581 * DESCRIPTION: get exif exposure time information 1582 * 1583 * PARAMETERS : 1584 * @expoTimeInfo : expousure time value 1585 * RETURN : nt32_t type of status 1586 * NO_ERROR -- success 1587 * none-zero failure code 1588 *==========================================================================*/ 1589 int32_t getExifExpTimeInfo(rat_t *expoTimeInfo, int64_t value) 1590 { 1591 1592 int cal_exposureTime; 1593 if (value != 0) 1594 cal_exposureTime = value; 1595 else 1596 cal_exposureTime = 60; 1597 1598 return getRational(expoTimeInfo, 1, cal_exposureTime); 1599 } 1600 1601 /*=========================================================================== 1602 * FUNCTION : getExifGpsProcessingMethod 1603 * 1604 * DESCRIPTION: get GPS processing method 1605 * 1606 * PARAMETERS : 1607 * @gpsProcessingMethod : string to store GPS process method 1608 * @count : lenght of the string 1609 * 1610 * RETURN : int32_t type of status 1611 * NO_ERROR -- success 1612 * none-zero failure code 1613 
*==========================================================================*/ 1614 int32_t getExifGpsProcessingMethod(char *gpsProcessingMethod, 1615 uint32_t &count, char* value) 1616 { 1617 if(value != NULL) { 1618 memcpy(gpsProcessingMethod, ExifAsciiPrefix, EXIF_ASCII_PREFIX_SIZE); 1619 count = EXIF_ASCII_PREFIX_SIZE; 1620 strncpy(gpsProcessingMethod + EXIF_ASCII_PREFIX_SIZE, value, strlen(value)); 1621 count += strlen(value); 1622 gpsProcessingMethod[count++] = '\0'; // increase 1 for the last NULL char 1623 return NO_ERROR; 1624 } else { 1625 return BAD_VALUE; 1626 } 1627 } 1628 1629 /*=========================================================================== 1630 * FUNCTION : getExifLatitude 1631 * 1632 * DESCRIPTION: get exif latitude 1633 * 1634 * PARAMETERS : 1635 * @latitude : ptr to rational struct to store latitude info 1636 * @ladRef : charater to indicate latitude reference 1637 * 1638 * RETURN : int32_t type of status 1639 * NO_ERROR -- success 1640 * none-zero failure code 1641 *==========================================================================*/ 1642 int32_t getExifLatitude(rat_t *latitude, 1643 char *latRef, double value) 1644 { 1645 char str[30]; 1646 snprintf(str, sizeof(str), "%f", value); 1647 if(str != NULL) { 1648 parseGPSCoordinate(str, latitude); 1649 1650 //set Latitude Ref 1651 float latitudeValue = strtof(str, 0); 1652 if(latitudeValue < 0.0f) { 1653 latRef[0] = 'S'; 1654 } else { 1655 latRef[0] = 'N'; 1656 } 1657 latRef[1] = '\0'; 1658 return NO_ERROR; 1659 }else{ 1660 return BAD_VALUE; 1661 } 1662 } 1663 1664 /*=========================================================================== 1665 * FUNCTION : getExifLongitude 1666 * 1667 * DESCRIPTION: get exif longitude 1668 * 1669 * PARAMETERS : 1670 * @longitude : ptr to rational struct to store longitude info 1671 * @lonRef : charater to indicate longitude reference 1672 * 1673 * RETURN : int32_t type of status 1674 * NO_ERROR -- success 1675 * none-zero failure code 1676 
*==========================================================================*/ 1677 int32_t getExifLongitude(rat_t *longitude, 1678 char *lonRef, double value) 1679 { 1680 char str[30]; 1681 snprintf(str, sizeof(str), "%f", value); 1682 if(str != NULL) { 1683 parseGPSCoordinate(str, longitude); 1684 1685 //set Longitude Ref 1686 float longitudeValue = strtof(str, 0); 1687 if(longitudeValue < 0.0f) { 1688 lonRef[0] = 'W'; 1689 } else { 1690 lonRef[0] = 'E'; 1691 } 1692 lonRef[1] = '\0'; 1693 return NO_ERROR; 1694 }else{ 1695 return BAD_VALUE; 1696 } 1697 } 1698 1699 /*=========================================================================== 1700 * FUNCTION : getExifAltitude 1701 * 1702 * DESCRIPTION: get exif altitude 1703 * 1704 * PARAMETERS : 1705 * @altitude : ptr to rational struct to store altitude info 1706 * @altRef : charater to indicate altitude reference 1707 * 1708 * RETURN : int32_t type of status 1709 * NO_ERROR -- success 1710 * none-zero failure code 1711 *==========================================================================*/ 1712 int32_t getExifAltitude(rat_t *altitude, 1713 char *altRef, double value) 1714 { 1715 char str[30]; 1716 snprintf(str, sizeof(str), "%f", value); 1717 if(str != NULL) { 1718 double value = atof(str); 1719 *altRef = 0; 1720 if(value < 0){ 1721 *altRef = 1; 1722 value = -value; 1723 } 1724 return getRational(altitude, value*1000, 1000); 1725 }else{ 1726 return BAD_VALUE; 1727 } 1728 } 1729 1730 /*=========================================================================== 1731 * FUNCTION : getExifGpsDateTimeStamp 1732 * 1733 * DESCRIPTION: get exif GPS date time stamp 1734 * 1735 * PARAMETERS : 1736 * @gpsDateStamp : GPS date time stamp string 1737 * @bufLen : length of the string 1738 * @gpsTimeStamp : ptr to rational struct to store time stamp info 1739 * 1740 * RETURN : int32_t type of status 1741 * NO_ERROR -- success 1742 * none-zero failure code 1743 
*==========================================================================*/ 1744 int32_t getExifGpsDateTimeStamp(char *gpsDateStamp, 1745 uint32_t bufLen, 1746 rat_t *gpsTimeStamp, int64_t value) 1747 { 1748 char str[30]; 1749 snprintf(str, sizeof(str), "%lld", value); 1750 if(str != NULL) { 1751 time_t unixTime = (time_t)atol(str); 1752 struct tm *UTCTimestamp = gmtime(&unixTime); 1753 1754 strftime(gpsDateStamp, bufLen, "%Y:%m:%d", UTCTimestamp); 1755 1756 getRational(&gpsTimeStamp[0], UTCTimestamp->tm_hour, 1); 1757 getRational(&gpsTimeStamp[1], UTCTimestamp->tm_min, 1); 1758 getRational(&gpsTimeStamp[2], UTCTimestamp->tm_sec, 1); 1759 1760 return NO_ERROR; 1761 } else { 1762 return BAD_VALUE; 1763 } 1764 } 1765 1766 int32_t getExifExposureValue(srat_t* exposure_val, int32_t exposure_comp, 1767 cam_rational_type_t step) 1768 { 1769 exposure_val->num = exposure_comp * step.numerator; 1770 exposure_val->denom = step.denominator; 1771 return 0; 1772 } 1773 /*=========================================================================== 1774 * FUNCTION : getExifData 1775 * 1776 * DESCRIPTION: get exif data to be passed into jpeg encoding 1777 * 1778 * PARAMETERS : none 1779 * 1780 * RETURN : exif data from user setting and GPS 1781 *==========================================================================*/ 1782 QCamera3Exif *QCamera3PicChannel::getExifData(metadata_buffer_t *metadata, 1783 jpeg_settings_t *jpeg_settings) 1784 { 1785 QCamera3Exif *exif = new QCamera3Exif(); 1786 if (exif == NULL) { 1787 ALOGE("%s: No memory for QCamera3Exif", __func__); 1788 return NULL; 1789 } 1790 1791 int32_t rc = NO_ERROR; 1792 uint32_t count = 0; 1793 1794 // add exif entries 1795 { 1796 char dateTime[20]; 1797 char subsecTime[7]; 1798 uint32_t subsecCount; 1799 memset(dateTime, 0, sizeof(dateTime)); 1800 memset(subsecTime, 0, sizeof(subsecTime)); 1801 count = 20; 1802 subsecCount = 7; 1803 rc = getExifDateTime(dateTime, subsecTime, count, subsecCount); 1804 if(rc == NO_ERROR) 
{ 1805 exif->addEntry(EXIFTAGID_DATE_TIME, 1806 EXIF_ASCII, 1807 count, 1808 (void *)dateTime); 1809 exif->addEntry(EXIFTAGID_EXIF_DATE_TIME_ORIGINAL, 1810 EXIF_ASCII, 1811 count, 1812 (void *)dateTime); 1813 exif->addEntry(EXIFTAGID_EXIF_DATE_TIME_DIGITIZED, 1814 EXIF_ASCII, 1815 count, 1816 (void *)dateTime); 1817 exif->addEntry(EXIFTAGID_SUBSEC_TIME, 1818 EXIF_ASCII, 1819 subsecCount, 1820 (void *)subsecTime); 1821 exif->addEntry(EXIFTAGID_SUBSEC_TIME_ORIGINAL, 1822 EXIF_ASCII, 1823 subsecCount, 1824 (void *)subsecTime); 1825 exif->addEntry(EXIFTAGID_SUBSEC_TIME_DIGITIZED, 1826 EXIF_ASCII, 1827 subsecCount, 1828 (void *)subsecTime); 1829 } else { 1830 ALOGE("%s: getExifDateTime failed", __func__); 1831 } 1832 } 1833 1834 if (IS_PARM_VALID(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata)) { 1835 float focal_length = *(float *)POINTER_OF( 1836 CAM_INTF_META_LENS_FOCAL_LENGTH, metadata); 1837 rat_t focalLength; 1838 rc = getExifFocalLength(&focalLength, focal_length); 1839 if (rc == NO_ERROR) { 1840 exif->addEntry(EXIFTAGID_FOCAL_LENGTH, 1841 EXIF_RATIONAL, 1842 1, 1843 (void *)&(focalLength)); 1844 } else { 1845 ALOGE("%s: getExifFocalLength failed", __func__); 1846 } 1847 } 1848 1849 if (IS_PARM_VALID(CAM_INTF_META_SENSOR_SENSITIVITY, metadata)) { 1850 int16_t isoSpeed = *(int32_t *)POINTER_OF( 1851 CAM_INTF_META_SENSOR_SENSITIVITY, metadata); 1852 exif->addEntry(EXIFTAGID_ISO_SPEED_RATING, 1853 EXIF_SHORT, 1854 1, 1855 (void *)&(isoSpeed)); 1856 } 1857 1858 if (IS_PARM_VALID(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata)) { 1859 int64_t sensor_exposure_time = *(int64_t *)POINTER_OF( 1860 CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata); 1861 rat_t sensorExpTime; 1862 rc = getExifExpTimeInfo(&sensorExpTime, sensor_exposure_time); 1863 if (rc == NO_ERROR){ 1864 exif->addEntry(EXIFTAGID_EXPOSURE_TIME, 1865 EXIF_RATIONAL, 1866 1, 1867 (void *)&(sensorExpTime)); 1868 } else { 1869 ALOGE("%s: getExifExpTimeInfo failed", __func__); 1870 } 1871 } 1872 1873 if 
(strlen(jpeg_settings->gps_processing_method) > 0) { 1874 char gpsProcessingMethod[ 1875 EXIF_ASCII_PREFIX_SIZE + GPS_PROCESSING_METHOD_SIZE]; 1876 count = 0; 1877 rc = getExifGpsProcessingMethod(gpsProcessingMethod, 1878 count, jpeg_settings->gps_processing_method); 1879 if(rc == NO_ERROR) { 1880 exif->addEntry(EXIFTAGID_GPS_PROCESSINGMETHOD, 1881 EXIF_ASCII, 1882 count, 1883 (void *)gpsProcessingMethod); 1884 } else { 1885 ALOGE("%s: getExifGpsProcessingMethod failed", __func__); 1886 } 1887 } 1888 1889 if (jpeg_settings->gps_coordinates_valid) { 1890 1891 //latitude 1892 rat_t latitude[3]; 1893 char latRef[2]; 1894 rc = getExifLatitude(latitude, latRef, 1895 jpeg_settings->gps_coordinates[0]); 1896 if(rc == NO_ERROR) { 1897 exif->addEntry(EXIFTAGID_GPS_LATITUDE, 1898 EXIF_RATIONAL, 1899 3, 1900 (void *)latitude); 1901 exif->addEntry(EXIFTAGID_GPS_LATITUDE_REF, 1902 EXIF_ASCII, 1903 2, 1904 (void *)latRef); 1905 } else { 1906 ALOGE("%s: getExifLatitude failed", __func__); 1907 } 1908 1909 //longitude 1910 rat_t longitude[3]; 1911 char lonRef[2]; 1912 rc = getExifLongitude(longitude, lonRef, 1913 jpeg_settings->gps_coordinates[1]); 1914 if(rc == NO_ERROR) { 1915 exif->addEntry(EXIFTAGID_GPS_LONGITUDE, 1916 EXIF_RATIONAL, 1917 3, 1918 (void *)longitude); 1919 1920 exif->addEntry(EXIFTAGID_GPS_LONGITUDE_REF, 1921 EXIF_ASCII, 1922 2, 1923 (void *)lonRef); 1924 } else { 1925 ALOGE("%s: getExifLongitude failed", __func__); 1926 } 1927 1928 //altitude 1929 rat_t altitude; 1930 char altRef; 1931 rc = getExifAltitude(&altitude, &altRef, 1932 jpeg_settings->gps_coordinates[2]); 1933 if(rc == NO_ERROR) { 1934 exif->addEntry(EXIFTAGID_GPS_ALTITUDE, 1935 EXIF_RATIONAL, 1936 1, 1937 (void *)&(altitude)); 1938 1939 exif->addEntry(EXIFTAGID_GPS_ALTITUDE_REF, 1940 EXIF_BYTE, 1941 1, 1942 (void *)&altRef); 1943 } else { 1944 ALOGE("%s: getExifAltitude failed", __func__); 1945 } 1946 } 1947 1948 if (jpeg_settings->gps_timestamp_valid) { 1949 1950 char gpsDateStamp[20]; 1951 rat_t 
gpsTimeStamp[3]; 1952 rc = getExifGpsDateTimeStamp(gpsDateStamp, 20, gpsTimeStamp, 1953 jpeg_settings->gps_timestamp); 1954 if(rc == NO_ERROR) { 1955 exif->addEntry(EXIFTAGID_GPS_DATESTAMP, 1956 EXIF_ASCII, 1957 strlen(gpsDateStamp) + 1, 1958 (void *)gpsDateStamp); 1959 1960 exif->addEntry(EXIFTAGID_GPS_TIMESTAMP, 1961 EXIF_RATIONAL, 1962 3, 1963 (void *)gpsTimeStamp); 1964 } else { 1965 ALOGE("%s: getExifGpsDataTimeStamp failed", __func__); 1966 } 1967 } 1968 1969 if (IS_PARM_VALID(CAM_INTF_PARM_EV, metadata) && 1970 IS_PARM_VALID(CAM_INTF_PARM_EV_STEP, metadata)) { 1971 int32_t exposure_comp = *(int32_t *)POINTER_OF( 1972 CAM_INTF_PARM_EV, metadata); 1973 cam_rational_type_t comp_step = *(cam_rational_type_t *)POINTER_OF( 1974 CAM_INTF_PARM_EV_STEP, metadata); 1975 srat_t exposure_val; 1976 rc = getExifExposureValue(&exposure_val, exposure_comp, comp_step); 1977 if(rc == NO_ERROR) { 1978 exif->addEntry(EXIFTAGID_EXPOSURE_BIAS_VALUE, 1979 EXIF_SRATIONAL, 1980 1, 1981 (void *)(&exposure_val)); 1982 } else { 1983 ALOGE("%s: getExifExposureValue failed ", __func__); 1984 } 1985 } 1986 1987 char value[PROPERTY_VALUE_MAX]; 1988 if (property_get("ro.product.manufacturer", value, "QCOM-AA") > 0) { 1989 exif->addEntry(EXIFTAGID_MAKE, 1990 EXIF_ASCII, 1991 strlen(value) + 1, 1992 (void *)value); 1993 } else { 1994 ALOGE("%s: getExifMaker failed", __func__); 1995 } 1996 1997 if (property_get("ro.product.model", value, "QCAM-AA") > 0) { 1998 exif->addEntry(EXIFTAGID_MODEL, 1999 EXIF_ASCII, 2000 strlen(value) + 1, 2001 (void *)value); 2002 } else { 2003 ALOGE("%s: getExifModel failed", __func__); 2004 } 2005 2006 return exif; 2007 } 2008 2009 void QCamera3PicChannel::overrideYuvSize(uint32_t width, uint32_t height) 2010 { 2011 mYuvWidth = width; 2012 mYuvHeight = height; 2013 } 2014 2015 int QCamera3PicChannel::kMaxBuffers = 1; 2016 2017 /*=========================================================================== 2018 * FUNCTION : QCamera3ReprocessChannel 2019 * 2020 * 
DESCRIPTION: constructor of QCamera3ReprocessChannel 2021 * 2022 * PARAMETERS : 2023 * @cam_handle : camera handle 2024 * @cam_ops : ptr to camera ops table 2025 * @pp_mask : post-proccess feature mask 2026 * 2027 * RETURN : none 2028 *==========================================================================*/ 2029 QCamera3ReprocessChannel::QCamera3ReprocessChannel(uint32_t cam_handle, 2030 mm_camera_ops_t *cam_ops, 2031 channel_cb_routine cb_routine, 2032 cam_padding_info_t *paddingInfo, 2033 void *userData, void *ch_hdl) : 2034 QCamera3Channel(cam_handle, cam_ops, cb_routine, paddingInfo, userData), 2035 picChHandle(ch_hdl), 2036 m_pSrcChannel(NULL), 2037 m_pMetaChannel(NULL), 2038 mMemory(NULL) 2039 { 2040 memset(mSrcStreamHandles, 0, sizeof(mSrcStreamHandles)); 2041 } 2042 2043 2044 /*=========================================================================== 2045 * FUNCTION : QCamera3ReprocessChannel 2046 * 2047 * DESCRIPTION: constructor of QCamera3ReprocessChannel 2048 * 2049 * PARAMETERS : 2050 * @cam_handle : camera handle 2051 * @cam_ops : ptr to camera ops table 2052 * @pp_mask : post-proccess feature mask 2053 * 2054 * RETURN : none 2055 *==========================================================================*/ 2056 int32_t QCamera3ReprocessChannel::initialize() 2057 { 2058 int32_t rc = NO_ERROR; 2059 mm_camera_channel_attr_t attr; 2060 2061 memset(&attr, 0, sizeof(mm_camera_channel_attr_t)); 2062 attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS; 2063 attr.max_unmatched_frames = 1; 2064 2065 rc = init(&attr, NULL); 2066 if (rc < 0) { 2067 ALOGE("%s: init failed", __func__); 2068 } 2069 return rc; 2070 } 2071 2072 2073 /*=========================================================================== 2074 * FUNCTION : QCamera3ReprocessChannel 2075 * 2076 * DESCRIPTION: constructor of QCamera3ReprocessChannel 2077 * 2078 * PARAMETERS : 2079 * @cam_handle : camera handle 2080 * @cam_ops : ptr to camera ops table 2081 * @pp_mask : post-proccess 
feature mask 2082 * 2083 * RETURN : none 2084 *==========================================================================*/ 2085 void QCamera3ReprocessChannel::streamCbRoutine(mm_camera_super_buf_t *super_frame, 2086 QCamera3Stream *stream) 2087 { 2088 //Got the pproc data callback. Now send to jpeg encoding 2089 uint8_t frameIndex; 2090 mm_camera_super_buf_t* frame = NULL; 2091 QCamera3PicChannel *obj = (QCamera3PicChannel *)picChHandle; 2092 2093 if(!super_frame) { 2094 ALOGE("%s: Invalid Super buffer",__func__); 2095 return; 2096 } 2097 2098 if(super_frame->num_bufs != 1) { 2099 ALOGE("%s: Multiple streams are not supported",__func__); 2100 return; 2101 } 2102 if(super_frame->bufs[0] == NULL ) { 2103 ALOGE("%s: Error, Super buffer frame does not contain valid buffer", 2104 __func__); 2105 return; 2106 } 2107 2108 frameIndex = (uint8_t)super_frame->bufs[0]->buf_idx; 2109 frame = (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t)); 2110 if (frame == NULL) { 2111 ALOGE("%s: Error allocating memory to save received_frame structure.", 2112 __func__); 2113 if(stream) { 2114 stream->bufDone(frameIndex); 2115 } 2116 return; 2117 } 2118 *frame = *super_frame; 2119 obj->m_postprocessor.processPPData(frame); 2120 free(super_frame); 2121 return; 2122 } 2123 2124 /*=========================================================================== 2125 * FUNCTION : QCamera3ReprocessChannel 2126 * 2127 * DESCRIPTION: default constructor of QCamera3ReprocessChannel 2128 * 2129 * PARAMETERS : none 2130 * 2131 * RETURN : none 2132 *==========================================================================*/ 2133 QCamera3ReprocessChannel::QCamera3ReprocessChannel() : 2134 m_pSrcChannel(NULL), 2135 m_pMetaChannel(NULL) 2136 { 2137 } 2138 2139 /*=========================================================================== 2140 * FUNCTION : getStreamBufs 2141 * 2142 * DESCRIPTION: register the buffers of the reprocess channel 2143 * 2144 * PARAMETERS : none 2145 * 2146 * RETURN : 
QCamera3Memory * 2147 *==========================================================================*/ 2148 QCamera3Memory* QCamera3ReprocessChannel::getStreamBufs(uint32_t len) 2149 { 2150 int rc = 0; 2151 2152 mMemory = new QCamera3HeapMemory(); 2153 if (!mMemory) { 2154 ALOGE("%s: unable to create reproc memory", __func__); 2155 return NULL; 2156 } 2157 2158 //Queue YUV buffers in the beginning mQueueAll = true 2159 rc = mMemory->allocate(2, len, true); 2160 if (rc < 0) { 2161 ALOGE("%s: unable to allocate reproc memory", __func__); 2162 delete mMemory; 2163 mMemory = NULL; 2164 return NULL; 2165 } 2166 return mMemory; 2167 } 2168 2169 /*=========================================================================== 2170 * FUNCTION : getStreamBufs 2171 * 2172 * DESCRIPTION: register the buffers of the reprocess channel 2173 * 2174 * PARAMETERS : none 2175 * 2176 * RETURN : 2177 *==========================================================================*/ 2178 void QCamera3ReprocessChannel::putStreamBufs() 2179 { 2180 mMemory->deallocate(); 2181 delete mMemory; 2182 mMemory = NULL; 2183 } 2184 2185 /*=========================================================================== 2186 * FUNCTION : ~QCamera3ReprocessChannel 2187 * 2188 * DESCRIPTION: destructor of QCamera3ReprocessChannel 2189 * 2190 * PARAMETERS : none 2191 * 2192 * RETURN : none 2193 *==========================================================================*/ 2194 QCamera3ReprocessChannel::~QCamera3ReprocessChannel() 2195 { 2196 } 2197 2198 /*=========================================================================== 2199 * FUNCTION : getStreamBySrcHandle 2200 * 2201 * DESCRIPTION: find reprocess stream by its source stream handle 2202 * 2203 * PARAMETERS : 2204 * @srcHandle : source stream handle 2205 * 2206 * RETURN : ptr to reprocess stream if found. 
NULL if not found 2207 *==========================================================================*/ 2208 QCamera3Stream * QCamera3ReprocessChannel::getStreamBySrcHandle(uint32_t srcHandle) 2209 { 2210 QCamera3Stream *pStream = NULL; 2211 2212 for (int i = 0; i < m_numStreams; i++) { 2213 if (mSrcStreamHandles[i] == srcHandle) { 2214 pStream = mStreams[i]; 2215 break; 2216 } 2217 } 2218 return pStream; 2219 } 2220 2221 /*=========================================================================== 2222 * FUNCTION : getSrcStreamBySrcHandle 2223 * 2224 * DESCRIPTION: find source stream by source stream handle 2225 * 2226 * PARAMETERS : 2227 * @srcHandle : source stream handle 2228 * 2229 * RETURN : ptr to reprocess stream if found. NULL if not found 2230 *==========================================================================*/ 2231 QCamera3Stream * QCamera3ReprocessChannel::getSrcStreamBySrcHandle(uint32_t srcHandle) 2232 { 2233 QCamera3Stream *pStream = NULL; 2234 2235 for (int i = 0; i < m_numStreams; i++) { 2236 if (mSrcStreamHandles[i] == srcHandle) { 2237 pStream = m_pSrcChannel->getStreamByIndex(i); 2238 break; 2239 } 2240 } 2241 return pStream; 2242 } 2243 2244 /*=========================================================================== 2245 * FUNCTION : metadataBufDone 2246 * 2247 * DESCRIPTION: buf done method for a metadata buffer 2248 * 2249 * PARAMETERS : 2250 * @recvd_frame : received metadata frame 2251 * 2252 * RETURN : 2253 *==========================================================================*/ 2254 int32_t QCamera3ReprocessChannel::metadataBufDone(mm_camera_super_buf_t *recvd_frame) 2255 { 2256 int32_t rc; 2257 rc = ((QCamera3MetadataChannel*)m_pMetaChannel)->bufDone(recvd_frame); 2258 free(recvd_frame); 2259 recvd_frame = NULL; 2260 return rc; 2261 } 2262 2263 /*=========================================================================== 2264 * FUNCTION : doReprocess 2265 * 2266 * DESCRIPTION: request to do a reprocess on the frame 2267 * 
 * PARAMETERS :
 *   @frame      : frame to be performed a reprocess
 *   @meta_frame : metadata super buffer to attach to the request (may be NULL)
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int32_t QCamera3ReprocessChannel::doReprocess(mm_camera_super_buf_t *frame,
                                              mm_camera_super_buf_t *meta_frame)
{
    int32_t rc = 0;
    if (m_numStreams < 1) {
        ALOGE("%s: No reprocess stream is created", __func__);
        return -1;
    }
    if (m_pSrcChannel == NULL) {
        ALOGE("%s: No source channel for reprocess", __func__);
        return -1;
    }
    // Issue a DO_REPROCESS stream parameter for each input buffer that maps
    // to one of our reprocess streams; unmatched buffers are silently skipped.
    for (int i = 0; i < frame->num_bufs; i++) {
        QCamera3Stream *pStream = getStreamBySrcHandle(frame->bufs[i]->stream_id);
        if (pStream != NULL) {
            cam_stream_parm_buffer_t param;
            memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
            param.type = CAM_STREAM_PARAM_TYPE_DO_REPROCESS;
            param.reprocess.buf_index = frame->bufs[i]->buf_idx;
            if (meta_frame != NULL) {
                // Attach the metadata buffer so the backend can apply
                // per-frame parameters to this reprocess pass.
                param.reprocess.meta_present = 1;
                param.reprocess.meta_stream_handle = m_pMetaChannel->mStreams[0]->getMyServerID();
                param.reprocess.meta_buf_index = meta_frame->bufs[0]->buf_idx;
            }
            rc = pStream->setParameter(param);
            if (rc != NO_ERROR) {
                ALOGE("%s: stream setParameter for reprocess failed", __func__);
                break;
            }
        }
    }
    return rc;
}

/*===========================================================================
 * FUNCTION   : doReprocessOffline
 *
 * DESCRIPTION: request an offline reprocess: maps the input buffers as
 *              offline inputs, copies private metadata and crop info into
 *              the request, and issues a DO_REPROCESS parameter.
 *
 * PARAMETERS :
 *   @frame    : input super buffer (fds are mapped as offline inputs)
 *   @metadata : decoded metadata buffer providing private data and crop
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int32_t QCamera3ReprocessChannel::doReprocessOffline(mm_camera_super_buf_t *frame,
        metadata_buffer_t *metadata)
{
    int32_t rc = 0;
    OfflineBuffer mappedBuffer;
    if (m_numStreams < 1) {
        ALOGE("%s: No reprocess stream is created", __func__);
        return -1;
    }
    if (m_pSrcChannel == NULL) {
        ALOGE("%s: No source channel for reprocess", __func__);
        return -1;
    }

    // NOTE(review): buf_idx stays 0 for every input buffer — looks like a
    // single offline input slot per stream is assumed; confirm for the
    // multi-buffer case.
    uint32_t buf_idx = 0;
    for (int i = 0; i < frame->num_bufs; i++) {
        QCamera3Stream *pStream = getStreamBySrcHandle(frame->bufs[i]->stream_id);
        QCamera3Stream *pSrcStream = getSrcStreamBySrcHandle(frame->bufs[i]->stream_id);
        if (pStream != NULL && pSrcStream != NULL) {

            // NOTE(review): mapping is done on mStreams[i] while the request
            // below is issued on pStream (looked up by handle) — presumably
            // these coincide; verify when stream order can differ.
            rc = mStreams[i]->mapBuf(
                    CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF,
                    buf_idx, -1,
                    frame->bufs[i]->fd, frame->bufs[i]->frame_len);

            if (rc == NO_ERROR) {
                // Remember the mapping so stop() can unmap it later.
                memset(&mappedBuffer, 0, sizeof(OfflineBuffer));
                mappedBuffer.index = buf_idx;
                mappedBuffer.stream = pStream;
                mappedBuffer.type = CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF;
                mOfflineBuffers.push_back(mappedBuffer);

                cam_stream_parm_buffer_t param;
                memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
                param.type = CAM_STREAM_PARAM_TYPE_DO_REPROCESS;
                param.reprocess.buf_index = buf_idx;

                // Carry the vendor-private metadata payload along with the
                // request.
                param.reprocess.meta_present = 1;
                char* private_data = (char *)POINTER_OF(
                        CAM_INTF_META_PRIVATE_DATA, metadata);
                memcpy(param.reprocess.private_data, private_data,
                       MAX_METADATA_PAYLOAD_SIZE);

                // Find crop info for reprocess stream: match the source
                // stream's server ID against the per-stream crop entries.
                cam_crop_data_t *crop_data = (cam_crop_data_t *)
                        POINTER_OF(CAM_INTF_META_CROP_DATA, metadata);
                for (int j = 0; j < crop_data->num_of_streams; j++) {
                    if (crop_data->crop_info[j].stream_id ==
                            pSrcStream->getMyServerID()) {
                        param.reprocess.crop_rect =
                                crop_data->crop_info[j].crop;
                        break;
                    }
                }
                rc = pStream->setParameter(param);
                if (rc != NO_ERROR) {
                    ALOGE("%s: stream setParameter for reprocess failed", __func__);
                    break;
                }
            }
        }
    }
    return rc;
}

/*===========================================================================
 * FUNCTION   : stop
 *
 * DESCRIPTION: Unmap offline buffers and stop channel
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int32_t QCamera3ReprocessChannel::stop()
{
    // Release every offline input mapping recorded by doReprocessOffline;
    // unmap errors are logged but do not abort the teardown.
    if (!mOfflineBuffers.empty()) {
        QCamera3Stream *stream = NULL;
        List<OfflineBuffer>::iterator it = mOfflineBuffers.begin();
        int error = NO_ERROR;
        for( ; it != mOfflineBuffers.end(); it++) {
            stream = (*it).stream;
            if (NULL != stream) {
                error = stream->unmapBuf((*it).type,
                                         (*it).index,
                                         -1);
                if (NO_ERROR != error) {
                    ALOGE("%s: Error during offline buffer unmap %d",
                          __func__, error);
                }
            }
        }
        mOfflineBuffers.clear();
    }

    return QCamera3Channel::stop();
}

/*===========================================================================
 * FUNCTION   : doReprocess
 *
 * DESCRIPTION: request to do a reprocess on the frame
 *
 * PARAMETERS :
 *   @buf_fd     : fd to the input buffer that needs reprocess
 *   @buf_length : length of the input buffer
 *   @ret_val    : result of reprocess.
 *                 Example: Could be faceID in case of register face image.
2419 * 2420 * RETURN : int32_t type of status 2421 * NO_ERROR -- success 2422 * none-zero failure code 2423 *==========================================================================*/ 2424 int32_t QCamera3ReprocessChannel::doReprocess(int buf_fd, 2425 uint32_t buf_length, 2426 int32_t &ret_val, 2427 mm_camera_super_buf_t *meta_frame) 2428 { 2429 int32_t rc = 0; 2430 if (m_numStreams < 1) { 2431 ALOGE("%s: No reprocess stream is created", __func__); 2432 return -1; 2433 } 2434 if (meta_frame == NULL) { 2435 ALOGE("%s: Did not get corresponding metadata in time", __func__); 2436 return -1; 2437 } 2438 2439 uint32_t buf_idx = 0; 2440 for (int i = 0; i < m_numStreams; i++) { 2441 rc = mStreams[i]->mapBuf(CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF, 2442 buf_idx, -1, 2443 buf_fd, buf_length); 2444 2445 if (rc == NO_ERROR) { 2446 cam_stream_parm_buffer_t param; 2447 memset(¶m, 0, sizeof(cam_stream_parm_buffer_t)); 2448 param.type = CAM_STREAM_PARAM_TYPE_DO_REPROCESS; 2449 param.reprocess.buf_index = buf_idx; 2450 param.reprocess.meta_present = 1; 2451 param.reprocess.meta_stream_handle = m_pMetaChannel->mStreams[0]->getMyServerID(); 2452 param.reprocess.meta_buf_index = meta_frame->bufs[0]->buf_idx; 2453 rc = mStreams[i]->setParameter(param); 2454 if (rc == NO_ERROR) { 2455 ret_val = param.reprocess.ret_val; 2456 } 2457 mStreams[i]->unmapBuf(CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF, 2458 buf_idx, -1); 2459 } 2460 } 2461 return rc; 2462 } 2463 2464 /*=========================================================================== 2465 * FUNCTION : addReprocStreamsFromSource 2466 * 2467 * DESCRIPTION: add reprocess streams from input source channel 2468 * 2469 * PARAMETERS : 2470 * @config : pp feature configuration 2471 * @pSrcChannel : ptr to input source channel that needs reprocess 2472 * @pMetaChannel : ptr to metadata channel to get corresp. 
metadata
 *   @offline      : configure for offline reprocessing
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int32_t QCamera3ReprocessChannel::addReprocStreamsFromSource(cam_pp_feature_config_t &pp_config,
                                                             QCamera3Channel *pSrcChannel,
                                                             QCamera3Channel *pMetaChannel)
{
    int32_t rc = 0;
    // The reprocess input is modeled on the source channel's first stream.
    QCamera3Stream *pSrcStream = pSrcChannel->getStreamByIndex(0);
    if (pSrcStream == NULL) {
        ALOGE("%s: source channel doesn't have a stream", __func__);
        return BAD_VALUE;
    }
    cam_stream_reproc_config_t reprocess_config;
    cam_dimension_t streamDim;
    cam_stream_type_t streamType;
    cam_format_t streamFormat;
    cam_frame_len_offset_t frameOffset;
    int num_buffers = 2;

    // Clone format/dimension/plane layout from the source stream into the
    // offline reprocess configuration.
    streamType = CAM_STREAM_TYPE_OFFLINE_PROC;
    pSrcStream->getFormat(streamFormat);
    pSrcStream->getFrameDimension(streamDim);
    pSrcStream->getFrameOffset(frameOffset);
    reprocess_config.pp_type = CAM_OFFLINE_REPROCESS_TYPE;

    reprocess_config.offline.input_fmt = streamFormat;
    reprocess_config.offline.input_dim = streamDim;
    reprocess_config.offline.input_buf_planes.plane_info = frameOffset;
    reprocess_config.offline.num_of_bufs = num_buffers;
    reprocess_config.offline.input_stream_type = pSrcStream->getMyType();


    reprocess_config.pp_feature_config = pp_config;
    // NOTE(review): no bounds check of m_numStreams against the handle
    // array capacity before this write — confirm callers cannot overrun.
    mSrcStreamHandles[m_numStreams] = pSrcStream->getMyHandle();

    // pp feature config
    if (pp_config.feature_mask & CAM_QCOM_FEATURE_ROTATION) {
        if (pp_config.rotation == ROTATE_90 ||
            pp_config.rotation == ROTATE_270) {
            // rotated by 90 or 270, need to switch width and height
            int32_t temp = streamDim.height;
            streamDim.height = streamDim.width;
            streamDim.width = temp;
        }
    }

    QCamera3Stream *pStream = new QCamera3Stream(m_camHandle,
                                                 m_handle,
                                                 m_camOps,
                                                 mPaddingInfo,
                                                 (QCamera3Channel*)this);
    if (pStream == NULL) {
        ALOGE("%s: No mem for Stream", __func__);
        return NO_MEMORY;
    }

    rc = pStream->init(streamType, streamFormat, streamDim, &reprocess_config,
                       num_buffers,QCamera3Channel::streamCbRoutine, this);


    if (rc == 0) {
        mStreams[m_numStreams] = pStream;
        m_numStreams++;
    } else {
        ALOGE("%s: failed to create reprocess stream", __func__);
        delete pStream;
    }

    // Source/meta channels are recorded only on success so the lookup
    // helpers can rely on them being consistent with mStreams.
    if (rc == NO_ERROR) {
        m_pSrcChannel = pSrcChannel;
        m_pMetaChannel = pMetaChannel;
    }
    // NOTE(review): request_super_buf is issued even when stream init
    // failed above — presumably harmless, but verify intended.
    if(m_camOps->request_super_buf(m_camHandle,m_handle,1) < 0) {
        ALOGE("%s: Request for super buffer failed",__func__);
    }
    return rc;
}

// Fixed VGA dimension used for the support (callback) stream.
cam_dimension_t QCamera3SupportChannel::kDim = {640, 480};

/*===========================================================================
 * FUNCTION   : QCamera3SupportChannel
 *
 * DESCRIPTION: constructor; no channel callback is registered since the
 *              support stream's frames are consumed internally.
 *==========================================================================*/
QCamera3SupportChannel::QCamera3SupportChannel(uint32_t cam_handle,
                    mm_camera_ops_t *cam_ops,
                    cam_padding_info_t *paddingInfo,
                    void *userData) :
                        QCamera3Channel(cam_handle, cam_ops,
                                NULL, paddingInfo, userData),
                        mMemory(NULL)
{
}

/*===========================================================================
 * FUNCTION   : ~QCamera3SupportChannel
 *
 * DESCRIPTION: destructor; stops the channel if still active and releases
 *              the heap-backed stream buffers.
 *==========================================================================*/
QCamera3SupportChannel::~QCamera3SupportChannel()
{
    if (m_bIsActive)
        stop();

    if (mMemory) {
        mMemory->deallocate();
        delete mMemory;
        mMemory = NULL;
    }
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: one-time setup — init the channel and add a single
 *              CALLBACK stream (hardcoded VGA NV21 for now).
 *
 * RETURN     : 0 on success, negative errno-style code on failure
 *==========================================================================*/
int32_t QCamera3SupportChannel::initialize()
{
    int32_t rc;

    if (mMemory || m_numStreams > 0) {
        ALOGE("%s: Support channel already initialized", __func__);
        return -EINVAL;
    }

    rc = init(NULL, NULL);
    if (rc < 0) {
        ALOGE("%s: init failed", __func__);
        return rc;
    }

    // Hardcode to VGA size for now
    rc = QCamera3Channel::addStream(CAM_STREAM_TYPE_CALLBACK,
        CAM_FORMAT_YUV_420_NV21, kDim, MIN_STREAMING_BUFFER_NUM);
    if (rc < 0) {
        ALOGE("%s: addStream failed", __func__);
    }
    return rc;
}

/*===========================================================================
 * FUNCTION   : request
 *
 * DESCRIPTION: no-op — the support channel does not serve framework
 *              capture requests.
 *==========================================================================*/
int32_t QCamera3SupportChannel::request(buffer_handle_t * /*buffer*/,
                                        uint32_t /*frameNumber*/)
{
    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : streamCbRoutine
 *
 * DESCRIPTION: frame callback — the frame is immediately recycled back to
 *              the stream; nothing is delivered to the framework.
 *==========================================================================*/
void QCamera3SupportChannel::streamCbRoutine(
                        mm_camera_super_buf_t *super_frame,
                        QCamera3Stream * /*stream*/)
{
    if (super_frame == NULL || super_frame->num_bufs != 1) {
        ALOGE("%s: super_frame is not valid", __func__);
        return;
    }
    // Return the buffer to the stream and free the super-buffer wrapper.
    bufDone(super_frame);
    free(super_frame);
}

/*===========================================================================
 * FUNCTION   : getStreamBufs
 *
 * DESCRIPTION: allocate heap-backed stream buffers for the support stream.
 *
 * RETURN     : buffer group on success, NULL on allocation failure
 *==========================================================================*/
QCamera3Memory* QCamera3SupportChannel::getStreamBufs(uint32_t len)
{
    int rc;

    mMemory = new QCamera3HeapMemory();
    if (!mMemory) {
        ALOGE("%s: unable to create heap memory", __func__);
        return NULL;
    }
    rc = mMemory->allocate(MIN_STREAMING_BUFFER_NUM, len, true);
    if (rc < 0) {
        ALOGE("%s: unable to allocate heap memory", __func__);
        delete mMemory;
        mMemory = NULL;
        return NULL;
    }
    return mMemory;
}

/*===========================================================================
 * FUNCTION   : putStreamBufs
 *
 * DESCRIPTION: release the support stream's heap buffers.
 *
 * NOTE(review): mMemory is dereferenced without a NULL check — presumably
 * only called after a successful getStreamBufs; confirm against callers.
 *==========================================================================*/
void QCamera3SupportChannel::putStreamBufs()
{
    mMemory->deallocate();
    delete mMemory;
    mMemory = NULL;
}

}; // namespace qcamera