1 /* 2 * Copyright (C) 2011 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 /** 17 ************************************************************************* 18 * @file VideoEditorVideoEncoder.cpp 19 * @brief StageFright shell video encoder 20 ************************************************************************* 21 */ 22 #define LOG_NDEBUG 1 23 #define LOG_TAG "VIDEOEDITOR_VIDEOENCODER" 24 25 /******************* 26 * HEADERS * 27 *******************/ 28 #include "M4OSA_Debug.h" 29 #include "M4SYS_AccessUnit.h" 30 #include "VideoEditorVideoEncoder.h" 31 #include "VideoEditorUtils.h" 32 #include <I420ColorConverter.h> 33 34 #include "utils/Log.h" 35 #include "utils/Vector.h" 36 #include <media/stagefright/MediaSource.h> 37 #include <media/stagefright/MediaDebug.h> 38 #include <media/stagefright/MediaDefs.h> 39 #include <media/stagefright/MetaData.h> 40 #include <media/stagefright/OMXClient.h> 41 #include <media/stagefright/OMXCodec.h> 42 #include <media/MediaProfiles.h> 43 #include "OMX_Video.h" 44 45 /******************** 46 * DEFINITIONS * 47 ********************/ 48 49 // Force using hardware encoder 50 #define VIDEOEDITOR_FORCECODEC kHardwareCodecsOnly 51 52 #if !defined(VIDEOEDITOR_FORCECODEC) 53 #error "Cannot force DSI retrieval if codec type is not fixed" 54 #endif 55 56 /******************** 57 * SOURCE CLASS * 58 ********************/ 59 60 namespace android { 61 62 struct 
VideoEditorVideoEncoderSource : public MediaSource { 63 public: 64 static sp<VideoEditorVideoEncoderSource> Create( 65 const sp<MetaData> &format); 66 virtual status_t start(MetaData *params = NULL); 67 virtual status_t stop(); 68 virtual sp<MetaData> getFormat(); 69 virtual status_t read(MediaBuffer **buffer, 70 const ReadOptions *options = NULL); 71 virtual int32_t storeBuffer(MediaBuffer *buffer); 72 virtual int32_t getNumberOfBuffersInQueue(); 73 74 protected: 75 virtual ~VideoEditorVideoEncoderSource(); 76 77 private: 78 struct MediaBufferChain { 79 MediaBuffer* buffer; 80 MediaBufferChain* nextLink; 81 }; 82 enum State { 83 CREATED, 84 STARTED, 85 ERROR 86 }; 87 VideoEditorVideoEncoderSource(const sp<MetaData> &format); 88 89 // Don't call me 90 VideoEditorVideoEncoderSource(const VideoEditorVideoEncoderSource &); 91 VideoEditorVideoEncoderSource &operator=( 92 const VideoEditorVideoEncoderSource &); 93 94 MediaBufferChain* mFirstBufferLink; 95 MediaBufferChain* mLastBufferLink; 96 int32_t mNbBuffer; 97 bool mIsEOS; 98 State mState; 99 sp<MetaData> mEncFormat; 100 Mutex mLock; 101 Condition mBufferCond; 102 }; 103 104 sp<VideoEditorVideoEncoderSource> VideoEditorVideoEncoderSource::Create( 105 const sp<MetaData> &format) { 106 107 sp<VideoEditorVideoEncoderSource> aSource = 108 new VideoEditorVideoEncoderSource(format); 109 return aSource; 110 } 111 112 VideoEditorVideoEncoderSource::VideoEditorVideoEncoderSource( 113 const sp<MetaData> &format): 114 mFirstBufferLink(NULL), 115 mLastBufferLink(NULL), 116 mNbBuffer(0), 117 mIsEOS(false), 118 mState(CREATED), 119 mEncFormat(format) { 120 LOGV("VideoEditorVideoEncoderSource::VideoEditorVideoEncoderSource"); 121 } 122 123 VideoEditorVideoEncoderSource::~VideoEditorVideoEncoderSource() { 124 125 // Safety clean up 126 if( STARTED == mState ) { 127 stop(); 128 } 129 } 130 131 status_t VideoEditorVideoEncoderSource::start(MetaData *meta) { 132 status_t err = OK; 133 134 LOGV("VideoEditorVideoEncoderSource::start() 
begin"); 135 136 if( CREATED != mState ) { 137 LOGV("VideoEditorVideoEncoderSource::start: invalid state %d", mState); 138 return UNKNOWN_ERROR; 139 } 140 mState = STARTED; 141 142 LOGV("VideoEditorVideoEncoderSource::start() END (0x%x)", err); 143 return err; 144 } 145 146 status_t VideoEditorVideoEncoderSource::stop() { 147 status_t err = OK; 148 149 LOGV("VideoEditorVideoEncoderSource::stop() begin"); 150 151 if( STARTED != mState ) { 152 LOGV("VideoEditorVideoEncoderSource::stop: invalid state %d", mState); 153 return UNKNOWN_ERROR; 154 } 155 156 // Release the buffer chain 157 int32_t i = 0; 158 MediaBufferChain* tmpLink = NULL; 159 while( mFirstBufferLink ) { 160 i++; 161 tmpLink = mFirstBufferLink; 162 mFirstBufferLink = mFirstBufferLink->nextLink; 163 delete tmpLink; 164 } 165 LOGV("VideoEditorVideoEncoderSource::stop : %d buffer remained", i); 166 mFirstBufferLink = NULL; 167 mLastBufferLink = NULL; 168 169 mState = CREATED; 170 171 LOGV("VideoEditorVideoEncoderSource::stop() END (0x%x)", err); 172 return err; 173 } 174 175 sp<MetaData> VideoEditorVideoEncoderSource::getFormat() { 176 177 LOGV("VideoEditorVideoEncoderSource::getFormat"); 178 return mEncFormat; 179 } 180 181 status_t VideoEditorVideoEncoderSource::read(MediaBuffer **buffer, 182 const ReadOptions *options) { 183 Mutex::Autolock autolock(mLock); 184 MediaSource::ReadOptions readOptions; 185 status_t err = OK; 186 MediaBufferChain* tmpLink = NULL; 187 188 LOGV("VideoEditorVideoEncoderSource::read() begin"); 189 190 if ( STARTED != mState ) { 191 LOGV("VideoEditorVideoEncoderSource::read: invalid state %d", mState); 192 return UNKNOWN_ERROR; 193 } 194 195 while (mFirstBufferLink == NULL && !mIsEOS) { 196 mBufferCond.wait(mLock); 197 } 198 199 // End of stream? 
200 if (mFirstBufferLink == NULL) { 201 *buffer = NULL; 202 LOGV("VideoEditorVideoEncoderSource::read : EOS"); 203 return ERROR_END_OF_STREAM; 204 } 205 206 // Get a buffer from the chain 207 *buffer = mFirstBufferLink->buffer; 208 tmpLink = mFirstBufferLink; 209 mFirstBufferLink = mFirstBufferLink->nextLink; 210 211 if ( NULL == mFirstBufferLink ) { 212 mLastBufferLink = NULL; 213 } 214 delete tmpLink; 215 mNbBuffer--; 216 217 LOGV("VideoEditorVideoEncoderSource::read() END (0x%x)", err); 218 return err; 219 } 220 221 int32_t VideoEditorVideoEncoderSource::storeBuffer(MediaBuffer *buffer) { 222 Mutex::Autolock autolock(mLock); 223 status_t err = OK; 224 225 LOGV("VideoEditorVideoEncoderSource::storeBuffer() begin"); 226 227 if( NULL == buffer ) { 228 LOGV("VideoEditorVideoEncoderSource::storeBuffer : reached EOS"); 229 mIsEOS = true; 230 } else { 231 MediaBufferChain* newLink = new MediaBufferChain; 232 newLink->buffer = buffer; 233 newLink->nextLink = NULL; 234 if( NULL != mLastBufferLink ) { 235 mLastBufferLink->nextLink = newLink; 236 } else { 237 mFirstBufferLink = newLink; 238 } 239 mLastBufferLink = newLink; 240 mNbBuffer++; 241 } 242 mBufferCond.signal(); 243 LOGV("VideoEditorVideoEncoderSource::storeBuffer() end"); 244 return mNbBuffer; 245 } 246 247 int32_t VideoEditorVideoEncoderSource::getNumberOfBuffersInQueue() { 248 Mutex::Autolock autolock(mLock); 249 return mNbBuffer; 250 } 251 /******************** 252 * PULLER * 253 ********************/ 254 255 // Pulls media buffers from a MediaSource repeatedly. 256 // The user can then get the buffers from that list. 
class VideoEditorVideoEncoderPuller {
    public:
        VideoEditorVideoEncoderPuller(sp<MediaSource> source);
        ~VideoEditorVideoEncoderPuller();
        void start();
        void stop();
        MediaBuffer* getBufferBlocking();
        MediaBuffer* getBufferNonBlocking();
        void putBuffer(MediaBuffer* buffer);
        bool hasMediaSourceReturnedError();
    private:
        static int acquireThreadStart(void* arg);
        void acquireThreadFunc();

        static int releaseThreadStart(void* arg);
        void releaseThreadFunc();

        sp<MediaSource> mSource;
        Vector<MediaBuffer*> mBuffers;        // buffers read from mSource
        Vector<MediaBuffer*> mReleaseBuffers; // buffers pending release

        Mutex mLock;                 // guards all fields below
        Condition mUserCond;     // for the user of this class
        Condition mAcquireCond;  // for the acquire thread
        Condition mReleaseCond;  // for the release thread

        bool mAskToStart;      // Asks the threads to start
        bool mAskToStop;       // Asks the threads to stop
        bool mAcquireStopped;  // The acquire thread has stopped
        bool mReleaseStopped;  // The release thread has stopped
        status_t mSourceError; // Error returned by MediaSource read
};

// Constructor: the two worker threads are created immediately, but both
// block until start() (or stop()) is called.
VideoEditorVideoEncoderPuller::VideoEditorVideoEncoderPuller(
        sp<MediaSource> source) {
    mSource = source;
    mAskToStart = false;
    mAskToStop = false;
    mAcquireStopped = false;
    mReleaseStopped = false;
    mSourceError = OK;
    androidCreateThread(acquireThreadStart, this);
    androidCreateThread(releaseThreadStart, this);
}

VideoEditorVideoEncoderPuller::~VideoEditorVideoEncoderPuller() {
    // stop() is idempotent; safe even if the user already called it.
    stop();
}

// Returns true if the last MediaSource::read() performed by the acquire
// thread failed (including ERROR_END_OF_STREAM).
bool VideoEditorVideoEncoderPuller::hasMediaSourceReturnedError() {
    Mutex::Autolock autolock(mLock);
    return ((mSourceError != OK) ? true : false);
}
// Unblocks both worker threads so they begin pulling/releasing buffers.
void VideoEditorVideoEncoderPuller::start() {
    Mutex::Autolock autolock(mLock);
    mAskToStart = true;
    mAcquireCond.signal();
    mReleaseCond.signal();
}

// Asks both worker threads to stop, waits for them to exit, then releases
// every buffer still held. Blocks until both threads acknowledge.
void VideoEditorVideoEncoderPuller::stop() {
    Mutex::Autolock autolock(mLock);
    mAskToStop = true;
    mAcquireCond.signal();
    mReleaseCond.signal();
    while (!mAcquireStopped || !mReleaseStopped) {
        mUserCond.wait(mLock);
    }

    // Release remaining buffers
    for (size_t i = 0; i < mBuffers.size(); i++) {
        mBuffers.itemAt(i)->release();
    }

    for (size_t i = 0; i < mReleaseBuffers.size(); i++) {
        mReleaseBuffers.itemAt(i)->release();
    }

    mBuffers.clear();
    mReleaseBuffers.clear();
}

// Pops the oldest pulled buffer, or returns NULL if none is available.
// Ownership transfers to the caller (hand it back via putBuffer()).
MediaBuffer* VideoEditorVideoEncoderPuller::getBufferNonBlocking() {
    Mutex::Autolock autolock(mLock);
    if (mBuffers.empty()) {
        return NULL;
    } else {
        MediaBuffer* b = mBuffers.itemAt(0);
        mBuffers.removeAt(0);
        return b;
    }
}

// Pops the oldest pulled buffer, waiting until one arrives or the acquire
// thread stops (source error/EOS). Returns NULL only in the latter case.
MediaBuffer* VideoEditorVideoEncoderPuller::getBufferBlocking() {
    Mutex::Autolock autolock(mLock);
    while (mBuffers.empty() && !mAcquireStopped) {
        mUserCond.wait(mLock);
    }

    if (mBuffers.empty()) {
        return NULL;
    } else {
        MediaBuffer* b = mBuffers.itemAt(0);
        mBuffers.removeAt(0);
        return b;
    }
}

// Hands a buffer back for release; the actual release() happens on the
// release thread so the caller never blocks in the codec's release path.
void VideoEditorVideoEncoderPuller::putBuffer(MediaBuffer* buffer) {
    Mutex::Autolock autolock(mLock);
    mReleaseBuffers.push(buffer);
    mReleaseCond.signal();
}

int VideoEditorVideoEncoderPuller::acquireThreadStart(void* arg) {
    VideoEditorVideoEncoderPuller* self = (VideoEditorVideoEncoderPuller*)arg;
    self->acquireThreadFunc();
    return 0;
}

int VideoEditorVideoEncoderPuller::releaseThreadStart(void* arg) {
    VideoEditorVideoEncoderPuller* self = (VideoEditorVideoEncoderPuller*)arg;
    self->releaseThreadFunc();
    return 0;
}

// Acquire thread: repeatedly reads buffers from the source into mBuffers.
// Note the lock is dropped around the (potentially long) read() call.
void VideoEditorVideoEncoderPuller::acquireThreadFunc() {
    mLock.lock();

    // Wait for the start signal
    while (!mAskToStart && !mAskToStop) {
        mAcquireCond.wait(mLock);
    }

    // Loop until we are asked to stop, or there is nothing more to read
    while (!mAskToStop) {
        MediaBuffer* pBuffer;
        mLock.unlock();
        status_t result = mSource->read(&pBuffer, NULL);
        mLock.lock();
        mSourceError = result;
        if (result != OK) {
            break;
        }
        mBuffers.push(pBuffer);
        mUserCond.signal();
    }

    mAcquireStopped = true;
    mUserCond.signal();
    mLock.unlock();
}

// Release thread: drains mReleaseBuffers, calling release() with the lock
// dropped. Exits only once asked to stop AND the queue is empty.
void VideoEditorVideoEncoderPuller::releaseThreadFunc() {
    mLock.lock();

    // Wait for the start signal
    while (!mAskToStart && !mAskToStop) {
        mReleaseCond.wait(mLock);
    }

    // Loop until we are asked to stop
    while (1) {
        if (mReleaseBuffers.empty()) {
            if (mAskToStop) {
                break;
            } else {
                mReleaseCond.wait(mLock);
                continue;
            }
        }
        MediaBuffer* pBuffer = mReleaseBuffers.itemAt(0);
        mReleaseBuffers.removeAt(0);
        mLock.unlock();
        pBuffer->release();
        mLock.lock();
    }

    mReleaseStopped = true;
    mUserCond.signal();
    mLock.unlock();
}

/**
 ******************************************************************************
 * structure VideoEditorVideoEncoder_Context
 * @brief This structure defines the context of the StageFright video encoder
 *        shell
 ******************************************************************************
 */
// Bit-flag state machine of the encoder shell.
typedef enum {
    CREATED = 0x1,
    OPENED = 0x2,
    STARTED = 0x4,
    BUFFERING = 0x8,
    READING = 0x10
} VideoEditorVideoEncoder_State;

typedef struct {
    VideoEditorVideoEncoder_State mState;
    M4ENCODER_Format mFormat;
    M4WRITER_DataInterface* mWriterDataInterface;
    M4VPP_apply_fct* mPreProcFunction;
    M4VPP_Context mPreProcContext;
    M4SYS_AccessUnit* mAccessUnit;
M4ENCODER_Params* mCodecParams; 463 M4ENCODER_Header mHeader; 464 H264MCS_ProcessEncodedNALU_fct* mH264NALUPostProcessFct; 465 M4OSA_Context mH264NALUPostProcessCtx; 466 M4OSA_UInt32 mLastCTS; 467 sp<VideoEditorVideoEncoderSource> mEncoderSource; 468 OMXClient mClient; 469 sp<MediaSource> mEncoder; 470 OMX_COLOR_FORMATTYPE mEncoderColorFormat; 471 VideoEditorVideoEncoderPuller* mPuller; 472 I420ColorConverter* mI420ColorConverter; 473 474 uint32_t mNbInputFrames; 475 double mFirstInputCts; 476 double mLastInputCts; 477 uint32_t mNbOutputFrames; 478 int64_t mFirstOutputCts; 479 int64_t mLastOutputCts; 480 481 MediaProfiles *mVideoEditorProfile; 482 int32_t mMaxPrefetchFrames; 483 } VideoEditorVideoEncoder_Context; 484 485 /******************** 486 * TOOLS * 487 ********************/ 488 489 M4OSA_ERR VideoEditorVideoEncoder_getDSI(M4ENCODER_Context pContext, 490 sp<MetaData> metaData) { 491 M4OSA_ERR err = M4NO_ERROR; 492 VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL; 493 status_t result = OK; 494 int32_t nbBuffer = 0; 495 int32_t stride = 0; 496 int32_t height = 0; 497 int32_t framerate = 0; 498 int32_t isCodecConfig = 0; 499 size_t size = 0; 500 uint32_t codecFlags = 0; 501 MediaBuffer* inputBuffer = NULL; 502 MediaBuffer* outputBuffer = NULL; 503 sp<VideoEditorVideoEncoderSource> encoderSource = NULL; 504 sp<MediaSource> encoder = NULL;; 505 OMXClient client; 506 507 LOGV("VideoEditorVideoEncoder_getDSI begin"); 508 // Input parameters check 509 VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); 510 VIDEOEDITOR_CHECK(M4OSA_NULL != metaData.get(), M4ERR_PARAMETER); 511 512 pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext; 513 VIDEOEDITOR_CHECK(CREATED == pEncoderContext->mState, M4ERR_STATE); 514 515 // Create the encoder source 516 encoderSource = VideoEditorVideoEncoderSource::Create(metaData); 517 VIDEOEDITOR_CHECK(NULL != encoderSource.get(), M4ERR_STATE); 518 519 // Connect to the OMX client 520 result = client.connect(); 
521 VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE); 522 523 // Create the OMX codec 524 // VIDEOEDITOR_FORCECODEC MUST be defined here 525 codecFlags |= OMXCodec::VIDEOEDITOR_FORCECODEC; 526 encoder = OMXCodec::Create(client.interface(), metaData, true, 527 encoderSource, NULL, codecFlags); 528 VIDEOEDITOR_CHECK(NULL != encoder.get(), M4ERR_STATE); 529 530 /** 531 * Send fake frames and retrieve the DSI 532 */ 533 // Send a fake frame to the source 534 metaData->findInt32(kKeyStride, &stride); 535 metaData->findInt32(kKeyHeight, &height); 536 metaData->findInt32(kKeySampleRate, &framerate); 537 size = (size_t)(stride*height*3)/2; 538 inputBuffer = new MediaBuffer(size); 539 inputBuffer->meta_data()->setInt64(kKeyTime, 0); 540 nbBuffer = encoderSource->storeBuffer(inputBuffer); 541 encoderSource->storeBuffer(NULL); // Signal EOS 542 543 // Call read once to get the DSI 544 result = encoder->start();; 545 VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE); 546 result = encoder->read(&outputBuffer, NULL); 547 VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE); 548 VIDEOEDITOR_CHECK(outputBuffer->meta_data()->findInt32( 549 kKeyIsCodecConfig, &isCodecConfig) && isCodecConfig, M4ERR_STATE); 550 551 VIDEOEDITOR_CHECK(M4OSA_NULL == pEncoderContext->mHeader.pBuf, M4ERR_STATE); 552 if ( M4ENCODER_kH264 == pEncoderContext->mFormat ) { 553 // For H264, format the DSI 554 result = buildAVCCodecSpecificData( 555 (uint8_t**)(&(pEncoderContext->mHeader.pBuf)), 556 (size_t*)(&(pEncoderContext->mHeader.Size)), 557 (const uint8_t*)outputBuffer->data() + outputBuffer->range_offset(), 558 outputBuffer->range_length(), encoder->getFormat().get()); 559 outputBuffer->release(); 560 VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE); 561 } else { 562 // For MPEG4, just copy the DSI 563 pEncoderContext->mHeader.Size = 564 (M4OSA_UInt32)outputBuffer->range_length(); 565 SAFE_MALLOC(pEncoderContext->mHeader.pBuf, M4OSA_Int8, 566 pEncoderContext->mHeader.Size, "Encoder header"); 567 memcpy((void 
*)pEncoderContext->mHeader.pBuf, 568 (void *)((M4OSA_MemAddr8)(outputBuffer->data())+outputBuffer->range_offset()), 569 pEncoderContext->mHeader.Size); 570 outputBuffer->release(); 571 } 572 573 result = encoder->stop(); 574 VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE); 575 576 cleanUp: 577 // Destroy the graph 578 if ( encoder != NULL ) { encoder.clear(); } 579 client.disconnect(); 580 if ( encoderSource != NULL ) { encoderSource.clear(); } 581 if ( M4NO_ERROR == err ) { 582 LOGV("VideoEditorVideoEncoder_getDSI no error"); 583 } else { 584 LOGV("VideoEditorVideoEncoder_getDSI ERROR 0x%X", err); 585 } 586 LOGV("VideoEditorVideoEncoder_getDSI end"); 587 return err; 588 } 589 /******************** 590 * ENGINE INTERFACE * 591 ********************/ 592 593 M4OSA_ERR VideoEditorVideoEncoder_cleanup(M4ENCODER_Context pContext) { 594 M4OSA_ERR err = M4NO_ERROR; 595 VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL; 596 597 LOGV("VideoEditorVideoEncoder_cleanup begin"); 598 // Input parameters check 599 VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); 600 601 pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext; 602 VIDEOEDITOR_CHECK(CREATED == pEncoderContext->mState, M4ERR_STATE); 603 604 // Release memory 605 SAFE_FREE(pEncoderContext->mHeader.pBuf); 606 SAFE_FREE(pEncoderContext); 607 pContext = M4OSA_NULL; 608 609 cleanUp: 610 if ( M4NO_ERROR == err ) { 611 LOGV("VideoEditorVideoEncoder_cleanup no error"); 612 } else { 613 LOGV("VideoEditorVideoEncoder_cleanup ERROR 0x%X", err); 614 } 615 LOGV("VideoEditorVideoEncoder_cleanup end"); 616 return err; 617 } 618 619 M4OSA_ERR VideoEditorVideoEncoder_init(M4ENCODER_Format format, 620 M4ENCODER_Context* pContext, 621 M4WRITER_DataInterface* pWriterDataInterface, 622 M4VPP_apply_fct* pVPPfct, M4VPP_Context pVPPctxt, 623 M4OSA_Void* pExternalAPI, M4OSA_Void* pUserData) { 624 625 M4OSA_ERR err = M4NO_ERROR; 626 VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL; 627 int encoderInput = 
OMX_COLOR_FormatYUV420Planar;

    LOGV("VideoEditorVideoEncoder_init begin: format %d", format);
    // Input parameters check
    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
    VIDEOEDITOR_CHECK(M4OSA_NULL != pWriterDataInterface, M4ERR_PARAMETER);
    VIDEOEDITOR_CHECK(M4OSA_NULL != pVPPfct, M4ERR_PARAMETER);
    VIDEOEDITOR_CHECK(M4OSA_NULL != pVPPctxt, M4ERR_PARAMETER);

    // Context allocation & initialization
    SAFE_MALLOC(pEncoderContext, VideoEditorVideoEncoder_Context, 1,
        "VideoEditorVideoEncoder");
    pEncoderContext->mState = CREATED;
    pEncoderContext->mFormat = format;
    pEncoderContext->mWriterDataInterface = pWriterDataInterface;
    pEncoderContext->mPreProcFunction = pVPPfct;
    pEncoderContext->mPreProcContext = pVPPctxt;
    pEncoderContext->mPuller = NULL;

    // Get color converter and determine encoder input format
    pEncoderContext->mI420ColorConverter = new I420ColorConverter;
    if (pEncoderContext->mI420ColorConverter->isLoaded()) {
        encoderInput = pEncoderContext->mI420ColorConverter->getEncoderInputFormat();
    }
    // The converter is only kept when the encoder needs something other
    // than plain I420; otherwise frames can be fed directly.
    if (encoderInput == OMX_COLOR_FormatYUV420Planar) {
        delete pEncoderContext->mI420ColorConverter;
        pEncoderContext->mI420ColorConverter = NULL;
    }
    pEncoderContext->mEncoderColorFormat = (OMX_COLOR_FORMATTYPE)encoderInput;
    LOGI("encoder input format = 0x%X\n", encoderInput);

    *pContext = pEncoderContext;

cleanUp:
    if ( M4NO_ERROR == err ) {
        LOGV("VideoEditorVideoEncoder_init no error");
    } else {
        // On failure, free the partially built context and clear the
        // caller's handle.
        VideoEditorVideoEncoder_cleanup(pEncoderContext);
        *pContext = M4OSA_NULL;
        LOGV("VideoEditorVideoEncoder_init ERROR 0x%X", err);
    }
    LOGV("VideoEditorVideoEncoder_init end");
    return err;
}

// Thin format-specific wrappers around VideoEditorVideoEncoder_init().

M4OSA_ERR VideoEditorVideoEncoder_init_H263(M4ENCODER_Context* pContext,
        M4WRITER_DataInterface* pWriterDataInterface, M4VPP_apply_fct* pVPPfct,
        M4VPP_Context pVPPctxt, M4OSA_Void* pExternalAPI, M4OSA_Void* pUserData)
{

    return VideoEditorVideoEncoder_init(M4ENCODER_kH263, pContext,
        pWriterDataInterface, pVPPfct, pVPPctxt, pExternalAPI, pUserData);
}


M4OSA_ERR VideoEditorVideoEncoder_init_MPEG4(M4ENCODER_Context* pContext,
        M4WRITER_DataInterface* pWriterDataInterface, M4VPP_apply_fct* pVPPfct,
        M4VPP_Context pVPPctxt, M4OSA_Void* pExternalAPI, M4OSA_Void* pUserData)
{

    return VideoEditorVideoEncoder_init(M4ENCODER_kMPEG4, pContext,
        pWriterDataInterface, pVPPfct, pVPPctxt, pExternalAPI, pUserData);
}


M4OSA_ERR VideoEditorVideoEncoder_init_H264(M4ENCODER_Context* pContext,
        M4WRITER_DataInterface* pWriterDataInterface, M4VPP_apply_fct* pVPPfct,
        M4VPP_Context pVPPctxt, M4OSA_Void* pExternalAPI, M4OSA_Void* pUserData)
{

    return VideoEditorVideoEncoder_init(M4ENCODER_kH264, pContext,
        pWriterDataInterface, pVPPfct, pVPPctxt, pExternalAPI, pUserData);
}

/**
 * Tears down the StageFright graph built by _open() and returns the context
 * to the CREATED state.
 * @param pContext  encoder shell context (must be in OPENED state)
 * @return M4NO_ERROR on success, M4ERR_PARAMETER/M4ERR_STATE otherwise
 */
M4OSA_ERR VideoEditorVideoEncoder_close(M4ENCODER_Context pContext) {
    M4OSA_ERR err = M4NO_ERROR;
    VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;

    LOGV("VideoEditorVideoEncoder_close begin");
    // Input parameters check
    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);

    pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;
    VIDEOEDITOR_CHECK(OPENED == pEncoderContext->mState, M4ERR_STATE);

    // Release memory
    SAFE_FREE(pEncoderContext->mCodecParams);

    // Destroy the graph
    pEncoderContext->mEncoder.clear();
    pEncoderContext->mClient.disconnect();
    pEncoderContext->mEncoderSource.clear();

    delete pEncoderContext->mPuller;
    pEncoderContext->mPuller = NULL;

    delete pEncoderContext->mI420ColorConverter;
    pEncoderContext->mI420ColorConverter = NULL;

    // Set the new state
    pEncoderContext->mState = CREATED;

cleanUp:
    if( M4NO_ERROR == err ) {
        LOGV("VideoEditorVideoEncoder_close no error");
    } else {
        LOGV("VideoEditorVideoEncoder_close ERROR 0x%X", err);
    }
    LOGV("VideoEditorVideoEncoder_close end");
    return err;
}

/**
 * Builds the StageFright encoding graph: copies the codec parameters,
 * creates the encoder metadata, retrieves the DSI (except H263), and
 * instantiates the OMX encoder plus its source and puller.
 * @param pContext  encoder shell context (must be in CREATED state)
 * @param pAU       access unit used to hand encoded data to the writer
 * @param pParams   M4ENCODER_Params with the target encoding settings
 * @return M4NO_ERROR on success, M4ERR_PARAMETER/M4ERR_STATE otherwise
 */
M4OSA_ERR VideoEditorVideoEncoder_open(M4ENCODER_Context pContext,
        M4SYS_AccessUnit* pAU, M4OSA_Void* pParams) {
    M4OSA_ERR err = M4NO_ERROR;
    VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;
    M4ENCODER_Params* pCodecParams = M4OSA_NULL;
    status_t result = OK;
    sp<MetaData> encoderMetadata = NULL;
    const char* mime = NULL;
    int32_t iProfile = 0;
    int32_t iLevel = 0;

    int32_t iFrameRate = 0;
    uint32_t codecFlags = 0;

    LOGV(">>> VideoEditorVideoEncoder_open begin");
    // Input parameters check
    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
    VIDEOEDITOR_CHECK(M4OSA_NULL != pAU, M4ERR_PARAMETER);
    VIDEOEDITOR_CHECK(M4OSA_NULL != pParams, M4ERR_PARAMETER);

    pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;
    pCodecParams = (M4ENCODER_Params*)pParams;
    VIDEOEDITOR_CHECK(CREATED == pEncoderContext->mState, M4ERR_STATE);

    // Context initialization
    pEncoderContext->mAccessUnit = pAU;
    pEncoderContext->mVideoEditorProfile = MediaProfiles::getInstance();
    pEncoderContext->mMaxPrefetchFrames =
        pEncoderContext->mVideoEditorProfile->getVideoEditorCapParamByName(
        "maxPrefetchYUVFrames");

    // Allocate & initialize the encoding parameters
    SAFE_MALLOC(pEncoderContext->mCodecParams, M4ENCODER_Params, 1,
        "VideoEditorVideoEncoder");


    pEncoderContext->mCodecParams->InputFormat = pCodecParams->InputFormat;
    pEncoderContext->mCodecParams->InputFrameWidth =
        pCodecParams->InputFrameWidth;
    pEncoderContext->mCodecParams->InputFrameHeight =
        pCodecParams->InputFrameHeight;
    pEncoderContext->mCodecParams->FrameWidth = pCodecParams->FrameWidth;
    pEncoderContext->mCodecParams->FrameHeight = pCodecParams->FrameHeight;
    pEncoderContext->mCodecParams->Bitrate =
pCodecParams->Bitrate;
    pEncoderContext->mCodecParams->FrameRate = pCodecParams->FrameRate;
    pEncoderContext->mCodecParams->Format = pCodecParams->Format;
    pEncoderContext->mCodecParams->videoProfile = pCodecParams->videoProfile;
    pEncoderContext->mCodecParams->videoLevel= pCodecParams->videoLevel;

    // Check output format consistency and resolution
    VIDEOEDITOR_CHECK(
        pEncoderContext->mCodecParams->Format == pEncoderContext->mFormat,
        M4ERR_PARAMETER);
    // Dimensions must be macroblock (16 pixel) aligned for these codecs
    VIDEOEDITOR_CHECK(0 == pEncoderContext->mCodecParams->FrameWidth % 16,
        M4ERR_PARAMETER);
    VIDEOEDITOR_CHECK(0 == pEncoderContext->mCodecParams->FrameHeight % 16,
        M4ERR_PARAMETER);

    /**
     * StageFright graph building
     */

    // Create the meta data for the encoder
    encoderMetadata = new MetaData;
    switch( pEncoderContext->mCodecParams->Format ) {
        case M4ENCODER_kH263:
            mime = MEDIA_MIMETYPE_VIDEO_H263;
            break;
        case M4ENCODER_kMPEG4:
            mime = MEDIA_MIMETYPE_VIDEO_MPEG4;
            break;
        case M4ENCODER_kH264:
            mime = MEDIA_MIMETYPE_VIDEO_AVC;
            break;
        default:
            VIDEOEDITOR_CHECK(!"VideoEncoder_open : incorrect input format",
                M4ERR_PARAMETER);
            break;
    }
    iProfile = pEncoderContext->mCodecParams->videoProfile;
    iLevel = pEncoderContext->mCodecParams->videoLevel;
    LOGV("Encoder mime %s profile %d, level %d",
        mime,iProfile, iLevel);
    LOGV("Encoder w %d, h %d, bitrate %d, fps %d",
        pEncoderContext->mCodecParams->FrameWidth,
        pEncoderContext->mCodecParams->FrameHeight,
        pEncoderContext->mCodecParams->Bitrate,
        pEncoderContext->mCodecParams->FrameRate);
    // 0x7fffffff is the "unset" sentinel for profile/level
    CHECK(iProfile != 0x7fffffff);
    CHECK(iLevel != 0x7fffffff);

    encoderMetadata->setCString(kKeyMIMEType, mime);
    encoderMetadata->setInt32(kKeyVideoProfile, iProfile);
    //FIXME:
    // Temp: Do not set the level for Mpeg4 / H.263 Enc
    // as OMX.Nvidia.mp4.encoder and OMX.Nvidia.h263.encoder
    // return 0x80001019
    if (pEncoderContext->mCodecParams->Format == M4ENCODER_kH264) {
        encoderMetadata->setInt32(kKeyVideoLevel, iLevel);
    }
    // Stride/slice-height are set equal to width/height: frames are
    // tightly packed, no padding.
    encoderMetadata->setInt32(kKeyWidth,
        (int32_t)pEncoderContext->mCodecParams->FrameWidth);
    encoderMetadata->setInt32(kKeyStride,
        (int32_t)pEncoderContext->mCodecParams->FrameWidth);
    encoderMetadata->setInt32(kKeyHeight,
        (int32_t)pEncoderContext->mCodecParams->FrameHeight);
    encoderMetadata->setInt32(kKeySliceHeight,
        (int32_t)pEncoderContext->mCodecParams->FrameHeight);

    // Map the M4ENCODER frame-rate enum to an integer fps; fractional
    // rates are rounded up (7.5 -> 8, 12.5 -> 13).
    switch( pEncoderContext->mCodecParams->FrameRate ) {
        case M4ENCODER_k5_FPS:    iFrameRate = 5;  break;
        case M4ENCODER_k7_5_FPS:  iFrameRate = 8;  break;
        case M4ENCODER_k10_FPS:   iFrameRate = 10; break;
        case M4ENCODER_k12_5_FPS: iFrameRate = 13; break;
        case M4ENCODER_k15_FPS:   iFrameRate = 15; break;
        case M4ENCODER_k20_FPS:   iFrameRate = 20; break;
        case M4ENCODER_k25_FPS:   iFrameRate = 25; break;
        case M4ENCODER_k30_FPS:   iFrameRate = 30; break;
        case M4ENCODER_kVARIABLE_FPS:
            iFrameRate = 30;
            LOGI("Frame rate set to M4ENCODER_kVARIABLE_FPS: set to 30");
            break;
        case M4ENCODER_kUSE_TIMESCALE:
            iFrameRate = 30;
            LOGI("Frame rate set to M4ENCODER_kUSE_TIMESCALE:  set to 30");
            break;

        default:
            VIDEOEDITOR_CHECK(!"VideoEncoder_open: incorrect framerate",
                M4ERR_STATE);
            break;
    }
    encoderMetadata->setInt32(kKeyFrameRate, iFrameRate);
    encoderMetadata->setInt32(kKeyBitRate,
        (int32_t)pEncoderContext->mCodecParams->Bitrate);
    encoderMetadata->setInt32(kKeyIFramesInterval, 1);

    encoderMetadata->setInt32(kKeyColorFormat,
        pEncoderContext->mEncoderColorFormat);

    // H263 streams carry no codec-specific data, so skip DSI retrieval
    if (pEncoderContext->mCodecParams->Format != M4ENCODER_kH263) {
        // Get the encoder DSI
        err = VideoEditorVideoEncoder_getDSI(pEncoderContext, encoderMetadata);
        VIDEOEDITOR_CHECK(M4NO_ERROR == err, err);
    }

    // Create the encoder source
    pEncoderContext->mEncoderSource = VideoEditorVideoEncoderSource::Create(
        encoderMetadata);
    VIDEOEDITOR_CHECK(
        NULL != pEncoderContext->mEncoderSource.get(), M4ERR_STATE);

    // Connect to the OMX client
    result = pEncoderContext->mClient.connect();
    VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE);

    // Create the OMX codec
#ifdef VIDEOEDITOR_FORCECODEC
    codecFlags |= OMXCodec::VIDEOEDITOR_FORCECODEC;
#endif /* VIDEOEDITOR_FORCECODEC */
    pEncoderContext->mEncoder = OMXCodec::Create(
        pEncoderContext->mClient.interface(), encoderMetadata, true,
        pEncoderContext->mEncoderSource, NULL, codecFlags);
    VIDEOEDITOR_CHECK(NULL != pEncoderContext->mEncoder.get(), M4ERR_STATE);
    LOGV("VideoEditorVideoEncoder_open : DONE");
    pEncoderContext->mPuller = new VideoEditorVideoEncoderPuller(
        pEncoderContext->mEncoder);

    // Set the new state
    pEncoderContext->mState = OPENED;

cleanUp:
    if( M4NO_ERROR == err ) {
        LOGV("VideoEditorVideoEncoder_open no error");
    } else {
        // Roll back whatever part of the graph was already built
        VideoEditorVideoEncoder_close(pEncoderContext);
        LOGV("VideoEditorVideoEncoder_open ERROR 0x%X", err);
    }
    LOGV("VideoEditorVideoEncoder_open end");
    return err;
}

/**
 * Runs the pre-processing function to produce one I420 frame, converts it
 * to the encoder input format if needed, and queues it to the encoder
 * source. A bReachedEOS of true queues a NULL buffer to signal EOS.
 * @param pContext     encoder shell context
 * @param Cts          composition timestamp of the frame, in ms
 * @param bReachedEOS  true when there is no more input
 * @return M4NO_ERROR on success, an M4 error from the pre-processor else
 */
M4OSA_ERR VideoEditorVideoEncoder_processInputBuffer(
        M4ENCODER_Context pContext, M4OSA_Double Cts,
        M4OSA_Bool bReachedEOS) {
    M4OSA_ERR err = M4NO_ERROR;
    VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;
    M4VIFI_ImagePlane pOutPlane[3];
    MediaBuffer* buffer = NULL;
    int32_t nbBuffer = 0;

    LOGV("VideoEditorVideoEncoder_processInputBuffer begin: cts  %f", Cts);
    // Input parameters check
    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);

    pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;
    pOutPlane[0].pac_data = M4OSA_NULL;
    pOutPlane[1].pac_data = M4OSA_NULL;
    pOutPlane[2].pac_data = M4OSA_NULL;

    if ( M4OSA_FALSE == bReachedEOS ) {
        // One I420 frame: Y plane plus two quarter-size chroma planes
        M4OSA_UInt32 sizeY = pEncoderContext->mCodecParams->FrameWidth *
            pEncoderContext->mCodecParams->FrameHeight;
        M4OSA_UInt32 sizeU = sizeY >> 2;
        M4OSA_UInt32 size  = sizeY + 2*sizeU;
        M4OSA_UInt8* pData = M4OSA_NULL;
        buffer = new MediaBuffer((size_t)size);
        pData = (M4OSA_UInt8*)buffer->data() + buffer->range_offset();

        // Prepare the output image for pre-processing
        pOutPlane[0].u_width   = pEncoderContext->mCodecParams->FrameWidth;
        pOutPlane[0].u_height  = pEncoderContext->mCodecParams->FrameHeight;
        pOutPlane[0].u_topleft = 0;
        pOutPlane[0].u_stride  = pOutPlane[0].u_width;
        pOutPlane[1].u_width   = pOutPlane[0].u_width/2;
        pOutPlane[1].u_height  = pOutPlane[0].u_height/2;
        pOutPlane[1].u_topleft = 0;
        pOutPlane[1].u_stride  = pOutPlane[0].u_stride/2;
        pOutPlane[2].u_width   = pOutPlane[1].u_width;
        pOutPlane[2].u_height  = pOutPlane[1].u_height;
        pOutPlane[2].u_topleft = 0;
        pOutPlane[2].u_stride  = pOutPlane[1].u_stride;

        // Planes laid out contiguously in the buffer: Y, then U, then V
        pOutPlane[0].pac_data = pData;
        pOutPlane[1].pac_data = pData + sizeY;
        pOutPlane[2].pac_data = pData + sizeY + sizeU;

        // Apply pre-processing
        err = pEncoderContext->mPreProcFunction(
            pEncoderContext->mPreProcContext, M4OSA_NULL, pOutPlane);
        VIDEOEDITOR_CHECK(M4NO_ERROR == err, err);

        // Convert MediaBuffer to the encoder input format if necessary
        if (pEncoderContext->mI420ColorConverter) {
            I420ColorConverter* converter = pEncoderContext->mI420ColorConverter;
            int actualWidth  = pEncoderContext->mCodecParams->FrameWidth;
            int actualHeight = pEncoderContext->mCodecParams->FrameHeight;

            int encoderWidth, encoderHeight;
            ARect encoderRect;
            int encoderBufferSize;

            if (converter->getEncoderInputBufferInfo(
                actualWidth, actualHeight,
                &encoderWidth, &encoderHeight,
                &encoderRect, &encoderBufferSize) == 0) {

                MediaBuffer* newBuffer = new MediaBuffer(encoderBufferSize);

                if (converter->convertI420ToEncoderInput(
                    pData, // 
srcBits 991 actualWidth, actualHeight, 992 encoderWidth, encoderHeight, 993 encoderRect, 994 (uint8_t*)newBuffer->data() + newBuffer->range_offset()) < 0) { 995 LOGE("convertI420ToEncoderInput failed"); 996 } 997 998 // switch to new buffer 999 buffer->release(); 1000 buffer = newBuffer; 1001 } 1002 } 1003 1004 // Set the metadata 1005 buffer->meta_data()->setInt64(kKeyTime, (int64_t)(Cts*1000)); 1006 } 1007 1008 // Push the buffer to the source, a NULL buffer, notifies the source of EOS 1009 nbBuffer = pEncoderContext->mEncoderSource->storeBuffer(buffer); 1010 1011 cleanUp: 1012 if ( M4NO_ERROR == err ) { 1013 LOGV("VideoEditorVideoEncoder_processInputBuffer error 0x%X", err); 1014 } else { 1015 if( NULL != buffer ) { 1016 buffer->release(); 1017 } 1018 LOGV("VideoEditorVideoEncoder_processInputBuffer ERROR 0x%X", err); 1019 } 1020 LOGV("VideoEditorVideoEncoder_processInputBuffer end"); 1021 return err; 1022 } 1023 1024 M4OSA_ERR VideoEditorVideoEncoder_processOutputBuffer( 1025 M4ENCODER_Context pContext, MediaBuffer* buffer) { 1026 M4OSA_ERR err = M4NO_ERROR; 1027 VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL; 1028 M4OSA_UInt32 Cts = 0; 1029 int32_t i32Tmp = 0; 1030 int64_t i64Tmp = 0; 1031 status_t result = OK; 1032 1033 LOGV("VideoEditorVideoEncoder_processOutputBuffer begin"); 1034 // Input parameters check 1035 VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); 1036 VIDEOEDITOR_CHECK(M4OSA_NULL != buffer, M4ERR_PARAMETER); 1037 1038 pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext; 1039 1040 // Process the returned AU 1041 if ( 0 == buffer->range_length() ) { 1042 // Encoder has no data yet, nothing unusual 1043 LOGV("VideoEditorVideoEncoder_processOutputBuffer : buffer is empty"); 1044 goto cleanUp; 1045 } 1046 VIDEOEDITOR_CHECK(0 == ((M4OSA_UInt32)buffer->data())%4, M4ERR_PARAMETER); 1047 VIDEOEDITOR_CHECK(buffer->meta_data().get(), M4ERR_PARAMETER); 1048 if ( buffer->meta_data()->findInt32(kKeyIsCodecConfig, &i32Tmp) 
&& i32Tmp ){ 1049 { // Display the DSI 1050 LOGV("VideoEditorVideoEncoder_processOutputBuffer DSI %d", 1051 buffer->range_length()); 1052 uint8_t* tmp = (uint8_t*)(buffer->data()); 1053 for( uint32_t i=0; i<buffer->range_length(); i++ ) { 1054 LOGV("DSI [%d] %.2X", i, tmp[i]); 1055 } 1056 } 1057 } else { 1058 // Check the CTS 1059 VIDEOEDITOR_CHECK(buffer->meta_data()->findInt64(kKeyTime, &i64Tmp), 1060 M4ERR_STATE); 1061 1062 pEncoderContext->mNbOutputFrames++; 1063 if ( 0 > pEncoderContext->mFirstOutputCts ) { 1064 pEncoderContext->mFirstOutputCts = i64Tmp; 1065 } 1066 pEncoderContext->mLastOutputCts = i64Tmp; 1067 1068 Cts = (M4OSA_Int32)(i64Tmp/1000); 1069 LOGV("[TS_CHECK] VI/ENC WRITE frame %d @ %lld -> %d (last %d)", 1070 pEncoderContext->mNbOutputFrames, i64Tmp, Cts, 1071 pEncoderContext->mLastCTS); 1072 if ( Cts < pEncoderContext->mLastCTS ) { 1073 LOGV("VideoEncoder_processOutputBuffer WARNING : Cts is going " 1074 "backwards %d < %d", Cts, pEncoderContext->mLastCTS); 1075 goto cleanUp; 1076 } 1077 LOGV("VideoEditorVideoEncoder_processOutputBuffer : %d %d", 1078 Cts, pEncoderContext->mLastCTS); 1079 1080 // Retrieve the AU container 1081 err = pEncoderContext->mWriterDataInterface->pStartAU( 1082 pEncoderContext->mWriterDataInterface->pWriterContext, 1083 pEncoderContext->mAccessUnit->stream->streamID, 1084 pEncoderContext->mAccessUnit); 1085 VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); 1086 1087 // Format the AU 1088 VIDEOEDITOR_CHECK( 1089 buffer->range_length() <= pEncoderContext->mAccessUnit->size, 1090 M4ERR_PARAMETER); 1091 // Remove H264 AU start code 1092 if ( M4ENCODER_kH264 == pEncoderContext->mFormat ) { 1093 if (!memcmp((const uint8_t *)buffer->data() + \ 1094 buffer->range_offset(), "\x00\x00\x00\x01", 4) ) { 1095 buffer->set_range(buffer->range_offset() + 4, 1096 buffer->range_length() - 4); 1097 } 1098 } 1099 1100 if ( (M4ENCODER_kH264 == pEncoderContext->mFormat) && 1101 (M4OSA_NULL != pEncoderContext->mH264NALUPostProcessFct) ) { 1102 // 
H264 trimming case, NALU post processing is needed 1103 M4OSA_Int32 outputSize = pEncoderContext->mAccessUnit->size; 1104 err = pEncoderContext->mH264NALUPostProcessFct( 1105 pEncoderContext->mH264NALUPostProcessCtx, 1106 (M4OSA_UInt8*)buffer->data()+buffer->range_offset(), 1107 buffer->range_length(), 1108 (M4OSA_UInt8*)pEncoderContext->mAccessUnit->dataAddress, 1109 &outputSize); 1110 VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); 1111 pEncoderContext->mAccessUnit->size = (M4OSA_UInt32)outputSize; 1112 } else { 1113 // The AU can just be copied 1114 memcpy((void *)pEncoderContext->mAccessUnit->\ 1115 dataAddress, (void *)((M4OSA_MemAddr8)(buffer->data())+buffer->\ 1116 range_offset()), buffer->range_length()); 1117 pEncoderContext->mAccessUnit->size = 1118 (M4OSA_UInt32)buffer->range_length(); 1119 } 1120 1121 if ( buffer->meta_data()->findInt32(kKeyIsSyncFrame,&i32Tmp) && i32Tmp){ 1122 pEncoderContext->mAccessUnit->attribute = AU_RAP; 1123 } else { 1124 pEncoderContext->mAccessUnit->attribute = AU_P_Frame; 1125 } 1126 pEncoderContext->mLastCTS = Cts; 1127 pEncoderContext->mAccessUnit->CTS = Cts; 1128 pEncoderContext->mAccessUnit->DTS = Cts; 1129 1130 LOGV("VideoEditorVideoEncoder_processOutputBuffer: AU @ 0x%X 0x%X %d %d", 1131 pEncoderContext->mAccessUnit->dataAddress, 1132 *pEncoderContext->mAccessUnit->dataAddress, 1133 pEncoderContext->mAccessUnit->size, 1134 pEncoderContext->mAccessUnit->CTS); 1135 1136 // Write the AU 1137 err = pEncoderContext->mWriterDataInterface->pProcessAU( 1138 pEncoderContext->mWriterDataInterface->pWriterContext, 1139 pEncoderContext->mAccessUnit->stream->streamID, 1140 pEncoderContext->mAccessUnit); 1141 VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); 1142 } 1143 1144 cleanUp: 1145 if( M4NO_ERROR == err ) { 1146 LOGV("VideoEditorVideoEncoder_processOutputBuffer no error"); 1147 } else { 1148 SAFE_FREE(pEncoderContext->mHeader.pBuf); 1149 pEncoderContext->mHeader.Size = 0; 1150 LOGV("VideoEditorVideoEncoder_processOutputBuffer ERROR 0x%X", 
err); 1151 } 1152 LOGV("VideoEditorVideoEncoder_processOutputBuffer end"); 1153 return err; 1154 } 1155 1156 M4OSA_ERR VideoEditorVideoEncoder_encode(M4ENCODER_Context pContext, 1157 M4VIFI_ImagePlane* pInPlane, M4OSA_Double Cts, 1158 M4ENCODER_FrameMode FrameMode) { 1159 M4OSA_ERR err = M4NO_ERROR; 1160 VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL; 1161 status_t result = OK; 1162 MediaBuffer* outputBuffer = NULL; 1163 1164 LOGV("VideoEditorVideoEncoder_encode 0x%X %f %d", pInPlane, Cts, FrameMode); 1165 VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); 1166 1167 pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext; 1168 if ( STARTED == pEncoderContext->mState ) { 1169 pEncoderContext->mState = BUFFERING; 1170 } 1171 VIDEOEDITOR_CHECK( 1172 (BUFFERING | READING) & pEncoderContext->mState, M4ERR_STATE); 1173 1174 pEncoderContext->mNbInputFrames++; 1175 if ( 0 > pEncoderContext->mFirstInputCts ) { 1176 pEncoderContext->mFirstInputCts = Cts; 1177 } 1178 pEncoderContext->mLastInputCts = Cts; 1179 1180 LOGV("VideoEditorVideoEncoder_encode 0x%X %d %f (%d)", pInPlane, FrameMode, 1181 Cts, pEncoderContext->mLastCTS); 1182 1183 // Push the input buffer to the encoder source 1184 err = VideoEditorVideoEncoder_processInputBuffer(pEncoderContext, Cts, 1185 M4OSA_FALSE); 1186 VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); 1187 1188 // Notify the source in case of EOS 1189 if ( M4ENCODER_kLastFrame == FrameMode ) { 1190 err = VideoEditorVideoEncoder_processInputBuffer( 1191 pEncoderContext, 0, M4OSA_TRUE); 1192 VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); 1193 } 1194 1195 if ( BUFFERING == pEncoderContext->mState ) { 1196 // Prefetch is complete, start reading 1197 pEncoderContext->mState = READING; 1198 } 1199 // Read 1200 while (1) { 1201 MediaBuffer *outputBuffer = 1202 pEncoderContext->mPuller->getBufferNonBlocking(); 1203 1204 if (outputBuffer == NULL) { 1205 int32_t YUVBufferNumber = 1206 
pEncoderContext->mEncoderSource->getNumberOfBuffersInQueue(); 1207 /* Make sure that the configured maximum number of prefetch YUV frames is 1208 * not exceeded. This is to limit the amount of memory usage of video editor engine. 1209 * The value of maximum prefetch Yuv frames is defined in media_profiles.xml */ 1210 if ((YUVBufferNumber < pEncoderContext->mMaxPrefetchFrames) || 1211 (pEncoderContext->mPuller->hasMediaSourceReturnedError() 1212 == true)) { 1213 break; 1214 } 1215 } else { 1216 // Provide the encoded AU to the writer 1217 err = VideoEditorVideoEncoder_processOutputBuffer(pEncoderContext, 1218 outputBuffer); 1219 VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); 1220 1221 pEncoderContext->mPuller->putBuffer(outputBuffer); 1222 } 1223 } 1224 1225 cleanUp: 1226 if( M4NO_ERROR == err ) { 1227 LOGV("VideoEditorVideoEncoder_encode no error"); 1228 } else { 1229 LOGV("VideoEditorVideoEncoder_encode ERROR 0x%X", err); 1230 } 1231 LOGV("VideoEditorVideoEncoder_encode end"); 1232 return err; 1233 } 1234 1235 M4OSA_ERR VideoEditorVideoEncoder_start(M4ENCODER_Context pContext) { 1236 M4OSA_ERR err = M4NO_ERROR; 1237 VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL; 1238 status_t result = OK; 1239 1240 LOGV("VideoEditorVideoEncoder_start begin"); 1241 // Input parameters check 1242 VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); 1243 1244 pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext; 1245 VIDEOEDITOR_CHECK(OPENED == pEncoderContext->mState, M4ERR_STATE); 1246 1247 pEncoderContext->mNbInputFrames = 0; 1248 pEncoderContext->mFirstInputCts = -1.0; 1249 pEncoderContext->mLastInputCts = -1.0; 1250 pEncoderContext->mNbOutputFrames = 0; 1251 pEncoderContext->mFirstOutputCts = -1; 1252 pEncoderContext->mLastOutputCts = -1; 1253 1254 result = pEncoderContext->mEncoder->start(); 1255 VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE); 1256 1257 pEncoderContext->mPuller->start(); 1258 1259 // Set the new state 1260 pEncoderContext->mState = 
STARTED; 1261 1262 cleanUp: 1263 if ( M4NO_ERROR == err ) { 1264 LOGV("VideoEditorVideoEncoder_start no error"); 1265 } else { 1266 LOGV("VideoEditorVideoEncoder_start ERROR 0x%X", err); 1267 } 1268 LOGV("VideoEditorVideoEncoder_start end"); 1269 return err; 1270 } 1271 1272 M4OSA_ERR VideoEditorVideoEncoder_stop(M4ENCODER_Context pContext) { 1273 M4OSA_ERR err = M4NO_ERROR; 1274 VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL; 1275 MediaBuffer* outputBuffer = NULL; 1276 status_t result = OK; 1277 1278 LOGV("VideoEditorVideoEncoder_stop begin"); 1279 // Input parameters check 1280 VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); 1281 pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext; 1282 1283 // Send EOS again to make sure the source doesn't block. 1284 err = VideoEditorVideoEncoder_processInputBuffer(pEncoderContext, 0, 1285 M4OSA_TRUE); 1286 VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); 1287 1288 // Process the remaining buffers if necessary 1289 if ( (BUFFERING | READING) & pEncoderContext->mState ) { 1290 while (1) { 1291 MediaBuffer *outputBuffer = 1292 pEncoderContext->mPuller->getBufferBlocking(); 1293 1294 if (outputBuffer == NULL) break; 1295 1296 err = VideoEditorVideoEncoder_processOutputBuffer( 1297 pEncoderContext, outputBuffer); 1298 VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); 1299 1300 pEncoderContext->mPuller->putBuffer(outputBuffer); 1301 } 1302 1303 pEncoderContext->mState = STARTED; 1304 } 1305 1306 // Stop the graph module if necessary 1307 if ( STARTED == pEncoderContext->mState ) { 1308 pEncoderContext->mPuller->stop(); 1309 pEncoderContext->mEncoder->stop(); 1310 pEncoderContext->mState = OPENED; 1311 } 1312 1313 if (pEncoderContext->mNbInputFrames != pEncoderContext->mNbOutputFrames) { 1314 LOGW("Some frames were not encoded: input(%d) != output(%d)", 1315 pEncoderContext->mNbInputFrames, pEncoderContext->mNbOutputFrames); 1316 } 1317 1318 cleanUp: 1319 if ( M4NO_ERROR == err ) { 1320 
LOGV("VideoEditorVideoEncoder_stop no error"); 1321 } else { 1322 LOGV("VideoEditorVideoEncoder_stop ERROR 0x%X", err); 1323 } 1324 LOGV("VideoEditorVideoEncoder_stop end"); 1325 return err; 1326 } 1327 1328 M4OSA_ERR VideoEditorVideoEncoder_regulBitRate(M4ENCODER_Context pContext) { 1329 LOGW("regulBitRate is not implemented"); 1330 return M4NO_ERROR; 1331 } 1332 1333 M4OSA_ERR VideoEditorVideoEncoder_setOption(M4ENCODER_Context pContext, 1334 M4OSA_UInt32 optionID, M4OSA_DataOption optionValue) { 1335 M4OSA_ERR err = M4NO_ERROR; 1336 VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL; 1337 1338 LOGV("VideoEditorVideoEncoder_setOption start optionID 0x%X", optionID); 1339 // Input parameters check 1340 VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); 1341 1342 pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext; 1343 1344 switch( optionID ) { 1345 case M4ENCODER_kOptionID_SetH264ProcessNALUfctsPtr: 1346 pEncoderContext->mH264NALUPostProcessFct = 1347 (H264MCS_ProcessEncodedNALU_fct*)optionValue; 1348 break; 1349 case M4ENCODER_kOptionID_H264ProcessNALUContext: 1350 pEncoderContext->mH264NALUPostProcessCtx = 1351 (M4OSA_Context)optionValue; 1352 break; 1353 default: 1354 LOGV("VideoEditorVideoEncoder_setOption: unsupported optionId 0x%X", 1355 optionID); 1356 VIDEOEDITOR_CHECK(M4OSA_FALSE, M4ERR_BAD_OPTION_ID); 1357 break; 1358 } 1359 1360 cleanUp: 1361 if ( M4NO_ERROR == err ) { 1362 LOGV("VideoEditorVideoEncoder_setOption no error"); 1363 } else { 1364 LOGV("VideoEditorVideoEncoder_setOption ERROR 0x%X", err); 1365 } 1366 LOGV("VideoEditorVideoEncoder_setOption end"); 1367 return err; 1368 } 1369 1370 M4OSA_ERR VideoEditorVideoEncoder_getOption(M4ENCODER_Context pContext, 1371 M4OSA_UInt32 optionID, M4OSA_DataOption optionValue) { 1372 M4OSA_ERR err = M4NO_ERROR; 1373 VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL; 1374 1375 LOGV("VideoEditorVideoEncoder_getOption begin optinId 0x%X", optionID); 1376 // Input 
parameters check 1377 VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); 1378 pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext; 1379 1380 switch( optionID ) { 1381 case M4ENCODER_kOptionID_EncoderHeader: 1382 VIDEOEDITOR_CHECK( 1383 M4OSA_NULL != pEncoderContext->mHeader.pBuf, M4ERR_STATE); 1384 *(M4ENCODER_Header**)optionValue = &(pEncoderContext->mHeader); 1385 break; 1386 default: 1387 LOGV("VideoEditorVideoEncoder_getOption: unsupported optionId 0x%X", 1388 optionID); 1389 VIDEOEDITOR_CHECK(M4OSA_FALSE, M4ERR_BAD_OPTION_ID); 1390 break; 1391 } 1392 1393 cleanUp: 1394 if ( M4NO_ERROR == err ) { 1395 LOGV("VideoEditorVideoEncoder_getOption no error"); 1396 } else { 1397 LOGV("VideoEditorVideoEncoder_getOption ERROR 0x%X", err); 1398 } 1399 return err; 1400 } 1401 1402 M4OSA_ERR VideoEditorVideoEncoder_getInterface(M4ENCODER_Format format, 1403 M4ENCODER_Format* pFormat, 1404 M4ENCODER_GlobalInterface** pEncoderInterface, M4ENCODER_OpenMode mode){ 1405 M4OSA_ERR err = M4NO_ERROR; 1406 1407 // Input parameters check 1408 VIDEOEDITOR_CHECK(M4OSA_NULL != pFormat, M4ERR_PARAMETER); 1409 VIDEOEDITOR_CHECK(M4OSA_NULL != pEncoderInterface, M4ERR_PARAMETER); 1410 1411 LOGV("VideoEditorVideoEncoder_getInterface begin 0x%x 0x%x %d", pFormat, 1412 pEncoderInterface, mode); 1413 1414 SAFE_MALLOC(*pEncoderInterface, M4ENCODER_GlobalInterface, 1, 1415 "VideoEditorVideoEncoder"); 1416 1417 *pFormat = format; 1418 1419 switch( format ) { 1420 case M4ENCODER_kH263: 1421 { 1422 (*pEncoderInterface)->pFctInit = 1423 VideoEditorVideoEncoder_init_H263; 1424 break; 1425 } 1426 case M4ENCODER_kMPEG4: 1427 { 1428 (*pEncoderInterface)->pFctInit = 1429 VideoEditorVideoEncoder_init_MPEG4; 1430 break; 1431 } 1432 case M4ENCODER_kH264: 1433 { 1434 (*pEncoderInterface)->pFctInit = 1435 VideoEditorVideoEncoder_init_H264; 1436 break; 1437 } 1438 default: 1439 LOGV("VideoEditorVideoEncoder_getInterface : unsupported format %d", 1440 format); 1441 
VIDEOEDITOR_CHECK(M4OSA_FALSE, M4ERR_PARAMETER); 1442 break; 1443 } 1444 (*pEncoderInterface)->pFctOpen = VideoEditorVideoEncoder_open; 1445 (*pEncoderInterface)->pFctStart = VideoEditorVideoEncoder_start; 1446 (*pEncoderInterface)->pFctStop = VideoEditorVideoEncoder_stop; 1447 (*pEncoderInterface)->pFctPause = M4OSA_NULL; 1448 (*pEncoderInterface)->pFctResume = M4OSA_NULL; 1449 (*pEncoderInterface)->pFctClose = VideoEditorVideoEncoder_close; 1450 (*pEncoderInterface)->pFctCleanup = VideoEditorVideoEncoder_cleanup; 1451 (*pEncoderInterface)->pFctRegulBitRate = 1452 VideoEditorVideoEncoder_regulBitRate; 1453 (*pEncoderInterface)->pFctEncode = VideoEditorVideoEncoder_encode; 1454 (*pEncoderInterface)->pFctSetOption = VideoEditorVideoEncoder_setOption; 1455 (*pEncoderInterface)->pFctGetOption = VideoEditorVideoEncoder_getOption; 1456 1457 cleanUp: 1458 if( M4NO_ERROR == err ) { 1459 LOGV("VideoEditorVideoEncoder_getInterface no error"); 1460 } else { 1461 *pEncoderInterface = M4OSA_NULL; 1462 LOGV("VideoEditorVideoEncoder_getInterface ERROR 0x%X", err); 1463 } 1464 return err; 1465 } 1466 1467 extern "C" { 1468 1469 M4OSA_ERR VideoEditorVideoEncoder_getInterface_H263(M4ENCODER_Format* pFormat, 1470 M4ENCODER_GlobalInterface** pEncoderInterface, M4ENCODER_OpenMode mode){ 1471 return VideoEditorVideoEncoder_getInterface(M4ENCODER_kH263, pFormat, 1472 pEncoderInterface, mode); 1473 } 1474 1475 M4OSA_ERR VideoEditorVideoEncoder_getInterface_MPEG4(M4ENCODER_Format* pFormat, 1476 M4ENCODER_GlobalInterface** pEncoderInterface, M4ENCODER_OpenMode mode){ 1477 return VideoEditorVideoEncoder_getInterface(M4ENCODER_kMPEG4, pFormat, 1478 pEncoderInterface, mode); 1479 } 1480 1481 M4OSA_ERR VideoEditorVideoEncoder_getInterface_H264(M4ENCODER_Format* pFormat, 1482 M4ENCODER_GlobalInterface** pEncoderInterface, M4ENCODER_OpenMode mode){ 1483 return VideoEditorVideoEncoder_getInterface(M4ENCODER_kH264, pFormat, 1484 pEncoderInterface, mode); 1485 1486 } 1487 1488 } // extern "C" 1489 
1490 } // namespace android 1491