1 /* 2 * Copyright (C) 2011 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 /** 17 ************************************************************************* 18 * @file VideoEditorVideoEncoder.cpp 19 * @brief StageFright shell video encoder 20 ************************************************************************* 21 */ 22 #define LOG_NDEBUG 1 23 #define LOG_TAG "VIDEOEDITOR_VIDEOENCODER" 24 25 /******************* 26 * HEADERS * 27 *******************/ 28 #include "M4OSA_Debug.h" 29 #include "M4SYS_AccessUnit.h" 30 #include "VideoEditorVideoEncoder.h" 31 #include "VideoEditorUtils.h" 32 #include <I420ColorConverter.h> 33 34 #include "utils/Log.h" 35 #include "utils/Vector.h" 36 #include <media/stagefright/MediaSource.h> 37 #include <media/stagefright/MediaDebug.h> 38 #include <media/stagefright/MediaDefs.h> 39 #include <media/stagefright/MetaData.h> 40 #include <media/stagefright/OMXClient.h> 41 #include <media/stagefright/OMXCodec.h> 42 #include "OMX_Video.h" 43 44 /******************** 45 * DEFINITIONS * 46 ********************/ 47 48 // Force using hardware encoder 49 #define VIDEOEDITOR_FORCECODEC kHardwareCodecsOnly 50 51 #if !defined(VIDEOEDITOR_FORCECODEC) 52 #error "Cannot force DSI retrieval if codec type is not fixed" 53 #endif 54 55 /******************** 56 * SOURCE CLASS * 57 ********************/ 58 59 namespace android { 60 61 struct VideoEditorVideoEncoderSource : public MediaSource { 62 public: 63 static sp<VideoEditorVideoEncoderSource> Create( 64 const sp<MetaData> &format); 65 virtual status_t start(MetaData *params = NULL); 66 virtual status_t stop(); 67 virtual sp<MetaData> getFormat(); 68 virtual status_t read(MediaBuffer **buffer, 69 const ReadOptions *options = NULL); 70 virtual int32_t storeBuffer(MediaBuffer *buffer); 71 72 protected: 73 virtual ~VideoEditorVideoEncoderSource(); 74 75 private: 76 struct MediaBufferChain { 77 MediaBuffer* buffer; 78 MediaBufferChain* nextLink; 79 }; 80 enum State { 81 CREATED, 82 STARTED, 83 ERROR 84 }; 85 VideoEditorVideoEncoderSource(const sp<MetaData> &format); 86 87 // Don't call me 88 VideoEditorVideoEncoderSource(const VideoEditorVideoEncoderSource &); 89 VideoEditorVideoEncoderSource &operator=( 90 const VideoEditorVideoEncoderSource &); 91 92 MediaBufferChain* mFirstBufferLink; 93 MediaBufferChain* mLastBufferLink; 94 int32_t mNbBuffer; 95 bool mIsEOS; 96 State mState; 97 sp<MetaData> mEncFormat; 98 Mutex mLock; 99 Condition mBufferCond; 100 }; 101 102 sp<VideoEditorVideoEncoderSource> VideoEditorVideoEncoderSource::Create( 103 const sp<MetaData> &format) { 104 105 sp<VideoEditorVideoEncoderSource> aSource = 106 new VideoEditorVideoEncoderSource(format); 107 return aSource; 108 } 109 110 VideoEditorVideoEncoderSource::VideoEditorVideoEncoderSource( 111 const sp<MetaData> &format): 112 mFirstBufferLink(NULL), 113 mLastBufferLink(NULL), 114 mNbBuffer(0), 115 mIsEOS(false), 116 mState(CREATED), 117 mEncFormat(format) { 118 LOGV("VideoEditorVideoEncoderSource::VideoEditorVideoEncoderSource"); 119 } 120 121 VideoEditorVideoEncoderSource::~VideoEditorVideoEncoderSource() { 122 123 // Safety clean up 124 if( STARTED == mState ) { 125 stop(); 126 } 127 } 128 129 status_t VideoEditorVideoEncoderSource::start(MetaData *meta) { 130 status_t err = OK; 131 132 LOGV("VideoEditorVideoEncoderSource::start() begin"); 133 134 if( CREATED != mState ) { 135 LOGV("VideoEditorVideoEncoderSource::start: invalid state %d", mState); 136 return UNKNOWN_ERROR; 137 } 138 mState = STARTED; 139 140 LOGV("VideoEditorVideoEncoderSource::start() END (0x%x)", err); 141 return err; 142 } 143 144 status_t VideoEditorVideoEncoderSource::stop() { 145 status_t err = OK; 146 147 LOGV("VideoEditorVideoEncoderSource::stop() begin"); 148 149 if( STARTED != mState ) { 150 LOGV("VideoEditorVideoEncoderSource::stop: invalid state %d", mState); 151 return UNKNOWN_ERROR; 152 } 153 154 // Release the buffer chain 155 int32_t i = 0; 156 MediaBufferChain* tmpLink = NULL; 157 while( mFirstBufferLink ) { 158 i++; 159 tmpLink = mFirstBufferLink; 160 mFirstBufferLink = mFirstBufferLink->nextLink; 161 delete tmpLink; 162 } 163 LOGV("VideoEditorVideoEncoderSource::stop : %d buffer remained", i); 164 mFirstBufferLink = NULL; 165 mLastBufferLink = NULL; 166 167 mState = CREATED; 168 169 LOGV("VideoEditorVideoEncoderSource::stop() END (0x%x)", err); 170 return err; 171 } 172 173 sp<MetaData> VideoEditorVideoEncoderSource::getFormat() { 174 175 LOGV("VideoEditorVideoEncoderSource::getFormat"); 176 return mEncFormat; 177 } 178 179 status_t VideoEditorVideoEncoderSource::read(MediaBuffer **buffer, 180 const ReadOptions *options) { 181 Mutex::Autolock autolock(mLock); 182 MediaSource::ReadOptions readOptions; 183 status_t err = OK; 184 MediaBufferChain* tmpLink = NULL; 185 186 LOGV("VideoEditorVideoEncoderSource::read() begin"); 187 188 if ( STARTED != mState ) { 189 LOGV("VideoEditorVideoEncoderSource::read: invalid state %d", mState); 190 return UNKNOWN_ERROR; 191 } 192 193 while (mFirstBufferLink == NULL && !mIsEOS) { 194 mBufferCond.wait(mLock); 195 } 196 197 // End of stream? 198 if (mFirstBufferLink == NULL) { 199 *buffer = NULL; 200 LOGV("VideoEditorVideoEncoderSource::read : EOS"); 201 return ERROR_END_OF_STREAM; 202 } 203 204 // Get a buffer from the chain 205 *buffer = mFirstBufferLink->buffer; 206 tmpLink = mFirstBufferLink; 207 mFirstBufferLink = mFirstBufferLink->nextLink; 208 209 if ( NULL == mFirstBufferLink ) { 210 mLastBufferLink = NULL; 211 } 212 delete tmpLink; 213 mNbBuffer--; 214 215 LOGV("VideoEditorVideoEncoderSource::read() END (0x%x)", err); 216 return err; 217 } 218 219 int32_t VideoEditorVideoEncoderSource::storeBuffer(MediaBuffer *buffer) { 220 Mutex::Autolock autolock(mLock); 221 status_t err = OK; 222 223 LOGV("VideoEditorVideoEncoderSource::storeBuffer() begin"); 224 225 if( NULL == buffer ) { 226 LOGV("VideoEditorVideoEncoderSource::storeBuffer : reached EOS"); 227 mIsEOS = true; 228 } else { 229 MediaBufferChain* newLink = new MediaBufferChain; 230 newLink->buffer = buffer; 231 newLink->nextLink = NULL; 232 if( NULL != mLastBufferLink ) { 233 mLastBufferLink->nextLink = newLink; 234 } else { 235 mFirstBufferLink = newLink; 236 } 237 mLastBufferLink = newLink; 238 mNbBuffer++; 239 } 240 mBufferCond.signal(); 241 LOGV("VideoEditorVideoEncoderSource::storeBuffer() end"); 242 return mNbBuffer; 243 } 244 245 /******************** 246 * PULLER * 247 ********************/ 248 249 // Pulls media buffers from a MediaSource repeatedly. 250 // The user can then get the buffers from that list. 251 class VideoEditorVideoEncoderPuller { 252 public: 253 VideoEditorVideoEncoderPuller(sp<MediaSource> source); 254 ~VideoEditorVideoEncoderPuller(); 255 void start(); 256 void stop(); 257 MediaBuffer* getBufferBlocking(); 258 MediaBuffer* getBufferNonBlocking(); 259 void putBuffer(MediaBuffer* buffer); 260 private: 261 static int acquireThreadStart(void* arg); 262 void acquireThreadFunc(); 263 264 static int releaseThreadStart(void* arg); 265 void releaseThreadFunc(); 266 267 sp<MediaSource> mSource; 268 Vector<MediaBuffer*> mBuffers; 269 Vector<MediaBuffer*> mReleaseBuffers; 270 271 Mutex mLock; 272 Condition mUserCond; // for the user of this class 273 Condition mAcquireCond; // for the acquire thread 274 Condition mReleaseCond; // for the release thread 275 276 bool mAskToStart; // Asks the threads to start 277 bool mAskToStop; // Asks the threads to stop 278 bool mAcquireStopped; // The acquire thread has stopped 279 bool mReleaseStopped; // The release thread has stopped 280 }; 281 282 VideoEditorVideoEncoderPuller::VideoEditorVideoEncoderPuller( 283 sp<MediaSource> source) { 284 mSource = source; 285 mAskToStart = false; 286 mAskToStop = false; 287 mAcquireStopped = false; 288 mReleaseStopped = false; 289 androidCreateThread(acquireThreadStart, this); 290 androidCreateThread(releaseThreadStart, this); 291 } 292 293 VideoEditorVideoEncoderPuller::~VideoEditorVideoEncoderPuller() { 294 stop(); 295 } 296 297 void VideoEditorVideoEncoderPuller::start() { 298 Mutex::Autolock autolock(mLock); 299 mAskToStart = true; 300 mAcquireCond.signal(); 301 mReleaseCond.signal(); 302 } 303 304 void VideoEditorVideoEncoderPuller::stop() { 305 Mutex::Autolock autolock(mLock); 306 mAskToStop = true; 307 mAcquireCond.signal(); 308 mReleaseCond.signal(); 309 while (!mAcquireStopped || !mReleaseStopped) { 310 mUserCond.wait(mLock); 311 } 312 313 // Release remaining buffers 314 for (size_t i = 0; i < mBuffers.size(); i++) { 315 mBuffers.itemAt(i)->release(); 316 } 317 318 for (size_t i = 0; i < mReleaseBuffers.size(); i++) { 319 mReleaseBuffers.itemAt(i)->release(); 320 } 321 322 mBuffers.clear(); 323 mReleaseBuffers.clear(); 324 } 325 326 MediaBuffer* VideoEditorVideoEncoderPuller::getBufferNonBlocking() { 327 Mutex::Autolock autolock(mLock); 328 if (mBuffers.empty()) { 329 return NULL; 330 } else { 331 MediaBuffer* b = mBuffers.itemAt(0); 332 mBuffers.removeAt(0); 333 return b; 334 } 335 } 336 337 MediaBuffer* VideoEditorVideoEncoderPuller::getBufferBlocking() { 338 Mutex::Autolock autolock(mLock); 339 while (mBuffers.empty() && !mAcquireStopped) { 340 mUserCond.wait(mLock); 341 } 342 343 if (mBuffers.empty()) { 344 return NULL; 345 } else { 346 MediaBuffer* b = mBuffers.itemAt(0); 347 mBuffers.removeAt(0); 348 return b; 349 } 350 } 351 352 void VideoEditorVideoEncoderPuller::putBuffer(MediaBuffer* buffer) { 353 Mutex::Autolock autolock(mLock); 354 mReleaseBuffers.push(buffer); 355 mReleaseCond.signal(); 356 } 357 358 int VideoEditorVideoEncoderPuller::acquireThreadStart(void* arg) { 359 VideoEditorVideoEncoderPuller* self = (VideoEditorVideoEncoderPuller*)arg; 360 self->acquireThreadFunc(); 361 return 0; 362 } 363 364 int VideoEditorVideoEncoderPuller::releaseThreadStart(void* arg) { 365 VideoEditorVideoEncoderPuller* self = (VideoEditorVideoEncoderPuller*)arg; 366 self->releaseThreadFunc(); 367 return 0; 368 } 369 370 void VideoEditorVideoEncoderPuller::acquireThreadFunc() { 371 mLock.lock(); 372 373 // Wait for the start signal 374 while (!mAskToStart && !mAskToStop) { 375 mAcquireCond.wait(mLock); 376 } 377 378 // Loop until we are asked to stop, or there is nothing more to read 379 while (!mAskToStop) { 380 MediaBuffer* pBuffer; 381 mLock.unlock(); 382 status_t result = mSource->read(&pBuffer, NULL); 383 mLock.lock(); 384 if (result != OK) { 385 break; 386 } 387 mBuffers.push(pBuffer); 388 mUserCond.signal(); 389 } 390 391 mAcquireStopped = true; 392 mUserCond.signal(); 393 mLock.unlock(); 394 } 395 396 void VideoEditorVideoEncoderPuller::releaseThreadFunc() { 397 mLock.lock(); 398 399 // Wait for the start signal 400 while (!mAskToStart && !mAskToStop) { 401 mReleaseCond.wait(mLock); 402 } 403 404 // Loop until we are asked to stop 405 while (1) { 406 if (mReleaseBuffers.empty()) { 407 if (mAskToStop) { 408 break; 409 } else { 410 mReleaseCond.wait(mLock); 411 continue; 412 } 413 } 414 MediaBuffer* pBuffer = mReleaseBuffers.itemAt(0); 415 mReleaseBuffers.removeAt(0); 416 mLock.unlock(); 417 pBuffer->release(); 418 mLock.lock(); 419 } 420 421 mReleaseStopped = true; 422 mUserCond.signal(); 423 mLock.unlock(); 424 } 425 426 /** 427 ****************************************************************************** 428 * structure VideoEditorVideoEncoder_Context 429 * @brief This structure defines the context of the StageFright video encoder 430 * shell 431 ****************************************************************************** 432 */ 433 typedef enum { 434 CREATED = 0x1, 435 OPENED = 0x2, 436 STARTED = 0x4, 437 BUFFERING = 0x8, 438 READING = 0x10 439 } VideoEditorVideoEncoder_State; 440 441 typedef struct { 442 VideoEditorVideoEncoder_State mState; 443 M4ENCODER_Format mFormat; 444 M4WRITER_DataInterface* mWriterDataInterface; 445 M4VPP_apply_fct* mPreProcFunction; 446 M4VPP_Context mPreProcContext; 447 M4SYS_AccessUnit* mAccessUnit; 448 M4ENCODER_Params* mCodecParams; 449 M4ENCODER_Header mHeader; 450 H264MCS_ProcessEncodedNALU_fct* mH264NALUPostProcessFct; 451 M4OSA_Context mH264NALUPostProcessCtx; 452 M4OSA_UInt32 mLastCTS; 453 sp<VideoEditorVideoEncoderSource> mEncoderSource; 454 OMXClient mClient; 455 sp<MediaSource> mEncoder; 456 OMX_COLOR_FORMATTYPE mEncoderColorFormat; 457 VideoEditorVideoEncoderPuller* mPuller; 458 I420ColorConverter* mI420ColorConverter; 459 460 uint32_t mNbInputFrames; 461 double mFirstInputCts; 462 double mLastInputCts; 463 uint32_t mNbOutputFrames; 464 int64_t mFirstOutputCts; 465 int64_t mLastOutputCts; 466 467 } VideoEditorVideoEncoder_Context; 468 469 /******************** 470 * TOOLS * 471 ********************/ 472 473 M4OSA_ERR VideoEditorVideoEncoder_getDSI(M4ENCODER_Context pContext, 474 sp<MetaData> metaData) { 475 M4OSA_ERR err = M4NO_ERROR; 476 VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL; 477 status_t result = OK; 478 int32_t nbBuffer = 0; 479 int32_t stride = 0; 480 int32_t height = 0; 481 int32_t framerate = 0; 482 int32_t isCodecConfig = 0; 483 size_t size = 0; 484 uint32_t codecFlags = 0; 485 MediaBuffer* inputBuffer = NULL; 486 MediaBuffer* outputBuffer = NULL; 487 sp<VideoEditorVideoEncoderSource> encoderSource = NULL; 488 sp<MediaSource> encoder = NULL;; 489 OMXClient client; 490 491 LOGV("VideoEditorVideoEncoder_getDSI begin"); 492 // Input parameters check 493 VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); 494 VIDEOEDITOR_CHECK(M4OSA_NULL != metaData.get(), M4ERR_PARAMETER); 495 496 pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext; 497 VIDEOEDITOR_CHECK(CREATED == pEncoderContext->mState, M4ERR_STATE); 498 499 // Create the encoder source 500 encoderSource = VideoEditorVideoEncoderSource::Create(metaData); 501 VIDEOEDITOR_CHECK(NULL != encoderSource.get(), M4ERR_STATE); 502 503 // Connect to the OMX client 504 result = client.connect(); 505 VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE); 506 507 // Create the OMX codec 508 // VIDEOEDITOR_FORCECODEC MUST be defined here 509 codecFlags |= OMXCodec::VIDEOEDITOR_FORCECODEC; 510 encoder = OMXCodec::Create(client.interface(), metaData, true, 511 encoderSource, NULL, codecFlags); 512 VIDEOEDITOR_CHECK(NULL != encoder.get(), M4ERR_STATE); 513 514 /** 515 * Send fake frames and retrieve the DSI 516 */ 517 // Send a fake frame to the source 518 metaData->findInt32(kKeyStride, &stride); 519 metaData->findInt32(kKeyHeight, &height); 520 metaData->findInt32(kKeySampleRate, &framerate); 521 size = (size_t)(stride*height*3)/2; 522 inputBuffer = new MediaBuffer(size); 523 inputBuffer->meta_data()->setInt64(kKeyTime, 0); 524 nbBuffer = encoderSource->storeBuffer(inputBuffer); 525 encoderSource->storeBuffer(NULL); // Signal EOS 526 527 // Call read once to get the DSI 528 result = encoder->start();; 529 VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE); 530 result = encoder->read(&outputBuffer, NULL); 531 VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE); 532 VIDEOEDITOR_CHECK(outputBuffer->meta_data()->findInt32( 533 kKeyIsCodecConfig, &isCodecConfig) && isCodecConfig, M4ERR_STATE); 534 535 VIDEOEDITOR_CHECK(M4OSA_NULL == pEncoderContext->mHeader.pBuf, M4ERR_STATE); 536 if ( M4ENCODER_kH264 == pEncoderContext->mFormat ) { 537 // For H264, format the DSI 538 result = buildAVCCodecSpecificData( 539 (uint8_t**)(&(pEncoderContext->mHeader.pBuf)), 540 (size_t*)(&(pEncoderContext->mHeader.Size)), 541 (const uint8_t*)outputBuffer->data() + outputBuffer->range_offset(), 542 outputBuffer->range_length(), encoder->getFormat().get()); 543 outputBuffer->release(); 544 VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE); 545 } else { 546 // For MPEG4, just copy the DSI 547 pEncoderContext->mHeader.Size = 548 (M4OSA_UInt32)outputBuffer->range_length(); 549 SAFE_MALLOC(pEncoderContext->mHeader.pBuf, M4OSA_Int8, 550 pEncoderContext->mHeader.Size, "Encoder header"); 551 memcpy((void *)pEncoderContext->mHeader.pBuf, 552 (void *)((M4OSA_MemAddr8)(outputBuffer->data())+outputBuffer->range_offset()), 553 pEncoderContext->mHeader.Size); 554 outputBuffer->release(); 555 } 556 557 result = encoder->stop(); 558 VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE); 559 560 cleanUp: 561 // Destroy the graph 562 if ( encoder != NULL ) { encoder.clear(); } 563 client.disconnect(); 564 if ( encoderSource != NULL ) { encoderSource.clear(); } 565 if ( M4NO_ERROR == err ) { 566 LOGV("VideoEditorVideoEncoder_getDSI no error"); 567 } else { 568 LOGV("VideoEditorVideoEncoder_getDSI ERROR 0x%X", err); 569 } 570 LOGV("VideoEditorVideoEncoder_getDSI end"); 571 return err; 572 } 573 /******************** 574 * ENGINE INTERFACE * 575 ********************/ 576 577 M4OSA_ERR VideoEditorVideoEncoder_cleanup(M4ENCODER_Context pContext) { 578 M4OSA_ERR err = M4NO_ERROR; 579 VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL; 580 581 LOGV("VideoEditorVideoEncoder_cleanup begin"); 582 // Input parameters check 583 VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); 584 585 pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext; 586 VIDEOEDITOR_CHECK(CREATED == pEncoderContext->mState, M4ERR_STATE); 587 588 // Release memory 589 SAFE_FREE(pEncoderContext->mHeader.pBuf); 590 SAFE_FREE(pEncoderContext); 591 pContext = M4OSA_NULL; 592 593 cleanUp: 594 if ( M4NO_ERROR == err ) { 595 LOGV("VideoEditorVideoEncoder_cleanup no error"); 596 } else { 597 LOGV("VideoEditorVideoEncoder_cleanup ERROR 0x%X", err); 598 } 599 LOGV("VideoEditorVideoEncoder_cleanup end"); 600 return err; 601 } 602 603 M4OSA_ERR VideoEditorVideoEncoder_init(M4ENCODER_Format format, 604 M4ENCODER_Context* pContext, 605 M4WRITER_DataInterface* pWriterDataInterface, 606 M4VPP_apply_fct* pVPPfct, M4VPP_Context pVPPctxt, 607 M4OSA_Void* pExternalAPI, M4OSA_Void* pUserData) { 608 609 M4OSA_ERR err = M4NO_ERROR; 610 VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL; 611 int encoderInput = OMX_COLOR_FormatYUV420Planar; 612 613 LOGV("VideoEditorVideoEncoder_init begin: format %d", format); 614 // Input parameters check 615 VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); 616 VIDEOEDITOR_CHECK(M4OSA_NULL != pWriterDataInterface, M4ERR_PARAMETER); 617 VIDEOEDITOR_CHECK(M4OSA_NULL != pVPPfct, M4ERR_PARAMETER); 618 VIDEOEDITOR_CHECK(M4OSA_NULL != pVPPctxt, M4ERR_PARAMETER); 619 620 // Context allocation & initialization 621 SAFE_MALLOC(pEncoderContext, VideoEditorVideoEncoder_Context, 1, 622 "VideoEditorVideoEncoder"); 623 pEncoderContext->mState = CREATED; 624 pEncoderContext->mFormat = format; 625 pEncoderContext->mWriterDataInterface = pWriterDataInterface; 626 pEncoderContext->mPreProcFunction = pVPPfct; 627 pEncoderContext->mPreProcContext = pVPPctxt; 628 pEncoderContext->mPuller = NULL; 629 630 // Get color converter and determine encoder input format 631 pEncoderContext->mI420ColorConverter = new I420ColorConverter; 632 if (pEncoderContext->mI420ColorConverter->isLoaded()) { 633 encoderInput = pEncoderContext->mI420ColorConverter->getEncoderInputFormat(); 634 } 635 if (encoderInput == OMX_COLOR_FormatYUV420Planar) { 636 delete pEncoderContext->mI420ColorConverter; 637 pEncoderContext->mI420ColorConverter = NULL; 638 } 639 pEncoderContext->mEncoderColorFormat = (OMX_COLOR_FORMATTYPE)encoderInput; 640 LOGI("encoder input format = 0x%X\n", encoderInput); 641 642 *pContext = pEncoderContext; 643 644 cleanUp: 645 if ( M4NO_ERROR == err ) { 646 LOGV("VideoEditorVideoEncoder_init no error"); 647 } else { 648 VideoEditorVideoEncoder_cleanup(pEncoderContext); 649 *pContext = M4OSA_NULL; 650 LOGV("VideoEditorVideoEncoder_init ERROR 0x%X", err); 651 } 652 LOGV("VideoEditorVideoEncoder_init end"); 653 return err; 654 } 655 656 M4OSA_ERR VideoEditorVideoEncoder_init_H263(M4ENCODER_Context* pContext, 657 M4WRITER_DataInterface* pWriterDataInterface, M4VPP_apply_fct* pVPPfct, 658 M4VPP_Context pVPPctxt, M4OSA_Void* pExternalAPI, M4OSA_Void* pUserData) 659 { 660 661 return VideoEditorVideoEncoder_init(M4ENCODER_kH263, pContext, 662 pWriterDataInterface, pVPPfct, pVPPctxt, pExternalAPI, pUserData); 663 } 664 665 666 M4OSA_ERR VideoEditorVideoEncoder_init_MPEG4(M4ENCODER_Context* pContext, 667 M4WRITER_DataInterface* pWriterDataInterface, M4VPP_apply_fct* pVPPfct, 668 M4VPP_Context pVPPctxt, M4OSA_Void* pExternalAPI, M4OSA_Void* pUserData) 669 { 670 671 return VideoEditorVideoEncoder_init(M4ENCODER_kMPEG4, pContext, 672 pWriterDataInterface, pVPPfct, pVPPctxt, pExternalAPI, pUserData); 673 } 674 675 676 M4OSA_ERR VideoEditorVideoEncoder_init_H264(M4ENCODER_Context* pContext, 677 M4WRITER_DataInterface* pWriterDataInterface, M4VPP_apply_fct* pVPPfct, 678 M4VPP_Context pVPPctxt, M4OSA_Void* pExternalAPI, M4OSA_Void* pUserData) 679 { 680 681 return VideoEditorVideoEncoder_init(M4ENCODER_kH264, pContext, 682 pWriterDataInterface, pVPPfct, pVPPctxt, pExternalAPI, pUserData); 683 } 684 685 M4OSA_ERR VideoEditorVideoEncoder_close(M4ENCODER_Context pContext) { 686 M4OSA_ERR err = M4NO_ERROR; 687 VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL; 688 689 LOGV("VideoEditorVideoEncoder_close begin"); 690 // Input parameters check 691 VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); 692 693 pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext; 694 VIDEOEDITOR_CHECK(OPENED == pEncoderContext->mState, M4ERR_STATE); 695 696 // Release memory 697 SAFE_FREE(pEncoderContext->mCodecParams); 698 699 // Destroy the graph 700 pEncoderContext->mEncoder.clear(); 701 pEncoderContext->mClient.disconnect(); 702 pEncoderContext->mEncoderSource.clear(); 703 704 delete pEncoderContext->mPuller; 705 pEncoderContext->mPuller = NULL; 706 707 delete pEncoderContext->mI420ColorConverter; 708 pEncoderContext->mI420ColorConverter = NULL; 709 710 // Set the new state 711 pEncoderContext->mState = CREATED; 712 713 cleanUp: 714 if( M4NO_ERROR == err ) { 715 LOGV("VideoEditorVideoEncoder_close no error"); 716 } else { 717 LOGV("VideoEditorVideoEncoder_close ERROR 0x%X", err); 718 } 719 LOGV("VideoEditorVideoEncoder_close end"); 720 return err; 721 } 722 723 724 M4OSA_ERR VideoEditorVideoEncoder_open(M4ENCODER_Context pContext, 725 M4SYS_AccessUnit* pAU, M4OSA_Void* pParams) { 726 M4OSA_ERR err = M4NO_ERROR; 727 VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL; 728 M4ENCODER_Params* pCodecParams = M4OSA_NULL; 729 status_t result = OK; 730 sp<MetaData> encoderMetadata = NULL; 731 const char* mime = NULL; 732 int32_t iProfile = 0; 733 int32_t iLevel = 0; 734 735 int32_t iFrameRate = 0; 736 uint32_t codecFlags = 0; 737 738 LOGV(">>> VideoEditorVideoEncoder_open begin"); 739 // Input parameters check 740 VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); 741 VIDEOEDITOR_CHECK(M4OSA_NULL != pAU, M4ERR_PARAMETER); 742 VIDEOEDITOR_CHECK(M4OSA_NULL != pParams, M4ERR_PARAMETER); 743 744 pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext; 745 pCodecParams = (M4ENCODER_Params*)pParams; 746 VIDEOEDITOR_CHECK(CREATED == pEncoderContext->mState, M4ERR_STATE); 747 748 // Context initialization 749 pEncoderContext->mAccessUnit = pAU; 750 751 // Allocate & initialize the encoding parameters 752 SAFE_MALLOC(pEncoderContext->mCodecParams, M4ENCODER_Params, 1, 753 "VideoEditorVideoEncoder"); 754 755 756 pEncoderContext->mCodecParams->InputFormat = pCodecParams->InputFormat; 757 pEncoderContext->mCodecParams->InputFrameWidth = 758 pCodecParams->InputFrameWidth; 759 pEncoderContext->mCodecParams->InputFrameHeight = 760 pCodecParams->InputFrameHeight; 761 pEncoderContext->mCodecParams->FrameWidth = pCodecParams->FrameWidth; 762 pEncoderContext->mCodecParams->FrameHeight = pCodecParams->FrameHeight; 763 pEncoderContext->mCodecParams->Bitrate = pCodecParams->Bitrate; 764 pEncoderContext->mCodecParams->FrameRate = pCodecParams->FrameRate; 765 pEncoderContext->mCodecParams->Format = pCodecParams->Format; 766 pEncoderContext->mCodecParams->videoProfile = pCodecParams->videoProfile; 767 pEncoderContext->mCodecParams->videoLevel= pCodecParams->videoLevel; 768 769 // Check output format consistency and resolution 770 VIDEOEDITOR_CHECK( 771 pEncoderContext->mCodecParams->Format == pEncoderContext->mFormat, 772 M4ERR_PARAMETER); 773 VIDEOEDITOR_CHECK(0 == pEncoderContext->mCodecParams->FrameWidth % 16, 774 M4ERR_PARAMETER); 775 VIDEOEDITOR_CHECK(0 == pEncoderContext->mCodecParams->FrameHeight % 16, 776 M4ERR_PARAMETER); 777 778 /** 779 * StageFright graph building 780 */ 781 782 // Create the meta data for the encoder 783 encoderMetadata = new MetaData; 784 switch( pEncoderContext->mCodecParams->Format ) { 785 case M4ENCODER_kH263: 786 mime = MEDIA_MIMETYPE_VIDEO_H263; 787 break; 788 case M4ENCODER_kMPEG4: 789 mime = MEDIA_MIMETYPE_VIDEO_MPEG4; 790 break; 791 case M4ENCODER_kH264: 792 mime = MEDIA_MIMETYPE_VIDEO_AVC; 793 break; 794 default: 795 VIDEOEDITOR_CHECK(!"VideoEncoder_open : incorrect input format", 796 M4ERR_PARAMETER); 797 break; 798 } 799 iProfile = pEncoderContext->mCodecParams->videoProfile; 800 iLevel = pEncoderContext->mCodecParams->videoLevel; 801 LOGV("Encoder mime %s profile %d, level %d", 802 mime,iProfile, iLevel); 803 LOGV("Encoder w %d, h %d, bitrate %d, fps %d", 804 pEncoderContext->mCodecParams->FrameWidth, 805 pEncoderContext->mCodecParams->FrameHeight, 806 pEncoderContext->mCodecParams->Bitrate, 807 pEncoderContext->mCodecParams->FrameRate); 808 CHECK(iProfile != 0x7fffffff); 809 CHECK(iLevel != 0x7fffffff); 810 811 encoderMetadata->setCString(kKeyMIMEType, mime); 812 encoderMetadata->setInt32(kKeyVideoProfile, iProfile); 813 //FIXME: 814 // Temp: Do not set the level for Mpeg4 / H.263 Enc 815 // as OMX.Nvidia.mp4.encoder and OMX.Nvidia.h263.encoder 816 // return 0x80001019 817 if (pEncoderContext->mCodecParams->Format == M4ENCODER_kH264) { 818 encoderMetadata->setInt32(kKeyVideoLevel, iLevel); 819 } 820 encoderMetadata->setInt32(kKeyWidth, 821 (int32_t)pEncoderContext->mCodecParams->FrameWidth); 822 encoderMetadata->setInt32(kKeyStride, 823 (int32_t)pEncoderContext->mCodecParams->FrameWidth); 824 encoderMetadata->setInt32(kKeyHeight, 825 (int32_t)pEncoderContext->mCodecParams->FrameHeight); 826 encoderMetadata->setInt32(kKeySliceHeight, 827 (int32_t)pEncoderContext->mCodecParams->FrameHeight); 828 829 switch( pEncoderContext->mCodecParams->FrameRate ) { 830 case M4ENCODER_k5_FPS: iFrameRate = 5; break; 831 case M4ENCODER_k7_5_FPS: iFrameRate = 8; break; 832 case M4ENCODER_k10_FPS: iFrameRate = 10; break; 833 case M4ENCODER_k12_5_FPS: iFrameRate = 13; break; 834 case M4ENCODER_k15_FPS: iFrameRate = 15; break; 835 case M4ENCODER_k20_FPS: iFrameRate = 20; break; 836 case M4ENCODER_k25_FPS: iFrameRate = 25; break; 837 case M4ENCODER_k30_FPS: iFrameRate = 30; break; 838 case M4ENCODER_kVARIABLE_FPS: 839 iFrameRate = 30; 840 LOGI("Frame rate set to M4ENCODER_kVARIABLE_FPS: set to 30"); 841 break; 842 case M4ENCODER_kUSE_TIMESCALE: 843 iFrameRate = 30; 844 LOGI("Frame rate set to M4ENCODER_kUSE_TIMESCALE: set to 30"); 845 break; 846 847 default: 848 VIDEOEDITOR_CHECK(!"VideoEncoder_open:incorrect framerate", 849 M4ERR_STATE); 850 break; 851 } 852 encoderMetadata->setInt32(kKeyFrameRate, iFrameRate); 853 encoderMetadata->setInt32(kKeyBitRate, 854 (int32_t)pEncoderContext->mCodecParams->Bitrate); 855 encoderMetadata->setInt32(kKeyIFramesInterval, 1); 856 857 encoderMetadata->setInt32(kKeyColorFormat, 858 pEncoderContext->mEncoderColorFormat); 859 860 if (pEncoderContext->mCodecParams->Format != M4ENCODER_kH263) { 861 // Get the encoder DSI 862 err = VideoEditorVideoEncoder_getDSI(pEncoderContext, encoderMetadata); 863 VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); 864 } 865 866 // Create the encoder source 867 pEncoderContext->mEncoderSource = VideoEditorVideoEncoderSource::Create( 868 encoderMetadata); 869 VIDEOEDITOR_CHECK( 870 NULL != pEncoderContext->mEncoderSource.get(), M4ERR_STATE); 871 872 // Connect to the OMX client 873 result = pEncoderContext->mClient.connect(); 874 VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE); 875 876 // Create the OMX codec 877 #ifdef VIDEOEDITOR_FORCECODEC 878 codecFlags |= OMXCodec::VIDEOEDITOR_FORCECODEC; 879 #endif /* VIDEOEDITOR_FORCECODEC */ 880 pEncoderContext->mEncoder = OMXCodec::Create( 881 pEncoderContext->mClient.interface(), encoderMetadata, true, 882 pEncoderContext->mEncoderSource, NULL, codecFlags); 883 VIDEOEDITOR_CHECK(NULL != pEncoderContext->mEncoder.get(), M4ERR_STATE); 884 LOGV("VideoEditorVideoEncoder_open : DONE"); 885 pEncoderContext->mPuller = new VideoEditorVideoEncoderPuller( 886 pEncoderContext->mEncoder); 887 888 // Set the new state 889 pEncoderContext->mState = OPENED; 890 891 cleanUp: 892 if( M4NO_ERROR == err ) { 893 LOGV("VideoEditorVideoEncoder_open no error"); 894 } else { 895 VideoEditorVideoEncoder_close(pEncoderContext); 896 LOGV("VideoEditorVideoEncoder_open ERROR 0x%X", err); 897 } 898 LOGV("VideoEditorVideoEncoder_open end"); 899 return err; 900 } 901 902 M4OSA_ERR VideoEditorVideoEncoder_processInputBuffer( 903 M4ENCODER_Context pContext, M4OSA_Double Cts, 904 M4OSA_Bool bReachedEOS) { 905 M4OSA_ERR err = M4NO_ERROR; 906 VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL; 907 M4VIFI_ImagePlane pOutPlane[3]; 908 MediaBuffer* buffer = NULL; 909 int32_t nbBuffer = 0; 910 911 LOGV("VideoEditorVideoEncoder_processInputBuffer begin: cts %f", Cts); 912 // Input parameters check 913 VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); 914 915 pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext; 916 pOutPlane[0].pac_data = M4OSA_NULL; 917 pOutPlane[1].pac_data = M4OSA_NULL; 918 pOutPlane[2].pac_data = M4OSA_NULL; 919 920 if ( M4OSA_FALSE == bReachedEOS ) { 921 M4OSA_UInt32 sizeY = pEncoderContext->mCodecParams->FrameWidth * 922 pEncoderContext->mCodecParams->FrameHeight; 923 M4OSA_UInt32 sizeU = sizeY >> 2; 924 M4OSA_UInt32 size = sizeY + 2*sizeU; 925 M4OSA_UInt8* pData = M4OSA_NULL; 926 buffer = new MediaBuffer((size_t)size); 927 pData = (M4OSA_UInt8*)buffer->data() + buffer->range_offset(); 928 929 // Prepare the output image for pre-processing 930 pOutPlane[0].u_width = pEncoderContext->mCodecParams->FrameWidth; 931 pOutPlane[0].u_height = pEncoderContext->mCodecParams->FrameHeight; 932 pOutPlane[0].u_topleft = 0; 933 pOutPlane[0].u_stride = pOutPlane[0].u_width; 934 pOutPlane[1].u_width = pOutPlane[0].u_width/2; 935 pOutPlane[1].u_height = pOutPlane[0].u_height/2; 936 pOutPlane[1].u_topleft = 0; 937 pOutPlane[1].u_stride = pOutPlane[0].u_stride/2; 938 pOutPlane[2].u_width = pOutPlane[1].u_width; 939 pOutPlane[2].u_height = pOutPlane[1].u_height; 940 pOutPlane[2].u_topleft = 0; 941 pOutPlane[2].u_stride = pOutPlane[1].u_stride; 942 943 pOutPlane[0].pac_data = pData; 944 pOutPlane[1].pac_data = pData + sizeY; 945 pOutPlane[2].pac_data = pData + sizeY + sizeU; 946 947 // Apply pre-processing 948 err = pEncoderContext->mPreProcFunction( 949 pEncoderContext->mPreProcContext, M4OSA_NULL, pOutPlane); 950 VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); 951 952 // Convert MediaBuffer to the encoder input format if necessary 953 if (pEncoderContext->mI420ColorConverter) { 954 I420ColorConverter* converter = pEncoderContext->mI420ColorConverter; 955 int actualWidth = pEncoderContext->mCodecParams->FrameWidth; 956 int actualHeight = pEncoderContext->mCodecParams->FrameHeight; 957 958 int encoderWidth, encoderHeight; 959 ARect encoderRect; 960 int encoderBufferSize; 961 962 if (converter->getEncoderInputBufferInfo( 963 actualWidth, actualHeight, 964 &encoderWidth, &encoderHeight, 965 &encoderRect, &encoderBufferSize) == 0) { 966 967 MediaBuffer* newBuffer = new MediaBuffer(encoderBufferSize); 968 969 if (converter->convertI420ToEncoderInput( 970 pData, // srcBits 971 actualWidth, actualHeight, 972 encoderWidth, encoderHeight, 973 encoderRect, 974 (uint8_t*)newBuffer->data() + newBuffer->range_offset()) < 0) { 975 LOGE("convertI420ToEncoderInput failed"); 976 } 977 978 // switch to new buffer 979 buffer->release(); 980 buffer = newBuffer; 981 } 982 } 983 984 // Set the metadata 985 buffer->meta_data()->setInt64(kKeyTime, (int64_t)(Cts*1000)); 986 } 987 988 // Push the buffer to the source, a NULL buffer, notifies the source of EOS 989 nbBuffer = pEncoderContext->mEncoderSource->storeBuffer(buffer); 990 991 cleanUp: 992 if ( M4NO_ERROR == err ) { 993 LOGV("VideoEditorVideoEncoder_processInputBuffer error 0x%X", err); 994 } else { 995 if( NULL != buffer ) { 996 buffer->release(); 997 } 998 LOGV("VideoEditorVideoEncoder_processInputBuffer ERROR 0x%X", err); 999 } 1000 LOGV("VideoEditorVideoEncoder_processInputBuffer end"); 1001 return err; 1002 } 1003 1004 M4OSA_ERR VideoEditorVideoEncoder_processOutputBuffer( 1005 M4ENCODER_Context pContext, MediaBuffer* buffer) { 1006 M4OSA_ERR err = M4NO_ERROR; 1007 VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL; 1008 M4OSA_UInt32 Cts = 0; 1009 int32_t i32Tmp = 0; 1010 int64_t i64Tmp = 0; 1011 status_t result = OK; 1012 1013 LOGV("VideoEditorVideoEncoder_processOutputBuffer begin"); 1014 // Input parameters check 1015 VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); 1016 VIDEOEDITOR_CHECK(M4OSA_NULL != buffer, M4ERR_PARAMETER); 1017 1018 pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext; 1019 1020 // Process the returned AU 1021 if ( 0 == buffer->range_length() ) { 1022 // Encoder has no data yet, nothing unusual 1023 LOGV("VideoEditorVideoEncoder_processOutputBuffer : buffer is empty"); 1024 goto cleanUp; 1025 } 1026 VIDEOEDITOR_CHECK(0 == ((M4OSA_UInt32)buffer->data())%4, M4ERR_PARAMETER); 1027 VIDEOEDITOR_CHECK(buffer->meta_data().get(), M4ERR_PARAMETER); 1028 if ( buffer->meta_data()->findInt32(kKeyIsCodecConfig, &i32Tmp) && i32Tmp ){ 1029 { // Display the DSI 1030 LOGV("VideoEditorVideoEncoder_processOutputBuffer DSI %d", 1031 buffer->range_length()); 1032 uint8_t* tmp = (uint8_t*)(buffer->data()); 1033 for( uint32_t i=0; i<buffer->range_length(); i++ ) { 1034 LOGV("DSI [%d] %.2X", i, tmp[i]); 1035 } 1036 } 1037 } else { 1038 // Check the CTS 1039 VIDEOEDITOR_CHECK(buffer->meta_data()->findInt64(kKeyTime, &i64Tmp), 1040 M4ERR_STATE); 1041 1042 pEncoderContext->mNbOutputFrames++; 1043 if ( 0 > pEncoderContext->mFirstOutputCts ) { 1044 pEncoderContext->mFirstOutputCts = i64Tmp; 1045 } 1046 pEncoderContext->mLastOutputCts = i64Tmp; 1047 1048 Cts = (M4OSA_Int32)(i64Tmp/1000); 1049 LOGV("[TS_CHECK] VI/ENC WRITE frame %d @ %lld -> %d (last %d)", 1050 pEncoderContext->mNbOutputFrames, i64Tmp, Cts, 1051 pEncoderContext->mLastCTS); 1052 if ( Cts < pEncoderContext->mLastCTS ) { 1053 LOGV("VideoEncoder_processOutputBuffer WARNING : Cts is going " 1054 "backwards %d < %d", Cts, pEncoderContext->mLastCTS); 1055 goto cleanUp; 1056 } 1057 LOGV("VideoEditorVideoEncoder_processOutputBuffer : %d %d", 1058 Cts, pEncoderContext->mLastCTS); 1059 1060 // Retrieve the AU container 1061 err = pEncoderContext->mWriterDataInterface->pStartAU( 1062 pEncoderContext->mWriterDataInterface->pWriterContext, 1063 pEncoderContext->mAccessUnit->stream->streamID, 1064 pEncoderContext->mAccessUnit); 1065 VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); 1066 1067 // Format the AU 1068 VIDEOEDITOR_CHECK( 1069 buffer->range_length() <= pEncoderContext->mAccessUnit->size, 1070 M4ERR_PARAMETER); 1071 // Remove H264 AU start code 1072 if ( M4ENCODER_kH264 == pEncoderContext->mFormat ) { 1073 if (!memcmp((const uint8_t *)buffer->data() + \ 1074 buffer->range_offset(), "\x00\x00\x00\x01", 4) ) { 1075 buffer->set_range(buffer->range_offset() + 4, 1076 buffer->range_length() - 4); 1077 } 1078 } 1079 1080 if ( (M4ENCODER_kH264 == pEncoderContext->mFormat) && 1081 (M4OSA_NULL != pEncoderContext->mH264NALUPostProcessFct) ) { 1082 // H264 trimming case, NALU post processing is needed 1083 M4OSA_Int32 outputSize = pEncoderContext->mAccessUnit->size; 1084 err = pEncoderContext->mH264NALUPostProcessFct( 1085 pEncoderContext->mH264NALUPostProcessCtx, 1086 (M4OSA_UInt8*)buffer->data()+buffer->range_offset(), 1087 buffer->range_length(), 1088 (M4OSA_UInt8*)pEncoderContext->mAccessUnit->dataAddress, 1089 &outputSize); 1090 VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); 1091 pEncoderContext->mAccessUnit->size = (M4OSA_UInt32)outputSize; 1092 } else { 1093 // The AU can just be copied 1094 memcpy((void *)pEncoderContext->mAccessUnit->\ 1095 dataAddress, (void *)((M4OSA_MemAddr8)(buffer->data())+buffer->\ 1096 range_offset()), buffer->range_length()); 1097 pEncoderContext->mAccessUnit->size = 1098 (M4OSA_UInt32)buffer->range_length(); 1099 } 1100 1101 if ( buffer->meta_data()->findInt32(kKeyIsSyncFrame,&i32Tmp) && i32Tmp){ 1102 pEncoderContext->mAccessUnit->attribute = AU_RAP; 1103 } else { 1104 pEncoderContext->mAccessUnit->attribute = AU_P_Frame; 1105 } 1106 pEncoderContext->mLastCTS = Cts; 1107 pEncoderContext->mAccessUnit->CTS = Cts; 1108 pEncoderContext->mAccessUnit->DTS = Cts; 1109 1110 LOGV("VideoEditorVideoEncoder_processOutputBuffer: AU @ 0x%X 0x%X %d %d", 1111 pEncoderContext->mAccessUnit->dataAddress, 1112 *pEncoderContext->mAccessUnit->dataAddress, 1113 pEncoderContext->mAccessUnit->size, 1114 pEncoderContext->mAccessUnit->CTS); 1115 1116 // Write the AU 1117 err = pEncoderContext->mWriterDataInterface->pProcessAU( 1118 pEncoderContext->mWriterDataInterface->pWriterContext, 1119 pEncoderContext->mAccessUnit->stream->streamID, 1120 pEncoderContext->mAccessUnit); 1121 VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); 1122 } 1123 1124 cleanUp: 1125 if( M4NO_ERROR == err ) { 1126 LOGV("VideoEditorVideoEncoder_processOutputBuffer no error"); 1127 } else { 1128 SAFE_FREE(pEncoderContext->mHeader.pBuf); 1129 pEncoderContext->mHeader.Size = 0; 1130 LOGV("VideoEditorVideoEncoder_processOutputBuffer ERROR 0x%X", err); 1131 } 1132 LOGV("VideoEditorVideoEncoder_processOutputBuffer end"); 1133 return err; 1134 } 1135 1136 M4OSA_ERR VideoEditorVideoEncoder_encode(M4ENCODER_Context pContext, 1137 M4VIFI_ImagePlane* pInPlane, M4OSA_Double Cts, 1138 M4ENCODER_FrameMode FrameMode) { 1139 M4OSA_ERR err = M4NO_ERROR; 1140 VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL; 1141 status_t result = OK; 1142 MediaBuffer* outputBuffer = NULL; 1143 1144 LOGV("VideoEditorVideoEncoder_encode 0x%X %f %d", pInPlane, Cts, FrameMode); 1145 VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); 1146 1147 pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext; 1148 if ( STARTED == pEncoderContext->mState ) { 1149 pEncoderContext->mState = BUFFERING; 1150 } 1151 VIDEOEDITOR_CHECK( 1152 (BUFFERING | READING) & pEncoderContext->mState, M4ERR_STATE); 1153 1154 pEncoderContext->mNbInputFrames++; 1155 if ( 0 > pEncoderContext->mFirstInputCts ) { 1156 pEncoderContext->mFirstInputCts = Cts; 1157 } 1158 pEncoderContext->mLastInputCts = Cts; 1159 1160 LOGV("VideoEditorVideoEncoder_encode 0x%X %d %f (%d)", pInPlane, FrameMode, 1161 Cts, pEncoderContext->mLastCTS); 1162 1163 // Push the input buffer to the encoder source 1164 err = VideoEditorVideoEncoder_processInputBuffer(pEncoderContext, Cts, 1165 M4OSA_FALSE); 1166 VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); 1167 1168 // Notify the source in case of EOS 1169 if ( M4ENCODER_kLastFrame == FrameMode ) { 1170 err = VideoEditorVideoEncoder_processInputBuffer( 1171 pEncoderContext, 0, M4OSA_TRUE); 1172 VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); 1173 } 1174 1175 if ( BUFFERING == pEncoderContext->mState ) { 1176 // Prefetch is complete, start reading 1177 pEncoderContext->mState = READING; 1178 } 1179 // Read 1180 while (1) { 1181 MediaBuffer *outputBuffer = 1182 pEncoderContext->mPuller->getBufferNonBlocking(); 1183 1184 if (outputBuffer == NULL) break; 1185 1186 // Provide the encoded AU to the writer 1187 err = VideoEditorVideoEncoder_processOutputBuffer(pEncoderContext, 1188 outputBuffer); 1189 VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); 1190 1191 pEncoderContext->mPuller->putBuffer(outputBuffer); 1192 } 1193 1194 cleanUp: 1195 if( M4NO_ERROR == err ) { 1196 LOGV("VideoEditorVideoEncoder_encode no error"); 1197 } else { 1198 LOGV("VideoEditorVideoEncoder_encode ERROR 0x%X", err); 1199 } 1200 LOGV("VideoEditorVideoEncoder_encode end"); 1201 return err; 1202 } 1203 1204 M4OSA_ERR VideoEditorVideoEncoder_start(M4ENCODER_Context pContext) { 1205 M4OSA_ERR err = M4NO_ERROR; 1206 VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL; 1207 status_t result = OK; 1208 1209 LOGV("VideoEditorVideoEncoder_start begin"); 1210 // Input parameters check 1211 VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); 1212 1213 pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext; 1214 VIDEOEDITOR_CHECK(OPENED == pEncoderContext->mState, M4ERR_STATE); 1215 1216 pEncoderContext->mNbInputFrames = 0; 1217 pEncoderContext->mFirstInputCts = -1.0; 1218 pEncoderContext->mLastInputCts = -1.0; 1219 pEncoderContext->mNbOutputFrames = 0; 1220 pEncoderContext->mFirstOutputCts = -1; 1221 pEncoderContext->mLastOutputCts = -1; 1222 1223 result = pEncoderContext->mEncoder->start(); 1224 VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE); 1225 1226 pEncoderContext->mPuller->start(); 1227 1228 // Set the new state 1229 pEncoderContext->mState = STARTED; 1230 1231 cleanUp: 1232 if ( M4NO_ERROR == err ) { 1233 LOGV("VideoEditorVideoEncoder_start no error"); 1234 } else { 1235 LOGV("VideoEditorVideoEncoder_start ERROR 0x%X", err); 1236 } 1237 LOGV("VideoEditorVideoEncoder_start end"); 1238 return err; 1239 } 1240 1241 M4OSA_ERR VideoEditorVideoEncoder_stop(M4ENCODER_Context pContext) { 1242 M4OSA_ERR err = M4NO_ERROR; 1243 VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL; 1244 MediaBuffer* outputBuffer = NULL; 1245 status_t result = OK; 1246 1247 LOGV("VideoEditorVideoEncoder_stop begin"); 1248 // Input parameters check 1249 VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); 1250 pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext; 1251 1252 // Send EOS again to make sure the source doesn't block. 1253 err = VideoEditorVideoEncoder_processInputBuffer(pEncoderContext, 0, 1254 M4OSA_TRUE); 1255 VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); 1256 1257 // Process the remaining buffers if necessary 1258 if ( (BUFFERING | READING) & pEncoderContext->mState ) { 1259 while (1) { 1260 MediaBuffer *outputBuffer = 1261 pEncoderContext->mPuller->getBufferBlocking(); 1262 1263 if (outputBuffer == NULL) break; 1264 1265 err = VideoEditorVideoEncoder_processOutputBuffer( 1266 pEncoderContext, outputBuffer); 1267 VIDEOEDITOR_CHECK(M4NO_ERROR == err, err); 1268 1269 pEncoderContext->mPuller->putBuffer(outputBuffer); 1270 } 1271 1272 pEncoderContext->mState = STARTED; 1273 } 1274 1275 // Stop the graph module if necessary 1276 if ( STARTED == pEncoderContext->mState ) { 1277 pEncoderContext->mPuller->stop(); 1278 pEncoderContext->mEncoder->stop(); 1279 pEncoderContext->mState = OPENED; 1280 } 1281 1282 if (pEncoderContext->mNbInputFrames != pEncoderContext->mNbOutputFrames) { 1283 LOGW("Some frames were not encoded: input(%d) != output(%d)", 1284 pEncoderContext->mNbInputFrames, pEncoderContext->mNbOutputFrames); 1285 } 1286 1287 cleanUp: 1288 if ( M4NO_ERROR == err ) { 1289 LOGV("VideoEditorVideoEncoder_stop no error"); 1290 } else { 1291 LOGV("VideoEditorVideoEncoder_stop ERROR 0x%X", err); 1292 } 1293 LOGV("VideoEditorVideoEncoder_stop end"); 1294 return err; 1295 } 1296 1297 M4OSA_ERR VideoEditorVideoEncoder_regulBitRate(M4ENCODER_Context pContext) { 1298 LOGW("regulBitRate is not implemented"); 1299 return M4NO_ERROR; 1300 } 1301 1302 M4OSA_ERR VideoEditorVideoEncoder_setOption(M4ENCODER_Context pContext, 1303 M4OSA_UInt32 optionID, M4OSA_DataOption optionValue) { 1304 M4OSA_ERR err = M4NO_ERROR; 1305 VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL; 1306 1307 LOGV("VideoEditorVideoEncoder_setOption start optionID 0x%X", optionID); 1308 // Input parameters check 1309 VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); 1310 1311 pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext; 1312 1313 switch( optionID ) { 1314 case M4ENCODER_kOptionID_SetH264ProcessNALUfctsPtr: 1315 pEncoderContext->mH264NALUPostProcessFct = 1316 (H264MCS_ProcessEncodedNALU_fct*)optionValue; 1317 break; 1318 case M4ENCODER_kOptionID_H264ProcessNALUContext: 1319 pEncoderContext->mH264NALUPostProcessCtx = 1320 (M4OSA_Context)optionValue; 1321 break; 1322 default: 1323 LOGV("VideoEditorVideoEncoder_setOption: unsupported optionId 0x%X", 1324 optionID); 1325 VIDEOEDITOR_CHECK(M4OSA_FALSE, M4ERR_BAD_OPTION_ID); 1326 break; 1327 } 1328 1329 cleanUp: 1330 if ( M4NO_ERROR == err ) { 1331 LOGV("VideoEditorVideoEncoder_setOption no error"); 1332 } else { 1333 LOGV("VideoEditorVideoEncoder_setOption ERROR 0x%X", err); 1334 } 1335 LOGV("VideoEditorVideoEncoder_setOption end"); 1336 return err; 1337 } 1338 1339 M4OSA_ERR VideoEditorVideoEncoder_getOption(M4ENCODER_Context pContext, 1340 M4OSA_UInt32 optionID, M4OSA_DataOption optionValue) { 1341 M4OSA_ERR err = M4NO_ERROR; 1342 VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL; 1343 1344 LOGV("VideoEditorVideoEncoder_getOption begin optinId 0x%X", optionID); 1345 // Input parameters check 1346 VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); 1347 pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext; 1348 1349 switch( optionID ) { 1350 case M4ENCODER_kOptionID_EncoderHeader: 1351 VIDEOEDITOR_CHECK( 1352 M4OSA_NULL != pEncoderContext->mHeader.pBuf, M4ERR_STATE); 1353 *(M4ENCODER_Header**)optionValue = &(pEncoderContext->mHeader); 1354 break; 1355 default: 1356 LOGV("VideoEditorVideoEncoder_getOption: unsupported optionId 0x%X", 1357 optionID); 1358 VIDEOEDITOR_CHECK(M4OSA_FALSE, M4ERR_BAD_OPTION_ID); 1359 break; 1360 } 1361 1362 cleanUp: 1363 if ( M4NO_ERROR == err ) { 1364 LOGV("VideoEditorVideoEncoder_getOption no error"); 1365 } else { 1366 LOGV("VideoEditorVideoEncoder_getOption ERROR 0x%X", err); 1367 } 1368 return err; 1369 } 1370 1371 M4OSA_ERR VideoEditorVideoEncoder_getInterface(M4ENCODER_Format format, 1372 M4ENCODER_Format* pFormat, 1373 M4ENCODER_GlobalInterface** pEncoderInterface, M4ENCODER_OpenMode mode){ 1374 M4OSA_ERR err = M4NO_ERROR; 1375 1376 // Input parameters check 1377 VIDEOEDITOR_CHECK(M4OSA_NULL != pFormat, M4ERR_PARAMETER); 1378 VIDEOEDITOR_CHECK(M4OSA_NULL != pEncoderInterface, M4ERR_PARAMETER); 1379 1380 LOGV("VideoEditorVideoEncoder_getInterface begin 0x%x 0x%x %d", pFormat, 1381 pEncoderInterface, mode); 1382 1383 SAFE_MALLOC(*pEncoderInterface, M4ENCODER_GlobalInterface, 1, 1384 "VideoEditorVideoEncoder"); 1385 1386 *pFormat = format; 1387 1388 switch( format ) { 1389 case M4ENCODER_kH263: 1390 { 1391 (*pEncoderInterface)->pFctInit = 1392 VideoEditorVideoEncoder_init_H263; 1393 break; 1394 } 1395 case M4ENCODER_kMPEG4: 1396 { 1397 (*pEncoderInterface)->pFctInit = 1398 VideoEditorVideoEncoder_init_MPEG4; 1399 break; 1400 } 1401 case M4ENCODER_kH264: 1402 { 1403 (*pEncoderInterface)->pFctInit = 1404 VideoEditorVideoEncoder_init_H264; 1405 break; 1406 } 1407 default: 1408 LOGV("VideoEditorVideoEncoder_getInterface : unsupported format %d", 1409 format); 1410 VIDEOEDITOR_CHECK(M4OSA_FALSE, M4ERR_PARAMETER); 1411 break; 1412 } 1413 (*pEncoderInterface)->pFctOpen = VideoEditorVideoEncoder_open; 1414 (*pEncoderInterface)->pFctStart = VideoEditorVideoEncoder_start; 1415 (*pEncoderInterface)->pFctStop = VideoEditorVideoEncoder_stop; 1416 (*pEncoderInterface)->pFctPause = M4OSA_NULL; 1417 (*pEncoderInterface)->pFctResume = M4OSA_NULL; 1418 (*pEncoderInterface)->pFctClose = VideoEditorVideoEncoder_close; 1419 (*pEncoderInterface)->pFctCleanup = VideoEditorVideoEncoder_cleanup; 1420 (*pEncoderInterface)->pFctRegulBitRate = 1421 VideoEditorVideoEncoder_regulBitRate; 1422 (*pEncoderInterface)->pFctEncode = VideoEditorVideoEncoder_encode; 1423 (*pEncoderInterface)->pFctSetOption = VideoEditorVideoEncoder_setOption; 1424 (*pEncoderInterface)->pFctGetOption = VideoEditorVideoEncoder_getOption; 1425 1426 cleanUp: 1427 if( M4NO_ERROR == err ) { 1428 LOGV("VideoEditorVideoEncoder_getInterface no error"); 1429 } else { 1430 *pEncoderInterface = M4OSA_NULL; 1431 LOGV("VideoEditorVideoEncoder_getInterface ERROR 0x%X", err); 1432 } 1433 return err; 1434 } 1435 1436 extern "C" { 1437 1438 M4OSA_ERR VideoEditorVideoEncoder_getInterface_H263(M4ENCODER_Format* pFormat, 1439 M4ENCODER_GlobalInterface** pEncoderInterface, M4ENCODER_OpenMode mode){ 1440 return VideoEditorVideoEncoder_getInterface(M4ENCODER_kH263, pFormat, 1441 pEncoderInterface, mode); 1442 } 1443 1444 M4OSA_ERR VideoEditorVideoEncoder_getInterface_MPEG4(M4ENCODER_Format* pFormat, 1445 M4ENCODER_GlobalInterface** pEncoderInterface, M4ENCODER_OpenMode mode){ 1446 return VideoEditorVideoEncoder_getInterface(M4ENCODER_kMPEG4, pFormat, 1447 pEncoderInterface, mode); 1448 } 1449 1450 M4OSA_ERR VideoEditorVideoEncoder_getInterface_H264(M4ENCODER_Format* pFormat, 1451 M4ENCODER_GlobalInterface** pEncoderInterface, M4ENCODER_OpenMode mode){ 1452 return VideoEditorVideoEncoder_getInterface(M4ENCODER_kH264, pFormat, 1453 pEncoderInterface, mode); 1454 1455 } 1456 1457 } // extern "C" 1458 1459 } // namespace android 1460