/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/**
 *************************************************************************
 * @file   VideoEditorVideoEncoder.cpp
 * @brief  StageFright shell video encoder
 *************************************************************************
 */
#define LOG_NDEBUG 1
#define LOG_TAG "VIDEOEDITOR_VIDEOENCODER"

/*******************
 *     HEADERS     *
 *******************/
#include "M4OSA_Debug.h"
#include "M4SYS_AccessUnit.h"
#include "VideoEditorVideoEncoder.h"
#include "VideoEditorUtils.h"
#include "MediaBufferPuller.h"
#include <I420ColorConverter.h>

#include <unistd.h>
#include "utils/Log.h"
#include "utils/Vector.h"
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/MediaSource.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/OMXClient.h>
#include <media/stagefright/OMXCodec.h>
#include <media/MediaProfiles.h>
#include "OMX_Video.h"

/********************
 *   DEFINITIONS    *
 ********************/

// Force using hardware encoder
#define VIDEOEDITOR_FORCECODEC kHardwareCodecsOnly

#if !defined(VIDEOEDITOR_FORCECODEC)
    #error "Cannot force DSI retrieval if codec type is not fixed"
#endif

/********************
 *   SOURCE CLASS   *
 ********************/

namespace android {

struct VideoEditorVideoEncoderSource : public MediaSource {
    public:
        static sp<VideoEditorVideoEncoderSource> Create(
            const sp<MetaData> &format);
        virtual status_t start(MetaData *params = NULL);
        virtual status_t stop();
        virtual sp<MetaData> getFormat();
        virtual status_t read(MediaBuffer **buffer,
            const ReadOptions *options = NULL);
        virtual int32_t storeBuffer(MediaBuffer *buffer);
        virtual int32_t getNumberOfBuffersInQueue();

    protected:
        virtual ~VideoEditorVideoEncoderSource();

    private:
        struct MediaBufferChain {
            MediaBuffer* buffer;
            MediaBufferChain* nextLink;
        };
        enum State {
            CREATED,
            STARTED,
            ERROR
        };
        VideoEditorVideoEncoderSource(const sp<MetaData> &format);

        // Don't call me
        VideoEditorVideoEncoderSource(const VideoEditorVideoEncoderSource &);
        VideoEditorVideoEncoderSource &operator=(
                const VideoEditorVideoEncoderSource &);

        MediaBufferChain* mFirstBufferLink;
        MediaBufferChain* mLastBufferLink;
        int32_t           mNbBuffer;
        bool              mIsEOS;
        State             mState;
        sp<MetaData>      mEncFormat;
        Mutex             mLock;
        Condition         mBufferCond;
};

sp<VideoEditorVideoEncoderSource> VideoEditorVideoEncoderSource::Create(
        const sp<MetaData> &format) {

    sp<VideoEditorVideoEncoderSource> aSource =
        new VideoEditorVideoEncoderSource(format);
    return aSource;
}

VideoEditorVideoEncoderSource::VideoEditorVideoEncoderSource(
        const sp<MetaData> &format):
        mFirstBufferLink(NULL),
        mLastBufferLink(NULL),
        mNbBuffer(0),
        mIsEOS(false),
        mState(CREATED),
        mEncFormat(format) {
    ALOGV("VideoEditorVideoEncoderSource::VideoEditorVideoEncoderSource");
}

VideoEditorVideoEncoderSource::~VideoEditorVideoEncoderSource() {

    // Safety clean up
    if( STARTED == mState ) {
        stop();
    }
}

status_t VideoEditorVideoEncoderSource::start(MetaData *meta) {
    status_t err = OK;

    ALOGV("VideoEditorVideoEncoderSource::start() begin");

    if( CREATED != mState ) {
        ALOGV("VideoEditorVideoEncoderSource::start: invalid state %d", mState);
        return UNKNOWN_ERROR;
    }
    mState = STARTED;

    ALOGV("VideoEditorVideoEncoderSource::start() END (0x%x)", err);
    return err;
}

status_t VideoEditorVideoEncoderSource::stop() {
    status_t err = OK;

    ALOGV("VideoEditorVideoEncoderSource::stop() begin");

    if( STARTED != mState ) {
        ALOGV("VideoEditorVideoEncoderSource::stop: invalid state %d", mState);
        return UNKNOWN_ERROR;
    }

    // Release the buffer chain
    int32_t i = 0;
    MediaBufferChain* tmpLink = NULL;
    while( mFirstBufferLink ) {
        i++;
        tmpLink = mFirstBufferLink;
        mFirstBufferLink = mFirstBufferLink->nextLink;
        delete tmpLink;
    }
    ALOGV("VideoEditorVideoEncoderSource::stop : %d buffers remaining", i);
    mFirstBufferLink = NULL;
    mLastBufferLink = NULL;

    mState = CREATED;

    ALOGV("VideoEditorVideoEncoderSource::stop() END (0x%x)", err);
    return err;
}

sp<MetaData> VideoEditorVideoEncoderSource::getFormat() {

    ALOGV("VideoEditorVideoEncoderSource::getFormat");
    return mEncFormat;
}

status_t VideoEditorVideoEncoderSource::read(MediaBuffer **buffer,
        const ReadOptions *options) {
    Mutex::Autolock autolock(mLock);
    MediaSource::ReadOptions readOptions;
    status_t err = OK;
    MediaBufferChain* tmpLink = NULL;

    ALOGV("VideoEditorVideoEncoderSource::read() begin");

    if ( STARTED != mState ) {
        ALOGV("VideoEditorVideoEncoderSource::read: invalid state %d", mState);
        return UNKNOWN_ERROR;
    }

    while (mFirstBufferLink == NULL && !mIsEOS) {
        mBufferCond.wait(mLock);
    }

    // End of stream?
    if (mFirstBufferLink == NULL) {
        *buffer = NULL;
        ALOGV("VideoEditorVideoEncoderSource::read : EOS");
        return ERROR_END_OF_STREAM;
    }

    // Get a buffer from the chain
    *buffer = mFirstBufferLink->buffer;
    tmpLink = mFirstBufferLink;
    mFirstBufferLink = mFirstBufferLink->nextLink;

    if ( NULL == mFirstBufferLink ) {
        mLastBufferLink = NULL;
    }
    delete tmpLink;
    mNbBuffer--;

    ALOGV("VideoEditorVideoEncoderSource::read() END (0x%x)", err);
    return err;
}

int32_t VideoEditorVideoEncoderSource::storeBuffer(MediaBuffer *buffer) {
    Mutex::Autolock autolock(mLock);
    status_t err = OK;

    ALOGV("VideoEditorVideoEncoderSource::storeBuffer() begin");

    if( NULL == buffer ) {
        ALOGV("VideoEditorVideoEncoderSource::storeBuffer : reached EOS");
        mIsEOS = true;
    } else {
        MediaBufferChain* newLink = new MediaBufferChain;
        newLink->buffer = buffer;
        newLink->nextLink = NULL;
        if( NULL != mLastBufferLink ) {
            mLastBufferLink->nextLink = newLink;
        } else {
            mFirstBufferLink = newLink;
        }
        mLastBufferLink = newLink;
        mNbBuffer++;
    }
    mBufferCond.signal();
    ALOGV("VideoEditorVideoEncoderSource::storeBuffer() end");
    return mNbBuffer;
}

int32_t VideoEditorVideoEncoderSource::getNumberOfBuffersInQueue() {
    Mutex::Autolock autolock(mLock);
    return mNbBuffer;
}

/**
 ******************************************************************************
 * structure VideoEditorVideoEncoder_Context
 * @brief This structure defines the context of the StageFright video encoder
 *        shell
 ******************************************************************************
 */
typedef enum {
    CREATED   = 0x1,
    OPENED    = 0x2,
    STARTED   = 0x4,
    BUFFERING = 0x8,
    READING   = 0x10
} VideoEditorVideoEncoder_State;

typedef struct {
    VideoEditorVideoEncoder_State     mState;
    M4ENCODER_Format                  mFormat;
    M4WRITER_DataInterface*           mWriterDataInterface;
    M4VPP_apply_fct*                  mPreProcFunction;
    M4VPP_Context                     mPreProcContext;
    M4SYS_AccessUnit*                 mAccessUnit;
    M4ENCODER_Params*                 mCodecParams;
    M4ENCODER_Header                  mHeader;
    H264MCS_ProcessEncodedNALU_fct*   mH264NALUPostProcessFct;
    M4OSA_Context                     mH264NALUPostProcessCtx;
    M4OSA_UInt32                      mLastCTS;
    sp<VideoEditorVideoEncoderSource> mEncoderSource;
    OMXClient                         mClient;
    sp<MediaSource>                   mEncoder;
    OMX_COLOR_FORMATTYPE              mEncoderColorFormat;
    MediaBufferPuller*                mPuller;
    I420ColorConverter*               mI420ColorConverter;

    uint32_t                          mNbInputFrames;
    double                            mFirstInputCts;
    double                            mLastInputCts;
    uint32_t                          mNbOutputFrames;
    int64_t                           mFirstOutputCts;
    int64_t                           mLastOutputCts;

    MediaProfiles *mVideoEditorProfile;
    int32_t mMaxPrefetchFrames;
} VideoEditorVideoEncoder_Context;

/********************
 *      TOOLS       *
 ********************/

M4OSA_ERR VideoEditorVideoEncoder_getDSI(M4ENCODER_Context pContext,
        sp<MetaData> metaData) {
    M4OSA_ERR err = M4NO_ERROR;
    VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;
    status_t result = OK;
    int32_t nbBuffer = 0;
    int32_t stride = 0;
    int32_t height = 0;
    int32_t framerate = 0;
    int32_t isCodecConfig = 0;
    size_t size = 0;
    uint32_t codecFlags = 0;
    MediaBuffer* inputBuffer = NULL;
    MediaBuffer* outputBuffer = NULL;
    sp<VideoEditorVideoEncoderSource> encoderSource = NULL;
    sp<MediaSource> encoder = NULL;
    OMXClient client;

ALOGV("VideoEditorVideoEncoder_getDSI begin"); 322 // Input parameters check 323 VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); 324 VIDEOEDITOR_CHECK(M4OSA_NULL != metaData.get(), M4ERR_PARAMETER); 325 326 pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext; 327 VIDEOEDITOR_CHECK(CREATED == pEncoderContext->mState, M4ERR_STATE); 328 329 // Create the encoder source 330 encoderSource = VideoEditorVideoEncoderSource::Create(metaData); 331 VIDEOEDITOR_CHECK(NULL != encoderSource.get(), M4ERR_STATE); 332 333 // Connect to the OMX client 334 result = client.connect(); 335 VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE); 336 337 // Create the OMX codec 338 // VIDEOEDITOR_FORCECODEC MUST be defined here 339 codecFlags |= OMXCodec::VIDEOEDITOR_FORCECODEC; 340 encoder = OMXCodec::Create(client.interface(), metaData, true, 341 encoderSource, NULL, codecFlags); 342 VIDEOEDITOR_CHECK(NULL != encoder.get(), M4ERR_STATE); 343 344 /** 345 * Send fake frames and retrieve the DSI 346 */ 347 // Send a fake frame to the source 348 metaData->findInt32(kKeyStride, &stride); 349 metaData->findInt32(kKeyHeight, &height); 350 metaData->findInt32(kKeySampleRate, &framerate); 351 size = (size_t)(stride*height*3)/2; 352 inputBuffer = new MediaBuffer(size); 353 inputBuffer->meta_data()->setInt64(kKeyTime, 0); 354 nbBuffer = encoderSource->storeBuffer(inputBuffer); 355 encoderSource->storeBuffer(NULL); // Signal EOS 356 357 // Call read once to get the DSI 358 result = encoder->start();; 359 VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE); 360 result = encoder->read(&outputBuffer, NULL); 361 VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE); 362 VIDEOEDITOR_CHECK(outputBuffer->meta_data()->findInt32( 363 kKeyIsCodecConfig, &isCodecConfig) && isCodecConfig, M4ERR_STATE); 364 365 VIDEOEDITOR_CHECK(M4OSA_NULL == pEncoderContext->mHeader.pBuf, M4ERR_STATE); 366 if ( M4ENCODER_kH264 == pEncoderContext->mFormat ) { 367 // For H264, format the DSI 368 result = buildAVCCodecSpecificData( 369 (uint8_t**)(&(pEncoderContext->mHeader.pBuf)), 370 (size_t*)(&(pEncoderContext->mHeader.Size)), 371 (const uint8_t*)outputBuffer->data() + outputBuffer->range_offset(), 372 outputBuffer->range_length(), encoder->getFormat().get()); 373 outputBuffer->release(); 374 VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE); 375 } else { 376 // For MPEG4, just copy the DSI 377 pEncoderContext->mHeader.Size = 378 (M4OSA_UInt32)outputBuffer->range_length(); 379 SAFE_MALLOC(pEncoderContext->mHeader.pBuf, M4OSA_Int8, 380 pEncoderContext->mHeader.Size, "Encoder header"); 381 memcpy((void *)pEncoderContext->mHeader.pBuf, 382 (void *)((M4OSA_MemAddr8)(outputBuffer->data())+outputBuffer->range_offset()), 383 pEncoderContext->mHeader.Size); 384 outputBuffer->release(); 385 } 386 387 result = encoder->stop(); 388 VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE); 389 390 cleanUp: 391 // Destroy the graph 392 if ( encoder != NULL ) { encoder.clear(); } 393 client.disconnect(); 394 if ( encoderSource != NULL ) { encoderSource.clear(); } 395 if ( M4NO_ERROR == err ) { 396 ALOGV("VideoEditorVideoEncoder_getDSI no error"); 397 } else { 398 ALOGV("VideoEditorVideoEncoder_getDSI ERROR 0x%X", err); 399 } 400 ALOGV("VideoEditorVideoEncoder_getDSI end"); 401 return err; 402 } 403 /******************** 404 * ENGINE INTERFACE * 405 ********************/ 406 407 M4OSA_ERR VideoEditorVideoEncoder_cleanup(M4ENCODER_Context pContext) { 408 M4OSA_ERR err = M4NO_ERROR; 409 VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL; 410 411 
ALOGV("VideoEditorVideoEncoder_cleanup begin"); 412 // Input parameters check 413 VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); 414 415 pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext; 416 VIDEOEDITOR_CHECK(CREATED == pEncoderContext->mState, M4ERR_STATE); 417 418 // Release memory 419 SAFE_FREE(pEncoderContext->mHeader.pBuf); 420 SAFE_FREE(pEncoderContext); 421 pContext = M4OSA_NULL; 422 423 cleanUp: 424 if ( M4NO_ERROR == err ) { 425 ALOGV("VideoEditorVideoEncoder_cleanup no error"); 426 } else { 427 ALOGV("VideoEditorVideoEncoder_cleanup ERROR 0x%X", err); 428 } 429 ALOGV("VideoEditorVideoEncoder_cleanup end"); 430 return err; 431 } 432 433 M4OSA_ERR VideoEditorVideoEncoder_init(M4ENCODER_Format format, 434 M4ENCODER_Context* pContext, 435 M4WRITER_DataInterface* pWriterDataInterface, 436 M4VPP_apply_fct* pVPPfct, M4VPP_Context pVPPctxt, 437 M4OSA_Void* pExternalAPI, M4OSA_Void* pUserData) { 438 439 M4OSA_ERR err = M4NO_ERROR; 440 VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL; 441 int encoderInput = OMX_COLOR_FormatYUV420Planar; 442 443 ALOGV("VideoEditorVideoEncoder_init begin: format %d", format); 444 // Input parameters check 445 VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); 446 VIDEOEDITOR_CHECK(M4OSA_NULL != pWriterDataInterface, M4ERR_PARAMETER); 447 VIDEOEDITOR_CHECK(M4OSA_NULL != pVPPfct, M4ERR_PARAMETER); 448 VIDEOEDITOR_CHECK(M4OSA_NULL != pVPPctxt, M4ERR_PARAMETER); 449 450 // Context allocation & initialization 451 SAFE_MALLOC(pEncoderContext, VideoEditorVideoEncoder_Context, 1, 452 "VideoEditorVideoEncoder"); 453 pEncoderContext->mState = CREATED; 454 pEncoderContext->mFormat = format; 455 pEncoderContext->mWriterDataInterface = pWriterDataInterface; 456 pEncoderContext->mPreProcFunction = pVPPfct; 457 pEncoderContext->mPreProcContext = pVPPctxt; 458 pEncoderContext->mPuller = NULL; 459 460 // Get color converter and determine encoder input format 461 pEncoderContext->mI420ColorConverter = new I420ColorConverter; 462 if (pEncoderContext->mI420ColorConverter->isLoaded()) { 463 encoderInput = pEncoderContext->mI420ColorConverter->getEncoderInputFormat(); 464 } 465 if (encoderInput == OMX_COLOR_FormatYUV420Planar) { 466 delete pEncoderContext->mI420ColorConverter; 467 pEncoderContext->mI420ColorConverter = NULL; 468 } 469 pEncoderContext->mEncoderColorFormat = (OMX_COLOR_FORMATTYPE)encoderInput; 470 ALOGI("encoder input format = 0x%X\n", encoderInput); 471 472 *pContext = pEncoderContext; 473 474 cleanUp: 475 if ( M4NO_ERROR == err ) { 476 ALOGV("VideoEditorVideoEncoder_init no error"); 477 } else { 478 VideoEditorVideoEncoder_cleanup(pEncoderContext); 479 *pContext = M4OSA_NULL; 480 ALOGV("VideoEditorVideoEncoder_init ERROR 0x%X", err); 481 } 482 ALOGV("VideoEditorVideoEncoder_init end"); 483 return err; 484 } 485 486 M4OSA_ERR VideoEditorVideoEncoder_init_H263(M4ENCODER_Context* pContext, 487 M4WRITER_DataInterface* pWriterDataInterface, M4VPP_apply_fct* pVPPfct, 488 M4VPP_Context pVPPctxt, M4OSA_Void* pExternalAPI, M4OSA_Void* pUserData) 489 { 490 491 return VideoEditorVideoEncoder_init(M4ENCODER_kH263, pContext, 492 pWriterDataInterface, pVPPfct, pVPPctxt, pExternalAPI, pUserData); 493 } 494 495 496 M4OSA_ERR VideoEditorVideoEncoder_init_MPEG4(M4ENCODER_Context* pContext, 497 M4WRITER_DataInterface* pWriterDataInterface, M4VPP_apply_fct* pVPPfct, 498 M4VPP_Context pVPPctxt, M4OSA_Void* pExternalAPI, M4OSA_Void* pUserData) 499 { 500 501 return VideoEditorVideoEncoder_init(M4ENCODER_kMPEG4, pContext, 502 
        pWriterDataInterface, pVPPfct, pVPPctxt, pExternalAPI, pUserData);
}


M4OSA_ERR VideoEditorVideoEncoder_init_H264(M4ENCODER_Context* pContext,
        M4WRITER_DataInterface* pWriterDataInterface, M4VPP_apply_fct* pVPPfct,
        M4VPP_Context pVPPctxt, M4OSA_Void* pExternalAPI, M4OSA_Void* pUserData)
{

    return VideoEditorVideoEncoder_init(M4ENCODER_kH264, pContext,
        pWriterDataInterface, pVPPfct, pVPPctxt, pExternalAPI, pUserData);
}

M4OSA_ERR VideoEditorVideoEncoder_close(M4ENCODER_Context pContext) {
    M4OSA_ERR err = M4NO_ERROR;
    VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;

    ALOGV("VideoEditorVideoEncoder_close begin");
    // Input parameters check
    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);

    pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;
    VIDEOEDITOR_CHECK(OPENED == pEncoderContext->mState, M4ERR_STATE);

    // Release memory
    SAFE_FREE(pEncoderContext->mCodecParams);

    // Destroy the graph
    pEncoderContext->mEncoder.clear();
    pEncoderContext->mClient.disconnect();
    pEncoderContext->mEncoderSource.clear();

    delete pEncoderContext->mPuller;
    pEncoderContext->mPuller = NULL;

    delete pEncoderContext->mI420ColorConverter;
    pEncoderContext->mI420ColorConverter = NULL;

    // Set the new state
    pEncoderContext->mState = CREATED;

cleanUp:
    if( M4NO_ERROR == err ) {
        ALOGV("VideoEditorVideoEncoder_close no error");
    } else {
        ALOGV("VideoEditorVideoEncoder_close ERROR 0x%X", err);
    }
    ALOGV("VideoEditorVideoEncoder_close end");
    return err;
}


M4OSA_ERR VideoEditorVideoEncoder_open(M4ENCODER_Context pContext,
        M4SYS_AccessUnit* pAU, M4OSA_Void* pParams) {
    M4OSA_ERR err = M4NO_ERROR;
    VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;
    M4ENCODER_Params* pCodecParams = M4OSA_NULL;
    status_t result = OK;
    sp<MetaData> encoderMetadata = NULL;
    const char* mime = NULL;
    int32_t iProfile = 0;
    int32_t iLevel = 0;

    int32_t iFrameRate = 0;
    uint32_t codecFlags = 0;

    ALOGV(">>> VideoEditorVideoEncoder_open begin");
    // Input parameters check
    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
    VIDEOEDITOR_CHECK(M4OSA_NULL != pAU, M4ERR_PARAMETER);
    VIDEOEDITOR_CHECK(M4OSA_NULL != pParams, M4ERR_PARAMETER);

    pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;
    pCodecParams = (M4ENCODER_Params*)pParams;
    VIDEOEDITOR_CHECK(CREATED == pEncoderContext->mState, M4ERR_STATE);

    // Context initialization
    pEncoderContext->mAccessUnit = pAU;
    pEncoderContext->mVideoEditorProfile = MediaProfiles::getInstance();
    pEncoderContext->mMaxPrefetchFrames =
        pEncoderContext->mVideoEditorProfile->getVideoEditorCapParamByName(
        "maxPrefetchYUVFrames");

    // Allocate & initialize the encoding parameters
    SAFE_MALLOC(pEncoderContext->mCodecParams, M4ENCODER_Params, 1,
        "VideoEditorVideoEncoder");


    pEncoderContext->mCodecParams->InputFormat = pCodecParams->InputFormat;
    pEncoderContext->mCodecParams->InputFrameWidth =
        pCodecParams->InputFrameWidth;
    pEncoderContext->mCodecParams->InputFrameHeight =
        pCodecParams->InputFrameHeight;
    pEncoderContext->mCodecParams->FrameWidth = pCodecParams->FrameWidth;
    pEncoderContext->mCodecParams->FrameHeight = pCodecParams->FrameHeight;
    pEncoderContext->mCodecParams->Bitrate = pCodecParams->Bitrate;
    pEncoderContext->mCodecParams->FrameRate = pCodecParams->FrameRate;
    pEncoderContext->mCodecParams->Format = pCodecParams->Format;
    pEncoderContext->mCodecParams->videoProfile = pCodecParams->videoProfile;
    pEncoderContext->mCodecParams->videoLevel = pCodecParams->videoLevel;

    // Check output format consistency and resolution
    VIDEOEDITOR_CHECK(
        pEncoderContext->mCodecParams->Format == pEncoderContext->mFormat,
        M4ERR_PARAMETER);
    VIDEOEDITOR_CHECK(0 == pEncoderContext->mCodecParams->FrameWidth % 16,
        M4ERR_PARAMETER);
    VIDEOEDITOR_CHECK(0 == pEncoderContext->mCodecParams->FrameHeight % 16,
        M4ERR_PARAMETER);

    /**
     * StageFright graph building
     */

    // Create the meta data for the encoder
    encoderMetadata = new MetaData;
    switch( pEncoderContext->mCodecParams->Format ) {
        case M4ENCODER_kH263:
            mime = MEDIA_MIMETYPE_VIDEO_H263;
            break;
        case M4ENCODER_kMPEG4:
            mime = MEDIA_MIMETYPE_VIDEO_MPEG4;
            break;
        case M4ENCODER_kH264:
            mime = MEDIA_MIMETYPE_VIDEO_AVC;
            break;
        default:
            VIDEOEDITOR_CHECK(!"VideoEncoder_open : incorrect input format",
                M4ERR_PARAMETER);
            break;
    }
    iProfile = pEncoderContext->mCodecParams->videoProfile;
    iLevel = pEncoderContext->mCodecParams->videoLevel;
    ALOGV("Encoder mime %s profile %d, level %d",
        mime, iProfile, iLevel);
    ALOGV("Encoder w %d, h %d, bitrate %d, fps %d",
        pEncoderContext->mCodecParams->FrameWidth,
        pEncoderContext->mCodecParams->FrameHeight,
        pEncoderContext->mCodecParams->Bitrate,
        pEncoderContext->mCodecParams->FrameRate);
    CHECK(iProfile != 0x7fffffff);
    CHECK(iLevel != 0x7fffffff);

    encoderMetadata->setCString(kKeyMIMEType, mime);
    encoderMetadata->setInt32(kKeyVideoProfile, iProfile);
    //FIXME:
    // Temp: Do not set the level for Mpeg4 / H.263 Enc
    // as OMX.Nvidia.mp4.encoder and OMX.Nvidia.h263.encoder
    // return 0x80001019
    if (pEncoderContext->mCodecParams->Format == M4ENCODER_kH264) {
        encoderMetadata->setInt32(kKeyVideoLevel, iLevel);
    }
    encoderMetadata->setInt32(kKeyWidth,
        (int32_t)pEncoderContext->mCodecParams->FrameWidth);
    encoderMetadata->setInt32(kKeyStride,
        (int32_t)pEncoderContext->mCodecParams->FrameWidth);
    encoderMetadata->setInt32(kKeyHeight,
        (int32_t)pEncoderContext->mCodecParams->FrameHeight);
    encoderMetadata->setInt32(kKeySliceHeight,
        (int32_t)pEncoderContext->mCodecParams->FrameHeight);

    switch( pEncoderContext->mCodecParams->FrameRate ) {
        case M4ENCODER_k5_FPS:    iFrameRate = 5;  break;
        case M4ENCODER_k7_5_FPS:  iFrameRate = 8;  break;
        case M4ENCODER_k10_FPS:   iFrameRate = 10; break;
        case M4ENCODER_k12_5_FPS: iFrameRate = 13; break;
        case M4ENCODER_k15_FPS:   iFrameRate = 15; break;
        case M4ENCODER_k20_FPS:   iFrameRate = 20; break;
        case M4ENCODER_k25_FPS:   iFrameRate = 25; break;
        case M4ENCODER_k30_FPS:   iFrameRate = 30; break;
        case M4ENCODER_kVARIABLE_FPS:
            iFrameRate = 30;
            ALOGI("Frame rate set to M4ENCODER_kVARIABLE_FPS: set to 30");
            break;
        case M4ENCODER_kUSE_TIMESCALE:
            iFrameRate = 30;
            ALOGI("Frame rate set to M4ENCODER_kUSE_TIMESCALE: set to 30");
            break;

        default:
            VIDEOEDITOR_CHECK(!"VideoEncoder_open:incorrect framerate",
                M4ERR_STATE);
            break;
    }
    encoderMetadata->setInt32(kKeyFrameRate, iFrameRate);
    encoderMetadata->setInt32(kKeyBitRate,
        (int32_t)pEncoderContext->mCodecParams->Bitrate);
    encoderMetadata->setInt32(kKeyIFramesInterval, 1);

    encoderMetadata->setInt32(kKeyColorFormat,
        pEncoderContext->mEncoderColorFormat);

    if (pEncoderContext->mCodecParams->Format != M4ENCODER_kH263) {
        // Get the encoder DSI
        err = VideoEditorVideoEncoder_getDSI(pEncoderContext, encoderMetadata);
        VIDEOEDITOR_CHECK(M4NO_ERROR == err, err);
    }

    // Create the encoder source
    pEncoderContext->mEncoderSource = VideoEditorVideoEncoderSource::Create(
        encoderMetadata);
    VIDEOEDITOR_CHECK(
        NULL != pEncoderContext->mEncoderSource.get(), M4ERR_STATE);

    // Connect to the OMX client
    result = pEncoderContext->mClient.connect();
    VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE);

    // Create the OMX codec
#ifdef VIDEOEDITOR_FORCECODEC
    codecFlags |= OMXCodec::VIDEOEDITOR_FORCECODEC;
#endif /* VIDEOEDITOR_FORCECODEC */
    pEncoderContext->mEncoder = OMXCodec::Create(
        pEncoderContext->mClient.interface(), encoderMetadata, true,
        pEncoderContext->mEncoderSource, NULL, codecFlags);
    VIDEOEDITOR_CHECK(NULL != pEncoderContext->mEncoder.get(), M4ERR_STATE);
    ALOGV("VideoEditorVideoEncoder_open : DONE");
    pEncoderContext->mPuller = new MediaBufferPuller(
        pEncoderContext->mEncoder);

    // Set the new state
    pEncoderContext->mState = OPENED;

cleanUp:
    if( M4NO_ERROR == err ) {
        ALOGV("VideoEditorVideoEncoder_open no error");
    } else {
        VideoEditorVideoEncoder_close(pEncoderContext);
        ALOGV("VideoEditorVideoEncoder_open ERROR 0x%X", err);
    }
    ALOGV("VideoEditorVideoEncoder_open end");
    return err;
}

M4OSA_ERR VideoEditorVideoEncoder_processInputBuffer(
        M4ENCODER_Context pContext, M4OSA_Double Cts,
        M4OSA_Bool bReachedEOS) {
    M4OSA_ERR err = M4NO_ERROR;
    VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;
    M4VIFI_ImagePlane pOutPlane[3];
    MediaBuffer* buffer = NULL;
    int32_t nbBuffer = 0;

    ALOGV("VideoEditorVideoEncoder_processInputBuffer begin: cts %f", Cts);
    // Input parameters check
    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);

    pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;
    pOutPlane[0].pac_data = M4OSA_NULL;
    pOutPlane[1].pac_data = M4OSA_NULL;
    pOutPlane[2].pac_data = M4OSA_NULL;

    if ( M4OSA_FALSE == bReachedEOS ) {
        M4OSA_UInt32 sizeY = pEncoderContext->mCodecParams->FrameWidth *
            pEncoderContext->mCodecParams->FrameHeight;
        M4OSA_UInt32 sizeU = sizeY >> 2;
        M4OSA_UInt32 size = sizeY + 2*sizeU;
        M4OSA_UInt8* pData = M4OSA_NULL;
        buffer = new MediaBuffer((size_t)size);
        pData = (M4OSA_UInt8*)buffer->data() + buffer->range_offset();

        // Prepare the output image for pre-processing
        pOutPlane[0].u_width = pEncoderContext->mCodecParams->FrameWidth;
        pOutPlane[0].u_height = pEncoderContext->mCodecParams->FrameHeight;
        pOutPlane[0].u_topleft = 0;
        pOutPlane[0].u_stride = pOutPlane[0].u_width;
        pOutPlane[1].u_width = pOutPlane[0].u_width/2;
        pOutPlane[1].u_height = pOutPlane[0].u_height/2;
        pOutPlane[1].u_topleft = 0;
        pOutPlane[1].u_stride = pOutPlane[0].u_stride/2;
        pOutPlane[2].u_width = pOutPlane[1].u_width;
        pOutPlane[2].u_height = pOutPlane[1].u_height;
        pOutPlane[2].u_topleft = 0;
        pOutPlane[2].u_stride = pOutPlane[1].u_stride;

        pOutPlane[0].pac_data = pData;
        pOutPlane[1].pac_data = pData + sizeY;
        pOutPlane[2].pac_data = pData + sizeY + sizeU;

        // Apply pre-processing
        err = pEncoderContext->mPreProcFunction(
            pEncoderContext->mPreProcContext, M4OSA_NULL, pOutPlane);
        VIDEOEDITOR_CHECK(M4NO_ERROR == err, err);

        // Convert MediaBuffer to the encoder input format if necessary
        if (pEncoderContext->mI420ColorConverter) {
            I420ColorConverter* converter = pEncoderContext->mI420ColorConverter;
            int actualWidth = pEncoderContext->mCodecParams->FrameWidth;
            int actualHeight = pEncoderContext->mCodecParams->FrameHeight;

            int encoderWidth, encoderHeight;
            ARect encoderRect;
            int encoderBufferSize;

            if (converter->getEncoderInputBufferInfo(
                actualWidth, actualHeight,
                &encoderWidth, &encoderHeight,
                &encoderRect, &encoderBufferSize) == 0) {

                MediaBuffer* newBuffer = new MediaBuffer(encoderBufferSize);

                if (converter->convertI420ToEncoderInput(
                    pData,  // srcBits
                    actualWidth, actualHeight,
                    encoderWidth, encoderHeight,
                    encoderRect,
                    (uint8_t*)newBuffer->data() + newBuffer->range_offset()) < 0) {
                    ALOGE("convertI420ToEncoderInput failed");
                }

                // switch to new buffer
                buffer->release();
                buffer = newBuffer;
            }
        }

        // Set the metadata
        buffer->meta_data()->setInt64(kKeyTime, (int64_t)(Cts*1000));
    }

    // Push the buffer to the source; a NULL buffer notifies the source of EOS
    nbBuffer = pEncoderContext->mEncoderSource->storeBuffer(buffer);

cleanUp:
    if ( M4NO_ERROR == err ) {
        ALOGV("VideoEditorVideoEncoder_processInputBuffer no error");
    } else {
        if( NULL != buffer ) {
            buffer->release();
        }
        ALOGV("VideoEditorVideoEncoder_processInputBuffer ERROR 0x%X", err);
    }
    ALOGV("VideoEditorVideoEncoder_processInputBuffer end");
    return err;
}

M4OSA_ERR VideoEditorVideoEncoder_processOutputBuffer(
        M4ENCODER_Context pContext, MediaBuffer* buffer) {
    M4OSA_ERR err = M4NO_ERROR;
    VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;
    M4OSA_UInt32 Cts = 0;
    int32_t i32Tmp = 0;
    int64_t i64Tmp = 0;
    status_t result = OK;

    ALOGV("VideoEditorVideoEncoder_processOutputBuffer begin");
    // Input parameters check
    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
    VIDEOEDITOR_CHECK(M4OSA_NULL != buffer, M4ERR_PARAMETER);

    pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;

    // Process the returned AU
    if ( 0 == buffer->range_length() ) {
        // Encoder has no data yet, nothing unusual
        ALOGV("VideoEditorVideoEncoder_processOutputBuffer : buffer is empty");
        goto cleanUp;
    }
    VIDEOEDITOR_CHECK(0 == ((M4OSA_UInt32)buffer->data())%4, M4ERR_PARAMETER);
    VIDEOEDITOR_CHECK(buffer->meta_data().get(), M4ERR_PARAMETER);
    if ( buffer->meta_data()->findInt32(kKeyIsCodecConfig, &i32Tmp) && i32Tmp ) {
        {   // Display the DSI
            ALOGV("VideoEditorVideoEncoder_processOutputBuffer DSI %d",
                buffer->range_length());
            uint8_t* tmp = (uint8_t*)(buffer->data());
            for( uint32_t i=0; i<buffer->range_length(); i++ ) {
                ALOGV("DSI [%d] %.2X", i, tmp[i]);
            }
        }
    } else {
        // Check the CTS
        VIDEOEDITOR_CHECK(buffer->meta_data()->findInt64(kKeyTime, &i64Tmp),
            M4ERR_STATE);

        pEncoderContext->mNbOutputFrames++;
        if ( 0 > pEncoderContext->mFirstOutputCts ) {
            pEncoderContext->mFirstOutputCts = i64Tmp;
        }
        pEncoderContext->mLastOutputCts = i64Tmp;

        Cts = (M4OSA_Int32)(i64Tmp/1000);
        ALOGV("[TS_CHECK] VI/ENC WRITE frame %d @ %lld -> %d (last %d)",
            pEncoderContext->mNbOutputFrames, i64Tmp, Cts,
            pEncoderContext->mLastCTS);
        if ( Cts < pEncoderContext->mLastCTS ) {
            ALOGV("VideoEncoder_processOutputBuffer WARNING : Cts is going "
                "backwards %d < %d", Cts, pEncoderContext->mLastCTS);
            goto cleanUp;
        }
        ALOGV("VideoEditorVideoEncoder_processOutputBuffer : %d %d",
            Cts, pEncoderContext->mLastCTS);

        // Retrieve the AU container
        err = pEncoderContext->mWriterDataInterface->pStartAU(
            pEncoderContext->mWriterDataInterface->pWriterContext,
            pEncoderContext->mAccessUnit->stream->streamID,
            pEncoderContext->mAccessUnit);
        VIDEOEDITOR_CHECK(M4NO_ERROR == err, err);

        // Format the AU
        VIDEOEDITOR_CHECK(
            buffer->range_length() <= pEncoderContext->mAccessUnit->size,
            M4ERR_PARAMETER);
        // Remove H264 AU start code
        if ( M4ENCODER_kH264 == pEncoderContext->mFormat ) {
            if (!memcmp((const uint8_t *)buffer->data() +
                    buffer->range_offset(), "\x00\x00\x00\x01", 4) ) {
                buffer->set_range(buffer->range_offset() + 4,
                    buffer->range_length() - 4);
            }
        }

        if ( (M4ENCODER_kH264 == pEncoderContext->mFormat) &&
                (M4OSA_NULL != pEncoderContext->mH264NALUPostProcessFct) ) {
            // H264 trimming case, NALU post processing is needed
            M4OSA_Int32 outputSize = pEncoderContext->mAccessUnit->size;
            err = pEncoderContext->mH264NALUPostProcessFct(
                pEncoderContext->mH264NALUPostProcessCtx,
                (M4OSA_UInt8*)buffer->data()+buffer->range_offset(),
                buffer->range_length(),
                (M4OSA_UInt8*)pEncoderContext->mAccessUnit->dataAddress,
                &outputSize);
            VIDEOEDITOR_CHECK(M4NO_ERROR == err, err);
            pEncoderContext->mAccessUnit->size = (M4OSA_UInt32)outputSize;
        } else {
            // The AU can just be copied
            memcpy((void *)pEncoderContext->mAccessUnit->dataAddress,
                (void *)((M4OSA_MemAddr8)(buffer->data())+buffer->range_offset()),
                buffer->range_length());
            pEncoderContext->mAccessUnit->size =
                (M4OSA_UInt32)buffer->range_length();
        }

        if ( buffer->meta_data()->findInt32(kKeyIsSyncFrame,&i32Tmp) && i32Tmp){
            pEncoderContext->mAccessUnit->attribute = AU_RAP;
        } else {
            pEncoderContext->mAccessUnit->attribute = AU_P_Frame;
        }
        pEncoderContext->mLastCTS = Cts;
        pEncoderContext->mAccessUnit->CTS = Cts;
        pEncoderContext->mAccessUnit->DTS = Cts;

        ALOGV("VideoEditorVideoEncoder_processOutputBuffer: AU @ 0x%X 0x%X %d %d",
            pEncoderContext->mAccessUnit->dataAddress,
            *pEncoderContext->mAccessUnit->dataAddress,
            pEncoderContext->mAccessUnit->size,
            pEncoderContext->mAccessUnit->CTS);

        // Write the AU
        err = pEncoderContext->mWriterDataInterface->pProcessAU(
            pEncoderContext->mWriterDataInterface->pWriterContext,
            pEncoderContext->mAccessUnit->stream->streamID,
            pEncoderContext->mAccessUnit);
        VIDEOEDITOR_CHECK(M4NO_ERROR == err, err);
    }

cleanUp:
    if( M4NO_ERROR == err ) {
        ALOGV("VideoEditorVideoEncoder_processOutputBuffer no error");
    } else {
        SAFE_FREE(pEncoderContext->mHeader.pBuf);
        pEncoderContext->mHeader.Size = 0;
        ALOGV("VideoEditorVideoEncoder_processOutputBuffer ERROR 0x%X", err);
    }
    ALOGV("VideoEditorVideoEncoder_processOutputBuffer end");
    return err;
}

M4OSA_ERR VideoEditorVideoEncoder_encode(M4ENCODER_Context pContext,
        M4VIFI_ImagePlane* pInPlane, M4OSA_Double Cts,
        M4ENCODER_FrameMode FrameMode) {
    M4OSA_ERR err = M4NO_ERROR;
    VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;
    status_t result = OK;
    MediaBuffer* outputBuffer = NULL;

    ALOGV("VideoEditorVideoEncoder_encode 0x%X %f %d", pInPlane, Cts, FrameMode);
    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);

    pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;
    if ( STARTED == pEncoderContext->mState ) {
        pEncoderContext->mState = BUFFERING;
    }
    VIDEOEDITOR_CHECK(
        (BUFFERING | READING) & pEncoderContext->mState, M4ERR_STATE);

    pEncoderContext->mNbInputFrames++;
    if ( 0 > pEncoderContext->mFirstInputCts ) {
        pEncoderContext->mFirstInputCts = Cts;
    }
    pEncoderContext->mLastInputCts = Cts;

    ALOGV("VideoEditorVideoEncoder_encode 0x%X %d %f (%d)", pInPlane, FrameMode,
        Cts, pEncoderContext->mLastCTS);

    // Push the input buffer to the encoder source
    err = VideoEditorVideoEncoder_processInputBuffer(pEncoderContext, Cts,
        M4OSA_FALSE);
    VIDEOEDITOR_CHECK(M4NO_ERROR == err, err);

    // Notify the source in case of EOS
    if ( M4ENCODER_kLastFrame == FrameMode ) {
        err = VideoEditorVideoEncoder_processInputBuffer(
            pEncoderContext, 0, M4OSA_TRUE);
        VIDEOEDITOR_CHECK(M4NO_ERROR == err, err);
    }

    if ( BUFFERING == pEncoderContext->mState ) {
        // Prefetch is complete, start reading
        pEncoderContext->mState = READING;
    }
    // Read
    while (1) {
        MediaBuffer *outputBuffer =
                pEncoderContext->mPuller->getBufferNonBlocking();

        if (outputBuffer == NULL) {
            int32_t YUVBufferNumber =
                    pEncoderContext->mEncoderSource->getNumberOfBuffersInQueue();
            /* Make sure that the configured maximum number of prefetch YUV frames
             * is not exceeded. This limits the memory usage of the video editor
             * engine. The maximum number of prefetch YUV frames is defined in
             * media_profiles.xml. */
            if ((YUVBufferNumber < pEncoderContext->mMaxPrefetchFrames) ||
                (pEncoderContext->mPuller->hasMediaSourceReturnedError()
                    == true)) {
                break;
            }
        } else {
            // Provide the encoded AU to the writer
            err = VideoEditorVideoEncoder_processOutputBuffer(pEncoderContext,
                outputBuffer);
            VIDEOEDITOR_CHECK(M4NO_ERROR == err, err);

            pEncoderContext->mPuller->putBuffer(outputBuffer);
        }
    }

cleanUp:
    if( M4NO_ERROR == err ) {
        ALOGV("VideoEditorVideoEncoder_encode no error");
    } else {
        ALOGV("VideoEditorVideoEncoder_encode ERROR 0x%X", err);
    }
    ALOGV("VideoEditorVideoEncoder_encode end");
    return err;
}

M4OSA_ERR VideoEditorVideoEncoder_start(M4ENCODER_Context pContext) {
    M4OSA_ERR err = M4NO_ERROR;
    VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;
    status_t result = OK;

    ALOGV("VideoEditorVideoEncoder_start begin");
    // Input parameters check
    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);

    pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;
    VIDEOEDITOR_CHECK(OPENED == pEncoderContext->mState, M4ERR_STATE);

    pEncoderContext->mNbInputFrames  = 0;
    pEncoderContext->mFirstInputCts  = -1.0;
    pEncoderContext->mLastInputCts   = -1.0;
    pEncoderContext->mNbOutputFrames = 0;
    pEncoderContext->mFirstOutputCts = -1;
    pEncoderContext->mLastOutputCts  = -1;

    result = pEncoderContext->mEncoder->start();
    VIDEOEDITOR_CHECK(OK == result, M4ERR_STATE);

    pEncoderContext->mPuller->start();

    // Set the new state
    pEncoderContext->mState = STARTED;

cleanUp:
    if ( M4NO_ERROR == err ) {
        ALOGV("VideoEditorVideoEncoder_start no error");
    } else {
        ALOGV("VideoEditorVideoEncoder_start ERROR 0x%X", err);
    }
    ALOGV("VideoEditorVideoEncoder_start end");
    return err;
}

M4OSA_ERR VideoEditorVideoEncoder_stop(M4ENCODER_Context pContext) {
    M4OSA_ERR err = M4NO_ERROR;
    VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;
    MediaBuffer* outputBuffer = NULL;
    status_t result = OK;

    ALOGV("VideoEditorVideoEncoder_stop begin");
    // Input parameters check
    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);
    pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;

    // Send EOS again to make sure the source doesn't block.
    err = VideoEditorVideoEncoder_processInputBuffer(pEncoderContext, 0,
        M4OSA_TRUE);
    VIDEOEDITOR_CHECK(M4NO_ERROR == err, err);

    // Process the remaining buffers if necessary
    if ( (BUFFERING | READING) & pEncoderContext->mState ) {
        while (1) {
            MediaBuffer *outputBuffer =
                    pEncoderContext->mPuller->getBufferBlocking();

            if (outputBuffer == NULL) break;

            err = VideoEditorVideoEncoder_processOutputBuffer(
                pEncoderContext, outputBuffer);
            VIDEOEDITOR_CHECK(M4NO_ERROR == err, err);

            pEncoderContext->mPuller->putBuffer(outputBuffer);
        }

        pEncoderContext->mState = STARTED;
    }

    // Stop the graph module if necessary
    if ( STARTED == pEncoderContext->mState ) {
        pEncoderContext->mPuller->stop();
        pEncoderContext->mEncoder->stop();
        pEncoderContext->mState = OPENED;
    }

    if (pEncoderContext->mNbInputFrames != pEncoderContext->mNbOutputFrames) {
        ALOGW("Some frames were not encoded: input(%d) != output(%d)",
            pEncoderContext->mNbInputFrames, pEncoderContext->mNbOutputFrames);
    }

cleanUp:
    if ( M4NO_ERROR == err ) {
        ALOGV("VideoEditorVideoEncoder_stop no error");
    } else {
        ALOGV("VideoEditorVideoEncoder_stop ERROR 0x%X", err);
    }
    ALOGV("VideoEditorVideoEncoder_stop end");
    return err;
}

M4OSA_ERR VideoEditorVideoEncoder_regulBitRate(M4ENCODER_Context pContext) {
    ALOGW("regulBitRate is not implemented");
    return M4NO_ERROR;
}

M4OSA_ERR VideoEditorVideoEncoder_setOption(M4ENCODER_Context pContext,
        M4OSA_UInt32 optionID, M4OSA_DataOption optionValue) {
    M4OSA_ERR err = M4NO_ERROR;
    VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL;

    ALOGV("VideoEditorVideoEncoder_setOption start optionID 0x%X", optionID);
    // Input parameters check
    VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER);

    pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext;

    switch( optionID ) {
        case M4ENCODER_kOptionID_SetH264ProcessNALUfctsPtr:
            pEncoderContext->mH264NALUPostProcessFct =
                (H264MCS_ProcessEncodedNALU_fct*)optionValue;
            break;
        case M4ENCODER_kOptionID_H264ProcessNALUContext:
            pEncoderContext->mH264NALUPostProcessCtx =
                (M4OSA_Context)optionValue;
            break;
        default:
            ALOGV("VideoEditorVideoEncoder_setOption: unsupported optionId 0x%X",
                optionID);
            VIDEOEDITOR_CHECK(M4OSA_FALSE, M4ERR_BAD_OPTION_ID);
            break;
    }

cleanUp:
    if ( M4NO_ERROR == err ) {
        ALOGV("VideoEditorVideoEncoder_setOption no error");
    } else {
        ALOGV("VideoEditorVideoEncoder_setOption ERROR 0x%X", err);
    }
ALOGV("VideoEditorVideoEncoder_setOption end"); 1181 return err; 1182 } 1183 1184 M4OSA_ERR VideoEditorVideoEncoder_getOption(M4ENCODER_Context pContext, 1185 M4OSA_UInt32 optionID, M4OSA_DataOption optionValue) { 1186 M4OSA_ERR err = M4NO_ERROR; 1187 VideoEditorVideoEncoder_Context* pEncoderContext = M4OSA_NULL; 1188 1189 ALOGV("VideoEditorVideoEncoder_getOption begin optinId 0x%X", optionID); 1190 // Input parameters check 1191 VIDEOEDITOR_CHECK(M4OSA_NULL != pContext, M4ERR_PARAMETER); 1192 pEncoderContext = (VideoEditorVideoEncoder_Context*)pContext; 1193 1194 switch( optionID ) { 1195 case M4ENCODER_kOptionID_EncoderHeader: 1196 VIDEOEDITOR_CHECK( 1197 M4OSA_NULL != pEncoderContext->mHeader.pBuf, M4ERR_STATE); 1198 *(M4ENCODER_Header**)optionValue = &(pEncoderContext->mHeader); 1199 break; 1200 default: 1201 ALOGV("VideoEditorVideoEncoder_getOption: unsupported optionId 0x%X", 1202 optionID); 1203 VIDEOEDITOR_CHECK(M4OSA_FALSE, M4ERR_BAD_OPTION_ID); 1204 break; 1205 } 1206 1207 cleanUp: 1208 if ( M4NO_ERROR == err ) { 1209 ALOGV("VideoEditorVideoEncoder_getOption no error"); 1210 } else { 1211 ALOGV("VideoEditorVideoEncoder_getOption ERROR 0x%X", err); 1212 } 1213 return err; 1214 } 1215 1216 M4OSA_ERR VideoEditorVideoEncoder_getInterface(M4ENCODER_Format format, 1217 M4ENCODER_Format* pFormat, 1218 M4ENCODER_GlobalInterface** pEncoderInterface, M4ENCODER_OpenMode mode){ 1219 M4OSA_ERR err = M4NO_ERROR; 1220 1221 // Input parameters check 1222 VIDEOEDITOR_CHECK(M4OSA_NULL != pFormat, M4ERR_PARAMETER); 1223 VIDEOEDITOR_CHECK(M4OSA_NULL != pEncoderInterface, M4ERR_PARAMETER); 1224 1225 ALOGV("VideoEditorVideoEncoder_getInterface begin 0x%x 0x%x %d", pFormat, 1226 pEncoderInterface, mode); 1227 1228 SAFE_MALLOC(*pEncoderInterface, M4ENCODER_GlobalInterface, 1, 1229 "VideoEditorVideoEncoder"); 1230 1231 *pFormat = format; 1232 1233 switch( format ) { 1234 case M4ENCODER_kH263: 1235 { 1236 (*pEncoderInterface)->pFctInit = 1237 VideoEditorVideoEncoder_init_H263; 1238 break; 1239 } 1240 case M4ENCODER_kMPEG4: 1241 { 1242 (*pEncoderInterface)->pFctInit = 1243 VideoEditorVideoEncoder_init_MPEG4; 1244 break; 1245 } 1246 case M4ENCODER_kH264: 1247 { 1248 (*pEncoderInterface)->pFctInit = 1249 VideoEditorVideoEncoder_init_H264; 1250 break; 1251 } 1252 default: 1253 ALOGV("VideoEditorVideoEncoder_getInterface : unsupported format %d", 1254 format); 1255 VIDEOEDITOR_CHECK(M4OSA_FALSE, M4ERR_PARAMETER); 1256 break; 1257 } 1258 (*pEncoderInterface)->pFctOpen = VideoEditorVideoEncoder_open; 1259 (*pEncoderInterface)->pFctStart = VideoEditorVideoEncoder_start; 1260 (*pEncoderInterface)->pFctStop = VideoEditorVideoEncoder_stop; 1261 (*pEncoderInterface)->pFctPause = M4OSA_NULL; 1262 (*pEncoderInterface)->pFctResume = M4OSA_NULL; 1263 (*pEncoderInterface)->pFctClose = VideoEditorVideoEncoder_close; 1264 (*pEncoderInterface)->pFctCleanup = VideoEditorVideoEncoder_cleanup; 1265 (*pEncoderInterface)->pFctRegulBitRate = 1266 VideoEditorVideoEncoder_regulBitRate; 1267 (*pEncoderInterface)->pFctEncode = VideoEditorVideoEncoder_encode; 1268 (*pEncoderInterface)->pFctSetOption = VideoEditorVideoEncoder_setOption; 1269 (*pEncoderInterface)->pFctGetOption = VideoEditorVideoEncoder_getOption; 1270 1271 cleanUp: 1272 if( M4NO_ERROR == err ) { 1273 ALOGV("VideoEditorVideoEncoder_getInterface no error"); 1274 } else { 1275 *pEncoderInterface = M4OSA_NULL; 1276 ALOGV("VideoEditorVideoEncoder_getInterface ERROR 0x%X", err); 1277 } 1278 return err; 1279 } 1280 1281 extern "C" { 1282 1283 M4OSA_ERR 
M4OSA_ERR VideoEditorVideoEncoder_getInterface_H263(M4ENCODER_Format* pFormat,
        M4ENCODER_GlobalInterface** pEncoderInterface, M4ENCODER_OpenMode mode){
    return VideoEditorVideoEncoder_getInterface(M4ENCODER_kH263, pFormat,
        pEncoderInterface, mode);
}

M4OSA_ERR VideoEditorVideoEncoder_getInterface_MPEG4(M4ENCODER_Format* pFormat,
        M4ENCODER_GlobalInterface** pEncoderInterface, M4ENCODER_OpenMode mode){
    return VideoEditorVideoEncoder_getInterface(M4ENCODER_kMPEG4, pFormat,
        pEncoderInterface, mode);
}

M4OSA_ERR VideoEditorVideoEncoder_getInterface_H264(M4ENCODER_Format* pFormat,
        M4ENCODER_GlobalInterface** pEncoderInterface, M4ENCODER_OpenMode mode){
    return VideoEditorVideoEncoder_getInterface(M4ENCODER_kH264, pFormat,
        pEncoderInterface, mode);
}

}  // extern "C"

}  // namespace android
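
// ---------------------------------------------------------------------------
// Illustrative call sequence (sketch only, excluded from the build): how a
// client such as the VSS engine would typically drive this shell through the
// M4ENCODER_GlobalInterface filled in by VideoEditorVideoEncoder_getInterface
// above. The writer interface, pre-processing callback, parameter block and
// open mode used here are caller-supplied placeholders; only the pFctXxx
// entry points and the M4ENCODER_k* names come from this file.
// ---------------------------------------------------------------------------
#if 0
static M4OSA_ERR exampleEncodeSession(M4WRITER_DataInterface* pWriter,
        M4VPP_apply_fct* pVPPfct, M4VPP_Context pVPPctxt,
        M4SYS_AccessUnit* pAU, M4ENCODER_Params* pParams,
        M4ENCODER_OpenMode mode) {
    M4ENCODER_Format format;
    M4ENCODER_GlobalInterface* pItf = M4OSA_NULL;
    M4ENCODER_Context ctx = M4OSA_NULL;

    // Retrieve the H264 flavour of the encoder interface
    M4OSA_ERR err = android::VideoEditorVideoEncoder_getInterface_H264(
        &format, &pItf, mode);
    if (M4NO_ERROR != err) return err;

    // Create the shell, build the StageFright/OMX graph, then start it
    err = pItf->pFctInit(&ctx, pWriter, pVPPfct, pVPPctxt, M4OSA_NULL,
        M4OSA_NULL);
    if (M4NO_ERROR == err) err = pItf->pFctOpen(ctx, pAU, pParams);
    if (M4NO_ERROR == err) err = pItf->pFctStart(ctx);

    // Encode: the pre-processing callback fills each input YUV frame
    // (pInPlane is unused by this shell), and M4ENCODER_kLastFrame flags
    // the final frame, which triggers EOS on the encoder source.
    if (M4NO_ERROR == err) err = pItf->pFctEncode(ctx, M4OSA_NULL, 0.0,
        M4ENCODER_kLastFrame);

    // Drain the remaining access units, then tear the graph down
    if (M4NO_ERROR == err) err = pItf->pFctStop(ctx);
    if (M4NO_ERROR == err) err = pItf->pFctClose(ctx);
    if (M4NO_ERROR == err) err = pItf->pFctCleanup(ctx);
    return err;
}
#endif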