/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "SoftAVCEncoder"
#include <utils/Log.h>
#include <utils/misc.h>

#include "avcenc_api.h"
#include "avcenc_int.h"
#include "OMX_Video.h"

#include <HardwareAPI.h>
#include <MetadataBufferType.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/Utils.h>
#include <ui/Rect.h>
#include <ui/GraphicBufferMapper.h>

#include "SoftAVCEncoder.h"

#if LOG_NDEBUG
#define UNUSED_UNLESS_VERBOSE(x) (void)(x)
#else
#define UNUSED_UNLESS_VERBOSE(x)
#endif

namespace android {

template<class T>
static void InitOMXParams(T *params) {
    params->nSize = sizeof(T);
    params->nVersion.s.nVersionMajor = 1;
    params->nVersion.s.nVersionMinor = 0;
    params->nVersion.s.nRevision = 0;
    params->nVersion.s.nStep = 0;
}

static const CodecProfileLevel kProfileLevels[] = {
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel2  },
};

typedef struct LevelConversion {
    OMX_U32 omxLevel;
    AVCLevel avcLevel;
    uint32_t maxMacroBlocks;
} LevelConversion;

static LevelConversion ConversionTable[] = {
    { OMX_VIDEO_AVCLevel1,  AVC_LEVEL1_B, 99 },
    { OMX_VIDEO_AVCLevel1b, AVC_LEVEL1,   99 },
    { OMX_VIDEO_AVCLevel11, AVC_LEVEL1_1, 396 },
    { OMX_VIDEO_AVCLevel12, AVC_LEVEL1_2, 396 },
    { OMX_VIDEO_AVCLevel13, AVC_LEVEL1_3, 396 },
    { OMX_VIDEO_AVCLevel2,  AVC_LEVEL2,   396 },
#if 0
    // encoding speed is very poor if video resolution
    // is higher than CIF or if level is higher than 2
    { OMX_VIDEO_AVCLevel21, AVC_LEVEL2_1, 792 },
    { OMX_VIDEO_AVCLevel22, AVC_LEVEL2_2, 1620 },
    { OMX_VIDEO_AVCLevel3,  AVC_LEVEL3,   1620 },
    { OMX_VIDEO_AVCLevel31, AVC_LEVEL3_1, 3600 },
    { OMX_VIDEO_AVCLevel32, AVC_LEVEL3_2, 5120 },
    { OMX_VIDEO_AVCLevel4,  AVC_LEVEL4,   8192 },
    { OMX_VIDEO_AVCLevel41, AVC_LEVEL4_1, 8192 },
    { OMX_VIDEO_AVCLevel42, AVC_LEVEL4_2, 8704 },
    { OMX_VIDEO_AVCLevel5,  AVC_LEVEL5,   22080 },
    { OMX_VIDEO_AVCLevel51, AVC_LEVEL5_1, 36864 },
#endif
};

static status_t ConvertOmxAvcLevelToAvcSpecLevel(
        OMX_U32 omxLevel, AVCLevel *avcLevel) {
    for (size_t i = 0, n = sizeof(ConversionTable)/sizeof(ConversionTable[0]);
        i < n; ++i) {
        if (omxLevel == ConversionTable[i].omxLevel) {
            *avcLevel = ConversionTable[i].avcLevel;
            return OK;
        }
    }

    ALOGE("ConvertOmxAvcLevelToAvcSpecLevel: %d level not supported",
            (int32_t)omxLevel);

    return BAD_VALUE;
}

static status_t ConvertAvcSpecLevelToOmxAvcLevel(
    AVCLevel avcLevel, OMX_U32 *omxLevel) {
    for (size_t i = 0, n = sizeof(ConversionTable)/sizeof(ConversionTable[0]);
        i < n; ++i) {
        if (avcLevel == ConversionTable[i].avcLevel) {
            *omxLevel = ConversionTable[i].omxLevel;
            return OK;
        }
    }

    ALOGE("ConvertAvcSpecLevelToOmxAvcLevel: %d level not supported",
            (int32_t) avcLevel);

    return BAD_VALUE;
}

static void* MallocWrapper(
        void * /* userData */, int32_t size, int32_t /* attrs */) {
    void *ptr = malloc(size);
    if (ptr)
        memset(ptr, 0, size);
    return ptr;
}

static void FreeWrapper(void * /* userData */, void* ptr) {
    free(ptr);
}

static int32_t DpbAllocWrapper(void *userData,
        unsigned int sizeInMbs, unsigned int numBuffers) {
    SoftAVCEncoder *encoder = static_cast<SoftAVCEncoder *>(userData);
    CHECK(encoder != NULL);
    return encoder->allocOutputBuffers(sizeInMbs, numBuffers);
}

static int32_t BindFrameWrapper(
        void *userData, int32_t index, uint8_t **yuv) {
    SoftAVCEncoder *encoder = static_cast<SoftAVCEncoder *>(userData);
    CHECK(encoder != NULL);
    return encoder->bindOutputBuffer(index, yuv);
}

static void UnbindFrameWrapper(void *userData, int32_t index) {
    SoftAVCEncoder *encoder = static_cast<SoftAVCEncoder *>(userData);
    CHECK(encoder != NULL);
    return encoder->unbindOutputBuffer(index);
}

SoftAVCEncoder::SoftAVCEncoder(
            const char *name,
            const OMX_CALLBACKTYPE *callbacks,
            OMX_PTR appData,
            OMX_COMPONENTTYPE **component)
    : SoftVideoEncoderOMXComponent(
            name, "video_encoder.avc", OMX_VIDEO_CodingAVC,
            kProfileLevels, NELEM(kProfileLevels),
            176 /* width */, 144 /* height */,
            callbacks, appData, component),
      mIDRFrameRefreshIntervalInSec(1),
      mAVCEncProfile(AVC_BASELINE),
      mAVCEncLevel(AVC_LEVEL2),
      mNumInputFrames(-1),
      mPrevTimestampUs(-1),
      mStarted(false),
      mSawInputEOS(false),
      mSignalledError(false),
      mHandle(new tagAVCHandle),
      mEncParams(new tagAVCEncParam),
      mInputFrameData(NULL),
      mSliceGroup(NULL) {

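    // Size each output buffer at roughly 320 bytes per macroblock for the
    // largest frame (in macroblocks) of the highest enabled level.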
    const size_t kOutputBufferSize =
        320 * ConversionTable[NELEM(ConversionTable) - 1].maxMacroBlocks;

    initPorts(
            kNumBuffers, kNumBuffers, kOutputBufferSize,
            MEDIA_MIMETYPE_VIDEO_AVC, 2 /* minCompressionRatio */);

    ALOGI("Construct SoftAVCEncoder");
}

SoftAVCEncoder::~SoftAVCEncoder() {
    ALOGV("Destruct SoftAVCEncoder");
    releaseEncoder();
    List<BufferInfo *> &outQueue = getPortQueue(1);
    List<BufferInfo *> &inQueue = getPortQueue(0);
    CHECK(outQueue.empty());
    CHECK(inQueue.empty());
}

OMX_ERRORTYPE SoftAVCEncoder::initEncParams() {
    CHECK(mHandle != NULL);
    memset(mHandle, 0, sizeof(tagAVCHandle));
    mHandle->AVCObject = NULL;
    mHandle->userData = this;
    mHandle->CBAVC_DPBAlloc = DpbAllocWrapper;
    mHandle->CBAVC_FrameBind = BindFrameWrapper;
    mHandle->CBAVC_FrameUnbind = UnbindFrameWrapper;
    mHandle->CBAVC_Malloc = MallocWrapper;
    mHandle->CBAVC_Free = FreeWrapper;

    CHECK(mEncParams != NULL);
    memset(mEncParams, 0, sizeof(*mEncParams));
    mEncParams->rate_control = AVC_ON;
    mEncParams->initQP = 0;
    mEncParams->init_CBP_removal_delay = 1600;

    mEncParams->intramb_refresh = 0;
    mEncParams->auto_scd = AVC_ON;
    mEncParams->out_of_band_param_set = AVC_ON;
    mEncParams->poc_type = 2;
    mEncParams->log2_max_poc_lsb_minus_4 = 12;
    mEncParams->delta_poc_zero_flag = 0;
    mEncParams->offset_poc_non_ref = 0;
    mEncParams->offset_top_bottom = 0;
    mEncParams->num_ref_in_cycle = 0;
    mEncParams->offset_poc_ref = NULL;

    mEncParams->num_ref_frame = 1;
    mEncParams->num_slice_group = 1;
    mEncParams->fmo_type = 0;

    mEncParams->db_filter = AVC_ON;
    mEncParams->disable_db_idc = 0;

    mEncParams->alpha_offset = 0;
    mEncParams->beta_offset = 0;
    mEncParams->constrained_intra_pred = AVC_OFF;

    mEncParams->data_par = AVC_OFF;
    mEncParams->fullsearch = AVC_OFF;
    mEncParams->search_range = 16;
    mEncParams->sub_pel = AVC_OFF;
    mEncParams->submb_pred = AVC_OFF;
    mEncParams->rdopt_mode = AVC_OFF;
    mEncParams->bidir_pred = AVC_OFF;

    mEncParams->use_overrun_buffer = AVC_OFF;

    if (mColorFormat != OMX_COLOR_FormatYUV420Planar || mInputDataIsMeta) {
        // Color conversion is needed.
        free(mInputFrameData);
        mInputFrameData =
            (uint8_t *) malloc((mWidth * mHeight * 3) >> 1);
        CHECK(mInputFrameData != NULL);
    }

    // PV's AVC encoder requires both video dimensions to be a multiple of 16.
    if (mWidth % 16 != 0 || mHeight % 16 != 0) {
        ALOGE("Video frame size %dx%d must be a multiple of 16",
            mWidth, mHeight);
        return OMX_ErrorBadParameter;
    }

    mEncParams->width = mWidth;
    mEncParams->height = mHeight;
    mEncParams->bitrate = mBitrate;
    mEncParams->frame_rate = (1000 * mFramerate) >> 16;  // frame rate scaled by 1000; mFramerate is in Q16
    mEncParams->CPB_size = (uint32_t) (mBitrate >> 1);

    int32_t nMacroBlocks = divUp(mWidth, 16) * divUp(mHeight, 16);
    CHECK(mSliceGroup == NULL);
    mSliceGroup = (uint32_t *) malloc(sizeof(uint32_t) * nMacroBlocks);
    CHECK(mSliceGroup != NULL);
    for (int ii = 0, idx = 0; ii < nMacroBlocks; ++ii) {
        mSliceGroup[ii] = idx++;
        if (idx >= mEncParams->num_slice_group) {
            idx = 0;
        }
    }
    mEncParams->slice_group = mSliceGroup;

    // Set IDR frame refresh interval
    if (mIDRFrameRefreshIntervalInSec < 0) {
        mEncParams->idr_period = -1;
    } else if (mIDRFrameRefreshIntervalInSec == 0) {
        mEncParams->idr_period = 1;  // All I frames
    } else {
        mEncParams->idr_period =
            (mIDRFrameRefreshIntervalInSec * mFramerate) >> 16; // mFramerate is in Q16
    }

    // Set profile and level
    mEncParams->profile = mAVCEncProfile;
    mEncParams->level = mAVCEncLevel;

    return OMX_ErrorNone;
}

OMX_ERRORTYPE SoftAVCEncoder::initEncoder() {
    CHECK(!mStarted);

    OMX_ERRORTYPE errType = OMX_ErrorNone;
    if (OMX_ErrorNone != (errType = initEncParams())) {
        ALOGE("Failed to initialize encoder params");
        mSignalledError = true;
        notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
        return errType;
    }

    AVCEnc_Status err;
    err = PVAVCEncInitialize(mHandle, mEncParams, NULL, NULL);
    if (err != AVCENC_SUCCESS) {
        ALOGE("Failed to initialize the encoder: %d", err);
        mSignalledError = true;
        notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
        return OMX_ErrorUndefined;
    }

    mNumInputFrames = -2;  // 1st two buffers contain SPS and PPS
    mSpsPpsHeaderReceived = false;
    mReadyForNextFrame = true;
    mIsIDRFrame = false;
    mStarted = true;

    return OMX_ErrorNone;
}

OMX_ERRORTYPE SoftAVCEncoder::releaseEncoder() {
    if (!mStarted) {
        return OMX_ErrorNone;
    }

    PVAVCCleanUpEncoder(mHandle);
    releaseOutputBuffers();

    free(mInputFrameData);
    mInputFrameData = NULL;

    free(mSliceGroup);
    mSliceGroup = NULL;

    delete mEncParams;
    mEncParams = NULL;

    delete mHandle;
    mHandle = NULL;

    mStarted = false;

    return OMX_ErrorNone;
}

void SoftAVCEncoder::releaseOutputBuffers() {
    for (size_t i = 0; i < mOutputBuffers.size(); ++i) {
        MediaBuffer *buffer = mOutputBuffers.editItemAt(i);
        buffer->setObserver(NULL);
        buffer->release();
    }
    mOutputBuffers.clear();
}

OMX_ERRORTYPE SoftAVCEncoder::internalGetParameter(
        OMX_INDEXTYPE index, OMX_PTR params) {
    switch (index) {
        case OMX_IndexParamVideoBitrate:
        {
            OMX_VIDEO_PARAM_BITRATETYPE *bitRate =
                (OMX_VIDEO_PARAM_BITRATETYPE *) params;

            if (bitRate->nPortIndex != 1) {
                return OMX_ErrorUndefined;
            }

            bitRate->eControlRate = OMX_Video_ControlRateVariable;
            bitRate->nTargetBitrate = mBitrate;
            return OMX_ErrorNone;
        }

        case OMX_IndexParamVideoAvc:
        {
            OMX_VIDEO_PARAM_AVCTYPE *avcParams =
                (OMX_VIDEO_PARAM_AVCTYPE *)params;

            if (avcParams->nPortIndex != 1) {
                return OMX_ErrorUndefined;
            }

            avcParams->eProfile = OMX_VIDEO_AVCProfileBaseline;
            OMX_U32 omxLevel = AVC_LEVEL2;
            if (OMX_ErrorNone !=
                ConvertAvcSpecLevelToOmxAvcLevel(mAVCEncLevel, &omxLevel)) {
                return OMX_ErrorUndefined;
            }

            avcParams->eLevel = (OMX_VIDEO_AVCLEVELTYPE) omxLevel;
            avcParams->nRefFrames = 1;
            avcParams->nBFrames = 0;
            avcParams->bUseHadamard = OMX_TRUE;
            avcParams->nAllowedPictureTypes =
                    (OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP);
            avcParams->nRefIdx10ActiveMinus1 = 0;
            avcParams->nRefIdx11ActiveMinus1 = 0;
            avcParams->bWeightedPPrediction = OMX_FALSE;
            avcParams->bEntropyCodingCABAC = OMX_FALSE;
            avcParams->bconstIpred = OMX_FALSE;
            avcParams->bDirect8x8Inference = OMX_FALSE;
            avcParams->bDirectSpatialTemporal = OMX_FALSE;
            avcParams->nCabacInitIdc = 0;
            return OMX_ErrorNone;
        }

        default:
            return SoftVideoEncoderOMXComponent::internalGetParameter(index, params);
    }
}

OMX_ERRORTYPE SoftAVCEncoder::internalSetParameter(
        OMX_INDEXTYPE index, const OMX_PTR params) {
    int32_t indexFull = index;

    switch (indexFull) {
        case OMX_IndexParamVideoBitrate:
        {
            OMX_VIDEO_PARAM_BITRATETYPE *bitRate =
                (OMX_VIDEO_PARAM_BITRATETYPE *) params;

            if (bitRate->nPortIndex != 1 ||
                bitRate->eControlRate != OMX_Video_ControlRateVariable) {
                return OMX_ErrorUndefined;
            }

            mBitrate = bitRate->nTargetBitrate;
            return OMX_ErrorNone;
        }

        case OMX_IndexParamVideoAvc:
        {
            OMX_VIDEO_PARAM_AVCTYPE *avcType =
                (OMX_VIDEO_PARAM_AVCTYPE *)params;

            if (avcType->nPortIndex != 1) {
                return OMX_ErrorUndefined;
            }

            // PV's AVC encoder only supports baseline profile
            if (avcType->eProfile != OMX_VIDEO_AVCProfileBaseline ||
                avcType->nRefFrames != 1 ||
                avcType->nBFrames != 0 ||
                avcType->bUseHadamard != OMX_TRUE ||
                (avcType->nAllowedPictureTypes & OMX_VIDEO_PictureTypeB) != 0 ||
                avcType->nRefIdx10ActiveMinus1 != 0 ||
                avcType->nRefIdx11ActiveMinus1 != 0 ||
                avcType->bWeightedPPrediction != OMX_FALSE ||
                avcType->bEntropyCodingCABAC != OMX_FALSE ||
                avcType->bconstIpred != OMX_FALSE ||
                avcType->bDirect8x8Inference != OMX_FALSE ||
                avcType->bDirectSpatialTemporal != OMX_FALSE ||
                avcType->nCabacInitIdc != 0) {
                return OMX_ErrorUndefined;
            }

            if (OK != ConvertOmxAvcLevelToAvcSpecLevel(avcType->eLevel, &mAVCEncLevel)) {
                return OMX_ErrorUndefined;
            }

            return OMX_ErrorNone;
        }

        default:
            return SoftVideoEncoderOMXComponent::internalSetParameter(index, params);
    }
}

void SoftAVCEncoder::onQueueFilled(OMX_U32 /* portIndex */) {
    if (mSignalledError || mSawInputEOS) {
        return;
    }

    if (!mStarted) {
        if (OMX_ErrorNone != initEncoder()) {
            return;
        }
    }

    List<BufferInfo *> &inQueue = getPortQueue(0);
    List<BufferInfo *> &outQueue = getPortQueue(1);

    while (!mSawInputEOS && !inQueue.empty() && !outQueue.empty()) {
        BufferInfo *inInfo = *inQueue.begin();
        OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
        BufferInfo *outInfo = *outQueue.begin();
        OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;

        outHeader->nTimeStamp = 0;
        outHeader->nFlags = 0;
        outHeader->nOffset = 0;
        outHeader->nFilledLen = 0;

        uint8_t *outPtr = (uint8_t *) outHeader->pBuffer;
        uint32_t dataLength = outHeader->nAllocLen;

        if (!mSpsPpsHeaderReceived && mNumInputFrames < 0) {
            // 4 bytes are reserved for holding the start code 0x00000001
            // of the sequence parameter set at the beginning.
            outPtr += 4;
            dataLength -= 4;
        }

        int32_t type;
        AVCEnc_Status encoderStatus = AVCENC_SUCCESS;

        // Combine SPS and PPS and place them in the very first output buffer
        // SPS and PPS are separated by start code 0x00000001
        // Assume that we have exactly one SPS and exactly one PPS.
        while (!mSpsPpsHeaderReceived && mNumInputFrames <= 0) {
            encoderStatus = PVAVCEncodeNAL(mHandle, outPtr, &dataLength, &type);
            if (encoderStatus == AVCENC_WRONG_STATE) {
                mSpsPpsHeaderReceived = true;
                CHECK_EQ(0, mNumInputFrames);  // 1st video frame is 0
                outHeader->nFlags = OMX_BUFFERFLAG_CODECCONFIG;
                outQueue.erase(outQueue.begin());
                outInfo->mOwnedByUs = false;
                notifyFillBufferDone(outHeader);
                return;
            } else {
                switch (type) {
                    case AVC_NALTYPE_SPS:
                        ++mNumInputFrames;
                        memcpy((uint8_t *)outHeader->pBuffer, "\x00\x00\x00\x01", 4);
                        outHeader->nFilledLen = 4 + dataLength;
                        outPtr += (dataLength + 4);  // 4 bytes for next start code
                        dataLength = outHeader->nAllocLen - outHeader->nFilledLen;
                        break;
                    default:
                        CHECK_EQ(AVC_NALTYPE_PPS, type);
                        ++mNumInputFrames;
                        memcpy((uint8_t *) outHeader->pBuffer + outHeader->nFilledLen,
                                "\x00\x00\x00\x01", 4);
                        outHeader->nFilledLen += (dataLength + 4);
                        outPtr += (dataLength + 4);
                        break;
                }
            }
        }

        // Get next input video frame
        if (mReadyForNextFrame) {
            // Save the input buffer info so that it can be
            // passed to an output buffer
            InputBufferInfo info;
            info.mTimeUs = inHeader->nTimeStamp;
            info.mFlags = inHeader->nFlags;
            mInputBufferInfoVec.push(info);
            mPrevTimestampUs = inHeader->nTimeStamp;

            if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
                mSawInputEOS = true;
            }

            if (inHeader->nFilledLen > 0) {
                AVCFrameIO videoInput;
                memset(&videoInput, 0, sizeof(videoInput));
                videoInput.height = align(mHeight, 16);
                videoInput.pitch = align(mWidth, 16);
                videoInput.coding_timestamp = (inHeader->nTimeStamp + 500) / 1000;  // in ms
                const uint8_t *inputData = NULL;
                if (mInputDataIsMeta) {
                    if (inHeader->nFilledLen != 8) {
                        ALOGE("MetaData buffer is wrong size! "
                                "(got %u bytes, expected 8)", inHeader->nFilledLen);
                        mSignalledError = true;
                        notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
                        return;
                    }
                    inputData =
                        extractGraphicBuffer(
                                mInputFrameData, (mWidth * mHeight * 3) >> 1,
                                inHeader->pBuffer + inHeader->nOffset, inHeader->nFilledLen,
                                mWidth, mHeight);
                    if (inputData == NULL) {
                        ALOGE("Unable to extract gralloc buffer in metadata mode");
                        mSignalledError = true;
                        notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
                        return;
                    }
                    // TODO: Verify/convert pixel format enum
                } else {
                    inputData = (const uint8_t *)inHeader->pBuffer + inHeader->nOffset;
                    if (mColorFormat != OMX_COLOR_FormatYUV420Planar) {
                        ConvertYUV420SemiPlanarToYUV420Planar(
                            inputData, mInputFrameData, mWidth, mHeight);
                        inputData = mInputFrameData;
                    }
                }

                CHECK(inputData != NULL);
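                // Planar YUV420 layout: the Cb plane follows the full Y plane,
                // and Cr follows Cb; each chroma plane is a quarter of the Y plane.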
                videoInput.YCbCr[0] = (uint8_t *)inputData;
                videoInput.YCbCr[1] = videoInput.YCbCr[0] + videoInput.height * videoInput.pitch;
                videoInput.YCbCr[2] = videoInput.YCbCr[1] +
                    ((videoInput.height * videoInput.pitch) >> 2);
                videoInput.disp_order = mNumInputFrames;

                encoderStatus = PVAVCEncSetInput(mHandle, &videoInput);
                if (encoderStatus == AVCENC_SUCCESS || encoderStatus == AVCENC_NEW_IDR) {
                    mReadyForNextFrame = false;
                    ++mNumInputFrames;
                    if (encoderStatus == AVCENC_NEW_IDR) {
                        mIsIDRFrame = true;
                    }
                } else {
                    if (encoderStatus < AVCENC_SUCCESS) {
                        ALOGE("encoderStatus = %d at line %d", encoderStatus, __LINE__);
                        mSignalledError = true;
                        notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
                        return;
                    } else {
                        ALOGV("encoderStatus = %d at line %d", encoderStatus, __LINE__);
                        inQueue.erase(inQueue.begin());
                        inInfo->mOwnedByUs = false;
                        notifyEmptyBufferDone(inHeader);
                        return;
                    }
                }
            }
        }

        // Encode an input video frame
        CHECK(encoderStatus == AVCENC_SUCCESS || encoderStatus == AVCENC_NEW_IDR);
        dataLength = outHeader->nAllocLen;  // Reset the output buffer length
        if (inHeader->nFilledLen > 0) {
            if (outHeader->nAllocLen >= 4) {
                memcpy(outPtr, "\x00\x00\x00\x01", 4);
                outPtr += 4;
                dataLength -= 4;
            }
            encoderStatus = PVAVCEncodeNAL(mHandle, outPtr, &dataLength, &type);
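            // Report the total bytes written from the start of the buffer,
            // including the 4-byte start code emitted above (when one was written).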
            dataLength = outPtr + dataLength - outHeader->pBuffer;
            if (encoderStatus == AVCENC_SUCCESS) {
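                // use_overrun_buffer is AVC_OFF, so the encoder should never
                // have fallen back to an overrun buffer.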
                CHECK(NULL == PVAVCEncGetOverrunBuffer(mHandle));
            } else if (encoderStatus == AVCENC_PICTURE_READY) {
                CHECK(NULL == PVAVCEncGetOverrunBuffer(mHandle));
                if (mIsIDRFrame) {
                    outHeader->nFlags |= OMX_BUFFERFLAG_SYNCFRAME;
                    mIsIDRFrame = false;
                }
                mReadyForNextFrame = true;
                AVCFrameIO recon;
                if (PVAVCEncGetRecon(mHandle, &recon) == AVCENC_SUCCESS) {
                    PVAVCEncReleaseRecon(mHandle, &recon);
                }
            } else {
                dataLength = 0;
                mReadyForNextFrame = true;
            }

            if (encoderStatus < AVCENC_SUCCESS) {
                ALOGE("encoderStatus = %d at line %d", encoderStatus, __LINE__);
                mSignalledError = true;
                notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
                return;
            }
        } else {
            dataLength = 0;
        }

        inQueue.erase(inQueue.begin());
        inInfo->mOwnedByUs = false;
        notifyEmptyBufferDone(inHeader);

        outQueue.erase(outQueue.begin());
        CHECK(!mInputBufferInfoVec.empty());
        InputBufferInfo *inputBufInfo = mInputBufferInfoVec.begin();
        outHeader->nTimeStamp = inputBufInfo->mTimeUs;
        outHeader->nFlags |= (inputBufInfo->mFlags | OMX_BUFFERFLAG_ENDOFFRAME);
        if (mSawInputEOS) {
            outHeader->nFlags |= OMX_BUFFERFLAG_EOS;
        }
        outHeader->nFilledLen = dataLength;
        outInfo->mOwnedByUs = false;
        notifyFillBufferDone(outHeader);
        mInputBufferInfoVec.erase(mInputBufferInfoVec.begin());
    }
}

int32_t SoftAVCEncoder::allocOutputBuffers(
        unsigned int sizeInMbs, unsigned int numBuffers) {
    CHECK(mOutputBuffers.isEmpty());
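    // One 16x16 macroblock of planar YUV420 takes 256 luma + 128 chroma bytes,
    // i.e. 384 = (1 << 7) * 3 bytes per macroblock.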
    size_t frameSize = (sizeInMbs << 7) * 3;
    for (unsigned int i = 0; i < numBuffers; ++i) {
        MediaBuffer *buffer = new MediaBuffer(frameSize);
        buffer->setObserver(this);
        mOutputBuffers.push(buffer);
    }

    return 1;
}

void SoftAVCEncoder::unbindOutputBuffer(int32_t index) {
    CHECK(index >= 0);
}

int32_t SoftAVCEncoder::bindOutputBuffer(int32_t index, uint8_t **yuv) {
    CHECK(index >= 0);
    CHECK(index < (int32_t) mOutputBuffers.size());
    *yuv = (uint8_t *) mOutputBuffers[index]->data();

    return 1;
}

void SoftAVCEncoder::signalBufferReturned(MediaBuffer *buffer) {
    UNUSED_UNLESS_VERBOSE(buffer);
    ALOGV("signalBufferReturned: %p", buffer);
}

}  // namespace android

android::SoftOMXComponent *createSoftOMXComponent(
        const char *name, const OMX_CALLBACKTYPE *callbacks,
        OMX_PTR appData, OMX_COMPONENTTYPE **component) {
    return new android::SoftAVCEncoder(name, callbacks, appData, component);
}