/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "SoftAVCEncoder"
#include <utils/Log.h>
#include <utils/misc.h>

#include "avcenc_api.h"
#include "avcenc_int.h"
#include "OMX_Video.h"

#include <HardwareAPI.h>
#include <MetadataBufferType.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/Utils.h>
#include <ui/Rect.h>
#include <ui/GraphicBufferMapper.h>

#include "SoftAVCEncoder.h"

#if LOG_NDEBUG
#define UNUSED_UNLESS_VERBOSE(x) (void)(x)
#else
#define UNUSED_UNLESS_VERBOSE(x)
#endif

namespace android {

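// Helper that fills in the standard OMX struct header (nSize plus OMX spec
// version 1.0.0.0) that every OMX_*PARAM/CONFIG structure carries.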
template<class T>
static void InitOMXParams(T *params) {
    params->nSize = sizeof(T);
    params->nVersion.s.nVersionMajor = 1;
    params->nVersion.s.nVersionMinor = 0;
    params->nVersion.s.nRevision = 0;
    params->nVersion.s.nStep = 0;
}

static const CodecProfileLevel kProfileLevels[] = {
    { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel2  },
};

typedef struct LevelConversion {
    OMX_U32 omxLevel;
    AVCLevel avcLevel;
    uint32_t maxMacroBlocks;
} LevelConversion;

static LevelConversion ConversionTable[] = {
    { OMX_VIDEO_AVCLevel1,  AVC_LEVEL1_B, 99 },
    { OMX_VIDEO_AVCLevel1b, AVC_LEVEL1,   99 },
    { OMX_VIDEO_AVCLevel11, AVC_LEVEL1_1, 396 },
    { OMX_VIDEO_AVCLevel12, AVC_LEVEL1_2, 396 },
    { OMX_VIDEO_AVCLevel13, AVC_LEVEL1_3, 396 },
    { OMX_VIDEO_AVCLevel2,  AVC_LEVEL2,   396 },
#if 0
    // encoding speed is very poor if video resolution
    // is higher than CIF or if level is higher than 2
    { OMX_VIDEO_AVCLevel21, AVC_LEVEL2_1, 792 },
    { OMX_VIDEO_AVCLevel22, AVC_LEVEL2_2, 1620 },
    { OMX_VIDEO_AVCLevel3,  AVC_LEVEL3,   1620 },
    { OMX_VIDEO_AVCLevel31, AVC_LEVEL3_1, 3600 },
    { OMX_VIDEO_AVCLevel32, AVC_LEVEL3_2, 5120 },
    { OMX_VIDEO_AVCLevel4,  AVC_LEVEL4,   8192 },
    { OMX_VIDEO_AVCLevel41, AVC_LEVEL4_1, 8192 },
    { OMX_VIDEO_AVCLevel42, AVC_LEVEL4_2, 8704 },
    { OMX_VIDEO_AVCLevel5,  AVC_LEVEL5,   22080 },
    { OMX_VIDEO_AVCLevel51, AVC_LEVEL5_1, 36864 },
#endif
};
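// For reference, the maxMacroBlocks values above follow the per-level maximum
// frame size (in macroblocks) from the H.264 spec; e.g. Level 2 allows 396 MBs,
// which is exactly CIF: (352 / 16) * (288 / 16) = 22 * 18 = 396.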

static status_t ConvertOmxAvcLevelToAvcSpecLevel(
        OMX_U32 omxLevel, AVCLevel *avcLevel) {
    for (size_t i = 0, n = sizeof(ConversionTable)/sizeof(ConversionTable[0]);
        i < n; ++i) {
        if (omxLevel == ConversionTable[i].omxLevel) {
            *avcLevel = ConversionTable[i].avcLevel;
            return OK;
        }
    }

    ALOGE("ConvertOmxAvcLevelToAvcSpecLevel: %d level not supported",
            (int32_t)omxLevel);

    return BAD_VALUE;
}

static status_t ConvertAvcSpecLevelToOmxAvcLevel(
    AVCLevel avcLevel, OMX_U32 *omxLevel) {
    for (size_t i = 0, n = sizeof(ConversionTable)/sizeof(ConversionTable[0]);
        i < n; ++i) {
        if (avcLevel == ConversionTable[i].avcLevel) {
            *omxLevel = ConversionTable[i].omxLevel;
            return OK;
        }
    }

    ALOGE("ConvertAvcSpecLevelToOmxAvcLevel: %d level not supported",
            (int32_t) avcLevel);

    return BAD_VALUE;
}

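// The PV AVC encoder core is a C library driven through callbacks. The static
// wrappers below adapt those callbacks to SoftAVCEncoder member functions via
// the userData pointer that initEncParams() registers on the codec handle.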
static void* MallocWrapper(
        void * /* userData */, int32_t size, int32_t /* attrs */) {
    void *ptr = malloc(size);
    if (ptr)
        memset(ptr, 0, size);
    return ptr;
}

static void FreeWrapper(void * /* userData */, void* ptr) {
    free(ptr);
}

static int32_t DpbAllocWrapper(void *userData,
        unsigned int sizeInMbs, unsigned int numBuffers) {
    SoftAVCEncoder *encoder = static_cast<SoftAVCEncoder *>(userData);
    CHECK(encoder != NULL);
    return encoder->allocOutputBuffers(sizeInMbs, numBuffers);
}

static int32_t BindFrameWrapper(
        void *userData, int32_t index, uint8_t **yuv) {
    SoftAVCEncoder *encoder = static_cast<SoftAVCEncoder *>(userData);
    CHECK(encoder != NULL);
    return encoder->bindOutputBuffer(index, yuv);
}

static void UnbindFrameWrapper(void *userData, int32_t index) {
    SoftAVCEncoder *encoder = static_cast<SoftAVCEncoder *>(userData);
    CHECK(encoder != NULL);
    return encoder->unbindOutputBuffer(index);
}

SoftAVCEncoder::SoftAVCEncoder(
            const char *name,
            const OMX_CALLBACKTYPE *callbacks,
            OMX_PTR appData,
            OMX_COMPONENTTYPE **component)
    : SoftVideoEncoderOMXComponent(
            name, "video_encoder.avc", OMX_VIDEO_CodingAVC,
            kProfileLevels, NELEM(kProfileLevels),
            176 /* width */, 144 /* height */,
            callbacks, appData, component),
      mIDRFrameRefreshIntervalInSec(1),
      mAVCEncProfile(AVC_BASELINE),
      mAVCEncLevel(AVC_LEVEL2),
      mNumInputFrames(-1),
      mPrevTimestampUs(-1),
      mStarted(false),
      mSawInputEOS(false),
      mSignalledError(false),
      mHandle(new tagAVCHandle),
      mEncParams(new tagAVCEncParam),
      mInputFrameData(NULL),
      mSliceGroup(NULL) {

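    // Worst-case output buffer: 320 bytes per macroblock times the largest
    // maxMacroBlocks entry in ConversionTable (396 for Level 2), i.e.
    // 320 * 396 = 126720 bytes.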
    const size_t kOutputBufferSize =
        320 * ConversionTable[NELEM(ConversionTable) - 1].maxMacroBlocks;

    initPorts(
            kNumBuffers, kNumBuffers, kOutputBufferSize,
            MEDIA_MIMETYPE_VIDEO_AVC, 2 /* minCompressionRatio */);

    ALOGI("Construct SoftAVCEncoder");
}

SoftAVCEncoder::~SoftAVCEncoder() {
    ALOGV("Destruct SoftAVCEncoder");
    releaseEncoder();
    List<BufferInfo *> &outQueue = getPortQueue(1);
    List<BufferInfo *> &inQueue = getPortQueue(0);
    CHECK(outQueue.empty());
    CHECK(inQueue.empty());
}

OMX_ERRORTYPE SoftAVCEncoder::initEncParams() {
    CHECK(mHandle != NULL);
    memset(mHandle, 0, sizeof(tagAVCHandle));
    mHandle->AVCObject = NULL;
    mHandle->userData = this;
    mHandle->CBAVC_DPBAlloc = DpbAllocWrapper;
    mHandle->CBAVC_FrameBind = BindFrameWrapper;
    mHandle->CBAVC_FrameUnbind = UnbindFrameWrapper;
    mHandle->CBAVC_Malloc = MallocWrapper;
    mHandle->CBAVC_Free = FreeWrapper;

    CHECK(mEncParams != NULL);
    memset(mEncParams, 0, sizeof(*mEncParams));
    mEncParams->rate_control = AVC_ON;
    mEncParams->initQP = 0;
    mEncParams->init_CBP_removal_delay = 1600;

    mEncParams->intramb_refresh = 0;
    mEncParams->auto_scd = AVC_ON;
    mEncParams->out_of_band_param_set = AVC_ON;
    mEncParams->poc_type = 2;
    mEncParams->log2_max_poc_lsb_minus_4 = 12;
    mEncParams->delta_poc_zero_flag = 0;
    mEncParams->offset_poc_non_ref = 0;
    mEncParams->offset_top_bottom = 0;
    mEncParams->num_ref_in_cycle = 0;
    mEncParams->offset_poc_ref = NULL;

    mEncParams->num_ref_frame = 1;
    mEncParams->num_slice_group = 1;
    mEncParams->fmo_type = 0;

    mEncParams->db_filter = AVC_ON;
    mEncParams->disable_db_idc = 0;

    mEncParams->alpha_offset = 0;
    mEncParams->beta_offset = 0;
    mEncParams->constrained_intra_pred = AVC_OFF;

    mEncParams->data_par = AVC_OFF;
    mEncParams->fullsearch = AVC_OFF;
    mEncParams->search_range = 16;
    mEncParams->sub_pel = AVC_OFF;
    mEncParams->submb_pred = AVC_OFF;
    mEncParams->rdopt_mode = AVC_OFF;
    mEncParams->bidir_pred = AVC_OFF;

    mEncParams->use_overrun_buffer = AVC_OFF;

    if (mColorFormat != OMX_COLOR_FormatYUV420Planar || mInputDataIsMeta) {
        // Color conversion is needed.
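        // An intermediate planar YUV420 buffer of width * height * 3 / 2 bytes
        // is allocated below; the check guards the 32-bit size computation
        // against overflow.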
        free(mInputFrameData);
        if (((uint64_t)mWidth * mHeight) > ((uint64_t)INT32_MAX / 3)) {
            ALOGE("Buffer size is too big.");
            return OMX_ErrorUndefined;
        }
        mInputFrameData =
            (uint8_t *) malloc((mWidth * mHeight * 3) >> 1);
        CHECK(mInputFrameData != NULL);
    }

    // PV's AVC encoder requires the video dimensions to be a multiple of 16.
    if (mWidth % 16 != 0 || mHeight % 16 != 0) {
        ALOGE("Video frame size %dx%d must be a multiple of 16",
            mWidth, mHeight);
        return OMX_ErrorBadParameter;
    }

    mEncParams->width = mWidth;
    mEncParams->height = mHeight;
    mEncParams->bitrate = mBitrate;
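    // mFramerate is in Q16 (16.16 fixed point). For example, 30 fps arrives
    // as 30 << 16 = 1966080, so (1000 * 1966080) >> 16 = 30000, i.e. the
    // frame rate scaled by 1000.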
    mEncParams->frame_rate = (1000 * mFramerate) >> 16;
    mEncParams->CPB_size = (uint32_t) (mBitrate >> 1);

    int32_t nMacroBlocks = divUp(mWidth, 16) * divUp(mHeight, 16);
    CHECK(mSliceGroup == NULL);
    if ((size_t)nMacroBlocks > SIZE_MAX / sizeof(uint32_t)) {
        ALOGE("Requested memory size is too big.");
        return OMX_ErrorUndefined;
    }
    mSliceGroup = (uint32_t *) malloc(sizeof(uint32_t) * nMacroBlocks);
    CHECK(mSliceGroup != NULL);
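    // Assign macroblocks to slice groups round-robin; with num_slice_group
    // fixed at 1 above, every entry simply ends up in group 0.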
    for (int ii = 0, idx = 0; ii < nMacroBlocks; ++ii) {
        mSliceGroup[ii] = idx++;
        if (idx >= mEncParams->num_slice_group) {
            idx = 0;
        }
    }
    mEncParams->slice_group = mSliceGroup;

    // Set IDR frame refresh interval
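    // idr_period is expressed in frames: e.g. a 1-second interval at 30 fps
    // (mFramerate = 30 << 16) gives (1 * 1966080) >> 16 = 30 frames between
    // IDR frames.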
    if (mIDRFrameRefreshIntervalInSec < 0) {
        mEncParams->idr_period = -1;
    } else if (mIDRFrameRefreshIntervalInSec == 0) {
        mEncParams->idr_period = 1;  // All I frames
    } else {
        mEncParams->idr_period =
            (mIDRFrameRefreshIntervalInSec * mFramerate) >> 16; // mFramerate is in Q16
    }

    // Set profile and level
    mEncParams->profile = mAVCEncProfile;
    mEncParams->level = mAVCEncLevel;

    return OMX_ErrorNone;
}

OMX_ERRORTYPE SoftAVCEncoder::initEncoder() {
    CHECK(!mStarted);

    OMX_ERRORTYPE errType = OMX_ErrorNone;
    if (OMX_ErrorNone != (errType = initEncParams())) {
        ALOGE("Failed to initialize encoder params");
        mSignalledError = true;
        notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
        return errType;
    }

    AVCEnc_Status err;
    err = PVAVCEncInitialize(mHandle, mEncParams, NULL, NULL);
    if (err != AVCENC_SUCCESS) {
        ALOGE("Failed to initialize the encoder: %d", err);
        mSignalledError = true;
        notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
        return OMX_ErrorUndefined;
    }

    mNumInputFrames = -2;  // 1st two buffers contain SPS and PPS
    mSpsPpsHeaderReceived = false;
    mReadyForNextFrame = true;
    mIsIDRFrame = false;
    mStarted = true;

    return OMX_ErrorNone;
}

OMX_ERRORTYPE SoftAVCEncoder::releaseEncoder() {
    if (!mStarted) {
        return OMX_ErrorNone;
    }

    PVAVCCleanUpEncoder(mHandle);
    releaseOutputBuffers();

    free(mInputFrameData);
    mInputFrameData = NULL;

    free(mSliceGroup);
    mSliceGroup = NULL;

    delete mEncParams;
    mEncParams = NULL;

    delete mHandle;
    mHandle = NULL;

    mStarted = false;

    return OMX_ErrorNone;
}

void SoftAVCEncoder::releaseOutputBuffers() {
    for (size_t i = 0; i < mOutputBuffers.size(); ++i) {
        MediaBuffer *buffer = mOutputBuffers.editItemAt(i);
        buffer->setObserver(NULL);
        buffer->release();
    }
    mOutputBuffers.clear();
}

OMX_ERRORTYPE SoftAVCEncoder::internalGetParameter(
        OMX_INDEXTYPE index, OMX_PTR params) {
    switch (index) {
        case OMX_IndexParamVideoBitrate:
        {
            OMX_VIDEO_PARAM_BITRATETYPE *bitRate =
                (OMX_VIDEO_PARAM_BITRATETYPE *) params;

            if (!isValidOMXParam(bitRate)) {
                return OMX_ErrorBadParameter;
            }

            if (bitRate->nPortIndex != 1) {
                return OMX_ErrorUndefined;
            }

            bitRate->eControlRate = OMX_Video_ControlRateVariable;
            bitRate->nTargetBitrate = mBitrate;
            return OMX_ErrorNone;
        }

        case OMX_IndexParamVideoAvc:
        {
            OMX_VIDEO_PARAM_AVCTYPE *avcParams =
                (OMX_VIDEO_PARAM_AVCTYPE *)params;

            if (!isValidOMXParam(avcParams)) {
                return OMX_ErrorBadParameter;
            }

            if (avcParams->nPortIndex != 1) {
                return OMX_ErrorUndefined;
            }

            avcParams->eProfile = OMX_VIDEO_AVCProfileBaseline;
            OMX_U32 omxLevel = AVC_LEVEL2;
            if (OMX_ErrorNone !=
                ConvertAvcSpecLevelToOmxAvcLevel(mAVCEncLevel, &omxLevel)) {
                return OMX_ErrorUndefined;
            }

            avcParams->eLevel = (OMX_VIDEO_AVCLEVELTYPE) omxLevel;
            avcParams->nRefFrames = 1;
            avcParams->nBFrames = 0;
            avcParams->bUseHadamard = OMX_TRUE;
            avcParams->nAllowedPictureTypes =
                    (OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP);
            avcParams->nRefIdx10ActiveMinus1 = 0;
            avcParams->nRefIdx11ActiveMinus1 = 0;
            avcParams->bWeightedPPrediction = OMX_FALSE;
            avcParams->bEntropyCodingCABAC = OMX_FALSE;
            avcParams->bconstIpred = OMX_FALSE;
            avcParams->bDirect8x8Inference = OMX_FALSE;
            avcParams->bDirectSpatialTemporal = OMX_FALSE;
            avcParams->nCabacInitIdc = 0;
            return OMX_ErrorNone;
        }

        default:
            return SoftVideoEncoderOMXComponent::internalGetParameter(index, params);
    }
}

OMX_ERRORTYPE SoftAVCEncoder::internalSetParameter(
        OMX_INDEXTYPE index, const OMX_PTR params) {
    int32_t indexFull = index;

    switch (indexFull) {
        case OMX_IndexParamVideoBitrate:
        {
            OMX_VIDEO_PARAM_BITRATETYPE *bitRate =
                (OMX_VIDEO_PARAM_BITRATETYPE *) params;

            if (!isValidOMXParam(bitRate)) {
                return OMX_ErrorBadParameter;
            }

            if (bitRate->nPortIndex != 1 ||
                bitRate->eControlRate != OMX_Video_ControlRateVariable) {
                return OMX_ErrorUndefined;
            }

            mBitrate = bitRate->nTargetBitrate;
            return OMX_ErrorNone;
        }

        case OMX_IndexParamVideoAvc:
        {
            OMX_VIDEO_PARAM_AVCTYPE *avcType =
                (OMX_VIDEO_PARAM_AVCTYPE *)params;

            if (!isValidOMXParam(avcType)) {
                return OMX_ErrorBadParameter;
            }

            if (avcType->nPortIndex != 1) {
                return OMX_ErrorUndefined;
            }

            // PV's AVC encoder only supports baseline profile
            if (avcType->eProfile != OMX_VIDEO_AVCProfileBaseline ||
                avcType->nRefFrames != 1 ||
                avcType->nBFrames != 0 ||
                avcType->bUseHadamard != OMX_TRUE ||
                (avcType->nAllowedPictureTypes & OMX_VIDEO_PictureTypeB) != 0 ||
                avcType->nRefIdx10ActiveMinus1 != 0 ||
                avcType->nRefIdx11ActiveMinus1 != 0 ||
                avcType->bWeightedPPrediction != OMX_FALSE ||
                avcType->bEntropyCodingCABAC != OMX_FALSE ||
                avcType->bconstIpred != OMX_FALSE ||
                avcType->bDirect8x8Inference != OMX_FALSE ||
                avcType->bDirectSpatialTemporal != OMX_FALSE ||
                avcType->nCabacInitIdc != 0) {
                return OMX_ErrorUndefined;
            }

            if (OK != ConvertOmxAvcLevelToAvcSpecLevel(avcType->eLevel, &mAVCEncLevel)) {
                return OMX_ErrorUndefined;
            }

            return OMX_ErrorNone;
        }

        default:
            return SoftVideoEncoderOMXComponent::internalSetParameter(index, params);
    }
}

void SoftAVCEncoder::onQueueFilled(OMX_U32 /* portIndex */) {
    if (mSignalledError || mSawInputEOS) {
        return;
    }

    if (!mStarted) {
        if (OMX_ErrorNone != initEncoder()) {
            return;
        }
    }

    List<BufferInfo *> &inQueue = getPortQueue(0);
    List<BufferInfo *> &outQueue = getPortQueue(1);

    while (!mSawInputEOS && !inQueue.empty() && !outQueue.empty()) {
        BufferInfo *inInfo = *inQueue.begin();
        OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
        BufferInfo *outInfo = *outQueue.begin();
        OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;

        outHeader->nTimeStamp = 0;
        outHeader->nFlags = 0;
        outHeader->nOffset = 0;
        outHeader->nFilledLen = 0;

        uint8_t *outPtr = (uint8_t *) outHeader->pBuffer;
        uint32_t dataLength = outHeader->nAllocLen;

        if (!mSpsPpsHeaderReceived && mNumInputFrames < 0) {
            // 4 bytes are reserved for holding the start code 0x00000001
            // of the sequence parameter set at the beginning.
            outPtr += 4;
            dataLength -= 4;
        }

        int32_t type;
        AVCEnc_Status encoderStatus = AVCENC_SUCCESS;

        // Combine SPS and PPS and place them in the very first output buffer.
        // SPS and PPS are separated by start code 0x00000001.
        // Assume that we have exactly one SPS and exactly one PPS.
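        // The resulting codec-config buffer is laid out as
        //   [00 00 00 01][SPS][00 00 00 01][PPS]
        // and is returned with OMX_BUFFERFLAG_CODECCONFIG set.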
        while (!mSpsPpsHeaderReceived && mNumInputFrames <= 0) {
            encoderStatus = PVAVCEncodeNAL(mHandle, outPtr, &dataLength, &type);
            if (encoderStatus == AVCENC_WRONG_STATE) {
                mSpsPpsHeaderReceived = true;
                CHECK_EQ(0, mNumInputFrames);  // 1st video frame is 0
                outHeader->nFlags = OMX_BUFFERFLAG_CODECCONFIG;
                outQueue.erase(outQueue.begin());
                outInfo->mOwnedByUs = false;
                notifyFillBufferDone(outHeader);
                return;
            } else {
                switch (type) {
                    case AVC_NALTYPE_SPS:
                        ++mNumInputFrames;
                        memcpy((uint8_t *)outHeader->pBuffer, "\x00\x00\x00\x01", 4);
                        outHeader->nFilledLen = 4 + dataLength;
                        outPtr += (dataLength + 4);  // 4 bytes for next start code
                        dataLength = outHeader->nAllocLen - outHeader->nFilledLen;
                        break;
                    default:
                        CHECK_EQ(AVC_NALTYPE_PPS, type);
                        ++mNumInputFrames;
                        memcpy((uint8_t *) outHeader->pBuffer + outHeader->nFilledLen,
                                "\x00\x00\x00\x01", 4);
                        outHeader->nFilledLen += (dataLength + 4);
                        outPtr += (dataLength + 4);
                        break;
                }
            }
        }

        // Get next input video frame
        if (mReadyForNextFrame) {
            // Save the input buffer info so that it can be
            // passed to an output buffer
            InputBufferInfo info;
            info.mTimeUs = inHeader->nTimeStamp;
            info.mFlags = inHeader->nFlags;
            mInputBufferInfoVec.push(info);
            mPrevTimestampUs = inHeader->nTimeStamp;

            if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
                mSawInputEOS = true;
            }

            if (inHeader->nFilledLen > 0) {
                AVCFrameIO videoInput;
                memset(&videoInput, 0, sizeof(videoInput));
                videoInput.height = align(mHeight, 16);
                videoInput.pitch = align(mWidth, 16);
                videoInput.coding_timestamp = (inHeader->nTimeStamp + 500) / 1000;  // in ms
                const uint8_t *inputData = NULL;
                if (mInputDataIsMeta) {
                    inputData =
                        extractGraphicBuffer(
                                mInputFrameData, (mWidth * mHeight * 3) >> 1,
                                inHeader->pBuffer + inHeader->nOffset, inHeader->nFilledLen,
                                mWidth, mHeight);
                    if (inputData == NULL) {
                        ALOGE("Unable to extract gralloc buffer in metadata mode");
                        mSignalledError = true;
                        notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
                        return;
                    }
                    // TODO: Verify/convert pixel format enum
                } else {
                    inputData = (const uint8_t *)inHeader->pBuffer + inHeader->nOffset;
                    if (mColorFormat != OMX_COLOR_FormatYUV420Planar) {
                        ConvertYUV420SemiPlanarToYUV420Planar(
                            inputData, mInputFrameData, mWidth, mHeight);
                        inputData = mInputFrameData;
                    }
                }

                CHECK(inputData != NULL);
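                // Planar 4:2:0 layout: a Y plane of pitch * height bytes,
                // followed by Cb and Cr planes of (pitch * height) / 4 bytes
                // each.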
                videoInput.YCbCr[0] = (uint8_t *)inputData;
                videoInput.YCbCr[1] = videoInput.YCbCr[0] + videoInput.height * videoInput.pitch;
                videoInput.YCbCr[2] = videoInput.YCbCr[1] +
                    ((videoInput.height * videoInput.pitch) >> 2);
                videoInput.disp_order = mNumInputFrames;

                encoderStatus = PVAVCEncSetInput(mHandle, &videoInput);
                if (encoderStatus == AVCENC_SUCCESS || encoderStatus == AVCENC_NEW_IDR) {
                    mReadyForNextFrame = false;
                    ++mNumInputFrames;
                    if (encoderStatus == AVCENC_NEW_IDR) {
                        mIsIDRFrame = true;
                    }
                } else {
                    if (encoderStatus < AVCENC_SUCCESS) {
                        ALOGE("encoderStatus = %d at line %d", encoderStatus, __LINE__);
                        mSignalledError = true;
                        notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
                        return;
                    } else {
                        ALOGV("encoderStatus = %d at line %d", encoderStatus, __LINE__);
                        inQueue.erase(inQueue.begin());
                        inInfo->mOwnedByUs = false;
                        notifyEmptyBufferDone(inHeader);
                        return;
                    }
                }
            }
        }

        // Encode an input video frame
        CHECK(encoderStatus == AVCENC_SUCCESS || encoderStatus == AVCENC_NEW_IDR);
        dataLength = outHeader->nAllocLen;  // Reset the output buffer length
        if (inHeader->nFilledLen > 0) {
            if (outHeader->nAllocLen >= 4) {
                memcpy(outPtr, "\x00\x00\x00\x01", 4);
                outPtr += 4;
                dataLength -= 4;
            }
            encoderStatus = PVAVCEncodeNAL(mHandle, outPtr, &dataLength, &type);
            dataLength = outPtr + dataLength - outHeader->pBuffer;
            if (encoderStatus == AVCENC_SUCCESS) {
                CHECK(NULL == PVAVCEncGetOverrunBuffer(mHandle));
            } else if (encoderStatus == AVCENC_PICTURE_READY) {
                CHECK(NULL == PVAVCEncGetOverrunBuffer(mHandle));
                if (mIsIDRFrame) {
                    outHeader->nFlags |= OMX_BUFFERFLAG_SYNCFRAME;
                    mIsIDRFrame = false;
                }
                mReadyForNextFrame = true;
                AVCFrameIO recon;
                if (PVAVCEncGetRecon(mHandle, &recon) == AVCENC_SUCCESS) {
                    PVAVCEncReleaseRecon(mHandle, &recon);
                }
            } else {
                dataLength = 0;
                mReadyForNextFrame = true;
            }

            if (encoderStatus < AVCENC_SUCCESS) {
                ALOGE("encoderStatus = %d at line %d", encoderStatus, __LINE__);
                mSignalledError = true;
                notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
                return;
            }
        } else {
            dataLength = 0;
        }

        inQueue.erase(inQueue.begin());
        inInfo->mOwnedByUs = false;
        notifyEmptyBufferDone(inHeader);

        outQueue.erase(outQueue.begin());
        CHECK(!mInputBufferInfoVec.empty());
        InputBufferInfo *inputBufInfo = mInputBufferInfoVec.begin();
        outHeader->nTimeStamp = inputBufInfo->mTimeUs;
        outHeader->nFlags |= (inputBufInfo->mFlags | OMX_BUFFERFLAG_ENDOFFRAME);
        if (mSawInputEOS) {
            outHeader->nFlags |= OMX_BUFFERFLAG_EOS;
        }
        outHeader->nFilledLen = dataLength;
        outInfo->mOwnedByUs = false;
        notifyFillBufferDone(outHeader);
        mInputBufferInfoVec.erase(mInputBufferInfoVec.begin());
    }
}

int32_t SoftAVCEncoder::allocOutputBuffers(
        unsigned int sizeInMbs, unsigned int numBuffers) {
    CHECK(mOutputBuffers.isEmpty());
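    // Each reference frame is (sizeInMbs << 7) * 3 = sizeInMbs * 384 bytes,
    // i.e. 16 * 16 * 1.5 bytes per macroblock for a 4:2:0 frame.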
    size_t frameSize = (sizeInMbs << 7) * 3;
    for (unsigned int i = 0; i < numBuffers; ++i) {
        MediaBuffer *buffer = new MediaBuffer(frameSize);
        buffer->setObserver(this);
        mOutputBuffers.push(buffer);
    }

    return 1;
}

void SoftAVCEncoder::unbindOutputBuffer(int32_t index) {
    CHECK(index >= 0);
}

int32_t SoftAVCEncoder::bindOutputBuffer(int32_t index, uint8_t **yuv) {
    CHECK(index >= 0);
    CHECK(index < (int32_t) mOutputBuffers.size());
    *yuv = (uint8_t *) mOutputBuffers[index]->data();

    return 1;
}

void SoftAVCEncoder::signalBufferReturned(MediaBuffer *buffer) {
    UNUSED_UNLESS_VERBOSE(buffer);
    ALOGV("signalBufferReturned: %p", buffer);
}

}  // namespace android

android::SoftOMXComponent *createSoftOMXComponent(
        const char *name, const OMX_CALLBACKTYPE *callbacks,
        OMX_PTR appData, OMX_COMPONENTTYPE **component) {
    return new android::SoftAVCEncoder(name, callbacks, appData, component);
}