/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "SoftAVCEncoder"
#include <utils/Log.h>

#include "avcenc_api.h"
#include "avcenc_int.h"
#include "OMX_Video.h"

#include <HardwareAPI.h>
#include <MetadataBufferType.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/Utils.h>
#include <ui/Rect.h>
#include <ui/GraphicBufferMapper.h>

#include "SoftAVCEncoder.h"

namespace android {

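// Fill in the nSize/nVersion header fields that every OMX parameter
// structure carries.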
template<class T>
static void InitOMXParams(T *params) {
    params->nSize = sizeof(T);
    params->nVersion.s.nVersionMajor = 1;
    params->nVersion.s.nVersionMinor = 0;
    params->nVersion.s.nRevision = 0;
    params->nVersion.s.nStep = 0;
}

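// Maps OMX AVC level constants to the PV encoder's AVCLevel values.
// Levels above 2 are compiled out below because the software encoder is
// too slow at resolutions beyond CIF.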
typedef struct LevelConversion {
    OMX_U32 omxLevel;
    AVCLevel avcLevel;
} LevelConversion;

static LevelConversion ConversionTable[] = {
    { OMX_VIDEO_AVCLevel1,  AVC_LEVEL1   },
    { OMX_VIDEO_AVCLevel1b, AVC_LEVEL1_B },
    { OMX_VIDEO_AVCLevel11, AVC_LEVEL1_1 },
    { OMX_VIDEO_AVCLevel12, AVC_LEVEL1_2 },
    { OMX_VIDEO_AVCLevel13, AVC_LEVEL1_3 },
    { OMX_VIDEO_AVCLevel2,  AVC_LEVEL2 },
#if 0
    // encoding speed is very poor if video
    // resolution is higher than CIF
    { OMX_VIDEO_AVCLevel21, AVC_LEVEL2_1 },
    { OMX_VIDEO_AVCLevel22, AVC_LEVEL2_2 },
    { OMX_VIDEO_AVCLevel3,  AVC_LEVEL3   },
    { OMX_VIDEO_AVCLevel31, AVC_LEVEL3_1 },
    { OMX_VIDEO_AVCLevel32, AVC_LEVEL3_2 },
    { OMX_VIDEO_AVCLevel4,  AVC_LEVEL4   },
    { OMX_VIDEO_AVCLevel41, AVC_LEVEL4_1 },
    { OMX_VIDEO_AVCLevel42, AVC_LEVEL4_2 },
    { OMX_VIDEO_AVCLevel5,  AVC_LEVEL5   },
    { OMX_VIDEO_AVCLevel51, AVC_LEVEL5_1 },
#endif
};

static status_t ConvertOmxAvcLevelToAvcSpecLevel(
        OMX_U32 omxLevel, AVCLevel *avcLevel) {
    for (size_t i = 0, n = sizeof(ConversionTable)/sizeof(ConversionTable[0]);
        i < n; ++i) {
        if (omxLevel == ConversionTable[i].omxLevel) {
            *avcLevel = ConversionTable[i].avcLevel;
            return OK;
        }
    }

    ALOGE("ConvertOmxAvcLevelToAvcSpecLevel: %d level not supported",
            (int32_t)omxLevel);

    return BAD_VALUE;
}

static status_t ConvertAvcSpecLevelToOmxAvcLevel(
    AVCLevel avcLevel, OMX_U32 *omxLevel) {
    for (size_t i = 0, n = sizeof(ConversionTable)/sizeof(ConversionTable[0]);
        i < n; ++i) {
        if (avcLevel == ConversionTable[i].avcLevel) {
            *omxLevel = ConversionTable[i].omxLevel;
            return OK;
        }
    }

    ALOGE("ConvertAvcSpecLevelToOmxAvcLevel: %d level not supported",
            (int32_t) avcLevel);

    return BAD_VALUE;
}

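// Convert a YUV420 semi-planar frame (interleaved chroma) into the planar
// layout the encoder consumes: luma is copied as-is, and each interleaved
// chroma pair is split into separate Cb/Cr planes, with the two channels
// swapped on the way out (see "Flip U and V" below).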
inline static void ConvertYUV420SemiPlanarToYUV420Planar(
        uint8_t *inyuv, uint8_t* outyuv,
        int32_t width, int32_t height) {

    int32_t outYsize = width * height;
    uint32_t *outy = (uint32_t *) outyuv;
    uint16_t *outcb = (uint16_t *) (outyuv + outYsize);
    uint16_t *outcr = (uint16_t *) (outyuv + outYsize + (outYsize >> 2));

    /* Y copying */
    memcpy(outy, inyuv, outYsize);

    /* U & V copying */
    uint32_t *inyuv_4 = (uint32_t *) (inyuv + outYsize);
    for (int32_t i = height >> 1; i > 0; --i) {
        for (int32_t j = width >> 2; j > 0; --j) {
            uint32_t temp = *inyuv_4++;
            uint32_t tempU = temp & 0xFF;
            tempU = tempU | ((temp >> 8) & 0xFF00);

            uint32_t tempV = (temp >> 8) & 0xFF;
            tempV = tempV | ((temp >> 16) & 0xFF00);

            // Flip U and V
            *outcb++ = tempV;
            *outcr++ = tempU;
        }
    }
}

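// Static callbacks registered with the PV encoder in initEncParams():
// heap alloc/free helpers plus DPB (reference frame) buffer allocation and
// bind/unbind, which forward to the SoftAVCEncoder passed as userData.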
static void* MallocWrapper(
        void *userData, int32_t size, int32_t attrs) {
    void *ptr = malloc(size);
    if (ptr)
        memset(ptr, 0, size);
    return ptr;
}

static void FreeWrapper(void *userData, void* ptr) {
    free(ptr);
}

static int32_t DpbAllocWrapper(void *userData,
        unsigned int sizeInMbs, unsigned int numBuffers) {
    SoftAVCEncoder *encoder = static_cast<SoftAVCEncoder *>(userData);
    CHECK(encoder != NULL);
    return encoder->allocOutputBuffers(sizeInMbs, numBuffers);
}

static int32_t BindFrameWrapper(
        void *userData, int32_t index, uint8_t **yuv) {
    SoftAVCEncoder *encoder = static_cast<SoftAVCEncoder *>(userData);
    CHECK(encoder != NULL);
    return encoder->bindOutputBuffer(index, yuv);
}

static void UnbindFrameWrapper(void *userData, int32_t index) {
    SoftAVCEncoder *encoder = static_cast<SoftAVCEncoder *>(userData);
    CHECK(encoder != NULL);
    return encoder->unbindOutputBuffer(index);
}

SoftAVCEncoder::SoftAVCEncoder(
            const char *name,
            const OMX_CALLBACKTYPE *callbacks,
            OMX_PTR appData,
            OMX_COMPONENTTYPE **component)
    : SimpleSoftOMXComponent(name, callbacks, appData, component),
      mVideoWidth(176),
      mVideoHeight(144),
      mVideoFrameRate(30),
      mVideoBitRate(192000),
      mVideoColorFormat(OMX_COLOR_FormatYUV420Planar),
      mStoreMetaDataInBuffers(false),
      mIDRFrameRefreshIntervalInSec(1),
      mAVCEncProfile(AVC_BASELINE),
      mAVCEncLevel(AVC_LEVEL2),
      mNumInputFrames(-1),
      mPrevTimestampUs(-1),
      mStarted(false),
      mSawInputEOS(false),
      mSignalledError(false),
      mHandle(new tagAVCHandle),
      mEncParams(new tagAVCEncParam),
      mInputFrameData(NULL),
      mSliceGroup(NULL) {

    initPorts();
    ALOGI("Construct SoftAVCEncoder");
}

SoftAVCEncoder::~SoftAVCEncoder() {
    ALOGV("Destruct SoftAVCEncoder");
    releaseEncoder();
    List<BufferInfo *> &outQueue = getPortQueue(1);
    List<BufferInfo *> &inQueue = getPortQueue(0);
    CHECK(outQueue.empty());
    CHECK(inQueue.empty());
}

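// Set up the encoder defaults used by this component: rate control on,
// out-of-band SPS/PPS, baseline-profile tools only (no B frames), and a
// single slice group covering the whole frame. Also allocates the
// intermediate buffer used for color conversion when the input is
// semi-planar.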
OMX_ERRORTYPE SoftAVCEncoder::initEncParams() {
    CHECK(mHandle != NULL);
    memset(mHandle, 0, sizeof(tagAVCHandle));
    mHandle->AVCObject = NULL;
    mHandle->userData = this;
    mHandle->CBAVC_DPBAlloc = DpbAllocWrapper;
    mHandle->CBAVC_FrameBind = BindFrameWrapper;
    mHandle->CBAVC_FrameUnbind = UnbindFrameWrapper;
    mHandle->CBAVC_Malloc = MallocWrapper;
    mHandle->CBAVC_Free = FreeWrapper;

    CHECK(mEncParams != NULL);
    memset(mEncParams, 0, sizeof(tagAVCEncParam));
    mEncParams->rate_control = AVC_ON;
    mEncParams->initQP = 0;
    mEncParams->init_CBP_removal_delay = 1600;

    mEncParams->intramb_refresh = 0;
    mEncParams->auto_scd = AVC_ON;
    mEncParams->out_of_band_param_set = AVC_ON;
    mEncParams->poc_type = 2;
    mEncParams->log2_max_poc_lsb_minus_4 = 12;
    mEncParams->delta_poc_zero_flag = 0;
    mEncParams->offset_poc_non_ref = 0;
    mEncParams->offset_top_bottom = 0;
    mEncParams->num_ref_in_cycle = 0;
    mEncParams->offset_poc_ref = NULL;

    mEncParams->num_ref_frame = 1;
    mEncParams->num_slice_group = 1;
    mEncParams->fmo_type = 0;

    mEncParams->db_filter = AVC_ON;
    mEncParams->disable_db_idc = 0;

    mEncParams->alpha_offset = 0;
    mEncParams->beta_offset = 0;
    mEncParams->constrained_intra_pred = AVC_OFF;

    mEncParams->data_par = AVC_OFF;
    mEncParams->fullsearch = AVC_OFF;
    mEncParams->search_range = 16;
    mEncParams->sub_pel = AVC_OFF;
    mEncParams->submb_pred = AVC_OFF;
    mEncParams->rdopt_mode = AVC_OFF;
    mEncParams->bidir_pred = AVC_OFF;

    mEncParams->use_overrun_buffer = AVC_OFF;

    if (mVideoColorFormat == OMX_COLOR_FormatYUV420SemiPlanar) {
        // Color conversion is needed.
        CHECK(mInputFrameData == NULL);
        mInputFrameData =
            (uint8_t *) malloc((mVideoWidth * mVideoHeight * 3) >> 1);
        CHECK(mInputFrameData != NULL);
    }

    // PV's AVC encoder requires both video dimensions to be a multiple of 16.
    if (mVideoWidth % 16 != 0 || mVideoHeight % 16 != 0) {
        ALOGE("Video frame size %dx%d must be a multiple of 16",
            mVideoWidth, mVideoHeight);
        return OMX_ErrorBadParameter;
    }

    mEncParams->width = mVideoWidth;
    mEncParams->height = mVideoHeight;
    mEncParams->bitrate = mVideoBitRate;
    mEncParams->frame_rate = 1000 * mVideoFrameRate;  // frame rate in fps, scaled by 1000
    mEncParams->CPB_size = (uint32_t) (mVideoBitRate >> 1);

    int32_t nMacroBlocks = ((((mVideoWidth + 15) >> 4) << 4) *
            (((mVideoHeight + 15) >> 4) << 4)) >> 8;
    CHECK(mSliceGroup == NULL);
    mSliceGroup = (uint32_t *) malloc(sizeof(uint32_t) * nMacroBlocks);
    CHECK(mSliceGroup != NULL);
    for (int ii = 0, idx = 0; ii < nMacroBlocks; ++ii) {
        mSliceGroup[ii] = idx++;
        if (idx >= mEncParams->num_slice_group) {
            idx = 0;
        }
    }
    mEncParams->slice_group = mSliceGroup;

    // Set IDR frame refresh interval
    if (mIDRFrameRefreshIntervalInSec < 0) {
        mEncParams->idr_period = -1;
    } else if (mIDRFrameRefreshIntervalInSec == 0) {
        mEncParams->idr_period = 1;  // All I frames
    } else {
        mEncParams->idr_period =
            (mIDRFrameRefreshIntervalInSec * mVideoFrameRate);
    }

    // Set profile and level
    mEncParams->profile = mAVCEncProfile;
    mEncParams->level = mAVCEncLevel;

    return OMX_ErrorNone;
}

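// Create the PV encoder instance. mNumInputFrames starts at -2 so that the
// first two generated NAL units (SPS and PPS) are not counted as frames.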
OMX_ERRORTYPE SoftAVCEncoder::initEncoder() {
    CHECK(!mStarted);

    OMX_ERRORTYPE errType = OMX_ErrorNone;
    if (OMX_ErrorNone != (errType = initEncParams())) {
        ALOGE("Failed to initialize encoder params");
        mSignalledError = true;
        notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
        return errType;
    }

    AVCEnc_Status err;
    err = PVAVCEncInitialize(mHandle, mEncParams, NULL, NULL);
    if (err != AVCENC_SUCCESS) {
        ALOGE("Failed to initialize the encoder: %d", err);
        mSignalledError = true;
        notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
        return OMX_ErrorUndefined;
    }

    mNumInputFrames = -2;  // 1st two buffers contain SPS and PPS
    mSpsPpsHeaderReceived = false;
    mReadyForNextFrame = true;
    mIsIDRFrame = false;
    mStarted = true;

    return OMX_ErrorNone;
}

OMX_ERRORTYPE SoftAVCEncoder::releaseEncoder() {
    if (!mStarted) {
        return OMX_ErrorNone;
    }

    PVAVCCleanUpEncoder(mHandle);
    releaseOutputBuffers();

    // mInputFrameData and mSliceGroup were allocated with malloc(),
    // so release them with free() rather than delete.
    free(mInputFrameData);
    mInputFrameData = NULL;

    free(mSliceGroup);
    mSliceGroup = NULL;

    delete mEncParams;
    mEncParams = NULL;

    delete mHandle;
    mHandle = NULL;

    mStarted = false;

    return OMX_ErrorNone;
}

void SoftAVCEncoder::releaseOutputBuffers() {
    for (size_t i = 0; i < mOutputBuffers.size(); ++i) {
        MediaBuffer *buffer = mOutputBuffers.editItemAt(i);
        buffer->setObserver(NULL);
        buffer->release();
    }
    mOutputBuffers.clear();
}

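// Define the two OMX ports: port 0 takes raw YUV420 input sized for one
// frame per buffer; port 1 emits the AVC bitstream with buffers of at
// least 31584 bytes (PV's minimum).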
void SoftAVCEncoder::initPorts() {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);

    const size_t kInputBufferSize = (mVideoWidth * mVideoHeight * 3) >> 1;

    // 31584 is PV's magic number.  Not sure why.
    const size_t kOutputBufferSize =
            (kInputBufferSize > 31584) ? kInputBufferSize : 31584;

    def.nPortIndex = 0;
    def.eDir = OMX_DirInput;
    def.nBufferCountMin = kNumBuffers;
    def.nBufferCountActual = def.nBufferCountMin;
    def.nBufferSize = kInputBufferSize;
    def.bEnabled = OMX_TRUE;
    def.bPopulated = OMX_FALSE;
    def.eDomain = OMX_PortDomainVideo;
    def.bBuffersContiguous = OMX_FALSE;
    def.nBufferAlignment = 1;

    def.format.video.cMIMEType = const_cast<char *>("video/raw");
    def.format.video.eCompressionFormat = OMX_VIDEO_CodingUnused;
    def.format.video.eColorFormat = OMX_COLOR_FormatYUV420Planar;
    def.format.video.xFramerate = (mVideoFrameRate << 16);  // Q16 format
    def.format.video.nBitrate = mVideoBitRate;
    def.format.video.nFrameWidth = mVideoWidth;
    def.format.video.nFrameHeight = mVideoHeight;
    def.format.video.nStride = mVideoWidth;
    def.format.video.nSliceHeight = mVideoHeight;

    addPort(def);

    def.nPortIndex = 1;
    def.eDir = OMX_DirOutput;
    def.nBufferCountMin = kNumBuffers;
    def.nBufferCountActual = def.nBufferCountMin;
    def.nBufferSize = kOutputBufferSize;
    def.bEnabled = OMX_TRUE;
    def.bPopulated = OMX_FALSE;
    def.eDomain = OMX_PortDomainVideo;
    def.bBuffersContiguous = OMX_FALSE;
    def.nBufferAlignment = 2;

    def.format.video.cMIMEType = const_cast<char *>("video/avc");
    def.format.video.eCompressionFormat = OMX_VIDEO_CodingAVC;
    def.format.video.eColorFormat = OMX_COLOR_FormatUnused;
    def.format.video.xFramerate = (0 << 16);  // Q16 format
    def.format.video.nBitrate = mVideoBitRate;
    def.format.video.nFrameWidth = mVideoWidth;
    def.format.video.nFrameHeight = mVideoHeight;
    def.format.video.nStride = mVideoWidth;
    def.format.video.nSliceHeight = mVideoHeight;

    addPort(def);
}

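// Report component capabilities: variable bitrate control, the three
// supported input color formats, baseline profile, and the levels listed
// in ConversionTable.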
OMX_ERRORTYPE SoftAVCEncoder::internalGetParameter(
        OMX_INDEXTYPE index, OMX_PTR params) {
    switch (index) {
        case OMX_IndexParamVideoErrorCorrection:
        {
            return OMX_ErrorNotImplemented;
        }

        case OMX_IndexParamVideoBitrate:
        {
            OMX_VIDEO_PARAM_BITRATETYPE *bitRate =
                (OMX_VIDEO_PARAM_BITRATETYPE *) params;

            if (bitRate->nPortIndex != 1) {
                return OMX_ErrorUndefined;
            }

            bitRate->eControlRate = OMX_Video_ControlRateVariable;
            bitRate->nTargetBitrate = mVideoBitRate;
            return OMX_ErrorNone;
        }

        case OMX_IndexParamVideoPortFormat:
        {
            OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams =
                (OMX_VIDEO_PARAM_PORTFORMATTYPE *)params;

            if (formatParams->nPortIndex > 1) {
                return OMX_ErrorUndefined;
            }

            if (formatParams->nIndex > 2) {
                return OMX_ErrorNoMore;
            }

            if (formatParams->nPortIndex == 0) {
                formatParams->eCompressionFormat = OMX_VIDEO_CodingUnused;
                if (formatParams->nIndex == 0) {
                    formatParams->eColorFormat = OMX_COLOR_FormatYUV420Planar;
                } else if (formatParams->nIndex == 1) {
                    formatParams->eColorFormat = OMX_COLOR_FormatYUV420SemiPlanar;
                } else {
                    formatParams->eColorFormat = OMX_COLOR_FormatAndroidOpaque;
                }
            } else {
                formatParams->eCompressionFormat = OMX_VIDEO_CodingAVC;
                formatParams->eColorFormat = OMX_COLOR_FormatUnused;
            }

            return OMX_ErrorNone;
        }

        case OMX_IndexParamVideoAvc:
        {
            OMX_VIDEO_PARAM_AVCTYPE *avcParams =
                (OMX_VIDEO_PARAM_AVCTYPE *)params;

            if (avcParams->nPortIndex != 1) {
                return OMX_ErrorUndefined;
            }

            avcParams->eProfile = OMX_VIDEO_AVCProfileBaseline;
            OMX_U32 omxLevel = AVC_LEVEL2;
            if (OMX_ErrorNone !=
                ConvertAvcSpecLevelToOmxAvcLevel(mAVCEncLevel, &omxLevel)) {
                return OMX_ErrorUndefined;
            }

            avcParams->eLevel = (OMX_VIDEO_AVCLEVELTYPE) omxLevel;
            avcParams->nRefFrames = 1;
            avcParams->nBFrames = 0;
            avcParams->bUseHadamard = OMX_TRUE;
            avcParams->nAllowedPictureTypes =
                    (OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP);
            avcParams->nRefIdx10ActiveMinus1 = 0;
            avcParams->nRefIdx11ActiveMinus1 = 0;
            avcParams->bWeightedPPrediction = OMX_FALSE;
            avcParams->bEntropyCodingCABAC = OMX_FALSE;
            avcParams->bconstIpred = OMX_FALSE;
            avcParams->bDirect8x8Inference = OMX_FALSE;
            avcParams->bDirectSpatialTemporal = OMX_FALSE;
            avcParams->nCabacInitIdc = 0;
            return OMX_ErrorNone;
        }

        case OMX_IndexParamVideoProfileLevelQuerySupported:
        {
            OMX_VIDEO_PARAM_PROFILELEVELTYPE *profileLevel =
                (OMX_VIDEO_PARAM_PROFILELEVELTYPE *)params;

            if (profileLevel->nPortIndex != 1) {
                return OMX_ErrorUndefined;
            }

            const size_t size =
                    sizeof(ConversionTable) / sizeof(ConversionTable[0]);

            if (profileLevel->nProfileIndex >= size) {
                return OMX_ErrorNoMore;
            }

            profileLevel->eProfile = OMX_VIDEO_AVCProfileBaseline;
            profileLevel->eLevel = ConversionTable[profileLevel->nProfileIndex].omxLevel;

            return OMX_ErrorNone;
        }

        default:
            return SimpleSoftOMXComponent::internalGetParameter(index, params);
    }
}

OMX_ERRORTYPE SoftAVCEncoder::internalSetParameter(
        OMX_INDEXTYPE index, const OMX_PTR params) {
    int32_t indexFull = index;

    switch (indexFull) {
        case OMX_IndexParamVideoErrorCorrection:
        {
            return OMX_ErrorNotImplemented;
        }

        case OMX_IndexParamVideoBitrate:
        {
            OMX_VIDEO_PARAM_BITRATETYPE *bitRate =
                (OMX_VIDEO_PARAM_BITRATETYPE *) params;

            if (bitRate->nPortIndex != 1 ||
                bitRate->eControlRate != OMX_Video_ControlRateVariable) {
                return OMX_ErrorUndefined;
            }

            mVideoBitRate = bitRate->nTargetBitrate;
            return OMX_ErrorNone;
        }

        case OMX_IndexParamPortDefinition:
        {
            OMX_PARAM_PORTDEFINITIONTYPE *def =
                (OMX_PARAM_PORTDEFINITIONTYPE *)params;
            if (def->nPortIndex > 1) {
                return OMX_ErrorUndefined;
            }

            if (def->nPortIndex == 0) {
                if (def->format.video.eCompressionFormat != OMX_VIDEO_CodingUnused ||
                    (def->format.video.eColorFormat != OMX_COLOR_FormatYUV420Planar &&
                     def->format.video.eColorFormat != OMX_COLOR_FormatYUV420SemiPlanar &&
                     def->format.video.eColorFormat != OMX_COLOR_FormatAndroidOpaque)) {
                    return OMX_ErrorUndefined;
                }
            } else {
                if (def->format.video.eCompressionFormat != OMX_VIDEO_CodingAVC ||
                    (def->format.video.eColorFormat != OMX_COLOR_FormatUnused)) {
                    return OMX_ErrorUndefined;
                }
            }

            OMX_ERRORTYPE err = SimpleSoftOMXComponent::internalSetParameter(index, params);
            if (OMX_ErrorNone != err) {
                return err;
            }

            if (def->nPortIndex == 0) {
                mVideoWidth = def->format.video.nFrameWidth;
                mVideoHeight = def->format.video.nFrameHeight;
                mVideoFrameRate = def->format.video.xFramerate >> 16;
                mVideoColorFormat = def->format.video.eColorFormat;
            } else {
                mVideoBitRate = def->format.video.nBitrate;
            }

            return OMX_ErrorNone;
        }

        case OMX_IndexParamStandardComponentRole:
        {
            const OMX_PARAM_COMPONENTROLETYPE *roleParams =
                (const OMX_PARAM_COMPONENTROLETYPE *)params;

            if (strncmp((const char *)roleParams->cRole,
                        "video_encoder.avc",
                        OMX_MAX_STRINGNAME_SIZE - 1)) {
                return OMX_ErrorUndefined;
            }

            return OMX_ErrorNone;
        }

        case OMX_IndexParamVideoPortFormat:
        {
            const OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams =
                (const OMX_VIDEO_PARAM_PORTFORMATTYPE *)params;

            if (formatParams->nPortIndex > 1) {
                return OMX_ErrorUndefined;
            }

            if (formatParams->nIndex > 2) {
                return OMX_ErrorNoMore;
            }

            if (formatParams->nPortIndex == 0) {
                if (formatParams->eCompressionFormat != OMX_VIDEO_CodingUnused ||
                    ((formatParams->nIndex == 0 &&
                      formatParams->eColorFormat != OMX_COLOR_FormatYUV420Planar) ||
                    (formatParams->nIndex == 1 &&
                     formatParams->eColorFormat != OMX_COLOR_FormatYUV420SemiPlanar) ||
                    (formatParams->nIndex == 2 &&
                     formatParams->eColorFormat != OMX_COLOR_FormatAndroidOpaque) )) {
                    return OMX_ErrorUndefined;
                }
                mVideoColorFormat = formatParams->eColorFormat;
            } else {
                if (formatParams->eCompressionFormat != OMX_VIDEO_CodingAVC ||
                    formatParams->eColorFormat != OMX_COLOR_FormatUnused) {
                    return OMX_ErrorUndefined;
                }
            }

            return OMX_ErrorNone;
        }

        case OMX_IndexParamVideoAvc:
        {
            OMX_VIDEO_PARAM_AVCTYPE *avcType =
                (OMX_VIDEO_PARAM_AVCTYPE *)params;

            if (avcType->nPortIndex != 1) {
                return OMX_ErrorUndefined;
            }

            // PV's AVC encoder only supports baseline profile
            if (avcType->eProfile != OMX_VIDEO_AVCProfileBaseline ||
                avcType->nRefFrames != 1 ||
                avcType->nBFrames != 0 ||
                avcType->bUseHadamard != OMX_TRUE ||
                (avcType->nAllowedPictureTypes & OMX_VIDEO_PictureTypeB) != 0 ||
                avcType->nRefIdx10ActiveMinus1 != 0 ||
                avcType->nRefIdx11ActiveMinus1 != 0 ||
                avcType->bWeightedPPrediction != OMX_FALSE ||
                avcType->bEntropyCodingCABAC != OMX_FALSE ||
                avcType->bconstIpred != OMX_FALSE ||
                avcType->bDirect8x8Inference != OMX_FALSE ||
                avcType->bDirectSpatialTemporal != OMX_FALSE ||
                avcType->nCabacInitIdc != 0) {
                return OMX_ErrorUndefined;
            }

            if (OK != ConvertOmxAvcLevelToAvcSpecLevel(avcType->eLevel, &mAVCEncLevel)) {
                return OMX_ErrorUndefined;
            }

            return OMX_ErrorNone;
        }

        case kStoreMetaDataExtensionIndex:
        {
            StoreMetaDataInBuffersParams *storeParams =
                    (StoreMetaDataInBuffersParams*)params;
            if (storeParams->nPortIndex != 0) {
                ALOGE("%s: StoreMetadataInBuffersParams.nPortIndex not zero!",
                        __FUNCTION__);
                return OMX_ErrorUndefined;
            }

            mStoreMetaDataInBuffers = storeParams->bStoreMetaData;
            ALOGV("StoreMetaDataInBuffers set to: %s",
                    mStoreMetaDataInBuffers ? "true" : "false");

            if (mStoreMetaDataInBuffers) {
                mVideoColorFormat = OMX_COLOR_FormatYUV420SemiPlanar;
                if (mInputFrameData == NULL) {
                    mInputFrameData =
                            (uint8_t *) malloc((mVideoWidth * mVideoHeight * 3) >> 1);
                }
            }

            return OMX_ErrorNone;
        }

        default:
            return SimpleSoftOMXComponent::internalSetParameter(index, params);
    }
}

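// Main encoding loop. The very first output buffer carries the SPS and PPS
// (flagged as codec config); after that, each queued input frame is handed
// to the encoder and the resulting NAL unit is emitted in an output buffer
// with the timestamp and flags of the corresponding input buffer.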
void SoftAVCEncoder::onQueueFilled(OMX_U32 portIndex) {
    if (mSignalledError || mSawInputEOS) {
        return;
    }

    if (!mStarted) {
        if (OMX_ErrorNone != initEncoder()) {
            return;
        }
    }

    List<BufferInfo *> &inQueue = getPortQueue(0);
    List<BufferInfo *> &outQueue = getPortQueue(1);

    while (!mSawInputEOS && !inQueue.empty() && !outQueue.empty()) {
        BufferInfo *inInfo = *inQueue.begin();
        OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
        BufferInfo *outInfo = *outQueue.begin();
        OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;

        outHeader->nTimeStamp = 0;
        outHeader->nFlags = 0;
        outHeader->nOffset = 0;
        outHeader->nFilledLen = 0;

        uint8_t *outPtr = (uint8_t *) outHeader->pBuffer;
        uint32_t dataLength = outHeader->nAllocLen;

        if (!mSpsPpsHeaderReceived && mNumInputFrames < 0) {
            // 4 bytes are reserved for holding the start code 0x00000001
            // of the sequence parameter set at the beginning.
            outPtr += 4;
            dataLength -= 4;
        }

        int32_t type;
        AVCEnc_Status encoderStatus = AVCENC_SUCCESS;

        // Combine SPS and PPS and place them in the very first output buffer
        // SPS and PPS are separated by start code 0x00000001
        // Assume that we have exactly one SPS and exactly one PPS.
        while (!mSpsPpsHeaderReceived && mNumInputFrames <= 0) {
            encoderStatus = PVAVCEncodeNAL(mHandle, outPtr, &dataLength, &type);
            if (encoderStatus == AVCENC_WRONG_STATE) {
                mSpsPpsHeaderReceived = true;
                CHECK_EQ(0, mNumInputFrames);  // 1st video frame is 0
                outHeader->nFlags = OMX_BUFFERFLAG_CODECCONFIG;
                outQueue.erase(outQueue.begin());
                outInfo->mOwnedByUs = false;
                notifyFillBufferDone(outHeader);
                return;
            } else {
                switch (type) {
                    case AVC_NALTYPE_SPS:
                        ++mNumInputFrames;
                        memcpy((uint8_t *)outHeader->pBuffer, "\x00\x00\x00\x01", 4);
                        outHeader->nFilledLen = 4 + dataLength;
                        outPtr += (dataLength + 4);  // 4 bytes for next start code
                        dataLength = outHeader->nAllocLen - outHeader->nFilledLen;
                        break;
                    default:
                        CHECK_EQ(AVC_NALTYPE_PPS, type);
                        ++mNumInputFrames;
                        memcpy((uint8_t *) outHeader->pBuffer + outHeader->nFilledLen,
                                "\x00\x00\x00\x01", 4);
                        outHeader->nFilledLen += (dataLength + 4);
                        outPtr += (dataLength + 4);
                        break;
                }
            }
        }

        buffer_handle_t srcBuffer; // for MetaDataMode only

        // Get next input video frame
        if (mReadyForNextFrame) {
            // Save the input buffer info so that it can be
            // passed to an output buffer
            InputBufferInfo info;
            info.mTimeUs = inHeader->nTimeStamp;
            info.mFlags = inHeader->nFlags;
            mInputBufferInfoVec.push(info);
            mPrevTimestampUs = inHeader->nTimeStamp;

            if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
                mSawInputEOS = true;
            }

            if (inHeader->nFilledLen > 0) {
                AVCFrameIO videoInput;
                memset(&videoInput, 0, sizeof(videoInput));
                videoInput.height = ((mVideoHeight + 15) >> 4) << 4;
                videoInput.pitch = ((mVideoWidth + 15) >> 4) << 4;
                videoInput.coding_timestamp = (inHeader->nTimeStamp + 500) / 1000;  // in ms
                uint8_t *inputData = NULL;
                if (mStoreMetaDataInBuffers) {
                    if (inHeader->nFilledLen != 8) {
                        ALOGE("MetaData buffer is wrong size! "
                                "(got %lu bytes, expected 8)", inHeader->nFilledLen);
                        mSignalledError = true;
                        notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
                        return;
                    }
                    inputData =
                            extractGrallocData(inHeader->pBuffer + inHeader->nOffset,
                                    &srcBuffer);
                    if (inputData == NULL) {
                        ALOGE("Unable to extract gralloc buffer in metadata mode");
                        mSignalledError = true;
                        notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
                        return;
                    }
                    // TODO: Verify/convert pixel format enum
                } else {
                    inputData = (uint8_t *)inHeader->pBuffer + inHeader->nOffset;
                }

                if (mVideoColorFormat != OMX_COLOR_FormatYUV420Planar) {
                    ConvertYUV420SemiPlanarToYUV420Planar(
                        inputData, mInputFrameData, mVideoWidth, mVideoHeight);
                    inputData = mInputFrameData;
                }
                CHECK(inputData != NULL);
                videoInput.YCbCr[0] = inputData;
                videoInput.YCbCr[1] = videoInput.YCbCr[0] + videoInput.height * videoInput.pitch;
                videoInput.YCbCr[2] = videoInput.YCbCr[1] +
                    ((videoInput.height * videoInput.pitch) >> 2);
                videoInput.disp_order = mNumInputFrames;

                encoderStatus = PVAVCEncSetInput(mHandle, &videoInput);
                if (encoderStatus == AVCENC_SUCCESS || encoderStatus == AVCENC_NEW_IDR) {
                    mReadyForNextFrame = false;
                    ++mNumInputFrames;
                    if (encoderStatus == AVCENC_NEW_IDR) {
                        mIsIDRFrame = true;
                    }
                } else {
                    if (encoderStatus < AVCENC_SUCCESS) {
                        ALOGE("encoderStatus = %d at line %d", encoderStatus, __LINE__);
                        mSignalledError = true;
                        releaseGrallocData(srcBuffer);
                        notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
                        return;
                    } else {
                        ALOGV("encoderStatus = %d at line %d", encoderStatus, __LINE__);
                        inQueue.erase(inQueue.begin());
                        inInfo->mOwnedByUs = false;
                        releaseGrallocData(srcBuffer);
                        notifyEmptyBufferDone(inHeader);
                        return;
                    }
                }
            }
        }

        // Encode an input video frame
        CHECK(encoderStatus == AVCENC_SUCCESS || encoderStatus == AVCENC_NEW_IDR);
        dataLength = outHeader->nAllocLen;  // Reset the output buffer length
        if (inHeader->nFilledLen > 0) {
            encoderStatus = PVAVCEncodeNAL(mHandle, outPtr, &dataLength, &type);
            if (encoderStatus == AVCENC_SUCCESS) {
                CHECK(NULL == PVAVCEncGetOverrunBuffer(mHandle));
            } else if (encoderStatus == AVCENC_PICTURE_READY) {
                CHECK(NULL == PVAVCEncGetOverrunBuffer(mHandle));
                if (mIsIDRFrame) {
                    outHeader->nFlags |= OMX_BUFFERFLAG_SYNCFRAME;
                    mIsIDRFrame = false;
                }
                mReadyForNextFrame = true;
                AVCFrameIO recon;
                if (PVAVCEncGetRecon(mHandle, &recon) == AVCENC_SUCCESS) {
                    PVAVCEncReleaseRecon(mHandle, &recon);
                }
            } else {
                dataLength = 0;
                mReadyForNextFrame = true;
            }

            if (encoderStatus < AVCENC_SUCCESS) {
                ALOGE("encoderStatus = %d at line %d", encoderStatus, __LINE__);
                mSignalledError = true;
                releaseGrallocData(srcBuffer);
                notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
                return;
            }
        } else {
            dataLength = 0;
        }

        inQueue.erase(inQueue.begin());
        inInfo->mOwnedByUs = false;
        releaseGrallocData(srcBuffer);
        notifyEmptyBufferDone(inHeader);

        outQueue.erase(outQueue.begin());
        CHECK(!mInputBufferInfoVec.empty());
        InputBufferInfo *inputBufInfo = mInputBufferInfoVec.begin();
        outHeader->nTimeStamp = inputBufInfo->mTimeUs;
        outHeader->nFlags |= (inputBufInfo->mFlags | OMX_BUFFERFLAG_ENDOFFRAME);
        if (mSawInputEOS) {
            outHeader->nFlags |= OMX_BUFFERFLAG_EOS;
        }
        outHeader->nFilledLen = dataLength;
        outInfo->mOwnedByUs = false;
        notifyFillBufferDone(outHeader);
        mInputBufferInfoVec.erase(mInputBufferInfoVec.begin());
    }
}

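// DPB allocation callback target: create numBuffers MediaBuffers, each
// large enough to hold one YUV420 reference frame (sizeInMbs * 384 bytes).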
int32_t SoftAVCEncoder::allocOutputBuffers(
        unsigned int sizeInMbs, unsigned int numBuffers) {
    CHECK(mOutputBuffers.isEmpty());
    size_t frameSize = (sizeInMbs << 7) * 3;
    for (unsigned int i = 0; i < numBuffers; ++i) {
        MediaBuffer *buffer = new MediaBuffer(frameSize);
        buffer->setObserver(this);
        mOutputBuffers.push(buffer);
    }

    return 1;
}

void SoftAVCEncoder::unbindOutputBuffer(int32_t index) {
    CHECK(index >= 0);
}

int32_t SoftAVCEncoder::bindOutputBuffer(int32_t index, uint8_t **yuv) {
    CHECK(index >= 0);
    CHECK(index < (int32_t) mOutputBuffers.size());
    *yuv = (uint8_t *) mOutputBuffers[index]->data();

    return 1;
}

void SoftAVCEncoder::signalBufferReturned(MediaBuffer *buffer) {
    ALOGV("signalBufferReturned: %p", buffer);
}

OMX_ERRORTYPE SoftAVCEncoder::getExtensionIndex(
        const char *name, OMX_INDEXTYPE *index) {
    if (!strcmp(name, "OMX.google.android.index.storeMetaDataInBuffers")) {
        *(int32_t*)index = kStoreMetaDataExtensionIndex;
        return OMX_ErrorNone;
    }
    return OMX_ErrorUndefined;
}

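// In metadata mode the input payload is a 4-byte kMetadataBufferTypeGrallocSource
// tag followed by a buffer_handle_t. Lock the gralloc buffer to obtain a CPU
// pointer to the pixel data; releaseGrallocData() unlocks it afterwards.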
uint8_t *SoftAVCEncoder::extractGrallocData(void *data, buffer_handle_t *buffer) {
    OMX_U32 type = *(OMX_U32*)data;
    status_t res;
    if (type != kMetadataBufferTypeGrallocSource) {
        ALOGE("Data passed in with metadata mode does not have type "
                "kMetadataBufferTypeGrallocSource (%d), has type %ld instead",
                kMetadataBufferTypeGrallocSource, type);
        return NULL;
    }
    buffer_handle_t imgBuffer = *(buffer_handle_t*)((uint8_t*)data + 4);

    const Rect rect(mVideoWidth, mVideoHeight);
    uint8_t *img;
    res = GraphicBufferMapper::get().lock(imgBuffer,
            GRALLOC_USAGE_HW_VIDEO_ENCODER,
            rect, (void**)&img);
    if (res != OK) {
        ALOGE("%s: Unable to lock image buffer %p for access", __FUNCTION__,
                imgBuffer);
        return NULL;
    }

    *buffer = imgBuffer;
    return img;
}

void SoftAVCEncoder::releaseGrallocData(buffer_handle_t buffer) {
    if (mStoreMetaDataInBuffers) {
        GraphicBufferMapper::get().unlock(buffer);
    }
}

}  // namespace android

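// Factory entry point used by the framework's software OMX plugin to
// instantiate this component.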
android::SoftOMXComponent *createSoftOMXComponent(
        const char *name, const OMX_CALLBACKTYPE *callbacks,
        OMX_PTR appData, OMX_COMPONENTTYPE **component) {
    return new android::SoftAVCEncoder(name, callbacks, appData, component);
}