      1 /*
      2 * Copyright (c) 2009-2011 Intel Corporation.  All rights reserved.
      3 *
      4 * Licensed under the Apache License, Version 2.0 (the "License");
      5 * you may not use this file except in compliance with the License.
      6 * You may obtain a copy of the License at
      7 *
      8 * http://www.apache.org/licenses/LICENSE-2.0
      9 *
     10 * Unless required by applicable law or agreed to in writing, software
     11 * distributed under the License is distributed on an "AS IS" BASIS,
     12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     13 * See the License for the specific language governing permissions and
     14 * limitations under the License.
     15 */
     16 
     17 #include <string.h>
     18 #include "VideoEncoderLog.h"
     19 #include "VideoEncoderBase.h"
     20 #include "IntelMetadataBuffer.h"
     21 #include <va/va_tpi.h>
     22 #include <va/va_android.h>
     23 
     24 VideoEncoderBase::VideoEncoderBase()
     25     :mInitialized(true)
     26     ,mStarted(false)
     27     ,mVADisplay(NULL)
     28     ,mVAContext(VA_INVALID_ID)
     29     ,mVAConfig(VA_INVALID_ID)
     30     ,mVAEntrypoint(VAEntrypointEncSlice)
     31     ,mNewHeader(false)
     32     ,mRenderMaxSliceSize(false)
     33     ,mRenderQP (false)
     34     ,mRenderAIR(false)
     35     ,mRenderCIR(false)
     36     ,mRenderFrameRate(false)
     37     ,mRenderBitRate(false)
     38     ,mRenderHrd(false)
     39     ,mRenderMultiTemporal(false)
     40     ,mForceKFrame(false)
     41     ,mSeqParamBuf(0)
     42     ,mPicParamBuf(0)
     43     ,mSliceParamBuf(0)
     44     ,mAutoRefSurfaces(NULL)
     45     ,mRefSurface(VA_INVALID_SURFACE)
     46     ,mRecSurface(VA_INVALID_SURFACE)
     47     ,mFrameNum(0)
     48     ,mCodedBufSize(0)
     49     ,mAutoReference(false)
     50     ,mAutoReferenceSurfaceNum(4)
     51     ,mEncPackedHeaders(VA_ATTRIB_NOT_SUPPORTED)
     52     ,mSliceSizeOverflow(false)
     53     ,mCurOutputTask(NULL)
     54     ,mOutCodedBuffer(0)
     55     ,mOutCodedBufferPtr(NULL)
     56     ,mCurSegment(NULL)
     57     ,mOffsetInSeg(0)
     58     ,mTotalSize(0)
     59     ,mTotalSizeCopied(0)
     60     ,mFrameSkipped(false)
     61     ,mSupportedSurfaceMemType(0)
     62     ,mVASurfaceMappingAction(0)
     63 #ifdef INTEL_VIDEO_XPROC_SHARING
     64     ,mSessionFlag(0)
     65 #endif
     66     {
     67 
     68     VAStatus vaStatus = VA_STATUS_SUCCESS;
    // The native display handle can be any value here; this fixed value is used
    // only for consistency and has no special meaning.
     71     unsigned int display = 0x18C34078;
     72     int majorVersion = -1;
     73     int minorVersion = -1;
     74 
     75     setDefaultParams();
     76 
     77     LOG_V("vaGetDisplay \n");
     78     mVADisplay = vaGetDisplay(&display);
     79     if (mVADisplay == NULL) {
     80         LOG_E("vaGetDisplay failed.");
     81     }
     82 
     83     vaStatus = vaInitialize(mVADisplay, &majorVersion, &minorVersion);
     84     LOG_V("vaInitialize \n");
     85     if (vaStatus != VA_STATUS_SUCCESS) {
     86         LOG_E( "Failed vaInitialize, vaStatus = %d\n", vaStatus);
     87         mInitialized = false;
     88     }
     89 }
     90 
     91 VideoEncoderBase::~VideoEncoderBase() {
     92 
     93     VAStatus vaStatus = VA_STATUS_SUCCESS;
     94 
     95     stop();
     96 
     97     vaStatus = vaTerminate(mVADisplay);
     98     LOG_V( "vaTerminate\n");
     99     if (vaStatus != VA_STATUS_SUCCESS) {
    100         LOG_W( "Failed vaTerminate, vaStatus = %d\n", vaStatus);
    101     } else {
    102         mVADisplay = NULL;
    103     }
    104 
    105 #ifdef INTEL_VIDEO_XPROC_SHARING
    106     IntelMetadataBuffer::ClearContext(mSessionFlag, false);
    107 #endif
    108 }
    109 
    110 Encode_Status VideoEncoderBase::start() {
    111 
    112     Encode_Status ret = ENCODE_SUCCESS;
    113     VAStatus vaStatus = VA_STATUS_SUCCESS;
    114 
    115     if (!mInitialized) {
        LOGE("Encoder initialization failed, cannot start");
    117         return ENCODE_DRIVER_FAIL;
    118     }
    119 
    120     if (mStarted) {
    121         LOG_V("Encoder has been started\n");
    122         return ENCODE_ALREADY_INIT;
    123     }
    124 
    125     if (mComParams.rawFormat != RAW_FORMAT_NV12)
    126 #ifdef IMG_GFX
    127         mVASurfaceMappingAction |= MAP_ACTION_COLORCONVERT;
    128 #else
    129         return ENCODE_NOT_SUPPORTED;
    130 #endif
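    // A non-NV12 raw format is only supported on IMG_GFX builds, where
    // MAP_ACTION_COLORCONVERT is flagged for the source surface mapping;
    // on other builds it is rejected.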
    131 
    132     if (mComParams.resolution.width > 2048 || mComParams.resolution.height > 2048){
    133         LOGE("Unsupported resolution width %d, height %d\n",
    134             mComParams.resolution.width, mComParams.resolution.height);
    135         return ENCODE_NOT_SUPPORTED;
    136     }
    137     queryAutoReferenceConfig(mComParams.profile);
    138 
    139     VAConfigAttrib vaAttrib_tmp[6],vaAttrib[VAConfigAttribTypeMax];
    140     int vaAttribNumber = 0;
    141     vaAttrib_tmp[0].type = VAConfigAttribRTFormat;
    142     vaAttrib_tmp[1].type = VAConfigAttribRateControl;
    143     vaAttrib_tmp[2].type = VAConfigAttribEncAutoReference;
    144     vaAttrib_tmp[3].type = VAConfigAttribEncPackedHeaders;
    145     vaAttrib_tmp[4].type = VAConfigAttribEncMaxRefFrames;
    146     vaAttrib_tmp[5].type = VAConfigAttribEncRateControlExt;
    147 
    148     vaStatus = vaGetConfigAttributes(mVADisplay, mComParams.profile,
    149             VAEntrypointEncSlice, &vaAttrib_tmp[0], 6);
    150     CHECK_VA_STATUS_RETURN("vaGetConfigAttributes");
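    // Build the attribute list that is actually passed to vaCreateConfig() below:
    // optional attributes are copied in only when the driver reports support for
    // them, and vaAttribNumber counts how many entries were filled in.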
    151 
    152     if((vaAttrib_tmp[0].value & VA_RT_FORMAT_YUV420) != 0)
    153     {
    154         vaAttrib[vaAttribNumber].type = VAConfigAttribRTFormat;
    155         vaAttrib[vaAttribNumber].value = VA_RT_FORMAT_YUV420;
    156         vaAttribNumber++;
    157     }
    158 
    159     vaAttrib[vaAttribNumber].type = VAConfigAttribRateControl;
    160     vaAttrib[vaAttribNumber].value = mComParams.rcMode;
    161     vaAttribNumber++;
    162 
    163     vaAttrib[vaAttribNumber].type = VAConfigAttribEncAutoReference;
    164     vaAttrib[vaAttribNumber].value = mAutoReference ? 1 : VA_ATTRIB_NOT_SUPPORTED;
    165     vaAttribNumber++;
    166 
    if(vaAttrib_tmp[3].value != VA_ATTRIB_NOT_SUPPORTED)
    {
        vaAttrib[vaAttribNumber].type = VAConfigAttribEncPackedHeaders;
        vaAttrib[vaAttribNumber].value = vaAttrib_tmp[3].value;
        vaAttribNumber++;
        mEncPackedHeaders = vaAttrib_tmp[3].value;
    }
    174 
    if(vaAttrib_tmp[4].value != VA_ATTRIB_NOT_SUPPORTED)
    {
        vaAttrib[vaAttribNumber].type = VAConfigAttribEncMaxRefFrames;
        vaAttrib[vaAttribNumber].value = vaAttrib_tmp[4].value;
        vaAttribNumber++;
        mEncMaxRefFrames = vaAttrib_tmp[4].value;
    }
    182 
    183     if(vaAttrib_tmp[5].value != VA_ATTRIB_NOT_SUPPORTED)
    184     {
    185         vaAttrib[vaAttribNumber].type = VAConfigAttribEncRateControlExt;
    186         vaAttrib[vaAttribNumber].value = mComParams.numberOfLayer;
    187         vaAttribNumber++;
    188     }
    189 
    190     LOG_V( "======VA Configuration======\n");
    191     LOG_V( "profile = %d\n", mComParams.profile);
    192     LOG_V( "mVAEntrypoint = %d\n", mVAEntrypoint);
    193     LOG_V( "vaAttrib[0].type = %d\n", vaAttrib[0].type);
    194     LOG_V( "vaAttrib[1].type = %d\n", vaAttrib[1].type);
    195     LOG_V( "vaAttrib[2].type = %d\n", vaAttrib[2].type);
    196     LOG_V( "vaAttrib[0].value (Format) = %d\n", vaAttrib[0].value);
    197     LOG_V( "vaAttrib[1].value (RC mode) = %d\n", vaAttrib[1].value);
    198     LOG_V( "vaAttrib[2].value (AutoReference) = %d\n", vaAttrib[2].value);
    199     LOG_V( "vaAttribNumber is %d\n", vaAttribNumber);
    200     LOG_V( "mComParams.numberOfLayer is %d\n", mComParams.numberOfLayer);
    201 
    202     LOG_V( "vaCreateConfig\n");
    203 
    204     vaStatus = vaCreateConfig(
    205             mVADisplay, mComParams.profile, mVAEntrypoint,
    206             &vaAttrib[0], vaAttribNumber, &(mVAConfig));
    207 //            &vaAttrib[0], 3, &(mVAConfig));  //uncomment this after psb_video supports
    208     CHECK_VA_STATUS_RETURN("vaCreateConfig");
    209 
    210     querySupportedSurfaceMemTypes();
    211 
    212     if (mComParams.rcMode == VA_RC_VCM) {
    213         // Following three features are only enabled in VCM mode
    214         mRenderMaxSliceSize = true;
    215         mRenderAIR = true;
    216         mRenderBitRate = true;
    217     }
    218 
    219     LOG_V( "======VA Create Surfaces for Rec/Ref frames ======\n");
    220 
    221     uint32_t stride_aligned, height_aligned;
    222     if(mAutoReference == false){
    223         stride_aligned = (mComParams.resolution.width + 15) & ~15;
    224         height_aligned = (mComParams.resolution.height + 15) & ~15;
    225     }else{
        // this alignment is used for AVC; for VP8 encode the driver handles the alignment
    227         if(mComParams.profile == VAProfileVP8Version0_3)
    228         {
    229             stride_aligned = mComParams.resolution.width;
    230             height_aligned = mComParams.resolution.height;
    231             mVASurfaceMappingAction |= MAP_ACTION_COPY;
    232         }
    233         else
    234         {
    235             stride_aligned = (mComParams.resolution.width + 63) & ~63;  //on Merr, stride must be 64 aligned.
    236             height_aligned = (mComParams.resolution.height + 31) & ~31;
    237             mVASurfaceMappingAction |= MAP_ACTION_ALIGN64;
    238         }
    239     }
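    // Note on the alignment math above: (x + N-1) & ~(N-1) rounds x up to the next
    // multiple of N (a power of two), e.g. (1300 + 63) & ~63 == 1344, while a value
    // that is already aligned, such as 1280, is left unchanged.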
    240 
    241     if(mAutoReference == false){
    242         mRefSurface = CreateNewVASurface(mVADisplay, stride_aligned, height_aligned);
    243         mRecSurface = CreateNewVASurface(mVADisplay, stride_aligned, height_aligned);
    244 
    245     }else {
    246         mAutoRefSurfaces = new VASurfaceID [mAutoReferenceSurfaceNum];
    247         for(uint32_t i = 0; i < mAutoReferenceSurfaceNum; i ++)
    248             mAutoRefSurfaces[i] = CreateNewVASurface(mVADisplay, stride_aligned, height_aligned);
    249     }
    250     CHECK_VA_STATUS_RETURN("vaCreateSurfaces");
    251 
    252     //Prepare all Surfaces to be added into Context
    253     uint32_t contextSurfaceCnt;
    254     if(mAutoReference == false )
    255         contextSurfaceCnt = 2 + mSrcSurfaceMapList.size();
    256     else
    257         contextSurfaceCnt = mAutoReferenceSurfaceNum + mSrcSurfaceMapList.size();
    258 
    259     VASurfaceID *contextSurfaces = new VASurfaceID[contextSurfaceCnt];
    260     int32_t index = -1;
    261     android::List<VASurfaceMap *>::iterator map_node;
    262 
    263     for(map_node = mSrcSurfaceMapList.begin(); map_node !=  mSrcSurfaceMapList.end(); map_node++)
    264     {
    265         contextSurfaces[++index] = (*map_node)->getVASurface();
    266         (*map_node)->setTracked();
    267     }
    268 
    269     if(mAutoReference == false){
    270         contextSurfaces[++index] = mRefSurface;
    271         contextSurfaces[++index] = mRecSurface;
    272     } else {
    273         for (uint32_t i=0; i < mAutoReferenceSurfaceNum; i++)
    274             contextSurfaces[++index] = mAutoRefSurfaces[i];
    275     }
    276 
    277     //Initialize and save the VA context ID
    278     LOG_V( "vaCreateContext\n");
    279     vaStatus = vaCreateContext(mVADisplay, mVAConfig,
    280 #ifdef IMG_GFX
    281             mComParams.resolution.width,
    282             mComParams.resolution.height,
    283 #else
    284             stride_aligned,
    285             height_aligned,
    286 #endif
    287             VA_PROGRESSIVE, contextSurfaces, contextSurfaceCnt,
    288             &(mVAContext));
    289     CHECK_VA_STATUS_RETURN("vaCreateContext");
    290 
    291     delete [] contextSurfaces;
    292 
    LOG_I("Successfully created libva context, width %d, height %d\n",
    294           mComParams.resolution.width, mComParams.resolution.height);
    295 
    296     uint32_t maxSize = 0;
    297     ret = getMaxOutSize(&maxSize);
    298     CHECK_ENCODE_STATUS_RETURN("getMaxOutSize");
    299 
    300     // Create CodedBuffer for output
    301     VABufferID VACodedBuffer;
    302 
    303     for(uint32_t i = 0; i <mComParams.codedBufNum; i++) {
    304             vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
    305                     VAEncCodedBufferType,
    306                     mCodedBufSize,
    307                     1, NULL,
    308                     &VACodedBuffer);
    309             CHECK_VA_STATUS_RETURN("vaCreateBuffer::VAEncCodedBufferType");
    310 
    311             mVACodedBufferList.push_back(VACodedBuffer);
    312     }
    313 
    314     if (ret == ENCODE_SUCCESS)
    315         mStarted = true;
    316 
    317     LOG_V( "end\n");
    318     return ret;
    319 }
    320 
    321 Encode_Status VideoEncoderBase::encode(VideoEncRawBuffer *inBuffer, uint32_t timeout) {
    322 
    323     Encode_Status ret = ENCODE_SUCCESS;
    324     VAStatus vaStatus = VA_STATUS_SUCCESS;
    325 
    326     if (!mStarted) {
        LOG_E("Encoder has not been initialized yet\n");
    328         return ENCODE_NOT_INIT;
    329     }
    330 
    331     CHECK_NULL_RETURN_IFFAIL(inBuffer);
    332 
    //======Prepare all resources the encoder needs======
    334 
    335     //Prepare encode vaSurface
    336     VASurfaceID sid = VA_INVALID_SURFACE;
    337     ret = manageSrcSurface(inBuffer, &sid);
    338     CHECK_ENCODE_STATUS_RETURN("manageSrcSurface");
    339 
    340     //Prepare CodedBuffer
    341     mCodedBuffer_Lock.lock();
    342     if(mVACodedBufferList.empty()){
    343         if(timeout == FUNC_BLOCK)
    344             mCodedBuffer_Cond.wait(mCodedBuffer_Lock);
    345         else if (timeout > 0) {
            if(NO_ERROR != mCodedBuffer_Cond.waitRelative(mCodedBuffer_Lock, 1000000*timeout)){
                mCodedBuffer_Lock.unlock();
                LOG_E("Timed out waiting for a coded buffer.\n");
    349                 return ENCODE_DEVICE_BUSY;
    350             }
    351         }
    352         else {//Nonblock
    353             mCodedBuffer_Lock.unlock();
    354             LOG_E("Coded buffer is not ready now.\n");
    355             return ENCODE_DEVICE_BUSY;
    356         }
    357     }
    358 
    359     if(mVACodedBufferList.empty()){
    360         mCodedBuffer_Lock.unlock();
    361         return ENCODE_DEVICE_BUSY;
    362     }
    363     VABufferID coded_buf = (VABufferID) *(mVACodedBufferList.begin());
    364     mVACodedBufferList.erase(mVACodedBufferList.begin());
    365     mCodedBuffer_Lock.unlock();
    366 
    367     LOG_V("CodedBuffer ID 0x%08x\n", coded_buf);
    368 
    369     //All resources are ready, start to assemble EncodeTask
    370     EncodeTask* task = new EncodeTask();
    371 
    372     task->completed = false;
    373     task->enc_surface = sid;
    374     task->coded_buffer = coded_buf;
    375     task->timestamp = inBuffer->timeStamp;
    376     task->priv = inBuffer->priv;
    377 
    378     //Setup frame info, like flag ( SYNCFRAME), frame number, type etc
    379     task->type = inBuffer->type;
    380     task->flag = inBuffer->flag;
    381     PrepareFrameInfo(task);
    382 
    383     if(mAutoReference == false){
        //Setup ref/rec frames
        //TODO: B frame support; temporarily use the same logic for all frame types
    386         switch (inBuffer->type) {
    387             case FTYPE_UNKNOWN:
    388             case FTYPE_IDR:
    389             case FTYPE_I:
    390             case FTYPE_P:
    391             {
    392                 if(!mFrameSkipped) {
    393                     VASurfaceID tmpSurface = mRecSurface;
    394                     mRecSurface = mRefSurface;
    395                     mRefSurface = tmpSurface;
    396                 }
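                // Ping-pong: the previous frame's reconstructed surface becomes the
                // reference for the current frame; when the last frame was skipped
                // the ref/rec pair is left unchanged.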
    397 
    398                 task->ref_surface = mRefSurface;
    399                 task->rec_surface = mRecSurface;
    400 
    401                 break;
    402             }
    403             case FTYPE_B:
    404             default:
                LOG_V("Something is wrong; B frames are not supported in this mode\n");
    406                 ret = ENCODE_NOT_SUPPORTED;
    407                 goto CLEAN_UP;
    408         }
    409     }else {
    410         task->ref_surface = VA_INVALID_SURFACE;
    411         task->rec_surface = VA_INVALID_SURFACE;
    412     }
    413     //======Start Encoding, add task to list======
    414     LOG_V("Start Encoding vaSurface=0x%08x\n", task->enc_surface);
    415 
    416     vaStatus = vaBeginPicture(mVADisplay, mVAContext, task->enc_surface);
    417     CHECK_VA_STATUS_GOTO_CLEANUP("vaBeginPicture");
    418 
    419     ret = sendEncodeCommand(task);
    420     CHECK_ENCODE_STATUS_CLEANUP("sendEncodeCommand");
    421 
    422     vaStatus = vaEndPicture(mVADisplay, mVAContext);
    423     CHECK_VA_STATUS_GOTO_CLEANUP("vaEndPicture");
    424 
    425     LOG_V("Add Task %p into Encode Task list\n", task);
    426     mEncodeTask_Lock.lock();
    427     mEncodeTaskList.push_back(task);
    428     mEncodeTask_Cond.signal();
    429     mEncodeTask_Lock.unlock();
    430 
    431     mFrameNum ++;
    432 
    433     LOG_V("encode return Success\n");
    434 
    435     return ENCODE_SUCCESS;
    436 
    437 CLEAN_UP:
    438 
    439     delete task;
    440     mCodedBuffer_Lock.lock();
    441     mVACodedBufferList.push_back(coded_buf); //push to CodedBuffer pool again since it is not used
    442     mCodedBuffer_Cond.signal();
    443     mCodedBuffer_Lock.unlock();
    444 
    445     LOG_V("encode return error=%x\n", ret);
    446 
    447     return ret;
    448 }
    449 
/*
  getOutput() logic:
  1. If a task is already being output, continue with it; otherwise take the next one from the task list.
  2. Because there are three modes (block, non-block, block with timeout), an uncompleted task is synced
     or queried before its data is output.
  3. mCurOutputTask records the task getOutput() is working on, so the task is not pushed back onto the
     list when a non-block or timed call fails partway through.
  4. Once all output data for a task has been delivered, mCurOutputTask is reset to NULL.
*/
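/*
  Illustrative caller sketch (not part of this class): one possible way an application
  could drive encode()/getOutput() in blocking mode. The buffer setup is an assumption;
  only encode(), getOutput(), FUNC_BLOCK, OUTPUT_EVERYTHING and ENCODE_BUFFER_TOO_SMALL
  come from this library.

      encoder->encode(&inBuf, FUNC_BLOCK);
      Encode_Status s;
      do {
          outBuf.format = OUTPUT_EVERYTHING;
          s = encoder->getOutput(&outBuf, FUNC_BLOCK);
          // consume outBuf.data / outBuf.dataSize here
      } while (s == ENCODE_BUFFER_TOO_SMALL);
*/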
    458 Encode_Status VideoEncoderBase::getOutput(VideoEncOutputBuffer *outBuffer, uint32_t timeout) {
    459 
    460     Encode_Status ret = ENCODE_SUCCESS;
    461     VAStatus vaStatus = VA_STATUS_SUCCESS;
    462     bool useLocalBuffer = false;
    463 
    464     CHECK_NULL_RETURN_IFFAIL(outBuffer);
    465 
    466     if (mCurOutputTask == NULL) {
    467         mEncodeTask_Lock.lock();
    468         if(mEncodeTaskList.empty()) {
    469             LOG_V("getOutput CurrentTask is NULL\n");
    470             if(timeout == FUNC_BLOCK) {
    471                 LOG_V("waiting for task....\n");
    472                 mEncodeTask_Cond.wait(mEncodeTask_Lock);
    473             } else if (timeout > 0) {
    474                 LOG_V("waiting for task in %i ms....\n", timeout);
    475                 if(NO_ERROR != mEncodeTask_Cond.waitRelative(mEncodeTask_Lock, 1000000*timeout)) {
    476                     mEncodeTask_Lock.unlock();
                    LOG_E("Timed out waiting for an encode task.\n");
    478                     return ENCODE_NO_REQUEST_DATA;
    479                 }
    480             } else {//Nonblock
    481                 mEncodeTask_Lock.unlock();
    482                 return ENCODE_NO_REQUEST_DATA;
    483             }
    484         }
    485 
    486         if(mEncodeTaskList.empty()){
    487             mEncodeTask_Lock.unlock();
    488             return ENCODE_DATA_NOT_READY;
    489         }
    490         mCurOutputTask =  *(mEncodeTaskList.begin());
    491         mEncodeTaskList.erase(mEncodeTaskList.begin());
    492         mEncodeTask_Lock.unlock();
    493     }
    494 
    495     //sync/query/wait task if not completed
    496     if (mCurOutputTask->completed == false) {
    497         VASurfaceStatus vaSurfaceStatus;
    498 
    499         if (timeout == FUNC_BLOCK) {
    500             //block mode, direct sync surface to output data
    501 
    502             mOutCodedBuffer = mCurOutputTask->coded_buffer;
    503 
            // Check frame skip.
            // Encoding must be complete before the vaQuerySurfaceStatus call below,
            // otherwise the skip flag reported for the current frame is not reliable;
            // this is a requirement of the video driver.
            // vaSyncSurface syncs the wrong frame when the same surface is rendered
            // multiple times, so vaMapBuffer is used instead.
    510             LOG_V ("block mode, vaMapBuffer ID = 0x%08x\n", mOutCodedBuffer);
    511             if (mOutCodedBufferPtr == NULL) {
    512                 vaStatus = vaMapBuffer (mVADisplay, mOutCodedBuffer, (void **)&mOutCodedBufferPtr);
    513                 CHECK_VA_STATUS_GOTO_CLEANUP("vaMapBuffer");
    514                 CHECK_NULL_RETURN_IFFAIL(mOutCodedBufferPtr);
    515             }
    516 
    517             vaStatus = vaQuerySurfaceStatus(mVADisplay, mCurOutputTask->enc_surface,  &vaSurfaceStatus);
    518             CHECK_VA_STATUS_RETURN("vaQuerySurfaceStatus");
    519             mFrameSkipped = vaSurfaceStatus & VASurfaceSkipped;
    520 
    521             mCurOutputTask->completed = true;
    522 
    523         } else {
    524             //For both block with timeout and non-block mode, query surface, if ready, output data
    525             LOG_V ("non-block mode, vaQuerySurfaceStatus ID = 0x%08x\n", mCurOutputTask->enc_surface);
    526 
    527             vaStatus = vaQuerySurfaceStatus(mVADisplay, mCurOutputTask->enc_surface,  &vaSurfaceStatus);
    528             if (vaSurfaceStatus & VASurfaceReady) {
    529                 mOutCodedBuffer = mCurOutputTask->coded_buffer;
    530                 mFrameSkipped = vaSurfaceStatus & VASurfaceSkipped;
    531                 mCurOutputTask->completed = true;
                //TODO: is another vaSyncSurface call needed here?
    533 
            } else { //encode not complete yet; keep all context and return directly
    535                 return ENCODE_DATA_NOT_READY;
    536             }
    537 
    538         }
    539 
    540     }
    541 
    542     //start to output data
    543     ret = prepareForOutput(outBuffer, &useLocalBuffer);
    544     CHECK_ENCODE_STATUS_CLEANUP("prepareForOutput");
    545 
    546     //copy all flags to outBuffer
    547     outBuffer->offset = 0;
    548     outBuffer->flag = mCurOutputTask->flag;
    549     outBuffer->type = mCurOutputTask->type;
    550     outBuffer->timeStamp = mCurOutputTask->timestamp;
    551     outBuffer->priv = mCurOutputTask->priv;
    552 
    553     if (outBuffer->format == OUTPUT_EVERYTHING || outBuffer->format == OUTPUT_FRAME_DATA) {
    554         ret = outputAllData(outBuffer);
    555         CHECK_ENCODE_STATUS_CLEANUP("outputAllData");
    556     }else {
    557         ret = getExtFormatOutput(outBuffer);
    558         CHECK_ENCODE_STATUS_CLEANUP("getExtFormatOutput");
    559     }
    560 
    561     LOG_V("out size for this getOutput call = %d\n", outBuffer->dataSize);
    562 
    563     ret = cleanupForOutput();
    564     CHECK_ENCODE_STATUS_CLEANUP("cleanupForOutput");
    565 
    566     LOG_V("getOutput return Success, Frame skip is %d\n", mFrameSkipped);
    567 
    568     return ENCODE_SUCCESS;
    569 
    570 CLEAN_UP:
    571 
    572     if (outBuffer->data && (useLocalBuffer == true)) {
    573         delete[] outBuffer->data;
    574         outBuffer->data = NULL;
    575         useLocalBuffer = false;
    576     }
    577 
    578     if (mOutCodedBufferPtr != NULL) {
    579         vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer);
    580         mOutCodedBufferPtr = NULL;
    581         mCurSegment = NULL;
    582     }
    583 
    584     delete mCurOutputTask;
    585     mCurOutputTask = NULL;
    586     mCodedBuffer_Lock.lock();
    587     mVACodedBufferList.push_back(mOutCodedBuffer);
    588     mCodedBuffer_Cond.signal();
    589     mCodedBuffer_Lock.unlock();
    590 
    591     LOG_V("getOutput return error=%x\n", ret);
    592     return ret;
    593 }
    594 
    595 void VideoEncoderBase::flush() {
    596 
    597     LOG_V( "Begin\n");
    598 
    // reset the properties
    600     mFrameNum = 0;
    601 
    602     LOG_V( "end\n");
    603 }
    604 
    605 Encode_Status VideoEncoderBase::stop() {
    606 
    607     VAStatus vaStatus = VA_STATUS_SUCCESS;
    608     Encode_Status ret = ENCODE_SUCCESS;
    609 
    610     LOG_V( "Begin\n");
    611 
    612     // It is possible that above pointers have been allocated
    613     // before we set mStarted to true
    614     if (!mStarted) {
    615         LOG_V("Encoder has been stopped\n");
    616         return ENCODE_SUCCESS;
    617     }
    618     if (mAutoRefSurfaces) {
    619         delete[] mAutoRefSurfaces;
    620         mAutoRefSurfaces = NULL;
    621     }
    622 
    623     mCodedBuffer_Lock.lock();
    624     mVACodedBufferList.clear();
    625     mCodedBuffer_Lock.unlock();
    626     mCodedBuffer_Cond.broadcast();
    627 
    628     //Delete all uncompleted tasks
    629     mEncodeTask_Lock.lock();
    630     while(! mEncodeTaskList.empty())
    631     {
    632         delete *mEncodeTaskList.begin();
    633         mEncodeTaskList.erase(mEncodeTaskList.begin());
    634     }
    635     mEncodeTask_Lock.unlock();
    636     mEncodeTask_Cond.broadcast();
    637 
    638     //Release Src Surface Buffer Map, destroy surface manually since it is not added into context
    LOG_V( "Release Src Surface Map\n");
    640     while(! mSrcSurfaceMapList.empty())
    641     {
    642         delete (*mSrcSurfaceMapList.begin());
    643         mSrcSurfaceMapList.erase(mSrcSurfaceMapList.begin());
    644     }
    645 
    646     LOG_V( "vaDestroyContext\n");
    647     if (mVAContext != VA_INVALID_ID) {
    648         vaStatus = vaDestroyContext(mVADisplay, mVAContext);
    649         CHECK_VA_STATUS_GOTO_CLEANUP("vaDestroyContext");
    650     }
    651 
    652     LOG_V( "vaDestroyConfig\n");
    653     if (mVAConfig != VA_INVALID_ID) {
    654         vaStatus = vaDestroyConfig(mVADisplay, mVAConfig);
    655         CHECK_VA_STATUS_GOTO_CLEANUP("vaDestroyConfig");
    656     }
    657 
    658 CLEAN_UP:
    659 
    660     mStarted = false;
    661     mSliceSizeOverflow = false;
    662     mCurOutputTask= NULL;
    663     mOutCodedBuffer = 0;
    664     mCurSegment = NULL;
    665     mOffsetInSeg =0;
    666     mTotalSize = 0;
    667     mTotalSizeCopied = 0;
    668     mFrameSkipped = false;
    669     mSupportedSurfaceMemType = 0;
    670 
    671     LOG_V( "end\n");
    672     return ret;
    673 }
    674 
    675 Encode_Status VideoEncoderBase::prepareForOutput(
    676         VideoEncOutputBuffer *outBuffer, bool *useLocalBuffer) {
    677 
    678     VAStatus vaStatus = VA_STATUS_SUCCESS;
    679     VACodedBufferSegment *vaCodedSeg = NULL;
    680     uint32_t status = 0;
    681 
    682     LOG_V( "begin\n");
    683     // Won't check parameters here as the caller already checked them
    // mCurSegment == NULL means this is the first call here after a frame finished encoding
    685     if (mCurSegment == NULL) {
    686         if (mOutCodedBufferPtr == NULL) {
    687             vaStatus = vaMapBuffer (mVADisplay, mOutCodedBuffer, (void **)&mOutCodedBufferPtr);
    688             CHECK_VA_STATUS_RETURN("vaMapBuffer");
    689             CHECK_NULL_RETURN_IFFAIL(mOutCodedBufferPtr);
    690         }
    691 
    692         LOG_V("Coded Buffer ID been mapped = 0x%08x\n", mOutCodedBuffer);
    693 
    694         mTotalSize = 0;
    695         mOffsetInSeg = 0;
    696         mTotalSizeCopied = 0;
    697         vaCodedSeg = (VACodedBufferSegment *)mOutCodedBufferPtr;
    698         mCurSegment = (VACodedBufferSegment *)mOutCodedBufferPtr;
    699 
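        // The coded buffer is a linked list of VACodedBufferSegment nodes; walk the
        // list once here to accumulate the total size and the overflow status before
        // any data is copied out.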
    700         while (1) {
    701 
    702             mTotalSize += vaCodedSeg->size;
    703             status = vaCodedSeg->status;
    704 #ifndef IMG_GFX
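            // Skip any leading 0xFF filler bytes (up to 16) at the start of the
            // segment data by advancing mOffsetInSeg past them.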
    705             uint8_t *pTemp;
    706             uint32_t ii;
    707             pTemp = (uint8_t*)vaCodedSeg->buf;
    708             for(ii = 0; ii < 16;){
    709                 if (*(pTemp + ii) == 0xFF)
    710                     ii++;
    711                 else
    712                     break;
    713             }
    714             if (ii > 0) {
    715                 mOffsetInSeg = ii;
    716             }
    717 #endif
    718             if (!mSliceSizeOverflow) {
    719                 mSliceSizeOverflow = status & VA_CODED_BUF_STATUS_SLICE_OVERFLOW_MASK;
    720             }
    721 
    722             if (vaCodedSeg->next == NULL)
    723                 break;
    724 
    725             vaCodedSeg = (VACodedBufferSegment *)vaCodedSeg->next;
    726         }
    727     }
    728 
    // Two buffer allocation modes are supported: either the application allocates
    // the output buffer and passes it in, or the encoder allocates one itself.

    // outBuffer->data == NULL means the app did not allocate the buffer, so the
    // encoder allocates it here.
    734     if (outBuffer->data == NULL) {
    735         *useLocalBuffer = true;
    736         outBuffer->data = new  uint8_t[mTotalSize - mTotalSizeCopied + 100];
    737         if (outBuffer->data == NULL) {
    738             LOG_E( "outBuffer->data == NULL\n");
    739             return ENCODE_NO_MEMORY;
    740         }
        outBuffer->bufferSize = mTotalSize - mTotalSizeCopied + 100;
    742         outBuffer->dataSize = 0;
    743     }
    744 
    // Clear all flags on every call
    746     outBuffer->flag = 0;
    747     if (mSliceSizeOverflow) outBuffer->flag |= ENCODE_BUFFERFLAG_SLICEOVERFOLOW;
    748 
    749     if (!mCurSegment)
    750         return ENCODE_FAIL;
    751 
    752     if (mCurSegment->size < mOffsetInSeg) {
    753         LOG_E("mCurSegment->size < mOffsetInSeg\n");
    754         return ENCODE_FAIL;
    755     }
    756 
    757     // Make sure we have data in current segment
    758     if (mCurSegment->size == mOffsetInSeg) {
    759         if (mCurSegment->next != NULL) {
    760             mCurSegment = (VACodedBufferSegment *)mCurSegment->next;
    761             mOffsetInSeg = 0;
    762         } else {
    763             LOG_V("No more data available\n");
    764             outBuffer->flag |= ENCODE_BUFFERFLAG_DATAINVALID;
    765             outBuffer->dataSize = 0;
    766             mCurSegment = NULL;
    767             return ENCODE_NO_REQUEST_DATA;
    768         }
    769     }
    770 
    771     LOG_V( "end\n");
    772     return ENCODE_SUCCESS;
    773 }
    774 
    775 Encode_Status VideoEncoderBase::cleanupForOutput() {
    776 
    777     VAStatus vaStatus = VA_STATUS_SUCCESS;
    778 
    779     //mCurSegment is NULL means all data has been copied out
    780     if (mCurSegment == NULL && mOutCodedBufferPtr) {
    781         vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer);
    782         CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
    783         mOutCodedBufferPtr = NULL;
    784         mTotalSize = 0;
    785         mOffsetInSeg = 0;
    786         mTotalSizeCopied = 0;
    787 
    788         delete mCurOutputTask;
    789         mCurOutputTask = NULL;
    790         mCodedBuffer_Lock.lock();
    791         mVACodedBufferList.push_back(mOutCodedBuffer);
    792         mCodedBuffer_Cond.signal();
    793         mCodedBuffer_Lock.unlock();
    794 
    795         LOG_V("All data has been outputted, return CodedBuffer 0x%08x to pool\n", mOutCodedBuffer);
    796     }
    797     return ENCODE_SUCCESS;
    798 }
    799 
    800 Encode_Status VideoEncoderBase::queryProfileLevelConfig(VADisplay dpy, VAProfile profile) {
    801 
    802     VAStatus vaStatus = VA_STATUS_SUCCESS;
    803     VAEntrypoint entryPtr[8];
    804     int i, entryPtrNum;
    805 
    806     if(profile ==  VAProfileH264Main) //need to be fixed
    807         return ENCODE_NOT_SUPPORTED;
    808 
    809     vaStatus = vaQueryConfigEntrypoints(dpy, profile, entryPtr, &entryPtrNum);
    810     CHECK_VA_STATUS_RETURN("vaQueryConfigEntrypoints");
    811 
    812     for(i=0; i<entryPtrNum; i++){
    813         if(entryPtr[i] == VAEntrypointEncSlice)
    814             return ENCODE_SUCCESS;
    815     }
    816 
    817     return ENCODE_NOT_SUPPORTED;
    818 }
    819 
    820 Encode_Status VideoEncoderBase::queryAutoReferenceConfig(VAProfile profile) {
    821 
    822     VAStatus vaStatus = VA_STATUS_SUCCESS;
    823     VAConfigAttrib attrib_list;
    824     attrib_list.type = VAConfigAttribEncAutoReference;
    825     attrib_list.value = VA_ATTRIB_NOT_SUPPORTED;
    826 
    827     vaStatus = vaGetConfigAttributes(mVADisplay, profile, VAEntrypointEncSlice, &attrib_list, 1);
    CHECK_VA_STATUS_RETURN("vaGetConfigAttributes");
    829 
    830     if(attrib_list.value == VA_ATTRIB_NOT_SUPPORTED )
    831         mAutoReference = false;
    832     else
    833         mAutoReference = true;
    834 
    835     return ENCODE_SUCCESS;
    836 }
    837 
    838 Encode_Status VideoEncoderBase::querySupportedSurfaceMemTypes() {
    839 
    840     VAStatus vaStatus = VA_STATUS_SUCCESS;
    841 
    842     unsigned int num = 0;
    843 
    844     VASurfaceAttrib* attribs = NULL;
    845 
    846     //get attribs number
    847     vaStatus = vaQuerySurfaceAttributes(mVADisplay, mVAConfig, attribs, &num);
    CHECK_VA_STATUS_RETURN("vaQuerySurfaceAttributes");
    849 
    850     if (num == 0)
    851         return ENCODE_SUCCESS;
    852 
    853     attribs = new VASurfaceAttrib[num];
    854 
    855     vaStatus = vaQuerySurfaceAttributes(mVADisplay, mVAConfig, attribs, &num);
    CHECK_VA_STATUS_RETURN("vaQuerySurfaceAttributes");
    857 
    for(uint32_t i = 0; i < num; i ++) {
        if (attribs[i].type == VASurfaceAttribMemoryType) {
            mSupportedSurfaceMemType = attribs[i].value.value.i;
            break;
        }
    }
    866 
    delete[] attribs;
    868 
    869     return ENCODE_SUCCESS;
    870 }
    871 
    872 Encode_Status VideoEncoderBase::outputAllData(VideoEncOutputBuffer *outBuffer) {
    873 
    // Data size copied during this single call
    875     uint32_t sizeCopiedHere = 0;
    876     uint32_t sizeToBeCopied = 0;
    877 
    878     CHECK_NULL_RETURN_IFFAIL(outBuffer->data);
    879 
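    // Copy as much of the current segment as fits into outBuffer. For example, with
    // a 1000-byte segment and a 600-byte output buffer the first call copies 600
    // bytes and returns ENCODE_BUFFER_TOO_SMALL with the PARTIALFRAME flag set, and
    // the next call copies the remaining 400 bytes.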
    880     while (1) {
    881 
    882         LOG_V("mCurSegment->size = %d, mOffsetInSeg = %d\n", mCurSegment->size, mOffsetInSeg);
    883         LOG_V("outBuffer->bufferSize = %d, sizeCopiedHere = %d, mTotalSizeCopied = %d\n",
    884               outBuffer->bufferSize, sizeCopiedHere, mTotalSizeCopied);
    885 
    886         if (mCurSegment->size < mOffsetInSeg || outBuffer->bufferSize < sizeCopiedHere) {
    887             LOG_E("mCurSegment->size < mOffsetInSeg  || outBuffer->bufferSize < sizeCopiedHere\n");
    888             return ENCODE_FAIL;
    889         }
    890 
    891         if ((mCurSegment->size - mOffsetInSeg) <= outBuffer->bufferSize - sizeCopiedHere) {
    892             sizeToBeCopied = mCurSegment->size - mOffsetInSeg;
    893             memcpy(outBuffer->data + sizeCopiedHere,
    894                    (uint8_t *)mCurSegment->buf + mOffsetInSeg, sizeToBeCopied);
    895             sizeCopiedHere += sizeToBeCopied;
    896             mTotalSizeCopied += sizeToBeCopied;
    897             mOffsetInSeg = 0;
    898         } else {
    899             sizeToBeCopied = outBuffer->bufferSize - sizeCopiedHere;
    900             memcpy(outBuffer->data + sizeCopiedHere,
    901                    (uint8_t *)mCurSegment->buf + mOffsetInSeg, outBuffer->bufferSize - sizeCopiedHere);
    902             mTotalSizeCopied += sizeToBeCopied;
    903             mOffsetInSeg += sizeToBeCopied;
    904             outBuffer->dataSize = outBuffer->bufferSize;
    905             outBuffer->remainingSize = mTotalSize - mTotalSizeCopied;
    906             outBuffer->flag |= ENCODE_BUFFERFLAG_PARTIALFRAME;
    907             return ENCODE_BUFFER_TOO_SMALL;
    908         }
    909 
    910         if (mCurSegment->next == NULL) {
    911             outBuffer->dataSize = sizeCopiedHere;
    912             outBuffer->remainingSize = 0;
    913             outBuffer->flag |= ENCODE_BUFFERFLAG_ENDOFFRAME;
    914             mCurSegment = NULL;
    915             return ENCODE_SUCCESS;
    916         }
    917 
    918         mCurSegment = (VACodedBufferSegment *)mCurSegment->next;
    919         mOffsetInSeg = 0;
    920     }
    921 }
    922 
    923 void VideoEncoderBase::setDefaultParams() {
    924 
    925     // Set default value for input parameters
    926     mComParams.profile = VAProfileH264Baseline;
    927     mComParams.level = 41;
    928     mComParams.rawFormat = RAW_FORMAT_NV12;
    929     mComParams.frameRate.frameRateNum = 30;
    930     mComParams.frameRate.frameRateDenom = 1;
    931     mComParams.resolution.width = 0;
    932     mComParams.resolution.height = 0;
    933     mComParams.intraPeriod = 30;
    934     mComParams.rcMode = RATE_CONTROL_NONE;
    935     mComParams.rcParams.initQP = 15;
    936     mComParams.rcParams.minQP = 0;
    937     mComParams.rcParams.maxQP = 0;
    938     mComParams.rcParams.I_minQP = 0;
    939     mComParams.rcParams.I_maxQP = 0;
    940     mComParams.rcParams.bitRate = 640000;
    941     mComParams.rcParams.targetPercentage= 0;
    942     mComParams.rcParams.windowSize = 0;
    943     mComParams.rcParams.disableFrameSkip = 0;
    944     mComParams.rcParams.disableBitsStuffing = 1;
    945     mComParams.rcParams.enableIntraFrameQPControl = 0;
    946     mComParams.rcParams.temporalFrameRate = 0;
    947     mComParams.rcParams.temporalID = 0;
    948     mComParams.cyclicFrameInterval = 30;
    949     mComParams.refreshType = VIDEO_ENC_NONIR;
    950     mComParams.airParams.airMBs = 0;
    951     mComParams.airParams.airThreshold = 0;
    952     mComParams.airParams.airAuto = 1;
    953     mComParams.disableDeblocking = 2;
    954     mComParams.syncEncMode = false;
    955     mComParams.codedBufNum = 2;
    956     mComParams.numberOfLayer = 1;
    957     mComParams.nPeriodicity = 0;
    958     memset(mComParams.nLayerID,0,32*sizeof(uint32_t));
    959 
    960     mHrdParam.bufferSize = 0;
    961     mHrdParam.initBufferFullness = 0;
    962 
    963     mStoreMetaDataInBuffers.isEnabled = false;
    964 }
    965 
    966 Encode_Status VideoEncoderBase::setParameters(
    967         VideoParamConfigSet *videoEncParams) {
    968 
    969     Encode_Status ret = ENCODE_SUCCESS;
    970     CHECK_NULL_RETURN_IFFAIL(videoEncParams);
    971     LOG_V("Config type = %x\n", (int)videoEncParams->type);
    972 
    973     if (mStarted) {
    974         LOG_E("Encoder has been initialized, should use setConfig to change configurations\n");
    975         return ENCODE_ALREADY_INIT;
    976     }
    977 
    978     switch (videoEncParams->type) {
    979         case VideoParamsTypeCommon: {
    980 
    981             VideoParamsCommon *paramsCommon =
    982                     reinterpret_cast <VideoParamsCommon *> (videoEncParams);
    983             if (paramsCommon->size != sizeof (VideoParamsCommon)) {
    984                 return ENCODE_INVALID_PARAMS;
    985             }
    986             if(paramsCommon->codedBufNum < 2)
    987                 paramsCommon->codedBufNum =2;
    988             mComParams = *paramsCommon;
    989             break;
    990         }
    991 
    992         case VideoParamsTypeUpSteamBuffer: {
    993 
    994             VideoParamsUpstreamBuffer *upStreamBuffer =
    995                     reinterpret_cast <VideoParamsUpstreamBuffer *> (videoEncParams);
    996 
    997             if (upStreamBuffer->size != sizeof (VideoParamsUpstreamBuffer)) {
    998                 return ENCODE_INVALID_PARAMS;
    999             }
   1000 
   1001             ret = setUpstreamBuffer(upStreamBuffer);
   1002             break;
   1003         }
   1004 
   1005         case VideoParamsTypeUsrptrBuffer: {
   1006 
            // usrptr can only be retrieved (via getParameters), not set,
            // so this case should not happen
   1009             break;
   1010         }
   1011 
   1012         case VideoParamsTypeHRD: {
   1013             VideoParamsHRD *hrd =
   1014                     reinterpret_cast <VideoParamsHRD *> (videoEncParams);
   1015 
   1016             if (hrd->size != sizeof (VideoParamsHRD)) {
   1017                 return ENCODE_INVALID_PARAMS;
   1018             }
   1019 
   1020             mHrdParam.bufferSize = hrd->bufferSize;
   1021             mHrdParam.initBufferFullness = hrd->initBufferFullness;
   1022             mRenderHrd = true;
   1023 
   1024             break;
   1025         }
   1026 
   1027         case VideoParamsTypeStoreMetaDataInBuffers: {
   1028             VideoParamsStoreMetaDataInBuffers *metadata =
   1029                     reinterpret_cast <VideoParamsStoreMetaDataInBuffers *> (videoEncParams);
   1030 
   1031             if (metadata->size != sizeof (VideoParamsStoreMetaDataInBuffers)) {
   1032                 return ENCODE_INVALID_PARAMS;
   1033             }
   1034 
   1035             mStoreMetaDataInBuffers.isEnabled = metadata->isEnabled;
   1036 
   1037             break;
   1038         }
   1039 
   1040         case VideoParamsTypeTemporalLayer:{
   1041             VideoParamsTemporalLayer *temporallayer =
   1042                     reinterpret_cast <VideoParamsTemporalLayer *> (videoEncParams);
   1043 
   1044             if (temporallayer->size != sizeof(VideoParamsTemporalLayer)) {
   1045                  return ENCODE_INVALID_PARAMS;
   1046             }
   1047 
   1048             mComParams.numberOfLayer = temporallayer->numberOfLayer;
   1049             mComParams.nPeriodicity = temporallayer->nPeriodicity;
   1050             for(uint32_t i=0;i<temporallayer->nPeriodicity;i++)
   1051                 mComParams.nLayerID[i] = temporallayer->nLayerID[i];
   1052             mRenderMultiTemporal = true;
   1053             break;
   1054         }
   1055 
   1056         case VideoParamsTypeAVC:
   1057         case VideoParamsTypeH263:
   1058         case VideoParamsTypeMP4:
   1059         case VideoParamsTypeVC1:
   1060         case VideoParamsTypeVP8: {
   1061             ret = derivedSetParams(videoEncParams);
   1062             break;
   1063         }
   1064 
   1065         default: {
   1066             LOG_E ("Wrong ParamType here\n");
   1067             return ENCODE_INVALID_PARAMS;
   1068         }
   1069     }
   1070     return ret;
   1071 }
   1072 
   1073 Encode_Status VideoEncoderBase::getParameters(
   1074         VideoParamConfigSet *videoEncParams) {
   1075 
   1076     Encode_Status ret = ENCODE_SUCCESS;
   1077     CHECK_NULL_RETURN_IFFAIL(videoEncParams);
   1078     LOG_V("Config type = %d\n", (int)videoEncParams->type);
   1079 
   1080     switch (videoEncParams->type) {
   1081         case VideoParamsTypeCommon: {
   1082 
   1083             VideoParamsCommon *paramsCommon =
   1084                     reinterpret_cast <VideoParamsCommon *> (videoEncParams);
   1085 
   1086             if (paramsCommon->size != sizeof (VideoParamsCommon)) {
   1087                 return ENCODE_INVALID_PARAMS;
   1088             }
   1089             *paramsCommon = mComParams;
   1090             break;
   1091         }
   1092 
   1093         case VideoParamsTypeUpSteamBuffer: {
   1094 
            // Getting the upstream buffer parameters is possible,
            // but not particularly meaningful
   1097             break;
   1098         }
   1099 
   1100         case VideoParamsTypeUsrptrBuffer: {
   1101             VideoParamsUsrptrBuffer *usrptrBuffer =
   1102                     reinterpret_cast <VideoParamsUsrptrBuffer *> (videoEncParams);
   1103 
   1104             if (usrptrBuffer->size != sizeof (VideoParamsUsrptrBuffer)) {
   1105                 return ENCODE_INVALID_PARAMS;
   1106             }
   1107 
   1108             ret = getNewUsrptrFromSurface(
   1109                     usrptrBuffer->width, usrptrBuffer->height, usrptrBuffer->format,
   1110                     usrptrBuffer->expectedSize, &(usrptrBuffer->actualSize),
   1111                     &(usrptrBuffer->stride), &(usrptrBuffer->usrPtr));
   1112 
   1113             break;
   1114         }
   1115 
   1116         case VideoParamsTypeHRD: {
   1117             VideoParamsHRD *hrd =
   1118                     reinterpret_cast <VideoParamsHRD *> (videoEncParams);
   1119 
   1120             if (hrd->size != sizeof (VideoParamsHRD)) {
   1121                 return ENCODE_INVALID_PARAMS;
   1122             }
   1123 
   1124             hrd->bufferSize = mHrdParam.bufferSize;
   1125             hrd->initBufferFullness = mHrdParam.initBufferFullness;
   1126 
   1127             break;
   1128         }
   1129 
   1130         case VideoParamsTypeStoreMetaDataInBuffers: {
   1131             VideoParamsStoreMetaDataInBuffers *metadata =
   1132                     reinterpret_cast <VideoParamsStoreMetaDataInBuffers *> (videoEncParams);
   1133 
   1134             if (metadata->size != sizeof (VideoParamsStoreMetaDataInBuffers)) {
   1135                 return ENCODE_INVALID_PARAMS;
   1136             }
   1137 
   1138             metadata->isEnabled = mStoreMetaDataInBuffers.isEnabled;
   1139 
   1140             break;
   1141         }
   1142 
   1143         case VideoParamsTypeProfileLevel: {
   1144             VideoParamsProfileLevel *profilelevel =
   1145                 reinterpret_cast <VideoParamsProfileLevel *> (videoEncParams);
   1146 
   1147             if (profilelevel->size != sizeof (VideoParamsProfileLevel)) {
   1148                 return ENCODE_INVALID_PARAMS;
   1149             }
   1150 
   1151             profilelevel->level = 0;
   1152             if(queryProfileLevelConfig(mVADisplay, profilelevel->profile) == ENCODE_SUCCESS){
   1153                 profilelevel->isSupported = true;
   1154                 if(profilelevel->profile == VAProfileH264High)
   1155                     profilelevel->level = 42;
   1156                 else if(profilelevel->profile == VAProfileH264Main)
   1157                      profilelevel->level = 42;
   1158                 else if(profilelevel->profile == VAProfileH264Baseline)
   1159                      profilelevel->level = 41;
   1160                 else{
   1161                     profilelevel->level = 0;
   1162                     profilelevel->isSupported = false;
   1163                 }
            }
            break;
        }
   1166 
   1167         case VideoParamsTypeTemporalLayer:{
   1168             VideoParamsTemporalLayer *temporallayer =
   1169                 reinterpret_cast <VideoParamsTemporalLayer *> (videoEncParams);
   1170 
   1171             if(temporallayer->size != sizeof(VideoParamsTemporalLayer)) {
   1172                 return ENCODE_INVALID_PARAMS;
   1173             }
   1174 
   1175             temporallayer->numberOfLayer = mComParams.numberOfLayer;
   1176 
   1177             break;
   1178         }
   1179 
   1180         case VideoParamsTypeAVC:
   1181         case VideoParamsTypeH263:
   1182         case VideoParamsTypeMP4:
   1183         case VideoParamsTypeVC1:
   1184         case VideoParamsTypeVP8: {
   1185             derivedGetParams(videoEncParams);
   1186             break;
   1187         }
   1188 
   1189         default: {
   1190             LOG_E ("Wrong ParamType here\n");
   1191             break;
   1192         }
   1193 
   1194     }
   1195     return ret;
   1196 }
   1197 
   1198 Encode_Status VideoEncoderBase::setConfig(VideoParamConfigSet *videoEncConfig) {
   1199 
   1200     Encode_Status ret = ENCODE_SUCCESS;
   1201     CHECK_NULL_RETURN_IFFAIL(videoEncConfig);
   1202     LOG_V("Config type = %d\n", (int)videoEncConfig->type);
   1203 
   1204    // workaround
   1205 #if 0
   1206     if (!mStarted) {
   1207         LOG_E("Encoder has not initialized yet, can't call setConfig\n");
   1208         return ENCODE_NOT_INIT;
   1209     }
   1210 #endif
   1211 
   1212     switch (videoEncConfig->type) {
   1213         case VideoConfigTypeFrameRate: {
   1214             VideoConfigFrameRate *configFrameRate =
   1215                     reinterpret_cast <VideoConfigFrameRate *> (videoEncConfig);
   1216 
   1217             if (configFrameRate->size != sizeof (VideoConfigFrameRate)) {
   1218                 return ENCODE_INVALID_PARAMS;
   1219             }
   1220             mComParams.frameRate = configFrameRate->frameRate;
   1221             mRenderFrameRate = true;
   1222             break;
   1223         }
   1224 
   1225         case VideoConfigTypeBitRate: {
   1226             VideoConfigBitRate *configBitRate =
   1227                     reinterpret_cast <VideoConfigBitRate *> (videoEncConfig);
   1228 
   1229             if (configBitRate->size != sizeof (VideoConfigBitRate)) {
   1230                 return ENCODE_INVALID_PARAMS;
   1231             }
   1232 
   1233             if(mComParams.numberOfLayer == 1)
   1234             {
   1235                 mComParams.rcParams = configBitRate->rcParams;
   1236                 mRenderBitRate = true;
   1237             }
   1238             else
   1239             {
   1240                 mTemporalLayerBitrateFramerate[configBitRate->rcParams.temporalID].nLayerID = configBitRate->rcParams.temporalID;
   1241                 mTemporalLayerBitrateFramerate[configBitRate->rcParams.temporalID].bitRate = configBitRate->rcParams.bitRate;
   1242                 mTemporalLayerBitrateFramerate[configBitRate->rcParams.temporalID].frameRate = configBitRate->rcParams.temporalFrameRate;
   1243             }
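            // With more than one temporal layer the new bitrate/framerate is stored
            // per layer in mTemporalLayerBitrateFramerate rather than overwriting the
            // global rcParams.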
   1244             break;
   1245         }
   1246 
   1247         case VideoConfigTypeResolution: {
   1248 
   1249             // Not Implemented
   1250             break;
   1251         }
   1252         case VideoConfigTypeIntraRefreshType: {
   1253 
   1254             VideoConfigIntraRefreshType *configIntraRefreshType =
   1255                     reinterpret_cast <VideoConfigIntraRefreshType *> (videoEncConfig);
   1256 
   1257             if (configIntraRefreshType->size != sizeof (VideoConfigIntraRefreshType)) {
   1258                 return ENCODE_INVALID_PARAMS;
   1259             }
   1260             mComParams.refreshType = configIntraRefreshType->refreshType;
   1261             break;
   1262         }
   1263 
   1264         case VideoConfigTypeCyclicFrameInterval: {
   1265             VideoConfigCyclicFrameInterval *configCyclicFrameInterval =
   1266                     reinterpret_cast <VideoConfigCyclicFrameInterval *> (videoEncConfig);
   1267             if (configCyclicFrameInterval->size != sizeof (VideoConfigCyclicFrameInterval)) {
   1268                 return ENCODE_INVALID_PARAMS;
   1269             }
   1270 
   1271             mComParams.cyclicFrameInterval = configCyclicFrameInterval->cyclicFrameInterval;
   1272             break;
   1273         }
   1274 
   1275         case VideoConfigTypeAIR: {
   1276 
   1277             VideoConfigAIR *configAIR = reinterpret_cast <VideoConfigAIR *> (videoEncConfig);
   1278 
   1279             if (configAIR->size != sizeof (VideoConfigAIR)) {
   1280                 return ENCODE_INVALID_PARAMS;
   1281             }
   1282 
   1283             mComParams.airParams = configAIR->airParams;
   1284             mRenderAIR = true;
   1285             break;
   1286         }
   1287         case VideoConfigTypeCIR: {
   1288 
   1289             VideoConfigCIR *configCIR = reinterpret_cast <VideoConfigCIR *> (videoEncConfig);
   1290 
   1291             if (configCIR->size != sizeof (VideoConfigCIR)) {
   1292                 return ENCODE_INVALID_PARAMS;
   1293             }
   1294 
   1295             mComParams.cirParams = configCIR->cirParams;
   1296             mRenderCIR = true;
   1297             break;
   1298         }
   1299         case VideoConfigTypeAVCIntraPeriod:
   1300         case VideoConfigTypeNALSize:
   1301         case VideoConfigTypeIDRRequest:
   1302         case VideoConfigTypeSliceNum:
   1303         case VideoConfigTypeVP8:
   1304         case VideoConfigTypeVP8ReferenceFrame:
   1305         case VideoConfigTypeVP8MaxFrameSizeRatio:{
   1306             ret = derivedSetConfig(videoEncConfig);
   1307             break;
   1308         }
   1309         default: {
   1310             LOG_E ("Wrong Config Type here\n");
   1311             break;
   1312         }
   1313     }
   1314     return ret;
   1315 }
   1316 
   1317 Encode_Status VideoEncoderBase::getConfig(VideoParamConfigSet *videoEncConfig) {
   1318 
   1319     Encode_Status ret = ENCODE_SUCCESS;
   1320     CHECK_NULL_RETURN_IFFAIL(videoEncConfig);
   1321     LOG_V("Config type = %d\n", (int)videoEncConfig->type);
   1322 
   1323     switch (videoEncConfig->type) {
   1324         case VideoConfigTypeFrameRate: {
   1325             VideoConfigFrameRate *configFrameRate =
   1326                     reinterpret_cast <VideoConfigFrameRate *> (videoEncConfig);
   1327 
   1328             if (configFrameRate->size != sizeof (VideoConfigFrameRate)) {
   1329                 return ENCODE_INVALID_PARAMS;
   1330             }
   1331 
   1332             configFrameRate->frameRate = mComParams.frameRate;
   1333             break;
   1334         }
   1335 
   1336         case VideoConfigTypeBitRate: {
   1337             VideoConfigBitRate *configBitRate =
   1338                     reinterpret_cast <VideoConfigBitRate *> (videoEncConfig);
   1339 
   1340             if (configBitRate->size != sizeof (VideoConfigBitRate)) {
   1341                 return ENCODE_INVALID_PARAMS;
   1342             }
   1343             configBitRate->rcParams = mComParams.rcParams;
   1344 
   1345 
   1346             break;
   1347         }
   1348         case VideoConfigTypeResolution: {
   1349             // Not Implemented
   1350             break;
   1351         }
   1352         case VideoConfigTypeIntraRefreshType: {
   1353 
   1354             VideoConfigIntraRefreshType *configIntraRefreshType =
   1355                     reinterpret_cast <VideoConfigIntraRefreshType *> (videoEncConfig);
   1356 
   1357             if (configIntraRefreshType->size != sizeof (VideoConfigIntraRefreshType)) {
   1358                 return ENCODE_INVALID_PARAMS;
   1359             }
   1360             configIntraRefreshType->refreshType = mComParams.refreshType;
   1361             break;
   1362         }
   1363 
   1364         case VideoConfigTypeCyclicFrameInterval: {
   1365             VideoConfigCyclicFrameInterval *configCyclicFrameInterval =
   1366                     reinterpret_cast <VideoConfigCyclicFrameInterval *> (videoEncConfig);
   1367             if (configCyclicFrameInterval->size != sizeof (VideoConfigCyclicFrameInterval)) {
   1368                 return ENCODE_INVALID_PARAMS;
   1369             }
   1370 
   1371             configCyclicFrameInterval->cyclicFrameInterval = mComParams.cyclicFrameInterval;
   1372             break;
   1373         }
   1374 
   1375         case VideoConfigTypeAIR: {
   1376 
   1377             VideoConfigAIR *configAIR = reinterpret_cast <VideoConfigAIR *> (videoEncConfig);
   1378 
   1379             if (configAIR->size != sizeof (VideoConfigAIR)) {
   1380                 return ENCODE_INVALID_PARAMS;
   1381             }
   1382 
   1383             configAIR->airParams = mComParams.airParams;
   1384             break;
   1385         }
   1386         case VideoConfigTypeCIR: {
   1387 
   1388             VideoConfigCIR *configCIR = reinterpret_cast <VideoConfigCIR *> (videoEncConfig);
   1389 
   1390             if (configCIR->size != sizeof (VideoConfigCIR)) {
   1391                 return ENCODE_INVALID_PARAMS;
   1392             }
   1393 
   1394             configCIR->cirParams = mComParams.cirParams;
   1395             break;
   1396         }
   1397         case VideoConfigTypeAVCIntraPeriod:
   1398         case VideoConfigTypeNALSize:
   1399         case VideoConfigTypeIDRRequest:
   1400         case VideoConfigTypeSliceNum:
   1401         case VideoConfigTypeVP8: {
   1402 
   1403             ret = derivedGetConfig(videoEncConfig);
   1404             break;
   1405         }
   1406         default: {
    1407             LOG_E ("Wrong Config Type here\n");
   1408             break;
   1409         }
   1410     }
   1411     return ret;
   1412 }
   1413 
   1414 void VideoEncoderBase:: PrepareFrameInfo (EncodeTask* task) {
   1415     if (mNewHeader) mFrameNum = 0;
   1416     LOG_V( "mFrameNum = %d   ", mFrameNum);
   1417 
   1418     updateFrameInfo(task) ;
   1419 }
   1420 
   1421 Encode_Status VideoEncoderBase:: updateFrameInfo (EncodeTask* task) {
   1422 
   1423     task->type = FTYPE_P;
   1424 
   1425     // determine the picture type
   1426     if (mFrameNum == 0)
   1427         task->type = FTYPE_I;
   1428     if (mComParams.intraPeriod != 0 && ((mFrameNum % mComParams.intraPeriod) == 0))
   1429         task->type = FTYPE_I;
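             // e.g. with intraPeriod == 30, frames 0, 30, 60, ... are encoded as I frames; all other frames remain P frames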
   1430 
   1431     if (task->type == FTYPE_I)
   1432         task->flag |= ENCODE_BUFFERFLAG_SYNCFRAME;
   1433 
   1434     return ENCODE_SUCCESS;
   1435 }
   1436 
   1437 Encode_Status  VideoEncoderBase::getMaxOutSize (uint32_t *maxSize) {
   1438 
   1439     uint32_t size = mComParams.resolution.width * mComParams.resolution.height;
   1440 
   1441     if (maxSize == NULL) {
   1442         LOG_E("maxSize == NULL\n");
   1443         return ENCODE_NULL_PTR;
   1444     }
   1445 
   1446     LOG_V( "Begin\n");
   1447 
   1448     if (mCodedBufSize > 0) {
   1449         *maxSize = mCodedBufSize;
    1450         LOG_V ("Max encoded size already calculated, returning the cached value directly");
   1451         return ENCODE_SUCCESS;
   1452     }
   1453 
   1454     // here, VP8 is different from AVC/H263
   1455     if(mComParams.profile == VAProfileVP8Version0_3) // for VP8 encode
   1456     {
    1457         // According to VIED suggestions, in CBR mode the coded buffer should be 3 bytes per luma pixel;
    1458         // in CBR_HRD mode the coded buffer size should be 5 * rc_buf_sz * rc_target_bitrate.
    1459         // For now we hardcode mCodedBufSize to 2MB to work around a coded buffer size issue;
   1460         /*
   1461         if(mComParams.rcMode == VA_RC_CBR) // CBR_HRD mode
   1462             mCodedBufSize = 5 * mComParams.rcParams.bitRate * 6000;
   1463         else // CBR mode
   1464             mCodedBufSize = 3 * mComParams.resolution.width * mComParams.resolution.height;
   1465         */
   1466         mCodedBufSize = (2 * 1024 * 1024 + 31) & (~31);
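                 // (2 * 1024 * 1024 + 31) & ~31 rounds 2MB up to a 32-byte boundary; 2MB is already aligned, so the result is 2097152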
   1467     }
   1468     else // for AVC/H263/MPEG4 encode
   1469     {
    1470         // Based on the rate control mode, calculate the default encoded buffer size
   1471         if (mComParams.rcMode == VA_RC_NONE) {
   1472              mCodedBufSize = (size * 400) / (16 * 16);
    1473              // no rate control: the size depends mainly on QP, so estimate ~400 bytes per 16x16 macroblock
   1474         } else {
   1475              mCodedBufSize = mComParams.rcParams.bitRate / 4;
   1476         }
   1477 
   1478         mCodedBufSize = max (mCodedBufSize , (size * 400) / (16 * 16));
   1479 
    1480         // cap the size in case the user supplied a very large bit rate value
   1481         mCodedBufSize = min(mCodedBufSize, (size * 1.5 * 8));
   1482         mCodedBufSize =  (mCodedBufSize + 15) &(~15);
   1483     }
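             // Illustrative example: at 1280x720 with VA_RC_NONE, size = 921600 pixels, so
             // mCodedBufSize = 921600 * 400 / 256 = 1440000 bytes, which is below the
             // 12-bytes-per-pixel cap and already 16-byte aligned.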
   1484 
   1485     *maxSize = mCodedBufSize;
   1486     return ENCODE_SUCCESS;
   1487 }
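         // Illustrative usage by a hypothetical caller (not part of this class): query the
         // worst-case coded size once and allocate the output bitstream buffer from it, e.g.
         //     uint32_t maxSize = 0;
         //     if (encoder->getMaxOutSize(&maxSize) == ENCODE_SUCCESS)
         //         codedData = (uint8_t *)malloc(maxSize);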
   1488 
   1489 Encode_Status VideoEncoderBase::getNewUsrptrFromSurface(
   1490     uint32_t width, uint32_t height, uint32_t format,
   1491     uint32_t expectedSize, uint32_t *outsize, uint32_t *stride, uint8_t **usrptr) {
   1492 
   1493     Encode_Status ret = ENCODE_FAIL;
   1494     VAStatus vaStatus = VA_STATUS_SUCCESS;
   1495 
   1496     VASurfaceID surface = VA_INVALID_SURFACE;
   1497     VAImage image;
   1498     uint32_t index = 0;
   1499 
   1500     LOG_V( "Begin\n");
    1501     // Once the encode session has been started, surface creation can no longer be requested
    1502     if (mStarted) {
    1503         LOG_E( "Already started, cannot request a VA surface anymore\n");
   1504         return ENCODE_WRONG_STATE;
   1505     }
    1506     if (width <= 0 || height <= 0 || outsize == NULL || stride == NULL || usrptr == NULL) {
    1507         LOG_E("width <= 0 || height <= 0 || outsize == NULL || stride == NULL || usrptr == NULL\n");
   1508         return ENCODE_NULL_PTR;
   1509     }
   1510 
    1511     // Currently only NV12 is supported by the VA API
    1512     // The format tells us the number of planes
   1513     if (format != STRING_TO_FOURCC("NV12")) {
   1514         LOG_W ("Format is not supported\n");
   1515         return ENCODE_NOT_SUPPORTED;
   1516     }
   1517 
   1518     surface = CreateNewVASurface(mVADisplay, width, height);
   1519     if (surface == VA_INVALID_SURFACE)
   1520         return ENCODE_DRIVER_FAIL;
   1521 
   1522     vaStatus = vaDeriveImage(mVADisplay, surface, &image);
   1523     CHECK_VA_STATUS_RETURN("vaDeriveImage");
   1524     LOG_V( "vaDeriveImage Done\n");
   1525     vaStatus = vaMapBuffer(mVADisplay, image.buf, (void **) usrptr);
   1526     CHECK_VA_STATUS_RETURN("vaMapBuffer");
   1527 
    1528     // Touch one byte per 4KB page to make sure the physical pages have been allocated
    1529     for (index = 0; index < image.data_size; index = index + 4096) {
    1530         unsigned char tmp =  *(*usrptr + index);
    1531         if (tmp == 0)
    1532             *(*usrptr + index) = 0; // writing the value back unchanged commits the page without altering data
   1533     }
   1534 
   1535     *outsize = image.data_size;
   1536     *stride = image.pitches[0];
   1537 
   1538     LOG_V( "surface = 0x%08x\n",(uint32_t)surface);
   1539     LOG_V("image->pitches[0] = %d\n", image.pitches[0]);
   1540     LOG_V("image->pitches[1] = %d\n", image.pitches[1]);
   1541     LOG_V("image->offsets[0] = %d\n", image.offsets[0]);
   1542     LOG_V("image->offsets[1] = %d\n", image.offsets[1]);
   1543     LOG_V("image->num_planes = %d\n", image.num_planes);
   1544     LOG_V("image->width = %d\n", image.width);
   1545     LOG_V("image->height = %d\n", image.height);
   1546     LOG_V("data_size = %d\n", image.data_size);
   1547     LOG_V("usrptr = 0x%p\n", *usrptr);
   1548 
   1549     vaStatus = vaUnmapBuffer(mVADisplay, image.buf);
   1550     CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
   1551     vaStatus = vaDestroyImage(mVADisplay, image.image_id);
   1552     CHECK_VA_STATUS_RETURN("vaDestroyImage");
   1553 
   1554     if (*outsize < expectedSize) {
    1555         LOG_E ("Allocated buffer size is smaller than the expected size, destroying the surface");
   1556         LOG_I ("Allocated size is %d, expected size is %d\n", *outsize, expectedSize);
   1557         vaStatus = vaDestroySurfaces(mVADisplay, &surface, 1);
   1558         CHECK_VA_STATUS_RETURN("vaDestroySurfaces");
   1559         return ENCODE_FAIL;
   1560     }
   1561 
   1562     VASurfaceMap *map = new VASurfaceMap(mVADisplay, mSupportedSurfaceMemType);
   1563     if (map == NULL) {
   1564         LOG_E( "new VASurfaceMap failed\n");
   1565         return ENCODE_NO_MEMORY;
   1566     }
   1567 
    1568     map->setVASurface(surface);  // special case: the VA surface is already set, so doMapping has nothing to do
   1569 //    map->setType(MetadataBufferTypeEncoder);
   1570     map->setValue((intptr_t)*usrptr);
   1571     ValueInfo vinfo;
   1572     memset(&vinfo, 0, sizeof(ValueInfo));
   1573     vinfo.mode = (MemMode)MEM_MODE_USRPTR;
   1574     vinfo.handle = 0;
   1575     vinfo.size = 0;
   1576     vinfo.width = width;
   1577     vinfo.height = height;
   1578     vinfo.lumaStride = width;
   1579     vinfo.chromStride = width;
   1580     vinfo.format = VA_FOURCC_NV12;
   1581     vinfo.s3dformat = 0xffffffff;
   1582     map->setValueInfo(vinfo);
   1583     map->doMapping();
   1584 
   1585     mSrcSurfaceMapList.push_back(map);
   1586 
   1587     ret = ENCODE_SUCCESS;
   1588 
   1589     return ret;
   1590 }
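         // Illustrative usage by a hypothetical upstream component (not part of this file):
         // request an NV12 buffer backed by a VA surface before the encoder is started, e.g.
         //     uint8_t *ptr = NULL; uint32_t outSize = 0, stride = 0;
         //     encoder->getNewUsrptrFromSurface(1280, 720, STRING_TO_FOURCC("NV12"),
         //             1280 * 720 * 3 / 2, &outSize, &stride, &ptr);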
   1591 
   1592 Encode_Status VideoEncoderBase::setUpstreamBuffer(VideoParamsUpstreamBuffer *upStreamBuffer) {
   1593 
   1594     Encode_Status status = ENCODE_SUCCESS;
   1595 
   1596     CHECK_NULL_RETURN_IFFAIL(upStreamBuffer);
   1597     if (upStreamBuffer->bufCnt == 0) {
   1598         LOG_E("bufCnt == 0\n");
   1599         return ENCODE_FAIL;
   1600     }
   1601 
   1602     for(unsigned int i=0; i < upStreamBuffer->bufCnt; i++) {
   1603         if (findSurfaceMapByValue(upStreamBuffer->bufList[i]) != NULL)  //already mapped
   1604             continue;
   1605 
   1606         //wrap upstream buffer into vaSurface
   1607         VASurfaceMap *map = new VASurfaceMap(mVADisplay, mSupportedSurfaceMemType);
   1608 
   1609 //        map->setType(MetadataBufferTypeUser);
   1610         map->setValue(upStreamBuffer->bufList[i]);
   1611         ValueInfo vinfo;
   1612         memset(&vinfo, 0, sizeof(ValueInfo));
   1613         vinfo.mode = (MemMode)upStreamBuffer->bufferMode;
   1614         vinfo.handle = (intptr_t)upStreamBuffer->display;
   1615         vinfo.size = 0;
   1616         if (upStreamBuffer->bufAttrib) {
   1617             vinfo.width = upStreamBuffer->bufAttrib->realWidth;
   1618             vinfo.height = upStreamBuffer->bufAttrib->realHeight;
   1619             vinfo.lumaStride = upStreamBuffer->bufAttrib->lumaStride;
   1620             vinfo.chromStride = upStreamBuffer->bufAttrib->chromStride;
   1621             vinfo.format = upStreamBuffer->bufAttrib->format;
   1622         }
   1623         vinfo.s3dformat = 0xFFFFFFFF;
   1624         map->setValueInfo(vinfo);
   1625         status = map->doMapping();
   1626 
   1627         if (status == ENCODE_SUCCESS)
   1628             mSrcSurfaceMapList.push_back(map);
    1629         else
    1630             delete map;
   1631     }
   1632 
   1633     return status;
   1634 }
   1635 
   1636 Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer, VASurfaceID *sid) {
   1637 
   1638     Encode_Status ret = ENCODE_SUCCESS;
   1639     IntelMetadataBufferType type;
   1640     intptr_t value;
   1641     ValueInfo vinfo;
   1642     ValueInfo *pvinfo = &vinfo;
   1643     intptr_t *extravalues = NULL;
   1644     unsigned int extravalues_count = 0;
   1645 
   1646     IntelMetadataBuffer imb;
   1647     VASurfaceMap *map = NULL;
   1648 
   1649     memset(&vinfo, 0, sizeof(ValueInfo));
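             // Resolve the incoming buffer to a VA surface: first look it up in the cached
             // surface map list; if it is not cached, build a ValueInfo (from the metadata
             // buffer or from raw-mode defaults) and create a new mapping below.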
   1650     if (mStoreMetaDataInBuffers.isEnabled) {
   1651         //metadatabuffer mode
   1652         LOG_V("in metadata mode, data=%p, size=%d\n", inBuffer->data, inBuffer->size);
   1653         if (imb.UnSerialize(inBuffer->data, inBuffer->size) != IMB_SUCCESS) {
   1654             //fail to parse buffer
   1655             return ENCODE_NO_REQUEST_DATA;
   1656         }
   1657 
   1658         imb.GetType(type);
   1659         imb.GetValue(value);
   1660     } else {
   1661         //raw mode
   1662         LOG_I("in raw mode, data=%p, size=%d\n", inBuffer->data, inBuffer->size);
   1663         if (! inBuffer->data || inBuffer->size == 0) {
   1664             return ENCODE_NULL_PTR;
   1665         }
   1666 
   1667         type = IntelMetadataBufferTypeUser;
   1668         value = (intptr_t)inBuffer->data;
   1669     }
   1670 
   1671 #ifdef INTEL_VIDEO_XPROC_SHARING
   1672     uint32_t sflag = mSessionFlag;
   1673     imb.GetSessionFlag(mSessionFlag);
   1674     if (mSessionFlag != sflag) {
   1675         //new sharing session, flush buffer sharing cache
   1676         IntelMetadataBuffer::ClearContext(sflag, false);
   1677         //flush surfacemap cache
   1678         LOG_V( "Flush Src Surface Map\n");
   1679         while(! mSrcSurfaceMapList.empty())
   1680         {
   1681             delete (*mSrcSurfaceMapList.begin());
   1682             mSrcSurfaceMapList.erase(mSrcSurfaceMapList.begin());
   1683         }
   1684     }
   1685 #endif
   1686 
   1687     //find if mapped
   1688     map = (VASurfaceMap*) findSurfaceMapByValue(value);
   1689 
   1690     if (map) {
    1691         // already mapped: get the surface ID directly and perform the per-frame mapping actions
    1692         LOG_V("found surface %d directly from cached value %p\n", map->getVASurface(), (void*)value);
   1693         *sid = map->getVASurface();
   1694         map->doMapping();
   1695         return ret;
   1696     }
   1697 
    1698     // not found in the list, so try to map the value using whatever parameters are available
    1699     LOG_V("no cached surface for value %p, start mapping if there is enough information\n", (void*)value);
   1700 
   1701     if (mStoreMetaDataInBuffers.isEnabled) {
   1702 
   1703         //if type is IntelMetadataBufferTypeGrallocSource, use default parameters since no ValueInfo
   1704         if (type == IntelMetadataBufferTypeGrallocSource) {
   1705             vinfo.mode = MEM_MODE_GFXHANDLE;
   1706             vinfo.handle = 0;
   1707             vinfo.size = 0;
   1708             vinfo.width = mComParams.resolution.width;
   1709             vinfo.height = mComParams.resolution.height;
   1710             vinfo.lumaStride = mComParams.resolution.width;
   1711             vinfo.chromStride = mComParams.resolution.width;
   1712             vinfo.format = VA_FOURCC_NV12;
   1713             vinfo.s3dformat = 0xFFFFFFFF;
   1714         } else {
   1715             //get all info mapping needs
   1716             imb.GetValueInfo(pvinfo);
   1717             imb.GetExtraValues(extravalues, extravalues_count);
   1718         }
   1719 
   1720     } else {
   1721 
   1722         //raw mode
   1723         vinfo.mode = MEM_MODE_MALLOC;
   1724         vinfo.handle = 0;
   1725         vinfo.size = inBuffer->size;
   1726         vinfo.width = mComParams.resolution.width;
   1727         vinfo.height = mComParams.resolution.height;
   1728         vinfo.lumaStride = mComParams.resolution.width;
   1729         vinfo.chromStride = mComParams.resolution.width;
   1730         vinfo.format = VA_FOURCC_NV12;
   1731         vinfo.s3dformat = 0xFFFFFFFF;
   1732     }
   1733 
    1734     /*  Start mapping: if pvinfo is not NULL, there is enough info to map;
    1735      *  if extravalues is not NULL, additional values need to be mapped with the same info.
    1736      */
   1737     if (pvinfo){
   1738         //map according info, and add to surfacemap list
   1739         map = new VASurfaceMap(mVADisplay, mSupportedSurfaceMemType);
   1740         map->setValue(value);
   1741         map->setValueInfo(*pvinfo);
   1742         map->setAction(mVASurfaceMappingAction);
   1743 
   1744         ret = map->doMapping();
   1745         if (ret == ENCODE_SUCCESS) {
    1746             LOG_V("surface mapping succeeded, mapped value %p to surface %d\n", (void*)value, map->getVASurface());
   1747             mSrcSurfaceMapList.push_back(map);
   1748         } else {
   1749             delete map;
    1750             LOG_E("surface mapping failed, invalid info or a serious error occurred\n");
   1751             return ret;
   1752         }
   1753 
   1754         *sid = map->getVASurface();
   1755 
   1756     } else {
    1757         // cannot map: no ValueInfo is available
   1758         LOG_E("surface mapping failed, missing information\n");
   1759         return ENCODE_NO_REQUEST_DATA;
   1760     }
   1761 
   1762     if (extravalues) {
   1763         //map more using same ValueInfo
   1764         for(unsigned int i=0; i<extravalues_count; i++) {
   1765             map = new VASurfaceMap(mVADisplay, mSupportedSurfaceMemType);
   1766             map->setValue(extravalues[i]);
   1767             map->setValueInfo(vinfo);
   1768 
   1769             ret = map->doMapping();
   1770             if (ret == ENCODE_SUCCESS) {
    1771                 LOG_V("extra value mapping succeeded, mapped value %p to surface %d\n", (void*)extravalues[i], map->getVASurface());
   1772                 mSrcSurfaceMapList.push_back(map);
   1773             } else {
   1774                 delete map;
   1775                 map = NULL;
    1776                 LOG_E( "extra value mapping failed, value is %p\n", (void*)extravalues[i]);
   1777             }
   1778         }
   1779     }
   1780 
   1781     return ret;
   1782 }
   1783 
   1784 Encode_Status VideoEncoderBase::renderDynamicBitrate(EncodeTask* task) {
   1785     VAStatus vaStatus = VA_STATUS_SUCCESS;
   1786 
   1787     LOG_V( "Begin\n\n");
    1788     // disabling bit stuffing and frame skipping applies to all rate control modes
   1789 
   1790     VAEncMiscParameterBuffer   *miscEncParamBuf;
   1791     VAEncMiscParameterRateControl *bitrateControlParam;
   1792     VABufferID miscParamBufferID;
   1793 
   1794     vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
   1795             VAEncMiscParameterBufferType,
   1796             sizeof (VAEncMiscParameterBuffer) + sizeof (VAEncMiscParameterRateControl),
   1797             1, NULL,
   1798             &miscParamBufferID);
   1799 
   1800     CHECK_VA_STATUS_RETURN("vaCreateBuffer");
   1801 
   1802     vaStatus = vaMapBuffer(mVADisplay, miscParamBufferID, (void **)&miscEncParamBuf);
   1803     CHECK_VA_STATUS_RETURN("vaMapBuffer");
   1804 
   1805     miscEncParamBuf->type = VAEncMiscParameterTypeRateControl;
   1806     bitrateControlParam = (VAEncMiscParameterRateControl *)miscEncParamBuf->data;
   1807 
   1808     bitrateControlParam->bits_per_second = mComParams.rcParams.bitRate;
   1809     bitrateControlParam->initial_qp = mComParams.rcParams.initQP;
   1810     if(mComParams.rcParams.enableIntraFrameQPControl && (task->type == FTYPE_IDR || task->type == FTYPE_I)) {
   1811         bitrateControlParam->min_qp = mComParams.rcParams.I_minQP;
   1812         bitrateControlParam->max_qp = mComParams.rcParams.I_maxQP;
   1813         mRenderBitRate = true;
   1814         LOG_I("apply I min/max qp for IDR or I frame\n");
   1815     } else {
   1816         bitrateControlParam->min_qp = mComParams.rcParams.minQP;
   1817         bitrateControlParam->max_qp = mComParams.rcParams.maxQP;
   1818         mRenderBitRate = false;
   1819         LOG_I("revert to original min/max qp after IDR or I frame\n");
   1820     }
   1821     bitrateControlParam->target_percentage = mComParams.rcParams.targetPercentage;
   1822     bitrateControlParam->window_size = mComParams.rcParams.windowSize;
   1823     bitrateControlParam->rc_flags.bits.disable_frame_skip = mComParams.rcParams.disableFrameSkip;
   1824     bitrateControlParam->rc_flags.bits.disable_bit_stuffing = mComParams.rcParams.disableBitsStuffing;
   1825     bitrateControlParam->basic_unit_size = 0;
   1826 
   1827     LOG_I("bits_per_second = %d\n", bitrateControlParam->bits_per_second);
   1828     LOG_I("initial_qp = %d\n", bitrateControlParam->initial_qp);
   1829     LOG_I("min_qp = %d\n", bitrateControlParam->min_qp);
   1830     LOG_I("max_qp = %d\n", bitrateControlParam->max_qp);
   1831     LOG_I("target_percentage = %d\n", bitrateControlParam->target_percentage);
   1832     LOG_I("window_size = %d\n", bitrateControlParam->window_size);
   1833     LOG_I("disable_frame_skip = %d\n", bitrateControlParam->rc_flags.bits.disable_frame_skip);
   1834     LOG_I("disable_bit_stuffing = %d\n", bitrateControlParam->rc_flags.bits.disable_bit_stuffing);
   1835 
   1836     vaStatus = vaUnmapBuffer(mVADisplay, miscParamBufferID);
   1837     CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
   1838 
   1839     vaStatus = vaRenderPicture(mVADisplay, mVAContext,
   1840             &miscParamBufferID, 1);
   1841     CHECK_VA_STATUS_RETURN("vaRenderPicture");
   1842 
   1843     return ENCODE_SUCCESS;
   1844 }
   1845 
   1846 
   1847 Encode_Status VideoEncoderBase::renderDynamicFrameRate() {
   1848 
   1849     VAStatus vaStatus = VA_STATUS_SUCCESS;
   1850 
   1851     if (mComParams.rcMode != RATE_CONTROL_VCM) {
   1852 
    1853         LOG_W("Not in VCM mode, but renderDynamicFrameRate was called\n");
   1854         return ENCODE_SUCCESS;
   1855     }
   1856 
   1857     VAEncMiscParameterBuffer   *miscEncParamBuf;
   1858     VAEncMiscParameterFrameRate *frameRateParam;
   1859     VABufferID miscParamBufferID;
   1860 
   1861     vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
   1862             VAEncMiscParameterBufferType,
    1863             sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterFrameRate),
   1864             1, NULL, &miscParamBufferID);
   1865     CHECK_VA_STATUS_RETURN("vaCreateBuffer");
   1866 
   1867     vaStatus = vaMapBuffer(mVADisplay, miscParamBufferID, (void **)&miscEncParamBuf);
   1868     CHECK_VA_STATUS_RETURN("vaMapBuffer");
   1869 
   1870     miscEncParamBuf->type = VAEncMiscParameterTypeFrameRate;
   1871     frameRateParam = (VAEncMiscParameterFrameRate *)miscEncParamBuf->data;
   1872     frameRateParam->framerate =
   1873             (unsigned int) (mComParams.frameRate.frameRateNum + mComParams.frameRate.frameRateDenom/2)
   1874             / mComParams.frameRate.frameRateDenom;
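             // adding frameRateDenom/2 before the integer division rounds to the nearest
             // whole frame rate, e.g. 30000/1001 -> 30 fps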
   1875 
   1876     vaStatus = vaUnmapBuffer(mVADisplay, miscParamBufferID);
   1877     CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
   1878 
   1879     vaStatus = vaRenderPicture(mVADisplay, mVAContext, &miscParamBufferID, 1);
   1880     CHECK_VA_STATUS_RETURN("vaRenderPicture");
   1881 
   1882     LOG_I( "frame rate = %d\n", frameRateParam->framerate);
   1883     return ENCODE_SUCCESS;
   1884 }
   1885 
   1886 Encode_Status VideoEncoderBase::renderHrd() {
   1887 
   1888     VAStatus vaStatus = VA_STATUS_SUCCESS;
   1889 
   1890     VAEncMiscParameterBuffer *miscEncParamBuf;
   1891     VAEncMiscParameterHRD *hrdParam;
   1892     VABufferID miscParamBufferID;
   1893 
   1894     vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
   1895             VAEncMiscParameterBufferType,
    1896             sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterHRD),
   1897             1, NULL, &miscParamBufferID);
   1898     CHECK_VA_STATUS_RETURN("vaCreateBuffer");
   1899 
   1900     vaStatus = vaMapBuffer(mVADisplay, miscParamBufferID, (void **)&miscEncParamBuf);
   1901     CHECK_VA_STATUS_RETURN("vaMapBuffer");
   1902 
   1903     miscEncParamBuf->type = VAEncMiscParameterTypeHRD;
   1904     hrdParam = (VAEncMiscParameterHRD *)miscEncParamBuf->data;
   1905 
   1906     hrdParam->buffer_size = mHrdParam.bufferSize;
   1907     hrdParam->initial_buffer_fullness = mHrdParam.initBufferFullness;
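             // HRD (hypothetical reference decoder) model: buffer_size is the coded picture buffer size and
             // initial_buffer_fullness is how full that buffer is assumed to be before decoding starts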
   1908 
   1909     vaStatus = vaUnmapBuffer(mVADisplay, miscParamBufferID);
   1910     CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
   1911 
   1912     vaStatus = vaRenderPicture(mVADisplay, mVAContext, &miscParamBufferID, 1);
   1913     CHECK_VA_STATUS_RETURN("vaRenderPicture");
   1914 
   1915     return ENCODE_SUCCESS;
   1916 }
   1917 
   1918 VASurfaceMap *VideoEncoderBase::findSurfaceMapByValue(intptr_t value) {
   1919     android::List<VASurfaceMap *>::iterator node;
   1920 
    1921     for (node = mSrcSurfaceMapList.begin(); node != mSrcSurfaceMapList.end(); node++)
    1922     {
    1923         if ((*node)->getValue() == value)
    1924             return *node;
    1925     }
   1928 
   1929     return NULL;
   1930 }
   1931