/*
* Copyright (c) 2009-2011 Intel Corporation.  All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

#include "VideoDecoderAVC.h"
#include "VideoDecoderTrace.h"
#include <string.h>
#include <cutils/properties.h>

// Macros used to calculate the actual number of buffers needed
#define WIDI_CONSUMED   6
#define HDMI_CONSUMED   2
#define NW_CONSUMED     2
#define POC_DEFAULT     0x7FFFFFFF
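// POC_DEFAULT marks an empty slot in the decoded picture buffer (see invalidateDPB()).
// The *_CONSUMED values count buffers held by each consumer (native window, HDMI, WiDi)
// when actualBufferNeeded is computed in updateFormatInfo().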

VideoDecoderAVC::VideoDecoderAVC(const char *mimeType)
    : VideoDecoderBase(mimeType, VBP_H264),
      mToggleDPB(0),
      mErrorConcealment(false) {

    invalidateDPB(0);
    invalidateDPB(1);
    mLastPictureFlags = VA_PICTURE_H264_INVALID;
}

VideoDecoderAVC::~VideoDecoderAVC() {
    stop();
}

Decode_Status VideoDecoderAVC::start(VideoConfigBuffer *buffer) {
    Decode_Status status;

    status = VideoDecoderBase::start(buffer);
    CHECK_STATUS("VideoDecoderBase::start");

    // We don't want the base class to manage references.
    VideoDecoderBase::ManageReference(false);
    // output by picture order count
    VideoDecoderBase::setOutputMethod(OUTPUT_BY_POC);

    mErrorConcealment = buffer->flag & WANT_ERROR_CONCEALMENT;
    if (buffer->data == NULL || buffer->size == 0) {
        WTRACE("No config data to start VA.");
        if ((buffer->flag & HAS_SURFACE_NUMBER) && (buffer->flag & HAS_VA_PROFILE)) {
            ITRACE("Using the client-supplied profile and surface number to start VA.");
            return VideoDecoderBase::setupVA(buffer->surfaceNumber, buffer->profile);
        }
        return DECODE_SUCCESS;
    }

    vbp_data_h264 *data = NULL;
    status = VideoDecoderBase::parseBuffer(buffer->data, buffer->size, true, (void**)&data);
    CHECK_STATUS("VideoDecoderBase::parseBuffer");

    status = startVA(data);
    return status;
}

void VideoDecoderAVC::stop(void) {
    // drop the last frame and ignore the return value
    endDecodingFrame(true);
    VideoDecoderBase::stop();
    invalidateDPB(0);
    invalidateDPB(1);
    mToggleDPB = 0;
    mErrorConcealment = false;
    mLastPictureFlags = VA_PICTURE_H264_INVALID;
}

void VideoDecoderAVC::flush(void) {
    // drop the frame and ignore the return value
    VideoDecoderBase::flush();
    invalidateDPB(0);
    invalidateDPB(1);
    mToggleDPB = 0;
    mLastPictureFlags = VA_PICTURE_H264_INVALID;
}

Decode_Status VideoDecoderAVC::decode(VideoDecodeBuffer *buffer) {
    Decode_Status status;
    vbp_data_h264 *data = NULL;
    if (buffer == NULL) {
        return DECODE_INVALID_DATA;
    }
    status = VideoDecoderBase::parseBuffer(
            buffer->data,
            buffer->size,
            false,
            (void**)&data);
    CHECK_STATUS("VideoDecoderBase::parseBuffer");

    if (!mVAStarted) {
        if (data->has_sps && data->has_pps) {
            status = startVA(data);
            CHECK_STATUS("startVA");
        } else {
            WTRACE("Can't start VA because SPS or PPS is not yet available.");
            return DECODE_SUCCESS;
        }
    }

    VideoDecoderBase::setRotationDegrees(buffer->rotationDegrees);

    status = decodeFrame(buffer, data);
    if (status == DECODE_MULTIPLE_FRAME) {
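        // Expose the packed-frame information so the caller can resubmit the rest of
        // the input buffer starting at mPackedFrame.offSet (set in continueDecodingFrame()).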
        buffer->ext = &mExtensionBuffer;
        mExtensionBuffer.extType = PACKED_FRAME_TYPE;
        mExtensionBuffer.extSize = sizeof(mPackedFrame);
        mExtensionBuffer.extData = (uint8_t*)&mPackedFrame;
    }
    return status;
}

Decode_Status VideoDecoderAVC::decodeFrame(VideoDecodeBuffer *buffer, vbp_data_h264 *data) {
    Decode_Status status;
    if (data->has_sps == 0 || data->has_pps == 0) {
        return DECODE_NO_CONFIG;
    }

    mVideoFormatInfo.flags = 0;
    uint32_t fieldFlags = 0;
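    // Accumulate the field flags of every picture in this buffer so that a lone
    // top or bottom field can be flagged as IS_SINGLE_FIELD below.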
    for (unsigned int i = 0; i < data->num_pictures; i++) {
        VAPictureH264 &pic = data->pic_data[i].pic_parms->CurrPic;
        fieldFlags |= pic.flags;
        // Don't remove the following code; it can be enabled for debugging the DPB.
#if 0
        VTRACE("%d: decoding frame %.2f, poc top = %d, poc bottom = %d, flags = %d,  reference = %d",
                i,
                buffer->timeStamp/1E6,
                pic.TopFieldOrderCnt,
                pic.BottomFieldOrderCnt,
                pic.flags,
                (pic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) ||
                (pic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE));
#endif
    }
    int32_t topField = fieldFlags & VA_PICTURE_H264_TOP_FIELD;
    int32_t botField = fieldFlags & VA_PICTURE_H264_BOTTOM_FIELD;
    if ((topField == 0 && botField != 0) || (topField != 0 && botField == 0)) {
        mVideoFormatInfo.flags |= IS_SINGLE_FIELD;
    }

    if (data->new_sps || data->new_pps) {
        status = handleNewSequence(data);
        CHECK_STATUS("handleNewSequence");
    }

    if (isWiDiStatusChanged()) {
        mSizeChanged = false;
        flushSurfaceBuffers();
        return DECODE_FORMAT_CHANGE;
    }

    // the first pic_data always exists; check if any slice is parsed
    if (data->pic_data[0].num_slices == 0) {
        ITRACE("No slice available for decoding.");
        status = mSizeChanged ? DECODE_FORMAT_CHANGE : DECODE_SUCCESS;
        mSizeChanged = false;
        return status;
    }

    uint64_t lastPTS = mCurrentPTS;
    mCurrentPTS = buffer->timeStamp;
    //if (lastPTS != mCurrentPTS) {
    if (isNewFrame(data, lastPTS == mCurrentPTS)) {
        if (mLowDelay) {
            // start decoding a new frame
            status = beginDecodingFrame(data);
            if (status != DECODE_SUCCESS) {
                Decode_Status st = status;
                // finish decoding the last frame if an error is
                // encountered when starting the new frame
                status = endDecodingFrame(false);
                CHECK_STATUS("endDecodingFrame");
                return st;
            }
        }

        // finish decoding the last frame
        status = endDecodingFrame(false);
        CHECK_STATUS("endDecodingFrame");

        if (!mLowDelay) {
            // start decoding a new frame
            status = beginDecodingFrame(data);
            CHECK_STATUS("beginDecodingFrame");
        }
    } else {
        status = continueDecodingFrame(data);
        CHECK_STATUS("continueDecodingFrame");
    }

    // HAS_COMPLETE_FRAME is not reliable as it may indicate the end of a field
#if 0
    if (buffer->flag & HAS_COMPLETE_FRAME) {
        // finish decoding current frame
        status = endDecodingFrame(false);
        CHECK_STATUS("endDecodingFrame");
    }
#endif
    return DECODE_SUCCESS;
}

Decode_Status VideoDecoderAVC::beginDecodingFrame(vbp_data_h264 *data) {
    Decode_Status status;

    status = acquireSurfaceBuffer();
    CHECK_STATUS("acquireSurfaceBuffer");
    VAPictureH264 *picture = &(data->pic_data[0].pic_parms->CurrPic);
    if ((picture->flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) ||
        (picture->flags & VA_PICTURE_H264_LONG_TERM_REFERENCE)) {
        mAcquiredBuffer->referenceFrame = true;
    } else {
        mAcquiredBuffer->referenceFrame = false;
    }
    // set asReference in updateDPB

    if (picture->flags & VA_PICTURE_H264_TOP_FIELD) {
        mAcquiredBuffer->renderBuffer.scanFormat = VA_BOTTOM_FIELD | VA_TOP_FIELD;
    } else {
        mAcquiredBuffer->renderBuffer.scanFormat = VA_FRAME_PICTURE;
    }

    // TODO: Set the discontinuity flag
    mAcquiredBuffer->renderBuffer.flag = 0;
    mAcquiredBuffer->renderBuffer.timeStamp = mCurrentPTS;
    mAcquiredBuffer->pictureOrder = getPOC(picture);

    if (mSizeChanged) {
        mAcquiredBuffer->renderBuffer.flag |= IS_RESOLUTION_CHANGE;
        mSizeChanged = false;
    }

    status = continueDecodingFrame(data);
    // surface buffer is released if decode fails
    return status;
}


Decode_Status VideoDecoderAVC::continueDecodingFrame(vbp_data_h264 *data) {
    Decode_Status status;
    vbp_picture_data_h264 *picData = data->pic_data;

    // TODO: remove this debugging code
    if (mAcquiredBuffer == NULL || mAcquiredBuffer->renderBuffer.surface == VA_INVALID_SURFACE) {
        ETRACE("mAcquiredBuffer is NULL. Implementation bug.");
        return DECODE_FAIL;
    }
    for (uint32_t picIndex = 0; picIndex < data->num_pictures; picIndex++, picData++) {
        // sanity check
        if (picData == NULL || picData->pic_parms == NULL || picData->slc_data == NULL || picData->num_slices == 0) {
            return DECODE_PARSER_FAIL;
        }

        if (picIndex > 0 &&
            (picData->pic_parms->CurrPic.flags & (VA_PICTURE_H264_TOP_FIELD | VA_PICTURE_H264_BOTTOM_FIELD)) == 0) {
            // it is a packed frame buffer
            vbp_picture_data_h264 *lastPic = &data->pic_data[picIndex - 1];
            vbp_slice_data_h264 *sliceData = &(lastPic->slc_data[lastPic->num_slices - 1]);
            mPackedFrame.offSet = sliceData->slice_size + sliceData->slice_offset;
            mPackedFrame.timestamp = mCurrentPTS; // use the current time stamp for the packed frame
            ITRACE("slice data offset= %d, size = %d", sliceData->slice_offset, sliceData->slice_size);
            return DECODE_MULTIPLE_FRAME;
        }

        for (uint32_t sliceIndex = 0; sliceIndex < picData->num_slices; sliceIndex++) {
            status = decodeSlice(data, picIndex, sliceIndex);
            if (status != DECODE_SUCCESS) {
                endDecodingFrame(true);
                // TODO: this is new code
                // remove the current frame from the DPB as it can't be decoded.
                removeReferenceFromDPB(picData->pic_parms);
                return status;
            }
        }
    }
    return DECODE_SUCCESS;
}

Decode_Status VideoDecoderAVC::decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex) {
    Decode_Status status;
    VAStatus vaStatus;
    uint32_t bufferIDCount = 0;
    // maximum 4 buffers to render a slice: picture parameter, IQMatrix, slice parameter, slice data
    VABufferID bufferIDs[4];

    vbp_picture_data_h264 *picData = &(data->pic_data[picIndex]);
    vbp_slice_data_h264 *sliceData = &(picData->slc_data[sliceIndex]);
    VAPictureParameterBufferH264 *picParam = picData->pic_parms;
    VASliceParameterBufferH264 *sliceParam = &(sliceData->slc_parms);

    if (sliceParam->first_mb_in_slice == 0 || mDecodingFrame == false) {
        // either condition indicates the start of a new frame
        if (sliceParam->first_mb_in_slice != 0) {
            WTRACE("The first slice is lost.");
            // TODO: handle the lost first slice
        }
        if (mDecodingFrame) {
            // interlaced content: complete decoding of the first field
            vaStatus = vaEndPicture(mVADisplay, mVAContext);
            CHECK_VA_STATUS("vaEndPicture");

            // for interlaced content, the top field may be valid only after the second field is parsed
            int32_t poc = getPOC(&(picParam->CurrPic));
            if (poc < mAcquiredBuffer->pictureOrder) {
                mAcquiredBuffer->pictureOrder = poc;
            }
        }

        // Check that there is no reference frame loss before decoding a frame

        // Update the reference frames and surface IDs for the DPB and the current frame
        status = updateDPB(picParam);
        CHECK_STATUS("updateDPB");

#ifndef USE_AVC_SHORT_FORMAT
        // We have to provide a hacked DPB rather than the complete DPB to libva as a workaround
        status = updateReferenceFrames(picData);
        CHECK_STATUS("updateReferenceFrames");
#endif
        vaStatus = vaBeginPicture(mVADisplay, mVAContext, mAcquiredBuffer->renderBuffer.surface);
        CHECK_VA_STATUS("vaBeginPicture");

        // start decoding a frame
        mDecodingFrame = true;

        vaStatus = vaCreateBuffer(
            mVADisplay,
            mVAContext,
            VAPictureParameterBufferType,
            sizeof(VAPictureParameterBufferH264),
            1,
            picParam,
            &bufferIDs[bufferIDCount]);
        CHECK_VA_STATUS("vaCreatePictureParameterBuffer");
        bufferIDCount++;

        vaStatus = vaCreateBuffer(
            mVADisplay,
            mVAContext,
            VAIQMatrixBufferType,
            sizeof(VAIQMatrixBufferH264),
            1,
            data->IQ_matrix_buf,
            &bufferIDs[bufferIDCount]);
        CHECK_VA_STATUS("vaCreateIQMatrixBuffer");
        bufferIDCount++;
    }

#ifndef USE_AVC_SHORT_FORMAT

    status = setReference(sliceParam);
    CHECK_STATUS("setReference");

    vaStatus = vaCreateBuffer(
        mVADisplay,
        mVAContext,
        VASliceParameterBufferType,
        sizeof(VASliceParameterBufferH264),
        1,
        sliceParam,
        &bufferIDs[bufferIDCount]);
#else
    vaStatus = vaCreateBuffer(
        mVADisplay,
        mVAContext,
        VASliceParameterBufferType,
        sizeof(VASliceParameterBufferH264Base),
        1,
        sliceParam,
        &bufferIDs[bufferIDCount]);
#endif
    CHECK_VA_STATUS("vaCreateSliceParameterBuffer");
    bufferIDCount++;

    vaStatus = vaCreateBuffer(
        mVADisplay,
        mVAContext,
        VASliceDataBufferType,
        sliceData->slice_size, //size
        1,        //num_elements
        sliceData->buffer_addr + sliceData->slice_offset,
        &bufferIDs[bufferIDCount]);
    CHECK_VA_STATUS("vaCreateSliceDataBuffer");
    bufferIDCount++;

    vaStatus = vaRenderPicture(
        mVADisplay,
        mVAContext,
        bufferIDs,
        bufferIDCount);
    CHECK_VA_STATUS("vaRenderPicture");

    return DECODE_SUCCESS;
}

Decode_Status VideoDecoderAVC::setReference(VASliceParameterBufferH264 *sliceParam) {
    int32_t numList = 1;
    // TODO: set numList to 0 if it is an I slice
    if (sliceParam->slice_type == 1 || sliceParam->slice_type == 6) {
        // B slice
        numList = 2;
    }

    int32_t activeMinus1 = sliceParam->num_ref_idx_l0_active_minus1;
    VAPictureH264 *ref = sliceParam->RefPicList0;

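    // List 0 is processed first; for a B slice the loop below runs a second pass after
    // ref and activeMinus1 are re-pointed to RefPicList1 at the bottom of the outer loop.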
    for (int32_t i = 0; i < numList; i++) {
        if (activeMinus1 >= REF_LIST_SIZE) {
            ETRACE("Invalid activeMinus1 (%d)", activeMinus1);
            return DECODE_PARSER_FAIL;
        }
        for (int32_t j = 0; j <= activeMinus1; j++, ref++) {
            if (!(ref->flags & VA_PICTURE_H264_INVALID)) {
                ref->picture_id = findSurface(ref);
                if (ref->picture_id == VA_INVALID_SURFACE) {
                    // The DecodeRefMissing error is counted once even if multiple references are missing
                    mAcquiredBuffer->renderBuffer.errBuf.errorNumber = 1;
                    mAcquiredBuffer->renderBuffer.errBuf.errorArray[0].type = DecodeRefMissing;

                    if (mLastReference) {
                        WTRACE("Reference frame %d is missing. Use last reference", getPOC(ref));
                        ref->picture_id = mLastReference->renderBuffer.surface;
                    } else {
                        ETRACE("Reference frame %d is missing. Stop decoding.", getPOC(ref));
                        return DECODE_NO_REFERENCE;
                    }
                }
            }
        }
        activeMinus1 = sliceParam->num_ref_idx_l1_active_minus1;
        ref = sliceParam->RefPicList1;
    }
    return DECODE_SUCCESS;
}

Decode_Status VideoDecoderAVC::updateDPB(VAPictureParameterBufferH264 *picParam) {
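    // Two DPB copies are kept and mToggleDPB flips on every frame: the copy indexed by
    // mToggleDPB is still used for surface lookups (findRefSurfaceBuffer), while the
    // copy indexed by !mToggleDPB is rebuilt below from picParam->ReferenceFrames.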
    clearAsReference(mToggleDPB);
    // pointer to the toggled (new) DPB
    DecodedPictureBuffer *dpb = mDPBs[!mToggleDPB];
    VAPictureH264 *ref = picParam->ReferenceFrames;

    // update the current picture ID
    picParam->CurrPic.picture_id = mAcquiredBuffer->renderBuffer.surface;

    // build the new DPB
    for (int32_t i = 0; i < MAX_REF_NUMBER; i++, ref++) {
        if (ref->flags & VA_PICTURE_H264_INVALID) {
            continue;
        }
#ifdef USE_AVC_SHORT_FORMAT
        ref->picture_id = findSurface(ref);
#endif
        dpb->poc = getPOC(ref);
        // look for the latest ref frame in the DPB with the specified POC, in case frames have the same POC
        dpb->surfaceBuffer = findRefSurfaceBuffer(ref);
        if (dpb->surfaceBuffer == NULL) {
            ETRACE("Reference frame %d is missing for current frame %d", dpb->poc, getPOC(&(picParam->CurrPic)));
            // The DecodeRefMissing error is counted once even if multiple references are missing
            mAcquiredBuffer->renderBuffer.errBuf.errorNumber = 1;
            mAcquiredBuffer->renderBuffer.errBuf.errorArray[0].type = DecodeRefMissing;
            if (dpb->poc == getPOC(&(picParam->CurrPic))) {
                WTRACE("updateDPB: Using the current picture for missing reference.");
                dpb->surfaceBuffer = mAcquiredBuffer;
            } else if (mLastReference) {
                WTRACE("updateDPB: Use last reference frame %d for missing reference.", mLastReference->pictureOrder);
                // TODO: this is new code for error resilience
                dpb->surfaceBuffer = mLastReference;
            } else {
                WTRACE("updateDPB: Unable to recover the missing reference frame.");
                // Continue building the DPB without updating the dpb pointer; this reference may
                // not actually be used, which typically happens after seeking to a non-IDR I frame.
                continue;
                //return DECODE_NO_REFERENCE;
            }
        }
        if (dpb->surfaceBuffer) {
            // this surface is used as a reference
            dpb->surfaceBuffer->asReferernce = true;
        }
        dpb++;
    }

    // add the current frame to the DPB if it is a reference frame
    if ((picParam->CurrPic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) ||
        (picParam->CurrPic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE)) {
        dpb->poc = getPOC(&(picParam->CurrPic));
        dpb->surfaceBuffer = mAcquiredBuffer;
        dpb->surfaceBuffer->asReferernce = true;
    }
    // invalidate the currently used DPB and switch to the new one
    invalidateDPB(mToggleDPB);
    mToggleDPB = !mToggleDPB;
    return DECODE_SUCCESS;
}

Decode_Status VideoDecoderAVC::updateReferenceFrames(vbp_picture_data_h264 *picData) {
    bool found = false;
    uint32_t flags = 0;
    VAPictureParameterBufferH264 *picParam = picData->pic_parms;
    VASliceParameterBufferH264 *sliceParam = NULL;
    uint8_t activeMinus1 = 0;
    VAPictureH264 *refList = NULL;
    VAPictureH264 *dpb = picParam->ReferenceFrames;
    VAPictureH264 *refFrame = NULL;

    for (int i = 0; i < picParam->num_ref_frames; i++) {
        dpb->picture_id = findSurface(dpb);
        dpb++;
    }

    return DECODE_SUCCESS;

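    // NOTE: the early return above short-circuits the list-based DPB rebuild below,
    // so the remainder of this function is currently unreachable.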
    // invalidate the DPB in the picture buffer
    memset(picParam->ReferenceFrames, 0xFF, sizeof(picParam->ReferenceFrames));
    picParam->num_ref_frames = 0;

    // update the DPB from the reference list in each slice.
    for (uint32_t slice = 0; slice < picData->num_slices; slice++) {
        sliceParam = &(picData->slc_data[slice].slc_parms);

        for (int32_t list = 0; list < 2; list++) {
            refList = (list == 0) ? sliceParam->RefPicList0 :
                                    sliceParam->RefPicList1;
            activeMinus1 = (list == 0) ? sliceParam->num_ref_idx_l0_active_minus1 :
                                         sliceParam->num_ref_idx_l1_active_minus1;
            if (activeMinus1 >= REF_LIST_SIZE) {
                return DECODE_PARSER_FAIL;
            }
            for (uint8_t item = 0; item < (uint8_t)(activeMinus1 + 1); item++, refList++) {
                if (refList->flags & VA_PICTURE_H264_INVALID) {
                    break;
                }
                found = false;
                refFrame = picParam->ReferenceFrames;
                for (uint8_t frame = 0; frame < picParam->num_ref_frames; frame++, refFrame++) {
                    if (refFrame->TopFieldOrderCnt == refList->TopFieldOrderCnt) {
                        // check for a complementary field
                        flags = refFrame->flags | refList->flags;
                        // if both TOP and BOTTOM are set, clear the field flags
                        if ((flags & VA_PICTURE_H264_TOP_FIELD) &&
                            (flags & VA_PICTURE_H264_BOTTOM_FIELD)) {
                            refFrame->flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE;
                        }
                        found = true;  // already in the DPB; will not add this one
                        break;
                    }
                }
                if (found == false) {
                    // add a new reference to the DPB
                    dpb->picture_id = findSurface(refList);
                    if (dpb->picture_id == VA_INVALID_SURFACE) {
                        if (mLastReference != NULL) {
                            dpb->picture_id = mLastReference->renderBuffer.surface;
                        } else {
                            ETRACE("Reference frame %d is missing. Stop updating reference frames.", getPOC(refList));
                            return DECODE_NO_REFERENCE;
                        }
                    }
                    dpb->flags = refList->flags;
                    // If it is a bottom field in the DPB, its top field must also be in the DPB,
                    // so clear the bottom-field flag; otherwise VED would be confused when addressing the top field.
                    if (dpb->flags & VA_PICTURE_H264_BOTTOM_FIELD)
                        dpb->flags &= (~VA_PICTURE_H264_BOTTOM_FIELD);
                    dpb->frame_idx = refList->frame_idx;
                    dpb->TopFieldOrderCnt = refList->TopFieldOrderCnt;
                    dpb->BottomFieldOrderCnt = refList->BottomFieldOrderCnt;
                    dpb++;
                    picParam->num_ref_frames++;
                }
            }
        }
    }
    return DECODE_SUCCESS;
}

void VideoDecoderAVC::removeReferenceFromDPB(VAPictureParameterBufferH264 *picParam) {
    // remove the current frame from DPB as it can't be decoded.
    if ((picParam->CurrPic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) ||
        (picParam->CurrPic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE)) {
        DecodedPictureBuffer *dpb = mDPBs[mToggleDPB];
        int32_t poc = getPOC(&(picParam->CurrPic));
        for (int32_t i = 0; i < DPB_SIZE; i++, dpb++) {
            if (poc == dpb->poc) {
                dpb->poc = (int32_t)POC_DEFAULT;
                if (dpb->surfaceBuffer) {
                    dpb->surfaceBuffer->asReferernce = false;
                }
                dpb->surfaceBuffer = NULL;
                break;
            }
        }
    }
}

int32_t VideoDecoderAVC::getPOC(VAPictureH264 *pic) {
    if (pic->flags & VA_PICTURE_H264_BOTTOM_FIELD) {
        return pic->BottomFieldOrderCnt;
    }
    return pic->TopFieldOrderCnt;
}

VASurfaceID VideoDecoderAVC::findSurface(VAPictureH264 *pic) {
    VideoSurfaceBuffer *p = findSurfaceBuffer(pic);
    if (p == NULL) {
        ETRACE("Could not find surface for poc %d", getPOC(pic));
        return VA_INVALID_SURFACE;
    }
    return p->renderBuffer.surface;
}

VideoSurfaceBuffer* VideoDecoderAVC::findSurfaceBuffer(VAPictureH264 *pic) {
    DecodedPictureBuffer *dpb = mDPBs[mToggleDPB];
    for (int32_t i = 0; i < DPB_SIZE; i++, dpb++) {
        if (dpb->poc == pic->BottomFieldOrderCnt ||
            dpb->poc == pic->TopFieldOrderCnt) {
            // TODO: remove this debugging code
            if (dpb->surfaceBuffer == NULL) {
                ETRACE("Invalid surface buffer in the DPB for poc %d.", getPOC(pic));
            }
            return dpb->surfaceBuffer;
        }
    }
    // ETRACE("Unable to find surface for poc %d", getPOC(pic));
    return NULL;
}

VideoSurfaceBuffer* VideoDecoderAVC::findRefSurfaceBuffer(VAPictureH264 *pic) {
    DecodedPictureBuffer *dpb = mDPBs[mToggleDPB];
    // always look for the latest entry in the DPB, in case reference frames have the same POC
    dpb += (DPB_SIZE - 1);
    for (int32_t i = DPB_SIZE; i > 0; i--, dpb--) {
        if (dpb->poc == pic->BottomFieldOrderCnt ||
            dpb->poc == pic->TopFieldOrderCnt) {
            // TODO: remove this debugging code
            if (dpb->surfaceBuffer == NULL) {
                ETRACE("Invalid surface buffer in the DPB for poc %d.", getPOC(pic));
            }
            return dpb->surfaceBuffer;
        }
    }
    ETRACE("Unable to find surface for poc %d", getPOC(pic));
    return NULL;
}

void VideoDecoderAVC::invalidateDPB(int toggle) {
    DecodedPictureBuffer* p = mDPBs[toggle];
    for (int i = 0; i < DPB_SIZE; i++) {
        p->poc = (int32_t) POC_DEFAULT;
        p->surfaceBuffer = NULL;
        p++;
    }
}

void VideoDecoderAVC::clearAsReference(int toggle) {
    DecodedPictureBuffer* p = mDPBs[toggle];
    for (int i = 0; i < DPB_SIZE; i++) {
        if (p->surfaceBuffer) {
            p->surfaceBuffer->asReferernce = false;
        }
        p++;
    }
}

Decode_Status VideoDecoderAVC::startVA(vbp_data_h264 *data) {
    int32_t DPBSize = getDPBSize(data);

    // Use the high profile for all H.264 profiles (baseline, main and high) except constrained baseline
    VAProfile vaProfile = VAProfileH264High;

    // TODO: determine when to use VAProfileH264ConstrainedBaseline; set it only if we are told to do so
    if ((data->codec_data->profile_idc == 66 || data->codec_data->constraint_set0_flag == 1) &&
        data->codec_data->constraint_set1_flag == 1) {
        if (mErrorConcealment) {
            vaProfile = VAProfileH264ConstrainedBaseline;
        }
    }

    VideoDecoderBase::setOutputWindowSize(mConfigBuffer.flag & WANT_ADAPTIVE_PLAYBACK ? OUTPUT_WINDOW_SIZE : DPBSize);
    updateFormatInfo(data);

    // For 1080p, limit the total number of surfaces to 19, according to the hardware limitation.
    // (An earlier workaround reduced the maximum from 19 to 10 for a memory shortage; it has been removed.)
    if (mVideoFormatInfo.height == 1088 && DPBSize + AVC_EXTRA_SURFACE_NUMBER > 19) {
        DPBSize = 19 - AVC_EXTRA_SURFACE_NUMBER;
    }

    return VideoDecoderBase::setupVA(DPBSize + AVC_EXTRA_SURFACE_NUMBER, vaProfile);
}

void VideoDecoderAVC::updateFormatInfo(vbp_data_h264 *data) {
    // new video size
    uint32_t width = (data->pic_data[0].pic_parms->picture_width_in_mbs_minus1 + 1) * 16;
    uint32_t height = (data->pic_data[0].pic_parms->picture_height_in_mbs_minus1 + 1) * 16;
    ITRACE("updateFormatInfo: current size: %d x %d, new size: %d x %d",
        mVideoFormatInfo.width, mVideoFormatInfo.height, width, height);

    if (mConfigBuffer.flag & WANT_ADAPTIVE_PLAYBACK) {
        // When adaptive playback is enabled, turn off low delay mode.
        // Otherwise there may be a 240ms stutter if the output mode is changed from LowDelay to Delay.
        enableLowDelayMode(false);
    } else {
        // for the baseline profile or constrained high profile, enable low delay mode automatically
        enableLowDelayMode((data->codec_data->profile_idc == 66) || (data->codec_data->profile_idc == 100 && data->codec_data->constraint_set4_flag == 1 && data->codec_data->constraint_set5_flag == 1));
    }

    if ((mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER) && mStoreMetaData) {
        pthread_mutex_lock(&mFormatLock);
    }

    if ((mVideoFormatInfo.width != width ||
        mVideoFormatInfo.height != height) &&
        width && height) {
        if (VideoDecoderBase::alignMB(mVideoFormatInfo.width) != width ||
            VideoDecoderBase::alignMB(mVideoFormatInfo.height) != height) {
            mSizeChanged = true;
            ITRACE("Video size is changed.");
        }
        mVideoFormatInfo.width = width;
        mVideoFormatInfo.height = height;
    }

    // video_range has a default value of 0.
    mVideoFormatInfo.videoRange = data->codec_data->video_full_range_flag;

    switch (data->codec_data->matrix_coefficients) {
        case 1:
            mVideoFormatInfo.colorMatrix = VA_SRC_BT709;
            break;

        // ITU-R Recommendation BT.470-6 System B, G (MP4), same as
        // SMPTE 170M/BT601
        case 5:
        case 6:
            mVideoFormatInfo.colorMatrix = VA_SRC_BT601;
            break;

        default:
            // unknown color matrix; set to 0 so the color space flag will not be set.
            mVideoFormatInfo.colorMatrix = 0;
            break;
    }
    mVideoFormatInfo.aspectX = data->codec_data->sar_width;
    mVideoFormatInfo.aspectY = data->codec_data->sar_height;
    mVideoFormatInfo.bitrate = data->codec_data->bit_rate;
    mVideoFormatInfo.cropLeft = data->codec_data->crop_left;
    mVideoFormatInfo.cropRight = data->codec_data->crop_right;
    mVideoFormatInfo.cropTop = data->codec_data->crop_top;
    mVideoFormatInfo.cropBottom = data->codec_data->crop_bottom;

    ITRACE("Cropping: left = %d, top = %d, right = %d, bottom = %d",
        data->codec_data->crop_left,
        data->codec_data->crop_top,
        data->codec_data->crop_right,
        data->codec_data->crop_bottom);

    if (mConfigBuffer.flag & WANT_SURFACE_PROTECTION) {
        mVideoFormatInfo.actualBufferNeeded = mConfigBuffer.surfaceNumber;
    } else {
        // The actual number of buffers needed is
        // outputQueue + nativewindow_owned + num_ref_frames + widi_need_max + 1 (available buffer),
        // where outputQueue = (DPB < 8) ? DPB : 8
        mVideoFormatInfo.actualBufferNeeded = mOutputWindowSize + NW_CONSUMED /* Owned by native window */
                                              + data->codec_data->num_ref_frames
#ifndef USE_GEN_HW
                                              + HDMI_CONSUMED /* Two extra buffers are needed for native window buffer cycling */
                                              + (mWiDiOn ? WIDI_CONSUMED : 0) /* WiDi maximum needs */
#endif
                                              + 1;
    }

    ITRACE("actualBufferNeeded =%d", mVideoFormatInfo.actualBufferNeeded);

    if ((mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER) && mStoreMetaData) {
        if (mSizeChanged
            || isWiDiStatusChanged()
            || (mVideoFormatInfo.actualBufferNeeded > mConfigBuffer.surfaceNumber)) {
            mVideoFormatInfo.valid = false;
        } else {
            mVideoFormatInfo.valid = true;
        }

        pthread_mutex_unlock(&mFormatLock);
    } else {
        mVideoFormatInfo.valid = true;
    }

    setRenderRect();
    setColorSpaceInfo(mVideoFormatInfo.colorMatrix, mVideoFormatInfo.videoRange);
}

bool VideoDecoderAVC::isWiDiStatusChanged() {
#ifndef USE_GEN_HW
    if (mWiDiOn)
        return false;

    if (mConfigBuffer.flag & WANT_SURFACE_PROTECTION)
        return false;

    if (!(mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER))
        return false;

    char prop[PROPERTY_VALUE_MAX];
    bool widi_on = (property_get("media.widi.enabled", prop, NULL) > 0) &&
                    (!strcmp(prop, "1") || !strcasecmp(prop, "true"));
    if (widi_on) {
        mVideoFormatInfo.actualBufferNeeded += WIDI_CONSUMED;
        mWiDiOn = true;
        ITRACE("WiDi is enabled, actual buffer needed is %d", mVideoFormatInfo.actualBufferNeeded);
        return true;
    }
    return false;
#else
    return false;
#endif
}

Decode_Status VideoDecoderAVC::handleNewSequence(vbp_data_h264 *data) {
    Decode_Status status;
    updateFormatInfo(data);

    bool rawDataMode = !(mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER);
    if (rawDataMode && mSizeChanged) {
        flushSurfaceBuffers();
        mSizeChanged = false;
        return DECODE_FORMAT_CHANGE;
    }

    bool needFlush = false;
    if (!rawDataMode) {
        if (mStoreMetaData) {
            needFlush = mSizeChanged
                    || isWiDiStatusChanged()
                    || (mVideoFormatInfo.actualBufferNeeded > mConfigBuffer.surfaceNumber);
        } else {
            needFlush = (mVideoFormatInfo.width > mVideoFormatInfo.surfaceWidth)
                    || (mVideoFormatInfo.height > mVideoFormatInfo.surfaceHeight)
                    || isWiDiStatusChanged()
                    || (mVideoFormatInfo.actualBufferNeeded > mConfigBuffer.surfaceNumber);
        }
    }

    if (needFlush) {
        if (mStoreMetaData) {
            status = endDecodingFrame(false);
            CHECK_STATUS("endDecodingFrame");
        } else {
            flushSurfaceBuffers();
        }
        mSizeChanged = false;
        return DECODE_FORMAT_CHANGE;
    } else {
        return DECODE_SUCCESS;
    }
}

bool VideoDecoderAVC::isNewFrame(vbp_data_h264 *data, bool equalPTS) {
    if (data->num_pictures == 0) {
        ETRACE("num_pictures == 0");
        return true;
    }

    vbp_picture_data_h264* picData = data->pic_data;
    if (picData->num_slices == 0) {
        ETRACE("num_slices == 0");
        return true;
    }

    bool newFrame = false;
    uint32_t fieldFlags = VA_PICTURE_H264_TOP_FIELD | VA_PICTURE_H264_BOTTOM_FIELD;

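    // A new frame starts unless the current picture is a continuation slice of the
    // previous frame, or it forms a complementary (top/bottom) field pair with the
    // previous picture.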
    if (picData->slc_data[0].slc_parms.first_mb_in_slice != 0) {
        // not the first slice; assume it is a continuation of a partial frame
        // TODO: check if it is a new frame boundary, as the first slice may get lost in the streaming case.
        WTRACE("first_mb_in_slice != 0");
        if (!equalPTS) {
            // return true if the timestamp differs; this is a workaround for a streaming case
            WTRACE("different PTS, treat it as a new frame");
            return true;
        }
    } else {
        if ((picData->pic_parms->CurrPic.flags & fieldFlags) == fieldFlags) {
            ETRACE("Current picture has both odd field and even field.");
        }
        // The current picture is a field or a frame and the buffer contains the first slice;
        // check if the current picture and the last picture form an opposite field pair.
        if (((mLastPictureFlags | picData->pic_parms->CurrPic.flags) & fieldFlags) == fieldFlags) {
            // opposite field
            newFrame = false;
            WTRACE("current picture is not at frame boundary.");
            mLastPictureFlags = 0;
        } else {
            newFrame = true;
            mLastPictureFlags = 0;
            for (uint32_t i = 0; i < data->num_pictures; i++) {
                mLastPictureFlags |= data->pic_data[i].pic_parms->CurrPic.flags;
            }
            if ((mLastPictureFlags & fieldFlags) == fieldFlags) {
                // the current buffer contains both the odd field and the even field.
                mLastPictureFlags = 0;
            }
        }
    }

    return newFrame;
}

int32_t VideoDecoderAVC::getDPBSize(vbp_data_h264 *data) {
    // MaxDpbFrames = min(1024 * MaxDPB / (PicWidthInMbs * FrameHeightInMbs * 384), 16)
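    // Illustrative example (values assumed, not from a specific stream): a 1920x1088
    // level-4.1 stream has MaxDPB = 12288, so 1024 * 12288 / (120 * 68 * 384) = 4
    // reference frames before the cap of 16 is applied.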
    struct DPBTable {
        int32_t level;
        float maxDPB;
    } dpbTable[] = {
        {9,  148.5},
        {10, 148.5},
        {11, 337.5},
        {12, 891.0},
        {13, 891.0},
        {20, 891.0},
        {21, 1782.0},
        {22, 3037.5},
        {30, 3037.5},
        {31, 6750.0},
        {32, 7680.0},
        {40, 12288.0},
        {41, 12288.0},
        {42, 13056.0},
        {50, 41400.0},
        {51, 69120.0}
    };

    int32_t count = sizeof(dpbTable)/sizeof(DPBTable);
    float maxDPB = 0;
    for (int32_t i = 0; i < count; i++) {
        if (dpbTable[i].level == data->codec_data->level_idc) {
            maxDPB = dpbTable[i].maxDPB;
            break;
        }
    }

    int32_t maxDPBSize = maxDPB * 1024 / (
        (data->pic_data[0].pic_parms->picture_width_in_mbs_minus1 + 1) *
        (data->pic_data[0].pic_parms->picture_height_in_mbs_minus1 + 1) *
        384);

    if (maxDPBSize > 16) {
        maxDPBSize = 16;
    } else if (maxDPBSize == 0) {
        maxDPBSize = 3;
    }
    if (maxDPBSize < data->codec_data->num_ref_frames) {
        maxDPBSize = data->codec_data->num_ref_frames;
    }

    // add one extra frame for the current frame.
    maxDPBSize += 1;
    ITRACE("maxDPBSize = %d, num_ref_frame = %d", maxDPBSize, data->codec_data->num_ref_frames);
    return maxDPBSize;
}

Decode_Status VideoDecoderAVC::checkHardwareCapability() {
#ifndef USE_GEN_HW
    VAStatus vaStatus;
    VAConfigAttrib cfgAttribs[2];
    cfgAttribs[0].type = VAConfigAttribMaxPictureWidth;
    cfgAttribs[1].type = VAConfigAttribMaxPictureHeight;
    vaStatus = vaGetConfigAttributes(mVADisplay, VAProfileH264High,
            VAEntrypointVLD, cfgAttribs, 2);
    CHECK_VA_STATUS("vaGetConfigAttributes");
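    // The check below compares total pixel area (max width x max height against the
    // clip's width x height) rather than each dimension separately.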
    if (cfgAttribs[0].value * cfgAttribs[1].value < (uint32_t)mVideoFormatInfo.width * (uint32_t)mVideoFormatInfo.height) {
        ETRACE("hardware supports a maximum resolution of %d * %d, smaller than the clip resolution %d * %d",
                cfgAttribs[0].value, cfgAttribs[1].value, mVideoFormatInfo.width, mVideoFormatInfo.height);
        return DECODE_DRIVER_FAIL;
    }
#endif
    return DECODE_SUCCESS;
}

#ifdef USE_AVC_SHORT_FORMAT
Decode_Status VideoDecoderAVC::getCodecSpecificConfigs(
    VAProfile profile, VAConfigID *config)
{
    VAStatus vaStatus;
    VAConfigAttrib attrib[2];

    if (config == NULL) {
        ETRACE("Invalid parameter!");
        return DECODE_FAIL;
    }

    attrib[0].type = VAConfigAttribRTFormat;
    attrib[0].value = VA_RT_FORMAT_YUV420;
    attrib[1].type = VAConfigAttribDecSliceMode;
    attrib[1].value = VA_DEC_SLICE_MODE_NORMAL;

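    // Query which slice decode modes the driver supports; the branch below prefers
    // the short (base) format when it is available.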
    vaStatus = vaGetConfigAttributes(mVADisplay, profile, VAEntrypointVLD, &attrib[1], 1);

    if (attrib[1].value & VA_DEC_SLICE_MODE_BASE) {
        ITRACE("AVC short format used");
        attrib[1].value = VA_DEC_SLICE_MODE_BASE;
    } else if (attrib[1].value & VA_DEC_SLICE_MODE_NORMAL) {
        ITRACE("AVC long format used");
        attrib[1].value = VA_DEC_SLICE_MODE_NORMAL;
    } else {
        ETRACE("Unsupported Decode Slice Mode!");
        return DECODE_FAIL;
    }

    vaStatus = vaCreateConfig(
            mVADisplay,
            profile,
            VAEntrypointVLD,
            &attrib[0],
            2,
            config);
    CHECK_VA_STATUS("vaCreateConfig");

    return DECODE_SUCCESS;
}
#endif