      1 /*
      2 * Copyright (c) 2009-2011 Intel Corporation.  All rights reserved.
      3 *
      4 * Licensed under the Apache License, Version 2.0 (the "License");
      5 * you may not use this file except in compliance with the License.
      6 * You may obtain a copy of the License at
      7 *
      8 * http://www.apache.org/licenses/LICENSE-2.0
      9 *
     10 * Unless required by applicable law or agreed to in writing, software
     11 * distributed under the License is distributed on an "AS IS" BASIS,
     12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     13 * See the License for the specific language governing permissions and
     14 * limitations under the License.
     15 */
     16 
     17 #include "VideoDecoderAVC.h"
     18 #include "VideoDecoderTrace.h"
     19 #include <string.h>
     20 #include <cutils/properties.h>
     21 
      22 // Macros used when calculating the actual number of buffers needed
     23 #define WIDI_CONSUMED   6
     24 #define HDMI_CONSUMED   2
     25 #define NW_CONSUMED     2
     26 #define POC_DEFAULT     0x7FFFFFFF
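         // These buffer counts feed the actualBufferNeeded calculation in updateFormatInfo();
         // POC_DEFAULT marks an empty slot in the decoded picture buffer (DPB).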
     27 
     28 VideoDecoderAVC::VideoDecoderAVC(const char *mimeType)
     29     : VideoDecoderBase(mimeType, VBP_H264),
     30       mToggleDPB(0),
     31       mErrorConcealment(false),
      32       mAdaptive(false) {
     33 
     34     invalidateDPB(0);
     35     invalidateDPB(1);
     36     mLastPictureFlags = VA_PICTURE_H264_INVALID;
     37 }
     38 
     39 VideoDecoderAVC::~VideoDecoderAVC() {
     40     stop();
     41 }
     42 
     43 Decode_Status VideoDecoderAVC::start(VideoConfigBuffer *buffer) {
     44     Decode_Status status;
     45 
     46     status = VideoDecoderBase::start(buffer);
     47     CHECK_STATUS("VideoDecoderBase::start");
     48 
      49     // We don't want the base class to manage references.
      50     VideoDecoderBase::ManageReference(false);
      51     // Output frames by picture order count (POC).
      52     VideoDecoderBase::setOutputMethod(OUTPUT_BY_POC);
     53 
     54     mErrorConcealment = buffer->flag & WANT_ERROR_CONCEALMENT;
     55     if (buffer->data == NULL || buffer->size == 0) {
     56         WTRACE("No config data to start VA.");
     57         if ((buffer->flag & HAS_SURFACE_NUMBER) && (buffer->flag & HAS_VA_PROFILE)) {
      58             ITRACE("Use client-supplied profile and surface number to start VA.");
     59             return VideoDecoderBase::setupVA(buffer->surfaceNumber, buffer->profile);
     60         }
     61         return DECODE_SUCCESS;
     62     }
     63 
     64     vbp_data_h264 *data = NULL;
     65     status = VideoDecoderBase::parseBuffer(buffer->data, buffer->size, true, (void**)&data);
     66     CHECK_STATUS("VideoDecoderBase::parseBuffer");
     67 
     68     status = startVA(data);
     69     return status;
     70 }
     71 
     72 void VideoDecoderAVC::stop(void) {
      73     // drop the last frame and ignore return value
     74     endDecodingFrame(true);
     75     VideoDecoderBase::stop();
     76     invalidateDPB(0);
     77     invalidateDPB(1);
     78     mToggleDPB = 0;
     79     mErrorConcealment = false;
     80     mLastPictureFlags = VA_PICTURE_H264_INVALID;
     81 }
     82 
     83 void VideoDecoderAVC::flush(void) {
     84     // drop the frame and ignore return value
     85     VideoDecoderBase::flush();
     86     invalidateDPB(0);
     87     invalidateDPB(1);
     88     mToggleDPB = 0;
     89     mLastPictureFlags = VA_PICTURE_H264_INVALID;
     90 }
     91 
     92 Decode_Status VideoDecoderAVC::decode(VideoDecodeBuffer *buffer) {
     93     Decode_Status status;
     94     vbp_data_h264 *data = NULL;
     95     if (buffer == NULL) {
     96         return DECODE_INVALID_DATA;
     97     }
     98     status =  VideoDecoderBase::parseBuffer(
     99             buffer->data,
    100             buffer->size,
    101             false,
    102             (void**)&data);
    103     CHECK_STATUS("VideoDecoderBase::parseBuffer");
    104 
    105     if (!mVAStarted) {
    106          if (data->has_sps && data->has_pps) {
    107             status = startVA(data);
    108             CHECK_STATUS("startVA");
    109         } else {
     110             WTRACE("Can't start VA because SPS or PPS is not yet available.");
    111             return DECODE_SUCCESS;
    112         }
    113     }
    114 
    115     VideoDecoderBase::setRotationDegrees(buffer->rotationDegrees);
    116 
    117     status = decodeFrame(buffer, data);
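             // DECODE_MULTIPLE_FRAME means the parser found a second frame packed into this buffer
             // (see continueDecodingFrame()). Expose its byte offset and timestamp through the
             // extension buffer so the caller knows where the second frame starts.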
    118     if (status == DECODE_MULTIPLE_FRAME) {
    119         buffer->ext = &mExtensionBuffer;
    120         mExtensionBuffer.extType = PACKED_FRAME_TYPE;
    121         mExtensionBuffer.extSize = sizeof(mPackedFrame);
    122         mExtensionBuffer.extData = (uint8_t*)&mPackedFrame;
    123     }
    124     return status;
    125 }
    126 
    127 Decode_Status VideoDecoderAVC::decodeFrame(VideoDecodeBuffer *buffer, vbp_data_h264 *data) {
    128     Decode_Status status;
    129     if (data->has_sps == 0 || data->has_pps == 0) {
    130         return DECODE_NO_CONFIG;
    131     }
    132 
    133     mVideoFormatInfo.flags = 0;
    134     uint32_t fieldFlags = 0;
    135     for (unsigned int i = 0; i < data->num_pictures; i++) {
    136         VAPictureH264 &pic = data->pic_data[i].pic_parms->CurrPic;
    137         fieldFlags |= pic.flags;
     138         // Don't remove the following code; it can be enabled for debugging the DPB.
    139 #if 0
    140         VTRACE("%d: decoding frame %.2f, poc top = %d, poc bottom = %d, flags = %d,  reference = %d",
    141                 i,
    142                 buffer->timeStamp/1E6,
    143                 pic.TopFieldOrderCnt,
    144                 pic.BottomFieldOrderCnt,
    145                 pic.flags,
    146                 (pic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) ||
    147                 (pic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE));
    148 #endif
    149     }
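             // If only one field parity (top or bottom) was seen across all pictures in this buffer,
             // mark the output as a single field.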
    150     int32_t topField = fieldFlags & VA_PICTURE_H264_TOP_FIELD;
    151     int32_t botField = fieldFlags & VA_PICTURE_H264_BOTTOM_FIELD;
    152     if ((topField == 0 && botField != 0) || (topField != 0 && botField == 0)) {
    153         mVideoFormatInfo.flags |= IS_SINGLE_FIELD;
    154     }
    155 
    156     if (data->new_sps || data->new_pps) {
    157         status = handleNewSequence(data);
    158         CHECK_STATUS("handleNewSequence");
    159     }
    160 
    161     if (isWiDiStatusChanged()) {
    162         mSizeChanged = false;
    163         flushSurfaceBuffers();
    164         return DECODE_FORMAT_CHANGE;
    165     }
    166 
     167     // The first pic_data entry always exists; check whether any slice was parsed
    168     if (data->pic_data[0].num_slices == 0) {
    169         ITRACE("No slice available for decoding.");
    170         status = mSizeChanged ? DECODE_FORMAT_CHANGE : DECODE_SUCCESS;
    171         mSizeChanged = false;
    172         return status;
    173     }
    174 
    175     uint64_t lastPTS = mCurrentPTS;
    176     mCurrentPTS = buffer->timeStamp;
    177     //if (lastPTS != mCurrentPTS) {
    178     if (isNewFrame(data, lastPTS == mCurrentPTS)) {
    179         if (mLowDelay) {
    180             // start decoding a new frame
    181             status = beginDecodingFrame(data);
    182             if (status != DECODE_SUCCESS) {
    183                 Decode_Status st = status;
     184                 // If an error is encountered while starting the new frame,
     185                 // finish decoding the last frame first.
    186                 status = endDecodingFrame(false);
    187                 CHECK_STATUS("endDecodingFrame");
    188                 return st;
    189             }
    190         }
    191 
    192         // finish decoding the last frame
    193         status = endDecodingFrame(false);
    194         CHECK_STATUS("endDecodingFrame");
    195 
    196         if (!mLowDelay) {
    197             // start decoding a new frame
    198             status = beginDecodingFrame(data);
    199             CHECK_STATUS("beginDecodingFrame");
    200         }
    201     } else {
    202         status = continueDecodingFrame(data);
    203         CHECK_STATUS("continueDecodingFrame");
    204     }
    205 
    206     // HAS_COMPLETE_FRAME is not reliable as it may indicate end of a field
    207 #if 0
    208     if (buffer->flag & HAS_COMPLETE_FRAME) {
    209         // finish decoding current frame
    210         status = endDecodingFrame(false);
    211         CHECK_STATUS("endDecodingFrame");
    212     }
    213 #endif
    214     return DECODE_SUCCESS;
    215 }
    216 
    217 Decode_Status VideoDecoderAVC::beginDecodingFrame(vbp_data_h264 *data) {
    218     Decode_Status status;
    219 
    220     status = acquireSurfaceBuffer();
    221     CHECK_STATUS("acquireSurfaceBuffer");
    222     VAPictureH264 *picture = &(data->pic_data[0].pic_parms->CurrPic);
    223     if ((picture->flags  & VA_PICTURE_H264_SHORT_TERM_REFERENCE) ||
    224         (picture->flags & VA_PICTURE_H264_LONG_TERM_REFERENCE)) {
    225         mAcquiredBuffer->referenceFrame = true;
    226     } else {
    227         mAcquiredBuffer->referenceFrame = false;
    228     }
    229     // set asReference in updateDPB
    230 
    231     if (picture->flags & VA_PICTURE_H264_TOP_FIELD) {
    232         mAcquiredBuffer->renderBuffer.scanFormat = VA_BOTTOM_FIELD | VA_TOP_FIELD;
    233     } else {
    234         mAcquiredBuffer->renderBuffer.scanFormat = VA_FRAME_PICTURE;
    235     }
    236 
    237     // TODO: Set the discontinuity flag
    238     mAcquiredBuffer->renderBuffer.flag = 0;
    239     mAcquiredBuffer->renderBuffer.timeStamp = mCurrentPTS;
    240     mAcquiredBuffer->pictureOrder = getPOC(picture);
    241 
    242     if (mSizeChanged) {
    243         mAcquiredBuffer->renderBuffer.flag |= IS_RESOLUTION_CHANGE;
    244         mSizeChanged = false;
    245     }
    246 
    247     status  = continueDecodingFrame(data);
    248     // surface buffer is released if decode fails
    249     return status;
    250 }
    251 
    252 
    253 Decode_Status VideoDecoderAVC::continueDecodingFrame(vbp_data_h264 *data) {
    254     Decode_Status status;
    255     vbp_picture_data_h264 *picData = data->pic_data;
    256 
     257     // TODO: remove this debugging code
    258     if (mAcquiredBuffer == NULL || mAcquiredBuffer->renderBuffer.surface == VA_INVALID_SURFACE) {
    259         ETRACE("mAcquiredBuffer is NULL. Implementation bug.");
    260         return DECODE_FAIL;
    261     }
    262     for (uint32_t picIndex = 0; picIndex < data->num_pictures; picIndex++, picData++) {
    263         // sanity check
    264         if (picData == NULL || picData->pic_parms == NULL || picData->slc_data == NULL || picData->num_slices == 0) {
    265             return DECODE_PARSER_FAIL;
    266         }
    267 
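                 // A second picture without field flags means two frames are packed into this buffer;
                 // record where the second frame's data starts and return DECODE_MULTIPLE_FRAME.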
    268         if (picIndex > 0 &&
    269             (picData->pic_parms->CurrPic.flags & (VA_PICTURE_H264_TOP_FIELD | VA_PICTURE_H264_BOTTOM_FIELD)) == 0) {
    270             // it is a packed frame buffer
    271             vbp_picture_data_h264 *lastPic = &data->pic_data[picIndex - 1];
    272             vbp_slice_data_h264 *sliceData = &(lastPic->slc_data[lastPic->num_slices - 1]);
    273             mPackedFrame.offSet = sliceData->slice_size + sliceData->slice_offset;
    274             mPackedFrame.timestamp = mCurrentPTS; // use the current time stamp for the packed frame
    275             ITRACE("slice data offset= %d, size = %d", sliceData->slice_offset, sliceData->slice_size);
    276             return DECODE_MULTIPLE_FRAME;
    277         }
    278 
    279         for (uint32_t sliceIndex = 0; sliceIndex < picData->num_slices; sliceIndex++) {
    280             status = decodeSlice(data, picIndex, sliceIndex);
    281             if (status != DECODE_SUCCESS) {
    282                 endDecodingFrame(true);
    283                 // TODO: this is new code
    284                 // remove current frame from DPB as it can't be decoded.
    285                 removeReferenceFromDPB(picData->pic_parms);
    286                 return status;
    287             }
    288         }
    289     }
    290     return DECODE_SUCCESS;
    291 }
    292 
    293 Decode_Status VideoDecoderAVC::decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex) {
    294     Decode_Status status;
    295     VAStatus vaStatus;
    296     uint32_t bufferIDCount = 0;
    297     // maximum 4 buffers to render a slice: picture parameter, IQMatrix, slice parameter, slice data
    298     VABufferID bufferIDs[4];
    299 
    300     vbp_picture_data_h264 *picData = &(data->pic_data[picIndex]);
    301     vbp_slice_data_h264 *sliceData = &(picData->slc_data[sliceIndex]);
    302     VAPictureParameterBufferH264 *picParam = picData->pic_parms;
    303     VASliceParameterBufferH264 *sliceParam = &(sliceData->slc_parms);
    304 
    305     if (sliceParam->first_mb_in_slice == 0 || mDecodingFrame == false) {
    306         // either condition indicates start of a new frame
    307         if (sliceParam->first_mb_in_slice != 0) {
    308             WTRACE("The first slice is lost.");
     309             // TODO: handle loss of the first slice
    310         }
    311         if (mDecodingFrame) {
     312             // Interlaced content: finish decoding the first field
    313             vaStatus = vaEndPicture(mVADisplay, mVAContext);
    314             CHECK_VA_STATUS("vaEndPicture");
    315 
     316             // For interlaced content, the top-field POC may become valid only after the second field is parsed
    317             int32_t poc = getPOC(&(picParam->CurrPic));
    318             if (poc < mAcquiredBuffer->pictureOrder) {
    319                 mAcquiredBuffer->pictureOrder = poc;
    320             }
    321         }
    322 
     323         // Check that no reference frame is lost before decoding a frame
     324 
     325         // Update the reference frames and surface IDs for the DPB and the current frame
    326         status = updateDPB(picParam);
    327         CHECK_STATUS("updateDPB");
    328 
    329 #ifndef USE_AVC_SHORT_FORMAT
     330         // As a workaround, we provide a hacked DPB rather than the complete DPB to libva
    331         status = updateReferenceFrames(picData);
    332         CHECK_STATUS("updateReferenceFrames");
    333 #endif
    334         vaStatus = vaBeginPicture(mVADisplay, mVAContext, mAcquiredBuffer->renderBuffer.surface);
    335         CHECK_VA_STATUS("vaBeginPicture");
    336 
    337         // start decoding a frame
    338         mDecodingFrame = true;
    339 
    340         vaStatus = vaCreateBuffer(
    341             mVADisplay,
    342             mVAContext,
    343             VAPictureParameterBufferType,
    344             sizeof(VAPictureParameterBufferH264),
    345             1,
    346             picParam,
    347             &bufferIDs[bufferIDCount]);
    348         CHECK_VA_STATUS("vaCreatePictureParameterBuffer");
    349         bufferIDCount++;
    350 
    351         vaStatus = vaCreateBuffer(
    352             mVADisplay,
    353             mVAContext,
    354             VAIQMatrixBufferType,
    355             sizeof(VAIQMatrixBufferH264),
    356             1,
    357             data->IQ_matrix_buf,
    358             &bufferIDs[bufferIDCount]);
    359         CHECK_VA_STATUS("vaCreateIQMatrixBuffer");
    360         bufferIDCount++;
    361     }
    362 
    363 #ifndef USE_AVC_SHORT_FORMAT
    364 
    365     status = setReference(sliceParam);
    366     CHECK_STATUS("setReference");
    367 
    368     vaStatus = vaCreateBuffer(
    369         mVADisplay,
    370         mVAContext,
    371         VASliceParameterBufferType,
    372         sizeof(VASliceParameterBufferH264),
    373         1,
    374         sliceParam,
    375         &bufferIDs[bufferIDCount]);
    376 #else
    377     vaStatus = vaCreateBuffer(
    378         mVADisplay,
    379         mVAContext,
    380         VASliceParameterBufferType,
    381         sizeof(VASliceParameterBufferH264Base),
    382         1,
    383         sliceParam,
    384         &bufferIDs[bufferIDCount]);
    385 #endif
    386     CHECK_VA_STATUS("vaCreateSliceParameterBuffer");
    387     bufferIDCount++;
    388 
    389     vaStatus = vaCreateBuffer(
    390         mVADisplay,
    391         mVAContext,
    392         VASliceDataBufferType,
    393         sliceData->slice_size, //size
    394         1,        //num_elements
    395         sliceData->buffer_addr + sliceData->slice_offset,
    396         &bufferIDs[bufferIDCount]);
    397     CHECK_VA_STATUS("vaCreateSliceDataBuffer");
    398     bufferIDCount++;
    399 
    400     vaStatus = vaRenderPicture(
    401         mVADisplay,
    402         mVAContext,
    403         bufferIDs,
    404         bufferIDCount);
    405     CHECK_VA_STATUS("vaRenderPicture");
    406 
    407     return DECODE_SUCCESS;
    408 }
    409 
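         // Map each valid entry of RefPicList0/RefPicList1 onto an actual VA surface from the DPB.
         // If a reference surface cannot be found, a DecodeRefMissing error is recorded and the last
         // reference frame is used as a substitute when one is available.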
    410 Decode_Status VideoDecoderAVC::setReference(VASliceParameterBufferH264 *sliceParam) {
    411     int32_t numList = 1;
     412     // TODO: set numList to 0 if it is an I slice
    413     if (sliceParam->slice_type == 1 || sliceParam->slice_type == 6) {
    414         // B slice
    415         numList = 2;
    416     }
    417 
    418     int32_t activeMinus1 = sliceParam->num_ref_idx_l0_active_minus1;
    419     VAPictureH264 *ref = sliceParam->RefPicList0;
    420 
    421     for (int32_t i = 0; i < numList; i++) {
    422         if (activeMinus1 >= REF_LIST_SIZE) {
    423             ETRACE("Invalid activeMinus1 (%d)", activeMinus1);
    424             return DECODE_PARSER_FAIL;
    425         }
    426         for (int32_t j = 0; j <= activeMinus1; j++, ref++) {
    427             if (!(ref->flags & VA_PICTURE_H264_INVALID)) {
    428                 ref->picture_id = findSurface(ref);
    429                 if (ref->picture_id == VA_INVALID_SURFACE) {
     430                     // Error DecodeRefMissing is counted only once, even if multiple references are missing
    431                     mAcquiredBuffer->renderBuffer.errBuf.errorNumber = 1;
    432                     mAcquiredBuffer->renderBuffer.errBuf.errorArray[0].type = DecodeRefMissing;
    433 
    434                     if (mLastReference) {
    435                         WTRACE("Reference frame %d is missing. Use last reference", getPOC(ref));
    436                         ref->picture_id = mLastReference->renderBuffer.surface;
    437                     } else {
    438                         ETRACE("Reference frame %d is missing. Stop decoding.", getPOC(ref));
    439                         return DECODE_NO_REFERENCE;
    440                     }
    441                 }
    442             }
    443         }
    444         activeMinus1 = sliceParam->num_ref_idx_l1_active_minus1;
    445         ref = sliceParam->RefPicList1;
    446     }
    447     return DECODE_SUCCESS;
    448 }
    449 
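         // Two DPB copies are kept (mDPBs[0]/mDPBs[1]) and mToggleDPB selects the active one.
         // For each frame the inactive copy is rebuilt from picParam->ReferenceFrames, the referenced
         // surfaces are marked, the current frame is appended if it is a reference, and then the old
         // copy is invalidated and mToggleDPB is flipped.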
    450 Decode_Status VideoDecoderAVC::updateDPB(VAPictureParameterBufferH264 *picParam) {
    451     clearAsReference(mToggleDPB);
    452     // pointer to toggled DPB (new)
    453     DecodedPictureBuffer *dpb = mDPBs[!mToggleDPB];
    454     VAPictureH264 *ref = picParam->ReferenceFrames;
    455 
    456     // update current picture ID
    457     picParam->CurrPic.picture_id = mAcquiredBuffer->renderBuffer.surface;
    458 
    459     // build new DPB
    460     for (int32_t i = 0; i < MAX_REF_NUMBER; i++, ref++) {
    461         if (ref->flags & VA_PICTURE_H264_INVALID) {
    462             continue;
    463         }
    464 #ifdef USE_AVC_SHORT_FORMAT
    465         ref->picture_id = findSurface(ref);
    466 #endif
    467         dpb->poc = getPOC(ref);
     468         // Look for the latest reference frame in the DPB with the specified POC, in case frames share the same POC
    469         dpb->surfaceBuffer = findRefSurfaceBuffer(ref);
    470         if (dpb->surfaceBuffer == NULL) {
    471             ETRACE("Reference frame %d is missing for current frame %d", dpb->poc, getPOC(&(picParam->CurrPic)));
     472             // Error DecodeRefMissing is counted only once, even if multiple references are missing
    473             mAcquiredBuffer->renderBuffer.errBuf.errorNumber = 1;
    474             mAcquiredBuffer->renderBuffer.errBuf.errorArray[0].type = DecodeRefMissing;
    475             if (dpb->poc == getPOC(&(picParam->CurrPic))) {
    476                 WTRACE("updateDPB: Using the current picture for missing reference.");
    477                 dpb->surfaceBuffer = mAcquiredBuffer;
    478             } else if (mLastReference) {
    479                 WTRACE("updateDPB: Use last reference frame %d for missing reference.", mLastReference->pictureOrder);
    480                 // TODO: this is new code for error resilience
    481                 dpb->surfaceBuffer = mLastReference;
    482             } else {
    483                 WTRACE("updateDPB: Unable to recover the missing reference frame.");
     484                 // Continue building the DPB without advancing the dpb pointer;
     485                 // this reference may not actually be used, especially after
     486                 // seeking to a non-IDR I frame.
     487                 continue;
     488                 //return DECODE_NO_REFERENCE;
    489             }
    490         }
    491         if (dpb->surfaceBuffer) {
    492             // this surface is used as reference
    493             dpb->surfaceBuffer->asReferernce = true;
    494         }
    495         dpb++;
    496     }
    497 
     498     // add the current frame to the DPB if it is a reference frame
    499     if ((picParam->CurrPic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) ||
    500         (picParam->CurrPic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE)) {
    501         dpb->poc = getPOC(&(picParam->CurrPic));
    502         dpb->surfaceBuffer = mAcquiredBuffer;
    503         dpb->surfaceBuffer->asReferernce = true;
    504     }
     505     // invalidate the currently used DPB
    506     invalidateDPB(mToggleDPB);
    507     mToggleDPB = !mToggleDPB;
    508     return DECODE_SUCCESS;
    509 }
    510 
    511 Decode_Status VideoDecoderAVC::updateReferenceFrames(vbp_picture_data_h264 *picData) {
    512     bool found = false;
    513     uint32_t flags = 0;
    514     VAPictureParameterBufferH264 *picParam = picData->pic_parms;
    515     VASliceParameterBufferH264 *sliceParam = NULL;
    516     uint8_t activeMinus1 = 0;
    517     VAPictureH264 *refList = NULL;
    518     VAPictureH264 *dpb = picParam->ReferenceFrames;
    519     VAPictureH264 *refFrame = NULL;
    520 
    521     for(int i = 0; i < picParam->num_ref_frames; i++) {
    522         dpb->picture_id = findSurface(dpb);
    523         dpb++;
    524     }
    525 
    526     return DECODE_SUCCESS;
    527 
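             // NOTE: the early return above makes the DPB-rebuild logic below unreachable;
             // it appears to be intentionally disabled and is kept for reference only.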
    528     // invalidate DPB in the picture buffer
    529     memset(picParam->ReferenceFrames, 0xFF, sizeof(picParam->ReferenceFrames));
    530     picParam->num_ref_frames = 0;
    531 
     532     // update DPB from the reference list in each slice.
    533     for (uint32_t slice = 0; slice < picData->num_slices; slice++) {
    534         sliceParam = &(picData->slc_data[slice].slc_parms);
    535 
    536         for (int32_t list = 0; list < 2; list++) {
    537             refList = (list == 0) ? sliceParam->RefPicList0 :
    538                                     sliceParam->RefPicList1;
    539             activeMinus1 = (list == 0) ? sliceParam->num_ref_idx_l0_active_minus1 :
    540                                          sliceParam->num_ref_idx_l1_active_minus1;
    541             if (activeMinus1 >= REF_LIST_SIZE) {
    542                 return DECODE_PARSER_FAIL;
    543             }
    544             for (uint8_t item = 0; item < (uint8_t)(activeMinus1 + 1); item++, refList++) {
    545                 if (refList->flags & VA_PICTURE_H264_INVALID) {
    546                     break;
    547                 }
    548                 found = false;
    549                 refFrame = picParam->ReferenceFrames;
    550                 for (uint8_t frame = 0; frame < picParam->num_ref_frames; frame++, refFrame++) {
    551                     if (refFrame->TopFieldOrderCnt == refList->TopFieldOrderCnt) {
     552                         // Check for a complementary field.
     553                         flags = refFrame->flags | refList->flags;
     554                         // If both TOP and BOTTOM are set, clear those field flags.
    555                         if ((flags & VA_PICTURE_H264_TOP_FIELD) &&
    556                             (flags & VA_PICTURE_H264_BOTTOM_FIELD)) {
    557                             refFrame->flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE;
    558                         }
    559                         found = true;  //already in the DPB; will not add this one
    560                         break;
    561                     }
    562                 }
    563                 if (found == false) {
    564                     // add a new reference to the DPB
    565                     dpb->picture_id = findSurface(refList);
    566                     if (dpb->picture_id == VA_INVALID_SURFACE) {
    567                         if (mLastReference != NULL) {
    568                             dpb->picture_id = mLastReference->renderBuffer.surface;
    569                         } else {
     570                             ETRACE("Reference frame %d is missing. Stop updating reference frames.", getPOC(refList));
    571                             return DECODE_NO_REFERENCE;
    572                         }
    573                     }
    574                     dpb->flags = refList->flags;
     575                     // If a bottom field is in the DPB, its top field must also be in the DPB,
     576                     // so clear the bottom flag; otherwise the VED gets confused when addressing the top field.
    577                     if (dpb->flags & VA_PICTURE_H264_BOTTOM_FIELD)
    578                         dpb->flags &= (~VA_PICTURE_H264_BOTTOM_FIELD);
    579                     dpb->frame_idx = refList->frame_idx;
    580                     dpb->TopFieldOrderCnt = refList->TopFieldOrderCnt;
    581                     dpb->BottomFieldOrderCnt = refList->BottomFieldOrderCnt;
    582                     dpb++;
    583                     picParam->num_ref_frames++;
    584                 }
    585             }
    586         }
    587     }
    588     return DECODE_SUCCESS;
    589 }
    590 
    591 void VideoDecoderAVC::removeReferenceFromDPB(VAPictureParameterBufferH264 *picParam) {
    592     // remove the current frame from DPB as it can't be decoded.
    593     if ((picParam->CurrPic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) ||
    594         (picParam->CurrPic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE)) {
    595         DecodedPictureBuffer *dpb = mDPBs[mToggleDPB];
    596         int32_t poc = getPOC(&(picParam->CurrPic));
    597         for (int32_t i = 0; i < DPB_SIZE; i++, dpb++) {
    598             if (poc == dpb->poc) {
    599                 dpb->poc = (int32_t)POC_DEFAULT;
    600                 if (dpb->surfaceBuffer) {
    601                     dpb->surfaceBuffer->asReferernce = false;
    602                 }
    603                 dpb->surfaceBuffer = NULL;
    604                 break;
    605             }
    606         }
    607     }
    608 }
    609 
    610 int32_t VideoDecoderAVC::getPOC(VAPictureH264 *pic) {
    611     if (pic->flags & VA_PICTURE_H264_BOTTOM_FIELD) {
    612         return pic->BottomFieldOrderCnt;
    613     }
    614     return pic->TopFieldOrderCnt;
    615 }
    616 
    617 VASurfaceID VideoDecoderAVC::findSurface(VAPictureH264 *pic) {
    618     VideoSurfaceBuffer *p = findSurfaceBuffer(pic);
    619     if (p == NULL) {
    620         ETRACE("Could not find surface for poc %d", getPOC(pic));
    621         return VA_INVALID_SURFACE;
    622     }
    623     return p->renderBuffer.surface;
    624 }
    625 
    626 VideoSurfaceBuffer* VideoDecoderAVC::findSurfaceBuffer(VAPictureH264 *pic) {
    627     DecodedPictureBuffer *dpb = mDPBs[mToggleDPB];
    628     for (int32_t i = 0; i < DPB_SIZE; i++, dpb++) {
    629         if (dpb->poc == pic->BottomFieldOrderCnt ||
    630             dpb->poc == pic->TopFieldOrderCnt) {
     631             // TODO: remove this debugging code
    632             if (dpb->surfaceBuffer == NULL) {
    633                 ETRACE("Invalid surface buffer in the DPB for poc %d.", getPOC(pic));
    634             }
    635             return dpb->surfaceBuffer;
    636         }
    637     }
    638     // ETRACE("Unable to find surface for poc %d", getPOC(pic));
    639     return NULL;
    640 }
    641 
    642 VideoSurfaceBuffer* VideoDecoderAVC::findRefSurfaceBuffer(VAPictureH264 *pic) {
    643     DecodedPictureBuffer *dpb = mDPBs[mToggleDPB];
     644     // Always look for the latest entry in the DPB, in case reference frames share the same POC
    645     dpb += (DPB_SIZE - 1);
    646     for (int32_t i = DPB_SIZE; i > 0; i--, dpb--) {
    647         if (dpb->poc == pic->BottomFieldOrderCnt ||
    648             dpb->poc == pic->TopFieldOrderCnt) {
     649             // TODO: remove this debugging code
    650             if (dpb->surfaceBuffer == NULL) {
    651                 ETRACE("Invalid surface buffer in the DPB for poc %d.", getPOC(pic));
    652             }
    653             return dpb->surfaceBuffer;
    654         }
    655     }
    656     ETRACE("Unable to find surface for poc %d", getPOC(pic));
    657     return NULL;
    658 }
    659 
    660 void VideoDecoderAVC::invalidateDPB(int toggle) {
    661     DecodedPictureBuffer* p = mDPBs[toggle];
    662     for (int i = 0; i < DPB_SIZE; i++) {
    663         p->poc = (int32_t) POC_DEFAULT;
    664         p->surfaceBuffer = NULL;
    665         p++;
    666     }
    667 }
    668 
    669 void VideoDecoderAVC::clearAsReference(int toggle) {
    670     DecodedPictureBuffer* p = mDPBs[toggle];
    671     for (int i = 0; i < DPB_SIZE; i++) {
    672         if (p->surfaceBuffer) {
    673             p->surfaceBuffer->asReferernce = false;
    674         }
    675         p++;
    676     }
    677 }
    678 
    679 Decode_Status VideoDecoderAVC::startVA(vbp_data_h264 *data) {
    680     int32_t DPBSize = getDPBSize(data);
    681 
     682     // Use the high profile for all H.264 profiles (baseline, main and high) except constrained baseline
    683     VAProfile vaProfile = VAProfileH264High;
    684 
    685     if ((mConfigBuffer.flag & WANT_ADAPTIVE_PLAYBACK) || mAdaptive) {
     686         // When adaptive playback is enabled, turn off low-delay mode.
     687         // Otherwise there may be about 240 ms of stuttering if the output mode changes from LowDelay to Delay.
    688         enableLowDelayMode(false);
    689     } else {
    690         // for baseline profile or constrained high profile, enable low delay mode automatically
    691         enableLowDelayMode((data->codec_data->profile_idc == 66) || (data->codec_data->profile_idc == 100 && data->codec_data->constraint_set4_flag == 1 && data->codec_data->constraint_set5_flag == 1));
    692     }
    693 
    694     // TODO: determine when to use VAProfileH264ConstrainedBaseline, set only if we are told to do so
    695     if ((data->codec_data->profile_idc == 66 || data->codec_data->constraint_set0_flag == 1) &&
    696         data->codec_data->constraint_set1_flag == 1) {
    697         if (mErrorConcealment) {
    698             vaProfile = VAProfileH264ConstrainedBaseline;
    699         }
    700     }
    701 
    702     VideoDecoderBase::setOutputWindowSize(mConfigBuffer.flag & WANT_ADAPTIVE_PLAYBACK ? OUTPUT_WINDOW_SIZE : DPBSize);
    703     updateFormatInfo(data);
    704 
     705     // For 1080p, limit the total number of surfaces to 19, according to the hardware limitation.
     706     // (The max surface number was once reduced from 19 to 10 to work around a memory shortage;
     707     //  that workaround has since been removed.)
     708     if (mVideoFormatInfo.surfaceHeight == 1088 && DPBSize + AVC_EXTRA_SURFACE_NUMBER > 19) {
    709         DPBSize = 19 - AVC_EXTRA_SURFACE_NUMBER;
    710     }
    711 
    712     return VideoDecoderBase::setupVA(DPBSize + AVC_EXTRA_SURFACE_NUMBER, vaProfile);
    713 }
    714 
    715 void VideoDecoderAVC::updateFormatInfo(vbp_data_h264 *data) {
    716     // new video size
    717     uint32_t width = (data->pic_data[0].pic_parms->picture_width_in_mbs_minus1 + 1) * 16;
    718     uint32_t height = (data->pic_data[0].pic_parms->picture_height_in_mbs_minus1 + 1) * 16;
    719 
    720     if (data->codec_data->crop_top > 0)
    721         height -= data->codec_data->crop_top;
    722 
    723     if (data->codec_data->crop_bottom > 0)
    724         height -= data->codec_data->crop_bottom;
    725 
     726     if (data->codec_data->crop_left > 0)
     727         width -= data->codec_data->crop_left;
     728 
     729     if (data->codec_data->crop_right > 0)
    730         width -= data->codec_data->crop_right;
    731 
    732     ITRACE("updateFormatInfo: current size: %d x %d, new size: %d x %d",
    733         mVideoFormatInfo.width, mVideoFormatInfo.height, width, height);
    734 
    735     if ((mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER) && mStoreMetaData) {
    736         pthread_mutex_lock(&mFormatLock);
    737     }
    738 
    739     if ((mVideoFormatInfo.width != width ||
    740         mVideoFormatInfo.height != height) &&
    741         width && height) {
    742         if (VideoDecoderBase::alignMB(mVideoFormatInfo.width) != width ||
    743             VideoDecoderBase::alignMB(mVideoFormatInfo.height) != height) {
    744             mSizeChanged = true;
    745             mAdaptive = true;
    746             ITRACE("Video size is changed.");
    747         }
    748         mVideoFormatInfo.width = width;
    749         mVideoFormatInfo.height = height;
    750     }
    751 
    752     // video_range has default value of 0.
    753     mVideoFormatInfo.videoRange = data->codec_data->video_full_range_flag;
    754 
    755     switch (data->codec_data->matrix_coefficients) {
    756         case 1:
    757             mVideoFormatInfo.colorMatrix = VA_SRC_BT709;
    758             break;
    759 
    760         // ITU-R Recommendation BT.470-6 System B, G (MP4), same as
    761         // SMPTE 170M/BT601
    762         case 5:
    763         case 6:
    764             mVideoFormatInfo.colorMatrix = VA_SRC_BT601;
    765             break;
    766 
    767         default:
    768             // unknown color matrix, set to 0 so color space flag will not be set.
    769             mVideoFormatInfo.colorMatrix = 0;
    770             break;
    771     }
    772     mVideoFormatInfo.aspectX = data->codec_data->sar_width;
    773     mVideoFormatInfo.aspectY = data->codec_data->sar_height;
    774     mVideoFormatInfo.bitrate = data->codec_data->bit_rate;
    775     mVideoFormatInfo.cropLeft = data->codec_data->crop_left;
    776     mVideoFormatInfo.cropRight = data->codec_data->crop_right;
    777     mVideoFormatInfo.cropTop = data->codec_data->crop_top;
    778     mVideoFormatInfo.cropBottom = data->codec_data->crop_bottom;
    779 
    780     ITRACE("Cropping: left = %d, top = %d, right = %d, bottom = %d",
    781         data->codec_data->crop_left,
    782         data->codec_data->crop_top,
    783         data->codec_data->crop_right,
    784         data->codec_data->crop_bottom);
    785 
    786     if (mConfigBuffer.flag & WANT_SURFACE_PROTECTION) {
    787         mVideoFormatInfo.actualBufferNeeded = mConfigBuffer.surfaceNumber;
    788     } else {
     789         // The actual number of buffers needed is
     790         // outputQueue + nativewindow_owned + num_ref_frames + widi_need_max + 1 (available buffer),
     791         // where outputQueue = (DPB < 8) ? DPB : 8
    792         mVideoFormatInfo.actualBufferNeeded = mOutputWindowSize + NW_CONSUMED /* Owned by native window */
    793                                               + data->codec_data->num_ref_frames
    794 #ifndef USE_GEN_HW
    795                                               + HDMI_CONSUMED /* Two extra buffers are needed for native window buffer cycling */
    796                                               + (mWiDiOn ? WIDI_CONSUMED : 0) /* WiDi maximum needs */
    797 #endif
    798                                               + 1;
    799     }
    800 
    801     ITRACE("actualBufferNeeded =%d", mVideoFormatInfo.actualBufferNeeded);
    802 
    803     if ((mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER) && mStoreMetaData) {
    804         if (mSizeChanged
    805             || isWiDiStatusChanged()
    806             || (mVideoFormatInfo.actualBufferNeeded > mConfigBuffer.surfaceNumber)) {
    807             mVideoFormatInfo.valid = false;
    808         } else {
    809             mVideoFormatInfo.valid = true;
    810         }
    811 
    812         pthread_mutex_unlock(&mFormatLock);
    813     } else {
    814         mVideoFormatInfo.valid = true;
    815     }
    816 
    817     setRenderRect();
    818     setColorSpaceInfo(mVideoFormatInfo.colorMatrix, mVideoFormatInfo.videoRange);
    819 }
    820 
    821 bool VideoDecoderAVC::isWiDiStatusChanged() {
    822 #ifndef USE_GEN_HW
    823     if (mWiDiOn)
    824         return false;
    825 
    826     if (mConfigBuffer.flag & WANT_SURFACE_PROTECTION)
    827         return false;
    828 
    829     if (!(mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER))
    830         return false;
    831 
    832     char prop[PROPERTY_VALUE_MAX];
    833     bool widi_on = (property_get("media.widi.enabled", prop, NULL) > 0) &&
    834                     (!strcmp(prop, "1") || !strcasecmp(prop, "true"));
    835     if (widi_on) {
    836         mVideoFormatInfo.actualBufferNeeded += WIDI_CONSUMED;
    837         mWiDiOn = true;
    838         ITRACE("WiDi is enabled, actual buffer needed is %d", mVideoFormatInfo.actualBufferNeeded);
    839         return true;
    840     }
    841     return false;
    842 #else
    843     return false;
    844 #endif
    845 }
    846 
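         // Called when the parser reports a new SPS/PPS: refresh the format information and decide
         // whether the surface pool must be rebuilt (DECODE_FORMAT_CHANGE) or decoding can continue.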
    847 Decode_Status VideoDecoderAVC::handleNewSequence(vbp_data_h264 *data) {
    848     Decode_Status status;
    849     updateFormatInfo(data);
    850 
    851     bool rawDataMode = !(mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER);
    852     if (rawDataMode && mSizeChanged) {
    853         flushSurfaceBuffers();
    854         mSizeChanged = false;
    855         return DECODE_FORMAT_CHANGE;
    856     }
    857 
    858     bool needFlush = false;
    859     if (!rawDataMode) {
    860         if (mStoreMetaData) {
    861             needFlush = mSizeChanged
    862                     || isWiDiStatusChanged()
    863                     || (mVideoFormatInfo.actualBufferNeeded > mConfigBuffer.surfaceNumber);
    864         } else {
    865             needFlush = (mVideoFormatInfo.width > mVideoFormatInfo.surfaceWidth)
    866                     || (mVideoFormatInfo.height > mVideoFormatInfo.surfaceHeight)
    867                     || isWiDiStatusChanged()
    868                     || (mVideoFormatInfo.actualBufferNeeded > mConfigBuffer.surfaceNumber);
    869         }
    870     }
    871 
    872     if (needFlush) {
    873         if (mStoreMetaData) {
    874             status = endDecodingFrame(false);
    875             CHECK_STATUS("endDecodingFrame");
    876         } else {
    877             flushSurfaceBuffers();
    878         }
    879         mSizeChanged = false;
    880         return DECODE_FORMAT_CHANGE;
    881     } else
    882         return DECODE_SUCCESS;
    883 }
    884 
    885 bool VideoDecoderAVC::isNewFrame(vbp_data_h264 *data, bool equalPTS) {
    886     if (data->num_pictures == 0) {
    887         ETRACE("num_pictures == 0");
    888         return true;
    889     }
    890 
    891     vbp_picture_data_h264* picData = data->pic_data;
    892     if (picData->num_slices == 0) {
    893         ETRACE("num_slices == 0");
    894         return true;
    895     }
    896 
    897     bool newFrame = false;
    898     uint32_t fieldFlags = VA_PICTURE_H264_TOP_FIELD | VA_PICTURE_H264_BOTTOM_FIELD;
    899 
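             // mLastPictureFlags remembers the field parity of the previous buffer: when the previous
             // and current pictures together supply both a top and a bottom field, they are treated as
             // the two fields of the same frame rather than as a new frame.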
    900     if (picData->slc_data[0].slc_parms.first_mb_in_slice != 0) {
     901         // Not the first slice; assume it is the continuation of a partial frame.
     902         // TODO: check whether this is a new frame boundary, as the first slice may be lost in the streaming case.
    903         WTRACE("first_mb_in_slice != 0");
    904         if (!equalPTS) {
     905             // Return true if the timestamp differs; this is a workaround for a streaming case
    906             WTRACE("different PTS, treat it as a new frame");
    907             return true;
    908         }
    909     } else {
    910         if ((picData->pic_parms->CurrPic.flags & fieldFlags) == fieldFlags) {
    911             ETRACE("Current picture has both odd field and even field.");
    912         }
     913         // The current picture is a field or a frame and the buffer contains the first slice; check whether
     914         // the current picture and the last picture form an opposite-field pair.
    915         if (((mLastPictureFlags | picData->pic_parms->CurrPic.flags) & fieldFlags) == fieldFlags) {
    916             // opposite field
    917             newFrame = false;
    918             WTRACE("current picture is not at frame boundary.");
    919             mLastPictureFlags = 0;
    920         } else {
    921             newFrame = true;
    922             mLastPictureFlags = 0;
    923             for (uint32_t i = 0; i < data->num_pictures; i++) {
    924                 mLastPictureFlags |= data->pic_data[i].pic_parms->CurrPic.flags;
    925             }
    926             if ((mLastPictureFlags & fieldFlags) == fieldFlags) {
    927                 // current buffer contains both odd field and even field.
    928                 mLastPictureFlags = 0;
    929             }
    930         }
    931     }
    932 
    933     return newFrame;
    934 }
    935 
    936 int32_t VideoDecoderAVC::getDPBSize(vbp_data_h264 *data) {
     937     // DPB size in frames = min(1024 * MaxDPB / (PicWidthInMbs * FrameHeightInMbs * 384), 16)
    938     struct DPBTable {
    939         int32_t level;
    940         float maxDPB;
    941     } dpbTable[] = {
    942         {9,  148.5},
    943         {10, 148.5},
    944         {11, 337.5},
    945         {12, 891.0},
    946         {13, 891.0},
    947         {20, 891.0},
    948         {21, 1782.0},
    949         {22, 3037.5},
    950         {30, 3037.5},
    951         {31, 6750.0},
    952         {32, 7680.0},
    953         {40, 12288.0},
    954         {41, 12288.0},
    955         {42, 13056.0},
    956         {50, 41400.0},
    957         {51, 69120.0}
    958     };
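             // Worked example (assuming a 1920x1088 stream at level 4.1): PicWidthInMbs = 120 and
             // FrameHeightInMbs = 68, so maxDPBSize = 1024 * 12288 / (120 * 68 * 384) = 4 frames
             // before the clamping and num_ref_frames adjustments below.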
    959 
    960     int32_t count = sizeof(dpbTable)/sizeof(DPBTable);
    961     float maxDPB = 0;
    962     for (int32_t i = 0; i < count; i++)
    963     {
    964         if (dpbTable[i].level == data->codec_data->level_idc) {
    965             maxDPB = dpbTable[i].maxDPB;
    966             break;
    967         }
    968     }
    969 
    970     int32_t maxDPBSize = maxDPB * 1024 / (
    971         (data->pic_data[0].pic_parms->picture_width_in_mbs_minus1 + 1) *
    972         (data->pic_data[0].pic_parms->picture_height_in_mbs_minus1 + 1) *
    973         384);
    974 
    975     if (maxDPBSize > 16) {
    976         maxDPBSize = 16;
    977     } else if (maxDPBSize == 0) {
    978         maxDPBSize = 3;
    979     }
     980     if (maxDPBSize < data->codec_data->num_ref_frames) {
    981         maxDPBSize = data->codec_data->num_ref_frames;
    982     }
    983 
    984     // add one extra frame for current frame.
    985     maxDPBSize += 1;
    986     ITRACE("maxDPBSize = %d, num_ref_frame = %d", maxDPBSize, data->codec_data->num_ref_frames);
    987     return maxDPBSize;
    988 }
    989 
    990 Decode_Status VideoDecoderAVC::checkHardwareCapability() {
    991 #ifndef USE_GEN_HW
    992     VAStatus vaStatus;
    993     VAConfigAttrib cfgAttribs[2];
    994     cfgAttribs[0].type = VAConfigAttribMaxPictureWidth;
    995     cfgAttribs[1].type = VAConfigAttribMaxPictureHeight;
    996     vaStatus = vaGetConfigAttributes(mVADisplay, VAProfileH264High,
    997             VAEntrypointVLD, cfgAttribs, 2);
    998     CHECK_VA_STATUS("vaGetConfigAttributes");
    999     if (cfgAttribs[0].value * cfgAttribs[1].value < (uint32_t)mVideoFormatInfo.width * (uint32_t)mVideoFormatInfo.height) {
    1000         ETRACE("hardware supports a maximum resolution of %d x %d, smaller than the clip resolution %d x %d",
    1001                 cfgAttribs[0].value, cfgAttribs[1].value, mVideoFormatInfo.width, mVideoFormatInfo.height);
   1002         return DECODE_DRIVER_FAIL;
   1003     }
   1004 #endif
   1005     return DECODE_SUCCESS;
   1006 }
   1007 
   1008 #ifdef USE_AVC_SHORT_FORMAT
   1009 Decode_Status VideoDecoderAVC::getCodecSpecificConfigs(
   1010     VAProfile profile, VAConfigID *config)
   1011 {
   1012     VAStatus vaStatus;
   1013     VAConfigAttrib attrib[2];
   1014 
   1015     if (config == NULL) {
   1016         ETRACE("Invalid parameter!");
   1017         return DECODE_FAIL;
   1018     }
   1019 
   1020     attrib[0].type = VAConfigAttribRTFormat;
   1021     attrib[0].value = VA_RT_FORMAT_YUV420;
   1022     attrib[1].type = VAConfigAttribDecSliceMode;
   1023     attrib[1].value = VA_DEC_SLICE_MODE_NORMAL;
   1024 
    1025     vaStatus = vaGetConfigAttributes(mVADisplay, profile, VAEntrypointVLD, &attrib[1], 1);
   1026 
   1027     if (attrib[1].value & VA_DEC_SLICE_MODE_BASE) {
   1028         ITRACE("AVC short format used");
   1029         attrib[1].value = VA_DEC_SLICE_MODE_BASE;
   1030     } else if (attrib[1].value & VA_DEC_SLICE_MODE_NORMAL) {
    1031         ITRACE("AVC long format used");
   1032         attrib[1].value = VA_DEC_SLICE_MODE_NORMAL;
   1033     } else {
   1034         ETRACE("Unsupported Decode Slice Mode!");
   1035         return DECODE_FAIL;
   1036     }
   1037 
   1038     vaStatus = vaCreateConfig(
   1039             mVADisplay,
   1040             profile,
   1041             VAEntrypointVLD,
   1042             &attrib[0],
   1043             2,
   1044             config);
   1045     CHECK_VA_STATUS("vaCreateConfig");
   1046 
   1047     return DECODE_SUCCESS;
   1048 }
   1049 #endif
   1050