Home | History | Annotate | Download | only in libstagefright
      1 /*
      2  * Copyright (C) 2017 The Android Open Source Project
      3  *
      4  * Licensed under the Apache License, Version 2.0 (the "License");
      5  * you may not use this file except in compliance with the License.
      6  * You may obtain a copy of the License at
      7  *
      8  *      http://www.apache.org/licenses/LICENSE-2.0
      9  *
     10  * Unless required by applicable law or agreed to in writing, software
     11  * distributed under the License is distributed on an "AS IS" BASIS,
     12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     13  * See the License for the specific language governing permissions and
     14  * limitations under the License.
     15  */
     16 
     17 //#define LOG_NDEBUG 0
     18 #define LOG_TAG "FrameDecoder"
     19 
     20 #include "include/FrameDecoder.h"
     21 #include <binder/MemoryBase.h>
     22 #include <binder/MemoryHeapBase.h>
     23 #include <gui/Surface.h>
     24 #include <inttypes.h>
     25 #include <media/ICrypto.h>
     26 #include <media/IMediaSource.h>
     27 #include <media/MediaCodecBuffer.h>
     28 #include <media/stagefright/foundation/avc_utils.h>
     29 #include <media/stagefright/foundation/ADebug.h>
     30 #include <media/stagefright/foundation/AMessage.h>
     31 #include <media/stagefright/ColorConverter.h>
     32 #include <media/stagefright/MediaBuffer.h>
     33 #include <media/stagefright/MediaCodec.h>
     34 #include <media/stagefright/MediaDefs.h>
     35 #include <media/stagefright/MediaErrors.h>
     36 #include <media/stagefright/Utils.h>
     37 #include <private/media/VideoFrame.h>
     38 #include <utils/Log.h>
     39 
     40 namespace android {
     41 
     42 static const int64_t kBufferTimeOutUs = 10000ll; // 10 msec
     43 static const size_t kRetryCount = 50; // must be >0
     44 
     45 sp<IMemory> allocVideoFrame(const sp<MetaData>& trackMeta,
     46         int32_t width, int32_t height, int32_t tileWidth, int32_t tileHeight,
     47         int32_t dstBpp, bool metaOnly = false) {
     48     int32_t rotationAngle;
     49     if (!trackMeta->findInt32(kKeyRotation, &rotationAngle)) {
     50         rotationAngle = 0;  // By default, no rotation
     51     }
     52     uint32_t type;
     53     const void *iccData;
     54     size_t iccSize;
     55     if (!trackMeta->findData(kKeyIccProfile, &type, &iccData, &iccSize)){
     56         iccData = NULL;
     57         iccSize = 0;
     58     }
     59 
     60     int32_t sarWidth, sarHeight;
     61     int32_t displayWidth, displayHeight;
     62     if (trackMeta->findInt32(kKeySARWidth, &sarWidth)
     63             && trackMeta->findInt32(kKeySARHeight, &sarHeight)
     64             && sarHeight != 0) {
     65         displayWidth = (width * sarWidth) / sarHeight;
     66         displayHeight = height;
     67     } else if (trackMeta->findInt32(kKeyDisplayWidth, &displayWidth)
     68                 && trackMeta->findInt32(kKeyDisplayHeight, &displayHeight)
     69                 && displayWidth > 0 && displayHeight > 0
     70                 && width > 0 && height > 0) {
     71         ALOGV("found display size %dx%d", displayWidth, displayHeight);
     72     } else {
     73         displayWidth = width;
     74         displayHeight = height;
     75     }
     76 
     77     VideoFrame frame(width, height, displayWidth, displayHeight,
     78             tileWidth, tileHeight, rotationAngle, dstBpp, !metaOnly, iccSize);
     79 
     80     size_t size = frame.getFlattenedSize();
     81     sp<MemoryHeapBase> heap = new MemoryHeapBase(size, 0, "MetadataRetrieverClient");
     82     if (heap == NULL) {
     83         ALOGE("failed to create MemoryDealer");
     84         return NULL;
     85     }
     86     sp<IMemory> frameMem = new MemoryBase(heap, 0, size);
     87     if (frameMem == NULL) {
     88         ALOGE("not enough memory for VideoFrame size=%zu", size);
     89         return NULL;
     90     }
     91     VideoFrame* frameCopy = static_cast<VideoFrame*>(frameMem->pointer());
     92     frameCopy->init(frame, iccData, iccSize);
     93 
     94     return frameMem;
     95 }
     96 
     97 bool findThumbnailInfo(
     98         const sp<MetaData> &trackMeta, int32_t *width, int32_t *height,
     99         uint32_t *type = NULL, const void **data = NULL, size_t *size = NULL) {
    100     uint32_t dummyType;
    101     const void *dummyData;
    102     size_t dummySize;
    103     return trackMeta->findInt32(kKeyThumbnailWidth, width)
    104         && trackMeta->findInt32(kKeyThumbnailHeight, height)
    105         && trackMeta->findData(kKeyThumbnailHVCC,
    106                 type ?: &dummyType, data ?: &dummyData, size ?: &dummySize);
    107 }
    108 
    109 bool findGridInfo(const sp<MetaData> &trackMeta,
    110         int32_t *tileWidth, int32_t *tileHeight, int32_t *gridRows, int32_t *gridCols) {
    111     return trackMeta->findInt32(kKeyTileWidth, tileWidth) && (*tileWidth > 0)
    112         && trackMeta->findInt32(kKeyTileHeight, tileHeight) && (*tileHeight > 0)
    113         && trackMeta->findInt32(kKeyGridRows, gridRows) && (*gridRows > 0)
    114         && trackMeta->findInt32(kKeyGridCols, gridCols) && (*gridCols > 0);
    115 }
    116 
    117 bool getDstColorFormat(
    118         android_pixel_format_t colorFormat,
    119         OMX_COLOR_FORMATTYPE *dstFormat,
    120         int32_t *dstBpp) {
    121     switch (colorFormat) {
    122         case HAL_PIXEL_FORMAT_RGB_565:
    123         {
    124             *dstFormat = OMX_COLOR_Format16bitRGB565;
    125             *dstBpp = 2;
    126             return true;
    127         }
    128         case HAL_PIXEL_FORMAT_RGBA_8888:
    129         {
    130             *dstFormat = OMX_COLOR_Format32BitRGBA8888;
    131             *dstBpp = 4;
    132             return true;
    133         }
    134         case HAL_PIXEL_FORMAT_BGRA_8888:
    135         {
    136             *dstFormat = OMX_COLOR_Format32bitBGRA8888;
    137             *dstBpp = 4;
    138             return true;
    139         }
    140         default:
    141         {
    142             ALOGE("Unsupported color format: %d", colorFormat);
    143             break;
    144         }
    145     }
    146     return false;
    147 }
    148 
    149 //static
    150 sp<IMemory> FrameDecoder::getMetadataOnly(
    151         const sp<MetaData> &trackMeta, int colorFormat, bool thumbnail) {
    152     OMX_COLOR_FORMATTYPE dstFormat;
    153     int32_t dstBpp;
    154     if (!getDstColorFormat(
    155             (android_pixel_format_t)colorFormat, &dstFormat, &dstBpp)) {
    156         return NULL;
    157     }
    158 
    159     int32_t width, height, tileWidth = 0, tileHeight = 0;
    160     if (thumbnail) {
    161         if (!findThumbnailInfo(trackMeta, &width, &height)) {
    162             return NULL;
    163         }
    164     } else {
    165         CHECK(trackMeta->findInt32(kKeyWidth, &width));
    166         CHECK(trackMeta->findInt32(kKeyHeight, &height));
    167 
    168         int32_t gridRows, gridCols;
    169         if (!findGridInfo(trackMeta, &tileWidth, &tileHeight, &gridRows, &gridCols)) {
    170             tileWidth = tileHeight = 0;
    171         }
    172     }
    173     return allocVideoFrame(trackMeta,
    174             width, height, tileWidth, tileHeight, dstBpp, true /*metaOnly*/);
    175 }
    176 
// Base-class constructor: records the codec component name, track metadata
// and media source. The destination format defaults to RGB565 (2 bytes per
// pixel) until init() resolves the caller-requested color format. No codec
// or source work happens here; that is deferred to init().
FrameDecoder::FrameDecoder(
        const AString &componentName,
        const sp<MetaData> &trackMeta,
        const sp<IMediaSource> &source)
    : mComponentName(componentName),
      mTrackMeta(trackMeta),
      mSource(source),
      mDstFormat(OMX_COLOR_Format16bitRGB565),
      mDstBpp(2),
      mHaveMoreInputs(true),
      mFirstSample(true) {
}
    189 
// mDecoder is only assigned after init() has fully succeeded (which includes
// starting the source), so both teardown steps are gated on it being set.
FrameDecoder::~FrameDecoder() {
    if (mDecoder != NULL) {
        mDecoder->release();
        mSource->stop();
    }
}
    196 
    197 status_t FrameDecoder::init(
    198         int64_t frameTimeUs, size_t numFrames, int option, int colorFormat) {
    199     if (!getDstColorFormat(
    200             (android_pixel_format_t)colorFormat, &mDstFormat, &mDstBpp)) {
    201         return ERROR_UNSUPPORTED;
    202     }
    203 
    204     sp<AMessage> videoFormat = onGetFormatAndSeekOptions(
    205             frameTimeUs, numFrames, option, &mReadOptions);
    206     if (videoFormat == NULL) {
    207         ALOGE("video format or seek mode not supported");
    208         return ERROR_UNSUPPORTED;
    209     }
    210 
    211     status_t err;
    212     sp<ALooper> looper = new ALooper;
    213     looper->start();
    214     sp<MediaCodec> decoder = MediaCodec::CreateByComponentName(
    215             looper, mComponentName, &err);
    216     if (decoder.get() == NULL || err != OK) {
    217         ALOGW("Failed to instantiate decoder [%s]", mComponentName.c_str());
    218         return (decoder.get() == NULL) ? NO_MEMORY : err;
    219     }
    220 
    221     err = decoder->configure(
    222             videoFormat, NULL /* surface */, NULL /* crypto */, 0 /* flags */);
    223     if (err != OK) {
    224         ALOGW("configure returned error %d (%s)", err, asString(err));
    225         decoder->release();
    226         return err;
    227     }
    228 
    229     err = decoder->start();
    230     if (err != OK) {
    231         ALOGW("start returned error %d (%s)", err, asString(err));
    232         decoder->release();
    233         return err;
    234     }
    235 
    236     err = mSource->start();
    237     if (err != OK) {
    238         ALOGW("source failed to start: %d (%s)", err, asString(err));
    239         decoder->release();
    240         return err;
    241     }
    242     mDecoder = decoder;
    243 
    244     return OK;
    245 }
    246 
    247 sp<IMemory> FrameDecoder::extractFrame(FrameRect *rect) {
    248     status_t err = onExtractRect(rect);
    249     if (err == OK) {
    250         err = extractInternal();
    251     }
    252     if (err != OK) {
    253         return NULL;
    254     }
    255 
    256     return mFrames.size() > 0 ? mFrames[0] : NULL;
    257 }
    258 
    259 status_t FrameDecoder::extractFrames(std::vector<sp<IMemory> >* frames) {
    260     status_t err = extractInternal();
    261     if (err != OK) {
    262         return err;
    263     }
    264 
    265     for (size_t i = 0; i < mFrames.size(); i++) {
    266         frames->push_back(mFrames[i]);
    267     }
    268     return OK;
    269 }
    270 
// Core decode loop shared by video and image decoding.
//
// Alternates between two phases until the subclass reports it is done (via
// onOutputReceived's *done) or an error occurs:
//   1. Fill the codec with as many input samples as it will accept.
//   2. Block (up to kBufferTimeOutUs) on one decoded output and hand it to
//      the subclass.
// Timeouts on either side are tolerated up to kRetryCount times in total
// for the output side; input-side timeouts simply fall through to draining
// outputs, which frees up input buffers.
status_t FrameDecoder::extractInternal() {
    status_t err = OK;
    bool done = false;
    size_t retriesLeft = kRetryCount;
    do {
        size_t index;
        int64_t ptsUs = 0ll;
        uint32_t flags = 0;

        // Queue as many inputs as we possibly can, then block on dequeuing
        // outputs. After getting each output, come back and queue the inputs
        // again to keep the decoder busy.
        while (mHaveMoreInputs) {
            // Non-blocking dequeue: a timeout here just means the codec's
            // input side is full, which is not an error while retries remain.
            err = mDecoder->dequeueInputBuffer(&index, 0);
            if (err != OK) {
                ALOGV("Timed out waiting for input");
                if (retriesLeft) {
                    err = OK;
                }
                break;
            }
            sp<MediaCodecBuffer> codecBuffer;
            err = mDecoder->getInputBuffer(index, &codecBuffer);
            if (err != OK) {
                ALOGE("failed to get input buffer %zu", index);
                break;
            }

            MediaBufferBase *mediaBuffer = NULL;

            err = mSource->read(&mediaBuffer, &mReadOptions);
            // Only the first read should seek; subsequent reads continue
            // sequentially from there.
            mReadOptions.clearSeekTo();
            if (err != OK) {
                ALOGW("Input Error or EOS");
                mHaveMoreInputs = false;
                // EOS after at least one sample is a normal way to finish.
                if (!mFirstSample && err == ERROR_END_OF_STREAM) {
                    err = OK;
                }
                break;
            }

            if (mediaBuffer->range_length() > codecBuffer->capacity()) {
                ALOGE("buffer size (%zu) too large for codec input size (%zu)",
                        mediaBuffer->range_length(), codecBuffer->capacity());
                mHaveMoreInputs = false;
                err = BAD_VALUE;
            } else {
                codecBuffer->setRange(0, mediaBuffer->range_length());

                CHECK(mediaBuffer->meta_data().findInt64(kKeyTime, &ptsUs));
                memcpy(codecBuffer->data(),
                        (const uint8_t*)mediaBuffer->data() + mediaBuffer->range_offset(),
                        mediaBuffer->range_length());

                // Subclass may set BUFFER_FLAG_EOS here (e.g. single-IDR decode).
                onInputReceived(codecBuffer, mediaBuffer->meta_data(), mFirstSample, &flags);
                mFirstSample = false;
            }

            mediaBuffer->release();

            // mHaveMoreInputs is still true on the success path; it only went
            // false above on the oversized-buffer error, where we must not queue.
            if (mHaveMoreInputs) {
                ALOGV("QueueInput: size=%zu ts=%" PRId64 " us flags=%x",
                        codecBuffer->size(), ptsUs, flags);

                err = mDecoder->queueInputBuffer(
                        index,
                        codecBuffer->offset(),
                        codecBuffer->size(),
                        ptsUs,
                        flags);

                if (flags & MediaCodec::BUFFER_FLAG_EOS) {
                    mHaveMoreInputs = false;
                }
            }
        }

        while (err == OK) {
            size_t offset, size;
            // wait for a decoded buffer
            err = mDecoder->dequeueOutputBuffer(
                    &index,
                    &offset,
                    &size,
                    &ptsUs,
                    &flags,
                    kBufferTimeOutUs);

            if (err == INFO_FORMAT_CHANGED) {
                ALOGV("Received format change");
                // Cache the new format; the loop continues to dequeue.
                err = mDecoder->getOutputFormat(&mOutputFormat);
            } else if (err == INFO_OUTPUT_BUFFERS_CHANGED) {
                ALOGV("Output buffers changed");
                err = OK;
            } else {
                if (err == -EAGAIN /* INFO_TRY_AGAIN_LATER */ && --retriesLeft > 0) {
                    ALOGV("Timed-out waiting for output.. retries left = %zu", retriesLeft);
                    err = OK;
                } else if (err == OK) {
                    // If we're seeking with CLOSEST option and obtained a valid targetTimeUs
                    // from the extractor, decode to the specified frame. Otherwise we're done.
                    ALOGV("Received an output buffer, timeUs=%lld", (long long)ptsUs);
                    sp<MediaCodecBuffer> videoFrameBuffer;
                    err = mDecoder->getOutputBuffer(index, &videoFrameBuffer);
                    if (err != OK) {
                        ALOGE("failed to get output buffer %zu", index);
                        break;
                    }
                    // Subclass converts/copies the frame and decides if we're done.
                    err = onOutputReceived(videoFrameBuffer, mOutputFormat, ptsUs, &done);
                    mDecoder->releaseOutputBuffer(index);
                } else {
                    ALOGW("Received error %d (%s) instead of output", err, asString(err));
                    done = true;
                }
                // Break back to the input-queuing phase after any terminal
                // event (output consumed, retry granted, or hard error).
                break;
            }
        }
    } while (err == OK && !done);

    if (err != OK) {
        ALOGE("failed to get video frame (err %d)", err);
    }

    return err;
}
    396 
    397 //////////////////////////////////////////////////////////////////////
    398 
// Video-track frame decoder. Defaults to seeking to the previous sync frame;
// mTargetTimeUs stays -1 (meaning "no target, output the first frame") until
// a CLOSEST/FRAME_INDEX seek provides a target time from the extractor.
VideoFrameDecoder::VideoFrameDecoder(
        const AString &componentName,
        const sp<MetaData> &trackMeta,
        const sp<IMediaSource> &source)
    : FrameDecoder(componentName, trackMeta, source),
      mIsAvcOrHevc(false),
      mSeekMode(MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC),
      mTargetTimeUs(-1ll),
      mNumFrames(0),
      mNumFramesDecoded(0) {
}
    410 
    411 sp<AMessage> VideoFrameDecoder::onGetFormatAndSeekOptions(
    412         int64_t frameTimeUs, size_t numFrames, int seekMode, MediaSource::ReadOptions *options) {
    413     mSeekMode = static_cast<MediaSource::ReadOptions::SeekMode>(seekMode);
    414     if (mSeekMode < MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC ||
    415             mSeekMode > MediaSource::ReadOptions::SEEK_FRAME_INDEX) {
    416         ALOGE("Unknown seek mode: %d", mSeekMode);
    417         return NULL;
    418     }
    419     mNumFrames = numFrames;
    420 
    421     const char *mime;
    422     if (!trackMeta()->findCString(kKeyMIMEType, &mime)) {
    423         ALOGE("Could not find mime type");
    424         return NULL;
    425     }
    426 
    427     mIsAvcOrHevc = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC)
    428             || !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC);
    429 
    430     if (frameTimeUs < 0) {
    431         int64_t thumbNailTime;
    432         if (!trackMeta()->findInt64(kKeyThumbnailTime, &thumbNailTime)
    433                 || thumbNailTime < 0) {
    434             thumbNailTime = 0;
    435         }
    436         options->setSeekTo(thumbNailTime, mSeekMode);
    437     } else {
    438         options->setSeekTo(frameTimeUs, mSeekMode);
    439     }
    440 
    441     sp<AMessage> videoFormat;
    442     if (convertMetaDataToMessage(trackMeta(), &videoFormat) != OK) {
    443         ALOGE("b/23680780");
    444         ALOGW("Failed to convert meta data to message");
    445         return NULL;
    446     }
    447 
    448     // TODO: Use Flexible color instead
    449     videoFormat->setInt32("color-format", OMX_COLOR_FormatYUV420Planar);
    450 
    451     // For the thumbnail extraction case, try to allocate single buffer in both
    452     // input and output ports, if seeking to a sync frame. NOTE: This request may
    453     // fail if component requires more than that for decoding.
    454     bool isSeekingClosest = (mSeekMode == MediaSource::ReadOptions::SEEK_CLOSEST)
    455             || (mSeekMode == MediaSource::ReadOptions::SEEK_FRAME_INDEX);
    456     if (!isSeekingClosest) {
    457         videoFormat->setInt32("android._num-input-buffers", 1);
    458         videoFormat->setInt32("android._num-output-buffers", 1);
    459     }
    460     return videoFormat;
    461 }
    462 
    463 status_t VideoFrameDecoder::onInputReceived(
    464         const sp<MediaCodecBuffer> &codecBuffer,
    465         MetaDataBase &sampleMeta, bool firstSample, uint32_t *flags) {
    466     bool isSeekingClosest = (mSeekMode == MediaSource::ReadOptions::SEEK_CLOSEST)
    467             || (mSeekMode == MediaSource::ReadOptions::SEEK_FRAME_INDEX);
    468 
    469     if (firstSample && isSeekingClosest) {
    470         sampleMeta.findInt64(kKeyTargetTime, &mTargetTimeUs);
    471         ALOGV("Seeking closest: targetTimeUs=%lld", (long long)mTargetTimeUs);
    472     }
    473 
    474     if (mIsAvcOrHevc && !isSeekingClosest
    475             && IsIDR(codecBuffer->data(), codecBuffer->size())) {
    476         // Only need to decode one IDR frame, unless we're seeking with CLOSEST
    477         // option, in which case we need to actually decode to targetTimeUs.
    478         *flags |= MediaCodec::BUFFER_FLAG_EOS;
    479     }
    480     return OK;
    481 }
    482 
    483 status_t VideoFrameDecoder::onOutputReceived(
    484         const sp<MediaCodecBuffer> &videoFrameBuffer,
    485         const sp<AMessage> &outputFormat,
    486         int64_t timeUs, bool *done) {
    487     bool shouldOutput = (mTargetTimeUs < 0ll) || (timeUs >= mTargetTimeUs);
    488 
    489     // If this is not the target frame, skip color convert.
    490     if (!shouldOutput) {
    491         *done = false;
    492         return OK;
    493     }
    494 
    495     *done = (++mNumFramesDecoded >= mNumFrames);
    496 
    497     if (outputFormat == NULL) {
    498         return ERROR_MALFORMED;
    499     }
    500 
    501     int32_t width, height;
    502     CHECK(outputFormat->findInt32("width", &width));
    503     CHECK(outputFormat->findInt32("height", &height));
    504 
    505     int32_t crop_left, crop_top, crop_right, crop_bottom;
    506     if (!outputFormat->findRect("crop", &crop_left, &crop_top, &crop_right, &crop_bottom)) {
    507         crop_left = crop_top = 0;
    508         crop_right = width - 1;
    509         crop_bottom = height - 1;
    510     }
    511 
    512     sp<IMemory> frameMem = allocVideoFrame(
    513             trackMeta(),
    514             (crop_right - crop_left + 1),
    515             (crop_bottom - crop_top + 1),
    516             0,
    517             0,
    518             dstBpp());
    519     addFrame(frameMem);
    520     VideoFrame* frame = static_cast<VideoFrame*>(frameMem->pointer());
    521 
    522     int32_t srcFormat;
    523     CHECK(outputFormat->findInt32("color-format", &srcFormat));
    524 
    525     ColorConverter converter((OMX_COLOR_FORMATTYPE)srcFormat, dstFormat());
    526 
    527     if (converter.isValid()) {
    528         converter.convert(
    529                 (const uint8_t *)videoFrameBuffer->data(),
    530                 width, height,
    531                 crop_left, crop_top, crop_right, crop_bottom,
    532                 frame->getFlattenedData(),
    533                 frame->mWidth,
    534                 frame->mHeight,
    535                 crop_left, crop_top, crop_right, crop_bottom);
    536         return OK;
    537     }
    538 
    539     ALOGE("Unable to convert from format 0x%08x to 0x%08x",
    540                 srcFormat, dstFormat());
    541     return ERROR_UNSUPPORTED;
    542 }
    543 
    544 ////////////////////////////////////////////////////////////////////////
    545 
// Image-track (HEIF) frame decoder. Grid defaults to a single 1x1 "tile"
// (mTileWidth/mTileHeight of 0 mean "no tiling"); the real grid geometry is
// discovered in onGetFormatAndSeekOptions(). mFrame is a raw view into the
// IMemory allocated on first output and is not owned separately.
ImageDecoder::ImageDecoder(
        const AString &componentName,
        const sp<MetaData> &trackMeta,
        const sp<IMediaSource> &source)
    : FrameDecoder(componentName, trackMeta, source),
      mFrame(NULL),
      mWidth(0),
      mHeight(0),
      mGridRows(1),
      mGridCols(1),
      mTileWidth(0),
      mTileHeight(0),
      mTilesDecoded(0),
      mTargetTiles(0) {
}
    561 
// Builds the codec input format for an image track.
//
// Two special cases are handled via an override of the track metadata:
//   - frameTimeUs < 0: decode the stand-alone thumbnail instead of the main
//     image (thumbnail dimensions + HVCC, seek time -1).
//   - tiled HEIF: the codec is configured with the tile dimensions, and the
//     grid geometry is recorded so outputs can be stitched into the full
//     picture later.
// Returns NULL if the thumbnail was requested but absent, or if the metadata
// cannot be converted to a codec format.
sp<AMessage> ImageDecoder::onGetFormatAndSeekOptions(
        int64_t frameTimeUs, size_t /*numFrames*/,
        int /*seekMode*/, MediaSource::ReadOptions *options) {
    sp<MetaData> overrideMeta;
    if (frameTimeUs < 0) {
        uint32_t type;
        const void *data;
        size_t size;

        // if we have a stand-alone thumbnail, set up the override meta,
        // and set seekTo time to -1.
        if (!findThumbnailInfo(trackMeta(), &mWidth, &mHeight, &type, &data, &size)) {
            ALOGE("Thumbnail not available");
            return NULL;
        }
        overrideMeta = new MetaData(*(trackMeta()));
        overrideMeta->remove(kKeyDisplayWidth);
        overrideMeta->remove(kKeyDisplayHeight);
        overrideMeta->setInt32(kKeyWidth, mWidth);
        overrideMeta->setInt32(kKeyHeight, mHeight);
        overrideMeta->setData(kKeyHVCC, type, data, size);
        options->setSeekTo(-1);
    } else {
        CHECK(trackMeta()->findInt32(kKeyWidth, &mWidth));
        CHECK(trackMeta()->findInt32(kKeyHeight, &mHeight));

        options->setSeekTo(frameTimeUs);
    }

    mGridRows = mGridCols = 1;
    // Only the main image (not the thumbnail) can be tiled.
    if (overrideMeta == NULL) {
        // check if we're dealing with a tiled heif
        int32_t tileWidth, tileHeight, gridRows, gridCols;
        if (findGridInfo(trackMeta(), &tileWidth, &tileHeight, &gridRows, &gridCols)) {
            // Sanity check: the grid must cover at least the full picture.
            if (mWidth <= tileWidth * gridCols && mHeight <= tileHeight * gridRows) {
                ALOGV("grid: %dx%d, tile size: %dx%d, picture size: %dx%d",
                        gridCols, gridRows, tileWidth, tileHeight, mWidth, mHeight);

                overrideMeta = new MetaData(*(trackMeta()));
                overrideMeta->setInt32(kKeyWidth, tileWidth);
                overrideMeta->setInt32(kKeyHeight, tileHeight);
                mTileWidth = tileWidth;
                mTileHeight = tileHeight;
                mGridCols = gridCols;
                mGridRows = gridRows;
            } else {
                // Bad grid info: fall back to decoding as a single image.
                ALOGW("ignore bad grid: %dx%d, tile size: %dx%d, picture size: %dx%d",
                        gridCols, gridRows, tileWidth, tileHeight, mWidth, mHeight);
            }
        }
        if (overrideMeta == NULL) {
            overrideMeta = trackMeta();
        }
    }
    mTargetTiles = mGridCols * mGridRows;

    sp<AMessage> videoFormat;
    if (convertMetaDataToMessage(overrideMeta, &videoFormat) != OK) {
        ALOGE("b/23680780");
        ALOGW("Failed to convert meta data to message");
        return NULL;
    }

    // TODO: Use Flexible color instead
    videoFormat->setInt32("color-format", OMX_COLOR_FormatYUV420Planar);

    // Untiled image: a single input/output buffer suffices.
    if ((mGridRows == 1) && (mGridCols == 1)) {
        videoFormat->setInt32("android._num-input-buffers", 1);
        videoFormat->setInt32("android._num-output-buffers", 1);
    }
    return videoFormat;
}
    634 
    635 status_t ImageDecoder::onExtractRect(FrameRect *rect) {
    636     // TODO:
    637     // This callback is for verifying whether we can decode the rect,
    638     // and if so, set up the internal variables for decoding.
    639     // Currently, rect decoding is restricted to sequentially decoding one
    640     // row of tiles at a time. We can't decode arbitrary rects, as the image
    641     // track doesn't yet support seeking by tiles. So all we do here is to
    642     // verify the rect against what we expect.
    643     // When seeking by tile is supported, this code should be updated to
    644     // set the seek parameters.
    645     if (rect == NULL) {
    646         if (mTilesDecoded > 0) {
    647             return ERROR_UNSUPPORTED;
    648         }
    649         mTargetTiles = mGridRows * mGridCols;
    650         return OK;
    651     }
    652 
    653     if (mTileWidth <= 0 || mTileHeight <=0) {
    654         return ERROR_UNSUPPORTED;
    655     }
    656 
    657     int32_t row = mTilesDecoded / mGridCols;
    658     int32_t expectedTop = row * mTileHeight;
    659     int32_t expectedBot = (row + 1) * mTileHeight;
    660     if (expectedBot > mHeight) {
    661         expectedBot = mHeight;
    662     }
    663     if (rect->left != 0 || rect->top != expectedTop
    664             || rect->right != mWidth || rect->bottom != expectedBot) {
    665         ALOGE("currently only support sequential decoding of slices");
    666         return ERROR_UNSUPPORTED;
    667     }
    668 
    669     // advance one row
    670     mTargetTiles = mTilesDecoded + mGridCols;
    671     return OK;
    672 }
    673 
    674 status_t ImageDecoder::onOutputReceived(
    675         const sp<MediaCodecBuffer> &videoFrameBuffer,
    676         const sp<AMessage> &outputFormat, int64_t /*timeUs*/, bool *done) {
    677     if (outputFormat == NULL) {
    678         return ERROR_MALFORMED;
    679     }
    680 
    681     int32_t width, height;
    682     CHECK(outputFormat->findInt32("width", &width));
    683     CHECK(outputFormat->findInt32("height", &height));
    684 
    685     if (mFrame == NULL) {
    686         sp<IMemory> frameMem = allocVideoFrame(
    687                 trackMeta(), mWidth, mHeight, mTileWidth, mTileHeight, dstBpp());
    688         mFrame = static_cast<VideoFrame*>(frameMem->pointer());
    689 
    690         addFrame(frameMem);
    691     }
    692 
    693     int32_t srcFormat;
    694     CHECK(outputFormat->findInt32("color-format", &srcFormat));
    695 
    696     ColorConverter converter((OMX_COLOR_FORMATTYPE)srcFormat, dstFormat());
    697 
    698     int32_t dstLeft, dstTop, dstRight, dstBottom;
    699     dstLeft = mTilesDecoded % mGridCols * width;
    700     dstTop = mTilesDecoded / mGridCols * height;
    701     dstRight = dstLeft + width - 1;
    702     dstBottom = dstTop + height - 1;
    703 
    704     int32_t crop_left, crop_top, crop_right, crop_bottom;
    705     if (!outputFormat->findRect("crop", &crop_left, &crop_top, &crop_right, &crop_bottom)) {
    706         crop_left = crop_top = 0;
    707         crop_right = width - 1;
    708         crop_bottom = height - 1;
    709     }
    710 
    711     // apply crop on bottom-right
    712     // TODO: need to move this into the color converter itself.
    713     if (dstRight >= mWidth) {
    714         crop_right = mWidth - dstLeft - 1;
    715         dstRight = dstLeft + crop_right;
    716     }
    717     if (dstBottom >= mHeight) {
    718         crop_bottom = mHeight - dstTop - 1;
    719         dstBottom = dstTop + crop_bottom;
    720     }
    721 
    722     *done = (++mTilesDecoded >= mTargetTiles);
    723 
    724     if (converter.isValid()) {
    725         converter.convert(
    726                 (const uint8_t *)videoFrameBuffer->data(),
    727                 width, height,
    728                 crop_left, crop_top, crop_right, crop_bottom,
    729                 mFrame->getFlattenedData(),
    730                 mFrame->mWidth,
    731                 mFrame->mHeight,
    732                 dstLeft, dstTop, dstRight, dstBottom);
    733         return OK;
    734     }
    735 
    736     ALOGE("Unable to convert from format 0x%08x to 0x%08x",
    737                 srcFormat, dstFormat());
    738     return ERROR_UNSUPPORTED;
    739 }
    740 
    741 }  // namespace android
    742