/*
 * Copyright (C) 2017 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "HeifDecoderImpl"

#include "HeifDecoderImpl.h"

#include <stdio.h>

#include <binder/IMemory.h>
#include <binder/MemoryDealer.h>
#include <drm/drm_framework_common.h>
#include <media/IDataSource.h>
#include <media/mediametadataretriever.h>
#include <media/MediaSource.h>
#include <media/stagefright/foundation/ADebug.h>
#include <private/media/VideoFrame.h>
#include <utils/Log.h>
#include <utils/RefBase.h>

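// createHeifDecoder() is the factory entry point for clients of the
// HeifDecoder API. A minimal usage sketch (MyHeifStream is a hypothetical
// client-side HeifStream subclass, and the HeifFrameInfo field names are
// assumed from HeifDecoderAPI.h):
//
//     HeifDecoder* decoder = createHeifDecoder();
//     HeifFrameInfo info;
//     if (decoder->init(new MyHeifStream(...), &info) &&
//             decoder->setOutputColor(kHeifColorFormat_RGBA_8888) &&
//             decoder->decode(&info)) {
//         std::vector<uint8_t> row(info.mBytesPerPixel * info.mWidth);
//         for (uint32_t i = 0; i < info.mHeight; ++i) {
//             decoder->getScanline(row.data());
//         }
//     }
//     delete decoder;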
HeifDecoder* createHeifDecoder() {
    return new android::HeifDecoderImpl();
}

namespace android {

/*
 * HeifDataSource
 *
 * Proxies data requests over the IDataSource interface from MediaMetadataRetriever
 * to the HeifStream interface we received from the heif decoder client.
 */
class HeifDataSource : public BnDataSource {
public:
    /*
     * Constructs HeifDataSource; will take ownership of |stream|.
     */
    HeifDataSource(HeifStream* stream)
        : mStream(stream), mEOS(false),
          mCachedOffset(0), mCachedSize(0), mCacheBufferSize(0) {}

    ~HeifDataSource() override {}

    /*
     * Initializes internal resources.
     */
    bool init();

    sp<IMemory> getIMemory() override { return mMemory; }
    ssize_t readAt(off64_t offset, size_t size) override;
    status_t getSize(off64_t* size) override;
    void close() {}
    uint32_t getFlags() override { return 0; }
    String8 toString() override { return String8("HeifDataSource"); }
    sp<DecryptHandle> DrmInitialization(const char*) override {
        return nullptr;
    }

private:
    enum {
        /*
         * Buffer size for passing the read data to mediaserver. Set to 64K
         * (which is what MediaDataSource Java API's jni implementation uses).
         */
        kBufferSize = 64 * 1024,
        /*
         * Initial and max cache buffer size.
         */
        kInitialCacheBufferSize = 4 * 1024 * 1024,
        kMaxCacheBufferSize = 64 * 1024 * 1024,
    };
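    // Cache state: mCache holds a window of the stream covering bytes
    // [mCachedOffset, mCachedOffset + mCachedSize). mCacheBufferSize is the
    // current capacity of mCache; it grows from kInitialCacheBufferSize up to
    // kMaxCacheBufferSize, after which readAt() rolls the window forward.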
    sp<IMemory> mMemory;
    std::unique_ptr<HeifStream> mStream;
    bool mEOS;
    std::unique_ptr<uint8_t[]> mCache;
    off64_t mCachedOffset;
    size_t mCachedSize;
    size_t mCacheBufferSize;
};

bool HeifDataSource::init() {
    sp<MemoryDealer> memoryDealer =
            new MemoryDealer(kBufferSize, "HeifDataSource");
    mMemory = memoryDealer->allocate(kBufferSize);
    if (mMemory == nullptr) {
        ALOGE("Failed to allocate shared memory!");
        return false;
    }
    mCache.reset(new uint8_t[kInitialCacheBufferSize]);
    if (mCache.get() == nullptr) {
        ALOGE("Failed to allocate cache!");
        return false;
    }
    mCacheBufferSize = kInitialCacheBufferSize;
    return true;
}

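// readAt() services a request for |size| bytes at |offset| on behalf of the
// remote MediaMetadataRetriever; the bytes are written into the shared memory
// region (mMemory) and the return value is the number of bytes copied, or an
// error. Because HeifStream is sequential, we keep a growing cache window and
// only ever read forward from the underlying stream, seeking or rewinding
// first if the request lands before the current window.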
ssize_t HeifDataSource::readAt(off64_t offset, size_t size) {
    ALOGV("readAt: offset=%lld, size=%zu", (long long)offset, size);

    if (offset < mCachedOffset) {
        // try seek, then rewind/skip, fail if none worked
        if (mStream->seek(offset)) {
            ALOGV("readAt: seek to offset=%lld", (long long)offset);
            mCachedOffset = offset;
            mCachedSize = 0;
            mEOS = false;
        } else if (mStream->rewind()) {
            ALOGV("readAt: rewind to offset=0");
            mCachedOffset = 0;
            mCachedSize = 0;
            mEOS = false;
        } else {
            ALOGE("readAt: couldn't seek or rewind!");
            mEOS = true;
        }
    }

    if (mEOS && (offset < mCachedOffset ||
                 offset >= (off64_t)(mCachedOffset + mCachedSize))) {
        ALOGV("readAt: EOS");
        return ERROR_END_OF_STREAM;
    }

    // At this point offset must be >= mCachedOffset; other cases should
    // have been caught above.
    CHECK(offset >= mCachedOffset);

    off64_t resultOffset;
    if (__builtin_add_overflow(offset, size, &resultOffset)) {
        return ERROR_IO;
    }

    if (size == 0) {
        return 0;
    }

    // Can only read a max of kBufferSize
    if (size > kBufferSize) {
        size = kBufferSize;
    }

    // copy from cache if the request falls entirely in cache
    if (offset + size <= mCachedOffset + mCachedSize) {
        memcpy(mMemory->pointer(), mCache.get() + offset - mCachedOffset, size);
        return size;
    }

    // need to fetch more, check if we need to expand the cache buffer.
    if ((off64_t)(offset + size) > mCachedOffset + kMaxCacheBufferSize) {
        // the request is reaching the max cache buffer size; we need to roll
        // the window, and possibly expand the cache buffer.
        size_t newCacheBufferSize = mCacheBufferSize;
        std::unique_ptr<uint8_t[]> newCache;
        uint8_t* dst = mCache.get();
        if (newCacheBufferSize < kMaxCacheBufferSize) {
            newCacheBufferSize = kMaxCacheBufferSize;
            newCache.reset(new uint8_t[newCacheBufferSize]);
            dst = newCache.get();
        }

        // When rolling the cache window, try to keep about half of the old
        // bytes in case the client seeks backward.
        off64_t newCachedOffset = offset - (off64_t)(newCacheBufferSize / 2);
        if (newCachedOffset < mCachedOffset) {
            newCachedOffset = mCachedOffset;
        }

        int64_t newCachedSize = (int64_t)(mCachedOffset + mCachedSize) - newCachedOffset;
        if (newCachedSize > 0) {
            // in this case, the new cache region partially overlaps the old
            // cache; move the portion of the cache we want to save to the
            // beginning of the cache buffer.
            memcpy(dst, mCache.get() + newCachedOffset - mCachedOffset, newCachedSize);
        } else if (newCachedSize < 0) {
            // in this case, the new cache region is entirely outside the old
            // cache; in order to guarantee a sequential read, we need to skip
            // a number of bytes before reading.
            size_t bytesToSkip = -newCachedSize;
            size_t bytesSkipped = mStream->read(nullptr, bytesToSkip);
            if (bytesSkipped != bytesToSkip) {
                // bytesSkipped is invalid; there are not enough bytes to reach
                // the requested offset.
                ALOGE("readAt: skip failed, EOS");

                mEOS = true;
                mCachedOffset = newCachedOffset;
                mCachedSize = 0;
                return ERROR_END_OF_STREAM;
            }
            // set cache size to 0, since we're not keeping any old cache
            newCachedSize = 0;
        }

        if (newCache.get() != nullptr) {
            mCache.reset(newCache.release());
            mCacheBufferSize = newCacheBufferSize;
        }
        mCachedOffset = newCachedOffset;
        mCachedSize = newCachedSize;

        ALOGV("readAt: rolling cache window to (%lld, %zu), cache buffer size %zu",
                (long long)mCachedOffset, mCachedSize, mCacheBufferSize);
    } else {
        // expand cache buffer, but no need to roll the window
        size_t newCacheBufferSize = mCacheBufferSize;
        while (offset + size > mCachedOffset + newCacheBufferSize) {
            newCacheBufferSize *= 2;
        }
        CHECK(newCacheBufferSize <= kMaxCacheBufferSize);
        if (mCacheBufferSize < newCacheBufferSize) {
            uint8_t* newCache = new uint8_t[newCacheBufferSize];
            memcpy(newCache, mCache.get(), mCachedSize);
            mCache.reset(newCache);
            mCacheBufferSize = newCacheBufferSize;

            ALOGV("readAt: current cache window (%lld, %zu), new cache buffer size %zu",
                    (long long) mCachedOffset, mCachedSize, mCacheBufferSize);
        }
    }
    size_t bytesToRead = offset + size - mCachedOffset - mCachedSize;
    size_t bytesRead = mStream->read(mCache.get() + mCachedSize, bytesToRead);
    if (bytesRead > bytesToRead || bytesRead == 0) {
        // bytesRead is invalid
        mEOS = true;
        bytesRead = 0;
    } else if (bytesRead < bytesToRead) {
        // read some bytes but not all, set EOS
        mEOS = true;
    }
    mCachedSize += bytesRead;
    ALOGV("readAt: current cache window (%lld, %zu)",
            (long long) mCachedOffset, mCachedSize);

    // here bytesAvailable could be negative if offset jumped past EOS.
    int64_t bytesAvailable = mCachedOffset + mCachedSize - offset;
    if (bytesAvailable <= 0) {
        return ERROR_END_OF_STREAM;
    }
    if (bytesAvailable < (int64_t)size) {
        size = bytesAvailable;
    }
    memcpy(mMemory->pointer(), mCache.get() + offset - mCachedOffset, size);
    return size;
}

status_t HeifDataSource::getSize(off64_t* size) {
    if (!mStream->hasLength()) {
        *size = -1;
        ALOGE("getSize: not supported!");
        return ERROR_UNSUPPORTED;
    }
    *size = mStream->getLength();
    ALOGV("getSize: size=%lld", (long long)*size);
    return OK;
}

/////////////////////////////////////////////////////////////////////////

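// DecodeThread runs the sliced decode in the background. threadLoop() calls
// decodeAsync() exactly once: decodeAsync() returns false, which tells Thread
// not to run another iteration.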
struct HeifDecoderImpl::DecodeThread : public Thread {
    explicit DecodeThread(HeifDecoderImpl *decoder) : mDecoder(decoder) {}

private:
    HeifDecoderImpl* mDecoder;

    bool threadLoop();

    DISALLOW_EVIL_CONSTRUCTORS(DecodeThread);
};

bool HeifDecoderImpl::DecodeThread::threadLoop() {
    return mDecoder->decodeAsync();
}

/////////////////////////////////////////////////////////////////////////

HeifDecoderImpl::HeifDecoderImpl() :
    // The output color format should always be set via setOutputColor();
    // in case it's not, default to HAL_PIXEL_FORMAT_RGB_565.
    mOutputColor(HAL_PIXEL_FORMAT_RGB_565),
    mCurScanline(0),
    mWidth(0),
    mHeight(0),
    mFrameDecoded(false),
    mHasImage(false),
    mHasVideo(false),
    mAvailableLines(0),
    mNumSlices(1),
    mSliceHeight(0),
    mAsyncDecodeDone(false) {
}

HeifDecoderImpl::~HeifDecoderImpl() {
    if (mThread != nullptr) {
        mThread->join();
    }
}

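// init() wraps the client's HeifStream in a HeifDataSource, hands it to a
// MediaMetadataRetriever, and performs a metadata-only extraction to learn
// the image dimensions, rotation and ICC data. If the image is tiled and
// large enough, it also sets up mSliceHeight/mNumSlices so that decode() can
// produce output in slices.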
bool HeifDecoderImpl::init(HeifStream* stream, HeifFrameInfo* frameInfo) {
    mFrameDecoded = false;
    mFrameMemory.clear();

    sp<HeifDataSource> dataSource = new HeifDataSource(stream);
    if (!dataSource->init()) {
        return false;
    }
    mDataSource = dataSource;

    mRetriever = new MediaMetadataRetriever();
    status_t err = mRetriever->setDataSource(mDataSource, "image/heif");
    if (err != OK) {
        ALOGE("failed to set data source!");

        mRetriever.clear();
        mDataSource.clear();
        return false;
    }
    ALOGV("successfully set data source.");

    const char* hasImage = mRetriever->extractMetadata(METADATA_KEY_HAS_IMAGE);
    const char* hasVideo = mRetriever->extractMetadata(METADATA_KEY_HAS_VIDEO);

    mHasImage = hasImage && !strcasecmp(hasImage, "yes");
    mHasVideo = hasVideo && !strcasecmp(hasVideo, "yes");
    sp<IMemory> sharedMem;
    if (mHasImage) {
        // image index < 0 to retrieve primary image
        sharedMem = mRetriever->getImageAtIndex(
                -1, mOutputColor, true /*metaOnly*/);
    } else if (mHasVideo) {
        sharedMem = mRetriever->getFrameAtTime(0,
                MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC,
                mOutputColor, true /*metaOnly*/);
    }

    if (sharedMem == nullptr || sharedMem->pointer() == nullptr) {
        ALOGE("init: videoFrame is a nullptr");
        return false;
    }

    VideoFrame* videoFrame = static_cast<VideoFrame*>(sharedMem->pointer());

    ALOGV("Meta dimension %dx%d, display %dx%d, angle %d, iccSize %d",
            videoFrame->mWidth,
            videoFrame->mHeight,
            videoFrame->mDisplayWidth,
            videoFrame->mDisplayHeight,
            videoFrame->mRotationAngle,
            videoFrame->mIccSize);

    if (frameInfo != nullptr) {
        frameInfo->set(
                videoFrame->mWidth,
                videoFrame->mHeight,
                videoFrame->mRotationAngle,
                videoFrame->mBytesPerPixel,
                videoFrame->mIccSize,
                videoFrame->getFlattenedIccData());
    }
    mWidth = videoFrame->mWidth;
    mHeight = videoFrame->mHeight;
    if (mHasImage && videoFrame->mTileHeight >= 512 && mWidth >= 3000 && mHeight >= 2000) {
        // Try decoding in slices only if the image has tiles and is big enough.
        mSliceHeight = videoFrame->mTileHeight;
        mNumSlices = (videoFrame->mHeight + mSliceHeight - 1) / mSliceHeight;
        ALOGV("mSliceHeight %u, mNumSlices %zu", mSliceHeight, mNumSlices);
    }
    return true;
}

bool HeifDecoderImpl::getEncodedColor(HeifEncodedColor* /*outColor*/) const {
    ALOGW("getEncodedColor: not implemented!");
    return false;
}

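// Maps the client-facing HeifColorFormat to the HAL pixel format that is
// passed to MediaMetadataRetriever. This only takes effect if it is called
// before decode().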
bool HeifDecoderImpl::setOutputColor(HeifColorFormat heifColor) {
    switch (heifColor) {
        case kHeifColorFormat_RGB565:
        {
            mOutputColor = HAL_PIXEL_FORMAT_RGB_565;
            return true;
        }
        case kHeifColorFormat_RGBA_8888:
        {
            mOutputColor = HAL_PIXEL_FORMAT_RGBA_8888;
            return true;
        }
        case kHeifColorFormat_BGRA_8888:
        {
            mOutputColor = HAL_PIXEL_FORMAT_BGRA_8888;
            return true;
        }
        default:
            break;
    }
    ALOGE("Unsupported output color format %d", heifColor);
    return false;
}

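// decodeAsync() is the producer side of sliced decoding and runs on
// DecodeThread. Slice 0 was already decoded synchronously in decode(); this
// decodes slices 1..mNumSlices-1, publishing each one by advancing
// mAvailableLines and signaling mScanlineReady under mLock. When it finishes
// (or a slice fails), it drops the retriever and data source and returns
// false so the thread exits.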
bool HeifDecoderImpl::decodeAsync() {
    for (size_t i = 1; i < mNumSlices; i++) {
        ALOGV("decodeAsync(): decoding slice %zu", i);
        size_t top = i * mSliceHeight;
        size_t bottom = (i + 1) * mSliceHeight;
        if (bottom > mHeight) {
            bottom = mHeight;
        }
        sp<IMemory> frameMemory = mRetriever->getImageRectAtIndex(
                -1, mOutputColor, 0, top, mWidth, bottom);
        {
            Mutex::Autolock autolock(mLock);

            if (frameMemory == nullptr || frameMemory->pointer() == nullptr) {
                mAsyncDecodeDone = true;
                mScanlineReady.signal();
                break;
            }
            mFrameMemory = frameMemory;
            mAvailableLines = bottom;
            ALOGV("decodeAsync(): available lines %zu", mAvailableLines);
            mScanlineReady.signal();
        }
    }
    // Aggressively clear to avoid holding on to resources
    mRetriever.clear();
    mDataSource.clear();
    return false;
}

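// decode() decodes the frame, or, for large tiled images, the first slice
// plus a background thread for the remaining slices, and fills |frameInfo|.
// It is effectively idempotent: once mFrameDecoded is set, later calls only
// reset the scanline cursor.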
bool HeifDecoderImpl::decode(HeifFrameInfo* frameInfo) {
    // reset scanline pointer
    mCurScanline = 0;

    if (mFrameDecoded) {
        return true;
    }

    // See if we want to decode in slices to allow the client to start
    // scanline processing in parallel with the decode. If this fails,
    // we fall back to decoding the full frame.
    if (mHasImage && mNumSlices > 1) {
        // get first slice and metadata
        sp<IMemory> frameMemory = mRetriever->getImageRectAtIndex(
                -1, mOutputColor, 0, 0, mWidth, mSliceHeight);

        if (frameMemory == nullptr || frameMemory->pointer() == nullptr) {
            ALOGE("decode: metadata is a nullptr");
            return false;
        }

        VideoFrame* videoFrame = static_cast<VideoFrame*>(frameMemory->pointer());

        if (frameInfo != nullptr) {
            frameInfo->set(
                    videoFrame->mWidth,
                    videoFrame->mHeight,
                    videoFrame->mRotationAngle,
                    videoFrame->mBytesPerPixel,
                    videoFrame->mIccSize,
                    videoFrame->getFlattenedIccData());
        }

        mFrameMemory = frameMemory;
        mAvailableLines = mSliceHeight;
        mThread = new DecodeThread(this);
        if (mThread->run("HeifDecode", ANDROID_PRIORITY_FOREGROUND) == OK) {
            mFrameDecoded = true;
            return true;
        }

        // Fall back to decoding without slicing
        mThread.clear();
        mNumSlices = 1;
        mSliceHeight = 0;
        mAvailableLines = 0;
        mFrameMemory.clear();
    }

    if (mHasImage) {
        // image index < 0 to retrieve primary image
        mFrameMemory = mRetriever->getImageAtIndex(-1, mOutputColor);
    } else if (mHasVideo) {
        mFrameMemory = mRetriever->getFrameAtTime(0,
                MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC, mOutputColor);
    }

    if (mFrameMemory == nullptr || mFrameMemory->pointer() == nullptr) {
        ALOGE("decode: videoFrame is a nullptr");
        return false;
    }

    VideoFrame* videoFrame = static_cast<VideoFrame*>(mFrameMemory->pointer());
    if (videoFrame->mSize == 0 ||
            mFrameMemory->size() < videoFrame->getFlattenedSize()) {
        ALOGE("decode: videoFrame size is invalid");
        return false;
    }

    ALOGV("Decoded dimension %dx%d, display %dx%d, angle %d, rowbytes %d, size %d",
            videoFrame->mWidth,
            videoFrame->mHeight,
            videoFrame->mDisplayWidth,
            videoFrame->mDisplayHeight,
            videoFrame->mRotationAngle,
            videoFrame->mRowBytes,
            videoFrame->mSize);

    if (frameInfo != nullptr) {
        frameInfo->set(
                videoFrame->mWidth,
                videoFrame->mHeight,
                videoFrame->mRotationAngle,
                videoFrame->mBytesPerPixel,
                videoFrame->mIccSize,
                videoFrame->getFlattenedIccData());
    }
    mFrameDecoded = true;

    // Aggressively clear to avoid holding on to resources
    mRetriever.clear();
    mDataSource.clear();
    return true;
}

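// Copies the current scanline out of the decoded frame and advances the
// cursor. Callers are expected to have bounds-checked mCurScanline; in
// sliced mode this runs with mLock held.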
bool HeifDecoderImpl::getScanlineInner(uint8_t* dst) {
    if (mFrameMemory == nullptr || mFrameMemory->pointer() == nullptr) {
        return false;
    }
    VideoFrame* videoFrame = static_cast<VideoFrame*>(mFrameMemory->pointer());
    uint8_t* src = videoFrame->getFlattenedData() + videoFrame->mRowBytes * mCurScanline++;
    memcpy(dst, src, videoFrame->mBytesPerPixel * videoFrame->mWidth);
    return true;
}

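// getScanline() copies the next scanline into |dst|. In sliced mode it
// blocks until the background decode has made that line available (or the
// async decode has finished).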
bool HeifDecoderImpl::getScanline(uint8_t* dst) {
    if (mCurScanline >= mHeight) {
        ALOGE("no more scanlines available");
        return false;
    }

    if (mNumSlices > 1) {
        Mutex::Autolock autolock(mLock);

        while (!mAsyncDecodeDone && mCurScanline >= mAvailableLines) {
            mScanlineReady.wait(mLock);
        }
        return (mCurScanline < mAvailableLines) ? getScanlineInner(dst) : false;
    }

    return getScanlineInner(dst);
}

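// Advances the scanline cursor without copying any data; returns the number
// of lines actually skipped, clamped at the image height.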
size_t HeifDecoderImpl::skipScanlines(size_t count) {
    uint32_t oldScanline = mCurScanline;
    mCurScanline += count;
    if (mCurScanline > mHeight) {
        mCurScanline = mHeight;
    }
    return (mCurScanline > oldScanline) ? (mCurScanline - oldScanline) : 0;
}

} // namespace android