/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <string.h>
#include "JNIHelpers.h"
#include "utils/log.h"
#include "utils/math.h"
#include "webp/format_constants.h"

#include "FrameSequence_webp.h"

#define WEBP_DEBUG 0

////////////////////////////////////////////////////////////////////////////////
// Frame sequence
////////////////////////////////////////////////////////////////////////////////

static uint32_t GetLE32(const uint8_t* const data) {
    return MKFOURCC(data[0], data[1], data[2], data[3]);
}

// Returns true if the frame covers the full canvas.
static bool isFullFrame(const WebPIterator& frame, int canvasWidth, int canvasHeight) {
    return (frame.width == canvasWidth && frame.height == canvasHeight);
}

// Returns true if the rectangle defined by 'frame' contains pixel (x, y).
static bool FrameContainsPixel(const WebPIterator& frame, int x, int y) {
    const int left = frame.x_offset;
    const int right = left + frame.width;
    const int top = frame.y_offset;
    const int bottom = top + frame.height;
    return x >= left && x < right && y >= top && y < bottom;
}

// Construct mIsKeyFrame array.
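// A frame is treated as a key frame when it can be rendered without any pixels
// carried over from earlier frames, i.e. the canvas may be cleared and the
// frame decoded in isolation.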
void FrameSequence_webp::constructDependencyChain() {
    const size_t frameCount = getFrameCount();
    mIsKeyFrame = new bool[frameCount];
    const int canvasWidth = getWidth();
    const int canvasHeight = getHeight();

    WebPIterator prev;
    WebPIterator curr;

    // Note: WebPDemuxGetFrame() uses base-1 counting.
    int ok = WebPDemuxGetFrame(mDemux, 1, &curr);
    ALOG_ASSERT(ok, "Could not retrieve frame# 0");
    mIsKeyFrame[0] = true;  // 0th frame is always a key frame.
    for (size_t i = 1; i < frameCount; i++) {
        prev = curr;
        ok = WebPDemuxGetFrame(mDemux, i + 1, &curr);  // Get ith frame.
        ALOG_ASSERT(ok, "Could not retrieve frame# %zu", i);

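        // The current frame stands on its own if it covers the whole canvas and
        // nothing underneath can show through (opaque, or blending disabled).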
        if ((!curr.has_alpha || curr.blend_method == WEBP_MUX_NO_BLEND) &&
                isFullFrame(curr, canvasWidth, canvasHeight)) {
            mIsKeyFrame[i] = true;
        } else {
            mIsKeyFrame[i] = (prev.dispose_method == WEBP_MUX_DISPOSE_BACKGROUND) &&
                    (isFullFrame(prev, canvasWidth, canvasHeight) || mIsKeyFrame[i - 1]);
        }
    }
    WebPDemuxReleaseIterator(&prev);
    WebPDemuxReleaseIterator(&curr);

#if WEBP_DEBUG
    ALOGD("Dependency chain:");
    for (size_t i = 0; i < frameCount; i++) {
        ALOGD("Frame# %zu: %s", i, mIsKeyFrame[i] ? "Key frame" : "NOT a key frame");
    }
#endif
}

FrameSequence_webp::FrameSequence_webp(Stream* stream) {
    // Read RIFF header to get file size.
    uint8_t riff_header[RIFF_HEADER_SIZE];
    if (stream->read(riff_header, RIFF_HEADER_SIZE) != RIFF_HEADER_SIZE) {
        ALOGE("WebP header load failed");
        return;
    }
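    // The 32-bit little-endian value after the 'RIFF' tag holds the size of
    // everything that follows the 8-byte chunk header, so the total file size
    // is that value plus CHUNK_HEADER_SIZE.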
    mData.size = CHUNK_HEADER_SIZE + GetLE32(riff_header + TAG_SIZE);
    mData.bytes = new uint8_t[mData.size];
    memcpy((void*)mData.bytes, riff_header, RIFF_HEADER_SIZE);

    // Read the rest of the bytes.
    void* remaining_bytes = (void*)(mData.bytes + RIFF_HEADER_SIZE);
    size_t remaining_size = mData.size - RIFF_HEADER_SIZE;
    if (stream->read(remaining_bytes, remaining_size) != remaining_size) {
        ALOGE("WebP full load failed");
        return;
    }

    // Construct demux.
    mDemux = WebPDemux(&mData);
    if (!mDemux) {
        ALOGE("Parsing of WebP container file failed");
        return;
    }
    mLoopCount = WebPDemuxGetI(mDemux, WEBP_FF_LOOP_COUNT);
    mFormatFlags = WebPDemuxGetI(mDemux, WEBP_FF_FORMAT_FLAGS);
#if WEBP_DEBUG
    ALOGD("FrameSequence_webp created with size = %d x %d, number of frames = %d, flags = 0x%X",
          getWidth(), getHeight(), getFrameCount(), mFormatFlags);
#endif
    constructDependencyChain();
}

FrameSequence_webp::~FrameSequence_webp() {
    WebPDemuxDelete(mDemux);
    delete[] mData.bytes;
    delete[] mIsKeyFrame;
}

FrameSequenceState* FrameSequence_webp::createState() const {
    return new FrameSequenceState_webp(*this);
}

////////////////////////////////////////////////////////////////////////////////
// draw helpers
////////////////////////////////////////////////////////////////////////////////

static bool willBeCleared(const WebPIterator& iter) {
    return iter.dispose_method == WEBP_MUX_DISPOSE_BACKGROUND;
}

// Returns true if the area of 'target' completely covers the area of 'covered'.
static bool checkIfCover(const WebPIterator& target, const WebPIterator& covered) {
    const int covered_x_max = covered.x_offset + covered.width;
    const int target_x_max = target.x_offset + target.width;
    const int covered_y_max = covered.y_offset + covered.height;
    const int target_y_max = target.y_offset + target.height;
    return target.x_offset <= covered.x_offset
           && covered_x_max <= target_x_max
           && target.y_offset <= covered.y_offset
           && covered_y_max <= target_y_max;
}

// Clear all pixels in a line to transparent.
static void clearLine(Color8888* dst, int width) {
    memset(dst, 0, width * sizeof(*dst));  // Note: Assumes TRANSPARENT == 0x0.
}

// Copy all pixels from 'src' to 'dst'.
static void copyFrame(const Color8888* src, int srcStride, Color8888* dst, int dstStride,
        int width, int height) {
    for (int y = 0; y < height; y++) {
        memcpy(dst, src, width * sizeof(*dst));
        src += srcStride;
        dst += dstStride;
    }
}

////////////////////////////////////////////////////////////////////////////////
// Frame sequence state
////////////////////////////////////////////////////////////////////////////////

FrameSequenceState_webp::FrameSequenceState_webp(const FrameSequence_webp& frameSequence) :
        mFrameSequence(frameSequence) {
    WebPInitDecoderConfig(&mDecoderConfig);
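    // Decode directly into caller-provided memory rather than letting libwebp
    // allocate its own output buffer.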
    mDecoderConfig.output.is_external_memory = 1;
    mDecoderConfig.output.colorspace = MODE_rgbA;  // Pre-multiplied alpha mode.
    const int canvasWidth = mFrameSequence.getWidth();
    const int canvasHeight = mFrameSequence.getHeight();
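    // Scratch canvas used by drawFrame() when intermediate frames must be
    // composed without clobbering the caller's output buffer.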
    mPreservedBuffer = new Color8888[canvasWidth * canvasHeight];
}

FrameSequenceState_webp::~FrameSequenceState_webp() {
    delete[] mPreservedBuffer;
}

void FrameSequenceState_webp::initializeFrame(const WebPIterator& currIter, Color8888* currBuffer,
        int currStride, const WebPIterator& prevIter, const Color8888* prevBuffer, int prevStride) {
    const int canvasWidth = mFrameSequence.getWidth();
    const int canvasHeight = mFrameSequence.getHeight();
    const bool currFrameIsKeyFrame = mFrameSequence.isKeyFrame(currIter.frame_num - 1);

    if (currFrameIsKeyFrame) {  // Clear canvas.
        for (int y = 0; y < canvasHeight; y++) {
            Color8888* dst = currBuffer + y * currStride;
            clearLine(dst, canvasWidth);
        }
    } else {
        // Preserve previous frame as starting state of current frame.
        copyFrame(prevBuffer, prevStride, currBuffer, currStride, canvasWidth, canvasHeight);

        // Dispose previous frame rectangle to Background if needed.
        bool prevFrameCompletelyCovered =
                (!currIter.has_alpha || currIter.blend_method == WEBP_MUX_NO_BLEND) &&
                checkIfCover(currIter, prevIter);
        if ((prevIter.dispose_method == WEBP_MUX_DISPOSE_BACKGROUND) &&
                !prevFrameCompletelyCovered) {
            Color8888* dst = currBuffer + prevIter.x_offset + prevIter.y_offset * currStride;
            for (int j = 0; j < prevIter.height; j++) {
                clearLine(dst, prevIter.width);
                dst += currStride;
            }
        }
    }
}

bool FrameSequenceState_webp::decodeFrame(const WebPIterator& currIter, Color8888* currBuffer,
        int currStride, const WebPIterator& prevIter, const Color8888* prevBuffer, int prevStride) {
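    // Point the decoder's external output at the frame's sub-rectangle of the
    // canvas; the RGBA stride and size are in bytes (4 bytes per pixel).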
    Color8888* dst = currBuffer + currIter.x_offset + currIter.y_offset * currStride;
    mDecoderConfig.output.u.RGBA.rgba = (uint8_t*)dst;
    mDecoderConfig.output.u.RGBA.stride = currStride * 4;
    mDecoderConfig.output.u.RGBA.size = mDecoderConfig.output.u.RGBA.stride * currIter.height;

    const WebPData& currFrame = currIter.fragment;
    if (WebPDecode(currFrame.bytes, currFrame.size, &mDecoderConfig) != VP8_STATUS_OK) {
        return false;
    }

    const int canvasWidth = mFrameSequence.getWidth();
    const int canvasHeight = mFrameSequence.getHeight();
    const bool currFrameIsKeyFrame = mFrameSequence.isKeyFrame(currIter.frame_num - 1);
    // During the decoding of the current frame, we may have set some pixels to be transparent
    // (i.e. alpha < 255). However, the value of each of these pixels should have been determined
    // by blending it against the value of that pixel in the previous frame if WEBP_MUX_BLEND was
    // specified. So, we correct these pixels based on the disposal method of the previous frame
    // and the previous frame buffer.
    if (currIter.blend_method == WEBP_MUX_BLEND && !currFrameIsKeyFrame) {
        if (prevIter.dispose_method == WEBP_MUX_DISPOSE_NONE) {
            for (int y = 0; y < currIter.height; y++) {
                const int canvasY = currIter.y_offset + y;
                for (int x = 0; x < currIter.width; x++) {
                    const int canvasX = currIter.x_offset + x;
                    Color8888& currPixel = currBuffer[canvasY * currStride + canvasX];
                    // FIXME: Use alpha-blending when alpha is between 0 and 255.
                    if (!(currPixel & COLOR_8888_ALPHA_MASK)) {
                        const Color8888 prevPixel = prevBuffer[canvasY * prevStride + canvasX];
                        currPixel = prevPixel;
                    }
                }
            }
        } else {  // prevIter.dispose_method == WEBP_MUX_DISPOSE_BACKGROUND
            // Need to restore transparent pixels to what they were just after frame initialization.
            // That is:
            //   * Transparent if it belongs to the previous frame's rectangle <-- This is a no-op.
            //   * Pixel in the previous canvas otherwise <-- Need to restore.
            for (int y = 0; y < currIter.height; y++) {
                const int canvasY = currIter.y_offset + y;
                for (int x = 0; x < currIter.width; x++) {
                    const int canvasX = currIter.x_offset + x;
                    Color8888& currPixel = currBuffer[canvasY * currStride + canvasX];
                    // FIXME: Use alpha-blending when alpha is between 0 and 255.
                    if (!(currPixel & COLOR_8888_ALPHA_MASK)
                            && !FrameContainsPixel(prevIter, canvasX, canvasY)) {
                        const Color8888 prevPixel = prevBuffer[canvasY * prevStride + canvasX];
                        currPixel = prevPixel;
                    }
                }
            }
        }
    }
    return true;
}

long FrameSequenceState_webp::drawFrame(int frameNr,
        Color8888* outputPtr, int outputPixelStride, int previousFrameNr) {
    WebPDemuxer* demux = mFrameSequence.getDemuxer();
    ALOG_ASSERT(demux, "Cannot drawFrame, mDemux is NULL");

#if WEBP_DEBUG
    ALOGD("  drawFrame called for frame# %d, previous frame# %d", frameNr, previousFrameNr);
#endif

    const int canvasWidth = mFrameSequence.getWidth();
    const int canvasHeight = mFrameSequence.getHeight();

    // Find the first frame to be decoded.
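    // Walk backwards from the requested frame to the nearest key frame, but no
    // further back than the frame following the one already on screen;
    // everything from there on must be re-composed.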
    int start = max(previousFrameNr + 1, 0);
    int earliestRequired = frameNr;
    while (earliestRequired > start) {
        if (mFrameSequence.isKeyFrame(earliestRequired)) {
            start = earliestRequired;
            break;
        }
        earliestRequired--;
    }

    WebPIterator currIter;
    WebPIterator prevIter;
    int ok = WebPDemuxGetFrame(demux, start, &currIter);  // Get frame number 'start - 1'.
    ALOG_ASSERT(ok, "Could not retrieve frame# %d", start - 1);

    // Use the preserved buffer only if needed.
    Color8888* prevBuffer = (frameNr == 0) ? outputPtr : mPreservedBuffer;
    int prevStride = (frameNr == 0) ? outputPixelStride : canvasWidth;
    Color8888* currBuffer = outputPtr;
    int currStride = outputPixelStride;

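    // Compose frames 'start' through 'frameNr', ping-ponging between the output
    // buffer and the preserved buffer so that each step can read the previous
    // canvas while writing the new one.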
    for (int i = start; i <= frameNr; i++) {
        prevIter = currIter;
        ok = WebPDemuxGetFrame(demux, i + 1, &currIter);  // Get ith frame.
        ALOG_ASSERT(ok, "Could not retrieve frame# %d", i);
#if WEBP_DEBUG
        ALOGD("      producing frame %d (has_alpha = %d, dispose = %s, blend = %s, duration = %d)",
              i, currIter.has_alpha,
              (currIter.dispose_method == WEBP_MUX_DISPOSE_NONE) ? "none" : "background",
              (currIter.blend_method == WEBP_MUX_BLEND) ? "yes" : "no", currIter.duration);
#endif
        // We swap the prev/curr buffers as we go.
        Color8888* tmpBuffer = prevBuffer;
        prevBuffer = currBuffer;
        currBuffer = tmpBuffer;

        int tmpStride = prevStride;
        prevStride = currStride;
        currStride = tmpStride;

#if WEBP_DEBUG
        ALOGD("            prev = %p, curr = %p, out = %p, tmp = %p",
              prevBuffer, currBuffer, outputPtr, mPreservedBuffer);
#endif
        // Process this frame.
        initializeFrame(currIter, currBuffer, currStride, prevIter, prevBuffer, prevStride);

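        // Intermediate frames that will immediately be disposed to background
        // leave no visible pixels behind, so their decode can be skipped; the
        // clear itself is handled by the next frame's initializeFrame().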
        if (i == frameNr || !willBeCleared(currIter)) {
            if (!decodeFrame(currIter, currBuffer, currStride, prevIter, prevBuffer, prevStride)) {
                ALOGE("Error decoding frame# %d", i);
                return -1;
            }
        }
    }

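    // If the final frame was composed into the preserved buffer rather than the
    // caller's buffer, copy it out.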
    if (outputPtr != currBuffer) {
        copyFrame(currBuffer, currStride, outputPtr, outputPixelStride, canvasWidth, canvasHeight);
    }

    // Return last frame's delay.
    const int frameCount = mFrameSequence.getFrameCount();
    const int lastFrame = (frameNr + frameCount - 1) % frameCount;
    ok = WebPDemuxGetFrame(demux, lastFrame, &currIter);
    ALOG_ASSERT(ok, "Could not retrieve frame# %d", lastFrame - 1);
    const int lastFrameDelay = currIter.duration;

    WebPDemuxReleaseIterator(&currIter);
    WebPDemuxReleaseIterator(&prevIter);

    return lastFrameDelay;
}

////////////////////////////////////////////////////////////////////////////////
// Registry
////////////////////////////////////////////////////////////////////////////////

#include "Registry.h"

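// A WebP file starts with 'RIFF', a 4-byte little-endian chunk size, and then
// the 'WEBP' form tag; matching those two tags is enough to claim the stream.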
static bool isWebP(void* header, int header_size) {
    const uint8_t* const header_str = (const uint8_t*)header;
    return (header_size >= RIFF_HEADER_SIZE) &&
            !memcmp("RIFF", header_str, 4) &&
            !memcmp("WEBP", header_str + 8, 4);
}

static FrameSequence* createFramesequence(Stream* stream) {
    return new FrameSequence_webp(stream);
}

static RegistryEntry gEntry = {
        RIFF_HEADER_SIZE,
        isWebP,
        createFramesequence,
        NULL,
};
static Registry gRegister(gEntry);