Home | History | Annotate | Download | only in webp
      1 /*
      2  * Copyright (C) 2010 Google Inc. All rights reserved.
      3  *
      4  * Redistribution and use in source and binary forms, with or without
      5  * modification, are permitted provided that the following conditions
      6  * are met:
      7  *
      8  * 1.  Redistributions of source code must retain the above copyright
      9  *     notice, this list of conditions and the following disclaimer.
     10  * 2.  Redistributions in binary form must reproduce the above copyright
     11  *     notice, this list of conditions and the following disclaimer in the
     12  *     documentation and/or other materials provided with the distribution.
     13  * 3.  Neither the name of Apple Computer, Inc. ("Apple") nor the names of
     14  *     its contributors may be used to endorse or promote products derived
     15  *     from this software without specific prior written permission.
     16  *
     17  * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
     18  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
     19  * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
     20  * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
     21  * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
     22  * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
     23  * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
     24  * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
     25  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
     26  * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
     27  */
     28 
     29 #include "config.h"
     30 #include "core/platform/image-decoders/webp/WEBPImageDecoder.h"
     31 
     32 #include "core/platform/PlatformInstrumentation.h"
     33 
     34 #if USE(QCMSLIB)
     35 #include "qcms.h"
     36 #endif
     37 
     38 #include "RuntimeEnabledFeatures.h"
     39 
// Selects the libwebp output colorspace for the target pixel layout.
// The lowercase modes (MODE_rgbA / MODE_bgrA) are the premultiplied-alpha
// variants, used only when the image actually carries alpha.
#if CPU(BIG_ENDIAN) || CPU(MIDDLE_ENDIAN)
inline WEBP_CSP_MODE outputMode(bool hasAlpha) { return hasAlpha ? MODE_rgbA : MODE_RGBA; }
#elif SK_B32_SHIFT
// Skia configured with blue in the high byte: RGBA byte order in memory.
inline WEBP_CSP_MODE outputMode(bool hasAlpha) { return hasAlpha ? MODE_rgbA : MODE_RGBA; }
#else // LITTLE_ENDIAN, output BGRA pixels.
inline WEBP_CSP_MODE outputMode(bool hasAlpha) { return hasAlpha ? MODE_bgrA : MODE_BGRA; }
#endif
     47 
     48 namespace WebCore {
     49 
// Constructs the decoder with no libwebp state; the demuxer and the
// incremental decoder are created lazily (updateDemuxer() / decode()).
WEBPImageDecoder::WEBPImageDecoder(ImageSource::AlphaOption alphaOption,
                                   ImageSource::GammaAndColorProfileOption gammaAndColorProfileOption)
    : ImageDecoder(alphaOption, gammaAndColorProfileOption)
    , m_decoder(0) // Incremental libwebp decoder, created in decode().
    , m_formatFlags(0)
    , m_frameBackgroundHasAlpha(false)
#if USE(QCMSLIB)
    , m_haveReadProfile(false)
    , m_hasProfile(false)
    , m_transform(0) // qcms color transform, built in createColorTransform().
#endif
    , m_demux(0) // WebP container demuxer, (re)built in updateDemuxer().
    , m_demuxState(WEBP_DEMUX_PARSING_HEADER)
    , m_haveAlreadyParsedThisData(false)
    , m_haveReadAnimationParameters(false)
    , m_repetitionCount(cAnimationLoopOnce)
    , m_decodedHeight(0) // Rows post-processed so far for the current frame.
{
}
     69 
WEBPImageDecoder::~WEBPImageDecoder()
{
    // Releases the demuxer, the incremental decoder and the color transform.
    clear();
}
     74 
// Releases every libwebp/qcms resource owned by this decoder.
void WEBPImageDecoder::clear()
{
#if USE(QCMSLIB)
    if (m_transform)
        qcms_transform_release(m_transform);
    m_transform = 0;
#endif
    WebPDemuxDelete(m_demux);
    m_demux = 0;
    clearDecoder();
}
     86 
// Destroys the incremental decoder and resets per-frame decode state so the
// current frame can later be decoded again from scratch.
void WEBPImageDecoder::clearDecoder()
{
    WebPIDelete(m_decoder);
    m_decoder = 0;
    m_decodedHeight = 0;
    m_frameBackgroundHasAlpha = false;
}
     94 
     95 bool WEBPImageDecoder::isSizeAvailable()
     96 {
     97     if (!ImageDecoder::isSizeAvailable())
     98         updateDemuxer();
     99 
    100     return ImageDecoder::isSizeAvailable();
    101 }
    102 
    103 size_t WEBPImageDecoder::frameCount()
    104 {
    105     if (!updateDemuxer())
    106         return 0;
    107 
    108     return m_frameBufferCache.size();
    109 }
    110 
// Returns the frame buffer at |index|, decoding it (possibly partially) on
// demand. Returns 0 when |index| is out of range or a required frame cannot
// be obtained from the demuxer.
ImageFrame* WEBPImageDecoder::frameBufferAtIndex(size_t index)
{
    if (index >= frameCount())
        return 0;

    ImageFrame& frame = m_frameBufferCache[index];
    if (frame.status() == ImageFrame::FrameComplete)
        return &frame;

    if (RuntimeEnabledFeatures::animatedWebPEnabled()) {
        // Walk the dependency chain backwards, collecting every frame that
        // must be decoded before |index|. The walk stops at a frame that is
        // already complete or has no dependency (notFound).
        Vector<size_t> framesToDecode;
        size_t frameToDecode = index;
        do {
            framesToDecode.append(frameToDecode);
            frameToDecode = m_frameBufferCache[frameToDecode].requiredPreviousFrameIndex();
        } while (frameToDecode != notFound && m_frameBufferCache[frameToDecode].status() != ImageFrame::FrameComplete);

        ASSERT(m_demux);
        // Decode oldest-first: the vector was filled newest-first, hence the
        // reverse iteration.
        for (size_t i = framesToDecode.size(); i > 0; --i) {
            size_t frameIndex = framesToDecode[i - 1];
            WebPIterator webpFrame;
            // Demuxer frame numbers are 1-based.
            if (!WebPDemuxGetFrame(m_demux, frameIndex + 1, &webpFrame))
                return 0;
            if ((m_formatFlags & ANIMATION_FLAG) && !initFrameBuffer(webpFrame, frameIndex)) {
                WebPDemuxReleaseIterator(&webpFrame);
                return 0;
            }
            PlatformInstrumentation::willDecodeImage("WEBP");
            decode(webpFrame.fragment.bytes, webpFrame.fragment.size, false, frameIndex);
            PlatformInstrumentation::didDecodeImage();
            WebPDemuxReleaseIterator(&webpFrame);

            // We need more data to continue decoding.
            if (m_frameBufferCache[frameIndex].status() != ImageFrame::FrameComplete)
                break;
        }

        // It is also a fatal error if all data is received and we have decoded all
        // frames available but the file is truncated.
        if (index >= m_frameBufferCache.size() - 1 && isAllDataReceived() && m_demux && m_demuxState != WEBP_DEMUX_DONE)
            setFailed();

        return &frame;
    }

    // Still-image path: there is exactly one frame, decoded straight from
    // the accumulated data buffer.
    ASSERT(!index);
    PlatformInstrumentation::willDecodeImage("WEBP");
    decode(reinterpret_cast<const uint8_t*>(m_data->data()), m_data->size(), false, index);
    PlatformInstrumentation::didDecodeImage();
    return &frame;
}
    162 
// Receives (possibly incremental) encoded data. Clears the "already parsed"
// flag so updateDemuxer() will re-parse, unless demuxing already finished.
void WEBPImageDecoder::setData(SharedBuffer* data, bool allDataReceived)
{
    if (failed())
        return;

    ImageDecoder::setData(data, allDataReceived);

    if (m_demuxState != WEBP_DEMUX_DONE)
        m_haveAlreadyParsedThisData = false;
#if USE(QCMSLIB)
    // Even when demuxing is done, re-parse if a color profile is present but
    // has not been read yet.
    else if (m_hasProfile && !m_haveReadProfile)
        m_haveAlreadyParsedThisData = false;
#endif
}
    177 
    178 int WEBPImageDecoder::repetitionCount() const
    179 {
    180     return failed() ? cAnimationLoopOnce : m_repetitionCount;
    181 }
    182 
    183 bool WEBPImageDecoder::frameIsCompleteAtIndex(size_t index) const
    184 {
    185     if (!RuntimeEnabledFeatures::animatedWebPEnabled())
    186         return ImageDecoder::frameIsCompleteAtIndex(index);
    187     if (!m_demux || m_demuxState <= WEBP_DEMUX_PARSING_HEADER)
    188         return false;
    189     if (!(m_formatFlags & ANIMATION_FLAG))
    190         return ImageDecoder::frameIsCompleteAtIndex(index);
    191     bool frameIsLoadedAtIndex = index < m_frameBufferCache.size();
    192     return frameIsLoadedAtIndex;
    193 }
    194 
    195 float WEBPImageDecoder::frameDurationAtIndex(size_t index) const
    196 {
    197     return index < m_frameBufferCache.size() ? m_frameBufferCache[index].duration() : 0;
    198 }
    199 
// (Re)parses the WebP container over all data received so far. Returns true
// once the canvas size is known (with the frame cache resized to the demuxed
// frame count); false while more header data is needed. Fatal errors set the
// failed flag and return false via setFailed().
bool WEBPImageDecoder::updateDemuxer()
{
    if (m_haveAlreadyParsedThisData)
        return true;

    m_haveAlreadyParsedThisData = true;

    const unsigned webpHeaderSize = 20;
    if (m_data->size() < webpHeaderSize)
        return false; // Wait for headers so that WebPDemuxPartial doesn't return null.

    // Rebuild the demuxer from scratch over the full accumulated buffer.
    WebPDemuxDelete(m_demux);
    WebPData inputData = { reinterpret_cast<const uint8_t*>(m_data->data()), m_data->size() };
    m_demux = WebPDemuxPartial(&inputData, &m_demuxState);
    if (!m_demux)
        return setFailed();

    if (m_demuxState <= WEBP_DEMUX_PARSING_HEADER)
        return false; // Not enough data for parsing canvas width/height yet.

    bool hasAnimation = (m_formatFlags & ANIMATION_FLAG);
    if (!ImageDecoder::isSizeAvailable()) {
        m_formatFlags = WebPDemuxGetI(m_demux, WEBP_FF_FORMAT_FLAGS);
#if USE(QCMSLIB)
        m_hasProfile = (m_formatFlags & ICCP_FLAG) && !ignoresGammaAndColorProfile();
#endif
        hasAnimation = (m_formatFlags & ANIMATION_FLAG);
        // Animated WebP is rejected outright when the runtime feature is off.
        if (hasAnimation && !RuntimeEnabledFeatures::animatedWebPEnabled())
            return setFailed();
        if (!setSize(WebPDemuxGetI(m_demux, WEBP_FF_CANVAS_WIDTH), WebPDemuxGetI(m_demux, WEBP_FF_CANVAS_HEIGHT)))
            return setFailed();
    }

    ASSERT(ImageDecoder::isSizeAvailable());
    const size_t newFrameCount = WebPDemuxGetI(m_demux, WEBP_FF_FRAME_COUNT);
    if (hasAnimation && !m_haveReadAnimationParameters && newFrameCount) {
        // As we have parsed at least one frame (even if partially),
        // we must already have parsed the animation properties.
        // This is because ANIM chunk always precedes ANMF chunks.
        m_repetitionCount = WebPDemuxGetI(m_demux, WEBP_FF_LOOP_COUNT);
        ASSERT(m_repetitionCount == (m_repetitionCount & 0xffff)); // Loop count is always <= 16 bits.
        // A stored loop count of 0 means "loop forever".
        if (!m_repetitionCount)
            m_repetitionCount = cAnimationLoopInfinite;
        m_haveReadAnimationParameters = true;
    }

    const size_t oldFrameCount = m_frameBufferCache.size();
    if (newFrameCount > oldFrameCount) {
        // Grow the cache and record metadata for the newly demuxed frames.
        m_frameBufferCache.resize(newFrameCount);
        for (size_t i = oldFrameCount; i < newFrameCount; ++i) {
            m_frameBufferCache[i].setPremultiplyAlpha(m_premultiplyAlpha);
            if (!hasAnimation) {
                // A still image has exactly one frame with no dependencies.
                ASSERT(!i);
                m_frameBufferCache[i].setRequiredPreviousFrameIndex(notFound);
                continue;
            }
            WebPIterator animatedFrame;
            WebPDemuxGetFrame(m_demux, i + 1, &animatedFrame);
            ASSERT(animatedFrame.complete == 1);
            m_frameBufferCache[i].setDuration(animatedFrame.duration);
            m_frameBufferCache[i].setDisposalMethod(animatedFrame.dispose_method == WEBP_MUX_DISPOSE_BACKGROUND ? ImageFrame::DisposeOverwriteBgcolor : ImageFrame::DisposeKeep);
            WebPDemuxReleaseIterator(&animatedFrame);
            m_frameBufferCache[i].setRequiredPreviousFrameIndex(findRequiredPreviousFrame(i));
        }
    }

    return true;
}
    268 
// Prepares the buffer for an animation frame: clamps and records its rect,
// seeds its pixels from the required previous frame (or allocates a fresh
// canvas), and marks it FramePartial. Returns false via setFailed() on
// allocation failure.
bool WEBPImageDecoder::initFrameBuffer(const WebPIterator& frame, size_t frameIndex)
{
    ImageFrame& buffer = m_frameBufferCache[frameIndex];
    if (buffer.status() != ImageFrame::FrameEmpty) // Already initialized.
        return true;

    // Initialize the frame rect in our buffer.
    IntRect frameRect(frame.x_offset, frame.y_offset, frame.width, frame.height);

    // Make sure the frameRect doesn't extend outside the buffer.
    if (frameRect.maxX() > size().width())
        frameRect.setWidth(size().width() - frame.x_offset);
    if (frameRect.maxY() > size().height())
        frameRect.setHeight(size().height() - frame.y_offset);
    buffer.setOriginalFrameRect(frameRect);

    const size_t requiredPreviousFrameIndex = buffer.requiredPreviousFrameIndex();
    if (requiredPreviousFrameIndex == notFound) {
        // This frame doesn't rely on any previous data.
        if (!buffer.setSize(size().width(), size().height()))
            return setFailed();
        // Any canvas area outside this frame's rect starts out transparent.
        m_frameBackgroundHasAlpha = !frameRect.contains(IntRect(IntPoint(), size()));
    } else {
        const ImageFrame& prevBuffer = m_frameBufferCache[requiredPreviousFrameIndex];
        ASSERT(prevBuffer.status() == ImageFrame::FrameComplete);

        // Preserve the last frame as the starting state for this frame.
        if (!buffer.copyBitmapData(prevBuffer))
            return setFailed();

        if (prevBuffer.disposalMethod() == ImageFrame::DisposeOverwriteBgcolor) {
            // We want to clear the previous frame to transparent, without
            // affecting pixels in the image outside of the frame.
            const IntRect& prevRect = prevBuffer.originalFrameRect();
            ASSERT(!prevRect.contains(IntRect(IntPoint(), size())));
            buffer.zeroFillFrameRect(prevRect);
        }

        m_frameBackgroundHasAlpha = prevBuffer.hasAlpha() || (prevBuffer.disposalMethod() == ImageFrame::DisposeOverwriteBgcolor);
    }

    buffer.setStatus(ImageFrame::FramePartial);
    // The buffer is transparent outside the decoded area while the image is loading.
    // The correct value of 'hasAlpha' for the frame will be set when it is fully decoded.
    buffer.setHasAlpha(true);
    return true;
}
    316 
size_t WEBPImageDecoder::clearCacheExceptFrame(size_t clearExceptFrame)
{
    // If |clearExceptFrame| has status FrameComplete, we preserve that frame.
    // Otherwise, we preserve a previous frame with status FrameComplete whose data is required
    // to decode |clearExceptFrame|, either in initFrameBuffer() or ApplyPostProcessing().
    // All other frames can be cleared.
    // Note: when no complete ancestor exists the walk ends at notFound, which
    // the base class takes to mean every frame may be cleared.
    while ((clearExceptFrame < m_frameBufferCache.size()) && (m_frameBufferCache[clearExceptFrame].status() != ImageFrame::FrameComplete))
        clearExceptFrame = m_frameBufferCache[clearExceptFrame].requiredPreviousFrameIndex();

    return ImageDecoder::clearCacheExceptFrame(clearExceptFrame);
}
    328 
// Clears a single cached frame. A partially decoded frame also owns live
// incremental-decoder state, which must be dropped alongside it.
void WEBPImageDecoder::clearFrameBuffer(size_t frameIndex)
{
    if (m_demux && m_demuxState >= WEBP_DEMUX_PARSED_HEADER && m_frameBufferCache[frameIndex].status() == ImageFrame::FramePartial) {
        // Clear the decoder state so that this partial frame can be decoded again when requested.
        clearDecoder();
    }
    ImageDecoder::clearFrameBuffer(frameIndex);
}
    337 
    338 #if USE(QCMSLIB)
    339 
// Builds m_transform, mapping the embedded ICC profile (|data|, |size|) to
// the output device profile. m_transform remains null on any failure.
void WEBPImageDecoder::createColorTransform(const char* data, size_t size)
{
    if (m_transform)
        qcms_transform_release(m_transform);
    m_transform = 0;

    qcms_profile* deviceProfile = ImageDecoder::qcmsOutputDeviceProfile();
    if (!deviceProfile)
        return;
    qcms_profile* inputProfile = qcms_profile_from_memory(data, size);
    if (!inputProfile)
        return;

    // We currently only support color profiles for RGB profiled images.
    ASSERT(icSigRgbData == qcms_profile_get_color_space(inputProfile));
    // The input image pixels are RGBA format.
    qcms_data_type format = QCMS_DATA_RGBA_8;
    // FIXME: Don't force perceptual intent if the image profile contains an intent.
    m_transform = qcms_transform_create(inputProfile, format, deviceProfile, QCMS_DATA_RGBA_8, QCMS_INTENT_PERCEPTUAL);

    qcms_profile_release(inputProfile);
}
    362 
    363 void WEBPImageDecoder::readColorProfile()
    364 {
    365     WebPChunkIterator chunkIterator;
    366     if (!WebPDemuxGetChunk(m_demux, "ICCP", 1, &chunkIterator)) {
    367         WebPDemuxReleaseChunkIterator(&chunkIterator);
    368         return;
    369     }
    370 
    371     const char* profileData = reinterpret_cast<const char*>(chunkIterator.chunk.bytes);
    372     size_t profileSize = chunkIterator.chunk.size;
    373 
    374     // Only accept RGB color profiles from input class devices.
    375     bool ignoreProfile = false;
    376     if (profileSize < ImageDecoder::iccColorProfileHeaderLength)
    377         ignoreProfile = true;
    378     else if (!ImageDecoder::rgbColorProfile(profileData, profileSize))
    379         ignoreProfile = true;
    380     else if (!ImageDecoder::inputDeviceColorProfile(profileData, profileSize))
    381         ignoreProfile = true;
    382 
    383     if (!ignoreProfile)
    384         createColorTransform(profileData, profileSize);
    385 
    386     WebPDemuxReleaseChunkIterator(&chunkIterator);
    387 }
    388 
    389 #endif // USE(QCMSLIB)
    390 
// Post-processes the rows decoded since the previous call (rows
// [m_decodedHeight, decodedHeight)): applies the qcms color transform when
// present, and for animations restores transparent pixels from the previous
// frame according to its disposal method.
void WEBPImageDecoder::applyPostProcessing(size_t frameIndex)
{
    ImageFrame& buffer = m_frameBufferCache[frameIndex];
    int width;
    int decodedHeight;
    if (!WebPIDecGetRGB(m_decoder, &decodedHeight, &width, 0, 0))
        return; // See also https://bugs.webkit.org/show_bug.cgi?id=74062
    if (decodedHeight <= 0)
        return;

    const IntRect& frameRect = buffer.originalFrameRect();
    ASSERT_WITH_SECURITY_IMPLICATION(width == frameRect.width());
    ASSERT_WITH_SECURITY_IMPLICATION(decodedHeight <= frameRect.height());
    // Frame-local coordinates are offset by the frame's position on the canvas.
    const int left = frameRect.x();
    const int top = frameRect.y();

#if USE(QCMSLIB)
    if (m_hasProfile) {
        // Lazily read the ICC profile on the first batch of decoded rows.
        if (!m_haveReadProfile) {
            readColorProfile();
            m_haveReadProfile = true;
        }
        // Transform each newly decoded row in place, then write the pixels
        // back through setRGBA (which also applies premultiplication).
        for (int y = m_decodedHeight; y < decodedHeight; ++y) {
            const int canvasY = top + y;
            uint8_t* row = reinterpret_cast<uint8_t*>(buffer.getAddr(left, canvasY));
            if (qcms_transform* transform = colorTransform())
                qcms_transform_data_type(transform, row, row, width, QCMS_OUTPUT_RGBX);
            uint8_t* pixel = row;
            for (int x = 0; x < width; ++x, pixel += 4) {
                const int canvasX = left + x;
                buffer.setRGBA(canvasX, canvasY, pixel[0], pixel[1], pixel[2], pixel[3]);
            }
        }
    }
#endif // USE(QCMSLIB)

    // During the decoding of current frame, we may have set some pixels to be transparent (i.e. alpha < 255).
    // However, the value of each of these pixels should have been determined by blending it against the value
    // of that pixel in the previous frame. So, we correct these pixels based on disposal method of the previous
    // frame and the previous frame buffer.
    // FIXME: This could be avoided if libwebp decoder had an API that used the previous required frame
    // to do the alpha-blending by itself.
    if ((m_formatFlags & ANIMATION_FLAG) && frameIndex) {
        ImageFrame& prevBuffer = m_frameBufferCache[frameIndex - 1];
        ImageFrame::FrameDisposalMethod prevMethod = prevBuffer.disposalMethod();
        if (prevMethod == ImageFrame::DisposeKeep) { // Restore transparent pixels to pixels in previous canvas.
            ASSERT(prevBuffer.status() == ImageFrame::FrameComplete);
            for (int y = m_decodedHeight; y < decodedHeight; ++y) {
                const int canvasY = top + y;
                for (int x = 0; x < width; ++x) {
                    const int canvasX = left + x;
                    ImageFrame::PixelData& pixel = *buffer.getAddr(canvasX, canvasY);
                    // FIXME: Use alpha-blending when alpha is between 0 and 255.
                    // Alpha-blending is being implemented in: https://bugs.webkit.org/show_bug.cgi?id=17022
                    if (!((pixel >> SK_A32_SHIFT) & 0xff)) {
                        ImageFrame::PixelData prevPixel = *prevBuffer.getAddr(canvasX, canvasY);
                        pixel = prevPixel;
                    }
                }
            }
        } else if (prevMethod == ImageFrame::DisposeOverwriteBgcolor && buffer.requiredPreviousFrameIndex() != notFound) {
            // Note: if the requiredPreviousFrameIndex is |notFound|, there's nothing to do.
            ASSERT(prevBuffer.status() == ImageFrame::FrameComplete);
            const IntRect& prevRect = prevBuffer.originalFrameRect();
            // We need to restore transparent pixels to as they were just after initFrame() call. That is:
            //   * Transparent if it belongs to prevRect <-- This is a no-op.
            //   * Pixel in the previous canvas otherwise <-- Need to restore.
            for (int y = m_decodedHeight; y < decodedHeight; ++y) {
                const int canvasY = top + y;
                for (int x = 0; x < width; ++x) {
                    const int canvasX = left + x;
                    ImageFrame::PixelData& pixel = *buffer.getAddr(canvasX, canvasY);
                    // FIXME: Use alpha-blending when alpha is between 0 and 255.
                    if (!((pixel >> SK_A32_SHIFT) & 0xff) && !prevRect.contains(IntPoint(canvasX, canvasY))) {
                        ImageFrame::PixelData prevPixel = *prevBuffer.getAddr(canvasX, canvasY);
                        pixel = prevPixel;
                    }
                }
            }
        }
    }

    // Remember how far we've post-processed, so the next call resumes here.
    m_decodedHeight = decodedHeight;
}
    475 
// Incrementally decodes |dataSize| bytes of |dataBytes| into the frame at
// |frameIndex|. Returns true when the frame is complete (or when |onlySize|
// and the size is known); false when more data is needed or on failure
// (failures also set the decoder's failed flag via setFailed()).
bool WEBPImageDecoder::decode(const uint8_t* dataBytes, size_t dataSize, bool onlySize, size_t frameIndex)
{
    if (failed())
        return false;

    if (!ImageDecoder::isSizeAvailable()) {
        // Minimum byte count before WebPGetFeatures() is attempted.
        static const size_t imageHeaderSize = 30;
        if (dataSize < imageHeaderSize)
            return false;
        int width, height;
        WebPBitstreamFeatures features;
        if (WebPGetFeatures(dataBytes, dataSize, &features) != VP8_STATUS_OK)
            return setFailed();
        width = features.width;
        height = features.height;
        m_formatFlags = features.has_alpha ? ALPHA_FLAG : 0;
        if (!setSize(width, height))
            return setFailed();
    }

    ASSERT(ImageDecoder::isSizeAvailable());
    if (onlySize)
        return true;

    ASSERT(m_frameBufferCache.size() > frameIndex);
    ImageFrame& buffer = m_frameBufferCache[frameIndex];
    ASSERT(buffer.status() != ImageFrame::FrameComplete);

    if (buffer.status() == ImageFrame::FrameEmpty) {
        // Still-image path: initFrameBuffer() was not called, so size the
        // buffer to the full canvas here.
        if (!buffer.setSize(size().width(), size().height()))
            return setFailed();
        buffer.setStatus(ImageFrame::FramePartial);
        // The buffer is transparent outside the decoded area while the image is loading.
        // The correct value of 'hasAlpha' for the frame will be set when it is fully decoded.
        buffer.setHasAlpha(true);
        buffer.setOriginalFrameRect(IntRect(IntPoint(), size()));
    }

    const IntRect& frameRect = buffer.originalFrameRect();
    if (!m_decoder) {
        // Pick the output colorspace; non-premultiplied output overrides the
        // alpha-dependent choice.
        WEBP_CSP_MODE mode = outputMode(m_formatFlags & ALPHA_FLAG);
        if (!m_premultiplyAlpha)
            mode = outputMode(false);
#if USE(QCMSLIB)
        if (m_hasProfile)
            mode = MODE_RGBA; // Decode to RGBA for input to libqcms.
#endif
        // Decode directly into the frame buffer's own pixel memory
        // (is_external_memory), one canvas-width stride per row.
        WebPInitDecBuffer(&m_decoderBuffer);
        m_decoderBuffer.colorspace = mode;
        m_decoderBuffer.u.RGBA.stride = size().width() * sizeof(ImageFrame::PixelData);
        m_decoderBuffer.u.RGBA.size = m_decoderBuffer.u.RGBA.stride * frameRect.height();
        m_decoderBuffer.is_external_memory = 1;
        m_decoder = WebPINewDecoder(&m_decoderBuffer);
        if (!m_decoder)
            return setFailed();
    }

    // Refresh the output pointer before each incremental update; it targets
    // the frame's top-left pixel within the canvas.
    m_decoderBuffer.u.RGBA.rgba = reinterpret_cast<uint8_t*>(buffer.getAddr(frameRect.x(), frameRect.y()));

    switch (WebPIUpdate(m_decoder, dataBytes, dataSize)) {
    case VP8_STATUS_OK:
        // Frame fully decoded: post-process, finalize alpha, tear down the
        // incremental decoder.
        applyPostProcessing(frameIndex);
        buffer.setHasAlpha((m_formatFlags & ALPHA_FLAG) || m_frameBackgroundHasAlpha);
        buffer.setStatus(ImageFrame::FrameComplete);
        clearDecoder();
        return true;
    case VP8_STATUS_SUSPENDED:
        // More input needed; post-process the rows decoded so far.
        applyPostProcessing(frameIndex);
        return false;
    default:
        // Any other status is a bitstream error: drop all decoder state.
        clear();
        return setFailed();
    }
}
    550 
    551 } // namespace WebCore
    552