Home | History | Annotate | Download | only in graphics
      1 /*
      2  * Copyright (C) 2012 Google Inc. All rights reserved.
      3  *
      4  * Redistribution and use in source and binary forms, with or without
      5  * modification, are permitted provided that the following conditions
      6  * are met:
      7  * 1. Redistributions of source code must retain the above copyright
      8  *    notice, this list of conditions and the following disclaimer.
      9  * 2. Redistributions in binary form must reproduce the above copyright
     10  *    notice, this list of conditions and the following disclaimer in the
     11  *    documentation and/or other materials provided with the distribution.
     12  *
     13  * THIS SOFTWARE IS PROVIDED BY APPLE COMPUTER, INC. ``AS IS'' AND ANY
     14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
     15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
     16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE COMPUTER, INC. OR
     17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
     18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
     19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
     20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
     21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
     22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
     23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
     24  */
     25 
     26 #include "config.h"
     27 #include "platform/graphics/ImageDecodingStore.h"
     28 
     29 #include "platform/TraceEvent.h"
     30 
     31 namespace WebCore {
     32 
namespace {

// Allow up to 256 MBytes of discardable entries. The previous limit allowed up to
// 128 entries (independently of their size) which caused OOM errors on websites
// with a lot of (very) large images.
static const size_t maxTotalSizeOfDiscardableEntries = 256 * 1024 * 1024;
// Default budget for heap-allocated (non-discardable) cache entries; can be
// changed at runtime via ImageDecodingStore::setCacheLimitInBytes().
static const size_t defaultMaxTotalSizeOfHeapEntries = 32 * 1024 * 1024;
// Process-wide singleton, managed solely through setInstance() below.
static ImageDecodingStore* s_instance = 0;
// When false, completed images are evicted as soon as they become unused
// (see the FIXME in ImageDecodingStore::unlockCache about the Skia
// discardable memory path).
static bool s_imageCachingEnabled = true;

// Installs a new singleton store, destroying the previous one (if any).
static void setInstance(ImageDecodingStore* imageDecodingStore)
{
    delete s_instance;
    s_instance = imageDecodingStore;
}

} // namespace
     50 
// Initializes an empty store with the default 32 MB heap budget. The
// discardable-memory budget is a file-level constant and is not configurable.
ImageDecodingStore::ImageDecodingStore()
    : m_heapLimitInBytes(defaultMaxTotalSizeOfHeapEntries)
    , m_heapMemoryUsageInBytes(0)
    , m_discardableMemoryUsageInBytes(0)
{
}
     57 
// In debug builds, forces a full prune (limit 0) and verifies that every map
// and the LRU list are empty, i.e. no cache entry was leaked or left locked
// at shutdown.
ImageDecodingStore::~ImageDecodingStore()
{
#ifndef NDEBUG
    setCacheLimitInBytes(0);
    ASSERT(!m_imageCacheMap.size());
    ASSERT(!m_decoderCacheMap.size());
    ASSERT(!m_orderedCacheList.size());
    ASSERT(!m_imageCacheKeyMap.size());
    ASSERT(!m_decoderCacheKeyMap.size());
#endif
}
     69 
// Returns the process-wide store, or 0 before initializeOnce() / after shutdown().
ImageDecodingStore* ImageDecodingStore::instance()
{
    return s_instance;
}
     74 
     75 void ImageDecodingStore::initializeOnce()
     76 {
     77     setInstance(ImageDecodingStore::create().leakPtr());
     78 }
     79 
// Destroys the singleton created by initializeOnce().
void ImageDecodingStore::shutdown()
{
    setInstance(0);
}
     84 
// Toggles whether completed images stay cached after their last unlock
// (see the FIXME in unlockCache()). Not synchronized; presumably set once
// at startup — TODO confirm callers.
void ImageDecodingStore::setImageCachingEnabled(bool enabled)
{
    s_imageCachingEnabled = enabled;
}
     89 
     90 bool ImageDecodingStore::lockCache(const ImageFrameGenerator* generator, const SkISize& scaledSize, size_t index, const ScaledImageFragment** cachedImage)
     91 {
     92     ASSERT(cachedImage);
     93 
     94     Vector<OwnPtr<CacheEntry> > cacheEntriesToDelete;
     95     {
     96         MutexLocker lock(m_mutex);
     97         // Public access is restricted to complete images only.
     98         ImageCacheMap::iterator iter = m_imageCacheMap.find(ImageCacheEntry::makeCacheKey(generator, scaledSize, index, ScaledImageFragment::CompleteImage));
     99         if (iter == m_imageCacheMap.end())
    100             return false;
    101         return lockCacheEntryInternal(iter->value.get(), cachedImage, &cacheEntriesToDelete);
    102     }
    103 }
    104 
// Releases a lock taken by lockCache()/insertAndLockCache(): unpins the
// bitmap pixels, drops the entry's use count and moves it to the
// most-recently-used end of the LRU list. When image caching is disabled
// and this was the last user, the entry is evicted and destroyed outside
// the mutex via cacheEntriesToDelete.
void ImageDecodingStore::unlockCache(const ImageFrameGenerator* generator, const ScaledImageFragment* cachedImage)
{
    Vector<OwnPtr<CacheEntry> > cacheEntriesToDelete;
    {
        MutexLocker lock(m_mutex);
        cachedImage->bitmap().unlockPixels();
        ImageCacheMap::iterator iter = m_imageCacheMap.find(ImageCacheEntry::makeCacheKey(generator, cachedImage->scaledSize(), cachedImage->index(), cachedImage->generation()));
        ASSERT_WITH_SECURITY_IMPLICATION(iter != m_imageCacheMap.end());

        CacheEntry* cacheEntry = iter->value.get();
        cacheEntry->decrementUseCount();

        // Put the entry to the end of list.
        m_orderedCacheList.remove(cacheEntry);
        m_orderedCacheList.append(cacheEntry);

        // FIXME: This code is temporary such that in the new Skia
        // discardable memory path we do not cache images.
        // Once the transition is complete the logic to handle
        // image caching should be removed entirely.
        if (!s_imageCachingEnabled && !cacheEntry->useCount()) {
            removeFromCacheInternal(cacheEntry, &cacheEntriesToDelete);
            removeFromCacheListInternal(cacheEntriesToDelete);
        }
    }
    // cacheEntriesToDelete (if populated) is destroyed here, after the inner
    // scope released the mutex, so entry destruction never runs under the lock.
}
    131 
// Inserts |image| into the cache, pins its pixels and returns a pointer the
// caller must later release with unlockCache(). If an entry with the same
// key already exists (generation ID wrap-around) the cached copy is locked
// and returned instead, and the new image is discarded.
const ScaledImageFragment* ImageDecodingStore::insertAndLockCache(const ImageFrameGenerator* generator, PassOwnPtr<ScaledImageFragment> image)
{
    // Prune old cache entries to give space for the new one.
    prune();

    // Keep a raw pointer before ownership moves into the cache entry.
    ScaledImageFragment* newImage = image.get();
    OwnPtr<ImageCacheEntry> newCacheEntry = ImageCacheEntry::createAndUse(generator, image);
    Vector<OwnPtr<CacheEntry> > cacheEntriesToDelete;
    {
        MutexLocker lock(m_mutex);

        ImageCacheMap::iterator iter = m_imageCacheMap.find(newCacheEntry->cacheKey());

        // It is rare but possible that the key of a new cache entry is found.
        // This happens if the generation ID of the image object wraps around.
        // In this case we will try to return the existing cached object and
        // discard the new cache object.
        if (iter != m_imageCacheMap.end()) {
            const ScaledImageFragment* oldImage;
            if (lockCacheEntryInternal(iter->value.get(), &oldImage, &cacheEntriesToDelete)) {
                // newCacheEntry is destroyed on return, dropping the new image.
                newCacheEntry->decrementUseCount();
                return oldImage;
            }
        }

        // The new image is not locked yet so do it here.
        newImage->bitmap().lockPixels();
        insertCacheInternal(newCacheEntry.release(), &m_imageCacheMap, &m_imageCacheKeyMap);
    }
    return newImage;
}
    163 
    164 bool ImageDecodingStore::lockDecoder(const ImageFrameGenerator* generator, const SkISize& scaledSize, ImageDecoder** decoder)
    165 {
    166     ASSERT(decoder);
    167 
    168     MutexLocker lock(m_mutex);
    169     DecoderCacheMap::iterator iter = m_decoderCacheMap.find(DecoderCacheEntry::makeCacheKey(generator, scaledSize));
    170     if (iter == m_decoderCacheMap.end())
    171         return false;
    172 
    173     DecoderCacheEntry* cacheEntry = iter->value.get();
    174 
    175     // There can only be one user of a decoder at a time.
    176     ASSERT(!cacheEntry->useCount());
    177     cacheEntry->incrementUseCount();
    178     *decoder = cacheEntry->cachedDecoder();
    179     return true;
    180 }
    181 
    182 void ImageDecodingStore::unlockDecoder(const ImageFrameGenerator* generator, const ImageDecoder* decoder)
    183 {
    184     MutexLocker lock(m_mutex);
    185     DecoderCacheMap::iterator iter = m_decoderCacheMap.find(DecoderCacheEntry::makeCacheKey(generator, decoder));
    186     ASSERT_WITH_SECURITY_IMPLICATION(iter != m_decoderCacheMap.end());
    187 
    188     CacheEntry* cacheEntry = iter->value.get();
    189     cacheEntry->decrementUseCount();
    190 
    191     // Put the entry to the end of list.
    192     m_orderedCacheList.remove(cacheEntry);
    193     m_orderedCacheList.append(cacheEntry);
    194 }
    195 
    196 void ImageDecodingStore::insertDecoder(const ImageFrameGenerator* generator, PassOwnPtr<ImageDecoder> decoder, bool isDiscardable)
    197 {
    198     // Prune old cache entries to give space for the new one.
    199     prune();
    200 
    201     OwnPtr<DecoderCacheEntry> newCacheEntry = DecoderCacheEntry::create(generator, decoder, isDiscardable);
    202 
    203     MutexLocker lock(m_mutex);
    204     ASSERT(!m_decoderCacheMap.contains(newCacheEntry->cacheKey()));
    205     insertCacheInternal(newCacheEntry.release(), &m_decoderCacheMap, &m_decoderCacheKeyMap);
    206 }
    207 
    208 void ImageDecodingStore::removeDecoder(const ImageFrameGenerator* generator, const ImageDecoder* decoder)
    209 {
    210     Vector<OwnPtr<CacheEntry> > cacheEntriesToDelete;
    211     {
    212         MutexLocker lock(m_mutex);
    213         DecoderCacheMap::iterator iter = m_decoderCacheMap.find(DecoderCacheEntry::makeCacheKey(generator, decoder));
    214         ASSERT_WITH_SECURITY_IMPLICATION(iter != m_decoderCacheMap.end());
    215 
    216         CacheEntry* cacheEntry = iter->value.get();
    217         ASSERT(cacheEntry->useCount());
    218         cacheEntry->decrementUseCount();
    219 
    220         // Delete only one decoder cache entry. Ownership of the cache entry
    221         // is transfered to cacheEntriesToDelete such that object can be deleted
    222         // outside of the lock.
    223         removeFromCacheInternal(cacheEntry, &cacheEntriesToDelete);
    224 
    225         // Remove from LRU list.
    226         removeFromCacheListInternal(cacheEntriesToDelete);
    227     }
    228 }
    229 
    230 bool ImageDecodingStore::isCached(const ImageFrameGenerator* generator, const SkISize& scaledSize, size_t index)
    231 {
    232     MutexLocker lock(m_mutex);
    233     ImageCacheMap::iterator iter = m_imageCacheMap.find(ImageCacheEntry::makeCacheKey(generator, scaledSize, index, ScaledImageFragment::CompleteImage));
    234     if (iter == m_imageCacheMap.end())
    235         return false;
    236     return true;
    237 }
    238 
    239 void ImageDecodingStore::removeCacheIndexedByGenerator(const ImageFrameGenerator* generator)
    240 {
    241     Vector<OwnPtr<CacheEntry> > cacheEntriesToDelete;
    242     {
    243         MutexLocker lock(m_mutex);
    244 
    245         // Remove image cache objects and decoder cache objects associated
    246         // with a ImageFrameGenerator.
    247         removeCacheIndexedByGeneratorInternal(&m_imageCacheMap, &m_imageCacheKeyMap, generator, &cacheEntriesToDelete);
    248         removeCacheIndexedByGeneratorInternal(&m_decoderCacheMap, &m_decoderCacheKeyMap, generator, &cacheEntriesToDelete);
    249 
    250         // Remove from LRU list as well.
    251         removeFromCacheListInternal(cacheEntriesToDelete);
    252     }
    253 }
    254 
    255 void ImageDecodingStore::clear()
    256 {
    257     size_t cacheLimitInBytes;
    258     {
    259         MutexLocker lock(m_mutex);
    260         cacheLimitInBytes = m_heapLimitInBytes;
    261         m_heapLimitInBytes = 0;
    262     }
    263 
    264     prune();
    265 
    266     {
    267         MutexLocker lock(m_mutex);
    268         m_heapLimitInBytes = cacheLimitInBytes;
    269     }
    270 }
    271 
// Updates the heap cache budget and immediately prunes to enforce it.
// Note that the discardable entries limit is constant (i.e. only the heap limit is updated).
void ImageDecodingStore::setCacheLimitInBytes(size_t cacheLimit)
{
    {
        MutexLocker lock(m_mutex);
        m_heapLimitInBytes = cacheLimit;
    }
    // prune() takes the mutex itself, so it must run outside the scope above.
    prune();
}
    281 
// Returns the bytes currently consumed by heap (non-discardable) cache
// entries; discardable usage is tracked separately and not reported here.
size_t ImageDecodingStore::memoryUsageInBytes()
{
    MutexLocker lock(m_mutex);
    return m_heapMemoryUsageInBytes;
}
    287 
// Total number of cached objects (images + decoders).
int ImageDecodingStore::cacheEntries()
{
    MutexLocker lock(m_mutex);
    return m_imageCacheMap.size() + m_decoderCacheMap.size();
}
    293 
// Number of cached image objects only.
int ImageDecodingStore::imageCacheEntries()
{
    MutexLocker lock(m_mutex);
    return m_imageCacheMap.size();
}
    299 
// Number of cached decoder objects only.
int ImageDecodingStore::decoderCacheEntries()
{
    MutexLocker lock(m_mutex);
    return m_decoderCacheMap.size();
}
    305 
// Evicts unused cache entries, starting from the least recently used, until
// both budgets are satisfied: heap usage <= m_heapLimitInBytes and
// discardable usage <= maxTotalSizeOfDiscardableEntries. A zero heap limit
// forces eviction of every unused entry (used by clear() and the debug
// destructor). Entries with a non-zero use count are skipped. Evicted
// entries are destroyed after the mutex is released.
void ImageDecodingStore::prune()
{
    TRACE_EVENT0("webkit", "ImageDecodingStore::prune");

    Vector<OwnPtr<CacheEntry> > cacheEntriesToDelete;
    {
        MutexLocker lock(m_mutex);

        // Head of the list is the least recently used entry.
        const CacheEntry* cacheEntry = m_orderedCacheList.head();

        // Walk the list of cache entries starting from the least recently used
        // and then keep them for deletion later.
        while (cacheEntry) {
            // Re-evaluated every iteration: removeFromCacheInternal() lowers
            // the usage counters as entries are marked for eviction.
            const bool isPruneNeeded = m_heapMemoryUsageInBytes > m_heapLimitInBytes || !m_heapLimitInBytes
                || m_discardableMemoryUsageInBytes > maxTotalSizeOfDiscardableEntries;
            if (!isPruneNeeded)
                break;

            // Cache is not used; Remove it.
            if (!cacheEntry->useCount())
                removeFromCacheInternal(cacheEntry, &cacheEntriesToDelete);
            cacheEntry = cacheEntry->next();
        }

        // Remove from cache list as well.
        removeFromCacheListInternal(cacheEntriesToDelete);
    }
}
    335 
// Pins the pixels of |cacheEntry|'s image and bumps its use count. If the
// underlying discardable memory was already purged by the system, the stale
// entry is evicted (queued on |deletionList| and removed from the LRU list)
// and false is returned. Caller must hold m_mutex.
bool ImageDecodingStore::lockCacheEntryInternal(ImageCacheEntry* cacheEntry, const ScaledImageFragment** cachedImage, Vector<OwnPtr<CacheEntry> >* deletionList)
{
    ScaledImageFragment* image = cacheEntry->cachedImage();

    image->bitmap().lockPixels();

    // Memory for this image entry might be discarded already.
    // In this case remove the entry.
    if (!image->bitmap().getPixels()) {
        image->bitmap().unlockPixels();
        removeFromCacheInternal(cacheEntry, &m_imageCacheMap, &m_imageCacheKeyMap, deletionList);
        removeFromCacheListInternal(*deletionList);
        return false;
    }
    cacheEntry->incrementUseCount();
    *cachedImage = image;
    return true;
}
    354 
// Takes ownership of |cacheEntry| and registers it in |cacheMap|, in the
// per-generator |identifierMap| and on the LRU list, charging its footprint
// to the discardable or heap budget as appropriate. Caller must hold m_mutex.
template<class T, class U, class V>
void ImageDecodingStore::insertCacheInternal(PassOwnPtr<T> cacheEntry, U* cacheMap, V* identifierMap)
{
    const size_t cacheEntryBytes = cacheEntry->memoryUsageInBytes();
    if (cacheEntry->isDiscardable())
        m_discardableMemoryUsageInBytes += cacheEntryBytes;
    else
        m_heapMemoryUsageInBytes += cacheEntryBytes;

    // m_orderedCacheList is used to support LRU operations to reorder cache
    // entries quickly.
    m_orderedCacheList.append(cacheEntry.get());

    // Index the key under its generator so removeCacheIndexedByGenerator()
    // can find all of a generator's entries later.
    typename U::KeyType key = cacheEntry->cacheKey();
    typename V::AddResult result = identifierMap->add(cacheEntry->generator(), typename V::MappedType());
    result.iterator->value.add(key);
    cacheMap->add(key, cacheEntry);

    TRACE_COUNTER1("webkit", "ImageDecodingStoreDiscardableMemoryUsageBytes", m_discardableMemoryUsageInBytes);
    TRACE_COUNTER1("webkit", "ImageDecodingStoreHeapMemoryUsageBytes", m_heapMemoryUsageInBytes);
    TRACE_COUNTER1("webkit", "ImageDecodingStoreNumOfImages", m_imageCacheMap.size());
    TRACE_COUNTER1("webkit", "ImageDecodingStoreNumOfDecoders", m_decoderCacheMap.size());
}
    378 
// Unregisters |cacheEntry| from |cacheMap| and |identifierMap|, refunds its
// footprint from the matching memory budget and appends the (now owned)
// entry to |deletionList| so it can be destroyed outside the lock. The LRU
// list is NOT touched here; callers follow up with
// removeFromCacheListInternal(). Caller must hold m_mutex.
template<class T, class U, class V>
void ImageDecodingStore::removeFromCacheInternal(const T* cacheEntry, U* cacheMap, V* identifierMap, Vector<OwnPtr<CacheEntry> >* deletionList)
{
    const size_t cacheEntryBytes = cacheEntry->memoryUsageInBytes();
    if (cacheEntry->isDiscardable()) {
        ASSERT(m_discardableMemoryUsageInBytes >= cacheEntryBytes);
        m_discardableMemoryUsageInBytes -= cacheEntryBytes;
    } else {
        ASSERT(m_heapMemoryUsageInBytes >= cacheEntryBytes);
        m_heapMemoryUsageInBytes -= cacheEntryBytes;

    }

    // Remove entry from identifier map.
    typename V::iterator iter = identifierMap->find(cacheEntry->generator());
    ASSERT(iter != identifierMap->end());
    iter->value.remove(cacheEntry->cacheKey());
    if (!iter->value.size())
        identifierMap->remove(iter);

    // Remove entry from cache map.
    deletionList->append(cacheMap->take(cacheEntry->cacheKey()));

    TRACE_COUNTER1("webkit", "ImageDecodingStoreDiscardableMemoryUsageBytes", m_discardableMemoryUsageInBytes);
    TRACE_COUNTER1("webkit", "ImageDecodingStoreHeapMemoryUsageBytes", m_heapMemoryUsageInBytes);
    TRACE_COUNTER1("webkit", "ImageDecodingStoreNumOfImages", m_imageCacheMap.size());
    TRACE_COUNTER1("webkit", "ImageDecodingStoreNumOfDecoders", m_decoderCacheMap.size());
}
    407 
    408 void ImageDecodingStore::removeFromCacheInternal(const CacheEntry* cacheEntry, Vector<OwnPtr<CacheEntry> >* deletionList)
    409 {
    410     if (cacheEntry->type() == CacheEntry::TypeImage) {
    411         removeFromCacheInternal(static_cast<const ImageCacheEntry*>(cacheEntry), &m_imageCacheMap, &m_imageCacheKeyMap, deletionList);
    412     } else if (cacheEntry->type() == CacheEntry::TypeDecoder) {
    413         removeFromCacheInternal(static_cast<const DecoderCacheEntry*>(cacheEntry), &m_decoderCacheMap, &m_decoderCacheKeyMap, deletionList);
    414     } else {
    415         ASSERT(false);
    416     }
    417 }
    418 
// Evicts every entry in |cacheMap| belonging to |generator|, using
// |identifierMap| as the per-generator index. The keys are copied into a
// local vector first because removeFromCacheInternal() mutates both maps
// while we iterate. Caller must hold m_mutex; the asserted precondition is
// that none of the generator's entries are still in use.
template<class U, class V>
void ImageDecodingStore::removeCacheIndexedByGeneratorInternal(U* cacheMap, V* identifierMap, const ImageFrameGenerator* generator, Vector<OwnPtr<CacheEntry> >* deletionList)
{
    typename V::iterator iter = identifierMap->find(generator);
    if (iter == identifierMap->end())
        return;

    // Get all cache identifiers associated with generator.
    Vector<typename U::KeyType> cacheIdentifierList;
    copyToVector(iter->value, cacheIdentifierList);

    // For each cache identifier find the corresponding CacheEntry and remove it.
    for (size_t i = 0; i < cacheIdentifierList.size(); ++i) {
        ASSERT(cacheMap->contains(cacheIdentifierList[i]));
        const typename U::MappedType::PtrType cacheEntry = cacheMap->get(cacheIdentifierList[i]);
        ASSERT(!cacheEntry->useCount());
        removeFromCacheInternal(cacheEntry, cacheMap, identifierMap, deletionList);
    }
}
    438 
    439 void ImageDecodingStore::removeFromCacheListInternal(const Vector<OwnPtr<CacheEntry> >& deletionList)
    440 {
    441     for (size_t i = 0; i < deletionList.size(); ++i)
    442         m_orderedCacheList.remove(deletionList[i].get());
    443 }
    444 
    445 } // namespace WebCore
    446