/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrResourceAllocator.h"

#include "GrGpuResourcePriv.h"
#include "GrOpList.h"
#include "GrRenderTargetProxy.h"
#include "GrResourceCache.h"
#include "GrResourceProvider.h"
#include "GrSurfacePriv.h"
#include "GrSurfaceProxy.h"
#include "GrSurfaceProxyPriv.h"
#include "GrTextureProxy.h"

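// Bind 'fProxy' to the given GrSurface. The interval keeps its own ref to the surface so
// that, once the interval expires, the surface can be detached and recycled via the free pool.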
void GrResourceAllocator::Interval::assign(sk_sp<GrSurface> s) {
    SkASSERT(!fAssignedSurface);
    fAssignedSurface = s;
    fProxy->priv().assign(std::move(s));
}


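// Record where one opList ends and the next begins. 'assign' uses these op indices to
// restrict any budget-forced intermediate flushes to opList boundaries.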
void GrResourceAllocator::markEndOfOpList(int opListIndex) {
    SkASSERT(!fAssigned);      // We shouldn't be adding any opLists after (or during) assignment

    SkASSERT(fEndOfOpListOpIndices.count() == opListIndex);
    if (!fEndOfOpListOpIndices.empty()) {
        SkASSERT(fEndOfOpListOpIndices.back() < this->curOp());
    }

    fEndOfOpListOpIndices.push_back(this->curOp()); // This is the first op index of the next opList
}

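// By destruction time every interval should have been assigned, expired and recycled.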
GrResourceAllocator::~GrResourceAllocator() {
#ifndef SK_DISABLE_EXPLICIT_GPU_RESOURCE_ALLOCATION
    SkASSERT(fIntvlList.empty());
    SkASSERT(fActiveIntvls.empty());
    SkASSERT(!fIntvlHash.count());
#endif
}

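// Add (or extend) the usage interval [start, end] for 'proxy'. Each proxy gets at most one
// interval: a repeated call for the same proxy just extends the existing interval's end.
// Interval nodes are recycled from the free list when possible, otherwise they come from
// the arena allocator.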
void GrResourceAllocator::addInterval(GrSurfaceProxy* proxy, unsigned int start, unsigned int end
                                      SkDEBUGCODE(, bool isDirectDstRead)) {
    SkASSERT(start <= end);
    SkASSERT(!fAssigned);      // We shouldn't be adding any intervals after (or during) assignment

    if (Interval* intvl = fIntvlHash.find(proxy->uniqueID().asUInt())) {
        // Revise the interval for an existing use
#ifdef SK_DEBUG
        if (0 == start && 0 == end) {
            // This interval is for the initial upload to a deferred proxy. Due to the vagaries
            // of how deferred proxies are collected they can appear as uploads multiple times in a
            // single opList's list and as uploads in several opLists.
            SkASSERT(0 == intvl->start());
        } else if (isDirectDstRead) {
            // Direct reads from the render target itself should occur w/in the existing interval
            SkASSERT(intvl->start() <= start && intvl->end() >= end);
        } else {
            SkASSERT(intvl->end() <= start && intvl->end() <= end);
        }
#endif
        intvl->extendEnd(end);
        return;
    }

    Interval* newIntvl;
    if (fFreeIntervalList) {
        newIntvl = fFreeIntervalList;
        fFreeIntervalList = newIntvl->next();
        newIntvl->resetTo(proxy, start, end);
    } else {
        newIntvl = fIntervalAllocator.make<Interval>(proxy, start, end);
    }

    fIntvlList.insertByIncreasingStart(newIntvl);
    fIntvlHash.add(newIntvl);

#ifdef SK_DISABLE_EXPLICIT_GPU_RESOURCE_ALLOCATION
    // FIXME: remove this once we can do the lazy instantiation from assign instead.
    if (GrSurfaceProxy::LazyState::kNot != proxy->lazyInstantiationState()) {
        proxy->priv().doLazyInstantiation(fResourceProvider);
    }
#endif
}

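// Remove and return the interval at the head of the list, or nullptr if the list is empty.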
GrResourceAllocator::Interval* GrResourceAllocator::IntervalList::popHead() {
    Interval* temp = fHead;
    if (temp) {
        fHead = temp->next();
    }
    return temp;
}

// TODO: fuse this with insertByIncreasingEnd
void GrResourceAllocator::IntervalList::insertByIncreasingStart(Interval* intvl) {
    if (!fHead) {
        intvl->setNext(nullptr);
        fHead = intvl;
    } else if (intvl->start() <= fHead->start()) {
        intvl->setNext(fHead);
        fHead = intvl;
    } else {
        Interval* prev = fHead;
        Interval* next = prev->next();
        for (; next && intvl->start() > next->start(); prev = next, next = next->next()) {
        }
        intvl->setNext(next);
        prev->setNext(intvl);
    }
}

// TODO: fuse this with insertByIncreasingStart
void GrResourceAllocator::IntervalList::insertByIncreasingEnd(Interval* intvl) {
    if (!fHead) {
        intvl->setNext(nullptr);
        fHead = intvl;
    } else if (intvl->end() <= fHead->end()) {
        intvl->setNext(fHead);
        fHead = intvl;
    } else {
        Interval* prev = fHead;
        Interval* next = prev->next();
        for (; next && intvl->end() > next->end(); prev = next, next = next->next()) {
        }
        intvl->setNext(next);
        prev->setNext(intvl);
    }
}

// 'surface' can be reused. Add it back to the free pool.
void GrResourceAllocator::freeUpSurface(sk_sp<GrSurface> surface) {
    const GrScratchKey &key = surface->resourcePriv().getScratchKey();

    if (!key.isValid()) {
        return; // can't do it w/o a valid scratch key
    }

    if (surface->getUniqueKey().isValid()) {
        // If the surface has a unique key we throw it back into the resource cache.
        // If things get really tight 'findSurfaceFor' may pull it back out but there is
        // no need to have it in tight rotation.
        return;
    }

    // TODO: fix this insertion so we get a more LRU-ish behavior
    fFreePool.insert(key, surface.release());
}

// First try to reuse one of the recently allocated/used GrSurfaces in the free pool.
// If we can't find a usable one, create a new one.
sk_sp<GrSurface> GrResourceAllocator::findSurfaceFor(const GrSurfaceProxy* proxy,
                                                     bool needsStencil) {
    // First look in the free pool
    GrScratchKey key;

    proxy->priv().computeScratchKey(&key);

    auto filter = [&] (const GrSurface* s) {
        return !proxy->priv().requiresNoPendingIO() || !s->surfacePriv().hasPendingIO();
    };
    sk_sp<GrSurface> surface(fFreePool.findAndRemove(key, filter));
    if (surface) {
        if (SkBudgeted::kYes == proxy->isBudgeted() &&
            SkBudgeted::kNo == surface->resourcePriv().isBudgeted()) {
            // This gets the job done but isn't quite correct. It would be better to try to
            // match budgeted proxies w/ budgeted surfaces and unbudgeted w/ unbudgeted.
            surface->resourcePriv().makeBudgeted();
        }

        GrSurfaceProxyPriv::AttachStencilIfNeeded(fResourceProvider, surface.get(), needsStencil);
        return surface;
    }

    // Failing that, try to grab a new one from the resource cache
    return proxy->priv().createSurface(fResourceProvider);
}

// Remove any intervals that end before the current index. Return their GrSurfaces
// to the free pool.
void GrResourceAllocator::expire(unsigned int curIndex) {
    while (!fActiveIntvls.empty() && fActiveIntvls.peekHead()->end() < curIndex) {
        Interval* temp = fActiveIntvls.popHead();

        if (temp->wasAssignedSurface()) {
            this->freeUpSurface(temp->detachSurface());
        }

        // Add temp to the free interval list so it can be reused
        temp->setNext(fFreeIntervalList);
        fFreeIntervalList = temp;
    }
}

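// Walk the intervals in increasing-start order, expiring intervals that have ended and
// finding (or lazily instantiating) a GrSurface for each proxy that still needs one.
// Returns false if there was nothing to assign; '*stopIndex' is pulled in to the next
// opList boundary if the resource provider goes over budget.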
bool GrResourceAllocator::assign(int* startIndex, int* stopIndex, AssignError* outError) {
    SkASSERT(outError);
    *outError = AssignError::kNoError;

    fIntvlHash.reset(); // we don't need the interval hash anymore
    if (fIntvlList.empty()) {
        return false;          // nothing to render
    }

    *startIndex = fCurOpListIndex;
    *stopIndex = fEndOfOpListOpIndices.count();

    SkDEBUGCODE(fAssigned = true;)

    while (Interval* cur = fIntvlList.popHead()) {
        if (fEndOfOpListOpIndices[fCurOpListIndex] < cur->start()) {
            fCurOpListIndex++;
        }

        this->expire(cur->start());

        bool needsStencil = cur->proxy()->asRenderTargetProxy()
                                            ? cur->proxy()->asRenderTargetProxy()->needsStencil()
                                            : false;

        if (cur->proxy()->priv().isInstantiated()) {
            GrSurfaceProxyPriv::AttachStencilIfNeeded(fResourceProvider,
                                                      cur->proxy()->priv().peekSurface(),
                                                      needsStencil);

            fActiveIntvls.insertByIncreasingEnd(cur);

            if (fResourceProvider->overBudget()) {
                // Only force intermediate draws on opList boundaries
                if (!fIntvlList.empty() &&
                    fEndOfOpListOpIndices[fCurOpListIndex] < fIntvlList.peekHead()->start()) {
                    *stopIndex = fCurOpListIndex+1;
                    return true;
                }
            }

            continue;
        }

        if (GrSurfaceProxy::LazyState::kNot != cur->proxy()->lazyInstantiationState()) {
            if (!cur->proxy()->priv().doLazyInstantiation(fResourceProvider)) {
                *outError = AssignError::kFailedProxyInstantiation;
            }
        } else if (sk_sp<GrSurface> surface = this->findSurfaceFor(cur->proxy(), needsStencil)) {
            // TODO: make getUniqueKey virtual on GrSurfaceProxy
            GrTextureProxy* tex = cur->proxy()->asTextureProxy();
            if (tex && tex->getUniqueKey().isValid()) {
                fResourceProvider->assignUniqueKeyToResource(tex->getUniqueKey(), surface.get());
                SkASSERT(surface->getUniqueKey() == tex->getUniqueKey());
            }

            cur->assign(std::move(surface));
        } else {
            SkASSERT(!cur->proxy()->priv().isInstantiated());
            *outError = AssignError::kFailedProxyInstantiation;
        }

        fActiveIntvls.insertByIncreasingEnd(cur);

        if (fResourceProvider->overBudget()) {
            // Only force intermediate draws on opList boundaries
            if (!fIntvlList.empty() &&
                fEndOfOpListOpIndices[fCurOpListIndex] < fIntvlList.peekHead()->start()) {
                *stopIndex = fCurOpListIndex+1;
                return true;
            }
        }
    }

    // expire all the remaining intervals to drain the active interval list
    this->expire(std::numeric_limits<unsigned int>::max());
    return true;
}