// Home | History | Annotate | Download | only in private
      1 /*
      2  * Copyright 2016 Google Inc.
      3  *
      4  * Use of this source code is governed by a BSD-style license that can be
      5  * found in the LICENSE file.
      6  */
      7 
      8 #ifndef GrSurfaceProxy_DEFINED
      9 #define GrSurfaceProxy_DEFINED
     10 
     11 #include "GrGpuResource.h"
     12 #include "GrSurface.h"
     13 
     14 #include "SkRect.h"
     15 
     16 class GrBackendTexture;
     17 class GrCaps;
     18 class GrOpList;
     19 class GrProxyProvider;
     20 class GrRenderTargetOpList;
     21 class GrRenderTargetProxy;
     22 class GrResourceProvider;
     23 class GrSurfaceContext;
     24 class GrSurfaceProxyPriv;
     25 class GrTextureOpList;
     26 class GrTextureProxy;
     27 
// This class replicates the functionality of GrIORef<GrSurface> but tracks the
// utilization for later resource allocation (for the deferred case) and
// forwards on the utilization in the wrapped case.
//
// A proxy is in one of two states:
//   deferred - 'fTarget' is null; ref and pending-IO counts are accumulated
//              locally so they can be transferred to the GrSurface once it is
//              instantiated (see transferRefs()).
//   wrapped  - 'fTarget' points at a live GrSurface; every count change is
//              also forwarded to the target so the two stay in sync.
class GrIORefProxy : public SkNoncopyable {
public:
    // Adds a ref to this proxy and, when wrapped, to the backing surface too.
    void ref() const {
        this->validate();

        ++fRefCnt;
        if (fTarget) {
            fTarget->ref();
        }
    }

    // Removes a ref, forwarding to the backing surface first when wrapped.
    // May 'delete this' (via didRemoveRefOrPendingIO) when the last
    // ref/pending-IO goes away - do not touch the proxy afterwards.
    void unref() const {
        this->validate();

        if (fTarget) {
            fTarget->unref();
        }

        --fRefCnt;
        this->didRemoveRefOrPendingIO();
    }

#ifdef SK_DEBUG
    // True iff exactly one ref or pending IO remains. For asserts.
    bool isUnique_debugOnly() const { // For asserts.
        SkASSERT(fRefCnt >= 0 && fPendingWrites >= 0 && fPendingReads >= 0);
        return 1 == fRefCnt + fPendingWrites + fPendingReads;
    }
#endif

    // Drops the proxy's ref on the wrapped surface and detaches from it,
    // returning this proxy to the deferred state. Only legal when the proxy
    // holds the sole outstanding ref and no IO is pending (asserted below).
    void release() {
        SkASSERT(1 == fRefCnt);
        SkASSERT(0 == fPendingReads);
        SkASSERT(0 == fPendingWrites);

        SkASSERT(fTarget->internalHasUniqueRef());
        SkASSERT(!fTarget->internalHasPendingIO());
        fTarget->unref();
        fTarget = nullptr;
    }

    // Debug-only invariant checks; a no-op in release builds.
    void validate() const {
#ifdef SK_DEBUG
        SkASSERT(fRefCnt >= 0);
        SkASSERT(fPendingReads >= 0);
        SkASSERT(fPendingWrites >= 0);
        // A live proxy always holds at least one ref or pending IO
        // (didRemoveRefOrPendingIO deletes it when all counts hit zero).
        SkASSERT(fRefCnt + fPendingReads + fPendingWrites >= 1);

        if (fTarget) {
            // The backing GrSurface can have more refs than the proxy if the proxy
            // started off wrapping an external resource (that came in with refs).
            // The GrSurface should never have fewer refs than the proxy however.
            SkASSERT(fTarget->fRefCnt >= fRefCnt);
            SkASSERT(fTarget->fPendingReads >= fPendingReads);
            SkASSERT(fTarget->fPendingWrites >= fPendingWrites);
        }
#endif
    }

    // Test-only accessors for the raw bookkeeping counts.
    int32_t getProxyRefCnt_TestOnly() const;
    int32_t getBackingRefCnt_TestOnly() const;
    int32_t getPendingReadCnt_TestOnly() const;
    int32_t getPendingWriteCnt_TestOnly() const;

    // Registers a pending read, forwarding it to the surface when wrapped.
    void addPendingRead() const {
        this->validate();

        ++fPendingReads;
        if (fTarget) {
            fTarget->addPendingRead();
        }
    }

    // Completes a pending read. May 'delete this'; see unref().
    void completedRead() const {
        this->validate();

        if (fTarget) {
            fTarget->completedRead();
        }

        --fPendingReads;
        this->didRemoveRefOrPendingIO();
    }

    // Registers a pending write, forwarding it to the surface when wrapped.
    void addPendingWrite() const {
        this->validate();

        ++fPendingWrites;
        if (fTarget) {
            fTarget->addPendingWrite();
        }
    }

    // Completes a pending write. May 'delete this'; see unref().
    void completedWrite() const {
        this->validate();

        if (fTarget) {
            fTarget->completedWrite();
        }

        --fPendingWrites;
        this->didRemoveRefOrPendingIO();
    }

protected:
    // Deferred version: no backing surface yet; starts with the creation ref.
    GrIORefProxy() : fTarget(nullptr), fRefCnt(1), fPendingReads(0), fPendingWrites(0) {}
    // Wrapped version: takes over the caller's ref on 'surface'.
    GrIORefProxy(sk_sp<GrSurface> surface) : fRefCnt(1), fPendingReads(0), fPendingWrites(0) {
        // Since we're manually forwarding on refs & unrefs we don't want sk_sp doing
        // anything extra.
        fTarget = surface.release();
    }
    virtual ~GrIORefProxy() {
        // We don't unref 'fTarget' here since the 'unref' method will already
        // have forwarded on the unref call that got us here.
    }

    // This GrIORefProxy was deferred before but has just been instantiated. To
    // make all the reffing & unreffing work out we now need to transfer any deferred
    // refs & unrefs to the new GrSurface.
    void transferRefs() {
        SkASSERT(fTarget);

        SkASSERT(fTarget->fRefCnt > 0);
        fTarget->fRefCnt += (fRefCnt-1); // don't xfer the proxy's creation ref
        fTarget->fPendingReads += fPendingReads;
        fTarget->fPendingWrites += fPendingWrites;
    }

    // True if any reads or writes are outstanding; defers to the surface when wrapped.
    bool internalHasPendingIO() const {
        if (fTarget) {
            return fTarget->internalHasPendingIO();
        }

        return SkToBool(fPendingWrites | fPendingReads);
    }

    // True if any writes are outstanding; defers to the surface when wrapped.
    bool internalHasPendingWrite() const {
        if (fTarget) {
            return fTarget->internalHasPendingWrite();
        }

        return SkToBool(fPendingWrites);
    }

    // For deferred proxies this will be null. For wrapped proxies it will point to the
    // wrapped resource.
    GrSurface* fTarget;

private:
    // This class is used to manage conversion of refs to pending reads/writes.
    friend class GrSurfaceProxyRef;
    template <typename, GrIOType> friend class GrPendingIOResource;

    // Self-deletes once the last ref and the last pending IO are gone.
    void didRemoveRefOrPendingIO() const {
        if (0 == fPendingReads && 0 == fPendingWrites && 0 == fRefCnt) {
            delete this;
        }
    }

    // Mutable because the ref/IO bookkeeping happens through const methods.
    mutable int32_t fRefCnt;
    mutable int32_t fPendingReads;
    mutable int32_t fPendingWrites;
};
    193 
class GrSurfaceProxy : public GrIORefProxy {
public:
    enum class LazyInstantiationType {
        kSingleUse,    // Instantiation callback is allowed to be called only once
        kMultipleUse,  // Instantiation callback can be called multiple times.
    };

    enum class LazyState {
        kNot,       // The proxy is instantiated or does not have a lazy callback
        kPartially, // The proxy has a lazy callback but knows basic information about itself.
        kFully,     // The proxy has a lazy callback and also doesn't know its width, height, etc.
    };

    // Classifies how much is still unknown about this proxy (see LazyState).
    LazyState lazyInstantiationState() const {
        if (fTarget || !SkToBool(fLazyInstantiateCallback)) {
            return LazyState::kNot;
        } else {
            if (fWidth <= 0) {
                // Fully-lazy proxies carry non-positive (unknown) dimensions.
                SkASSERT(fHeight <= 0);
                return LazyState::kFully;
            } else {
                SkASSERT(fHeight > 0);
                return LazyState::kPartially;
            }
        }
    }

    GrPixelConfig config() const { return fConfig; }
    // Dimensions are undefined for fully-lazy proxies, hence the asserts below.
    int width() const {
        SkASSERT(LazyState::kFully != this->lazyInstantiationState());
        return fWidth;
    }
    int height() const {
        SkASSERT(LazyState::kFully != this->lazyInstantiationState());
        return fHeight;
    }
    // Largest dimensions the backing surface might be allocated with
    // (presumably larger than width()/height() for kApprox fits - defined elsewhere).
    int worstCaseWidth() const;
    int worstCaseHeight() const;
    GrSurfaceOrigin origin() const {
        SkASSERT(kTopLeft_GrSurfaceOrigin == fOrigin || kBottomLeft_GrSurfaceOrigin == fOrigin);
        return fOrigin;
    }

    // Value-type wrapper for a proxy's unique id; see the contract comment below.
    class UniqueID {
    public:
        static UniqueID InvalidID() {
            return UniqueID(uint32_t(SK_InvalidUniqueID));
        }

        // wrapped
        explicit UniqueID(const GrGpuResource::UniqueID& id) : fID(id.asUInt()) { }
        // deferred and lazy-callback
        UniqueID() : fID(GrGpuResource::CreateUniqueID()) { }

        uint32_t asUInt() const { return fID; }

        bool operator==(const UniqueID& other) const {
            return fID == other.fID;
        }
        bool operator!=(const UniqueID& other) const {
            return !(*this == other);
        }

        void makeInvalid() { fID = SK_InvalidUniqueID; }
        bool isInvalid() const { return SK_InvalidUniqueID == fID; }

    private:
        explicit UniqueID(uint32_t id) : fID(id) {}

        uint32_t fID;
    };

    /*
     * The contract for the uniqueID is:
     *   for wrapped resources:
     *      the uniqueID will match that of the wrapped resource
     *
     *   for deferred resources:
     *      the uniqueID will be different from the real resource, when it is allocated
     *      the proxy's uniqueID will not change across the instantiate call
     *
     *    the uniqueIDs of the proxies and the resources draw from the same pool
     *
     * What this boils down to is that the uniqueID of a proxy can be used to consistently
     * track/identify a proxy but should never be used to distinguish between
     * resources and proxies - beware!
     */
    UniqueID uniqueID() const { return fUniqueID; }

    // Returns the backing surface's id when instantiated, otherwise the proxy's own id.
    UniqueID underlyingUniqueID() const {
        if (fTarget) {
            return UniqueID(fTarget->uniqueID());
        }

        return fUniqueID;
    }

    // Creates/attaches the backing GrSurface; subclasses define the specifics.
    virtual bool instantiate(GrResourceProvider* resourceProvider) = 0;

    // Releases the backing GrSurface (defined elsewhere).
    void deInstantiate();

    /**
     * Helper that gets the width and height of the surface as a bounding rectangle.
     */
    SkRect getBoundsRect() const {
        SkASSERT(LazyState::kFully != this->lazyInstantiationState());
        return SkRect::MakeIWH(this->width(), this->height());
    }

    /**
     * @return the texture proxy associated with the surface proxy, may be NULL.
     */
    virtual GrTextureProxy* asTextureProxy() { return nullptr; }
    virtual const GrTextureProxy* asTextureProxy() const { return nullptr; }

    /**
     * @return the render target proxy associated with the surface proxy, may be NULL.
     */
    virtual GrRenderTargetProxy* asRenderTargetProxy() { return nullptr; }
    virtual const GrRenderTargetProxy* asRenderTargetProxy() const { return nullptr; }

    /**
     * Does the resource count against the resource budget?
     */
    SkBudgeted isBudgeted() const { return fBudgeted; }

    // Tracks the last opList that wrote to (or will write to) this surface; see fLastOpList.
    void setLastOpList(GrOpList* opList);
    GrOpList* getLastOpList() { return fLastOpList; }

    GrRenderTargetOpList* getLastRenderTargetOpList();
    GrTextureOpList* getLastTextureOpList();

    /**
     * Retrieves the amount of GPU memory that will be or currently is used by this resource
     * in bytes. It is approximate since we aren't aware of additional padding or copies made
     * by the driver.
     *
     * @return the amount of GPU memory used in bytes
     */
    size_t gpuMemorySize() const {
        SkASSERT(LazyState::kFully != this->lazyInstantiationState());
        if (fTarget) {
            // Instantiated: the backing resource knows its exact size.
            return fTarget->gpuMemorySize();
        }
        // Deferred: compute once and cache in the mutable fGpuMemorySize.
        if (kInvalidGpuMemorySize == fGpuMemorySize) {
            fGpuMemorySize = this->onUninstantiatedGpuMemorySize();
            SkASSERT(kInvalidGpuMemorySize != fGpuMemorySize);
        }
        return fGpuMemorySize;
    }

    // Helper function that creates a temporary SurfaceContext to perform the copy
    // It always returns a kExact-backed proxy bc it is used when converting an SkSpecialImage
    // to an SkImage. The copy is not a render target and not multisampled.
    static sk_sp<GrTextureProxy> Copy(GrContext*, GrSurfaceProxy* src, GrMipMapped,
                                      SkIRect srcRect, SkBudgeted);

    // Copy the entire 'src'
    // It always returns a kExact-backed proxy bc it is used in SkGpuDevice::snapSpecial
    static sk_sp<GrTextureProxy> Copy(GrContext* context, GrSurfaceProxy* src, GrMipMapped,
                                      SkBudgeted budgeted);

    // Test-only entry point - should decrease in use as proxies propagate
    static sk_sp<GrSurfaceContext> TestCopy(GrContext* context, const GrSurfaceDesc& dstDesc,
                                            GrSurfaceProxy* srcProxy);

    bool isWrapped_ForTesting() const;

    SkDEBUGCODE(void validate(GrContext*) const;)

    // Provides access to functions that aren't part of the public API.
    inline GrSurfaceProxyPriv priv();
    inline const GrSurfaceProxyPriv priv() const;

protected:
    // Deferred version
    GrSurfaceProxy(const GrSurfaceDesc& desc, SkBackingFit fit, SkBudgeted budgeted, uint32_t flags)
            : GrSurfaceProxy(nullptr, LazyInstantiationType::kSingleUse,
                             desc, fit, budgeted, flags) {
        // Note: this ctor pulls a new uniqueID from the same pool as the GrGpuResources
    }

    // Signature of the callback used to create the backing surface on demand.
    using LazyInstantiateCallback = std::function<sk_sp<GrSurface>(GrResourceProvider*)>;

    // Lazy-callback version
    GrSurfaceProxy(LazyInstantiateCallback&& callback, LazyInstantiationType lazyType,
                   const GrSurfaceDesc& desc, SkBackingFit fit, SkBudgeted budgeted,
                   uint32_t flags);

    // Wrapped version
    GrSurfaceProxy(sk_sp<GrSurface> surface, GrSurfaceOrigin origin, SkBackingFit fit);

    virtual ~GrSurfaceProxy();

    friend class GrSurfaceProxyPriv;

    // Methods made available via GrSurfaceProxyPriv
    bool hasPendingIO() const {
        return this->internalHasPendingIO();
    }

    bool hasPendingWrite() const {
        return this->internalHasPendingWrite();
    }

    // Derives the scratch key used to recycle a compatible GrSurface (defined elsewhere).
    void computeScratchKey(GrScratchKey*) const;

    // Subclass hook that actually creates the backing surface.
    virtual sk_sp<GrSurface> createSurface(GrResourceProvider*) const = 0;
    // Adopts 'surface' as the backing resource for this proxy (defined elsewhere).
    void assign(sk_sp<GrSurface> surface);

    // Shared implementation helpers for subclasses' createSurface/instantiate.
    sk_sp<GrSurface> createSurfaceImpl(GrResourceProvider*, int sampleCnt, bool needsStencil,
                                       GrSurfaceFlags flags, GrMipMapped mipMapped) const;

    bool instantiateImpl(GrResourceProvider* resourceProvider, int sampleCnt, bool needsStencil,
                         GrSurfaceFlags flags, GrMipMapped mipMapped, const GrUniqueKey*);

private:
    // For wrapped resources, 'fConfig', 'fWidth', 'fHeight', and 'fOrigin; will always be filled in
    // from the wrapped resource.
    GrPixelConfig        fConfig;
    int                  fWidth;
    int                  fHeight;
    GrSurfaceOrigin      fOrigin;
    SkBackingFit         fFit;      // always kApprox for lazy-callback resources
                                    // always kExact for wrapped resources
    mutable SkBudgeted   fBudgeted; // always kYes for lazy-callback resources
                                    // set from the backing resource for wrapped resources
                                    // mutable bc of SkSurface/SkImage wishy-washiness
    const uint32_t       fFlags;

    const UniqueID       fUniqueID; // set from the backing resource for wrapped resources

    LazyInstantiateCallback fLazyInstantiateCallback;
    // If this is set to kSingleUse, then after one call to fLazyInstantiateCallback we will cleanup
    // the lazy callback and then delete it. This will allow for any refs and resources being held
    // by the standard function to be released. This is specifically useful in non-dll cases where
    // we make lazy proxies and instantiate them immediately.
    // Note: This is ignored if fLazyInstantiateCallback is null.
    LazyInstantiationType fLazyInstantiationType;
    // Debug hook letting subclasses check a lazily-created surface is compatible.
    SkDEBUGCODE(virtual void validateLazySurface(const GrSurface*) = 0;)

    // Sentinel meaning fGpuMemorySize has not been computed yet.
    static const size_t kInvalidGpuMemorySize = ~static_cast<size_t>(0);
    SkDEBUGCODE(size_t getRawGpuMemorySize_debugOnly() const { return fGpuMemorySize; })

    // Size estimate used while the proxy is still deferred.
    virtual size_t onUninstantiatedGpuMemorySize() const = 0;

    // NOTE(review): presumably flags that the surface contents must be cleared
    // when instantiated; its use is not visible in this header - confirm at call sites.
    bool                 fNeedsClear;

    // This entry is lazily evaluated so, when the proxy wraps a resource, the resource
    // will be called but, when the proxy is deferred, it will compute the answer itself.
    // If the proxy computes its own answer that answer is checked (in debug mode) in
    // the instantiation method.
    mutable size_t      fGpuMemorySize;

    // The last opList that wrote to or is currently going to write to this surface
    // The opList can be closed (e.g., no surface context is currently bound
    // to this proxy).
    // This back-pointer is required so that we can add a dependency between
    // the opList used to create the current contents of this surface
    // and the opList of a destination surface to which this one is being drawn or copied.
    // This pointer is unreffed. OpLists own a ref on their surface proxies.
    GrOpList* fLastOpList;

    typedef GrIORefProxy INHERITED;
};
    459 
    460 #endif
    461