/*
 * Copyright 2006 The Android Open Source Project
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef SkRefCnt_DEFINED
#define SkRefCnt_DEFINED

#include "SkTypes.h"

#include <atomic>
#include <cstddef>
#include <functional>
#include <memory>
#include <ostream>
#include <type_traits>
#include <utility>

/** \class SkRefCntBase

    SkRefCntBase is the base class for objects that may be shared by multiple
    objects. When an existing owner wants to share a reference, it calls ref().
    When an owner wants to release its reference, it calls unref(). When the
    shared object's reference count goes to zero as the result of an unref()
    call, its (virtual) destructor is called. It is an error for the
    destructor to be called explicitly (or via the object going out of scope on
    the stack or calling delete) if getRefCnt() > 1.
*/
class SK_API SkRefCntBase {
public:
    /** Default construct, initializing the reference count to 1.
    */
    SkRefCntBase() : fRefCnt(1) {}

    /** Destruct, asserting that the reference count is 1.
    */
    virtual ~SkRefCntBase() {
    #ifdef SK_DEBUG
        SkASSERTF(this->getRefCnt() == 1, "fRefCnt was %d", this->getRefCnt());
        // illegal value, to catch us if we reuse after delete
        fRefCnt.store(0, std::memory_order_relaxed);
    #endif
    }

    /** May return true if the caller is the only owner.
     *  Ensures that all previous owners' actions are complete.
     */
    bool unique() const {
        if (1 == fRefCnt.load(std::memory_order_acquire)) {
            // The acquire barrier is only really needed if we return true.  It
            // prevents code conditioned on the result of unique() from running
            // until previous owners are all totally done calling unref().
            return true;
        }
        return false;
    }

    /** Increment the reference count. Must be balanced by a call to unref().
    */
    void ref() const {
        SkASSERT(this->getRefCnt() > 0);
        // No barrier required.
        (void)fRefCnt.fetch_add(+1, std::memory_order_relaxed);
    }

    /** Decrement the reference count. If the reference count is 1 before the
        decrement, then delete the object. Note that if this is the case, then
        the object needs to have been allocated via new, and not on the stack.
    */
    void unref() const {
        SkASSERT(this->getRefCnt() > 0);
        // A release here acts in place of all releases we "should" have been doing in ref().
        if (1 == fRefCnt.fetch_add(-1, std::memory_order_acq_rel)) {
            // Like unique(), the acquire is only needed on success, to make sure
            // code in internal_dispose() doesn't happen before the decrement.
            this->internal_dispose();
        }
    }

private:

#ifdef SK_DEBUG
    /** Return the reference count. Use only for debugging. */
    int32_t getRefCnt() const {
        return fRefCnt.load(std::memory_order_relaxed);
    }
#endif

    /**
     *  Called when the ref count goes to 0.
     */
    virtual void internal_dispose() const {
    #ifdef SK_DEBUG
        SkASSERT(0 == this->getRefCnt());
        fRefCnt.store(1, std::memory_order_relaxed);
    #endif
        delete this;
    }

    // The following friends are those which override internal_dispose()
    // and conditionally call SkRefCnt::internal_dispose().
    friend class SkWeakRefCnt;

    mutable std::atomic<int32_t> fRefCnt;

    SkRefCntBase(SkRefCntBase&&) = delete;
    SkRefCntBase(const SkRefCntBase&) = delete;
    SkRefCntBase& operator=(SkRefCntBase&&) = delete;
    SkRefCntBase& operator=(const SkRefCntBase&) = delete;
};
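
// Example usage (a minimal sketch; MyResource is a hypothetical client class,
// not part of this header). The count starts at 1 on construction, so the
// creating owner balances it with unref() rather than delete:
//
//   class MyResource : public SkRefCntBase {};
//
//   MyResource* res = new MyResource;  // refcount == 1
//   res->ref();                        // a second owner, refcount == 2
//   res->unref();                      // refcount == 1
//   res->unref();                      // refcount == 0, virtual dtor runs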

#ifdef SK_REF_CNT_MIXIN_INCLUDE
// It is the responsibility of the following include to define the type SkRefCnt.
// This SkRefCnt should normally derive from SkRefCntBase.
#include SK_REF_CNT_MIXIN_INCLUDE
#else
class SK_API SkRefCnt : public SkRefCntBase {
    // "#include SK_REF_CNT_MIXIN_INCLUDE" doesn't work with this build system.
    #if defined(SK_BUILD_FOR_GOOGLE3)
    public:
        void deref() const { this->unref(); }
    #endif
};
#endif

///////////////////////////////////////////////////////////////////////////////

/** Call obj->ref() and return obj. The obj must not be nullptr.
 */
template <typename T> static inline T* SkRef(T* obj) {
    SkASSERT(obj);
    obj->ref();
    return obj;
}

/** Check if the argument is non-null, and if so, call obj->ref() and return obj.
 */
template <typename T> static inline T* SkSafeRef(T* obj) {
    if (obj) {
        obj->ref();
    }
    return obj;
}

/** Check if the argument is non-null, and if so, call obj->unref().
 */
template <typename T> static inline void SkSafeUnref(T* obj) {
    if (obj) {
        obj->unref();
    }
}
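
// Example (a minimal sketch; setResource() and fRes are hypothetical members
// of some owning class). A common pattern is ref-before-unref, which stays
// safe even when the new and old pointers are the same object:
//
//   void setResource(MyResource* res) {
//       SkSafeRef(res);     // ref the incoming pointer, if non-null
//       SkSafeUnref(fRes);  // unref the old pointer, if non-null
//       fRes = res;
//   }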

///////////////////////////////////////////////////////////////////////////////

// This is a variant of SkRefCnt that's Not Virtual, so weighs 4 bytes instead of 8 or 16.
// There's only benefit to using this if the deriving class does not otherwise need a vtable.
template <typename Derived>
class SkNVRefCnt {
public:
    SkNVRefCnt() : fRefCnt(1) {}
    ~SkNVRefCnt() {
    #ifdef SK_DEBUG
        int rc = fRefCnt.load(std::memory_order_relaxed);
        SkASSERTF(rc == 1, "NVRefCnt was %d", rc);
    #endif
    }

    // Implementation is pretty much the same as SkRefCntBase. All required barriers are the same:
    //   - unique() needs acquire when it returns true, and no barrier if it returns false;
    //   - ref() doesn't need any barrier;
    //   - unref() needs a release barrier, and an acquire if it's going to call delete.

    bool unique() const { return 1 == fRefCnt.load(std::memory_order_acquire); }
    void ref() const { (void)fRefCnt.fetch_add(+1, std::memory_order_relaxed); }
    void unref() const {
        if (1 == fRefCnt.fetch_add(-1, std::memory_order_acq_rel)) {
            // restore the 1 for our destructor's assert
            SkDEBUGCODE(fRefCnt.store(1, std::memory_order_relaxed));
            delete (const Derived*)this;
        }
    }
    void deref() const { this->unref(); }

private:
    mutable std::atomic<int32_t> fRefCnt;

    SkNVRefCnt(SkNVRefCnt&&) = delete;
    SkNVRefCnt(const SkNVRefCnt&) = delete;
    SkNVRefCnt& operator=(SkNVRefCnt&&) = delete;
    SkNVRefCnt& operator=(const SkNVRefCnt&) = delete;
};
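
// Example (a minimal sketch; SmallThing is a hypothetical client class). The
// deriving class passes itself as Derived (CRTP) so that unref() can
// `delete (const Derived*)this` without needing a virtual destructor:
//
//   class SmallThing : public SkNVRefCnt<SmallThing> {
//   public:
//       ~SmallThing() { /* runs when the last unref() drops the count to 0 */ }
//   };
//
// On typical platforms this keeps the ref-count overhead at a single int32_t,
// with no vtable pointer added to SmallThing.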

///////////////////////////////////////////////////////////////////////////////////////////////////

/**
 *  Shared pointer class to wrap classes that support a ref()/unref() interface.
 *
 *  This can be used for classes inheriting from SkRefCnt, but it also works for other
 *  classes that match the interface but make different internal choices: e.g. the hosted
 *  class may have its ref/unref be thread-safe, but that is not assumed/imposed by sk_sp.
 */
template <typename T> class sk_sp {
public:
    using element_type = T;

    constexpr sk_sp() : fPtr(nullptr) {}
    constexpr sk_sp(std::nullptr_t) : fPtr(nullptr) {}

    /**
     *  Shares the underlying object by calling ref(), so that both the argument and the newly
     *  created sk_sp have a reference to it.
     */
    sk_sp(const sk_sp<T>& that) : fPtr(SkSafeRef(that.get())) {}
    template <typename U,
              typename = typename std::enable_if<std::is_convertible<U*, T*>::value>::type>
    sk_sp(const sk_sp<U>& that) : fPtr(SkSafeRef(that.get())) {}

    /**
     *  Move the underlying object from the argument to the newly created sk_sp. Afterwards only
     *  the new sk_sp will have a reference to the object, and the argument will point to null.
     *  No call to ref() or unref() will be made.
     */
    sk_sp(sk_sp<T>&& that) : fPtr(that.release()) {}
    template <typename U,
              typename = typename std::enable_if<std::is_convertible<U*, T*>::value>::type>
    sk_sp(sk_sp<U>&& that) : fPtr(that.release()) {}

    /**
     *  Adopt the bare pointer into the newly created sk_sp.
     *  No call to ref() or unref() will be made.
     */
    explicit sk_sp(T* obj) : fPtr(obj) {}

    /**
     *  Calls unref() on the underlying object pointer.
     */
    ~sk_sp() {
        SkSafeUnref(fPtr);
        SkDEBUGCODE(fPtr = nullptr);
    }

    sk_sp<T>& operator=(std::nullptr_t) { this->reset(); return *this; }

    /**
     *  Shares the underlying object referenced by the argument by calling ref() on it. If this
     *  sk_sp previously had a reference to an object (i.e. not null) it will call unref() on that
     *  object.
     */
    sk_sp<T>& operator=(const sk_sp<T>& that) {
        if (this != &that) {
            this->reset(SkSafeRef(that.get()));
        }
        return *this;
    }
    template <typename U,
              typename = typename std::enable_if<std::is_convertible<U*, T*>::value>::type>
    sk_sp<T>& operator=(const sk_sp<U>& that) {
        this->reset(SkSafeRef(that.get()));
        return *this;
    }

    /**
     *  Move the underlying object from the argument to the sk_sp. If the sk_sp previously held
     *  a reference to another object, unref() will be called on that object. No call to ref()
     *  will be made.
     */
    sk_sp<T>& operator=(sk_sp<T>&& that) {
        this->reset(that.release());
        return *this;
    }
    template <typename U,
              typename = typename std::enable_if<std::is_convertible<U*, T*>::value>::type>
    sk_sp<T>& operator=(sk_sp<U>&& that) {
        this->reset(that.release());
        return *this;
    }

    T& operator*() const {
        SkASSERT(this->get() != nullptr);
        return *this->get();
    }

    explicit operator bool() const { return this->get() != nullptr; }

    T* get() const { return fPtr; }
    T* operator->() const { return fPtr; }

    /**
     *  Adopt the new bare pointer, and call unref() on any previously held object (if not null).
     *  No call to ref() will be made.
     */
    void reset(T* ptr = nullptr) {
        // Calling fPtr->unref() may call this->~() or this->reset(T*).
        // http://wg21.cmeerw.net/lwg/issue998
        // http://wg21.cmeerw.net/lwg/issue2262
        T* oldPtr = fPtr;
        fPtr = ptr;
        SkSafeUnref(oldPtr);
    }

    /**
     *  Return the bare pointer, and set the internal object pointer to nullptr.
     *  The caller must assume ownership of the object, and manage its reference count directly.
     *  No call to unref() will be made.
     */
    T* SK_WARN_UNUSED_RESULT release() {
        T* ptr = fPtr;
        fPtr = nullptr;
        return ptr;
    }

    void swap(sk_sp<T>& that) /*noexcept*/ {
        using std::swap;
        swap(fPtr, that.fPtr);
    }

private:
    T*  fPtr;
};
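
// Example usage (a minimal sketch; Widget is a hypothetical SkRefCnt-derived
// class, not part of this header):
//
//   sk_sp<Widget> a(new Widget);     // adopts the pointer: refcount stays at 1
//   sk_sp<Widget> b = a;             // shares: ref() called, refcount == 2
//   sk_sp<Widget> c = std::move(a);  // moves: no ref()/unref(), a is now null
//   b.reset();                       // unref() called, refcount == 1
//   c = nullptr;                     // unref() called, refcount == 0, deleted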

template <typename T> inline void swap(sk_sp<T>& a, sk_sp<T>& b) /*noexcept*/ {
    a.swap(b);
}

template <typename T, typename U> inline bool operator==(const sk_sp<T>& a, const sk_sp<U>& b) {
    return a.get() == b.get();
}
template <typename T> inline bool operator==(const sk_sp<T>& a, std::nullptr_t) /*noexcept*/ {
    return !a;
}
template <typename T> inline bool operator==(std::nullptr_t, const sk_sp<T>& b) /*noexcept*/ {
    return !b;
}

template <typename T, typename U> inline bool operator!=(const sk_sp<T>& a, const sk_sp<U>& b) {
    return a.get() != b.get();
}
template <typename T> inline bool operator!=(const sk_sp<T>& a, std::nullptr_t) /*noexcept*/ {
    return static_cast<bool>(a);
}
template <typename T> inline bool operator!=(std::nullptr_t, const sk_sp<T>& b) /*noexcept*/ {
    return static_cast<bool>(b);
}

template <typename T, typename U> inline bool operator<(const sk_sp<T>& a, const sk_sp<U>& b) {
    // Provide defined total order on sk_sp.
    // http://wg21.cmeerw.net/lwg/issue1297
    // http://wg21.cmeerw.net/lwg/issue1401 .
    return std::less<typename std::common_type<T*, U*>::type>()(a.get(), b.get());
}
template <typename T> inline bool operator<(const sk_sp<T>& a, std::nullptr_t) {
    return std::less<T*>()(a.get(), nullptr);
}
template <typename T> inline bool operator<(std::nullptr_t, const sk_sp<T>& b) {
    return std::less<T*>()(nullptr, b.get());
}

template <typename T, typename U> inline bool operator<=(const sk_sp<T>& a, const sk_sp<U>& b) {
    return !(b < a);
}
template <typename T> inline bool operator<=(const sk_sp<T>& a, std::nullptr_t) {
    return !(nullptr < a);
}
template <typename T> inline bool operator<=(std::nullptr_t, const sk_sp<T>& b) {
    return !(b < nullptr);
}

template <typename T, typename U> inline bool operator>(const sk_sp<T>& a, const sk_sp<U>& b) {
    return b < a;
}
template <typename T> inline bool operator>(const sk_sp<T>& a, std::nullptr_t) {
    return nullptr < a;
}
template <typename T> inline bool operator>(std::nullptr_t, const sk_sp<T>& b) {
    return b < nullptr;
}

template <typename T, typename U> inline bool operator>=(const sk_sp<T>& a, const sk_sp<U>& b) {
    return !(a < b);
}
template <typename T> inline bool operator>=(const sk_sp<T>& a, std::nullptr_t) {
    return !(a < nullptr);
}
template <typename T> inline bool operator>=(std::nullptr_t, const sk_sp<T>& b) {
    return !(nullptr < b);
}

template <typename C, typename CT, typename T>
auto operator<<(std::basic_ostream<C, CT>& os, const sk_sp<T>& sp) -> decltype(os << sp.get()) {
    return os << sp.get();
}

template <typename T, typename... Args>
sk_sp<T> sk_make_sp(Args&&... args) {
    return sk_sp<T>(new T(std::forward<Args>(args)...));
}
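
// Example (a minimal sketch; Widget and its int constructor are hypothetical):
//
//   sk_sp<Widget> w = sk_make_sp<Widget>(42);  // forwards 42 to Widget(int) and
//                                              // adopts the result at refcount 1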

/*
 *  Returns a sk_sp wrapping the provided ptr AND calls ref on it (if not null).
 *
 *  This is different from the semantics of the constructor for sk_sp, which just wraps the ptr,
 *  effectively "adopting" it.
 */
template <typename T> sk_sp<T> sk_ref_sp(T* obj) {
    return sk_sp<T>(SkSafeRef(obj));
}

template <typename T> sk_sp<T> sk_ref_sp(const T* obj) {
    return sk_sp<T>(const_cast<T*>(SkSafeRef(obj)));
}

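// Example (a minimal sketch; Widget is hypothetical). Contrast sharing with
// adopting:
//
//   sk_sp<Widget> owner(new Widget);                 // adopts: refcount == 1
//   sk_sp<Widget> shared = sk_ref_sp(owner.get());   // ref() called: refcount == 2
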
#endif