// art/runtime/handle_scope.h
      1 /*
      2  * Copyright (C) 2014 The Android Open Source Project
      3  *
      4  * Licensed under the Apache License, Version 2.0 (the "License");
      5  * you may not use this file except in compliance with the License.
      6  * You may obtain a copy of the License at
      7  *
      8  *      http://www.apache.org/licenses/LICENSE-2.0
      9  *
     10  * Unless required by applicable law or agreed to in writing, software
     11  * distributed under the License is distributed on an "AS IS" BASIS,
     12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     13  * See the License for the specific language governing permissions and
     14  * limitations under the License.
     15  */
     16 
     17 #ifndef ART_RUNTIME_HANDLE_SCOPE_H_
     18 #define ART_RUNTIME_HANDLE_SCOPE_H_
     19 
     20 #include <stack>
     21 
     22 #include "base/enums.h"
     23 #include "base/logging.h"
     24 #include "base/macros.h"
     25 #include "base/mutex.h"
     26 #include "handle.h"
     27 #include "stack_reference.h"
     28 #include "verify_object.h"
     29 
     30 namespace art {
     31 
     32 class HandleScope;
     33 template<class MirrorType> class ObjPtr;
     34 class Thread;
     35 class VariableSizedHandleScope;
     36 
     37 namespace mirror {
     38 class Object;
     39 }
     40 
// Basic handle scope, tracked by a list. May be variable sized.
class PACKED(4) BaseHandleScope {
 public:
  // Whether this is a VariableSizedHandleScope. Variable sized scopes store
  // the sentinel kNumReferencesVariableSized (-1) instead of a real count.
  bool IsVariableSized() const {
    return number_of_references_ == kNumReferencesVariableSized;
  }

  // Number of references contained within this handle scope.
  ALWAYS_INLINE uint32_t NumberOfReferences() const;

  // Returns true if handle_scope_entry points into this scope's reference storage.
  ALWAYS_INLINE bool Contains(StackReference<mirror::Object>* handle_scope_entry) const;

  // Visits the scope's references as GC roots.
  template <typename Visitor>
  ALWAYS_INLINE void VisitRoots(Visitor& visitor) REQUIRES_SHARED(Locks::mutator_lock_);

  // Link to previous BaseHandleScope or null.
  BaseHandleScope* GetLink() const {
    return link_;
  }

  // Casts to the concrete scope kind; discriminated by IsVariableSized().
  ALWAYS_INLINE VariableSizedHandleScope* AsVariableSized();
  ALWAYS_INLINE HandleScope* AsHandleScope();
  ALWAYS_INLINE const VariableSizedHandleScope* AsVariableSized() const;
  ALWAYS_INLINE const HandleScope* AsHandleScope() const;

 protected:
  // Fixed-size constructor.
  BaseHandleScope(BaseHandleScope* link, uint32_t num_references)
      : link_(link),
        number_of_references_(num_references) {}

  // Variable sized constructor.
  explicit BaseHandleScope(BaseHandleScope* link)
      : link_(link),
        number_of_references_(kNumReferencesVariableSized) {}

  // Sentinel stored in number_of_references_ for variable sized scopes.
  static constexpr int32_t kNumReferencesVariableSized = -1;

  // Link-list of handle scopes. The root is held by a Thread.
  // NOTE: must be the first field — generated code assumes the link is at
  // offset 0 (see HandleScope::LinkOffset).
  BaseHandleScope* const link_;

  // Number of handlerized references. -1 for variable sized handle scopes.
  const int32_t number_of_references_;

 private:
  DISALLOW_COPY_AND_ASSIGN(BaseHandleScope);
};
     87 
// HandleScopes are scoped objects containing a number of Handles. They are used to allocate
// handles, for these handles (and the objects contained within them) to be visible/roots for the
// GC. It is most common to stack allocate HandleScopes using StackHandleScope.
class PACKED(4) HandleScope : public BaseHandleScope {
 public:
  ~HandleScope() {}

  // We have versions with and without explicit pointer size of the following. The first two are
  // used at runtime, so OFFSETOF_MEMBER computes the right offsets automatically. The last one
  // takes the pointer size explicitly so that at compile time we can cross-compile correctly.

  // Returns the size of a HandleScope containing num_references handles.
  static size_t SizeOf(uint32_t num_references);

  // Returns the size of a HandleScope containing num_references handles.
  static size_t SizeOf(PointerSize pointer_size, uint32_t num_references);

  // Returns the object stored in slot i (may be null).
  ALWAYS_INLINE mirror::Object* GetReference(size_t i) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns a handle wrapping the reference stored at slot i.
  ALWAYS_INLINE Handle<mirror::Object> GetHandle(size_t i);

  // Returns a mutable handle wrapping the reference stored at slot i.
  ALWAYS_INLINE MutableHandle<mirror::Object> GetMutableHandle(size_t i)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Stores object into slot i.
  ALWAYS_INLINE void SetReference(size_t i, mirror::Object* object)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns true if handle_scope_entry points into this scope's reference storage.
  ALWAYS_INLINE bool Contains(StackReference<mirror::Object>* handle_scope_entry) const;

  // Offset of link within HandleScope, used by generated code.
  static constexpr size_t LinkOffset(PointerSize pointer_size ATTRIBUTE_UNUSED) {
    return 0;
  }

  // Offset of length within handle scope, used by generated code.
  static constexpr size_t NumberOfReferencesOffset(PointerSize pointer_size) {
    return static_cast<size_t>(pointer_size);
  }

  // Offset of the reference storage within handle scope, used by generated code.
  // (The references array starts immediately after link_ and number_of_references_.)
  static constexpr size_t ReferencesOffset(PointerSize pointer_size) {
    return NumberOfReferencesOffset(pointer_size) + sizeof(number_of_references_);
  }

  // Placement new creation; storage must be at least SizeOf(num_references) bytes.
  static HandleScope* Create(void* storage, BaseHandleScope* link, uint32_t num_references)
      WARN_UNUSED {
    return new (storage) HandleScope(link, num_references);
  }

  // Number of references contained within this handle scope.
  ALWAYS_INLINE uint32_t NumberOfReferences() const {
    DCHECK_GE(number_of_references_, 0);
    return static_cast<uint32_t>(number_of_references_);
  }

  // Visits every slot (non-null only) as a GC root.
  template <typename Visitor>
  void VisitRoots(Visitor& visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
    for (size_t i = 0, count = NumberOfReferences(); i < count; ++i) {
      // GetReference returns a pointer to the stack reference within the handle scope. If this
      // needs to be updated, it will be done by the root visitor.
      visitor.VisitRootIfNonNull(GetHandle(i).GetReference());
    }
  }

 protected:
  // Return backing storage used for references.
  // The slots live at a fixed offset from 'this'; computed rather than
  // declared as a member because the array length is only known at runtime.
  ALWAYS_INLINE StackReference<mirror::Object>* GetReferences() const {
    uintptr_t address = reinterpret_cast<uintptr_t>(this) + ReferencesOffset(kRuntimePointerSize);
    return reinterpret_cast<StackReference<mirror::Object>*>(address);
  }

  // Constructs an unlinked scope (link_ == nullptr).
  explicit HandleScope(size_t number_of_references) : HandleScope(nullptr, number_of_references) {}

  // Semi-hidden constructor. Construction expected by generated code and StackHandleScope.
  HandleScope(BaseHandleScope* link, uint32_t num_references)
      : BaseHandleScope(link, num_references) {}

  // Storage for references.
  // StackReference<mirror::Object> references_[number_of_references_]

 private:
  DISALLOW_COPY_AND_ASSIGN(HandleScope);
};
    173 
    174 // A wrapper which wraps around Object** and restores the pointer in the destructor.
    175 // TODO: Delete
    176 template<class T>
    177 class HandleWrapper : public MutableHandle<T> {
    178  public:
    179   HandleWrapper(T** obj, const MutableHandle<T>& handle)
    180      : MutableHandle<T>(handle), obj_(obj) {
    181   }
    182 
    183   HandleWrapper(const HandleWrapper&) = default;
    184 
    185   ~HandleWrapper() {
    186     *obj_ = MutableHandle<T>::Get();
    187   }
    188 
    189  private:
    190   T** const obj_;
    191 };
    192 
    193 
    194 // A wrapper which wraps around ObjPtr<Object>* and restores the pointer in the destructor.
    195 // TODO: Add more functionality.
    196 template<class T>
    197 class HandleWrapperObjPtr : public MutableHandle<T> {
    198  public:
    199   HandleWrapperObjPtr(ObjPtr<T>* obj, const MutableHandle<T>& handle)
    200       : MutableHandle<T>(handle), obj_(obj) {}
    201 
    202   HandleWrapperObjPtr(const HandleWrapperObjPtr&) = default;
    203 
    204   ~HandleWrapperObjPtr() {
    205     *obj_ = ObjPtr<T>(MutableHandle<T>::Get());
    206   }
    207 
    208  private:
    209   ObjPtr<T>* const obj_;
    210 };
    211 
// Fixed size handle scope that is not necessarily linked in the thread.
template<size_t kNumReferences>
class PACKED(4) FixedSizeHandleScope : public HandleScope {
 public:
  // Allocates the next slot and returns a handle pointing at object.
  template<class T>
  ALWAYS_INLINE MutableHandle<T> NewHandle(T* object) REQUIRES_SHARED(Locks::mutator_lock_);

  // Like NewHandle, but also restores *object from the handle when the
  // returned wrapper is destroyed.
  template<class T>
  ALWAYS_INLINE HandleWrapper<T> NewHandleWrapper(T** object)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // ObjPtr variant of NewHandleWrapper.
  template<class T>
  ALWAYS_INLINE HandleWrapperObjPtr<T> NewHandleWrapper(ObjPtr<T>* object)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // ObjPtr variant of NewHandle.
  template<class MirrorType>
  ALWAYS_INLINE MutableHandle<MirrorType> NewHandle(ObjPtr<MirrorType> object)
    REQUIRES_SHARED(Locks::mutator_lock_);

  // Stores object into slot i.
  ALWAYS_INLINE void SetReference(size_t i, mirror::Object* object)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Number of handle slots not yet consumed by NewHandle/NewHandleWrapper.
  size_t RemainingSlots() const {
    return kNumReferences - pos_;
  }

 private:
  // Construction restricted to the friends below.
  explicit ALWAYS_INLINE FixedSizeHandleScope(BaseHandleScope* link,
                                              mirror::Object* fill_value = nullptr);
  ALWAYS_INLINE ~FixedSizeHandleScope() {}

  // Returns a mutable handle for slot i (DCHECK bounds-checked).
  template<class T>
  ALWAYS_INLINE MutableHandle<T> GetHandle(size_t i) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK_LT(i, kNumReferences);
    return MutableHandle<T>(&GetReferences()[i]);
  }

  // Reference storage needs to be first as expected by the HandleScope layout.
  StackReference<mirror::Object> storage_[kNumReferences];

  // Position new handles will be created.
  uint32_t pos_ = 0;

  template<size_t kNumRefs> friend class StackHandleScope;
  friend class VariableSizedHandleScope;
};
    258 
// Scoped handle storage of a fixed size that is stack allocated.
template<size_t kNumReferences>
class PACKED(4) StackHandleScope FINAL : public FixedSizeHandleScope<kNumReferences> {
 public:
  // Links this scope onto self's handle scope list; slots are optionally
  // pre-filled with fill_value.
  explicit ALWAYS_INLINE StackHandleScope(Thread* self, mirror::Object* fill_value = nullptr);
  // Unlinks this scope from the thread's handle scope list.
  ALWAYS_INLINE ~StackHandleScope();

  // The thread this scope is linked on.
  Thread* Self() const {
    return self_;
  }

 private:
  // The thread that the stack handle scope is a linked list upon. The stack handle scope will
  // push and pop itself from this thread.
  Thread* const self_;
};
    275 
// Utility class to manage a variable sized handle scope by having a list of fixed size handle
// scopes.
// Calls to NewHandle will create a new handle inside the current FixedSizeHandleScope.
// When the current handle scope becomes full a new one is created and put at the front of the
// list.
class VariableSizedHandleScope : public BaseHandleScope {
 public:
  explicit VariableSizedHandleScope(Thread* const self);
  ~VariableSizedHandleScope();

  // Creates a new handle in the current local scope (growing the list when full).
  template<class T>
  MutableHandle<T> NewHandle(T* object) REQUIRES_SHARED(Locks::mutator_lock_);

  // ObjPtr variant of NewHandle.
  template<class MirrorType>
  MutableHandle<MirrorType> NewHandle(ObjPtr<MirrorType> ptr)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Number of references contained within this handle scope.
  ALWAYS_INLINE uint32_t NumberOfReferences() const;

  // Returns true if handle_scope_entry belongs to any of the chained local scopes.
  ALWAYS_INLINE bool Contains(StackReference<mirror::Object>* handle_scope_entry) const;

  // Visits the roots of every chained local scope.
  template <typename Visitor>
  void VisitRoots(Visitor& visitor) REQUIRES_SHARED(Locks::mutator_lock_);

 private:
  // Total byte size of each fixed-size scope in the chain.
  static constexpr size_t kLocalScopeSize = 64u;
  // Bytes left for reference slots after the header fields of a local scope.
  static constexpr size_t kSizeOfReferencesPerScope =
      kLocalScopeSize
          - /* BaseHandleScope::link_ */ sizeof(BaseHandleScope*)
          - /* BaseHandleScope::number_of_references_ */ sizeof(int32_t)
          - /* FixedSizeHandleScope<>::pos_ */ sizeof(uint32_t);
  // Number of reference slots that fit in each local scope.
  static constexpr size_t kNumReferencesPerScope =
      kSizeOfReferencesPerScope / sizeof(StackReference<mirror::Object>);

  Thread* const self_;

  // Linked list of fixed size handle scopes.
  using LocalScopeType = FixedSizeHandleScope<kNumReferencesPerScope>;
  static_assert(sizeof(LocalScopeType) == kLocalScopeSize, "Unexpected size of LocalScopeType");
  LocalScopeType* current_scope_;

  DISALLOW_COPY_AND_ASSIGN(VariableSizedHandleScope);
};
    320 
    321 }  // namespace art
    322 
    323 #endif  // ART_RUNTIME_HANDLE_SCOPE_H_
    324