/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_HANDLE_SCOPE_H_
#define ART_RUNTIME_HANDLE_SCOPE_H_

#include <stack>

#include <android-base/logging.h>

#include "base/enums.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "handle.h"
#include "stack_reference.h"
#include "verify_object.h"

namespace art {

class HandleScope;
template<class MirrorType> class ObjPtr;
class Thread;
class VariableSizedHandleScope;

namespace mirror {
class Object;
}  // namespace mirror

// Basic handle scope, tracked by a list. May be variable sized.
class PACKED(4) BaseHandleScope {
 public:
  bool IsVariableSized() const {
    return number_of_references_ == kNumReferencesVariableSized;
  }

  // Number of references contained within this handle scope.
  ALWAYS_INLINE uint32_t NumberOfReferences() const;

  ALWAYS_INLINE bool Contains(StackReference<mirror::Object>* handle_scope_entry) const;

  template <typename Visitor>
  ALWAYS_INLINE void VisitRoots(Visitor& visitor) REQUIRES_SHARED(Locks::mutator_lock_);

  // Link to the previous BaseHandleScope, or null.
  BaseHandleScope* GetLink() const {
    return link_;
  }

  ALWAYS_INLINE VariableSizedHandleScope* AsVariableSized();
  ALWAYS_INLINE HandleScope* AsHandleScope();
  ALWAYS_INLINE const VariableSizedHandleScope* AsVariableSized() const;
  ALWAYS_INLINE const HandleScope* AsHandleScope() const;

 protected:
  BaseHandleScope(BaseHandleScope* link, uint32_t num_references)
      : link_(link),
        number_of_references_(num_references) {}

  // Variable sized constructor.
  explicit BaseHandleScope(BaseHandleScope* link)
      : link_(link),
        number_of_references_(kNumReferencesVariableSized) {}

  static constexpr int32_t kNumReferencesVariableSized = -1;

  // Linked list of handle scopes. The root is held by a Thread.
  BaseHandleScope* const link_;

  // Number of references held in this scope; -1 for variable sized handle scopes.
  const int32_t number_of_references_;

 private:
  DISALLOW_COPY_AND_ASSIGN(BaseHandleScope);
};

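// A sketch of how a thread's chain of handle scopes can be walked via GetLink()
// (illustrative only; the Thread accessor below is an assumed name):
//
//   for (BaseHandleScope* scope = self->GetTopHandleScope();  // assumed accessor
//        scope != nullptr;
//        scope = scope->GetLink()) {
//     scope->VisitRoots(visitor);
//   }
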
// HandleScopes are scoped objects containing a number of Handles. They are used to allocate
// handles so that those handles (and the objects they refer to) remain visible to the GC as
// roots. It is most common to stack allocate HandleScopes using StackHandleScope.
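//
// A minimal usage sketch (illustrative only; assumes a Thread* self and a
// mirror::Class* klass obtained elsewhere):
//
//   StackHandleScope<1> hs(self);
//   Handle<mirror::Class> h_klass(hs.NewHandle(klass));
//   // h_klass remains valid across suspend points, whereas the raw klass
//   // pointer could be invalidated by a moving GC.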
class PACKED(4) HandleScope : public BaseHandleScope {
 public:
  ~HandleScope() {}

  // We have versions with and without explicit pointer size of the following. The first two are
  // used at runtime, so OFFSETOF_MEMBER computes the right offsets automatically. The last one
  // takes the pointer size explicitly so that at compile time we can cross-compile correctly.

  // Returns the size of a HandleScope containing num_references handles.
  static size_t SizeOf(uint32_t num_references);

  // Returns the size of a HandleScope containing num_references handles, for the given pointer
  // size (usable at compile time when cross-compiling).
  static size_t SizeOf(PointerSize pointer_size, uint32_t num_references);

  ALWAYS_INLINE mirror::Object* GetReference(size_t i) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE Handle<mirror::Object> GetHandle(size_t i);

  ALWAYS_INLINE MutableHandle<mirror::Object> GetMutableHandle(size_t i)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE void SetReference(size_t i, mirror::Object* object)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE bool Contains(StackReference<mirror::Object>* handle_scope_entry) const;

  // Offset of link within HandleScope, used by generated code.
  static constexpr size_t LinkOffset(PointerSize pointer_size ATTRIBUTE_UNUSED) {
    return 0;
  }

  // Offset of length within handle scope, used by generated code.
  static constexpr size_t NumberOfReferencesOffset(PointerSize pointer_size) {
    return static_cast<size_t>(pointer_size);
  }

  // Offset of the references array within handle scope, used by generated code.
  static constexpr size_t ReferencesOffset(PointerSize pointer_size) {
    return NumberOfReferencesOffset(pointer_size) + sizeof(number_of_references_);
  }
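
  // For example (a sketch, assuming 64-bit pointers and the PACKED(4) layout above):
  // link_ is at offset 0, number_of_references_ at offset 8, and the references
  // array starts at offset 12, i.e. ReferencesOffset(PointerSize::k64) == 8 + 4 == 12.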

  // Placement new creation.
  static HandleScope* Create(void* storage, BaseHandleScope* link, uint32_t num_references)
      WARN_UNUSED {
    return new (storage) HandleScope(link, num_references);
  }

  // Number of references contained within this handle scope.
  ALWAYS_INLINE uint32_t NumberOfReferences() const {
    DCHECK_GE(number_of_references_, 0);
    return static_cast<uint32_t>(number_of_references_);
  }

  template <typename Visitor>
  void VisitRoots(Visitor& visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
    for (size_t i = 0, count = NumberOfReferences(); i < count; ++i) {
      // GetReference returns a pointer to the stack reference within the handle scope. If this
      // needs to be updated, it will be done by the root visitor.
      visitor.VisitRootIfNonNull(GetHandle(i).GetReference());
    }
  }

 protected:
  // Return backing storage used for references.
  ALWAYS_INLINE StackReference<mirror::Object>* GetReferences() const {
    uintptr_t address = reinterpret_cast<uintptr_t>(this) + ReferencesOffset(kRuntimePointerSize);
    return reinterpret_cast<StackReference<mirror::Object>*>(address);
  }

  explicit HandleScope(size_t number_of_references) : HandleScope(nullptr, number_of_references) {}

  // Semi-hidden constructor. Construction expected by generated code and StackHandleScope.
  HandleScope(BaseHandleScope* link, uint32_t num_references)
      : BaseHandleScope(link, num_references) {}

  // Storage for references:
  // StackReference<mirror::Object> references_[number_of_references_]

 private:
  DISALLOW_COPY_AND_ASSIGN(HandleScope);
};

// A wrapper around Object** that restores the pointer in the destructor.
// TODO: Delete
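//
// Typical use (a sketch, assuming a Thread* self and code that may suspend):
//
//   mirror::Class* klass = ...;  // Raw pointer that must survive a suspend point.
//   {
//     StackHandleScope<1> hs(self);
//     HandleWrapper<mirror::Class> h(hs.NewHandleWrapper(&klass));
//     // ... code that may suspend; a moving GC may relocate the object ...
//   }  // ~HandleWrapper writes the (possibly updated) reference back into klass.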
template<class T>
class HandleWrapper : public MutableHandle<T> {
 public:
  HandleWrapper(T** obj, const MutableHandle<T>& handle)
      : MutableHandle<T>(handle), obj_(obj) {}

  HandleWrapper(const HandleWrapper&) = default;

  ~HandleWrapper() {
    *obj_ = MutableHandle<T>::Get();
  }

 private:
  T** const obj_;
};

// A wrapper around ObjPtr<Object>* that restores the pointer in the destructor.
// TODO: Add more functionality.
template<class T>
class HandleWrapperObjPtr : public MutableHandle<T> {
 public:
  HandleWrapperObjPtr(ObjPtr<T>* obj, const MutableHandle<T>& handle)
      : MutableHandle<T>(handle), obj_(obj) {}

  HandleWrapperObjPtr(const HandleWrapperObjPtr&) = default;

  ~HandleWrapperObjPtr() {
    *obj_ = ObjPtr<T>(MutableHandle<T>::Get());
  }

 private:
  ObjPtr<T>* const obj_;
};

// Fixed size handle scope that is not necessarily linked in the thread.
template<size_t kNumReferences>
class PACKED(4) FixedSizeHandleScope : public HandleScope {
 public:
  template<class T>
  ALWAYS_INLINE MutableHandle<T> NewHandle(T* object) REQUIRES_SHARED(Locks::mutator_lock_);

  template<class T>
  ALWAYS_INLINE HandleWrapper<T> NewHandleWrapper(T** object)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<class T>
  ALWAYS_INLINE HandleWrapperObjPtr<T> NewHandleWrapper(ObjPtr<T>* object)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<class MirrorType>
  ALWAYS_INLINE MutableHandle<MirrorType> NewHandle(ObjPtr<MirrorType> object)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE void SetReference(size_t i, mirror::Object* object)
      REQUIRES_SHARED(Locks::mutator_lock_);

  size_t RemainingSlots() const {
    return kNumReferences - pos_;
  }

 private:
  explicit ALWAYS_INLINE FixedSizeHandleScope(BaseHandleScope* link,
                                              mirror::Object* fill_value = nullptr);
  ALWAYS_INLINE ~FixedSizeHandleScope() {}

  template<class T>
  ALWAYS_INLINE MutableHandle<T> GetHandle(size_t i) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK_LT(i, kNumReferences);
    return MutableHandle<T>(&GetReferences()[i]);
  }

  // Reference storage needs to be first as expected by the HandleScope layout.
  StackReference<mirror::Object> storage_[kNumReferences];

  // Position at which the next handle will be created.
  uint32_t pos_ = 0;

  template<size_t kNumRefs> friend class StackHandleScope;
  friend class VariableSizedHandleScope;
};

// Scoped handle storage of a fixed size that is stack allocated.
template<size_t kNumReferences>
class PACKED(4) StackHandleScope FINAL : public FixedSizeHandleScope<kNumReferences> {
 public:
  explicit ALWAYS_INLINE StackHandleScope(Thread* self, mirror::Object* fill_value = nullptr);
  ALWAYS_INLINE ~StackHandleScope();

  Thread* Self() const {
    return self_;
  }

 private:
  // The thread whose handle scope list this scope is linked into. The stack handle scope
  // pushes and pops itself from this thread's list on construction and destruction.
  Thread* const self_;
};

// Utility class that manages a variable sized handle scope by keeping a list of fixed size
// handle scopes.
// Calls to NewHandle create a new handle inside the current FixedSizeHandleScope.
// When the current handle scope becomes full, a new one is created and put at the front of the
// list; see the usage sketch below.
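//
// A usage sketch (illustrative only; assumes a Thread* self and a container
// named objects holding ObjPtr<mirror::Object> values):
//
//   VariableSizedHandleScope hs(self);
//   std::vector<Handle<mirror::Object>> handles;
//   for (ObjPtr<mirror::Object> obj : objects) {
//     handles.push_back(hs.NewHandle(obj));  // Opens a new internal scope when full.
//   }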
class VariableSizedHandleScope : public BaseHandleScope {
 public:
  explicit VariableSizedHandleScope(Thread* const self);
  ~VariableSizedHandleScope();

  template<class T>
  MutableHandle<T> NewHandle(T* object) REQUIRES_SHARED(Locks::mutator_lock_);

  template<class MirrorType>
  MutableHandle<MirrorType> NewHandle(ObjPtr<MirrorType> ptr)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Number of references contained within this handle scope.
  ALWAYS_INLINE uint32_t NumberOfReferences() const;

  ALWAYS_INLINE bool Contains(StackReference<mirror::Object>* handle_scope_entry) const;

  template <typename Visitor>
  void VisitRoots(Visitor& visitor) REQUIRES_SHARED(Locks::mutator_lock_);

 private:
  static constexpr size_t kLocalScopeSize = 64u;
  static constexpr size_t kSizeOfReferencesPerScope =
      kLocalScopeSize
          - /* BaseHandleScope::link_ */ sizeof(BaseHandleScope*)
          - /* BaseHandleScope::number_of_references_ */ sizeof(int32_t)
          - /* FixedSizeHandleScope<>::pos_ */ sizeof(uint32_t);
  static constexpr size_t kNumReferencesPerScope =
      kSizeOfReferencesPerScope / sizeof(StackReference<mirror::Object>);
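
  // Worked example (a sketch, assuming 64-bit pointers and 4-byte compressed
  // StackReference<mirror::Object> entries): 64 - 8 - 4 - 4 = 48 bytes of
  // reference storage, so kNumReferencesPerScope == 48 / 4 == 12.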

  Thread* const self_;

  // Linked list of fixed size handle scopes.
  using LocalScopeType = FixedSizeHandleScope<kNumReferencesPerScope>;
  static_assert(sizeof(LocalScopeType) == kLocalScopeSize, "Unexpected size of LocalScopeType");
  LocalScopeType* current_scope_;

  DISALLOW_COPY_AND_ASSIGN(VariableSizedHandleScope);
};

}  // namespace art

#endif  // ART_RUNTIME_HANDLE_SCOPE_H_