Home | History | Annotate | Download | only in runtime
      1 /*
      2  * Copyright (C) 2014 The Android Open Source Project
      3  *
      4  * Licensed under the Apache License, Version 2.0 (the "License");
      5  * you may not use this file except in compliance with the License.
      6  * You may obtain a copy of the License at
      7  *
      8  *      http://www.apache.org/licenses/LICENSE-2.0
      9  *
     10  * Unless required by applicable law or agreed to in writing, software
     11  * distributed under the License is distributed on an "AS IS" BASIS,
     12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     13  * See the License for the specific language governing permissions and
     14  * limitations under the License.
     15  */
     16 
     17 #ifndef ART_RUNTIME_HANDLE_SCOPE_H_
     18 #define ART_RUNTIME_HANDLE_SCOPE_H_
     19 
#include <memory>
#include <stack>

#include "base/logging.h"
#include "base/macros.h"
#include "handle.h"
#include "stack.h"
#include "verify_object.h"
     27 
     28 namespace art {
     29 namespace mirror {
     30 class Object;
     31 }
     32 
     33 class Thread;
     34 
     35 // HandleScopes are scoped objects containing a number of Handles. They are used to allocate
     36 // handles, for these handles (and the objects contained within them) to be visible/roots for the
     37 // GC. It is most common to stack allocate HandleScopes using StackHandleScope.
class PACKED(4) HandleScope {
 public:
  ~HandleScope() {}

  // Number of references contained within this handle scope.
  uint32_t NumberOfReferences() const {
    return number_of_references_;
  }

  // We have versions with and without explicit pointer size of the following. The first two are
  // used at runtime, so OFFSETOF_MEMBER computes the right offsets automatically. The last one
  // takes the pointer size explicitly so that at compile time we can cross-compile correctly.

  // Returns the size of a HandleScope containing num_references handles.
  static size_t SizeOf(uint32_t num_references);

  // Returns the size of a HandleScope containing num_references handles.
  static size_t SizeOf(size_t pointer_size, uint32_t num_references);

  // Link to previous HandleScope or null.
  HandleScope* GetLink() const {
    return link_;
  }

  // Returns the object stored in reference slot i.
  ALWAYS_INLINE mirror::Object* GetReference(size_t i) const
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Returns a read-only handle backed by reference slot i.
  ALWAYS_INLINE Handle<mirror::Object> GetHandle(size_t i);

  // Returns a mutable handle backed by reference slot i.
  ALWAYS_INLINE MutableHandle<mirror::Object> GetMutableHandle(size_t i)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Stores object into reference slot i.
  ALWAYS_INLINE void SetReference(size_t i, mirror::Object* object)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Returns whether handle_scope_entry points into this scope's reference storage.
  ALWAYS_INLINE bool Contains(StackReference<mirror::Object>* handle_scope_entry) const;

  // Offset of link within HandleScope, used by generated code.
  static size_t LinkOffset(size_t pointer_size ATTRIBUTE_UNUSED) {
    // link_ is the first field for every pointer size.
    return 0;
  }

  // Offset of length within handle scope, used by generated code.
  static size_t NumberOfReferencesOffset(size_t pointer_size) {
    // number_of_references_ immediately follows the link_ pointer (PACKED(4)).
    return pointer_size;
  }

  // Offset of the reference storage within handle scope, used by generated code.
  static size_t ReferencesOffset(size_t pointer_size) {
    return pointer_size + sizeof(number_of_references_);
  }

  // Placement new creation. storage must be at least SizeOf(num_references) bytes.
  static HandleScope* Create(void* storage, HandleScope* link, uint32_t num_references)
      WARN_UNUSED {
    return new (storage) HandleScope(link, num_references);
  }

 protected:
  // Return backing storage used for references.
  // The reference array is not a declared member: it lives immediately after this
  // header object (see ReferencesOffset and the layout note at the bottom).
  ALWAYS_INLINE StackReference<mirror::Object>* GetReferences() const {
    uintptr_t address = reinterpret_cast<uintptr_t>(this) + ReferencesOffset(sizeof(void*));
    return reinterpret_cast<StackReference<mirror::Object>*>(address);
  }

  // Constructor for an unlinked scope (link_ == nullptr).
  explicit HandleScope(size_t number_of_references) :
      link_(nullptr), number_of_references_(number_of_references) {
  }

  // Semi-hidden constructor. Construction expected by generated code and StackHandleScope.
  HandleScope(HandleScope* link, uint32_t num_references) :
      link_(link), number_of_references_(num_references) {
  }

  // Link-list of handle scopes. The root is held by a Thread.
  HandleScope* const link_;

  // Number of handlerized references.
  const uint32_t number_of_references_;

  // Storage for references.
  // StackReference<mirror::Object> references_[number_of_references_]

 private:
  DISALLOW_COPY_AND_ASSIGN(HandleScope);
};
    124 
    125 // A wrapper which wraps around Object** and restores the pointer in the destructor.
    126 // TODO: Add more functionality.
    127 template<class T>
    128 class HandleWrapper : public MutableHandle<T> {
    129  public:
    130   HandleWrapper(T** obj, const MutableHandle<T>& handle)
    131      : MutableHandle<T>(handle), obj_(obj) {
    132   }
    133 
    134   HandleWrapper(const HandleWrapper&) = default;
    135 
    136   ~HandleWrapper() {
    137     *obj_ = MutableHandle<T>::Get();
    138   }
    139 
    140  private:
    141   T** const obj_;
    142 };
    143 
    144 // Scoped handle storage of a fixed size that is usually stack allocated.
template<size_t kNumReferences>
class PACKED(4) StackHandleScope FINAL : public HandleScope {
 public:
  // Constructs the scope on self's handle scope chain (see self_ below);
  // every slot is pre-filled with fill_value.
  explicit ALWAYS_INLINE StackHandleScope(Thread* self, mirror::Object* fill_value = nullptr);
  // Pops the scope from the thread's chain again.
  ALWAYS_INLINE ~StackHandleScope();

  // Stores object in the next free slot and returns a mutable handle to it.
  template<class T>
  ALWAYS_INLINE MutableHandle<T> NewHandle(T* object) SHARED_REQUIRES(Locks::mutator_lock_);

  // Stores *object in the next free slot and returns a HandleWrapper, which
  // restores the pointer through object when it is destroyed (see HandleWrapper).
  template<class T>
  ALWAYS_INLINE HandleWrapper<T> NewHandleWrapper(T** object)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Overwrites slot i with object.
  ALWAYS_INLINE void SetReference(size_t i, mirror::Object* object)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // The thread this scope belongs to.
  Thread* Self() const {
    return self_;
  }

 private:
  // Returns a mutable handle backed by slot i (bounds-checked in debug builds only).
  template<class T>
  ALWAYS_INLINE MutableHandle<T> GetHandle(size_t i) SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK_LT(i, kNumReferences);
    return MutableHandle<T>(&GetReferences()[i]);
  }

  // Reference storage needs to be first as expected by the HandleScope layout.
  StackReference<mirror::Object> storage_[kNumReferences];

  // The thread that the stack handle scope is a linked list upon. The stack handle scope will
  // push and pop itself from this thread.
  Thread* const self_;

  // Position new handles will be created.
  size_t pos_;

  // Let StackHandleScopes of other sizes access these privates.
  template<size_t kNumRefs> friend class StackHandleScope;
};
    184 
// Utility class to manage a collection (stack) of StackHandleScope. All the managed
// handle scopes have the same fixed size.
// Calls to NewHandle will create a new handle inside the top StackHandleScope.
// When the handle scope becomes full, a new one is created and pushed on top of the
// previous one.
//
// NB:
// - it is not safe to intermix use of the *same* StackHandleScopeCollection with
// other StackHandleScopes.
// - this is an easy way around implementing a full ZoneHandleScope to manage an
// arbitrary number of handles.
    196 class StackHandleScopeCollection {
    197  public:
    198   explicit StackHandleScopeCollection(Thread* const self) :
    199       self_(self),
    200       current_scope_num_refs_(0) {
    201   }
    202 
    203   ~StackHandleScopeCollection() {
    204     while (!scopes_.empty()) {
    205       delete scopes_.top();
    206       scopes_.pop();
    207     }
    208   }
    209 
    210   template<class T>
    211   MutableHandle<T> NewHandle(T* object) SHARED_REQUIRES(Locks::mutator_lock_) {
    212     if (scopes_.empty() || current_scope_num_refs_ >= kNumReferencesPerScope) {
    213       StackHandleScope<kNumReferencesPerScope>* scope =
    214           new StackHandleScope<kNumReferencesPerScope>(self_);
    215       scopes_.push(scope);
    216       current_scope_num_refs_ = 0;
    217     }
    218     current_scope_num_refs_++;
    219     return scopes_.top()->NewHandle(object);
    220   }
    221 
    222  private:
    223   static constexpr size_t kNumReferencesPerScope = 4;
    224 
    225   Thread* const self_;
    226 
    227   std::stack<StackHandleScope<kNumReferencesPerScope>*> scopes_;
    228   size_t current_scope_num_refs_;
    229 
    230   DISALLOW_COPY_AND_ASSIGN(StackHandleScopeCollection);
    231 };
    232 
    233 }  // namespace art
    234 
    235 #endif  // ART_RUNTIME_HANDLE_SCOPE_H_
    236