/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_MIRROR_DEX_CACHE_INL_H_
#define ART_RUNTIME_MIRROR_DEX_CACHE_INL_H_

#include "dex_cache.h"

#include <atomic>

#include <android-base/logging.h>

#include "art_field.h"
#include "art_method.h"
#include "base/casts.h"
#include "base/enums.h"
#include "class_linker.h"
#include "dex/dex_file.h"
#include "gc/heap-inl.h"
#include "gc_root.h"
#include "mirror/call_site.h"
#include "mirror/class.h"
#include "mirror/method_type.h"
#include "obj_ptr.h"
#include "runtime.h"

namespace art {
namespace mirror {

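// Seed slot 0 of a freshly allocated native pair array. A zero-initialized pair would
// read as {nullptr, index 0}, which GetObjectForIndex() could not tell apart from a
// genuine entry for id 0, so the sentinel InvalidIndexForSlot(0) guarantees that the
// first lookup in slot 0 misses.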
template <typename T>
inline void NativeDexCachePair<T>::Initialize(std::atomic<NativeDexCachePair<T>>* dex_cache,
                                              PointerSize pointer_size) {
  NativeDexCachePair<T> first_elem;
  first_elem.object = nullptr;
  first_elem.index = InvalidIndexForSlot(0);
  DexCache::SetNativePairPtrSize(dex_cache, 0, first_elem, pointer_size);
}

inline uint32_t DexCache::ClassSize(PointerSize pointer_size) {
  const uint32_t vtable_entries = Object::kVTableLength;
  return Class::ComputeClassSize(true, vtable_entries, 0, 0, 0, 0, 0, pointer_size);
}

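// The caches below are direct-mapped: an id is reduced to a slot with a simple modulo,
// and the full id is stored in the pair so that a colliding entry is detected on load
// (GetObjectForIndex() returns null on an index mismatch). For example, string ids i
// and i + kDexCacheStringCacheSize share a slot, and whichever is stored last wins.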
inline uint32_t DexCache::StringSlotIndex(dex::StringIndex string_idx) {
  DCHECK_LT(string_idx.index_, GetDexFile()->NumStringIds());
  const uint32_t slot_idx = string_idx.index_ % kDexCacheStringCacheSize;
  DCHECK_LT(slot_idx, NumStrings());
  return slot_idx;
}

inline String* DexCache::GetResolvedString(dex::StringIndex string_idx) {
  return GetStrings()[StringSlotIndex(string_idx)].load(
      std::memory_order_relaxed).GetObjectForIndex(string_idx.index_);
}

inline void DexCache::SetResolvedString(dex::StringIndex string_idx, ObjPtr<String> resolved) {
  DCHECK(resolved != nullptr);
  GetStrings()[StringSlotIndex(string_idx)].store(
      StringDexCachePair(resolved, string_idx.index_), std::memory_order_relaxed);
  Runtime* const runtime = Runtime::Current();
  if (UNLIKELY(runtime->IsActiveTransaction())) {
    DCHECK(runtime->IsAotCompiler());
    runtime->RecordResolveString(this, string_idx);
  }
  // TODO: Fine-grained marking, so that we don't need to go through all arrays in full.
  runtime->GetHeap()->WriteBarrierEveryFieldOf(this);
}

inline void DexCache::ClearString(dex::StringIndex string_idx) {
  DCHECK(Runtime::Current()->IsAotCompiler());
  uint32_t slot_idx = StringSlotIndex(string_idx);
  StringDexCacheType* slot = &GetStrings()[slot_idx];
  // This is racy but should only be called from the transactional interpreter.
  if (slot->load(std::memory_order_relaxed).index == string_idx.index_) {
    StringDexCachePair cleared(nullptr, StringDexCachePair::InvalidIndexForSlot(slot_idx));
    slot->store(cleared, std::memory_order_relaxed);
  }
}
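// Note for ClearString() above and the other Clear*() helpers below: the index check
// makes clearing conditional, so a colliding entry that has since overwritten the slot
// is left untouched; only a slot still holding the given id is reset to its sentinel.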

inline uint32_t DexCache::TypeSlotIndex(dex::TypeIndex type_idx) {
  DCHECK_LT(type_idx.index_, GetDexFile()->NumTypeIds());
  const uint32_t slot_idx = type_idx.index_ % kDexCacheTypeCacheSize;
  DCHECK_LT(slot_idx, NumResolvedTypes());
  return slot_idx;
}

inline Class* DexCache::GetResolvedType(dex::TypeIndex type_idx) {
  // It is theorized that a load-acquire is not required here, since obtaining the
  // resolved class will always involve an address dependency or a lock.
  return GetResolvedTypes()[TypeSlotIndex(type_idx)].load(
      std::memory_order_relaxed).GetObjectForIndex(type_idx.index_);
}

inline void DexCache::SetResolvedType(dex::TypeIndex type_idx, ObjPtr<Class> resolved) {
  DCHECK(resolved != nullptr);
  // TODO: Default transaction support.
  // Use a release store for SetResolvedType. This is done to prevent other threads from seeing a
  // class but not necessarily seeing the loaded members, such as the static fields array.
  // See b/32075261.
  GetResolvedTypes()[TypeSlotIndex(type_idx)].store(
      TypeDexCachePair(resolved, type_idx.index_), std::memory_order_release);
  // TODO: Fine-grained marking, so that we don't need to go through all arrays in full.
  Runtime::Current()->GetHeap()->WriteBarrierEveryFieldOf(this);
}
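// Note: the release store above pairs with the relaxed load in GetResolvedType();
// readers are (per the comment there) expected to be ordered by the address dependency
// on the returned Class*, effectively getting memory_order_consume semantics.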

inline void DexCache::ClearResolvedType(dex::TypeIndex type_idx) {
  DCHECK(Runtime::Current()->IsAotCompiler());
  uint32_t slot_idx = TypeSlotIndex(type_idx);
  TypeDexCacheType* slot = &GetResolvedTypes()[slot_idx];
  // This is racy but should only be called from the single-threaded ImageWriter and tests.
  if (slot->load(std::memory_order_relaxed).index == type_idx.index_) {
    TypeDexCachePair cleared(nullptr, TypeDexCachePair::InvalidIndexForSlot(slot_idx));
    slot->store(cleared, std::memory_order_relaxed);
  }
}

inline uint32_t DexCache::MethodTypeSlotIndex(uint32_t proto_idx) {
  DCHECK(Runtime::Current()->IsMethodHandlesEnabled());
  DCHECK_LT(proto_idx, GetDexFile()->NumProtoIds());
  const uint32_t slot_idx = proto_idx % kDexCacheMethodTypeCacheSize;
  DCHECK_LT(slot_idx, NumResolvedMethodTypes());
  return slot_idx;
}

inline MethodType* DexCache::GetResolvedMethodType(uint32_t proto_idx) {
  return GetResolvedMethodTypes()[MethodTypeSlotIndex(proto_idx)].load(
      std::memory_order_relaxed).GetObjectForIndex(proto_idx);
}

inline void DexCache::SetResolvedMethodType(uint32_t proto_idx, MethodType* resolved) {
  DCHECK(resolved != nullptr);
  GetResolvedMethodTypes()[MethodTypeSlotIndex(proto_idx)].store(
      MethodTypeDexCachePair(resolved, proto_idx), std::memory_order_relaxed);
  // TODO: Fine-grained marking, so that we don't need to go through all arrays in full.
  Runtime::Current()->GetHeap()->WriteBarrierEveryFieldOf(this);
}

inline CallSite* DexCache::GetResolvedCallSite(uint32_t call_site_idx) {
  DCHECK(Runtime::Current()->IsMethodHandlesEnabled());
  DCHECK_LT(call_site_idx, GetDexFile()->NumCallSiteIds());
  GcRoot<mirror::CallSite>& target = GetResolvedCallSites()[call_site_idx];
  Atomic<GcRoot<mirror::CallSite>>& ref =
      reinterpret_cast<Atomic<GcRoot<mirror::CallSite>>&>(target);
  return ref.LoadSequentiallyConsistent().Read();
}

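// Install a call site with a strong CAS so that, under concurrent resolution, exactly
// one CallSite wins and every racing thread agrees on it. Callers should therefore
// adopt the returned pointer rather than the argument, roughly:
//
//   ObjPtr<mirror::CallSite> winner =
//       dex_cache->SetResolvedCallSite(call_site_idx, candidate.Ptr());
//   // Use `winner` from here on; it may not be `candidate`.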
inline CallSite* DexCache::SetResolvedCallSite(uint32_t call_site_idx, CallSite* call_site) {
  DCHECK(Runtime::Current()->IsMethodHandlesEnabled());
  DCHECK_LT(call_site_idx, GetDexFile()->NumCallSiteIds());

  GcRoot<mirror::CallSite> null_call_site(nullptr);
  GcRoot<mirror::CallSite> candidate(call_site);
  GcRoot<mirror::CallSite>& target = GetResolvedCallSites()[call_site_idx];

  // The first assignment for a given call site wins.
  Atomic<GcRoot<mirror::CallSite>>& ref =
      reinterpret_cast<Atomic<GcRoot<mirror::CallSite>>&>(target);
  if (ref.CompareAndSetStrongSequentiallyConsistent(null_call_site, candidate)) {
    // TODO: Fine-grained marking, so that we don't need to go through all arrays in full.
    Runtime::Current()->GetHeap()->WriteBarrierEveryFieldOf(this);
    return call_site;
  } else {
    return target.Read();
  }
}

inline uint32_t DexCache::FieldSlotIndex(uint32_t field_idx) {
  DCHECK_LT(field_idx, GetDexFile()->NumFieldIds());
  const uint32_t slot_idx = field_idx % kDexCacheFieldCacheSize;
  DCHECK_LT(slot_idx, NumResolvedFields());
  return slot_idx;
}

inline ArtField* DexCache::GetResolvedField(uint32_t field_idx, PointerSize ptr_size) {
  DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), ptr_size);
  auto pair = GetNativePairPtrSize(GetResolvedFields(), FieldSlotIndex(field_idx), ptr_size);
  return pair.GetObjectForIndex(field_idx);
}

inline void DexCache::SetResolvedField(uint32_t field_idx, ArtField* field, PointerSize ptr_size) {
  DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), ptr_size);
  DCHECK(field != nullptr);
  FieldDexCachePair pair(field, field_idx);
  SetNativePairPtrSize(GetResolvedFields(), FieldSlotIndex(field_idx), pair, ptr_size);
}
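// Unlike the string/type/method-type setters above, SetResolvedField() and
// SetResolvedMethod() emit no write barrier: ArtField* and ArtMethod* are native
// pointers, not GC-managed references, so the garbage collector does not need to be
// notified about these stores.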

inline void DexCache::ClearResolvedField(uint32_t field_idx, PointerSize ptr_size) {
  DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), ptr_size);
  uint32_t slot_idx = FieldSlotIndex(field_idx);
  auto* resolved_fields = GetResolvedFields();
  // This is racy but should only be called from the single-threaded ImageWriter.
  DCHECK(Runtime::Current()->IsAotCompiler());
  if (GetNativePairPtrSize(resolved_fields, slot_idx, ptr_size).index == field_idx) {
    FieldDexCachePair cleared(nullptr, FieldDexCachePair::InvalidIndexForSlot(slot_idx));
    SetNativePairPtrSize(resolved_fields, slot_idx, cleared, ptr_size);
  }
}

inline uint32_t DexCache::MethodSlotIndex(uint32_t method_idx) {
  DCHECK_LT(method_idx, GetDexFile()->NumMethodIds());
  const uint32_t slot_idx = method_idx % kDexCacheMethodCacheSize;
  DCHECK_LT(slot_idx, NumResolvedMethods());
  return slot_idx;
}

inline ArtMethod* DexCache::GetResolvedMethod(uint32_t method_idx, PointerSize ptr_size) {
  DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), ptr_size);
  auto pair = GetNativePairPtrSize(GetResolvedMethods(), MethodSlotIndex(method_idx), ptr_size);
  return pair.GetObjectForIndex(method_idx);
}

inline void DexCache::SetResolvedMethod(uint32_t method_idx,
                                        ArtMethod* method,
                                        PointerSize ptr_size) {
  DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), ptr_size);
  DCHECK(method != nullptr);
  MethodDexCachePair pair(method, method_idx);
  SetNativePairPtrSize(GetResolvedMethods(), MethodSlotIndex(method_idx), pair, ptr_size);
}

inline void DexCache::ClearResolvedMethod(uint32_t method_idx, PointerSize ptr_size) {
  DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), ptr_size);
  uint32_t slot_idx = MethodSlotIndex(method_idx);
  auto* resolved_methods = GetResolvedMethods();
  // This is racy but should only be called from the single-threaded ImageWriter.
  DCHECK(Runtime::Current()->IsAotCompiler());
  if (GetNativePairPtrSize(resolved_methods, slot_idx, ptr_size).index == method_idx) {
    MethodDexCachePair cleared(nullptr, MethodDexCachePair::InvalidIndexForSlot(slot_idx));
    SetNativePairPtrSize(resolved_methods, slot_idx, cleared, ptr_size);
  }
}

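// On 64-bit targets a NativeDexCachePair<T> is a 16-byte {pointer, index} value, so the
// two helpers below round-trip it through ConversionPair64 and the per-architecture
// AtomicLoadRelaxed16B()/AtomicStoreRelease16B() primitives; on 32-bit targets the pair
// packs into a single 8-byte std::atomic<ConversionPair32> with plain relaxed loads and
// release stores.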
template <typename T>
NativeDexCachePair<T> DexCache::GetNativePairPtrSize(std::atomic<NativeDexCachePair<T>>* pair_array,
                                                     size_t idx,
                                                     PointerSize ptr_size) {
  if (ptr_size == PointerSize::k64) {
    auto* array = reinterpret_cast<std::atomic<ConversionPair64>*>(pair_array);
    ConversionPair64 value = AtomicLoadRelaxed16B(&array[idx]);
    return NativeDexCachePair<T>(reinterpret_cast64<T*>(value.first),
                                 dchecked_integral_cast<size_t>(value.second));
  } else {
    auto* array = reinterpret_cast<std::atomic<ConversionPair32>*>(pair_array);
    ConversionPair32 value = array[idx].load(std::memory_order_relaxed);
    return NativeDexCachePair<T>(reinterpret_cast<T*>(value.first), value.second);
  }
}

template <typename T>
void DexCache::SetNativePairPtrSize(std::atomic<NativeDexCachePair<T>>* pair_array,
                                    size_t idx,
                                    NativeDexCachePair<T> pair,
                                    PointerSize ptr_size) {
  if (ptr_size == PointerSize::k64) {
    auto* array = reinterpret_cast<std::atomic<ConversionPair64>*>(pair_array);
    ConversionPair64 v(reinterpret_cast64<uint64_t>(pair.object), pair.index);
    AtomicStoreRelease16B(&array[idx], v);
  } else {
    auto* array = reinterpret_cast<std::atomic<ConversionPair32>*>(pair_array);
    ConversionPair32 v(
        dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(pair.object)),
        dchecked_integral_cast<uint32_t>(pair.index));
    array[idx].store(v, std::memory_order_release);
  }
}

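// Visit the GcRoot in every pair, writing the pair back only when the visitor actually
// updated the object through AddressWithoutBarrier() (detected by the before/after
// reads), so the common no-move case skips the relaxed store entirely.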
template <typename T,
          ReadBarrierOption kReadBarrierOption,
          typename Visitor>
inline void VisitDexCachePairs(std::atomic<DexCachePair<T>>* pairs,
                               size_t num_pairs,
                               const Visitor& visitor)
    REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
  for (size_t i = 0; i < num_pairs; ++i) {
    DexCachePair<T> source = pairs[i].load(std::memory_order_relaxed);
    // NOTE: We need the "template" keyword here to avoid a compilation failure.
    // GcRoot<T> is a template-argument-dependent type, so we must tell the compiler
    // to treat "Read" as a template rather than a field or function; otherwise, on
    // encountering the "<" token, the compiler would treat "Read" as a field.
    T* const before = source.object.template Read<kReadBarrierOption>();
    visitor.VisitRootIfNonNull(source.object.AddressWithoutBarrier());
    if (source.object.template Read<kReadBarrierOption>() != before) {
      pairs[i].store(source, std::memory_order_relaxed);
    }
  }
}

template <bool kVisitNativeRoots,
          VerifyObjectFlags kVerifyFlags,
          ReadBarrierOption kReadBarrierOption,
          typename Visitor>
inline void DexCache::VisitReferences(ObjPtr<Class> klass, const Visitor& visitor) {
  // Visit instance fields first.
  VisitInstanceFieldsReferences<kVerifyFlags, kReadBarrierOption>(klass, visitor);
  // Visit arrays after.
  if (kVisitNativeRoots) {
    VisitDexCachePairs<String, kReadBarrierOption, Visitor>(
        GetStrings(), NumStrings(), visitor);

    VisitDexCachePairs<Class, kReadBarrierOption, Visitor>(
        GetResolvedTypes(), NumResolvedTypes(), visitor);

    VisitDexCachePairs<MethodType, kReadBarrierOption, Visitor>(
        GetResolvedMethodTypes(), NumResolvedMethodTypes(), visitor);

    GcRoot<mirror::CallSite>* resolved_call_sites = GetResolvedCallSites();
    for (size_t i = 0, num_call_sites = NumResolvedCallSites(); i != num_call_sites; ++i) {
      visitor.VisitRootIfNonNull(resolved_call_sites[i].AddressWithoutBarrier());
    }
  }
}

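// The Fixup* helpers below copy this cache's entries into `dest`, mapping each object
// through `visitor` on the way; they are used, e.g., when writing a relocated copy of
// a DexCache into an image.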
template <ReadBarrierOption kReadBarrierOption, typename Visitor>
inline void DexCache::FixupStrings(StringDexCacheType* dest, const Visitor& visitor) {
  StringDexCacheType* src = GetStrings();
  for (size_t i = 0, count = NumStrings(); i < count; ++i) {
    StringDexCachePair source = src[i].load(std::memory_order_relaxed);
    String* ptr = source.object.Read<kReadBarrierOption>();
    String* new_source = visitor(ptr);
    source.object = GcRoot<String>(new_source);
    dest[i].store(source, std::memory_order_relaxed);
  }
}

template <ReadBarrierOption kReadBarrierOption, typename Visitor>
inline void DexCache::FixupResolvedTypes(TypeDexCacheType* dest, const Visitor& visitor) {
  TypeDexCacheType* src = GetResolvedTypes();
  for (size_t i = 0, count = NumResolvedTypes(); i < count; ++i) {
    TypeDexCachePair source = src[i].load(std::memory_order_relaxed);
    Class* ptr = source.object.Read<kReadBarrierOption>();
    Class* new_source = visitor(ptr);
    source.object = GcRoot<Class>(new_source);
    dest[i].store(source, std::memory_order_relaxed);
  }
}

template <ReadBarrierOption kReadBarrierOption, typename Visitor>
inline void DexCache::FixupResolvedMethodTypes(MethodTypeDexCacheType* dest,
                                               const Visitor& visitor) {
  MethodTypeDexCacheType* src = GetResolvedMethodTypes();
  for (size_t i = 0, count = NumResolvedMethodTypes(); i < count; ++i) {
    MethodTypeDexCachePair source = src[i].load(std::memory_order_relaxed);
    MethodType* ptr = source.object.Read<kReadBarrierOption>();
    MethodType* new_source = visitor(ptr);
    source.object = GcRoot<MethodType>(new_source);
    dest[i].store(source, std::memory_order_relaxed);
  }
}

template <ReadBarrierOption kReadBarrierOption, typename Visitor>
inline void DexCache::FixupResolvedCallSites(GcRoot<mirror::CallSite>* dest,
                                             const Visitor& visitor) {
  GcRoot<mirror::CallSite>* src = GetResolvedCallSites();
  for (size_t i = 0, count = NumResolvedCallSites(); i < count; ++i) {
    mirror::CallSite* source = src[i].Read<kReadBarrierOption>();
    mirror::CallSite* new_source = visitor(source);
    dest[i] = GcRoot<mirror::CallSite>(new_source);
  }
}

}  // namespace mirror
}  // namespace art

#endif  // ART_RUNTIME_MIRROR_DEX_CACHE_INL_H_