/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_MIRROR_DEX_CACHE_INL_H_
#define ART_RUNTIME_MIRROR_DEX_CACHE_INL_H_

#include "dex_cache.h"

#include <android-base/logging.h>

#include "art_field.h"
#include "art_method.h"
#include "base/casts.h"
#include "base/enums.h"
#include "class_linker.h"
#include "dex/dex_file.h"
#include "gc_root-inl.h"
#include "mirror/call_site.h"
#include "mirror/class.h"
#include "mirror/method_type.h"
#include "obj_ptr.h"
#include "object-inl.h"
#include "runtime.h"
#include "write_barrier-inl.h"

#include <atomic>

namespace art {
namespace mirror {

template <typename T>
inline DexCachePair<T>::DexCachePair(ObjPtr<T> object, uint32_t index)
    : object(object), index(index) {}

template <typename T>
inline void DexCachePair<T>::Initialize(std::atomic<DexCachePair<T>>* dex_cache) {
  DexCachePair<T> first_elem;
  first_elem.object = GcRoot<T>(nullptr);
  first_elem.index = InvalidIndexForSlot(0);
  dex_cache[0].store(first_elem, std::memory_order_relaxed);
}

template <typename T>
inline T* DexCachePair<T>::GetObjectForIndex(uint32_t idx) {
  if (idx != index) {
    return nullptr;
  }
  DCHECK(!object.IsNull());
  return object.Read();
}

template <typename T>
inline void NativeDexCachePair<T>::Initialize(std::atomic<NativeDexCachePair<T>>* dex_cache,
                                              PointerSize pointer_size) {
  NativeDexCachePair<T> first_elem;
  first_elem.object = nullptr;
  first_elem.index = InvalidIndexForSlot(0);
  DexCache::SetNativePairPtrSize(dex_cache, 0, first_elem, pointer_size);
}

inline uint32_t DexCache::ClassSize(PointerSize pointer_size) {
  const uint32_t vtable_entries = Object::kVTableLength;
  return Class::ComputeClassSize(true, vtable_entries, 0, 0, 0, 0, 0, pointer_size);
}

inline uint32_t DexCache::StringSlotIndex(dex::StringIndex string_idx) {
  DCHECK_LT(string_idx.index_, GetDexFile()->NumStringIds());
  const uint32_t slot_idx = string_idx.index_ % kDexCacheStringCacheSize;
  DCHECK_LT(slot_idx, NumStrings());
  return slot_idx;
}

inline String* DexCache::GetResolvedString(dex::StringIndex string_idx) {
  const uint32_t num_preresolved_strings = NumPreResolvedStrings();
  if (num_preresolved_strings != 0u) {
    GcRoot<mirror::String>* preresolved_strings = GetPreResolvedStrings();
    // num_preresolved_strings can become 0 and preresolved_strings can become null in any order
    // when ClearPreResolvedStrings is called.
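    // Reading the count first and re-checking the pointer tolerates that race: if
    // the clear lands in between, we simply fall through to the main cache below.
    // Note that the pre-resolved table is direct-mapped with one slot per string
    // index, whereas GetStrings() is a smaller cache indexed by
    // string_idx.index_ % kDexCacheStringCacheSize (see StringSlotIndex() above).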
    if (preresolved_strings != nullptr) {
      DCHECK_LT(string_idx.index_, num_preresolved_strings);
      DCHECK_EQ(num_preresolved_strings, GetDexFile()->NumStringIds());
      mirror::String* string = preresolved_strings[string_idx.index_].Read();
      if (LIKELY(string != nullptr)) {
        return string;
      }
    }
  }
  return GetStrings()[StringSlotIndex(string_idx)].load(
      std::memory_order_relaxed).GetObjectForIndex(string_idx.index_);
}

inline void DexCache::SetResolvedString(dex::StringIndex string_idx, ObjPtr<String> resolved) {
  DCHECK(resolved != nullptr);
  GetStrings()[StringSlotIndex(string_idx)].store(
      StringDexCachePair(resolved, string_idx.index_), std::memory_order_relaxed);
  Runtime* const runtime = Runtime::Current();
  if (UNLIKELY(runtime->IsActiveTransaction())) {
    DCHECK(runtime->IsAotCompiler());
    runtime->RecordResolveString(this, string_idx);
  }
  // TODO: Fine-grained marking, so that we don't need to go through all arrays in full.
  WriteBarrier::ForEveryFieldWrite(this);
}

inline void DexCache::SetPreResolvedString(dex::StringIndex string_idx, ObjPtr<String> resolved) {
  DCHECK(resolved != nullptr);
  DCHECK_LT(string_idx.index_, GetDexFile()->NumStringIds());
  GetPreResolvedStrings()[string_idx.index_] = GcRoot<mirror::String>(resolved);
  Runtime* const runtime = Runtime::Current();
  CHECK(runtime->IsAotCompiler());
  CHECK(!runtime->IsActiveTransaction());
  // TODO: Fine-grained marking, so that we don't need to go through all arrays in full.
  WriteBarrier::ForEveryFieldWrite(this);
}

inline void DexCache::ClearPreResolvedStrings() {
  SetFieldPtr64</*kTransactionActive=*/false,
                /*kCheckTransaction=*/false,
                kVerifyNone,
                GcRoot<mirror::String>*>(PreResolvedStringsOffset(), nullptr);
  SetField32</*kTransactionActive=*/false,
             /*kCheckTransaction=*/false,
             kVerifyNone,
             /*kIsVolatile=*/false>(NumPreResolvedStringsOffset(), 0);
}

inline void DexCache::ClearString(dex::StringIndex string_idx) {
  DCHECK(Runtime::Current()->IsAotCompiler());
  uint32_t slot_idx = StringSlotIndex(string_idx);
  StringDexCacheType* slot = &GetStrings()[slot_idx];
  // This is racy but should only be called from the transactional interpreter.
  if (slot->load(std::memory_order_relaxed).index == string_idx.index_) {
    StringDexCachePair cleared(nullptr, StringDexCachePair::InvalidIndexForSlot(slot_idx));
    slot->store(cleared, std::memory_order_relaxed);
  }
}

inline uint32_t DexCache::TypeSlotIndex(dex::TypeIndex type_idx) {
  DCHECK_LT(type_idx.index_, GetDexFile()->NumTypeIds());
  const uint32_t slot_idx = type_idx.index_ % kDexCacheTypeCacheSize;
  DCHECK_LT(slot_idx, NumResolvedTypes());
  return slot_idx;
}

inline Class* DexCache::GetResolvedType(dex::TypeIndex type_idx) {
  // It is theorized that a load acquire is not required since obtaining the resolved class will
  // always have an address dependency or a lock.
  return GetResolvedTypes()[TypeSlotIndex(type_idx)].load(
      std::memory_order_relaxed).GetObjectForIndex(type_idx.index_);
}

inline void DexCache::SetResolvedType(dex::TypeIndex type_idx, ObjPtr<Class> resolved) {
  DCHECK(resolved != nullptr);
  // TODO: Default transaction support.
  // Use a release store for SetResolvedType. This is done to prevent other threads from seeing a
  // class but not necessarily seeing the loaded members like the static fields array.
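  // The matching load in GetResolvedType() above is relaxed; publication relies on
  // the reader's address dependency on the returned Class*. A rough sketch of the
  // intended pairing (illustrative pseudocode only):
  //
  //   writer: initialize members of |resolved|; slot.store(pair, release);
  //   reader: pair = slot.load(relaxed); read pair.object->members;
  //
  // The dependent reads of the members are then expected to observe the values
  // published by the release store.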
  // See b/32075261.
  GetResolvedTypes()[TypeSlotIndex(type_idx)].store(
      TypeDexCachePair(resolved, type_idx.index_), std::memory_order_release);
  // TODO: Fine-grained marking, so that we don't need to go through all arrays in full.
  WriteBarrier::ForEveryFieldWrite(this);
}

inline void DexCache::ClearResolvedType(dex::TypeIndex type_idx) {
  DCHECK(Runtime::Current()->IsAotCompiler());
  uint32_t slot_idx = TypeSlotIndex(type_idx);
  TypeDexCacheType* slot = &GetResolvedTypes()[slot_idx];
  // This is racy but should only be called from the single-threaded ImageWriter and tests.
  if (slot->load(std::memory_order_relaxed).index == type_idx.index_) {
    TypeDexCachePair cleared(nullptr, TypeDexCachePair::InvalidIndexForSlot(slot_idx));
    slot->store(cleared, std::memory_order_relaxed);
  }
}

inline uint32_t DexCache::MethodTypeSlotIndex(dex::ProtoIndex proto_idx) {
  DCHECK(Runtime::Current()->IsMethodHandlesEnabled());
  DCHECK_LT(proto_idx.index_, GetDexFile()->NumProtoIds());
  const uint32_t slot_idx = proto_idx.index_ % kDexCacheMethodTypeCacheSize;
  DCHECK_LT(slot_idx, NumResolvedMethodTypes());
  return slot_idx;
}

inline MethodType* DexCache::GetResolvedMethodType(dex::ProtoIndex proto_idx) {
  return GetResolvedMethodTypes()[MethodTypeSlotIndex(proto_idx)].load(
      std::memory_order_relaxed).GetObjectForIndex(proto_idx.index_);
}

inline void DexCache::SetResolvedMethodType(dex::ProtoIndex proto_idx, MethodType* resolved) {
  DCHECK(resolved != nullptr);
  GetResolvedMethodTypes()[MethodTypeSlotIndex(proto_idx)].store(
      MethodTypeDexCachePair(resolved, proto_idx.index_), std::memory_order_relaxed);
  // TODO: Fine-grained marking, so that we don't need to go through all arrays in full.
  WriteBarrier::ForEveryFieldWrite(this);
}

inline CallSite* DexCache::GetResolvedCallSite(uint32_t call_site_idx) {
  DCHECK(Runtime::Current()->IsMethodHandlesEnabled());
  DCHECK_LT(call_site_idx, GetDexFile()->NumCallSiteIds());
  GcRoot<mirror::CallSite>& target = GetResolvedCallSites()[call_site_idx];
  Atomic<GcRoot<mirror::CallSite>>& ref =
      reinterpret_cast<Atomic<GcRoot<mirror::CallSite>>&>(target);
  return ref.load(std::memory_order_seq_cst).Read();
}

inline ObjPtr<CallSite> DexCache::SetResolvedCallSite(uint32_t call_site_idx,
                                                      ObjPtr<CallSite> call_site) {
  DCHECK(Runtime::Current()->IsMethodHandlesEnabled());
  DCHECK_LT(call_site_idx, GetDexFile()->NumCallSiteIds());

  GcRoot<mirror::CallSite> null_call_site(nullptr);
  GcRoot<mirror::CallSite> candidate(call_site);
  GcRoot<mirror::CallSite>& target = GetResolvedCallSites()[call_site_idx];

  // The first assignment for a given call site wins.
  Atomic<GcRoot<mirror::CallSite>>& ref =
      reinterpret_cast<Atomic<GcRoot<mirror::CallSite>>&>(target);
  if (ref.CompareAndSetStrongSequentiallyConsistent(null_call_site, candidate)) {
    // TODO: Fine-grained marking, so that we don't need to go through all arrays in full.
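    // The write barrier below is only needed on this winning branch: a failed CAS
    // wrote nothing, so the losing caller simply adopts and returns the call site
    // that was installed first.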
    WriteBarrier::ForEveryFieldWrite(this);
    return call_site;
  } else {
    return target.Read();
  }
}

inline uint32_t DexCache::FieldSlotIndex(uint32_t field_idx) {
  DCHECK_LT(field_idx, GetDexFile()->NumFieldIds());
  const uint32_t slot_idx = field_idx % kDexCacheFieldCacheSize;
  DCHECK_LT(slot_idx, NumResolvedFields());
  return slot_idx;
}

inline ArtField* DexCache::GetResolvedField(uint32_t field_idx, PointerSize ptr_size) {
  DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), ptr_size);
  auto pair = GetNativePairPtrSize(GetResolvedFields(), FieldSlotIndex(field_idx), ptr_size);
  return pair.GetObjectForIndex(field_idx);
}

inline void DexCache::SetResolvedField(uint32_t field_idx, ArtField* field, PointerSize ptr_size) {
  DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), ptr_size);
  DCHECK(field != nullptr);
  FieldDexCachePair pair(field, field_idx);
  SetNativePairPtrSize(GetResolvedFields(), FieldSlotIndex(field_idx), pair, ptr_size);
}

inline void DexCache::ClearResolvedField(uint32_t field_idx, PointerSize ptr_size) {
  DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), ptr_size);
  uint32_t slot_idx = FieldSlotIndex(field_idx);
  auto* resolved_fields = GetResolvedFields();
  // This is racy but should only be called from the single-threaded ImageWriter.
  DCHECK(Runtime::Current()->IsAotCompiler());
  if (GetNativePairPtrSize(resolved_fields, slot_idx, ptr_size).index == field_idx) {
    FieldDexCachePair cleared(nullptr, FieldDexCachePair::InvalidIndexForSlot(slot_idx));
    SetNativePairPtrSize(resolved_fields, slot_idx, cleared, ptr_size);
  }
}

inline uint32_t DexCache::MethodSlotIndex(uint32_t method_idx) {
  DCHECK_LT(method_idx, GetDexFile()->NumMethodIds());
  const uint32_t slot_idx = method_idx % kDexCacheMethodCacheSize;
  DCHECK_LT(slot_idx, NumResolvedMethods());
  return slot_idx;
}

inline ArtMethod* DexCache::GetResolvedMethod(uint32_t method_idx, PointerSize ptr_size) {
  DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), ptr_size);
  auto pair = GetNativePairPtrSize(GetResolvedMethods(), MethodSlotIndex(method_idx), ptr_size);
  return pair.GetObjectForIndex(method_idx);
}

inline void DexCache::SetResolvedMethod(uint32_t method_idx,
                                        ArtMethod* method,
                                        PointerSize ptr_size) {
  DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), ptr_size);
  DCHECK(method != nullptr);
  MethodDexCachePair pair(method, method_idx);
  SetNativePairPtrSize(GetResolvedMethods(), MethodSlotIndex(method_idx), pair, ptr_size);
}

inline void DexCache::ClearResolvedMethod(uint32_t method_idx, PointerSize ptr_size) {
  DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), ptr_size);
  uint32_t slot_idx = MethodSlotIndex(method_idx);
  auto* resolved_methods = GetResolvedMethods();
  // This is racy but should only be called from the single-threaded ImageWriter.
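  // Clearing writes InvalidIndexForSlot(slot_idx), an index that by construction
  // cannot belong to any method_idx mapping to this slot (see its definition in
  // dex_cache.h), so later GetResolvedMethod() lookups miss and fall back to
  // resolution.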
  DCHECK(Runtime::Current()->IsAotCompiler());
  if (GetNativePairPtrSize(resolved_methods, slot_idx, ptr_size).index == method_idx) {
    MethodDexCachePair cleared(nullptr, MethodDexCachePair::InvalidIndexForSlot(slot_idx));
    SetNativePairPtrSize(resolved_methods, slot_idx, cleared, ptr_size);
  }
}

template <typename T>
NativeDexCachePair<T> DexCache::GetNativePairPtrSize(std::atomic<NativeDexCachePair<T>>* pair_array,
                                                     size_t idx,
                                                     PointerSize ptr_size) {
  if (ptr_size == PointerSize::k64) {
    auto* array = reinterpret_cast<std::atomic<ConversionPair64>*>(pair_array);
    ConversionPair64 value = AtomicLoadRelaxed16B(&array[idx]);
    return NativeDexCachePair<T>(reinterpret_cast64<T*>(value.first),
                                 dchecked_integral_cast<size_t>(value.second));
  } else {
    auto* array = reinterpret_cast<std::atomic<ConversionPair32>*>(pair_array);
    ConversionPair32 value = array[idx].load(std::memory_order_relaxed);
    return NativeDexCachePair<T>(reinterpret_cast32<T*>(value.first), value.second);
  }
}

template <typename T>
void DexCache::SetNativePairPtrSize(std::atomic<NativeDexCachePair<T>>* pair_array,
                                    size_t idx,
                                    NativeDexCachePair<T> pair,
                                    PointerSize ptr_size) {
  if (ptr_size == PointerSize::k64) {
    auto* array = reinterpret_cast<std::atomic<ConversionPair64>*>(pair_array);
    ConversionPair64 v(reinterpret_cast64<uint64_t>(pair.object), pair.index);
    AtomicStoreRelease16B(&array[idx], v);
  } else {
    auto* array = reinterpret_cast<std::atomic<ConversionPair32>*>(pair_array);
    ConversionPair32 v(reinterpret_cast32<uint32_t>(pair.object),
                       dchecked_integral_cast<uint32_t>(pair.index));
    array[idx].store(v, std::memory_order_release);
  }
}

template <typename T,
          ReadBarrierOption kReadBarrierOption,
          typename Visitor>
inline void VisitDexCachePairs(std::atomic<DexCachePair<T>>* pairs,
                               size_t num_pairs,
                               const Visitor& visitor)
    REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
  for (size_t i = 0; i < num_pairs; ++i) {
    DexCachePair<T> source = pairs[i].load(std::memory_order_relaxed);
    // NOTE: We need the "template" keyword here to avoid a compilation
    // failure. GcRoot<T> is a template argument-dependent type and we need to
    // tell the compiler to treat "Read" as a template rather than a field or
    // function. Otherwise, on encountering the "<" token, the compiler would
    // treat "Read" as a field.
    T* const before = source.object.template Read<kReadBarrierOption>();
    visitor.VisitRootIfNonNull(source.object.AddressWithoutBarrier());
    if (source.object.template Read<kReadBarrierOption>() != before) {
      pairs[i].store(source, std::memory_order_relaxed);
    }
  }
}

template <bool kVisitNativeRoots,
          VerifyObjectFlags kVerifyFlags,
          ReadBarrierOption kReadBarrierOption,
          typename Visitor>
inline void DexCache::VisitReferences(ObjPtr<Class> klass, const Visitor& visitor) {
  // Visit instance fields first.
  VisitInstanceFieldsReferences<kVerifyFlags, kReadBarrierOption>(klass, visitor);
  // Visit arrays after.
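  // The entries below are GC roots held in native arrays rather than ordinary
  // object reference fields, so they go through VisitDexCachePairs() above (which
  // writes a pair back only when the read barrier moved its object) instead of
  // the field visitor.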
  if (kVisitNativeRoots) {
    VisitDexCachePairs<String, kReadBarrierOption, Visitor>(
        GetStrings<kVerifyFlags>(), NumStrings<kVerifyFlags>(), visitor);

    VisitDexCachePairs<Class, kReadBarrierOption, Visitor>(
        GetResolvedTypes<kVerifyFlags>(), NumResolvedTypes<kVerifyFlags>(), visitor);

    VisitDexCachePairs<MethodType, kReadBarrierOption, Visitor>(
        GetResolvedMethodTypes<kVerifyFlags>(), NumResolvedMethodTypes<kVerifyFlags>(), visitor);

    GcRoot<mirror::CallSite>* resolved_call_sites = GetResolvedCallSites<kVerifyFlags>();
    size_t num_call_sites = NumResolvedCallSites<kVerifyFlags>();
    for (size_t i = 0; i != num_call_sites; ++i) {
      visitor.VisitRootIfNonNull(resolved_call_sites[i].AddressWithoutBarrier());
    }

    GcRoot<mirror::String>* const preresolved_strings = GetPreResolvedStrings();
    const size_t num_preresolved_strings = NumPreResolvedStrings();
    for (size_t i = 0; i != num_preresolved_strings; ++i) {
      visitor.VisitRootIfNonNull(preresolved_strings[i].AddressWithoutBarrier());
    }
  }
}

template <ReadBarrierOption kReadBarrierOption, typename Visitor>
inline void DexCache::FixupStrings(StringDexCacheType* dest, const Visitor& visitor) {
  StringDexCacheType* src = GetStrings();
  for (size_t i = 0, count = NumStrings(); i < count; ++i) {
    StringDexCachePair source = src[i].load(std::memory_order_relaxed);
    String* ptr = source.object.Read<kReadBarrierOption>();
    String* new_source = visitor(ptr);
    source.object = GcRoot<String>(new_source);
    dest[i].store(source, std::memory_order_relaxed);
  }
}

template <ReadBarrierOption kReadBarrierOption, typename Visitor>
inline void DexCache::FixupResolvedTypes(TypeDexCacheType* dest, const Visitor& visitor) {
  TypeDexCacheType* src = GetResolvedTypes();
  for (size_t i = 0, count = NumResolvedTypes(); i < count; ++i) {
    TypeDexCachePair source = src[i].load(std::memory_order_relaxed);
    Class* ptr = source.object.Read<kReadBarrierOption>();
    Class* new_source = visitor(ptr);
    source.object = GcRoot<Class>(new_source);
    dest[i].store(source, std::memory_order_relaxed);
  }
}

template <ReadBarrierOption kReadBarrierOption, typename Visitor>
inline void DexCache::FixupResolvedMethodTypes(MethodTypeDexCacheType* dest,
                                               const Visitor& visitor) {
  MethodTypeDexCacheType* src = GetResolvedMethodTypes();
  for (size_t i = 0, count = NumResolvedMethodTypes(); i < count; ++i) {
    MethodTypeDexCachePair source = src[i].load(std::memory_order_relaxed);
    MethodType* ptr = source.object.Read<kReadBarrierOption>();
    MethodType* new_source = visitor(ptr);
    source.object = GcRoot<MethodType>(new_source);
    dest[i].store(source, std::memory_order_relaxed);
  }
}

template <ReadBarrierOption kReadBarrierOption, typename Visitor>
inline void DexCache::FixupResolvedCallSites(GcRoot<mirror::CallSite>* dest,
                                             const Visitor& visitor) {
  GcRoot<mirror::CallSite>* src = GetResolvedCallSites();
  for (size_t i = 0, count = NumResolvedCallSites(); i < count; ++i) {
    mirror::CallSite* source = src[i].Read<kReadBarrierOption>();
    mirror::CallSite* new_source = visitor(source);
    dest[i] = GcRoot<mirror::CallSite>(new_source);
  }
}

inline ObjPtr<String> DexCache::GetLocation() {
  return GetFieldObject<String>(OFFSET_OF_OBJECT_MEMBER(DexCache, location_));
}

}  // namespace mirror
}  // namespace art

#endif  // ART_RUNTIME_MIRROR_DEX_CACHE_INL_H_