1 /* 2 * Copyright (C) 2011 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17 #include "compiler_driver.h" 18 19 #include <unistd.h> 20 21 #ifndef __APPLE__ 22 #include <malloc.h> // For mallinfo 23 #endif 24 25 #include <string_view> 26 #include <unordered_set> 27 #include <vector> 28 29 #include "android-base/logging.h" 30 #include "android-base/strings.h" 31 32 #include "art_field-inl.h" 33 #include "art_method-inl.h" 34 #include "base/arena_allocator.h" 35 #include "base/array_ref.h" 36 #include "base/bit_vector.h" 37 #include "base/enums.h" 38 #include "base/logging.h" // For VLOG 39 #include "base/stl_util.h" 40 #include "base/string_view_cpp20.h" 41 #include "base/systrace.h" 42 #include "base/time_utils.h" 43 #include "base/timing_logger.h" 44 #include "class_linker-inl.h" 45 #include "compiled_method-inl.h" 46 #include "compiler.h" 47 #include "compiler_callbacks.h" 48 #include "compiler_driver-inl.h" 49 #include "dex/class_accessor-inl.h" 50 #include "dex/descriptors_names.h" 51 #include "dex/dex_file-inl.h" 52 #include "dex/dex_file_annotations.h" 53 #include "dex/dex_instruction-inl.h" 54 #include "dex/dex_to_dex_compiler.h" 55 #include "dex/verification_results.h" 56 #include "dex/verified_method.h" 57 #include "driver/compiler_options.h" 58 #include "driver/dex_compilation_unit.h" 59 #include "gc/accounting/card_table-inl.h" 60 #include "gc/accounting/heap_bitmap.h" 61 #include "gc/space/image_space.h" 62 #include "gc/space/space.h" 63 #include "handle_scope-inl.h" 64 #include "intrinsics_enum.h" 65 #include "jni/jni_internal.h" 66 #include "linker/linker_patch.h" 67 #include "mirror/class-inl.h" 68 #include "mirror/class_loader.h" 69 #include "mirror/dex_cache-inl.h" 70 #include "mirror/object-inl.h" 71 #include "mirror/object-refvisitor-inl.h" 72 #include "mirror/object_array-inl.h" 73 #include "mirror/throwable.h" 74 #include "object_lock.h" 75 #include "profile/profile_compilation_info.h" 76 #include "runtime.h" 77 #include "runtime_intrinsics.h" 78 #include "scoped_thread_state_change-inl.h" 79 #include "thread.h" 80 #include "thread_list.h" 81 #include "thread_pool.h" 82 #include "trampolines/trampoline_compiler.h" 83 #include "transaction.h" 84 #include "utils/atomic_dex_ref_map-inl.h" 85 #include "utils/dex_cache_arrays_layout-inl.h" 86 #include "utils/swap_space.h" 87 #include "vdex_file.h" 88 #include "verifier/class_verifier.h" 89 #include "verifier/verifier_deps.h" 90 #include "verifier/verifier_enums.h" 91 92 namespace art { 93 94 static constexpr bool kTimeCompileMethod = !kIsDebugBuild; 95 96 // Print additional info during profile guided compilation. 97 static constexpr bool kDebugProfileGuidedCompilation = false; 98 99 // Max encoded fields allowed for initializing app image. Hardcode the number for now 100 // because 5000 should be large enough. 101 static constexpr uint32_t kMaxEncodedFields = 5000; 102 103 static double Percentage(size_t x, size_t y) { 104 return 100.0 * (static_cast<double>(x)) / (static_cast<double>(x + y)); 105 } 106 107 static void DumpStat(size_t x, size_t y, const char* str) { 108 if (x == 0 && y == 0) { 109 return; 110 } 111 LOG(INFO) << Percentage(x, y) << "% of " << str << " for " << (x + y) << " cases"; 112 } 113 114 class CompilerDriver::AOTCompilationStats { 115 public: 116 AOTCompilationStats() 117 : stats_lock_("AOT compilation statistics lock") {} 118 119 void Dump() { 120 DumpStat(resolved_instance_fields_, unresolved_instance_fields_, "instance fields resolved"); 121 DumpStat(resolved_local_static_fields_ + resolved_static_fields_, unresolved_static_fields_, 122 "static fields resolved"); 123 DumpStat(resolved_local_static_fields_, resolved_static_fields_ + unresolved_static_fields_, 124 "static fields local to a class"); 125 DumpStat(safe_casts_, not_safe_casts_, "check-casts removed based on type information"); 126 // Note, the code below subtracts the stat value so that when added to the stat value we have 127 // 100% of samples. TODO: clean this up. 128 DumpStat(type_based_devirtualization_, 129 resolved_methods_[kVirtual] + unresolved_methods_[kVirtual] + 130 resolved_methods_[kInterface] + unresolved_methods_[kInterface] - 131 type_based_devirtualization_, 132 "virtual/interface calls made direct based on type information"); 133 134 const size_t total = std::accumulate( 135 class_status_count_, 136 class_status_count_ + static_cast<size_t>(ClassStatus::kLast) + 1, 137 0u); 138 for (size_t i = 0; i <= static_cast<size_t>(ClassStatus::kLast); ++i) { 139 std::ostringstream oss; 140 oss << "classes with status " << static_cast<ClassStatus>(i); 141 DumpStat(class_status_count_[i], total - class_status_count_[i], oss.str().c_str()); 142 } 143 144 for (size_t i = 0; i <= kMaxInvokeType; i++) { 145 std::ostringstream oss; 146 oss << static_cast<InvokeType>(i) << " methods were AOT resolved"; 147 DumpStat(resolved_methods_[i], unresolved_methods_[i], oss.str().c_str()); 148 if (virtual_made_direct_[i] > 0) { 149 std::ostringstream oss2; 150 oss2 << static_cast<InvokeType>(i) << " methods made direct"; 151 DumpStat(virtual_made_direct_[i], 152 resolved_methods_[i] + unresolved_methods_[i] - virtual_made_direct_[i], 153 oss2.str().c_str()); 154 } 155 if (direct_calls_to_boot_[i] > 0) { 156 std::ostringstream oss2; 157 oss2 << static_cast<InvokeType>(i) << " method calls are direct into boot"; 158 DumpStat(direct_calls_to_boot_[i], 159 resolved_methods_[i] + unresolved_methods_[i] - direct_calls_to_boot_[i], 160 oss2.str().c_str()); 161 } 162 if (direct_methods_to_boot_[i] > 0) { 163 std::ostringstream oss2; 164 oss2 << static_cast<InvokeType>(i) << " method calls have methods in boot"; 165 DumpStat(direct_methods_to_boot_[i], 166 resolved_methods_[i] + unresolved_methods_[i] - direct_methods_to_boot_[i], 167 oss2.str().c_str()); 168 } 169 } 170 } 171 172 // Allow lossy statistics in non-debug builds. 173 #ifndef NDEBUG 174 #define STATS_LOCK() MutexLock mu(Thread::Current(), stats_lock_) 175 #else 176 #define STATS_LOCK() 177 #endif 178 179 void ResolvedInstanceField() REQUIRES(!stats_lock_) { 180 STATS_LOCK(); 181 resolved_instance_fields_++; 182 } 183 184 void UnresolvedInstanceField() REQUIRES(!stats_lock_) { 185 STATS_LOCK(); 186 unresolved_instance_fields_++; 187 } 188 189 void ResolvedLocalStaticField() REQUIRES(!stats_lock_) { 190 STATS_LOCK(); 191 resolved_local_static_fields_++; 192 } 193 194 void ResolvedStaticField() REQUIRES(!stats_lock_) { 195 STATS_LOCK(); 196 resolved_static_fields_++; 197 } 198 199 void UnresolvedStaticField() REQUIRES(!stats_lock_) { 200 STATS_LOCK(); 201 unresolved_static_fields_++; 202 } 203 204 // Indicate that type information from the verifier led to devirtualization. 205 void PreciseTypeDevirtualization() REQUIRES(!stats_lock_) { 206 STATS_LOCK(); 207 type_based_devirtualization_++; 208 } 209 210 // A check-cast could be eliminated due to verifier type analysis. 211 void SafeCast() REQUIRES(!stats_lock_) { 212 STATS_LOCK(); 213 safe_casts_++; 214 } 215 216 // A check-cast couldn't be eliminated due to verifier type analysis. 217 void NotASafeCast() REQUIRES(!stats_lock_) { 218 STATS_LOCK(); 219 not_safe_casts_++; 220 } 221 222 // Register a class status. 223 void AddClassStatus(ClassStatus status) REQUIRES(!stats_lock_) { 224 STATS_LOCK(); 225 ++class_status_count_[static_cast<size_t>(status)]; 226 } 227 228 private: 229 Mutex stats_lock_; 230 231 size_t resolved_instance_fields_ = 0u; 232 size_t unresolved_instance_fields_ = 0u; 233 234 size_t resolved_local_static_fields_ = 0u; 235 size_t resolved_static_fields_ = 0u; 236 size_t unresolved_static_fields_ = 0u; 237 // Type based devirtualization for invoke interface and virtual. 238 size_t type_based_devirtualization_ = 0u; 239 240 size_t resolved_methods_[kMaxInvokeType + 1] = {}; 241 size_t unresolved_methods_[kMaxInvokeType + 1] = {}; 242 size_t virtual_made_direct_[kMaxInvokeType + 1] = {}; 243 size_t direct_calls_to_boot_[kMaxInvokeType + 1] = {}; 244 size_t direct_methods_to_boot_[kMaxInvokeType + 1] = {}; 245 246 size_t safe_casts_ = 0u; 247 size_t not_safe_casts_ = 0u; 248 249 size_t class_status_count_[static_cast<size_t>(ClassStatus::kLast) + 1] = {}; 250 251 DISALLOW_COPY_AND_ASSIGN(AOTCompilationStats); 252 }; 253 254 CompilerDriver::CompilerDriver( 255 const CompilerOptions* compiler_options, 256 Compiler::Kind compiler_kind, 257 size_t thread_count, 258 int swap_fd) 259 : compiler_options_(compiler_options), 260 compiler_(), 261 compiler_kind_(compiler_kind), 262 number_of_soft_verifier_failures_(0), 263 had_hard_verifier_failure_(false), 264 parallel_thread_count_(thread_count), 265 stats_(new AOTCompilationStats), 266 compiled_method_storage_(swap_fd), 267 max_arena_alloc_(0), 268 dex_to_dex_compiler_(this) { 269 DCHECK(compiler_options_ != nullptr); 270 271 compiled_method_storage_.SetDedupeEnabled(compiler_options_->DeduplicateCode()); 272 compiler_.reset(Compiler::Create(*compiler_options, &compiled_method_storage_, compiler_kind)); 273 } 274 275 CompilerDriver::~CompilerDriver() { 276 compiled_methods_.Visit([this](const DexFileReference& ref ATTRIBUTE_UNUSED, 277 CompiledMethod* method) { 278 if (method != nullptr) { 279 CompiledMethod::ReleaseSwapAllocatedCompiledMethod(GetCompiledMethodStorage(), method); 280 } 281 }); 282 } 283 284 285 #define CREATE_TRAMPOLINE(type, abi, offset) \ 286 if (Is64BitInstructionSet(GetCompilerOptions().GetInstructionSet())) { \ 287 return CreateTrampoline64(GetCompilerOptions().GetInstructionSet(), \ 288 abi, \ 289 type ## _ENTRYPOINT_OFFSET(PointerSize::k64, offset)); \ 290 } else { \ 291 return CreateTrampoline32(GetCompilerOptions().GetInstructionSet(), \ 292 abi, \ 293 type ## _ENTRYPOINT_OFFSET(PointerSize::k32, offset)); \ 294 } 295 296 std::unique_ptr<const std::vector<uint8_t>> CompilerDriver::CreateJniDlsymLookup() const { 297 CREATE_TRAMPOLINE(JNI, kJniAbi, pDlsymLookup) 298 } 299 300 std::unique_ptr<const std::vector<uint8_t>> CompilerDriver::CreateQuickGenericJniTrampoline() 301 const { 302 CREATE_TRAMPOLINE(QUICK, kQuickAbi, pQuickGenericJniTrampoline) 303 } 304 305 std::unique_ptr<const std::vector<uint8_t>> CompilerDriver::CreateQuickImtConflictTrampoline() 306 const { 307 CREATE_TRAMPOLINE(QUICK, kQuickAbi, pQuickImtConflictTrampoline) 308 } 309 310 std::unique_ptr<const std::vector<uint8_t>> CompilerDriver::CreateQuickResolutionTrampoline() 311 const { 312 CREATE_TRAMPOLINE(QUICK, kQuickAbi, pQuickResolutionTrampoline) 313 } 314 315 std::unique_ptr<const std::vector<uint8_t>> CompilerDriver::CreateQuickToInterpreterBridge() 316 const { 317 CREATE_TRAMPOLINE(QUICK, kQuickAbi, pQuickToInterpreterBridge) 318 } 319 #undef CREATE_TRAMPOLINE 320 321 void CompilerDriver::CompileAll(jobject class_loader, 322 const std::vector<const DexFile*>& dex_files, 323 TimingLogger* timings) { 324 DCHECK(!Runtime::Current()->IsStarted()); 325 326 CheckThreadPools(); 327 328 if (GetCompilerOptions().IsBootImage()) { 329 // We don't need to setup the intrinsics for non boot image compilation, as 330 // those compilations will pick up a boot image that have the ArtMethod already 331 // set with the intrinsics flag. 332 InitializeIntrinsics(); 333 } 334 // Compile: 335 // 1) Compile all classes and methods enabled for compilation. May fall back to dex-to-dex 336 // compilation. 337 if (GetCompilerOptions().IsAnyCompilationEnabled()) { 338 Compile(class_loader, dex_files, timings); 339 } 340 if (GetCompilerOptions().GetDumpStats()) { 341 stats_->Dump(); 342 } 343 } 344 345 static optimizer::DexToDexCompiler::CompilationLevel GetDexToDexCompilationLevel( 346 Thread* self, const CompilerDriver& driver, Handle<mirror::ClassLoader> class_loader, 347 const DexFile& dex_file, const dex::ClassDef& class_def) 348 REQUIRES_SHARED(Locks::mutator_lock_) { 349 // When the dex file is uncompressed in the APK, we do not generate a copy in the .vdex 350 // file. As a result, dex2oat will map the dex file read-only, and we only need to check 351 // that to know if we can do quickening. 352 if (dex_file.GetContainer() != nullptr && dex_file.GetContainer()->IsReadOnly()) { 353 return optimizer::DexToDexCompiler::CompilationLevel::kDontDexToDexCompile; 354 } 355 auto* const runtime = Runtime::Current(); 356 DCHECK(driver.GetCompilerOptions().IsQuickeningCompilationEnabled()); 357 const char* descriptor = dex_file.GetClassDescriptor(class_def); 358 ClassLinker* class_linker = runtime->GetClassLinker(); 359 ObjPtr<mirror::Class> klass = class_linker->FindClass(self, descriptor, class_loader); 360 if (klass == nullptr) { 361 CHECK(self->IsExceptionPending()); 362 self->ClearException(); 363 return optimizer::DexToDexCompiler::CompilationLevel::kDontDexToDexCompile; 364 } 365 // DexToDex at the kOptimize level may introduce quickened opcodes, which replace symbolic 366 // references with actual offsets. We cannot re-verify such instructions. 367 // 368 // We store the verification information in the class status in the oat file, which the linker 369 // can validate (checksums) and use to skip load-time verification. It is thus safe to 370 // optimize when a class has been fully verified before. 371 optimizer::DexToDexCompiler::CompilationLevel max_level = 372 optimizer::DexToDexCompiler::CompilationLevel::kOptimize; 373 if (driver.GetCompilerOptions().GetDebuggable()) { 374 // We are debuggable so definitions of classes might be changed. We don't want to do any 375 // optimizations that could break that. 376 max_level = optimizer::DexToDexCompiler::CompilationLevel::kDontDexToDexCompile; 377 } 378 if (klass->IsVerified()) { 379 // Class is verified so we can enable DEX-to-DEX compilation for performance. 380 return max_level; 381 } else { 382 // Class verification has failed: do not run DEX-to-DEX optimizations. 383 return optimizer::DexToDexCompiler::CompilationLevel::kDontDexToDexCompile; 384 } 385 } 386 387 static optimizer::DexToDexCompiler::CompilationLevel GetDexToDexCompilationLevel( 388 Thread* self, 389 const CompilerDriver& driver, 390 jobject jclass_loader, 391 const DexFile& dex_file, 392 const dex::ClassDef& class_def) { 393 ScopedObjectAccess soa(self); 394 StackHandleScope<1> hs(soa.Self()); 395 Handle<mirror::ClassLoader> class_loader( 396 hs.NewHandle(soa.Decode<mirror::ClassLoader>(jclass_loader))); 397 return GetDexToDexCompilationLevel(self, driver, class_loader, dex_file, class_def); 398 } 399 400 // Does the runtime for the InstructionSet provide an implementation returned by 401 // GetQuickGenericJniStub allowing down calls that aren't compiled using a JNI compiler? 402 static bool InstructionSetHasGenericJniStub(InstructionSet isa) { 403 switch (isa) { 404 case InstructionSet::kArm: 405 case InstructionSet::kArm64: 406 case InstructionSet::kThumb2: 407 case InstructionSet::kMips: 408 case InstructionSet::kMips64: 409 case InstructionSet::kX86: 410 case InstructionSet::kX86_64: return true; 411 default: return false; 412 } 413 } 414 415 template <typename CompileFn> 416 static void CompileMethodHarness( 417 Thread* self, 418 CompilerDriver* driver, 419 const dex::CodeItem* code_item, 420 uint32_t access_flags, 421 InvokeType invoke_type, 422 uint16_t class_def_idx, 423 uint32_t method_idx, 424 Handle<mirror::ClassLoader> class_loader, 425 const DexFile& dex_file, 426 optimizer::DexToDexCompiler::CompilationLevel dex_to_dex_compilation_level, 427 Handle<mirror::DexCache> dex_cache, 428 CompileFn compile_fn) { 429 DCHECK(driver != nullptr); 430 CompiledMethod* compiled_method; 431 uint64_t start_ns = kTimeCompileMethod ? NanoTime() : 0; 432 MethodReference method_ref(&dex_file, method_idx); 433 434 compiled_method = compile_fn(self, 435 driver, 436 code_item, 437 access_flags, 438 invoke_type, 439 class_def_idx, 440 method_idx, 441 class_loader, 442 dex_file, 443 dex_to_dex_compilation_level, 444 dex_cache); 445 446 if (kTimeCompileMethod) { 447 uint64_t duration_ns = NanoTime() - start_ns; 448 if (duration_ns > MsToNs(driver->GetCompiler()->GetMaximumCompilationTimeBeforeWarning())) { 449 LOG(WARNING) << "Compilation of " << dex_file.PrettyMethod(method_idx) 450 << " took " << PrettyDuration(duration_ns); 451 } 452 } 453 454 if (compiled_method != nullptr) { 455 driver->AddCompiledMethod(method_ref, compiled_method); 456 } 457 458 if (self->IsExceptionPending()) { 459 ScopedObjectAccess soa(self); 460 LOG(FATAL) << "Unexpected exception compiling: " << dex_file.PrettyMethod(method_idx) << "\n" 461 << self->GetException()->Dump(); 462 } 463 } 464 465 static void CompileMethodDex2Dex( 466 Thread* self, 467 CompilerDriver* driver, 468 const dex::CodeItem* code_item, 469 uint32_t access_flags, 470 InvokeType invoke_type, 471 uint16_t class_def_idx, 472 uint32_t method_idx, 473 Handle<mirror::ClassLoader> class_loader, 474 const DexFile& dex_file, 475 optimizer::DexToDexCompiler::CompilationLevel dex_to_dex_compilation_level, 476 Handle<mirror::DexCache> dex_cache) { 477 auto dex_2_dex_fn = [](Thread* self ATTRIBUTE_UNUSED, 478 CompilerDriver* driver, 479 const dex::CodeItem* code_item, 480 uint32_t access_flags, 481 InvokeType invoke_type, 482 uint16_t class_def_idx, 483 uint32_t method_idx, 484 Handle<mirror::ClassLoader> class_loader, 485 const DexFile& dex_file, 486 optimizer::DexToDexCompiler::CompilationLevel dex_to_dex_compilation_level, 487 Handle<mirror::DexCache> dex_cache ATTRIBUTE_UNUSED) -> CompiledMethod* { 488 DCHECK(driver != nullptr); 489 MethodReference method_ref(&dex_file, method_idx); 490 491 optimizer::DexToDexCompiler* const compiler = &driver->GetDexToDexCompiler(); 492 493 if (compiler->ShouldCompileMethod(method_ref)) { 494 const VerificationResults* results = driver->GetCompilerOptions().GetVerificationResults(); 495 DCHECK(results != nullptr); 496 const VerifiedMethod* verified_method = results->GetVerifiedMethod(method_ref); 497 // Do not optimize if a VerifiedMethod is missing. SafeCast elision, 498 // for example, relies on it. 499 return compiler->CompileMethod( 500 code_item, 501 access_flags, 502 invoke_type, 503 class_def_idx, 504 method_idx, 505 class_loader, 506 dex_file, 507 (verified_method != nullptr) 508 ? dex_to_dex_compilation_level 509 : optimizer::DexToDexCompiler::CompilationLevel::kDontDexToDexCompile); 510 } 511 return nullptr; 512 }; 513 CompileMethodHarness(self, 514 driver, 515 code_item, 516 access_flags, 517 invoke_type, 518 class_def_idx, 519 method_idx, 520 class_loader, 521 dex_file, 522 dex_to_dex_compilation_level, 523 dex_cache, 524 dex_2_dex_fn); 525 } 526 527 static void CompileMethodQuick( 528 Thread* self, 529 CompilerDriver* driver, 530 const dex::CodeItem* code_item, 531 uint32_t access_flags, 532 InvokeType invoke_type, 533 uint16_t class_def_idx, 534 uint32_t method_idx, 535 Handle<mirror::ClassLoader> class_loader, 536 const DexFile& dex_file, 537 optimizer::DexToDexCompiler::CompilationLevel dex_to_dex_compilation_level, 538 Handle<mirror::DexCache> dex_cache) { 539 auto quick_fn = []( 540 Thread* self, 541 CompilerDriver* driver, 542 const dex::CodeItem* code_item, 543 uint32_t access_flags, 544 InvokeType invoke_type, 545 uint16_t class_def_idx, 546 uint32_t method_idx, 547 Handle<mirror::ClassLoader> class_loader, 548 const DexFile& dex_file, 549 optimizer::DexToDexCompiler::CompilationLevel dex_to_dex_compilation_level, 550 Handle<mirror::DexCache> dex_cache) { 551 DCHECK(driver != nullptr); 552 CompiledMethod* compiled_method = nullptr; 553 MethodReference method_ref(&dex_file, method_idx); 554 555 if ((access_flags & kAccNative) != 0) { 556 // Are we extracting only and have support for generic JNI down calls? 557 if (!driver->GetCompilerOptions().IsJniCompilationEnabled() && 558 InstructionSetHasGenericJniStub(driver->GetCompilerOptions().GetInstructionSet())) { 559 // Leaving this empty will trigger the generic JNI version 560 } else { 561 // Query any JNI optimization annotations such as @FastNative or @CriticalNative. 562 access_flags |= annotations::GetNativeMethodAnnotationAccessFlags( 563 dex_file, dex_file.GetClassDef(class_def_idx), method_idx); 564 565 compiled_method = driver->GetCompiler()->JniCompile( 566 access_flags, method_idx, dex_file, dex_cache); 567 CHECK(compiled_method != nullptr); 568 } 569 } else if ((access_flags & kAccAbstract) != 0) { 570 // Abstract methods don't have code. 571 } else { 572 const VerificationResults* results = driver->GetCompilerOptions().GetVerificationResults(); 573 DCHECK(results != nullptr); 574 const VerifiedMethod* verified_method = results->GetVerifiedMethod(method_ref); 575 bool compile = 576 // Basic checks, e.g., not <clinit>. 577 results->IsCandidateForCompilation(method_ref, access_flags) && 578 // Did not fail to create VerifiedMethod metadata. 579 verified_method != nullptr && 580 // Do not have failures that should punt to the interpreter. 581 !verified_method->HasRuntimeThrow() && 582 (verified_method->GetEncounteredVerificationFailures() & 583 (verifier::VERIFY_ERROR_FORCE_INTERPRETER | verifier::VERIFY_ERROR_LOCKING)) == 0 && 584 // Is eligable for compilation by methods-to-compile filter. 585 driver->ShouldCompileBasedOnProfile(method_ref); 586 587 if (compile) { 588 // NOTE: if compiler declines to compile this method, it will return null. 589 compiled_method = driver->GetCompiler()->Compile(code_item, 590 access_flags, 591 invoke_type, 592 class_def_idx, 593 method_idx, 594 class_loader, 595 dex_file, 596 dex_cache); 597 ProfileMethodsCheck check_type = 598 driver->GetCompilerOptions().CheckProfiledMethodsCompiled(); 599 if (UNLIKELY(check_type != ProfileMethodsCheck::kNone)) { 600 bool violation = driver->ShouldCompileBasedOnProfile(method_ref) && 601 (compiled_method == nullptr); 602 if (violation) { 603 std::ostringstream oss; 604 oss << "Failed to compile " 605 << method_ref.dex_file->PrettyMethod(method_ref.index) 606 << "[" << method_ref.dex_file->GetLocation() << "]" 607 << " as expected by profile"; 608 switch (check_type) { 609 case ProfileMethodsCheck::kNone: 610 break; 611 case ProfileMethodsCheck::kLog: 612 LOG(ERROR) << oss.str(); 613 break; 614 case ProfileMethodsCheck::kAbort: 615 LOG(FATAL_WITHOUT_ABORT) << oss.str(); 616 _exit(1); 617 } 618 } 619 } 620 } 621 if (compiled_method == nullptr && 622 dex_to_dex_compilation_level != 623 optimizer::DexToDexCompiler::CompilationLevel::kDontDexToDexCompile) { 624 DCHECK(!Runtime::Current()->UseJitCompilation()); 625 // TODO: add a command-line option to disable DEX-to-DEX compilation ? 626 driver->GetDexToDexCompiler().MarkForCompilation(self, method_ref); 627 } 628 } 629 return compiled_method; 630 }; 631 CompileMethodHarness(self, 632 driver, 633 code_item, 634 access_flags, 635 invoke_type, 636 class_def_idx, 637 method_idx, 638 class_loader, 639 dex_file, 640 dex_to_dex_compilation_level, 641 dex_cache, 642 quick_fn); 643 } 644 645 void CompilerDriver::Resolve(jobject class_loader, 646 const std::vector<const DexFile*>& dex_files, 647 TimingLogger* timings) { 648 // Resolution allocates classes and needs to run single-threaded to be deterministic. 649 bool force_determinism = GetCompilerOptions().IsForceDeterminism(); 650 ThreadPool* resolve_thread_pool = force_determinism 651 ? single_thread_pool_.get() 652 : parallel_thread_pool_.get(); 653 size_t resolve_thread_count = force_determinism ? 1U : parallel_thread_count_; 654 655 for (size_t i = 0; i != dex_files.size(); ++i) { 656 const DexFile* dex_file = dex_files[i]; 657 CHECK(dex_file != nullptr); 658 ResolveDexFile(class_loader, 659 *dex_file, 660 dex_files, 661 resolve_thread_pool, 662 resolve_thread_count, 663 timings); 664 } 665 } 666 667 void CompilerDriver::ResolveConstStrings(const std::vector<const DexFile*>& dex_files, 668 bool only_startup_strings, 669 TimingLogger* timings) { 670 if (only_startup_strings && GetCompilerOptions().GetProfileCompilationInfo() == nullptr) { 671 // If there is no profile, don't resolve any strings. Resolving all of the strings in the image 672 // will cause a bloated app image and slow down startup. 673 return; 674 } 675 ScopedObjectAccess soa(Thread::Current()); 676 StackHandleScope<1> hs(soa.Self()); 677 ClassLinker* const class_linker = Runtime::Current()->GetClassLinker(); 678 MutableHandle<mirror::DexCache> dex_cache(hs.NewHandle<mirror::DexCache>(nullptr)); 679 size_t num_instructions = 0u; 680 681 for (const DexFile* dex_file : dex_files) { 682 dex_cache.Assign(class_linker->FindDexCache(soa.Self(), *dex_file)); 683 bool added_preresolved_string_array = false; 684 if (only_startup_strings) { 685 // When resolving startup strings, create the preresolved strings array. 686 added_preresolved_string_array = dex_cache->AddPreResolvedStringsArray(); 687 } 688 TimingLogger::ScopedTiming t("Resolve const-string Strings", timings); 689 690 // TODO: Implement a profile-based filter for the boot image. See b/76145463. 691 for (ClassAccessor accessor : dex_file->GetClasses()) { 692 const ProfileCompilationInfo* profile_compilation_info = 693 GetCompilerOptions().GetProfileCompilationInfo(); 694 695 const bool is_startup_class = 696 profile_compilation_info != nullptr && 697 profile_compilation_info->ContainsClass(*dex_file, accessor.GetClassIdx()); 698 699 // Skip methods that failed to verify since they may contain invalid Dex code. 700 if (GetClassStatus(ClassReference(dex_file, accessor.GetClassDefIndex())) < 701 ClassStatus::kRetryVerificationAtRuntime) { 702 continue; 703 } 704 705 for (const ClassAccessor::Method& method : accessor.GetMethods()) { 706 const bool is_clinit = (method.GetAccessFlags() & kAccConstructor) != 0 && 707 (method.GetAccessFlags() & kAccStatic) != 0; 708 const bool is_startup_clinit = is_startup_class && is_clinit; 709 710 if (profile_compilation_info != nullptr && !is_startup_clinit) { 711 ProfileCompilationInfo::MethodHotness hotness = 712 profile_compilation_info->GetMethodHotness(method.GetReference()); 713 if (added_preresolved_string_array ? !hotness.IsStartup() : !hotness.IsInProfile()) { 714 continue; 715 } 716 } 717 718 // Resolve const-strings in the code. Done to have deterministic allocation behavior. Right 719 // now this is single-threaded for simplicity. 720 // TODO: Collect the relevant string indices in parallel, then allocate them sequentially 721 // in a stable order. 722 for (const DexInstructionPcPair& inst : method.GetInstructions()) { 723 switch (inst->Opcode()) { 724 case Instruction::CONST_STRING: 725 case Instruction::CONST_STRING_JUMBO: { 726 dex::StringIndex string_index((inst->Opcode() == Instruction::CONST_STRING) 727 ? inst->VRegB_21c() 728 : inst->VRegB_31c()); 729 ObjPtr<mirror::String> string = class_linker->ResolveString(string_index, dex_cache); 730 CHECK(string != nullptr) << "Could not allocate a string when forcing determinism"; 731 if (added_preresolved_string_array) { 732 dex_cache->GetPreResolvedStrings()[string_index.index_] = 733 GcRoot<mirror::String>(string); 734 } 735 ++num_instructions; 736 break; 737 } 738 739 default: 740 break; 741 } 742 } 743 } 744 } 745 } 746 VLOG(compiler) << "Resolved " << num_instructions << " const string instructions"; 747 } 748 749 // Initialize type check bit strings for check-cast and instance-of in the code. Done to have 750 // deterministic allocation behavior. Right now this is single-threaded for simplicity. 751 // TODO: Collect the relevant type indices in parallel, then process them sequentially in a 752 // stable order. 753 754 static void InitializeTypeCheckBitstrings(CompilerDriver* driver, 755 ClassLinker* class_linker, 756 Handle<mirror::DexCache> dex_cache, 757 const DexFile& dex_file, 758 const ClassAccessor::Method& method) 759 REQUIRES_SHARED(Locks::mutator_lock_) { 760 for (const DexInstructionPcPair& inst : method.GetInstructions()) { 761 switch (inst->Opcode()) { 762 case Instruction::CHECK_CAST: 763 case Instruction::INSTANCE_OF: { 764 dex::TypeIndex type_index( 765 (inst->Opcode() == Instruction::CHECK_CAST) ? inst->VRegB_21c() : inst->VRegC_22c()); 766 const char* descriptor = dex_file.StringByTypeIdx(type_index); 767 // We currently do not use the bitstring type check for array or final (including 768 // primitive) classes. We may reconsider this in future if it's deemed to be beneficial. 769 // And we cannot use it for classes outside the boot image as we do not know the runtime 770 // value of their bitstring when compiling (it may not even get assigned at runtime). 771 if (descriptor[0] == 'L' && driver->GetCompilerOptions().IsImageClass(descriptor)) { 772 ObjPtr<mirror::Class> klass = 773 class_linker->LookupResolvedType(type_index, 774 dex_cache.Get(), 775 /* class_loader= */ nullptr); 776 CHECK(klass != nullptr) << descriptor << " should have been previously resolved."; 777 // Now assign the bitstring if the class is not final. Keep this in sync with sharpening. 778 if (!klass->IsFinal()) { 779 MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_); 780 SubtypeCheck<ObjPtr<mirror::Class>>::EnsureAssigned(klass); 781 } 782 } 783 break; 784 } 785 786 default: 787 break; 788 } 789 } 790 } 791 792 static void InitializeTypeCheckBitstrings(CompilerDriver* driver, 793 const std::vector<const DexFile*>& dex_files, 794 TimingLogger* timings) { 795 ScopedObjectAccess soa(Thread::Current()); 796 StackHandleScope<1> hs(soa.Self()); 797 ClassLinker* const class_linker = Runtime::Current()->GetClassLinker(); 798 MutableHandle<mirror::DexCache> dex_cache(hs.NewHandle<mirror::DexCache>(nullptr)); 799 800 for (const DexFile* dex_file : dex_files) { 801 dex_cache.Assign(class_linker->FindDexCache(soa.Self(), *dex_file)); 802 TimingLogger::ScopedTiming t("Initialize type check bitstrings", timings); 803 804 for (ClassAccessor accessor : dex_file->GetClasses()) { 805 // Direct and virtual methods. 806 for (const ClassAccessor::Method& method : accessor.GetMethods()) { 807 InitializeTypeCheckBitstrings(driver, class_linker, dex_cache, *dex_file, method); 808 } 809 } 810 } 811 } 812 813 inline void CompilerDriver::CheckThreadPools() { 814 DCHECK(parallel_thread_pool_ != nullptr); 815 DCHECK(single_thread_pool_ != nullptr); 816 } 817 818 static void EnsureVerifiedOrVerifyAtRuntime(jobject jclass_loader, 819 const std::vector<const DexFile*>& dex_files) { 820 ScopedObjectAccess soa(Thread::Current()); 821 StackHandleScope<2> hs(soa.Self()); 822 Handle<mirror::ClassLoader> class_loader( 823 hs.NewHandle(soa.Decode<mirror::ClassLoader>(jclass_loader))); 824 MutableHandle<mirror::Class> cls(hs.NewHandle<mirror::Class>(nullptr)); 825 ClassLinker* class_linker = Runtime::Current()->GetClassLinker(); 826 827 for (const DexFile* dex_file : dex_files) { 828 for (ClassAccessor accessor : dex_file->GetClasses()) { 829 cls.Assign(class_linker->FindClass(soa.Self(), accessor.GetDescriptor(), class_loader)); 830 if (cls == nullptr) { 831 soa.Self()->ClearException(); 832 } else if (&cls->GetDexFile() == dex_file) { 833 DCHECK(cls->IsErroneous() || cls->IsVerified() || cls->ShouldVerifyAtRuntime()) 834 << cls->PrettyClass() 835 << " " << cls->GetStatus(); 836 } 837 } 838 } 839 } 840 841 void CompilerDriver::PreCompile(jobject class_loader, 842 const std::vector<const DexFile*>& dex_files, 843 TimingLogger* timings, 844 /*inout*/ HashSet<std::string>* image_classes, 845 /*out*/ VerificationResults* verification_results) { 846 CheckThreadPools(); 847 848 VLOG(compiler) << "Before precompile " << GetMemoryUsageString(false); 849 850 compiled_classes_.AddDexFiles(GetCompilerOptions().GetDexFilesForOatFile()); 851 dex_to_dex_compiler_.SetDexFiles(GetCompilerOptions().GetDexFilesForOatFile()); 852 853 // Precompile: 854 // 1) Load image classes. 855 // 2) Resolve all classes. 856 // 3) For deterministic boot image, resolve strings for const-string instructions. 857 // 4) Attempt to verify all classes. 858 // 5) Attempt to initialize image classes, and trivially initialized classes. 859 // 6) Update the set of image classes. 860 // 7) For deterministic boot image, initialize bitstrings for type checking. 861 862 LoadImageClasses(timings, image_classes); 863 VLOG(compiler) << "LoadImageClasses: " << GetMemoryUsageString(false); 864 865 if (compiler_options_->IsAnyCompilationEnabled()) { 866 // Avoid adding the dex files in the case where we aren't going to add compiled methods. 867 // This reduces RAM usage for this case. 868 for (const DexFile* dex_file : dex_files) { 869 // Can be already inserted. This happens for gtests. 870 if (!compiled_methods_.HaveDexFile(dex_file)) { 871 compiled_methods_.AddDexFile(dex_file); 872 } 873 } 874 // Resolve eagerly to prepare for compilation. 875 Resolve(class_loader, dex_files, timings); 876 VLOG(compiler) << "Resolve: " << GetMemoryUsageString(false); 877 } 878 879 if (compiler_options_->AssumeClassesAreVerified()) { 880 VLOG(compiler) << "Verify none mode specified, skipping verification."; 881 SetVerified(class_loader, dex_files, timings); 882 } 883 884 if (!compiler_options_->IsVerificationEnabled()) { 885 return; 886 } 887 888 Verify(class_loader, dex_files, timings, verification_results); 889 VLOG(compiler) << "Verify: " << GetMemoryUsageString(false); 890 891 if (GetCompilerOptions().IsForceDeterminism() && GetCompilerOptions().IsBootImage()) { 892 // Resolve strings from const-string. Do this now to have a deterministic image. 893 ResolveConstStrings(dex_files, /*only_startup_strings=*/ false, timings); 894 VLOG(compiler) << "Resolve const-strings: " << GetMemoryUsageString(false); 895 } else if (GetCompilerOptions().ResolveStartupConstStrings()) { 896 ResolveConstStrings(dex_files, /*only_startup_strings=*/ true, timings); 897 } 898 899 if (had_hard_verifier_failure_ && GetCompilerOptions().AbortOnHardVerifierFailure()) { 900 // Avoid dumping threads. Even if we shut down the thread pools, there will still be three 901 // instances of this thread's stack. 902 LOG(FATAL_WITHOUT_ABORT) << "Had a hard failure verifying all classes, and was asked to abort " 903 << "in such situations. Please check the log."; 904 _exit(1); 905 } else if (number_of_soft_verifier_failures_ > 0 && 906 GetCompilerOptions().AbortOnSoftVerifierFailure()) { 907 LOG(FATAL_WITHOUT_ABORT) << "Had " << number_of_soft_verifier_failures_ << " soft failure(s) " 908 << "verifying all classes, and was asked to abort in such situations. " 909 << "Please check the log."; 910 _exit(1); 911 } 912 913 if (compiler_options_->IsAnyCompilationEnabled()) { 914 if (kIsDebugBuild) { 915 EnsureVerifiedOrVerifyAtRuntime(class_loader, dex_files); 916 } 917 InitializeClasses(class_loader, dex_files, timings); 918 VLOG(compiler) << "InitializeClasses: " << GetMemoryUsageString(false); 919 } 920 921 UpdateImageClasses(timings, image_classes); 922 VLOG(compiler) << "UpdateImageClasses: " << GetMemoryUsageString(false); 923 924 if (kBitstringSubtypeCheckEnabled && 925 GetCompilerOptions().IsForceDeterminism() && GetCompilerOptions().IsBootImage()) { 926 // Initialize type check bit string used by check-cast and instanceof. 927 // Do this now to have a deterministic image. 928 // Note: This is done after UpdateImageClasses() at it relies on the image classes to be final. 929 InitializeTypeCheckBitstrings(this, dex_files, timings); 930 } 931 } 932 933 bool CompilerDriver::ShouldCompileBasedOnProfile(const MethodReference& method_ref) const { 934 // If compiling the apex image, filter out methods not in an apex file (the profile used 935 // for boot classpath is the same between the apex image and the boot image, so it includes 936 /// framewkro methods). 937 if (compiler_options_->IsApexBootImage() && 938 !android::base::StartsWith(method_ref.dex_file->GetLocation(), "/apex")) { 939 return false; 940 } 941 942 // Profile compilation info may be null if no profile is passed. 943 if (!CompilerFilter::DependsOnProfile(compiler_options_->GetCompilerFilter())) { 944 // Use the compiler filter instead of the presence of profile_compilation_info_ since 945 // we may want to have full speed compilation along with profile based layout optimizations. 946 return true; 947 } 948 // If we are using a profile filter but do not have a profile compilation info, compile nothing. 949 const ProfileCompilationInfo* profile_compilation_info = 950 GetCompilerOptions().GetProfileCompilationInfo(); 951 if (profile_compilation_info == nullptr) { 952 return false; 953 } 954 // Compile only hot methods, it is the profile saver's job to decide what startup methods to mark 955 // as hot. 956 bool result = profile_compilation_info->GetMethodHotness(method_ref).IsHot(); 957 958 if (kDebugProfileGuidedCompilation) { 959 LOG(INFO) << "[ProfileGuidedCompilation] " 960 << (result ? "Compiled" : "Skipped") << " method:" << method_ref.PrettyMethod(true); 961 } 962 963 return result; 964 } 965 966 class ResolveCatchBlockExceptionsClassVisitor : public ClassVisitor { 967 public: 968 ResolveCatchBlockExceptionsClassVisitor() : classes_() {} 969 970 bool operator()(ObjPtr<mirror::Class> c) override REQUIRES_SHARED(Locks::mutator_lock_) { 971 classes_.push_back(c); 972 return true; 973 } 974 975 void FindExceptionTypesToResolve( 976 std::set<std::pair<dex::TypeIndex, const DexFile*>>* exceptions_to_resolve) 977 REQUIRES_SHARED(Locks::mutator_lock_) { 978 const auto pointer_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize(); 979 for (ObjPtr<mirror::Class> klass : classes_) { 980 for (ArtMethod& method : klass->GetMethods(pointer_size)) { 981 FindExceptionTypesToResolveForMethod(&method, exceptions_to_resolve); 982 } 983 } 984 } 985 986 private: 987 void FindExceptionTypesToResolveForMethod( 988 ArtMethod* method, 989 std::set<std::pair<dex::TypeIndex, const DexFile*>>* exceptions_to_resolve) 990 REQUIRES_SHARED(Locks::mutator_lock_) { 991 if (method->GetCodeItem() == nullptr) { 992 return; // native or abstract method 993 } 994 CodeItemDataAccessor accessor(method->DexInstructionData()); 995 if (accessor.TriesSize() == 0) { 996 return; // nothing to process 997 } 998 const uint8_t* encoded_catch_handler_list = accessor.GetCatchHandlerData(); 999 size_t num_encoded_catch_handlers = DecodeUnsignedLeb128(&encoded_catch_handler_list); 1000 for (size_t i = 0; i < num_encoded_catch_handlers; i++) { 1001 int32_t encoded_catch_handler_size = DecodeSignedLeb128(&encoded_catch_handler_list); 1002 bool has_catch_all = false; 1003 if (encoded_catch_handler_size <= 0) { 1004 encoded_catch_handler_size = -encoded_catch_handler_size; 1005 has_catch_all = true; 1006 } 1007 for (int32_t j = 0; j < encoded_catch_handler_size; j++) { 1008 dex::TypeIndex encoded_catch_handler_handlers_type_idx = 1009 dex::TypeIndex(DecodeUnsignedLeb128(&encoded_catch_handler_list)); 1010 // Add to set of types to resolve if not already in the dex cache resolved types 1011 if (!method->IsResolvedTypeIdx(encoded_catch_handler_handlers_type_idx)) { 1012 exceptions_to_resolve->emplace(encoded_catch_handler_handlers_type_idx, 1013 method->GetDexFile()); 1014 } 1015 // ignore address associated with catch handler 1016 DecodeUnsignedLeb128(&encoded_catch_handler_list); 1017 } 1018 if (has_catch_all) { 1019 // ignore catch all address 1020 DecodeUnsignedLeb128(&encoded_catch_handler_list); 1021 } 1022 } 1023 } 1024 1025 std::vector<ObjPtr<mirror::Class>> classes_; 1026 }; 1027 1028 class RecordImageClassesVisitor : public ClassVisitor { 1029 public: 1030 explicit RecordImageClassesVisitor(HashSet<std::string>* image_classes) 1031 : image_classes_(image_classes) {} 1032 1033 bool operator()(ObjPtr<mirror::Class> klass) override REQUIRES_SHARED(Locks::mutator_lock_) { 1034 std::string temp; 1035 image_classes_->insert(klass->GetDescriptor(&temp)); 1036 return true; 1037 } 1038 1039 private: 1040 HashSet<std::string>* const image_classes_; 1041 }; 1042 1043 // Make a list of descriptors for classes to include in the image 1044 void CompilerDriver::LoadImageClasses(TimingLogger* timings, 1045 /*inout*/ HashSet<std::string>* image_classes) { 1046 CHECK(timings != nullptr); 1047 if (!GetCompilerOptions().IsBootImage()) { 1048 return; 1049 } 1050 1051 TimingLogger::ScopedTiming t("LoadImageClasses", timings); 1052 // Make a first class to load all classes explicitly listed in the file 1053 Thread* self = Thread::Current(); 1054 ScopedObjectAccess soa(self); 1055 ClassLinker* class_linker = Runtime::Current()->GetClassLinker(); 1056 CHECK(image_classes != nullptr); 1057 for (auto it = image_classes->begin(), end = image_classes->end(); it != end;) { 1058 const std::string& descriptor(*it); 1059 StackHandleScope<1> hs(self); 1060 Handle<mirror::Class> klass( 1061 hs.NewHandle(class_linker->FindSystemClass(self, descriptor.c_str()))); 1062 if (klass == nullptr) { 1063 VLOG(compiler) << "Failed to find class " << descriptor; 1064 it = image_classes->erase(it); 1065 self->ClearException(); 1066 } else { 1067 ++it; 1068 } 1069 } 1070 1071 // Resolve exception classes referenced by the loaded classes. The catch logic assumes 1072 // exceptions are resolved by the verifier when there is a catch block in an interested method. 1073 // Do this here so that exception classes appear to have been specified image classes. 1074 std::set<std::pair<dex::TypeIndex, const DexFile*>> unresolved_exception_types; 1075 StackHandleScope<1> hs(self); 1076 Handle<mirror::Class> java_lang_Throwable( 1077 hs.NewHandle(class_linker->FindSystemClass(self, "Ljava/lang/Throwable;"))); 1078 do { 1079 unresolved_exception_types.clear(); 1080 { 1081 // Thread suspension is not allowed while ResolveCatchBlockExceptionsClassVisitor 1082 // is using a std::vector<ObjPtr<mirror::Class>>. 1083 ScopedAssertNoThreadSuspension ants(__FUNCTION__); 1084 ResolveCatchBlockExceptionsClassVisitor visitor; 1085 class_linker->VisitClasses(&visitor); 1086 visitor.FindExceptionTypesToResolve(&unresolved_exception_types); 1087 } 1088 for (const auto& exception_type : unresolved_exception_types) { 1089 dex::TypeIndex exception_type_idx = exception_type.first; 1090 const DexFile* dex_file = exception_type.second; 1091 StackHandleScope<1> hs2(self); 1092 Handle<mirror::DexCache> dex_cache(hs2.NewHandle(class_linker->RegisterDexFile(*dex_file, 1093 nullptr))); 1094 ObjPtr<mirror::Class> klass = 1095 (dex_cache != nullptr) 1096 ? class_linker->ResolveType(exception_type_idx, 1097 dex_cache, 1098 ScopedNullHandle<mirror::ClassLoader>()) 1099 : nullptr; 1100 if (klass == nullptr) { 1101 const dex::TypeId& type_id = dex_file->GetTypeId(exception_type_idx); 1102 const char* descriptor = dex_file->GetTypeDescriptor(type_id); 1103 LOG(FATAL) << "Failed to resolve class " << descriptor; 1104 } 1105 DCHECK(java_lang_Throwable->IsAssignableFrom(klass)); 1106 } 1107 // Resolving exceptions may load classes that reference more exceptions, iterate until no 1108 // more are found 1109 } while (!unresolved_exception_types.empty()); 1110 1111 // We walk the roots looking for classes so that we'll pick up the 1112 // above classes plus any classes them depend on such super 1113 // classes, interfaces, and the required ClassLinker roots. 1114 RecordImageClassesVisitor visitor(image_classes); 1115 class_linker->VisitClasses(&visitor); 1116 1117 CHECK(!image_classes->empty()); 1118 } 1119 1120 static void MaybeAddToImageClasses(Thread* self, 1121 ObjPtr<mirror::Class> klass, 1122 HashSet<std::string>* image_classes) 1123 REQUIRES_SHARED(Locks::mutator_lock_) { 1124 DCHECK_EQ(self, Thread::Current()); 1125 StackHandleScope<1> hs(self); 1126 std::string temp; 1127 const PointerSize pointer_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize(); 1128 while (!klass->IsObjectClass()) { 1129 const char* descriptor = klass->GetDescriptor(&temp); 1130 if (image_classes->find(std::string_view(descriptor)) != image_classes->end()) { 1131 break; // Previously inserted. 1132 } 1133 image_classes->insert(descriptor); 1134 VLOG(compiler) << "Adding " << descriptor << " to image classes"; 1135 for (size_t i = 0, num_interfaces = klass->NumDirectInterfaces(); i != num_interfaces; ++i) { 1136 ObjPtr<mirror::Class> interface = mirror::Class::GetDirectInterface(self, klass, i); 1137 DCHECK(interface != nullptr); 1138 MaybeAddToImageClasses(self, interface, image_classes); 1139 } 1140 for (auto& m : klass->GetVirtualMethods(pointer_size)) { 1141 MaybeAddToImageClasses(self, m.GetDeclaringClass(), image_classes); 1142 } 1143 if (klass->IsArrayClass()) { 1144 MaybeAddToImageClasses(self, klass->GetComponentType(), image_classes); 1145 } 1146 klass = klass->GetSuperClass(); 1147 } 1148 } 1149 1150 // Keeps all the data for the update together. Also doubles as the reference visitor. 1151 // Note: we can use object pointers because we suspend all threads. 1152 class ClinitImageUpdate { 1153 public: 1154 static ClinitImageUpdate* Create(VariableSizedHandleScope& hs, 1155 HashSet<std::string>* image_class_descriptors, 1156 Thread* self, 1157 ClassLinker* linker) { 1158 std::unique_ptr<ClinitImageUpdate> res(new ClinitImageUpdate(hs, 1159 image_class_descriptors, 1160 self, 1161 linker)); 1162 return res.release(); 1163 } 1164 1165 ~ClinitImageUpdate() { 1166 // Allow others to suspend again. 1167 self_->EndAssertNoThreadSuspension(old_cause_); 1168 } 1169 1170 // Visitor for VisitReferences. 1171 void operator()(ObjPtr<mirror::Object> object, 1172 MemberOffset field_offset, 1173 bool is_static ATTRIBUTE_UNUSED) const 1174 REQUIRES_SHARED(Locks::mutator_lock_) { 1175 mirror::Object* ref = object->GetFieldObject<mirror::Object>(field_offset); 1176 if (ref != nullptr) { 1177 VisitClinitClassesObject(ref); 1178 } 1179 } 1180 1181 // java.lang.ref.Reference visitor for VisitReferences. 1182 void operator()(ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED, 1183 ObjPtr<mirror::Reference> ref ATTRIBUTE_UNUSED) const {} 1184 1185 // Ignore class native roots. 1186 void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) 1187 const {} 1188 void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const {} 1189 1190 void Walk() REQUIRES_SHARED(Locks::mutator_lock_) { 1191 // Use the initial classes as roots for a search. 1192 for (Handle<mirror::Class> klass_root : image_classes_) { 1193 VisitClinitClassesObject(klass_root.Get()); 1194 } 1195 Thread* self = Thread::Current(); 1196 ScopedAssertNoThreadSuspension ants(__FUNCTION__); 1197 for (Handle<mirror::Class> h_klass : to_insert_) { 1198 MaybeAddToImageClasses(self, h_klass.Get(), image_class_descriptors_); 1199 } 1200 } 1201 1202 private: 1203 class FindImageClassesVisitor : public ClassVisitor { 1204 public: 1205 explicit FindImageClassesVisitor(VariableSizedHandleScope& hs, 1206 ClinitImageUpdate* data) 1207 : data_(data), 1208 hs_(hs) {} 1209 1210 bool operator()(ObjPtr<mirror::Class> klass) override REQUIRES_SHARED(Locks::mutator_lock_) { 1211 std::string temp; 1212 std::string_view name(klass->GetDescriptor(&temp)); 1213 auto it = data_->image_class_descriptors_->find(name); 1214 if (it != data_->image_class_descriptors_->end()) { 1215 if (LIKELY(klass->IsResolved())) { 1216 data_->image_classes_.push_back(hs_.NewHandle(klass)); 1217 } else { 1218 DCHECK(klass->IsErroneousUnresolved()); 1219 VLOG(compiler) << "Removing unresolved class from image classes: " << name; 1220 data_->image_class_descriptors_->erase(it); 1221 } 1222 } else { 1223 // Check whether it is initialized and has a clinit. They must be kept, too. 1224 if (klass->IsInitialized() && klass->FindClassInitializer( 1225 Runtime::Current()->GetClassLinker()->GetImagePointerSize()) != nullptr) { 1226 data_->image_classes_.push_back(hs_.NewHandle(klass)); 1227 } 1228 } 1229 return true; 1230 } 1231 1232 private: 1233 ClinitImageUpdate* const data_; 1234 VariableSizedHandleScope& hs_; 1235 }; 1236 1237 ClinitImageUpdate(VariableSizedHandleScope& hs, 1238 HashSet<std::string>* image_class_descriptors, 1239 Thread* self, 1240 ClassLinker* linker) REQUIRES_SHARED(Locks::mutator_lock_) 1241 : hs_(hs), 1242 image_class_descriptors_(image_class_descriptors), 1243 self_(self) { 1244 CHECK(linker != nullptr); 1245 CHECK(image_class_descriptors != nullptr); 1246 1247 // Make sure nobody interferes with us. 1248 old_cause_ = self->StartAssertNoThreadSuspension("Boot image closure"); 1249 1250 // Find all the already-marked classes. 1251 WriterMutexLock mu(self, *Locks::heap_bitmap_lock_); 1252 FindImageClassesVisitor visitor(hs_, this); 1253 linker->VisitClasses(&visitor); 1254 } 1255 1256 void VisitClinitClassesObject(mirror::Object* object) const 1257 REQUIRES_SHARED(Locks::mutator_lock_) { 1258 DCHECK(object != nullptr); 1259 if (marked_objects_.find(object) != marked_objects_.end()) { 1260 // Already processed. 1261 return; 1262 } 1263 1264 // Mark it. 1265 marked_objects_.insert(object); 1266 1267 if (object->IsClass()) { 1268 // Add to the TODO list since MaybeAddToImageClasses may cause thread suspension. Thread 1269 // suspensionb is not safe to do in VisitObjects or VisitReferences. 1270 to_insert_.push_back(hs_.NewHandle(object->AsClass())); 1271 } else { 1272 // Else visit the object's class. 1273 VisitClinitClassesObject(object->GetClass()); 1274 } 1275 1276 // If it is not a DexCache, visit all references. 1277 if (!object->IsDexCache()) { 1278 object->VisitReferences(*this, *this); 1279 } 1280 } 1281 1282 VariableSizedHandleScope& hs_; 1283 mutable std::vector<Handle<mirror::Class>> to_insert_; 1284 mutable std::unordered_set<mirror::Object*> marked_objects_; 1285 HashSet<std::string>* const image_class_descriptors_; 1286 std::vector<Handle<mirror::Class>> image_classes_; 1287 Thread* const self_; 1288 const char* old_cause_; 1289 1290 DISALLOW_COPY_AND_ASSIGN(ClinitImageUpdate); 1291 }; 1292 1293 void CompilerDriver::UpdateImageClasses(TimingLogger* timings, 1294 /*inout*/ HashSet<std::string>* image_classes) { 1295 if (GetCompilerOptions().IsBootImage()) { 1296 TimingLogger::ScopedTiming t("UpdateImageClasses", timings); 1297 1298 Runtime* runtime = Runtime::Current(); 1299 1300 // Suspend all threads. 1301 ScopedSuspendAll ssa(__FUNCTION__); 1302 1303 VariableSizedHandleScope hs(Thread::Current()); 1304 std::string error_msg; 1305 std::unique_ptr<ClinitImageUpdate> update(ClinitImageUpdate::Create(hs, 1306 image_classes, 1307 Thread::Current(), 1308 runtime->GetClassLinker())); 1309 1310 // Do the marking. 1311 update->Walk(); 1312 } 1313 } 1314 1315 void CompilerDriver::ProcessedInstanceField(bool resolved) { 1316 if (!resolved) { 1317 stats_->UnresolvedInstanceField(); 1318 } else { 1319 stats_->ResolvedInstanceField(); 1320 } 1321 } 1322 1323 void CompilerDriver::ProcessedStaticField(bool resolved, bool local) { 1324 if (!resolved) { 1325 stats_->UnresolvedStaticField(); 1326 } else if (local) { 1327 stats_->ResolvedLocalStaticField(); 1328 } else { 1329 stats_->ResolvedStaticField(); 1330 } 1331 } 1332 1333 ArtField* CompilerDriver::ComputeInstanceFieldInfo(uint32_t field_idx, 1334 const DexCompilationUnit* mUnit, 1335 bool is_put, 1336 const ScopedObjectAccess& soa) { 1337 // Try to resolve the field and compiling method's class. 1338 ArtField* resolved_field; 1339 ObjPtr<mirror::Class> referrer_class; 1340 Handle<mirror::DexCache> dex_cache(mUnit->GetDexCache()); 1341 { 1342 Handle<mirror::ClassLoader> class_loader = mUnit->GetClassLoader(); 1343 resolved_field = ResolveField(soa, dex_cache, class_loader, field_idx, /* is_static= */ false); 1344 referrer_class = resolved_field != nullptr 1345 ? ResolveCompilingMethodsClass(soa, dex_cache, class_loader, mUnit) : nullptr; 1346 } 1347 bool can_link = false; 1348 if (resolved_field != nullptr && referrer_class != nullptr) { 1349 std::pair<bool, bool> fast_path = IsFastInstanceField( 1350 dex_cache.Get(), referrer_class, resolved_field, field_idx); 1351 can_link = is_put ? fast_path.second : fast_path.first; 1352 } 1353 ProcessedInstanceField(can_link); 1354 return can_link ? resolved_field : nullptr; 1355 } 1356 1357 bool CompilerDriver::ComputeInstanceFieldInfo(uint32_t field_idx, const DexCompilationUnit* mUnit, 1358 bool is_put, MemberOffset* field_offset, 1359 bool* is_volatile) { 1360 ScopedObjectAccess soa(Thread::Current()); 1361 ArtField* resolved_field = ComputeInstanceFieldInfo(field_idx, mUnit, is_put, soa); 1362 1363 if (resolved_field == nullptr) { 1364 // Conservative defaults. 1365 *is_volatile = true; 1366 *field_offset = MemberOffset(static_cast<size_t>(-1)); 1367 return false; 1368 } else { 1369 *is_volatile = resolved_field->IsVolatile(); 1370 *field_offset = resolved_field->GetOffset(); 1371 return true; 1372 } 1373 } 1374 1375 bool CompilerDriver::IsSafeCast(const DexCompilationUnit* mUnit, uint32_t dex_pc) { 1376 if (!compiler_options_->IsVerificationEnabled()) { 1377 // If we didn't verify, every cast has to be treated as non-safe. 1378 return false; 1379 } 1380 DCHECK(mUnit->GetVerifiedMethod() != nullptr); 1381 bool result = mUnit->GetVerifiedMethod()->IsSafeCast(dex_pc); 1382 if (result) { 1383 stats_->SafeCast(); 1384 } else { 1385 stats_->NotASafeCast(); 1386 } 1387 return result; 1388 } 1389 1390 class CompilationVisitor { 1391 public: 1392 virtual ~CompilationVisitor() {} 1393 virtual void Visit(size_t index) = 0; 1394 }; 1395 1396 class ParallelCompilationManager { 1397 public: 1398 ParallelCompilationManager(ClassLinker* class_linker, 1399 jobject class_loader, 1400 CompilerDriver* compiler, 1401 const DexFile* dex_file, 1402 const std::vector<const DexFile*>& dex_files, 1403 ThreadPool* thread_pool) 1404 : index_(0), 1405 class_linker_(class_linker), 1406 class_loader_(class_loader), 1407 compiler_(compiler), 1408 dex_file_(dex_file), 1409 dex_files_(dex_files), 1410 thread_pool_(thread_pool) {} 1411 1412 ClassLinker* GetClassLinker() const { 1413 CHECK(class_linker_ != nullptr); 1414 return class_linker_; 1415 } 1416 1417 jobject GetClassLoader() const { 1418 return class_loader_; 1419 } 1420 1421 CompilerDriver* GetCompiler() const { 1422 CHECK(compiler_ != nullptr); 1423 return compiler_; 1424 } 1425 1426 const DexFile* GetDexFile() const { 1427 CHECK(dex_file_ != nullptr); 1428 return dex_file_; 1429 } 1430 1431 const std::vector<const DexFile*>& GetDexFiles() const { 1432 return dex_files_; 1433 } 1434 1435 void ForAll(size_t begin, size_t end, CompilationVisitor* visitor, size_t work_units) 1436 REQUIRES(!*Locks::mutator_lock_) { 1437 ForAllLambda(begin, end, [visitor](size_t index) { visitor->Visit(index); }, work_units); 1438 } 1439 1440 template <typename Fn> 1441 void ForAllLambda(size_t begin, size_t end, Fn fn, size_t work_units) 1442 REQUIRES(!*Locks::mutator_lock_) { 1443 Thread* self = Thread::Current(); 1444 self->AssertNoPendingException(); 1445 CHECK_GT(work_units, 0U); 1446 1447 index_.store(begin, std::memory_order_relaxed); 1448 for (size_t i = 0; i < work_units; ++i) { 1449 thread_pool_->AddTask(self, new ForAllClosureLambda<Fn>(this, end, fn)); 1450 } 1451 thread_pool_->StartWorkers(self); 1452 1453 // Ensure we're suspended while we're blocked waiting for the other threads to finish (worker 1454 // thread destructor's called below perform join). 1455 CHECK_NE(self->GetState(), kRunnable); 1456 1457 // Wait for all the worker threads to finish. 1458 thread_pool_->Wait(self, true, false); 1459 1460 // And stop the workers accepting jobs. 1461 thread_pool_->StopWorkers(self); 1462 } 1463 1464 size_t NextIndex() { 1465 return index_.fetch_add(1, std::memory_order_seq_cst); 1466 } 1467 1468 private: 1469 template <typename Fn> 1470 class ForAllClosureLambda : public Task { 1471 public: 1472 ForAllClosureLambda(ParallelCompilationManager* manager, size_t end, Fn fn) 1473 : manager_(manager), 1474 end_(end), 1475 fn_(fn) {} 1476 1477 void Run(Thread* self) override { 1478 while (true) { 1479 const size_t index = manager_->NextIndex(); 1480 if (UNLIKELY(index >= end_)) { 1481 break; 1482 } 1483 fn_(index); 1484 self->AssertNoPendingException(); 1485 } 1486 } 1487 1488 void Finalize() override { 1489 delete this; 1490 } 1491 1492 private: 1493 ParallelCompilationManager* const manager_; 1494 const size_t end_; 1495 Fn fn_; 1496 }; 1497 1498 AtomicInteger index_; 1499 ClassLinker* const class_linker_; 1500 const jobject class_loader_; 1501 CompilerDriver* const compiler_; 1502 const DexFile* const dex_file_; 1503 const std::vector<const DexFile*>& dex_files_; 1504 ThreadPool* const thread_pool_; 1505 1506 DISALLOW_COPY_AND_ASSIGN(ParallelCompilationManager); 1507 }; 1508 1509 // A fast version of SkipClass above if the class pointer is available 1510 // that avoids the expensive FindInClassPath search. 1511 static bool SkipClass(jobject class_loader, const DexFile& dex_file, ObjPtr<mirror::Class> klass) 1512 REQUIRES_SHARED(Locks::mutator_lock_) { 1513 DCHECK(klass != nullptr); 1514 const DexFile& original_dex_file = *klass->GetDexCache()->GetDexFile(); 1515 if (&dex_file != &original_dex_file) { 1516 if (class_loader == nullptr) { 1517 LOG(WARNING) << "Skipping class " << klass->PrettyDescriptor() << " from " 1518 << dex_file.GetLocation() << " previously found in " 1519 << original_dex_file.GetLocation(); 1520 } 1521 return true; 1522 } 1523 return false; 1524 } 1525 1526 static void CheckAndClearResolveException(Thread* self) 1527 REQUIRES_SHARED(Locks::mutator_lock_) { 1528 CHECK(self->IsExceptionPending()); 1529 mirror::Throwable* exception = self->GetException(); 1530 std::string temp; 1531 const char* descriptor = exception->GetClass()->GetDescriptor(&temp); 1532 const char* expected_exceptions[] = { 1533 "Ljava/lang/ClassFormatError;", 1534 "Ljava/lang/ClassCircularityError;", 1535 "Ljava/lang/IllegalAccessError;", 1536 "Ljava/lang/IncompatibleClassChangeError;", 1537 "Ljava/lang/InstantiationError;", 1538 "Ljava/lang/LinkageError;", 1539 "Ljava/lang/NoClassDefFoundError;", 1540 "Ljava/lang/NoSuchFieldError;", 1541 "Ljava/lang/NoSuchMethodError;", 1542 "Ljava/lang/VerifyError;", 1543 }; 1544 bool found = false; 1545 for (size_t i = 0; (found == false) && (i < arraysize(expected_exceptions)); ++i) { 1546 if (strcmp(descriptor, expected_exceptions[i]) == 0) { 1547 found = true; 1548 } 1549 } 1550 if (!found) { 1551 LOG(FATAL) << "Unexpected exception " << exception->Dump(); 1552 } 1553 self->ClearException(); 1554 } 1555 1556 class ResolveClassFieldsAndMethodsVisitor : public CompilationVisitor { 1557 public: 1558 explicit ResolveClassFieldsAndMethodsVisitor(const ParallelCompilationManager* manager) 1559 : manager_(manager) {} 1560 1561 void Visit(size_t class_def_index) override REQUIRES(!Locks::mutator_lock_) { 1562 ScopedTrace trace(__FUNCTION__); 1563 Thread* const self = Thread::Current(); 1564 jobject jclass_loader = manager_->GetClassLoader(); 1565 const DexFile& dex_file = *manager_->GetDexFile(); 1566 ClassLinker* class_linker = manager_->GetClassLinker(); 1567 1568 // Method and Field are the worst. We can't resolve without either 1569 // context from the code use (to disambiguate virtual vs direct 1570 // method and instance vs static field) or from class 1571 // definitions. While the compiler will resolve what it can as it 1572 // needs it, here we try to resolve fields and methods used in class 1573 // definitions, since many of them many never be referenced by 1574 // generated code. 1575 const dex::ClassDef& class_def = dex_file.GetClassDef(class_def_index); 1576 ScopedObjectAccess soa(self); 1577 StackHandleScope<2> hs(soa.Self()); 1578 Handle<mirror::ClassLoader> class_loader( 1579 hs.NewHandle(soa.Decode<mirror::ClassLoader>(jclass_loader))); 1580 Handle<mirror::DexCache> dex_cache(hs.NewHandle(class_linker->FindDexCache( 1581 soa.Self(), dex_file))); 1582 // Resolve the class. 1583 ObjPtr<mirror::Class> klass = 1584 class_linker->ResolveType(class_def.class_idx_, dex_cache, class_loader); 1585 bool resolve_fields_and_methods; 1586 if (klass == nullptr) { 1587 // Class couldn't be resolved, for example, super-class is in a different dex file. Don't 1588 // attempt to resolve methods and fields when there is no declaring class. 1589 CheckAndClearResolveException(soa.Self()); 1590 resolve_fields_and_methods = false; 1591 } else { 1592 // We successfully resolved a class, should we skip it? 1593 if (SkipClass(jclass_loader, dex_file, klass)) { 1594 return; 1595 } 1596 // We want to resolve the methods and fields eagerly. 1597 resolve_fields_and_methods = true; 1598 } 1599 1600 if (resolve_fields_and_methods) { 1601 ClassAccessor accessor(dex_file, class_def_index); 1602 // Optionally resolve fields and methods and figure out if we need a constructor barrier. 1603 auto method_visitor = [&](const ClassAccessor::Method& method) 1604 REQUIRES_SHARED(Locks::mutator_lock_) { 1605 ArtMethod* resolved = class_linker->ResolveMethod<ClassLinker::ResolveMode::kNoChecks>( 1606 method.GetIndex(), 1607 dex_cache, 1608 class_loader, 1609 /*referrer=*/ nullptr, 1610 method.GetInvokeType(class_def.access_flags_)); 1611 if (resolved == nullptr) { 1612 CheckAndClearResolveException(soa.Self()); 1613 } 1614 }; 1615 accessor.VisitFieldsAndMethods( 1616 // static fields 1617 [&](ClassAccessor::Field& field) REQUIRES_SHARED(Locks::mutator_lock_) { 1618 ArtField* resolved = class_linker->ResolveField( 1619 field.GetIndex(), dex_cache, class_loader, /*is_static=*/ true); 1620 if (resolved == nullptr) { 1621 CheckAndClearResolveException(soa.Self()); 1622 } 1623 }, 1624 // instance fields 1625 [&](ClassAccessor::Field& field) REQUIRES_SHARED(Locks::mutator_lock_) { 1626 ArtField* resolved = class_linker->ResolveField( 1627 field.GetIndex(), dex_cache, class_loader, /*is_static=*/ false); 1628 if (resolved == nullptr) { 1629 CheckAndClearResolveException(soa.Self()); 1630 } 1631 }, 1632 /*direct_method_visitor=*/ method_visitor, 1633 /*virtual_method_visitor=*/ method_visitor); 1634 } 1635 } 1636 1637 private: 1638 const ParallelCompilationManager* const manager_; 1639 }; 1640 1641 class ResolveTypeVisitor : public CompilationVisitor { 1642 public: 1643 explicit ResolveTypeVisitor(const ParallelCompilationManager* manager) : manager_(manager) { 1644 } 1645 void Visit(size_t type_idx) override REQUIRES(!Locks::mutator_lock_) { 1646 // Class derived values are more complicated, they require the linker and loader. 1647 ScopedObjectAccess soa(Thread::Current()); 1648 ClassLinker* class_linker = manager_->GetClassLinker(); 1649 const DexFile& dex_file = *manager_->GetDexFile(); 1650 StackHandleScope<2> hs(soa.Self()); 1651 Handle<mirror::ClassLoader> class_loader( 1652 hs.NewHandle(soa.Decode<mirror::ClassLoader>(manager_->GetClassLoader()))); 1653 Handle<mirror::DexCache> dex_cache(hs.NewHandle(class_linker->RegisterDexFile( 1654 dex_file, 1655 class_loader.Get()))); 1656 ObjPtr<mirror::Class> klass = (dex_cache != nullptr) 1657 ? class_linker->ResolveType(dex::TypeIndex(type_idx), dex_cache, class_loader) 1658 : nullptr; 1659 1660 if (klass == nullptr) { 1661 soa.Self()->AssertPendingException(); 1662 mirror::Throwable* exception = soa.Self()->GetException(); 1663 VLOG(compiler) << "Exception during type resolution: " << exception->Dump(); 1664 if (exception->GetClass()->DescriptorEquals("Ljava/lang/OutOfMemoryError;")) { 1665 // There's little point continuing compilation if the heap is exhausted. 1666 LOG(FATAL) << "Out of memory during type resolution for compilation"; 1667 } 1668 soa.Self()->ClearException(); 1669 } 1670 } 1671 1672 private: 1673 const ParallelCompilationManager* const manager_; 1674 }; 1675 1676 void CompilerDriver::ResolveDexFile(jobject class_loader, 1677 const DexFile& dex_file, 1678 const std::vector<const DexFile*>& dex_files, 1679 ThreadPool* thread_pool, 1680 size_t thread_count, 1681 TimingLogger* timings) { 1682 ClassLinker* class_linker = Runtime::Current()->GetClassLinker(); 1683 1684 // TODO: we could resolve strings here, although the string table is largely filled with class 1685 // and method names. 1686 1687 ParallelCompilationManager context(class_linker, class_loader, this, &dex_file, dex_files, 1688 thread_pool); 1689 if (GetCompilerOptions().IsBootImage()) { 1690 // For images we resolve all types, such as array, whereas for applications just those with 1691 // classdefs are resolved by ResolveClassFieldsAndMethods. 1692 TimingLogger::ScopedTiming t("Resolve Types", timings); 1693 ResolveTypeVisitor visitor(&context); 1694 context.ForAll(0, dex_file.NumTypeIds(), &visitor, thread_count); 1695 } 1696 1697 TimingLogger::ScopedTiming t("Resolve MethodsAndFields", timings); 1698 ResolveClassFieldsAndMethodsVisitor visitor(&context); 1699 context.ForAll(0, dex_file.NumClassDefs(), &visitor, thread_count); 1700 } 1701 1702 void CompilerDriver::SetVerified(jobject class_loader, 1703 const std::vector<const DexFile*>& dex_files, 1704 TimingLogger* timings) { 1705 // This can be run in parallel. 1706 for (const DexFile* dex_file : dex_files) { 1707 CHECK(dex_file != nullptr); 1708 SetVerifiedDexFile(class_loader, 1709 *dex_file, 1710 dex_files, 1711 parallel_thread_pool_.get(), 1712 parallel_thread_count_, 1713 timings); 1714 } 1715 } 1716 1717 static void LoadAndUpdateStatus(const ClassAccessor& accessor, 1718 ClassStatus status, 1719 Handle<mirror::ClassLoader> class_loader, 1720 Thread* self) 1721 REQUIRES_SHARED(Locks::mutator_lock_) { 1722 StackHandleScope<1> hs(self); 1723 const char* descriptor = accessor.GetDescriptor(); 1724 ClassLinker* class_linker = Runtime::Current()->GetClassLinker(); 1725 Handle<mirror::Class> cls(hs.NewHandle<mirror::Class>( 1726 class_linker->FindClass(self, descriptor, class_loader))); 1727 if (cls != nullptr) { 1728 // Check that the class is resolved with the current dex file. We might get 1729 // a boot image class, or a class in a different dex file for multidex, and 1730 // we should not update the status in that case. 1731 if (&cls->GetDexFile() == &accessor.GetDexFile()) { 1732 ObjectLock<mirror::Class> lock(self, cls); 1733 mirror::Class::SetStatus(cls, status, self); 1734 if (status >= ClassStatus::kVerified) { 1735 cls->SetVerificationAttempted(); 1736 } 1737 } 1738 } else { 1739 DCHECK(self->IsExceptionPending()); 1740 self->ClearException(); 1741 } 1742 } 1743 1744 bool CompilerDriver::FastVerify(jobject jclass_loader, 1745 const std::vector<const DexFile*>& dex_files, 1746 TimingLogger* timings, 1747 /*out*/ VerificationResults* verification_results) { 1748 verifier::VerifierDeps* verifier_deps = 1749 Runtime::Current()->GetCompilerCallbacks()->GetVerifierDeps(); 1750 // If there exist VerifierDeps that aren't the ones we just created to output, use them to verify. 1751 if (verifier_deps == nullptr || verifier_deps->OutputOnly()) { 1752 return false; 1753 } 1754 TimingLogger::ScopedTiming t("Fast Verify", timings); 1755 1756 ScopedObjectAccess soa(Thread::Current()); 1757 StackHandleScope<2> hs(soa.Self()); 1758 Handle<mirror::ClassLoader> class_loader( 1759 hs.NewHandle(soa.Decode<mirror::ClassLoader>(jclass_loader))); 1760 std::string error_msg; 1761 1762 if (!verifier_deps->ValidateDependencies( 1763 soa.Self(), 1764 class_loader, 1765 // This returns classpath dex files in no particular order but VerifierDeps 1766 // does not care about the order. 1767 classpath_classes_.GetDexFiles(), 1768 &error_msg)) { 1769 LOG(WARNING) << "Fast verification failed: " << error_msg; 1770 return false; 1771 } 1772 1773 bool compiler_only_verifies = !GetCompilerOptions().IsAnyCompilationEnabled(); 1774 1775 // We successfully validated the dependencies, now update class status 1776 // of verified classes. Note that the dependencies also record which classes 1777 // could not be fully verified; we could try again, but that would hurt verification 1778 // time. So instead we assume these classes still need to be verified at 1779 // runtime. 1780 for (const DexFile* dex_file : dex_files) { 1781 // Fetch the list of verified classes. 1782 const std::vector<bool>& verified_classes = verifier_deps->GetVerifiedClasses(*dex_file); 1783 DCHECK_EQ(verified_classes.size(), dex_file->NumClassDefs()); 1784 for (ClassAccessor accessor : dex_file->GetClasses()) { 1785 if (verified_classes[accessor.GetClassDefIndex()]) { 1786 if (compiler_only_verifies) { 1787 // Just update the compiled_classes_ map. The compiler doesn't need to resolve 1788 // the type. 1789 ClassReference ref(dex_file, accessor.GetClassDefIndex()); 1790 const ClassStatus existing = ClassStatus::kNotReady; 1791 ClassStateTable::InsertResult result = 1792 compiled_classes_.Insert(ref, existing, ClassStatus::kVerified); 1793 CHECK_EQ(result, ClassStateTable::kInsertResultSuccess) << ref.dex_file->GetLocation(); 1794 } else { 1795 // Update the class status, so later compilation stages know they don't need to verify 1796 // the class. 1797 LoadAndUpdateStatus(accessor, ClassStatus::kVerified, class_loader, soa.Self()); 1798 // Create `VerifiedMethod`s for each methods, the compiler expects one for 1799 // quickening or compiling. 1800 // Note that this means: 1801 // - We're only going to compile methods that did verify. 1802 // - Quickening will not do checkcast ellision. 1803 // TODO(ngeoffray): Reconsider this once we refactor compiler filters. 1804 for (const ClassAccessor::Method& method : accessor.GetMethods()) { 1805 verification_results->CreateVerifiedMethodFor(method.GetReference()); 1806 } 1807 } 1808 } else if (!compiler_only_verifies) { 1809 // Make sure later compilation stages know they should not try to verify 1810 // this class again. 1811 LoadAndUpdateStatus(accessor, 1812 ClassStatus::kRetryVerificationAtRuntime, 1813 class_loader, 1814 soa.Self()); 1815 } 1816 } 1817 } 1818 return true; 1819 } 1820 1821 void CompilerDriver::Verify(jobject jclass_loader, 1822 const std::vector<const DexFile*>& dex_files, 1823 TimingLogger* timings, 1824 /*out*/ VerificationResults* verification_results) { 1825 if (FastVerify(jclass_loader, dex_files, timings, verification_results)) { 1826 return; 1827 } 1828 1829 // If there is no existing `verifier_deps` (because of non-existing vdex), or 1830 // the existing `verifier_deps` is not valid anymore, create a new one for 1831 // non boot image compilation. The verifier will need it to record the new dependencies. 1832 // Then dex2oat can update the vdex file with these new dependencies. 1833 if (!GetCompilerOptions().IsBootImage()) { 1834 // Dex2oat creates the verifier deps. 1835 // Create the main VerifierDeps, and set it to this thread. 1836 verifier::VerifierDeps* verifier_deps = 1837 Runtime::Current()->GetCompilerCallbacks()->GetVerifierDeps(); 1838 CHECK(verifier_deps != nullptr); 1839 Thread::Current()->SetVerifierDeps(verifier_deps); 1840 // Create per-thread VerifierDeps to avoid contention on the main one. 1841 // We will merge them after verification. 1842 for (ThreadPoolWorker* worker : parallel_thread_pool_->GetWorkers()) { 1843 worker->GetThread()->SetVerifierDeps( 1844 new verifier::VerifierDeps(GetCompilerOptions().GetDexFilesForOatFile())); 1845 } 1846 } 1847 1848 // Verification updates VerifierDeps and needs to run single-threaded to be deterministic. 1849 bool force_determinism = GetCompilerOptions().IsForceDeterminism(); 1850 ThreadPool* verify_thread_pool = 1851 force_determinism ? single_thread_pool_.get() : parallel_thread_pool_.get(); 1852 size_t verify_thread_count = force_determinism ? 1U : parallel_thread_count_; 1853 for (const DexFile* dex_file : dex_files) { 1854 CHECK(dex_file != nullptr); 1855 VerifyDexFile(jclass_loader, 1856 *dex_file, 1857 dex_files, 1858 verify_thread_pool, 1859 verify_thread_count, 1860 timings); 1861 } 1862 1863 if (!GetCompilerOptions().IsBootImage()) { 1864 // Merge all VerifierDeps into the main one. 1865 verifier::VerifierDeps* verifier_deps = Thread::Current()->GetVerifierDeps(); 1866 for (ThreadPoolWorker* worker : parallel_thread_pool_->GetWorkers()) { 1867 std::unique_ptr<verifier::VerifierDeps> thread_deps(worker->GetThread()->GetVerifierDeps()); 1868 worker->GetThread()->SetVerifierDeps(nullptr); // We just took ownership. 1869 verifier_deps->MergeWith(std::move(thread_deps), 1870 GetCompilerOptions().GetDexFilesForOatFile()); 1871 } 1872 Thread::Current()->SetVerifierDeps(nullptr); 1873 } 1874 } 1875 1876 class VerifyClassVisitor : public CompilationVisitor { 1877 public: 1878 VerifyClassVisitor(const ParallelCompilationManager* manager, verifier::HardFailLogMode log_level) 1879 : manager_(manager), 1880 log_level_(log_level), 1881 sdk_version_(Runtime::Current()->GetTargetSdkVersion()) {} 1882 1883 void Visit(size_t class_def_index) REQUIRES(!Locks::mutator_lock_) override { 1884 ScopedTrace trace(__FUNCTION__); 1885 ScopedObjectAccess soa(Thread::Current()); 1886 const DexFile& dex_file = *manager_->GetDexFile(); 1887 const dex::ClassDef& class_def = dex_file.GetClassDef(class_def_index); 1888 const char* descriptor = dex_file.GetClassDescriptor(class_def); 1889 ClassLinker* class_linker = manager_->GetClassLinker(); 1890 jobject jclass_loader = manager_->GetClassLoader(); 1891 StackHandleScope<3> hs(soa.Self()); 1892 Handle<mirror::ClassLoader> class_loader( 1893 hs.NewHandle(soa.Decode<mirror::ClassLoader>(jclass_loader))); 1894 Handle<mirror::Class> klass( 1895 hs.NewHandle(class_linker->FindClass(soa.Self(), descriptor, class_loader))); 1896 verifier::FailureKind failure_kind; 1897 if (klass == nullptr) { 1898 CHECK(soa.Self()->IsExceptionPending()); 1899 soa.Self()->ClearException(); 1900 1901 /* 1902 * At compile time, we can still structurally verify the class even if FindClass fails. 1903 * This is to ensure the class is structurally sound for compilation. An unsound class 1904 * will be rejected by the verifier and later skipped during compilation in the compiler. 1905 */ 1906 Handle<mirror::DexCache> dex_cache(hs.NewHandle(class_linker->FindDexCache( 1907 soa.Self(), dex_file))); 1908 std::string error_msg; 1909 failure_kind = 1910 verifier::ClassVerifier::VerifyClass(soa.Self(), 1911 &dex_file, 1912 dex_cache, 1913 class_loader, 1914 class_def, 1915 Runtime::Current()->GetCompilerCallbacks(), 1916 true /* allow soft failures */, 1917 log_level_, 1918 sdk_version_, 1919 &error_msg); 1920 if (failure_kind == verifier::FailureKind::kHardFailure) { 1921 LOG(ERROR) << "Verification failed on class " << PrettyDescriptor(descriptor) 1922 << " because: " << error_msg; 1923 manager_->GetCompiler()->SetHadHardVerifierFailure(); 1924 } else if (failure_kind == verifier::FailureKind::kSoftFailure) { 1925 manager_->GetCompiler()->AddSoftVerifierFailure(); 1926 } else { 1927 // Force a soft failure for the VerifierDeps. This is a sanity measure, as 1928 // the vdex file already records that the class hasn't been resolved. It avoids 1929 // trying to do future verification optimizations when processing the vdex file. 1930 DCHECK(failure_kind == verifier::FailureKind::kNoFailure) << failure_kind; 1931 failure_kind = verifier::FailureKind::kSoftFailure; 1932 } 1933 } else if (&klass->GetDexFile() != &dex_file) { 1934 // Skip a duplicate class (as the resolved class is from another, earlier dex file). 1935 // Record the information that we skipped this class in the vdex. 1936 // If the class resolved to a dex file not covered by the vdex, e.g. boot class path, 1937 // it is considered external, dependencies on it will be recorded and the vdex will 1938 // remain usable regardless of whether the class remains redefined or not (in the 1939 // latter case, this class will be verify-at-runtime). 1940 // On the other hand, if the class resolved to a dex file covered by the vdex, i.e. 1941 // a different dex file within the same APK, this class will always be eclipsed by it. 1942 // Recording that it was redefined is not necessary but will save class resolution 1943 // time during fast-verify. 1944 verifier::VerifierDeps::MaybeRecordClassRedefinition(dex_file, class_def); 1945 return; // Do not update state. 1946 } else if (!SkipClass(jclass_loader, dex_file, klass.Get())) { 1947 CHECK(klass->IsResolved()) << klass->PrettyClass(); 1948 failure_kind = class_linker->VerifyClass(soa.Self(), klass, log_level_); 1949 1950 if (klass->IsErroneous()) { 1951 // ClassLinker::VerifyClass throws, which isn't useful in the compiler. 1952 CHECK(soa.Self()->IsExceptionPending()); 1953 soa.Self()->ClearException(); 1954 manager_->GetCompiler()->SetHadHardVerifierFailure(); 1955 } else if (failure_kind == verifier::FailureKind::kSoftFailure) { 1956 manager_->GetCompiler()->AddSoftVerifierFailure(); 1957 } 1958 1959 CHECK(klass->ShouldVerifyAtRuntime() || klass->IsVerified() || klass->IsErroneous()) 1960 << klass->PrettyDescriptor() << ": state=" << klass->GetStatus(); 1961 1962 // Class has a meaningful status for the compiler now, record it. 1963 ClassReference ref(manager_->GetDexFile(), class_def_index); 1964 manager_->GetCompiler()->RecordClassStatus(ref, klass->GetStatus()); 1965 1966 // It is *very* problematic if there are resolution errors in the boot classpath. 1967 // 1968 // It is also bad if classes fail verification. For example, we rely on things working 1969 // OK without verification when the decryption dialog is brought up. It is thus highly 1970 // recommended to compile the boot classpath with 1971 // --abort-on-hard-verifier-error --abort-on-soft-verifier-error 1972 // which is the default build system configuration. 1973 if (kIsDebugBuild) { 1974 if (manager_->GetCompiler()->GetCompilerOptions().IsBootImage()) { 1975 if (!klass->IsResolved() || klass->IsErroneous()) { 1976 LOG(FATAL) << "Boot classpath class " << klass->PrettyClass() 1977 << " failed to resolve/is erroneous: state= " << klass->GetStatus(); 1978 UNREACHABLE(); 1979 } 1980 } 1981 if (klass->IsVerified()) { 1982 DCHECK_EQ(failure_kind, verifier::FailureKind::kNoFailure); 1983 } else if (klass->ShouldVerifyAtRuntime()) { 1984 DCHECK_EQ(failure_kind, verifier::FailureKind::kSoftFailure); 1985 } else { 1986 DCHECK_EQ(failure_kind, verifier::FailureKind::kHardFailure); 1987 } 1988 } 1989 } else { 1990 // Make the skip a soft failure, essentially being considered as verify at runtime. 1991 failure_kind = verifier::FailureKind::kSoftFailure; 1992 } 1993 verifier::VerifierDeps::MaybeRecordVerificationStatus(dex_file, class_def, failure_kind); 1994 soa.Self()->AssertNoPendingException(); 1995 } 1996 1997 private: 1998 const ParallelCompilationManager* const manager_; 1999 const verifier::HardFailLogMode log_level_; 2000 const uint32_t sdk_version_; 2001 }; 2002 2003 void CompilerDriver::VerifyDexFile(jobject class_loader, 2004 const DexFile& dex_file, 2005 const std::vector<const DexFile*>& dex_files, 2006 ThreadPool* thread_pool, 2007 size_t thread_count, 2008 TimingLogger* timings) { 2009 TimingLogger::ScopedTiming t("Verify Dex File", timings); 2010 ClassLinker* class_linker = Runtime::Current()->GetClassLinker(); 2011 ParallelCompilationManager context(class_linker, class_loader, this, &dex_file, dex_files, 2012 thread_pool); 2013 bool abort_on_verifier_failures = GetCompilerOptions().AbortOnHardVerifierFailure() 2014 || GetCompilerOptions().AbortOnSoftVerifierFailure(); 2015 verifier::HardFailLogMode log_level = abort_on_verifier_failures 2016 ? verifier::HardFailLogMode::kLogInternalFatal 2017 : verifier::HardFailLogMode::kLogWarning; 2018 VerifyClassVisitor visitor(&context, log_level); 2019 context.ForAll(0, dex_file.NumClassDefs(), &visitor, thread_count); 2020 } 2021 2022 class SetVerifiedClassVisitor : public CompilationVisitor { 2023 public: 2024 explicit SetVerifiedClassVisitor(const ParallelCompilationManager* manager) : manager_(manager) {} 2025 2026 void Visit(size_t class_def_index) REQUIRES(!Locks::mutator_lock_) override { 2027 ScopedTrace trace(__FUNCTION__); 2028 ScopedObjectAccess soa(Thread::Current()); 2029 const DexFile& dex_file = *manager_->GetDexFile(); 2030 const dex::ClassDef& class_def = dex_file.GetClassDef(class_def_index); 2031 const char* descriptor = dex_file.GetClassDescriptor(class_def); 2032 ClassLinker* class_linker = manager_->GetClassLinker(); 2033 jobject jclass_loader = manager_->GetClassLoader(); 2034 StackHandleScope<3> hs(soa.Self()); 2035 Handle<mirror::ClassLoader> class_loader( 2036 hs.NewHandle(soa.Decode<mirror::ClassLoader>(jclass_loader))); 2037 Handle<mirror::Class> klass( 2038 hs.NewHandle(class_linker->FindClass(soa.Self(), descriptor, class_loader))); 2039 // Class might have failed resolution. Then don't set it to verified. 2040 if (klass != nullptr) { 2041 // Only do this if the class is resolved. If even resolution fails, quickening will go very, 2042 // very wrong. 2043 if (klass->IsResolved() && !klass->IsErroneousResolved()) { 2044 if (klass->GetStatus() < ClassStatus::kVerified) { 2045 ObjectLock<mirror::Class> lock(soa.Self(), klass); 2046 // Set class status to verified. 2047 mirror::Class::SetStatus(klass, ClassStatus::kVerified, soa.Self()); 2048 // Mark methods as pre-verified. If we don't do this, the interpreter will run with 2049 // access checks. 2050 InstructionSet instruction_set = 2051 manager_->GetCompiler()->GetCompilerOptions().GetInstructionSet(); 2052 klass->SetSkipAccessChecksFlagOnAllMethods(GetInstructionSetPointerSize(instruction_set)); 2053 klass->SetVerificationAttempted(); 2054 } 2055 // Record the final class status if necessary. 2056 ClassReference ref(manager_->GetDexFile(), class_def_index); 2057 manager_->GetCompiler()->RecordClassStatus(ref, klass->GetStatus()); 2058 } 2059 } else { 2060 Thread* self = soa.Self(); 2061 DCHECK(self->IsExceptionPending()); 2062 self->ClearException(); 2063 } 2064 } 2065 2066 private: 2067 const ParallelCompilationManager* const manager_; 2068 }; 2069 2070 void CompilerDriver::SetVerifiedDexFile(jobject class_loader, 2071 const DexFile& dex_file, 2072 const std::vector<const DexFile*>& dex_files, 2073 ThreadPool* thread_pool, 2074 size_t thread_count, 2075 TimingLogger* timings) { 2076 TimingLogger::ScopedTiming t("Verify Dex File", timings); 2077 if (!compiled_classes_.HaveDexFile(&dex_file)) { 2078 compiled_classes_.AddDexFile(&dex_file); 2079 } 2080 ClassLinker* class_linker = Runtime::Current()->GetClassLinker(); 2081 ParallelCompilationManager context(class_linker, class_loader, this, &dex_file, dex_files, 2082 thread_pool); 2083 SetVerifiedClassVisitor visitor(&context); 2084 context.ForAll(0, dex_file.NumClassDefs(), &visitor, thread_count); 2085 } 2086 2087 class InitializeClassVisitor : public CompilationVisitor { 2088 public: 2089 explicit InitializeClassVisitor(const ParallelCompilationManager* manager) : manager_(manager) {} 2090 2091 void Visit(size_t class_def_index) override { 2092 ScopedTrace trace(__FUNCTION__); 2093 jobject jclass_loader = manager_->GetClassLoader(); 2094 const DexFile& dex_file = *manager_->GetDexFile(); 2095 const dex::ClassDef& class_def = dex_file.GetClassDef(class_def_index); 2096 const dex::TypeId& class_type_id = dex_file.GetTypeId(class_def.class_idx_); 2097 const char* descriptor = dex_file.StringDataByIdx(class_type_id.descriptor_idx_); 2098 2099 ScopedObjectAccess soa(Thread::Current()); 2100 StackHandleScope<3> hs(soa.Self()); 2101 Handle<mirror::ClassLoader> class_loader( 2102 hs.NewHandle(soa.Decode<mirror::ClassLoader>(jclass_loader))); 2103 Handle<mirror::Class> klass( 2104 hs.NewHandle(manager_->GetClassLinker()->FindClass(soa.Self(), descriptor, class_loader))); 2105 2106 if (klass != nullptr) { 2107 if (!SkipClass(manager_->GetClassLoader(), dex_file, klass.Get())) { 2108 TryInitializeClass(klass, class_loader); 2109 } 2110 manager_->GetCompiler()->stats_->AddClassStatus(klass->GetStatus()); 2111 } 2112 // Clear any class not found or verification exceptions. 2113 soa.Self()->ClearException(); 2114 } 2115 2116 // A helper function for initializing klass. 2117 void TryInitializeClass(Handle<mirror::Class> klass, Handle<mirror::ClassLoader>& class_loader) 2118 REQUIRES_SHARED(Locks::mutator_lock_) { 2119 const DexFile& dex_file = klass->GetDexFile(); 2120 const dex::ClassDef* class_def = klass->GetClassDef(); 2121 const dex::TypeId& class_type_id = dex_file.GetTypeId(class_def->class_idx_); 2122 const char* descriptor = dex_file.StringDataByIdx(class_type_id.descriptor_idx_); 2123 ScopedObjectAccessUnchecked soa(Thread::Current()); 2124 StackHandleScope<3> hs(soa.Self()); 2125 const bool is_boot_image = manager_->GetCompiler()->GetCompilerOptions().IsBootImage(); 2126 const bool is_app_image = manager_->GetCompiler()->GetCompilerOptions().IsAppImage(); 2127 2128 ClassStatus old_status = klass->GetStatus(); 2129 // Don't initialize classes in boot space when compiling app image 2130 if (is_app_image && klass->IsBootStrapClassLoaded()) { 2131 // Also return early and don't store the class status in the recorded class status. 2132 return; 2133 } 2134 // Only try to initialize classes that were successfully verified. 2135 if (klass->IsVerified()) { 2136 // Attempt to initialize the class but bail if we either need to initialize the super-class 2137 // or static fields. 2138 manager_->GetClassLinker()->EnsureInitialized(soa.Self(), klass, false, false); 2139 old_status = klass->GetStatus(); 2140 if (!klass->IsInitialized()) { 2141 // We don't want non-trivial class initialization occurring on multiple threads due to 2142 // deadlock problems. For example, a parent class is initialized (holding its lock) that 2143 // refers to a sub-class in its static/class initializer causing it to try to acquire the 2144 // sub-class' lock. While on a second thread the sub-class is initialized (holding its lock) 2145 // after first initializing its parents, whose locks are acquired. This leads to a 2146 // parent-to-child and a child-to-parent lock ordering and consequent potential deadlock. 2147 // We need to use an ObjectLock due to potential suspension in the interpreting code. Rather 2148 // than use a special Object for the purpose we use the Class of java.lang.Class. 2149 Handle<mirror::Class> h_klass(hs.NewHandle(klass->GetClass())); 2150 ObjectLock<mirror::Class> lock(soa.Self(), h_klass); 2151 // Attempt to initialize allowing initialization of parent classes but still not static 2152 // fields. 2153 // Initialize dependencies first only for app image, to make TryInitialize recursive. 2154 bool is_superclass_initialized = !is_app_image ? true : 2155 InitializeDependencies(klass, class_loader, soa.Self()); 2156 if (!is_app_image || (is_app_image && is_superclass_initialized)) { 2157 manager_->GetClassLinker()->EnsureInitialized(soa.Self(), klass, false, true); 2158 // It's OK to clear the exception here since the compiler is supposed to be fault 2159 // tolerant and will silently not initialize classes that have exceptions. 2160 soa.Self()->ClearException(); 2161 } 2162 // Otherwise it's in app image but superclasses can't be initialized, no need to proceed. 2163 old_status = klass->GetStatus(); 2164 2165 bool too_many_encoded_fields = !is_boot_image && 2166 klass->NumStaticFields() > kMaxEncodedFields; 2167 2168 // If the class was not initialized, we can proceed to see if we can initialize static 2169 // fields. Limit the max number of encoded fields. 2170 if (!klass->IsInitialized() && 2171 (is_app_image || is_boot_image) && 2172 is_superclass_initialized && 2173 !too_many_encoded_fields && 2174 manager_->GetCompiler()->GetCompilerOptions().IsImageClass(descriptor)) { 2175 bool can_init_static_fields = false; 2176 if (is_boot_image) { 2177 // We need to initialize static fields, we only do this for image classes that aren't 2178 // marked with the $NoPreloadHolder (which implies this should not be initialized 2179 // early). 2180 can_init_static_fields = !EndsWith(std::string_view(descriptor), "$NoPreloadHolder;"); 2181 } else { 2182 CHECK(is_app_image); 2183 // The boot image case doesn't need to recursively initialize the dependencies with 2184 // special logic since the class linker already does this. 2185 can_init_static_fields = 2186 ClassLinker::kAppImageMayContainStrings && 2187 !soa.Self()->IsExceptionPending() && 2188 is_superclass_initialized && 2189 NoClinitInDependency(klass, soa.Self(), &class_loader); 2190 // TODO The checking for clinit can be removed since it's already 2191 // checked when init superclass. Currently keep it because it contains 2192 // processing of intern strings. Will be removed later when intern strings 2193 // and clinit are both initialized. 2194 } 2195 2196 if (can_init_static_fields) { 2197 VLOG(compiler) << "Initializing: " << descriptor; 2198 // TODO multithreading support. We should ensure the current compilation thread has 2199 // exclusive access to the runtime and the transaction. To achieve this, we could use 2200 // a ReaderWriterMutex but we're holding the mutator lock so we fail mutex sanity 2201 // checks in Thread::AssertThreadSuspensionIsAllowable. 2202 Runtime* const runtime = Runtime::Current(); 2203 // Run the class initializer in transaction mode. 2204 runtime->EnterTransactionMode(is_app_image, klass.Get()); 2205 2206 bool success = manager_->GetClassLinker()->EnsureInitialized(soa.Self(), klass, true, 2207 true); 2208 // TODO we detach transaction from runtime to indicate we quit the transactional 2209 // mode which prevents the GC from visiting objects modified during the transaction. 2210 // Ensure GC is not run so don't access freed objects when aborting transaction. 2211 2212 { 2213 ScopedAssertNoThreadSuspension ants("Transaction end"); 2214 2215 if (success) { 2216 runtime->ExitTransactionMode(); 2217 DCHECK(!runtime->IsActiveTransaction()); 2218 2219 if (is_boot_image) { 2220 // For boot image, we want to put the updated status in the oat class since we 2221 // can't reject the image anyways. 2222 old_status = klass->GetStatus(); 2223 } 2224 } else { 2225 CHECK(soa.Self()->IsExceptionPending()); 2226 mirror::Throwable* exception = soa.Self()->GetException(); 2227 VLOG(compiler) << "Initialization of " << descriptor << " aborted because of " 2228 << exception->Dump(); 2229 std::ostream* file_log = manager_->GetCompiler()-> 2230 GetCompilerOptions().GetInitFailureOutput(); 2231 if (file_log != nullptr) { 2232 *file_log << descriptor << "\n"; 2233 *file_log << exception->Dump() << "\n"; 2234 } 2235 soa.Self()->ClearException(); 2236 runtime->RollbackAllTransactions(); 2237 CHECK_EQ(old_status, klass->GetStatus()) << "Previous class status not restored"; 2238 } 2239 } 2240 2241 if (!success) { 2242 // On failure, still intern strings of static fields and seen in <clinit>, as these 2243 // will be created in the zygote. This is separated from the transaction code just 2244 // above as we will allocate strings, so must be allowed to suspend. 2245 if (&klass->GetDexFile() == manager_->GetDexFile()) { 2246 InternStrings(klass, class_loader); 2247 } else { 2248 DCHECK(!is_boot_image) << "Boot image must have equal dex files"; 2249 } 2250 } 2251 } 2252 } 2253 // Clear exception in case EnsureInitialized has caused one in the code above. 2254 // It's OK to clear the exception here since the compiler is supposed to be fault 2255 // tolerant and will silently not initialize classes that have exceptions. 2256 soa.Self()->ClearException(); 2257 2258 // If the class still isn't initialized, at least try some checks that initialization 2259 // would do so they can be skipped at runtime. 2260 if (!klass->IsInitialized() && 2261 manager_->GetClassLinker()->ValidateSuperClassDescriptors(klass)) { 2262 old_status = ClassStatus::kSuperclassValidated; 2263 } else { 2264 soa.Self()->ClearException(); 2265 } 2266 soa.Self()->AssertNoPendingException(); 2267 } 2268 } 2269 // Record the final class status if necessary. 2270 ClassReference ref(&dex_file, klass->GetDexClassDefIndex()); 2271 // Back up the status before doing initialization for static encoded fields, 2272 // because the static encoded branch wants to keep the status to uninitialized. 2273 manager_->GetCompiler()->RecordClassStatus(ref, old_status); 2274 } 2275 2276 private: 2277 void InternStrings(Handle<mirror::Class> klass, Handle<mirror::ClassLoader> class_loader) 2278 REQUIRES_SHARED(Locks::mutator_lock_) { 2279 DCHECK(manager_->GetCompiler()->GetCompilerOptions().IsBootImage()); 2280 DCHECK(klass->IsVerified()); 2281 DCHECK(!klass->IsInitialized()); 2282 2283 StackHandleScope<1> hs(Thread::Current()); 2284 Handle<mirror::DexCache> dex_cache = hs.NewHandle(klass->GetDexCache()); 2285 const dex::ClassDef* class_def = klass->GetClassDef(); 2286 ClassLinker* class_linker = manager_->GetClassLinker(); 2287 2288 // Check encoded final field values for strings and intern. 2289 annotations::RuntimeEncodedStaticFieldValueIterator value_it(dex_cache, 2290 class_loader, 2291 manager_->GetClassLinker(), 2292 *class_def); 2293 for ( ; value_it.HasNext(); value_it.Next()) { 2294 if (value_it.GetValueType() == annotations::RuntimeEncodedStaticFieldValueIterator::kString) { 2295 // Resolve the string. This will intern the string. 2296 art::ObjPtr<mirror::String> resolved = class_linker->ResolveString( 2297 dex::StringIndex(value_it.GetJavaValue().i), dex_cache); 2298 CHECK(resolved != nullptr); 2299 } 2300 } 2301 2302 // Intern strings seen in <clinit>. 2303 ArtMethod* clinit = klass->FindClassInitializer(class_linker->GetImagePointerSize()); 2304 if (clinit != nullptr) { 2305 for (const DexInstructionPcPair& inst : clinit->DexInstructions()) { 2306 if (inst->Opcode() == Instruction::CONST_STRING) { 2307 ObjPtr<mirror::String> s = class_linker->ResolveString( 2308 dex::StringIndex(inst->VRegB_21c()), dex_cache); 2309 CHECK(s != nullptr); 2310 } else if (inst->Opcode() == Instruction::CONST_STRING_JUMBO) { 2311 ObjPtr<mirror::String> s = class_linker->ResolveString( 2312 dex::StringIndex(inst->VRegB_31c()), dex_cache); 2313 CHECK(s != nullptr); 2314 } 2315 } 2316 } 2317 } 2318 2319 bool ResolveTypesOfMethods(Thread* self, ArtMethod* m) 2320 REQUIRES_SHARED(Locks::mutator_lock_) { 2321 // Return value of ResolveReturnType() is discarded because resolve will be done internally. 2322 ObjPtr<mirror::Class> rtn_type = m->ResolveReturnType(); 2323 if (rtn_type == nullptr) { 2324 self->ClearException(); 2325 return false; 2326 } 2327 const dex::TypeList* types = m->GetParameterTypeList(); 2328 if (types != nullptr) { 2329 for (uint32_t i = 0; i < types->Size(); ++i) { 2330 dex::TypeIndex param_type_idx = types->GetTypeItem(i).type_idx_; 2331 ObjPtr<mirror::Class> param_type = m->ResolveClassFromTypeIndex(param_type_idx); 2332 if (param_type == nullptr) { 2333 self->ClearException(); 2334 return false; 2335 } 2336 } 2337 } 2338 return true; 2339 } 2340 2341 // Pre resolve types mentioned in all method signatures before start a transaction 2342 // since ResolveType doesn't work in transaction mode. 2343 bool PreResolveTypes(Thread* self, const Handle<mirror::Class>& klass) 2344 REQUIRES_SHARED(Locks::mutator_lock_) { 2345 PointerSize pointer_size = manager_->GetClassLinker()->GetImagePointerSize(); 2346 for (ArtMethod& m : klass->GetMethods(pointer_size)) { 2347 if (!ResolveTypesOfMethods(self, &m)) { 2348 return false; 2349 } 2350 } 2351 if (klass->IsInterface()) { 2352 return true; 2353 } else if (klass->HasSuperClass()) { 2354 StackHandleScope<1> hs(self); 2355 MutableHandle<mirror::Class> super_klass(hs.NewHandle<mirror::Class>(klass->GetSuperClass())); 2356 for (int i = super_klass->GetVTableLength() - 1; i >= 0; --i) { 2357 ArtMethod* m = klass->GetVTableEntry(i, pointer_size); 2358 ArtMethod* super_m = super_klass->GetVTableEntry(i, pointer_size); 2359 if (!ResolveTypesOfMethods(self, m) || !ResolveTypesOfMethods(self, super_m)) { 2360 return false; 2361 } 2362 } 2363 for (int32_t i = 0; i < klass->GetIfTableCount(); ++i) { 2364 super_klass.Assign(klass->GetIfTable()->GetInterface(i)); 2365 if (klass->GetClassLoader() != super_klass->GetClassLoader()) { 2366 uint32_t num_methods = super_klass->NumVirtualMethods(); 2367 for (uint32_t j = 0; j < num_methods; ++j) { 2368 ArtMethod* m = klass->GetIfTable()->GetMethodArray(i)->GetElementPtrSize<ArtMethod*>( 2369 j, pointer_size); 2370 ArtMethod* super_m = super_klass->GetVirtualMethod(j, pointer_size); 2371 if (!ResolveTypesOfMethods(self, m) || !ResolveTypesOfMethods(self, super_m)) { 2372 return false; 2373 } 2374 } 2375 } 2376 } 2377 } 2378 return true; 2379 } 2380 2381 // Initialize the klass's dependencies recursively before initializing itself. 2382 // Checking for interfaces is also necessary since interfaces can contain 2383 // both default methods and static encoded fields. 2384 bool InitializeDependencies(const Handle<mirror::Class>& klass, 2385 Handle<mirror::ClassLoader> class_loader, 2386 Thread* self) 2387 REQUIRES_SHARED(Locks::mutator_lock_) { 2388 if (klass->HasSuperClass()) { 2389 ObjPtr<mirror::Class> super_class = klass->GetSuperClass(); 2390 StackHandleScope<1> hs(self); 2391 Handle<mirror::Class> handle_scope_super(hs.NewHandle(super_class)); 2392 if (!handle_scope_super->IsInitialized()) { 2393 this->TryInitializeClass(handle_scope_super, class_loader); 2394 if (!handle_scope_super->IsInitialized()) { 2395 return false; 2396 } 2397 } 2398 } 2399 2400 uint32_t num_if = klass->NumDirectInterfaces(); 2401 for (size_t i = 0; i < num_if; i++) { 2402 ObjPtr<mirror::Class> 2403 interface = mirror::Class::GetDirectInterface(self, klass.Get(), i); 2404 StackHandleScope<1> hs(self); 2405 Handle<mirror::Class> handle_interface(hs.NewHandle(interface)); 2406 2407 TryInitializeClass(handle_interface, class_loader); 2408 2409 if (!handle_interface->IsInitialized()) { 2410 return false; 2411 } 2412 } 2413 2414 return PreResolveTypes(self, klass); 2415 } 2416 2417 // In this phase the classes containing class initializers are ignored. Make sure no 2418 // clinit appears in kalss's super class chain and interfaces. 2419 bool NoClinitInDependency(const Handle<mirror::Class>& klass, 2420 Thread* self, 2421 Handle<mirror::ClassLoader>* class_loader) 2422 REQUIRES_SHARED(Locks::mutator_lock_) { 2423 ArtMethod* clinit = 2424 klass->FindClassInitializer(manager_->GetClassLinker()->GetImagePointerSize()); 2425 if (clinit != nullptr) { 2426 VLOG(compiler) << klass->PrettyClass() << ' ' << clinit->PrettyMethod(true); 2427 return false; 2428 } 2429 if (klass->HasSuperClass()) { 2430 ObjPtr<mirror::Class> super_class = klass->GetSuperClass(); 2431 StackHandleScope<1> hs(self); 2432 Handle<mirror::Class> handle_scope_super(hs.NewHandle(super_class)); 2433 if (!NoClinitInDependency(handle_scope_super, self, class_loader)) { 2434 return false; 2435 } 2436 } 2437 2438 uint32_t num_if = klass->NumDirectInterfaces(); 2439 for (size_t i = 0; i < num_if; i++) { 2440 ObjPtr<mirror::Class> 2441 interface = mirror::Class::GetDirectInterface(self, klass.Get(), i); 2442 StackHandleScope<1> hs(self); 2443 Handle<mirror::Class> handle_interface(hs.NewHandle(interface)); 2444 if (!NoClinitInDependency(handle_interface, self, class_loader)) { 2445 return false; 2446 } 2447 } 2448 2449 return true; 2450 } 2451 2452 const ParallelCompilationManager* const manager_; 2453 }; 2454 2455 void CompilerDriver::InitializeClasses(jobject jni_class_loader, 2456 const DexFile& dex_file, 2457 const std::vector<const DexFile*>& dex_files, 2458 TimingLogger* timings) { 2459 TimingLogger::ScopedTiming t("InitializeNoClinit", timings); 2460 2461 // Initialization allocates objects and needs to run single-threaded to be deterministic. 2462 bool force_determinism = GetCompilerOptions().IsForceDeterminism(); 2463 ThreadPool* init_thread_pool = force_determinism 2464 ? single_thread_pool_.get() 2465 : parallel_thread_pool_.get(); 2466 size_t init_thread_count = force_determinism ? 1U : parallel_thread_count_; 2467 2468 ClassLinker* class_linker = Runtime::Current()->GetClassLinker(); 2469 ParallelCompilationManager context(class_linker, jni_class_loader, this, &dex_file, dex_files, 2470 init_thread_pool); 2471 2472 if (GetCompilerOptions().IsBootImage() || GetCompilerOptions().IsAppImage()) { 2473 // Set the concurrency thread to 1 to support initialization for App Images since transaction 2474 // doesn't support multithreading now. 2475 // TODO: remove this when transactional mode supports multithreading. 2476 init_thread_count = 1U; 2477 } 2478 InitializeClassVisitor visitor(&context); 2479 context.ForAll(0, dex_file.NumClassDefs(), &visitor, init_thread_count); 2480 } 2481 2482 class InitializeArrayClassesAndCreateConflictTablesVisitor : public ClassVisitor { 2483 public: 2484 explicit InitializeArrayClassesAndCreateConflictTablesVisitor(VariableSizedHandleScope& hs) 2485 : hs_(hs) {} 2486 2487 bool operator()(ObjPtr<mirror::Class> klass) override 2488 REQUIRES_SHARED(Locks::mutator_lock_) { 2489 if (Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(klass)) { 2490 return true; 2491 } 2492 if (klass->IsArrayClass()) { 2493 StackHandleScope<1> hs(Thread::Current()); 2494 auto h_klass = hs.NewHandleWrapper(&klass); 2495 Runtime::Current()->GetClassLinker()->EnsureInitialized(hs.Self(), h_klass, true, true); 2496 } 2497 // Collect handles since there may be thread suspension in future EnsureInitialized. 2498 to_visit_.push_back(hs_.NewHandle(klass)); 2499 return true; 2500 } 2501 2502 void FillAllIMTAndConflictTables() REQUIRES_SHARED(Locks::mutator_lock_) { 2503 for (Handle<mirror::Class> c : to_visit_) { 2504 // Create the conflict tables. 2505 FillIMTAndConflictTables(c.Get()); 2506 } 2507 } 2508 2509 private: 2510 void FillIMTAndConflictTables(ObjPtr<mirror::Class> klass) 2511 REQUIRES_SHARED(Locks::mutator_lock_) { 2512 if (!klass->ShouldHaveImt()) { 2513 return; 2514 } 2515 if (visited_classes_.find(klass) != visited_classes_.end()) { 2516 return; 2517 } 2518 if (klass->HasSuperClass()) { 2519 FillIMTAndConflictTables(klass->GetSuperClass()); 2520 } 2521 if (!klass->IsTemp()) { 2522 Runtime::Current()->GetClassLinker()->FillIMTAndConflictTables(klass); 2523 } 2524 visited_classes_.insert(klass); 2525 } 2526 2527 VariableSizedHandleScope& hs_; 2528 std::vector<Handle<mirror::Class>> to_visit_; 2529 std::unordered_set<ObjPtr<mirror::Class>, HashObjPtr> visited_classes_; 2530 }; 2531 2532 void CompilerDriver::InitializeClasses(jobject class_loader, 2533 const std::vector<const DexFile*>& dex_files, 2534 TimingLogger* timings) { 2535 for (size_t i = 0; i != dex_files.size(); ++i) { 2536 const DexFile* dex_file = dex_files[i]; 2537 CHECK(dex_file != nullptr); 2538 InitializeClasses(class_loader, *dex_file, dex_files, timings); 2539 } 2540 if (GetCompilerOptions().IsBootImage() || GetCompilerOptions().IsAppImage()) { 2541 // Make sure that we call EnsureIntiailized on all the array classes to call 2542 // SetVerificationAttempted so that the access flags are set. If we do not do this they get 2543 // changed at runtime resulting in more dirty image pages. 2544 // Also create conflict tables. 2545 // Only useful if we are compiling an image. 2546 ScopedObjectAccess soa(Thread::Current()); 2547 VariableSizedHandleScope hs(soa.Self()); 2548 InitializeArrayClassesAndCreateConflictTablesVisitor visitor(hs); 2549 Runtime::Current()->GetClassLinker()->VisitClassesWithoutClassesLock(&visitor); 2550 visitor.FillAllIMTAndConflictTables(); 2551 } 2552 if (GetCompilerOptions().IsBootImage()) { 2553 // Prune garbage objects created during aborted transactions. 2554 Runtime::Current()->GetHeap()->CollectGarbage(/* clear_soft_references= */ true); 2555 } 2556 } 2557 2558 template <typename CompileFn> 2559 static void CompileDexFile(CompilerDriver* driver, 2560 jobject class_loader, 2561 const DexFile& dex_file, 2562 const std::vector<const DexFile*>& dex_files, 2563 ThreadPool* thread_pool, 2564 size_t thread_count, 2565 TimingLogger* timings, 2566 const char* timing_name, 2567 CompileFn compile_fn) { 2568 TimingLogger::ScopedTiming t(timing_name, timings); 2569 ParallelCompilationManager context(Runtime::Current()->GetClassLinker(), 2570 class_loader, 2571 driver, 2572 &dex_file, 2573 dex_files, 2574 thread_pool); 2575 2576 auto compile = [&context, &compile_fn](size_t class_def_index) { 2577 const DexFile& dex_file = *context.GetDexFile(); 2578 SCOPED_TRACE << "compile " << dex_file.GetLocation() << "@" << class_def_index; 2579 ClassLinker* class_linker = context.GetClassLinker(); 2580 jobject jclass_loader = context.GetClassLoader(); 2581 ClassReference ref(&dex_file, class_def_index); 2582 const dex::ClassDef& class_def = dex_file.GetClassDef(class_def_index); 2583 ClassAccessor accessor(dex_file, class_def_index); 2584 CompilerDriver* const driver = context.GetCompiler(); 2585 // Skip compiling classes with generic verifier failures since they will still fail at runtime 2586 if (driver->GetCompilerOptions().GetVerificationResults()->IsClassRejected(ref)) { 2587 return; 2588 } 2589 // Use a scoped object access to perform to the quick SkipClass check. 2590 ScopedObjectAccess soa(Thread::Current()); 2591 StackHandleScope<3> hs(soa.Self()); 2592 Handle<mirror::ClassLoader> class_loader( 2593 hs.NewHandle(soa.Decode<mirror::ClassLoader>(jclass_loader))); 2594 Handle<mirror::Class> klass( 2595 hs.NewHandle(class_linker->FindClass(soa.Self(), accessor.GetDescriptor(), class_loader))); 2596 Handle<mirror::DexCache> dex_cache; 2597 if (klass == nullptr) { 2598 soa.Self()->AssertPendingException(); 2599 soa.Self()->ClearException(); 2600 dex_cache = hs.NewHandle(class_linker->FindDexCache(soa.Self(), dex_file)); 2601 } else if (SkipClass(jclass_loader, dex_file, klass.Get())) { 2602 return; 2603 } else if (&klass->GetDexFile() != &dex_file) { 2604 // Skip a duplicate class (as the resolved class is from another, earlier dex file). 2605 return; // Do not update state. 2606 } else { 2607 dex_cache = hs.NewHandle(klass->GetDexCache()); 2608 } 2609 2610 // Avoid suspension if there are no methods to compile. 2611 if (accessor.NumDirectMethods() + accessor.NumVirtualMethods() == 0) { 2612 return; 2613 } 2614 2615 // Go to native so that we don't block GC during compilation. 2616 ScopedThreadSuspension sts(soa.Self(), kNative); 2617 2618 // Can we run DEX-to-DEX compiler on this class ? 2619 optimizer::DexToDexCompiler::CompilationLevel dex_to_dex_compilation_level = 2620 GetDexToDexCompilationLevel(soa.Self(), *driver, jclass_loader, dex_file, class_def); 2621 2622 // Compile direct and virtual methods. 2623 int64_t previous_method_idx = -1; 2624 for (const ClassAccessor::Method& method : accessor.GetMethods()) { 2625 const uint32_t method_idx = method.GetIndex(); 2626 if (method_idx == previous_method_idx) { 2627 // smali can create dex files with two encoded_methods sharing the same method_idx 2628 // http://code.google.com/p/smali/issues/detail?id=119 2629 continue; 2630 } 2631 previous_method_idx = method_idx; 2632 compile_fn(soa.Self(), 2633 driver, 2634 method.GetCodeItem(), 2635 method.GetAccessFlags(), 2636 method.GetInvokeType(class_def.access_flags_), 2637 class_def_index, 2638 method_idx, 2639 class_loader, 2640 dex_file, 2641 dex_to_dex_compilation_level, 2642 dex_cache); 2643 } 2644 }; 2645 context.ForAllLambda(0, dex_file.NumClassDefs(), compile, thread_count); 2646 } 2647 2648 void CompilerDriver::Compile(jobject class_loader, 2649 const std::vector<const DexFile*>& dex_files, 2650 TimingLogger* timings) { 2651 if (kDebugProfileGuidedCompilation) { 2652 const ProfileCompilationInfo* profile_compilation_info = 2653 GetCompilerOptions().GetProfileCompilationInfo(); 2654 LOG(INFO) << "[ProfileGuidedCompilation] " << 2655 ((profile_compilation_info == nullptr) 2656 ? "null" 2657 : profile_compilation_info->DumpInfo(dex_files)); 2658 } 2659 2660 dex_to_dex_compiler_.ClearState(); 2661 for (const DexFile* dex_file : dex_files) { 2662 CHECK(dex_file != nullptr); 2663 CompileDexFile(this, 2664 class_loader, 2665 *dex_file, 2666 dex_files, 2667 parallel_thread_pool_.get(), 2668 parallel_thread_count_, 2669 timings, 2670 "Compile Dex File Quick", 2671 CompileMethodQuick); 2672 const ArenaPool* const arena_pool = Runtime::Current()->GetArenaPool(); 2673 const size_t arena_alloc = arena_pool->GetBytesAllocated(); 2674 max_arena_alloc_ = std::max(arena_alloc, max_arena_alloc_); 2675 Runtime::Current()->ReclaimArenaPoolMemory(); 2676 } 2677 2678 if (dex_to_dex_compiler_.NumCodeItemsToQuicken(Thread::Current()) > 0u) { 2679 // TODO: Not visit all of the dex files, its probably rare that only one would have quickened 2680 // methods though. 2681 for (const DexFile* dex_file : dex_files) { 2682 CompileDexFile(this, 2683 class_loader, 2684 *dex_file, 2685 dex_files, 2686 parallel_thread_pool_.get(), 2687 parallel_thread_count_, 2688 timings, 2689 "Compile Dex File Dex2Dex", 2690 CompileMethodDex2Dex); 2691 } 2692 dex_to_dex_compiler_.ClearState(); 2693 } 2694 2695 VLOG(compiler) << "Compile: " << GetMemoryUsageString(false); 2696 } 2697 2698 void CompilerDriver::AddCompiledMethod(const MethodReference& method_ref, 2699 CompiledMethod* const compiled_method) { 2700 DCHECK(GetCompiledMethod(method_ref) == nullptr) << method_ref.PrettyMethod(); 2701 MethodTable::InsertResult result = compiled_methods_.Insert(method_ref, 2702 /*expected*/ nullptr, 2703 compiled_method); 2704 CHECK(result == MethodTable::kInsertResultSuccess); 2705 DCHECK(GetCompiledMethod(method_ref) != nullptr) << method_ref.PrettyMethod(); 2706 } 2707 2708 CompiledMethod* CompilerDriver::RemoveCompiledMethod(const MethodReference& method_ref) { 2709 CompiledMethod* ret = nullptr; 2710 CHECK(compiled_methods_.Remove(method_ref, &ret)); 2711 return ret; 2712 } 2713 2714 bool CompilerDriver::GetCompiledClass(const ClassReference& ref, ClassStatus* status) const { 2715 DCHECK(status != nullptr); 2716 // The table doesn't know if something wasn't inserted. For this case it will return 2717 // ClassStatus::kNotReady. To handle this, just assume anything we didn't try to verify 2718 // is not compiled. 2719 if (!compiled_classes_.Get(ref, status) || 2720 *status < ClassStatus::kRetryVerificationAtRuntime) { 2721 return false; 2722 } 2723 return true; 2724 } 2725 2726 ClassStatus CompilerDriver::GetClassStatus(const ClassReference& ref) const { 2727 ClassStatus status = ClassStatus::kNotReady; 2728 if (!GetCompiledClass(ref, &status)) { 2729 classpath_classes_.Get(ref, &status); 2730 } 2731 return status; 2732 } 2733 2734 void CompilerDriver::RecordClassStatus(const ClassReference& ref, ClassStatus status) { 2735 switch (status) { 2736 case ClassStatus::kErrorResolved: 2737 case ClassStatus::kErrorUnresolved: 2738 case ClassStatus::kNotReady: 2739 case ClassStatus::kResolved: 2740 case ClassStatus::kRetryVerificationAtRuntime: 2741 case ClassStatus::kVerified: 2742 case ClassStatus::kSuperclassValidated: 2743 case ClassStatus::kInitialized: 2744 break; // Expected states. 2745 default: 2746 LOG(FATAL) << "Unexpected class status for class " 2747 << PrettyDescriptor( 2748 ref.dex_file->GetClassDescriptor(ref.dex_file->GetClassDef(ref.index))) 2749 << " of " << status; 2750 } 2751 2752 ClassStateTable::InsertResult result; 2753 ClassStateTable* table = &compiled_classes_; 2754 do { 2755 ClassStatus existing = ClassStatus::kNotReady; 2756 if (!table->Get(ref, &existing)) { 2757 // A classpath class. 2758 if (kIsDebugBuild) { 2759 // Check to make sure it's not a dex file for an oat file we are compiling since these 2760 // should always succeed. These do not include classes in for used libraries. 2761 for (const DexFile* dex_file : GetCompilerOptions().GetDexFilesForOatFile()) { 2762 CHECK_NE(ref.dex_file, dex_file) << ref.dex_file->GetLocation(); 2763 } 2764 } 2765 if (!classpath_classes_.HaveDexFile(ref.dex_file)) { 2766 // Boot classpath dex file. 2767 return; 2768 } 2769 table = &classpath_classes_; 2770 table->Get(ref, &existing); 2771 } 2772 if (existing >= status) { 2773 // Existing status is already better than we expect, break. 2774 break; 2775 } 2776 // Update the status if we now have a greater one. This happens with vdex, 2777 // which records a class is verified, but does not resolve it. 2778 result = table->Insert(ref, existing, status); 2779 CHECK(result != ClassStateTable::kInsertResultInvalidDexFile) << ref.dex_file->GetLocation(); 2780 } while (result != ClassStateTable::kInsertResultSuccess); 2781 } 2782 2783 CompiledMethod* CompilerDriver::GetCompiledMethod(MethodReference ref) const { 2784 CompiledMethod* compiled_method = nullptr; 2785 compiled_methods_.Get(ref, &compiled_method); 2786 return compiled_method; 2787 } 2788 2789 std::string CompilerDriver::GetMemoryUsageString(bool extended) const { 2790 std::ostringstream oss; 2791 const gc::Heap* const heap = Runtime::Current()->GetHeap(); 2792 const size_t java_alloc = heap->GetBytesAllocated(); 2793 oss << "arena alloc=" << PrettySize(max_arena_alloc_) << " (" << max_arena_alloc_ << "B)"; 2794 oss << " java alloc=" << PrettySize(java_alloc) << " (" << java_alloc << "B)"; 2795 #if defined(__BIONIC__) || defined(__GLIBC__) 2796 const struct mallinfo info = mallinfo(); 2797 const size_t allocated_space = static_cast<size_t>(info.uordblks); 2798 const size_t free_space = static_cast<size_t>(info.fordblks); 2799 oss << " native alloc=" << PrettySize(allocated_space) << " (" << allocated_space << "B)" 2800 << " free=" << PrettySize(free_space) << " (" << free_space << "B)"; 2801 #endif 2802 compiled_method_storage_.DumpMemoryUsage(oss, extended); 2803 return oss.str(); 2804 } 2805 2806 void CompilerDriver::InitializeThreadPools() { 2807 size_t parallel_count = parallel_thread_count_ > 0 ? parallel_thread_count_ - 1 : 0; 2808 parallel_thread_pool_.reset( 2809 new ThreadPool("Compiler driver thread pool", parallel_count)); 2810 single_thread_pool_.reset(new ThreadPool("Single-threaded Compiler driver thread pool", 0)); 2811 } 2812 2813 void CompilerDriver::FreeThreadPools() { 2814 parallel_thread_pool_.reset(); 2815 single_thread_pool_.reset(); 2816 } 2817 2818 void CompilerDriver::SetClasspathDexFiles(const std::vector<const DexFile*>& dex_files) { 2819 classpath_classes_.AddDexFiles(dex_files); 2820 } 2821 2822 } // namespace art 2823