/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "image_writer.h"

#include <sys/stat.h>
#include <lz4.h>
#include <lz4hc.h>

#include <memory>
#include <numeric>
#include <unordered_set>
#include <vector>

#include "art_field-inl.h"
#include "art_method-inl.h"
#include "base/callee_save_type.h"
#include "base/enums.h"
#include "base/logging.h"
#include "base/unix_file/fd_file.h"
#include "class_linker-inl.h"
#include "compiled_method.h"
#include "dex_file-inl.h"
#include "dex_file_types.h"
#include "driver/compiler_driver.h"
#include "elf_file.h"
#include "elf_utils.h"
#include "elf_writer.h"
#include "gc/accounting/card_table-inl.h"
#include "gc/accounting/heap_bitmap.h"
#include "gc/accounting/space_bitmap-inl.h"
#include "gc/collector/concurrent_copying.h"
#include "gc/heap.h"
#include "gc/heap-visit-objects-inl.h"
#include "gc/space/large_object_space.h"
#include "gc/space/space-inl.h"
#include "gc/verification.h"
#include "globals.h"
#include "handle_scope-inl.h"
#include "image.h"
#include "imt_conflict_table.h"
#include "jni_internal.h"
#include "linear_alloc.h"
#include "lock_word.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "mirror/class_ext.h"
#include "mirror/class_loader.h"
#include "mirror/dex_cache.h"
#include "mirror/dex_cache-inl.h"
#include "mirror/executable.h"
#include "mirror/method.h"
#include "mirror/object-inl.h"
#include "mirror/object-refvisitor-inl.h"
#include "mirror/object_array-inl.h"
#include "mirror/string-inl.h"
#include "oat.h"
#include "oat_file.h"
#include "oat_file_manager.h"
#include "runtime.h"
#include "scoped_thread_state_change-inl.h"
#include "utils/dex_cache_arrays_layout-inl.h"

using ::art::mirror::Class;
using ::art::mirror::DexCache;
using ::art::mirror::Object;
using ::art::mirror::ObjectArray;
using ::art::mirror::String;

namespace art {

// Separate objects into multiple bins to optimize dirty memory use.
static constexpr bool kBinObjects = true;

// Return true if an object is already in an image space.
bool ImageWriter::IsInBootImage(const void* obj) const {
  gc::Heap* const heap = Runtime::Current()->GetHeap();
  if (!compile_app_image_) {
    DCHECK(heap->GetBootImageSpaces().empty());
    return false;
  }
  for (gc::space::ImageSpace* boot_image_space : heap->GetBootImageSpaces()) {
    const uint8_t* image_begin = boot_image_space->Begin();
    // Real image end including ArtMethods and ArtField sections.
    const uint8_t* image_end = image_begin + boot_image_space->GetImageHeader().GetImageSize();
    if (image_begin <= obj && obj < image_end) {
      return true;
    }
  }
  return false;
}

bool ImageWriter::IsInBootOatFile(const void* ptr) const {
  gc::Heap* const heap = Runtime::Current()->GetHeap();
  if (!compile_app_image_) {
    DCHECK(heap->GetBootImageSpaces().empty());
    return false;
  }
  for (gc::space::ImageSpace* boot_image_space : heap->GetBootImageSpaces()) {
    const ImageHeader& image_header = boot_image_space->GetImageHeader();
    if (image_header.GetOatFileBegin() <= ptr && ptr < image_header.GetOatFileEnd()) {
      return true;
    }
  }
  return false;
}

static void ClearDexFileCookies() REQUIRES_SHARED(Locks::mutator_lock_) {
  auto visitor = [](Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(obj != nullptr);
    Class* klass = obj->GetClass();
    if (klass == WellKnownClasses::ToClass(WellKnownClasses::dalvik_system_DexFile)) {
      ArtField* field = jni::DecodeArtField(WellKnownClasses::dalvik_system_DexFile_cookie);
      // Null out the cookie to enable determinism. b/34090128
      field->SetObject</*kTransactionActive*/ false>(obj, nullptr);
    }
  };
  Runtime::Current()->GetHeap()->VisitObjects(visitor);
}

bool ImageWriter::PrepareImageAddressSpace() {
  target_ptr_size_ = InstructionSetPointerSize(compiler_driver_.GetInstructionSet());
  gc::Heap* const heap = Runtime::Current()->GetHeap();
  {
    ScopedObjectAccess soa(Thread::Current());
    PruneNonImageClasses();  // Remove junk.
    if (compile_app_image_) {
      // Clear dex file cookies for app images to enable app image determinism. This is required
      // since the cookie field contains long pointers to DexFiles which are not deterministic.
      // b/34090128
      ClearDexFileCookies();
    } else {
      // Avoid for app image since this may increase RAM and image size.
      ComputeLazyFieldsForImageClasses();  // Add useful information.
    }
  }
  heap->CollectGarbage(false);  // Remove garbage.

  if (kIsDebugBuild) {
    ScopedObjectAccess soa(Thread::Current());
    CheckNonImageClassesRemoved();
  }

  {
    ScopedObjectAccess soa(Thread::Current());
    CalculateNewObjectOffsets();
  }

  // This needs to happen after CalculateNewObjectOffsets since it relies on intern_table_bytes_
  // and the bin size sums having been calculated.
  if (!AllocMemory()) {
    return false;
  }

  return true;
}

bool ImageWriter::Write(int image_fd,
                        const std::vector<const char*>& image_filenames,
                        const std::vector<const char*>& oat_filenames) {
  // If image_fd or oat_fd are not kInvalidFd then we may have empty strings in image_filenames
  // or oat_filenames.
  CHECK(!image_filenames.empty());
  if (image_fd != kInvalidFd) {
    CHECK_EQ(image_filenames.size(), 1u);
  }
  CHECK(!oat_filenames.empty());
  CHECK_EQ(image_filenames.size(), oat_filenames.size());

  {
    ScopedObjectAccess soa(Thread::Current());
    for (size_t i = 0; i < oat_filenames.size(); ++i) {
      CreateHeader(i);
      CopyAndFixupNativeData(i);
    }
  }

  {
    // TODO: Heap validation can't handle these fix-up passes.
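    // At this point the lock words of the source objects already hold forwarding addresses and
    // the copied objects reference image addresses, so the usual object graph invariants no
    // longer hold; validation stays off from here on.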
    ScopedObjectAccess soa(Thread::Current());
    Runtime::Current()->GetHeap()->DisableObjectValidation();
    CopyAndFixupObjects();
  }

  for (size_t i = 0; i < image_filenames.size(); ++i) {
    const char* image_filename = image_filenames[i];
    ImageInfo& image_info = GetImageInfo(i);
    std::unique_ptr<File> image_file;
    if (image_fd != kInvalidFd) {
      if (strlen(image_filename) == 0u) {
        image_file.reset(new File(image_fd, unix_file::kCheckSafeUsage));
        // Empty the file in case it already exists.
        if (image_file != nullptr) {
          TEMP_FAILURE_RETRY(image_file->SetLength(0));
          TEMP_FAILURE_RETRY(image_file->Flush());
        }
      } else {
        LOG(ERROR) << "image fd " << image_fd << " name " << image_filename;
      }
    } else {
      image_file.reset(OS::CreateEmptyFile(image_filename));
    }

    if (image_file == nullptr) {
      LOG(ERROR) << "Failed to open image file " << image_filename;
      return false;
    }

    if (!compile_app_image_ && fchmod(image_file->Fd(), 0644) != 0) {
      PLOG(ERROR) << "Failed to make image file world readable: " << image_filename;
      image_file->Erase();
      return false;
    }

    std::unique_ptr<char[]> compressed_data;
    // Image data size excludes the bitmap and the header.
    ImageHeader* const image_header = reinterpret_cast<ImageHeader*>(image_info.image_->Begin());
    const size_t image_data_size = image_header->GetImageSize() - sizeof(ImageHeader);
    char* image_data = reinterpret_cast<char*>(image_info.image_->Begin()) + sizeof(ImageHeader);
    size_t data_size;
    const char* image_data_to_write;
    const uint64_t compress_start_time = NanoTime();

    CHECK_EQ(image_header->storage_mode_, image_storage_mode_);
    switch (image_storage_mode_) {
      case ImageHeader::kStorageModeLZ4HC:  // Fall-through.
      case ImageHeader::kStorageModeLZ4: {
        const size_t compressed_max_size = LZ4_compressBound(image_data_size);
        compressed_data.reset(new char[compressed_max_size]);
        data_size = LZ4_compress_default(
            reinterpret_cast<char*>(image_info.image_->Begin()) + sizeof(ImageHeader),
            &compressed_data[0],
            image_data_size,
            compressed_max_size);

        break;
      }
      /*
       * Disabled due to image_test64 flakiness. Both modes use the same decompression.
       * b/27560444
      case ImageHeader::kStorageModeLZ4HC: {
        // Bound is the same as for non-HC.
        const size_t compressed_max_size = LZ4_compressBound(image_data_size);
        compressed_data.reset(new char[compressed_max_size]);
        data_size = LZ4_compressHC(
            reinterpret_cast<char*>(image_info.image_->Begin()) + sizeof(ImageHeader),
            &compressed_data[0],
            image_data_size);
        break;
      }
      */
      case ImageHeader::kStorageModeUncompressed: {
        data_size = image_data_size;
        image_data_to_write = image_data;
        break;
      }
      default: {
        LOG(FATAL) << "Unsupported";
        UNREACHABLE();
      }
    }

    if (compressed_data != nullptr) {
      image_data_to_write = &compressed_data[0];
      VLOG(compiler) << "Compressed from " << image_data_size << " to " << data_size << " in "
                     << PrettyDuration(NanoTime() - compress_start_time);
      if (kIsDebugBuild) {
        std::unique_ptr<uint8_t[]> temp(new uint8_t[image_data_size]);
        const size_t decompressed_size = LZ4_decompress_safe(
            reinterpret_cast<char*>(&compressed_data[0]),
            reinterpret_cast<char*>(&temp[0]),
            data_size,
            image_data_size);
        CHECK_EQ(decompressed_size, image_data_size);
        CHECK_EQ(memcmp(image_data, &temp[0], image_data_size), 0) << image_storage_mode_;
      }
    }

    // Write out the image + fields + methods.
    const bool is_compressed = compressed_data != nullptr;
    if (!image_file->PwriteFully(image_data_to_write, data_size, sizeof(ImageHeader))) {
      PLOG(ERROR) << "Failed to write image file data " << image_filename;
      image_file->Erase();
      return false;
    }

    // Write out the image bitmap at the page aligned start of the image end, also uncompressed
    // for convenience.
    const ImageSection& bitmap_section = image_header->GetImageSection(
        ImageHeader::kSectionImageBitmap);
    // Align up since data size may be unaligned if the image is compressed.
    size_t bitmap_position_in_file = RoundUp(sizeof(ImageHeader) + data_size, kPageSize);
    if (!is_compressed) {
      CHECK_EQ(bitmap_position_in_file, bitmap_section.Offset());
    }
    if (!image_file->PwriteFully(reinterpret_cast<char*>(image_info.image_bitmap_->Begin()),
                                 bitmap_section.Size(),
                                 bitmap_position_in_file)) {
      PLOG(ERROR) << "Failed to write image file " << image_filename;
      image_file->Erase();
      return false;
    }

    int err = image_file->Flush();
    if (err < 0) {
      PLOG(ERROR) << "Failed to flush image file " << image_filename << " with result " << err;
      image_file->Erase();
      return false;
    }

    // Write the header last in case the compiler gets killed in the middle of image writing.
    // We do not want to have a corrupted image with a valid header.
    // The header is uncompressed since it contains whether the image is compressed or not.
    image_header->data_size_ = data_size;
    if (!image_file->PwriteFully(reinterpret_cast<char*>(image_info.image_->Begin()),
                                 sizeof(ImageHeader),
                                 0)) {
      PLOG(ERROR) << "Failed to write image file header " << image_filename;
      image_file->Erase();
      return false;
    }

    CHECK_EQ(bitmap_position_in_file + bitmap_section.Size(),
             static_cast<size_t>(image_file->GetLength()));
    if (image_file->FlushCloseOrErase() != 0) {
      PLOG(ERROR) << "Failed to flush and close image file " << image_filename;
      return false;
    }
  }
  return true;
}

void ImageWriter::SetImageOffset(mirror::Object* object, size_t offset) {
  DCHECK(object != nullptr);
  DCHECK_NE(offset, 0U);

  // The object is already deflated from when we set the bin slot.
  // Just overwrite the lock word.
  object->SetLockWord(LockWord::FromForwardingAddress(offset), false);
  DCHECK_EQ(object->GetLockWord(false).ReadBarrierState(), 0u);
  DCHECK(IsImageOffsetAssigned(object));
}

void ImageWriter::UpdateImageOffset(mirror::Object* obj, uintptr_t offset) {
  DCHECK(IsImageOffsetAssigned(obj)) << obj << " " << offset;
  obj->SetLockWord(LockWord::FromForwardingAddress(offset), false);
  DCHECK_EQ(obj->GetLockWord(false).ReadBarrierState(), 0u);
}

void ImageWriter::AssignImageOffset(mirror::Object* object, ImageWriter::BinSlot bin_slot) {
  DCHECK(object != nullptr);
  DCHECK_NE(image_objects_offset_begin_, 0u);

  size_t oat_index = GetOatIndex(object);
  ImageInfo& image_info = GetImageInfo(oat_index);
  size_t bin_slot_offset = image_info.bin_slot_offsets_[bin_slot.GetBin()];
  size_t new_offset = bin_slot_offset + bin_slot.GetIndex();
  DCHECK_ALIGNED(new_offset, kObjectAlignment);

  SetImageOffset(object, new_offset);
  DCHECK_LT(new_offset, image_info.image_end_);
}

bool ImageWriter::IsImageOffsetAssigned(mirror::Object* object) const {
  // Will also return true if the bin slot was assigned since we are reusing the lock word.
  DCHECK(object != nullptr);
  return object->GetLockWord(false).GetState() == LockWord::kForwardingAddress;
}

size_t ImageWriter::GetImageOffset(mirror::Object* object) const {
  DCHECK(object != nullptr);
  DCHECK(IsImageOffsetAssigned(object));
  LockWord lock_word = object->GetLockWord(false);
  size_t offset = lock_word.ForwardingAddress();
  size_t oat_index = GetOatIndex(object);
  const ImageInfo& image_info = GetImageInfo(oat_index);
  DCHECK_LT(offset, image_info.image_end_);
  return offset;
}

void ImageWriter::SetImageBinSlot(mirror::Object* object, BinSlot bin_slot) {
  DCHECK(object != nullptr);
  DCHECK(!IsImageOffsetAssigned(object));
  DCHECK(!IsImageBinSlotAssigned(object));

  // Before we stomp over the lock word, save the hash code for later.
  LockWord lw(object->GetLockWord(false));
  switch (lw.GetState()) {
    case LockWord::kFatLocked:
      FALLTHROUGH_INTENDED;
    case LockWord::kThinLocked: {
      std::ostringstream oss;
      bool thin = (lw.GetState() == LockWord::kThinLocked);
      oss << (thin ? "Thin" : "Fat")
          << " locked object " << object << "(" << object->PrettyTypeOf()
          << ") found during object copy";
      if (thin) {
        oss << ". Lock owner:" << lw.ThinLockOwner();
      }
      LOG(FATAL) << oss.str();
      break;
    }
    case LockWord::kUnlocked:
      // No hash, don't need to save it.
      break;
    case LockWord::kHashCode:
      DCHECK(saved_hashcode_map_.find(object) == saved_hashcode_map_.end());
      saved_hashcode_map_.emplace(object, lw.GetHashCode());
      break;
    default:
      LOG(FATAL) << "Unreachable.";
      UNREACHABLE();
  }
  object->SetLockWord(LockWord::FromForwardingAddress(bin_slot.Uint32Value()), false);
  DCHECK_EQ(object->GetLockWord(false).ReadBarrierState(), 0u);
  DCHECK(IsImageBinSlotAssigned(object));
}

void ImageWriter::PrepareDexCacheArraySlots() {
  // Prepare dex cache array starts based on the ordering specified in the CompilerDriver.
  // Set the slot size early to avoid DCHECK() failures in IsImageBinSlotAssigned()
  // when AssignImageBinSlot() assigns their indexes out of order.
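  // Each dex file's arrays get a contiguous chunk inside the kBinDexCacheArray bin;
  // dex_cache_array_starts_ records where each chunk begins so that the per-array offsets
  // below can be computed as chunk start plus the DexCacheArraysLayout offset.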
  for (const DexFile* dex_file : compiler_driver_.GetDexFilesForOatFile()) {
    auto it = dex_file_oat_index_map_.find(dex_file);
    DCHECK(it != dex_file_oat_index_map_.end()) << dex_file->GetLocation();
    ImageInfo& image_info = GetImageInfo(it->second);
    image_info.dex_cache_array_starts_.Put(
        dex_file, image_info.bin_slot_sizes_[kBinDexCacheArray]);
    DexCacheArraysLayout layout(target_ptr_size_, dex_file);
    image_info.bin_slot_sizes_[kBinDexCacheArray] += layout.Size();
  }

  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  Thread* const self = Thread::Current();
  ReaderMutexLock mu(self, *Locks::dex_lock_);
  for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
    ObjPtr<mirror::DexCache> dex_cache =
        ObjPtr<mirror::DexCache>::DownCast(self->DecodeJObject(data.weak_root));
    if (dex_cache == nullptr || IsInBootImage(dex_cache.Ptr())) {
      continue;
    }
    const DexFile* dex_file = dex_cache->GetDexFile();
    CHECK(dex_file_oat_index_map_.find(dex_file) != dex_file_oat_index_map_.end())
        << "Dex cache should have been pruned " << dex_file->GetLocation()
        << "; possibly in class path";
    DexCacheArraysLayout layout(target_ptr_size_, dex_file);
    DCHECK(layout.Valid());
    size_t oat_index = GetOatIndexForDexCache(dex_cache);
    ImageInfo& image_info = GetImageInfo(oat_index);
    uint32_t start = image_info.dex_cache_array_starts_.Get(dex_file);
    DCHECK_EQ(dex_file->NumTypeIds() != 0u, dex_cache->GetResolvedTypes() != nullptr);
    AddDexCacheArrayRelocation(dex_cache->GetResolvedTypes(),
                               start + layout.TypesOffset(),
                               dex_cache);
    DCHECK_EQ(dex_file->NumMethodIds() != 0u, dex_cache->GetResolvedMethods() != nullptr);
    AddDexCacheArrayRelocation(dex_cache->GetResolvedMethods(),
                               start + layout.MethodsOffset(),
                               dex_cache);
    DCHECK_EQ(dex_file->NumFieldIds() != 0u, dex_cache->GetResolvedFields() != nullptr);
    AddDexCacheArrayRelocation(dex_cache->GetResolvedFields(),
                               start + layout.FieldsOffset(),
                               dex_cache);
    DCHECK_EQ(dex_file->NumStringIds() != 0u, dex_cache->GetStrings() != nullptr);
    AddDexCacheArrayRelocation(dex_cache->GetStrings(), start + layout.StringsOffset(), dex_cache);

    if (dex_cache->GetResolvedMethodTypes() != nullptr) {
      AddDexCacheArrayRelocation(dex_cache->GetResolvedMethodTypes(),
                                 start + layout.MethodTypesOffset(),
                                 dex_cache);
    }
    if (dex_cache->GetResolvedCallSites() != nullptr) {
      AddDexCacheArrayRelocation(dex_cache->GetResolvedCallSites(),
                                 start + layout.CallSitesOffset(),
                                 dex_cache);
    }
  }
}

void ImageWriter::AddDexCacheArrayRelocation(void* array,
                                             size_t offset,
                                             ObjPtr<mirror::DexCache> dex_cache) {
  if (array != nullptr) {
    DCHECK(!IsInBootImage(array));
    size_t oat_index = GetOatIndexForDexCache(dex_cache);
    native_object_relocations_.emplace(
        array,
        NativeObjectRelocation { oat_index, offset, kNativeObjectRelocationTypeDexCacheArray });
  }
}

void ImageWriter::AddMethodPointerArray(mirror::PointerArray* arr) {
  DCHECK(arr != nullptr);
  if (kIsDebugBuild) {
    for (size_t i = 0, len = arr->GetLength(); i < len; i++) {
      ArtMethod* method = arr->GetElementPtrSize<ArtMethod*>(i, target_ptr_size_);
      if (method != nullptr && !method->IsRuntimeMethod()) {
        mirror::Class* klass = method->GetDeclaringClass();
        CHECK(klass == nullptr || KeepClass(klass))
            << Class::PrettyClass(klass) << " should be a kept class";
      }
    }
  }
  // kBinArtMethodClean picked arbitrarily, just required to differentiate between ArtFields and
  // ArtMethods.
  pointer_arrays_.emplace(arr, kBinArtMethodClean);
}

void ImageWriter::AssignImageBinSlot(mirror::Object* object, size_t oat_index) {
  DCHECK(object != nullptr);
  size_t object_size = object->SizeOf();

  // The magic happens here. We segregate objects into different bins based
  // on how likely they are to get dirty at runtime.
  //
  // Likely-to-dirty objects get packed together into the same bin so that
  // at runtime their page dirtiness ratio (how many dirty objects a page has) is
  // maximized.
  //
  // This means more pages will stay either clean or shared dirty (with the zygote)
  // and the app will use less of its own (private) memory.
  Bin bin = kBinRegular;
  size_t current_offset = 0u;

  if (kBinObjects) {
    //
    // Changing the bin of an object is purely a memory-use tuning.
    // It has no effect on runtime correctness.
    //
    // Memory analysis has determined that the following types of objects get dirtied
    // the most:
    //
    // * Dex cache arrays are stored in a special bin. The arrays for each dex cache have
    //   a fixed layout which helps improve generated code (using PC-relative addressing),
    //   so we pre-calculate their offsets separately in PrepareDexCacheArraySlots().
    //   Since these arrays are huge, most pages do not overlap other objects and it's not
    //   really important where they are for the clean/dirty separation. Due to their
    //   special PC-relative addressing, we arbitrarily keep them at the end.
    // * Classes which are verified [their clinit runs only at runtime]
    //   - classes in general [because their static fields get overwritten]
    //   - initialized classes with all-final statics are unlikely to ever be dirty,
    //     so bin them separately
    // * ArtMethods that are:
    //   - native [their native entry point is not looked up until runtime]
    //   - have declaring classes that aren't initialized
    //     [their interpreter/quick entry points are trampolines until the class
    //      becomes initialized]
    //
    // We also assume the following objects get dirtied either never or extremely rarely:
    // * Strings (they are immutable)
    // * ArtMethods that aren't native and have initialized declaring classes
    //
    // We assume that "regular" bin objects are highly unlikely to become dirtied,
    // so packing them together will not result in a noticeably tighter dirty-to-clean ratio.
    //
    if (object->IsClass()) {
      bin = kBinClassVerified;
      mirror::Class* klass = object->AsClass();

      // Add the non-embedded vtable to the pointer array table if there is one.
      auto* vtable = klass->GetVTable();
      if (vtable != nullptr) {
        AddMethodPointerArray(vtable);
      }
      auto* iftable = klass->GetIfTable();
      if (iftable != nullptr) {
        for (int32_t i = 0; i < klass->GetIfTableCount(); ++i) {
          if (iftable->GetMethodArrayCount(i) > 0) {
            AddMethodPointerArray(iftable->GetMethodArray(i));
          }
        }
      }

      // Move known dirty objects into their own sections. This includes:
      // - classes with dirty static fields.
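      // (dirty_image_objects_, when non-null, is a set of class descriptors supplied to the
      // compiler, e.g. collected from profiling and passed via dex2oat's --dirty-image-objects
      // option, listing objects known to become dirty.)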
      if (dirty_image_objects_ != nullptr &&
          dirty_image_objects_->find(klass->PrettyDescriptor()) != dirty_image_objects_->end()) {
        bin = kBinKnownDirty;
      } else if (klass->GetStatus() == Class::kStatusInitialized) {
        bin = kBinClassInitialized;

        // If the class's static fields are all final, put it into a separate bin
        // since it's very likely to stay clean.
        uint32_t num_static_fields = klass->NumStaticFields();
        if (num_static_fields == 0) {
          bin = kBinClassInitializedFinalStatics;
        } else {
          // Maybe all the statics are final?
          bool all_final = true;
          for (uint32_t i = 0; i < num_static_fields; ++i) {
            ArtField* field = klass->GetStaticField(i);
            if (!field->IsFinal()) {
              all_final = false;
              break;
            }
          }

          if (all_final) {
            bin = kBinClassInitializedFinalStatics;
          }
        }
      }
    } else if (object->GetClass<kVerifyNone>()->IsStringClass()) {
      bin = kBinString;  // Strings are almost always immutable (except for the object header).
    } else if (object->GetClass<kVerifyNone>() ==
               Runtime::Current()->GetClassLinker()->GetClassRoot(ClassLinker::kJavaLangObject)) {
      // Instance of java.lang.Object, probably a lock object. This means it will be dirty when
      // we synchronize on it.
      bin = kBinMiscDirty;
    } else if (object->IsDexCache()) {
      // The dex file field becomes dirty when the image is loaded.
      bin = kBinMiscDirty;
    }
    // else bin = kBinRegular
  }

  // Assign the oat index too.
  DCHECK(oat_index_map_.find(object) == oat_index_map_.end());
  oat_index_map_.emplace(object, oat_index);

  ImageInfo& image_info = GetImageInfo(oat_index);

  size_t offset_delta = RoundUp(object_size, kObjectAlignment);  // 64-bit alignment
  current_offset = image_info.bin_slot_sizes_[bin];  // How many bytes the current bin is at (aligned).
  // Move the current bin size up to accommodate the object we just assigned a bin slot.
  image_info.bin_slot_sizes_[bin] += offset_delta;

  BinSlot new_bin_slot(bin, current_offset);
  SetImageBinSlot(object, new_bin_slot);

  ++image_info.bin_slot_count_[bin];

  // Grow the image closer to the end by the object we just assigned.
  image_info.image_end_ += offset_delta;
}

bool ImageWriter::WillMethodBeDirty(ArtMethod* m) const {
  if (m->IsNative()) {
    return true;
  }
  mirror::Class* declaring_class = m->GetDeclaringClass();
  // An initialized class is highly unlikely to be dirtied since there are no entry points
  // to mutate.
  return declaring_class == nullptr || declaring_class->GetStatus() != Class::kStatusInitialized;
}

bool ImageWriter::IsImageBinSlotAssigned(mirror::Object* object) const {
  DCHECK(object != nullptr);

  // We always stash the bin slot into a lock word, in the 'forwarding address' state.
  // If it's in some other state, then we haven't yet assigned an image bin slot.
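  // (If the object had a hash code, SetImageBinSlot() saved it in saved_hashcode_map_ before
  // stomping the lock word, so no information is lost by this encoding.)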
  if (object->GetLockWord(false).GetState() != LockWord::kForwardingAddress) {
    return false;
  } else if (kIsDebugBuild) {
    LockWord lock_word = object->GetLockWord(false);
    size_t offset = lock_word.ForwardingAddress();
    BinSlot bin_slot(offset);
    size_t oat_index = GetOatIndex(object);
    const ImageInfo& image_info = GetImageInfo(oat_index);
    DCHECK_LT(bin_slot.GetIndex(), image_info.bin_slot_sizes_[bin_slot.GetBin()])
        << "bin slot offset should not exceed the size of that bin";
  }
  return true;
}

ImageWriter::BinSlot ImageWriter::GetImageBinSlot(mirror::Object* object) const {
  DCHECK(object != nullptr);
  DCHECK(IsImageBinSlotAssigned(object));

  LockWord lock_word = object->GetLockWord(false);
  size_t offset = lock_word.ForwardingAddress();  // TODO: ForwardingAddress should be uint32_t.
  DCHECK_LE(offset, std::numeric_limits<uint32_t>::max());

  BinSlot bin_slot(static_cast<uint32_t>(offset));
  size_t oat_index = GetOatIndex(object);
  const ImageInfo& image_info = GetImageInfo(oat_index);
  DCHECK_LT(bin_slot.GetIndex(), image_info.bin_slot_sizes_[bin_slot.GetBin()]);

  return bin_slot;
}

bool ImageWriter::AllocMemory() {
  for (ImageInfo& image_info : image_infos_) {
    ImageSection unused_sections[ImageHeader::kSectionCount];
    const size_t length = RoundUp(
        image_info.CreateImageSections(unused_sections), kPageSize);

    std::string error_msg;
    image_info.image_.reset(MemMap::MapAnonymous("image writer image",
                                                 nullptr,
                                                 length,
                                                 PROT_READ | PROT_WRITE,
                                                 false,
                                                 false,
                                                 &error_msg));
    if (UNLIKELY(image_info.image_.get() == nullptr)) {
      LOG(ERROR) << "Failed to allocate memory for image file generation: " << error_msg;
      return false;
    }

    // Create the image bitmap, which only needs to cover the mirror object section, i.e. up to
    // image_end_.
    CHECK_LE(image_info.image_end_, length);
    image_info.image_bitmap_.reset(gc::accounting::ContinuousSpaceBitmap::Create(
        "image bitmap", image_info.image_->Begin(), RoundUp(image_info.image_end_, kPageSize)));
    if (image_info.image_bitmap_.get() == nullptr) {
      LOG(ERROR) << "Failed to allocate memory for image bitmap";
      return false;
    }
  }
  return true;
}

class ImageWriter::ComputeLazyFieldsForClassesVisitor : public ClassVisitor {
 public:
  bool operator()(ObjPtr<Class> c) OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
    StackHandleScope<1> hs(Thread::Current());
    mirror::Class::ComputeName(hs.NewHandle(c));
    return true;
  }
};

void ImageWriter::ComputeLazyFieldsForImageClasses() {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  ComputeLazyFieldsForClassesVisitor visitor;
  class_linker->VisitClassesWithoutClassesLock(&visitor);
}

static bool IsBootClassLoaderClass(ObjPtr<mirror::Class> klass)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  return klass->GetClassLoader() == nullptr;
}

bool ImageWriter::IsBootClassLoaderNonImageClass(mirror::Class* klass) {
  return IsBootClassLoaderClass(klass) && !IsInBootImage(klass);
}

// This visitor follows the references of an instance recursively and prunes the class
// if the type of any field is pruned.
class ImageWriter::PruneObjectReferenceVisitor {
 public:
  PruneObjectReferenceVisitor(ImageWriter* image_writer,
                              bool* early_exit,
                              std::unordered_set<mirror::Object*>* visited,
                              bool* result)
      : image_writer_(image_writer), early_exit_(early_exit), visited_(visited), result_(result) {}

  ALWAYS_INLINE void VisitRootIfNonNull(
      mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const
      REQUIRES_SHARED(Locks::mutator_lock_) { }

  ALWAYS_INLINE void VisitRoot(
      mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const
      REQUIRES_SHARED(Locks::mutator_lock_) { }

  ALWAYS_INLINE void operator() (ObjPtr<mirror::Object> obj,
                                 MemberOffset offset,
                                 bool is_static ATTRIBUTE_UNUSED) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    mirror::Object* ref =
        obj->GetFieldObject<mirror::Object, kVerifyNone, kWithoutReadBarrier>(offset);
    if (ref == nullptr || visited_->find(ref) != visited_->end()) {
      return;
    }

    ObjPtr<mirror::Class> klass = ref->IsClass() ? ref->AsClass() : ref->GetClass();
    if (klass == mirror::Method::StaticClass() || klass == mirror::Constructor::StaticClass()) {
      // Prune all classes using reflection because the content they hold will not be fixed up.
      *result_ = true;
    }

    // Record the object visited in case of circular reference.
    visited_->emplace(ref);
    if (ref->IsClass()) {
      *result_ = *result_ ||
          image_writer_->PruneAppImageClassInternal(ref->AsClass(), early_exit_, visited_);
    } else {
      *result_ = *result_ ||
          image_writer_->PruneAppImageClassInternal(klass, early_exit_, visited_);
      ref->VisitReferences(*this, *this);
    }
    // Clean up before returning so the visited set is correct for the next call.
    visited_->erase(ref);
  }

  ALWAYS_INLINE void operator() (ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
                                 ObjPtr<mirror::Reference> ref) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    operator()(ref, mirror::Reference::ReferentOffset(), /* is_static */ false);
  }

  ALWAYS_INLINE bool GetResult() const {
    return *result_;
  }

 private:
  ImageWriter* image_writer_;
  bool* early_exit_;
  std::unordered_set<mirror::Object*>* visited_;
  bool* const result_;
};


bool ImageWriter::PruneAppImageClass(ObjPtr<mirror::Class> klass) {
  bool early_exit = false;
  std::unordered_set<mirror::Object*> visited;
  return PruneAppImageClassInternal(klass, &early_exit, &visited);
}

bool ImageWriter::PruneAppImageClassInternal(
    ObjPtr<mirror::Class> klass,
    bool* early_exit,
    std::unordered_set<mirror::Object*>* visited) {
  DCHECK(early_exit != nullptr);
  DCHECK(visited != nullptr);
  DCHECK(compile_app_image_);
  if (klass == nullptr || IsInBootImage(klass.Ptr())) {
    return false;
  }
  auto found = prune_class_memo_.find(klass.Ptr());
  if (found != prune_class_memo_.end()) {
    // Already computed, return the found value.
    return found->second;
  }
  // Circular dependency: return false, but do not store the result in the memoization table.
  if (visited->find(klass.Ptr()) != visited->end()) {
    *early_exit = true;
    return false;
  }
  visited->emplace(klass.Ptr());
  bool result = IsBootClassLoaderClass(klass);
  std::string temp;
  // Prune if not an image class; this handles any broken sets of image classes such as having a
  // class in the set but not its superclass.
  result = result || !compiler_driver_.IsImageClass(klass->GetDescriptor(&temp));
  bool my_early_exit = false;  // Only for ourselves, ignore caller.
  // Remove classes that failed to verify since we don't want to have java.lang.VerifyError in
  // the app image.
  if (klass->IsErroneous()) {
    result = true;
  } else {
    ObjPtr<mirror::ClassExt> ext(klass->GetExtData());
    CHECK(ext.IsNull() || ext->GetVerifyError() == nullptr) << klass->PrettyClass();
  }
  if (!result) {
    // Check interfaces since these won't be visited through VisitReferences.
    mirror::IfTable* if_table = klass->GetIfTable();
    for (size_t i = 0, num_interfaces = klass->GetIfTableCount(); i < num_interfaces; ++i) {
      result = result || PruneAppImageClassInternal(if_table->GetInterface(i),
                                                    &my_early_exit,
                                                    visited);
    }
  }
  if (klass->IsObjectArrayClass()) {
    result = result || PruneAppImageClassInternal(klass->GetComponentType(),
                                                  &my_early_exit,
                                                  visited);
  }
  // Check static fields and their classes.
  if (klass->IsResolved() && klass->NumReferenceStaticFields() != 0) {
    size_t num_static_fields = klass->NumReferenceStaticFields();
    // Presumably GC can happen while we are cross compiling; it should not cause performance
    // problems to do the pointer size logic.
    MemberOffset field_offset = klass->GetFirstReferenceStaticFieldOffset(
        Runtime::Current()->GetClassLinker()->GetImagePointerSize());
    for (size_t i = 0u; i < num_static_fields; ++i) {
      mirror::Object* ref = klass->GetFieldObject<mirror::Object>(field_offset);
      if (ref != nullptr) {
        if (ref->IsClass()) {
          result = result || PruneAppImageClassInternal(ref->AsClass(),
                                                        &my_early_exit,
                                                        visited);
        } else {
          mirror::Class* type = ref->GetClass();
          result = result || PruneAppImageClassInternal(type,
                                                        &my_early_exit,
                                                        visited);
          if (!result) {
            // For the non-class case, also go through all the types mentioned by its fields'
            // references recursively to decide whether to keep this class.
            bool tmp = false;
            PruneObjectReferenceVisitor visitor(this, &my_early_exit, visited, &tmp);
            ref->VisitReferences(visitor, visitor);
            result = result || tmp;
          }
        }
      }
      field_offset = MemberOffset(field_offset.Uint32Value() +
                                  sizeof(mirror::HeapReference<mirror::Object>));
    }
  }
  result = result || PruneAppImageClassInternal(klass->GetSuperClass(),
                                                &my_early_exit,
                                                visited);
  // Remove the class if the dex file is not in the set of dex files. This happens for classes
  // that are from uses-library if there is no profile. b/30688277
  mirror::DexCache* dex_cache = klass->GetDexCache();
  if (dex_cache != nullptr) {
    result = result ||
        dex_file_oat_index_map_.find(dex_cache->GetDexFile()) == dex_file_oat_index_map_.end();
  }
  // Erase the element we stored earlier since we are exiting the function.
  auto it = visited->find(klass.Ptr());
  DCHECK(it != visited->end());
  visited->erase(it);
  // Only store the result if it is true or none of the calls early exited due to circular
  // dependencies. If visited is empty then we are the root caller; in this case any cycle was in
  // a child call and we can remember the result.
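  // (For example, with classes A -> B -> A: while visiting B from A, the revisit of A early
  // exits with a provisional "keep", so B's result may be wrong and is not memoized; once the
  // outermost call for A finishes, visited is empty and the result is safe to cache.)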
  if (result == true || !my_early_exit || visited->empty()) {
    prune_class_memo_[klass.Ptr()] = result;
  }
  *early_exit |= my_early_exit;
  return result;
}

bool ImageWriter::KeepClass(ObjPtr<mirror::Class> klass) {
  if (klass == nullptr) {
    return false;
  }
  if (compile_app_image_ && Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(klass)) {
    // Already in the boot image, return true.
    return true;
  }
  std::string temp;
  if (!compiler_driver_.IsImageClass(klass->GetDescriptor(&temp))) {
    return false;
  }
  if (compile_app_image_) {
    // For app images, we need to prune boot loader classes that are not in the boot image since
    // these may have already been loaded when the app image is loaded.
    // Keep classes in the boot image space since we don't want to re-resolve these.
    return !PruneAppImageClass(klass);
  }
  return true;
}

class ImageWriter::PruneClassesVisitor : public ClassVisitor {
 public:
  PruneClassesVisitor(ImageWriter* image_writer, ObjPtr<mirror::ClassLoader> class_loader)
      : image_writer_(image_writer),
        class_loader_(class_loader),
        classes_to_prune_(),
        defined_class_count_(0u) { }

  bool operator()(ObjPtr<mirror::Class> klass) OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
    if (!image_writer_->KeepClass(klass.Ptr())) {
      classes_to_prune_.insert(klass.Ptr());
      if (klass->GetClassLoader() == class_loader_) {
        ++defined_class_count_;
      }
    }
    return true;
  }

  size_t Prune() REQUIRES_SHARED(Locks::mutator_lock_) {
    ClassTable* class_table =
        Runtime::Current()->GetClassLinker()->ClassTableForClassLoader(class_loader_);
    for (mirror::Class* klass : classes_to_prune_) {
      std::string storage;
      const char* descriptor = klass->GetDescriptor(&storage);
      bool result = class_table->Remove(descriptor);
      DCHECK(result);
      DCHECK(!class_table->Remove(descriptor)) << descriptor;
    }
    return defined_class_count_;
  }

 private:
  ImageWriter* const image_writer_;
  const ObjPtr<mirror::ClassLoader> class_loader_;
  std::unordered_set<mirror::Class*> classes_to_prune_;
  size_t defined_class_count_;
};

class ImageWriter::PruneClassLoaderClassesVisitor : public ClassLoaderVisitor {
 public:
  explicit PruneClassLoaderClassesVisitor(ImageWriter* image_writer)
      : image_writer_(image_writer), removed_class_count_(0) {}

  virtual void Visit(ObjPtr<mirror::ClassLoader> class_loader) OVERRIDE
      REQUIRES_SHARED(Locks::mutator_lock_) {
    PruneClassesVisitor classes_visitor(image_writer_, class_loader);
    ClassTable* class_table =
        Runtime::Current()->GetClassLinker()->ClassTableForClassLoader(class_loader);
    class_table->Visit(classes_visitor);
    removed_class_count_ += classes_visitor.Prune();

    // Record the app image class loader. The fake boot class loader should not get registered
    // and we should end up with only one class loader for an app and none for the boot image.
    if (class_loader != nullptr && class_table != nullptr) {
      DCHECK(class_loader_ == nullptr);
      class_loader_ = class_loader;
    }
  }

  size_t GetRemovedClassCount() const {
    return removed_class_count_;
  }

  ObjPtr<mirror::ClassLoader> GetClassLoader() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return class_loader_;
  }

 private:
  ImageWriter* const image_writer_;
  size_t removed_class_count_;
  ObjPtr<mirror::ClassLoader> class_loader_;
};

void ImageWriter::VisitClassLoaders(ClassLoaderVisitor* visitor) {
  WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
  visitor->Visit(nullptr);  // Visit boot class loader.
  Runtime::Current()->GetClassLinker()->VisitClassLoaders(visitor);
}

void ImageWriter::PruneAndPreloadDexCache(ObjPtr<mirror::DexCache> dex_cache,
                                          ObjPtr<mirror::ClassLoader> class_loader) {
  // To ensure deterministic contents of the hash-based arrays, each slot shall contain
  // the candidate with the lowest index. As we're processing entries in increasing index
  // order, this means trying to look up the entry for the current index if the slot is
  // empty or if it contains a higher index.

  Runtime* runtime = Runtime::Current();
  ClassLinker* class_linker = runtime->GetClassLinker();
  const DexFile& dex_file = *dex_cache->GetDexFile();
  // Prune methods.
  mirror::MethodDexCacheType* resolved_methods = dex_cache->GetResolvedMethods();
  dex::TypeIndex last_class_idx;  // Initialized to invalid index.
  ObjPtr<mirror::Class> last_class = nullptr;
  for (size_t i = 0, num = dex_cache->GetDexFile()->NumMethodIds(); i != num; ++i) {
    uint32_t slot_idx = dex_cache->MethodSlotIndex(i);
    auto pair =
        mirror::DexCache::GetNativePairPtrSize(resolved_methods, slot_idx, target_ptr_size_);
    uint32_t stored_index = pair.index;
    ArtMethod* method = pair.object;
    if (method != nullptr && i > stored_index) {
      continue;  // Already checked.
    }
    // Check if the referenced class is in the image. Note that we want to check the referenced
    // class rather than the declaring class to preserve the semantics, i.e. using a MethodId
    // results in resolving the referenced class and that can for example throw OOME.
    const DexFile::MethodId& method_id = dex_file.GetMethodId(i);
    if (method_id.class_idx_ != last_class_idx) {
      last_class_idx = method_id.class_idx_;
      last_class = class_linker->LookupResolvedType(
          dex_file, last_class_idx, dex_cache, class_loader);
      if (last_class != nullptr && !KeepClass(last_class)) {
        last_class = nullptr;
      }
    }
    if (method == nullptr || i < stored_index) {
      if (last_class != nullptr) {
        const char* name = dex_file.StringDataByIdx(method_id.name_idx_);
        Signature signature = dex_file.GetMethodSignature(method_id);
        if (last_class->IsInterface()) {
          method = last_class->FindInterfaceMethod(name, signature, target_ptr_size_);
        } else {
          method = last_class->FindClassMethod(name, signature, target_ptr_size_);
        }
        if (method != nullptr) {
          // If the referenced class is in the image, the defining class must also be there.
          DCHECK(KeepClass(method->GetDeclaringClass()));
          dex_cache->SetResolvedMethod(i, method, target_ptr_size_);
        }
      }
    } else {
      DCHECK_EQ(i, stored_index);
      if (last_class == nullptr) {
        dex_cache->ClearResolvedMethod(stored_index, target_ptr_size_);
      }
    }
  }
  // Prune fields and make the contents of the field array deterministic.
  mirror::FieldDexCacheType* resolved_fields = dex_cache->GetResolvedFields();
  last_class_idx = dex::TypeIndex();  // Initialized to invalid index.
  last_class = nullptr;
  for (size_t i = 0, end = dex_file.NumFieldIds(); i < end; ++i) {
    uint32_t slot_idx = dex_cache->FieldSlotIndex(i);
    auto pair = mirror::DexCache::GetNativePairPtrSize(resolved_fields, slot_idx, target_ptr_size_);
    uint32_t stored_index = pair.index;
    ArtField* field = pair.object;
    if (field != nullptr && i > stored_index) {
      continue;  // Already checked.
    }
    // Check if the referenced class is in the image. Note that we want to check the referenced
    // class rather than the declaring class to preserve the semantics, i.e. using a FieldId
    // results in resolving the referenced class and that can for example throw OOME.
    const DexFile::FieldId& field_id = dex_file.GetFieldId(i);
    if (field_id.class_idx_ != last_class_idx) {
      last_class_idx = field_id.class_idx_;
      last_class = class_linker->LookupResolvedType(
          dex_file, last_class_idx, dex_cache, class_loader);
      if (last_class != nullptr && !KeepClass(last_class)) {
        last_class = nullptr;
      }
    }
    if (field == nullptr || i < stored_index) {
      if (last_class != nullptr) {
        const char* name = dex_file.StringDataByIdx(field_id.name_idx_);
        const char* type = dex_file.StringByTypeIdx(field_id.type_idx_);
        field = mirror::Class::FindField(Thread::Current(), last_class, name, type);
        if (field != nullptr) {
          // If the referenced class is in the image, the defining class must also be there.
          DCHECK(KeepClass(field->GetDeclaringClass()));
          dex_cache->SetResolvedField(i, field, target_ptr_size_);
        }
      }
    } else {
      DCHECK_EQ(i, stored_index);
      if (last_class == nullptr) {
        dex_cache->ClearResolvedField(stored_index, target_ptr_size_);
      }
    }
  }
  // Prune types and make the contents of the type array deterministic.
  // This is done after fields and methods as their lookup can touch the types array.
  for (size_t i = 0, end = dex_cache->GetDexFile()->NumTypeIds(); i < end; ++i) {
    dex::TypeIndex type_idx(i);
    uint32_t slot_idx = dex_cache->TypeSlotIndex(type_idx);
    mirror::TypeDexCachePair pair =
        dex_cache->GetResolvedTypes()[slot_idx].load(std::memory_order_relaxed);
    uint32_t stored_index = pair.index;
    ObjPtr<mirror::Class> klass = pair.object.Read();
    if (klass == nullptr || i < stored_index) {
      klass = class_linker->LookupResolvedType(dex_file, type_idx, dex_cache, class_loader);
      if (klass != nullptr) {
        DCHECK_EQ(dex_cache->GetResolvedType(type_idx), klass);
        stored_index = i;  // For correct clearing below if not keeping the `klass`.
      }
    } else if (i == stored_index && !KeepClass(klass)) {
      dex_cache->ClearResolvedType(dex::TypeIndex(stored_index));
    }
  }
  // Strings do not need pruning, but the contents of the string array must be deterministic.
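  // As with methods and fields above, each hash-based slot ends up holding the candidate with
  // the lowest string index: looking the string up via the ClassLinker (re)inserts it into the
  // slot, overwriting any higher-index entry, as the DCHECK below verifies.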
  for (size_t i = 0, end = dex_cache->GetDexFile()->NumStringIds(); i < end; ++i) {
    dex::StringIndex string_idx(i);
    uint32_t slot_idx = dex_cache->StringSlotIndex(string_idx);
    mirror::StringDexCachePair pair =
        dex_cache->GetStrings()[slot_idx].load(std::memory_order_relaxed);
    uint32_t stored_index = pair.index;
    ObjPtr<mirror::String> string = pair.object.Read();
    if (string == nullptr || i < stored_index) {
      string = class_linker->LookupString(dex_file, string_idx, dex_cache);
      DCHECK(string == nullptr || dex_cache->GetResolvedString(string_idx) == string);
    }
  }
}

void ImageWriter::PruneNonImageClasses() {
  Runtime* runtime = Runtime::Current();
  ClassLinker* class_linker = runtime->GetClassLinker();
  Thread* self = Thread::Current();
  ScopedAssertNoThreadSuspension sa(__FUNCTION__);

  // Prune uses-library dex caches. Only prune the uses-library dex caches since we want to make
  // sure the other ones don't get unloaded before the OatWriter runs.
  class_linker->VisitClassTables(
      [&](ClassTable* table) REQUIRES_SHARED(Locks::mutator_lock_) {
        table->RemoveStrongRoots(
            [&](GcRoot<mirror::Object> root) REQUIRES_SHARED(Locks::mutator_lock_) {
              ObjPtr<mirror::Object> obj = root.Read();
              if (obj->IsDexCache()) {
                // Return true if the dex file is not one of the ones in the map.
                return dex_file_oat_index_map_.find(obj->AsDexCache()->GetDexFile()) ==
                    dex_file_oat_index_map_.end();
              }
              // Return false to avoid removing.
              return false;
            });
      });

  // Remove the undesired classes from the class roots.
  ObjPtr<mirror::ClassLoader> class_loader;
  {
    PruneClassLoaderClassesVisitor class_loader_visitor(this);
    VisitClassLoaders(&class_loader_visitor);
    VLOG(compiler) << "Pruned " << class_loader_visitor.GetRemovedClassCount() << " classes";
    class_loader = class_loader_visitor.GetClassLoader();
    DCHECK_EQ(class_loader != nullptr, compile_app_image_);
  }

  // Clear references to removed classes from the DexCaches.
  std::vector<ObjPtr<mirror::DexCache>> dex_caches;
  {
    ReaderMutexLock mu2(self, *Locks::dex_lock_);
    dex_caches.reserve(class_linker->GetDexCachesData().size());
    for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
      if (self->IsJWeakCleared(data.weak_root)) {
        continue;
      }
      dex_caches.push_back(self->DecodeJObject(data.weak_root)->AsDexCache());
    }
  }
  for (ObjPtr<mirror::DexCache> dex_cache : dex_caches) {
    PruneAndPreloadDexCache(dex_cache, class_loader);
  }

  // Drop the array class cache in the ClassLinker, as these are roots holding those classes live.
  class_linker->DropFindArrayClassCache();

  // Clear to save RAM.
  prune_class_memo_.clear();
}

void ImageWriter::CheckNonImageClassesRemoved() {
  if (compiler_driver_.GetImageClasses() != nullptr) {
    auto visitor = [&](Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
      if (obj->IsClass() && !IsInBootImage(obj)) {
        Class* klass = obj->AsClass();
        if (!KeepClass(klass)) {
          DumpImageClasses();
          std::string temp;
          CHECK(KeepClass(klass))
              << Runtime::Current()->GetHeap()->GetVerification()->FirstPathFromRootSet(klass);
        }
      }
    };
    gc::Heap* heap = Runtime::Current()->GetHeap();
    heap->VisitObjects(visitor);
  }
}

void ImageWriter::DumpImageClasses() {
  auto image_classes = compiler_driver_.GetImageClasses();
  CHECK(image_classes != nullptr);
  for (const std::string& image_class : *image_classes) {
    LOG(INFO) << " " << image_class;
  }
}

mirror::String* ImageWriter::FindInternedString(mirror::String* string) {
  Thread* const self = Thread::Current();
  for (const ImageInfo& image_info : image_infos_) {
    ObjPtr<mirror::String> const found = image_info.intern_table_->LookupStrong(self, string);
    DCHECK(image_info.intern_table_->LookupWeak(self, string) == nullptr)
        << string->ToModifiedUtf8();
    if (found != nullptr) {
      return found.Ptr();
    }
  }
  if (compile_app_image_) {
    Runtime* const runtime = Runtime::Current();
    ObjPtr<mirror::String> found = runtime->GetInternTable()->LookupStrong(self, string);
    // If we found it in the runtime intern table it could either be in the boot image or
    // interned during app image compilation. If it was in the boot image return that; otherwise
    // return null since it belongs to another image space.
    if (found != nullptr && runtime->GetHeap()->ObjectIsInBootImageSpace(found.Ptr())) {
      return found.Ptr();
    }
    DCHECK(runtime->GetInternTable()->LookupWeak(self, string) == nullptr)
        << string->ToModifiedUtf8();
  }
  return nullptr;
}


ObjectArray<Object>* ImageWriter::CreateImageRoots(size_t oat_index) const {
  Runtime* runtime = Runtime::Current();
  ClassLinker* class_linker = runtime->GetClassLinker();
  Thread* self = Thread::Current();
  StackHandleScope<3> hs(self);
  Handle<Class> object_array_class(hs.NewHandle(
      class_linker->FindSystemClass(self, "[Ljava/lang/Object;")));

  std::unordered_set<const DexFile*> image_dex_files;
  for (auto& pair : dex_file_oat_index_map_) {
    const DexFile* image_dex_file = pair.first;
    size_t image_oat_index = pair.second;
    if (oat_index == image_oat_index) {
      image_dex_files.insert(image_dex_file);
    }
  }

  // Build an Object[] of all the DexCaches used in the source_space_.
  // Since we can't hold the dex lock when allocating the dex_caches ObjectArray, we lock the
  // dex lock twice: first to get the number of dex caches and then again to copy the dex
  // caches. We check that the number of dex caches does not change between the two passes.
  size_t dex_cache_count = 0;
  {
    ReaderMutexLock mu(self, *Locks::dex_lock_);
    // Count the number of dex caches not in the boot image.
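    // (The recount under the second lock acquisition below is validated with a CHECK_EQ; a
    // mismatch would mean a dex cache was registered or unregistered between the two critical
    // sections.)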
    for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
      ObjPtr<mirror::DexCache> dex_cache =
          ObjPtr<mirror::DexCache>::DownCast(self->DecodeJObject(data.weak_root));
      if (dex_cache == nullptr) {
        continue;
      }
      const DexFile* dex_file = dex_cache->GetDexFile();
      if (!IsInBootImage(dex_cache.Ptr())) {
        dex_cache_count += image_dex_files.find(dex_file) != image_dex_files.end() ? 1u : 0u;
      }
    }
  }
  Handle<ObjectArray<Object>> dex_caches(
      hs.NewHandle(ObjectArray<Object>::Alloc(self, object_array_class.Get(), dex_cache_count)));
  CHECK(dex_caches != nullptr) << "Failed to allocate a dex cache array.";
  {
    ReaderMutexLock mu(self, *Locks::dex_lock_);
    size_t non_image_dex_caches = 0;
    // Re-count the number of non-image dex caches.
    for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
      ObjPtr<mirror::DexCache> dex_cache =
          ObjPtr<mirror::DexCache>::DownCast(self->DecodeJObject(data.weak_root));
      if (dex_cache == nullptr) {
        continue;
      }
      const DexFile* dex_file = dex_cache->GetDexFile();
      if (!IsInBootImage(dex_cache.Ptr())) {
        non_image_dex_caches += image_dex_files.find(dex_file) != image_dex_files.end() ? 1u : 0u;
      }
    }
    CHECK_EQ(dex_cache_count, non_image_dex_caches)
        << "The number of non-image dex caches changed.";
    size_t i = 0;
    for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
      ObjPtr<mirror::DexCache> dex_cache =
          ObjPtr<mirror::DexCache>::DownCast(self->DecodeJObject(data.weak_root));
      if (dex_cache == nullptr) {
        continue;
      }
      const DexFile* dex_file = dex_cache->GetDexFile();
      if (!IsInBootImage(dex_cache.Ptr()) &&
          image_dex_files.find(dex_file) != image_dex_files.end()) {
        dex_caches->Set<false>(i, dex_cache.Ptr());
        ++i;
      }
    }
  }

  // Build an Object[] of the roots needed to restore the runtime.
  int32_t image_roots_size = ImageHeader::NumberOfImageRoots(compile_app_image_);
  auto image_roots(hs.NewHandle(
      ObjectArray<Object>::Alloc(self, object_array_class.Get(), image_roots_size)));
  image_roots->Set<false>(ImageHeader::kDexCaches, dex_caches.Get());
  image_roots->Set<false>(ImageHeader::kClassRoots, class_linker->GetClassRoots());
  // image_roots[ImageHeader::kClassLoader] will be set later for app images.
  static_assert(ImageHeader::kClassLoader + 1u == ImageHeader::kImageRootsMax,
                "Class loader should be the last image root.");
  for (int32_t i = 0; i < ImageHeader::kImageRootsMax - 1; ++i) {
    CHECK(image_roots->Get(i) != nullptr);
  }
  return image_roots.Get();
}

mirror::Object* ImageWriter::TryAssignBinSlot(WorkStack& work_stack,
                                              mirror::Object* obj,
                                              size_t oat_index) {
  if (obj == nullptr || IsInBootImage(obj)) {
    // Object is null or already in the image, there is no work to do.
    return obj;
  }
  if (!IsImageBinSlotAssigned(obj)) {
    // We want to intern all strings but also assign offsets for the source string. Since the
    // pruning phase has already happened, if we intern a string to one in the image we still
    // end up copying an unreachable string.
    if (obj->IsString()) {
      // Need to check if the string is already interned in another image info so that we don't
      // end up with the intern tables of two different images containing the same string.
      mirror::String* interned = FindInternedString(obj->AsString());
      if (interned == nullptr) {
        // Not in another image space, insert into our table.
        interned =
            GetImageInfo(oat_index).intern_table_->InternStrongImageString(obj->AsString()).Ptr();
        DCHECK_EQ(interned, obj);
      }
    } else if (obj->IsDexCache()) {
      oat_index = GetOatIndexForDexCache(obj->AsDexCache());
    } else if (obj->IsClass()) {
      // Visit and assign offsets for fields and field arrays.
      mirror::Class* as_klass = obj->AsClass();
      mirror::DexCache* dex_cache = as_klass->GetDexCache();
      DCHECK(!as_klass->IsErroneous()) << as_klass->GetStatus();
      if (compile_app_image_) {
        // Extra sanity check: no boot loader classes should be left!
        CHECK(!IsBootClassLoaderClass(as_klass)) << as_klass->PrettyClass();
      }
      LengthPrefixedArray<ArtField>* fields[] = {
          as_klass->GetSFieldsPtr(), as_klass->GetIFieldsPtr(),
      };
      // Overwrite the oat index value since the class's dex cache is more accurate about where
      // it belongs.
      oat_index = GetOatIndexForDexCache(dex_cache);
      ImageInfo& image_info = GetImageInfo(oat_index);
      if (!compile_app_image_) {
        // Note: Avoid locking to prevent lock order violations from root visiting;
        // image_info.class_table_ is only accessed from the image writer.
        image_info.class_table_->InsertWithoutLocks(as_klass);
      }
      for (LengthPrefixedArray<ArtField>* cur_fields : fields) {
        // Total array length including header.
        if (cur_fields != nullptr) {
          const size_t header_size = LengthPrefixedArray<ArtField>::ComputeSize(0);
          // Forward the entire array at once.
          auto it = native_object_relocations_.find(cur_fields);
          CHECK(it == native_object_relocations_.end()) << "Field array " << cur_fields
                                                        << " already forwarded";
          size_t& offset = image_info.bin_slot_sizes_[kBinArtField];
          DCHECK(!IsInBootImage(cur_fields));
          native_object_relocations_.emplace(
              cur_fields,
              NativeObjectRelocation {
                  oat_index, offset, kNativeObjectRelocationTypeArtFieldArray
              });
          offset += header_size;
          // Forward individual fields so that we can quickly find where they belong.
          for (size_t i = 0, count = cur_fields->size(); i < count; ++i) {
            // Need to forward arrays separately from fields.
            ArtField* field = &cur_fields->At(i);
            auto it2 = native_object_relocations_.find(field);
            CHECK(it2 == native_object_relocations_.end()) << "Field at index=" << i
                << " already assigned " << field->PrettyField() << " static=" << field->IsStatic();
            DCHECK(!IsInBootImage(field));
            native_object_relocations_.emplace(
                field,
                NativeObjectRelocation { oat_index, offset, kNativeObjectRelocationTypeArtField });
            offset += sizeof(ArtField);
          }
        }
      }
      // Visit and assign offsets for methods.
      size_t num_methods = as_klass->NumMethods();
      if (num_methods != 0) {
        bool any_dirty = false;
        for (auto& m : as_klass->GetMethods(target_ptr_size_)) {
          if (WillMethodBeDirty(&m)) {
            any_dirty = true;
            break;
          }
        }
        NativeObjectRelocationType type = any_dirty
            ? kNativeObjectRelocationTypeArtMethodDirty
            : kNativeObjectRelocationTypeArtMethodClean;
        Bin bin_type = BinTypeForNativeRelocationType(type);
        // Forward the entire array at once, but the header first.
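        // (A LengthPrefixedArray stores a length header followed by the elements;
        // ComputeSize(0, method_size, method_alignment) below yields just the header size,
        // padded so that the first ArtMethod is suitably aligned.)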
1442 const size_t method_alignment = ArtMethod::Alignment(target_ptr_size_); 1443 const size_t method_size = ArtMethod::Size(target_ptr_size_); 1444 const size_t header_size = LengthPrefixedArray<ArtMethod>::ComputeSize(0, 1445 method_size, 1446 method_alignment); 1447 LengthPrefixedArray<ArtMethod>* array = as_klass->GetMethodsPtr(); 1448 auto it = native_object_relocations_.find(array); 1449 CHECK(it == native_object_relocations_.end()) 1450 << "Method array " << array << " already forwarded"; 1451 size_t& offset = image_info.bin_slot_sizes_[bin_type]; 1452 DCHECK(!IsInBootImage(array)); 1453 native_object_relocations_.emplace(array, 1454 NativeObjectRelocation { 1455 oat_index, 1456 offset, 1457 any_dirty ? kNativeObjectRelocationTypeArtMethodArrayDirty 1458 : kNativeObjectRelocationTypeArtMethodArrayClean }); 1459 offset += header_size; 1460 for (auto& m : as_klass->GetMethods(target_ptr_size_)) { 1461 AssignMethodOffset(&m, type, oat_index); 1462 } 1463 (any_dirty ? dirty_methods_ : clean_methods_) += num_methods; 1464 } 1465 // Assign offsets for all runtime methods in the IMT since these may hold conflict tables 1466 // live. 1467 if (as_klass->ShouldHaveImt()) { 1468 ImTable* imt = as_klass->GetImt(target_ptr_size_); 1469 if (TryAssignImTableOffset(imt, oat_index)) { 1470 // Since imt's can be shared only do this the first time to not double count imt method 1471 // fixups. 1472 for (size_t i = 0; i < ImTable::kSize; ++i) { 1473 ArtMethod* imt_method = imt->Get(i, target_ptr_size_); 1474 DCHECK(imt_method != nullptr); 1475 if (imt_method->IsRuntimeMethod() && 1476 !IsInBootImage(imt_method) && 1477 !NativeRelocationAssigned(imt_method)) { 1478 AssignMethodOffset(imt_method, kNativeObjectRelocationTypeRuntimeMethod, oat_index); 1479 } 1480 } 1481 } 1482 } 1483 } else if (obj->IsClassLoader()) { 1484 // Register the class loader if it has a class table. 1485 // The fake boot class loader should not get registered and we should end up with only one 1486 // class loader. 1487 mirror::ClassLoader* class_loader = obj->AsClassLoader(); 1488 if (class_loader->GetClassTable() != nullptr) { 1489 DCHECK(compile_app_image_); 1490 DCHECK(class_loaders_.empty()); 1491 class_loaders_.insert(class_loader); 1492 ImageInfo& image_info = GetImageInfo(oat_index); 1493 // Note: Avoid locking to prevent lock order violations from root visiting; 1494 // image_info.class_table_ table is only accessed from the image writer 1495 // and class_loader->GetClassTable() is iterated but not modified. 1496 image_info.class_table_->CopyWithoutLocks(*class_loader->GetClassTable()); 1497 } 1498 } 1499 AssignImageBinSlot(obj, oat_index); 1500 work_stack.emplace(obj, oat_index); 1501 } 1502 if (obj->IsString()) { 1503 // Always return the interned string if there exists one. 1504 mirror::String* interned = FindInternedString(obj->AsString()); 1505 if (interned != nullptr) { 1506 return interned; 1507 } 1508 } 1509 return obj; 1510 } 1511 1512 bool ImageWriter::NativeRelocationAssigned(void* ptr) const { 1513 return native_object_relocations_.find(ptr) != native_object_relocations_.end(); 1514 } 1515 1516 bool ImageWriter::TryAssignImTableOffset(ImTable* imt, size_t oat_index) { 1517 // No offset, or already assigned. 1518 if (imt == nullptr || IsInBootImage(imt) || NativeRelocationAssigned(imt)) { 1519 return false; 1520 } 1521 // If the method is a conflict method we also want to assign the conflict table offset. 
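// Illustrative sketch, not part of the original file: the field and method
// arrays above are forwarded header-first, then element by element at a fixed
// stride, so that both the array as a whole and each entry can later be looked
// up in native_object_relocations_. A standalone version of that bookkeeping
// with hypothetical names (Relocation/ForwardArray are not ART types), assuming
// std::unordered_map is reachable through the headers included above:
namespace bin_forwarding_example {
struct Relocation {
  size_t offset;  // Offset of the copy within its bin.
};
inline size_t ForwardArray(std::unordered_map<const void*, Relocation>* relocations,
                           size_t* bin_size,
                           const uint8_t* array,
                           size_t header_size,
                           size_t element_size,
                           size_t element_count) {
  const size_t array_offset = *bin_size;
  relocations->emplace(array, Relocation{array_offset});  // The array as a whole.
  *bin_size += header_size;                               // Header comes first.
  for (size_t i = 0; i != element_count; ++i) {
    const uint8_t* element = array + header_size + i * element_size;
    relocations->emplace(element, Relocation{*bin_size});  // One entry per element.
    *bin_size += element_size;
  }
  return array_offset;
}
}  // namespace bin_forwarding_example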
bool ImageWriter::TryAssignImTableOffset(ImTable* imt, size_t oat_index) {
  // No offset, or already assigned.
  if (imt == nullptr || IsInBootImage(imt) || NativeRelocationAssigned(imt)) {
    return false;
  }
  // Reserve space for the ImTable in the kBinImTable bin and record the relocation.
  ImageInfo& image_info = GetImageInfo(oat_index);
  const size_t size = ImTable::SizeInBytes(target_ptr_size_);
  native_object_relocations_.emplace(
      imt,
      NativeObjectRelocation {
          oat_index,
          image_info.bin_slot_sizes_[kBinImTable],
          kNativeObjectRelocationTypeIMTable});
  image_info.bin_slot_sizes_[kBinImTable] += size;
  return true;
}

void ImageWriter::TryAssignConflictTableOffset(ImtConflictTable* table, size_t oat_index) {
  // No offset, or already assigned.
  if (table == nullptr || NativeRelocationAssigned(table)) {
    return;
  }
  CHECK(!IsInBootImage(table));
  // Reserve space for the conflict table in its bin and record the relocation.
  ImageInfo& image_info = GetImageInfo(oat_index);
  const size_t size = table->ComputeSize(target_ptr_size_);
  native_object_relocations_.emplace(
      table,
      NativeObjectRelocation {
          oat_index,
          image_info.bin_slot_sizes_[kBinIMTConflictTable],
          kNativeObjectRelocationTypeIMTConflictTable});
  image_info.bin_slot_sizes_[kBinIMTConflictTable] += size;
}

void ImageWriter::AssignMethodOffset(ArtMethod* method,
                                     NativeObjectRelocationType type,
                                     size_t oat_index) {
  DCHECK(!IsInBootImage(method));
  CHECK(!NativeRelocationAssigned(method)) << "Method " << method << " already assigned "
                                           << ArtMethod::PrettyMethod(method);
  // If the method is a conflict method we also want to assign the conflict table offset.
  if (method->IsRuntimeMethod()) {
    TryAssignConflictTableOffset(method->GetImtConflictTable(target_ptr_size_), oat_index);
  }
  ImageInfo& image_info = GetImageInfo(oat_index);
  size_t& offset = image_info.bin_slot_sizes_[BinTypeForNativeRelocationType(type)];
  native_object_relocations_.emplace(method, NativeObjectRelocation { oat_index, offset, type });
  offset += ArtMethod::Size(target_ptr_size_);
}

void ImageWriter::UnbinObjectsIntoOffset(mirror::Object* obj) {
  DCHECK(!IsInBootImage(obj));
  CHECK(obj != nullptr);

  // We know the bin slot, and the total bin sizes for all objects by now,
  // so calculate the object's final image offset.

  DCHECK(IsImageBinSlotAssigned(obj));
  BinSlot bin_slot = GetImageBinSlot(obj);
  // Change the lockword from a bin slot into an offset.
  AssignImageOffset(obj, bin_slot);
}
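// Illustrative sketch, not part of the original file: UnbinObjectsIntoOffset
// relies on bins being laid out back to back, so an object's final offset is
// its bin's start plus its index within the bin, where the bin starts form an
// exclusive prefix sum over the bin sizes. (The real layout loop in
// CalculateNewObjectOffsets below additionally rounds some bins up to their
// alignment.) Hypothetical standalone version:
namespace bin_offset_example {
inline std::vector<size_t> ComputeBinStarts(const std::vector<size_t>& bin_sizes,
                                            size_t objects_begin) {
  std::vector<size_t> starts(bin_sizes.size(), 0u);
  size_t running = objects_begin;
  for (size_t i = 0; i != bin_sizes.size(); ++i) {
    starts[i] = running;      // Exclusive prefix sum: bin i starts here.
    running += bin_sizes[i];  // The next bin starts right after it.
  }
  return starts;
}
inline size_t FinalOffset(const std::vector<size_t>& bin_starts,
                          size_t bin,
                          size_t index_in_bin) {
  return bin_starts[bin] + index_in_bin;
}
}  // namespace bin_offset_example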
class ImageWriter::VisitReferencesVisitor {
 public:
  VisitReferencesVisitor(ImageWriter* image_writer, WorkStack* work_stack, size_t oat_index)
      : image_writer_(image_writer), work_stack_(work_stack), oat_index_(oat_index) {}

  // Fix up separately since we also need to fix up method entrypoints.
  ALWAYS_INLINE void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (!root->IsNull()) {
      VisitRoot(root);
    }
  }

  ALWAYS_INLINE void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    root->Assign(VisitReference(root->AsMirrorPtr()));
  }

  ALWAYS_INLINE void operator() (ObjPtr<mirror::Object> obj,
                                 MemberOffset offset,
                                 bool is_static ATTRIBUTE_UNUSED) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    mirror::Object* ref =
        obj->GetFieldObject<mirror::Object, kVerifyNone, kWithoutReadBarrier>(offset);
    obj->SetFieldObject</*kTransactionActive*/false>(offset, VisitReference(ref));
  }

  ALWAYS_INLINE void operator() (ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
                                 ObjPtr<mirror::Reference> ref) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    operator()(ref, mirror::Reference::ReferentOffset(), /* is_static */ false);
  }

 private:
  mirror::Object* VisitReference(mirror::Object* ref) const REQUIRES_SHARED(Locks::mutator_lock_) {
    return image_writer_->TryAssignBinSlot(*work_stack_, ref, oat_index_);
  }

  ImageWriter* const image_writer_;
  WorkStack* const work_stack_;
  const size_t oat_index_;
};

class ImageWriter::GetRootsVisitor : public RootVisitor {
 public:
  explicit GetRootsVisitor(std::vector<mirror::Object*>* roots) : roots_(roots) {}

  void VisitRoots(mirror::Object*** roots,
                  size_t count,
                  const RootInfo& info ATTRIBUTE_UNUSED) OVERRIDE
      REQUIRES_SHARED(Locks::mutator_lock_) {
    for (size_t i = 0; i < count; ++i) {
      roots_->push_back(*roots[i]);
    }
  }

  void VisitRoots(mirror::CompressedReference<mirror::Object>** roots,
                  size_t count,
                  const RootInfo& info ATTRIBUTE_UNUSED) OVERRIDE
      REQUIRES_SHARED(Locks::mutator_lock_) {
    for (size_t i = 0; i < count; ++i) {
      roots_->push_back(roots[i]->AsMirrorPtr());
    }
  }

 private:
  std::vector<mirror::Object*>* const roots_;
};

void ImageWriter::ProcessWorkStack(WorkStack* work_stack) {
  while (!work_stack->empty()) {
    std::pair<mirror::Object*, size_t> pair(work_stack->top());
    work_stack->pop();
    VisitReferencesVisitor visitor(this, work_stack, /*oat_index*/ pair.second);
    // Walk references and assign bin slots for them.
    pair.first->VisitReferences</*kVisitNativeRoots*/true, kVerifyNone, kWithoutReadBarrier>(
        visitor,
        visitor);
  }
}
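// Illustrative sketch, not part of the original file: ProcessWorkStack above is
// a depth-first traversal with an explicit stack; visiting an object may push
// newly discovered references, and the loop runs until the stack drains. The
// same shape on a hypothetical graph node type:
namespace work_stack_example {
struct Node {
  std::vector<Node*> out_edges;
  bool visited = false;  // Plays the role of "bin slot already assigned".
};
inline void Traverse(Node* root) {
  std::vector<Node*> work_stack;
  work_stack.push_back(root);
  while (!work_stack.empty()) {
    Node* node = work_stack.back();
    work_stack.pop_back();
    if (node == nullptr || node->visited) {
      continue;  // Mirrors the null/already-assigned early-out in TryAssignBinSlot.
    }
    node->visited = true;
    for (Node* next : node->out_edges) {
      work_stack.push_back(next);  // Discovered references go back on the stack.
    }
  }
}
}  // namespace work_stack_example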
void ImageWriter::CalculateNewObjectOffsets() {
  Thread* const self = Thread::Current();
  VariableSizedHandleScope handles(self);
  std::vector<Handle<ObjectArray<Object>>> image_roots;
  for (size_t i = 0, size = oat_filenames_.size(); i != size; ++i) {
    image_roots.push_back(handles.NewHandle(CreateImageRoots(i)));
  }

  Runtime* const runtime = Runtime::Current();
  gc::Heap* const heap = runtime->GetHeap();

  // Leave space for the header, but do not write it yet, we need to
  // know where image_roots is going to end up.
  image_objects_offset_begin_ = RoundUp(sizeof(ImageHeader), kObjectAlignment);  // 64-bit-alignment

  const size_t method_alignment = ArtMethod::Alignment(target_ptr_size_);
  // Write the image runtime methods.
  image_methods_[ImageHeader::kResolutionMethod] = runtime->GetResolutionMethod();
  image_methods_[ImageHeader::kImtConflictMethod] = runtime->GetImtConflictMethod();
  image_methods_[ImageHeader::kImtUnimplementedMethod] = runtime->GetImtUnimplementedMethod();
  image_methods_[ImageHeader::kSaveAllCalleeSavesMethod] =
      runtime->GetCalleeSaveMethod(CalleeSaveType::kSaveAllCalleeSaves);
  image_methods_[ImageHeader::kSaveRefsOnlyMethod] =
      runtime->GetCalleeSaveMethod(CalleeSaveType::kSaveRefsOnly);
  image_methods_[ImageHeader::kSaveRefsAndArgsMethod] =
      runtime->GetCalleeSaveMethod(CalleeSaveType::kSaveRefsAndArgs);
  image_methods_[ImageHeader::kSaveEverythingMethod] =
      runtime->GetCalleeSaveMethod(CalleeSaveType::kSaveEverything);
  // Visit image methods first to have the main runtime methods in the first image.
  for (auto* m : image_methods_) {
    CHECK(m != nullptr);
    CHECK(m->IsRuntimeMethod());
    DCHECK_EQ(compile_app_image_, IsInBootImage(m)) << "Trampolines should be in boot image";
    if (!IsInBootImage(m)) {
      AssignMethodOffset(m, kNativeObjectRelocationTypeRuntimeMethod, GetDefaultOatIndex());
    }
  }

  // Deflate monitors before we visit roots since deflating acquires the monitor lock. Acquiring
  // this lock while holding other locks may cause lock order violations.
  {
    auto deflate_monitor = [](mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
      Monitor::Deflate(Thread::Current(), obj);
    };
    heap->VisitObjects(deflate_monitor);
  }

  // Work list of <object, oat_index> for objects. Everything on the stack must already be
  // assigned a bin slot.
  WorkStack work_stack;

  // Special case interned strings to put them in the image they are likely to be resolved from.
  for (const DexFile* dex_file : compiler_driver_.GetDexFilesForOatFile()) {
    auto it = dex_file_oat_index_map_.find(dex_file);
    DCHECK(it != dex_file_oat_index_map_.end()) << dex_file->GetLocation();
    const size_t oat_index = it->second;
    InternTable* const intern_table = runtime->GetInternTable();
    for (size_t i = 0, count = dex_file->NumStringIds(); i < count; ++i) {
      uint32_t utf16_length;
      const char* utf8_data = dex_file->StringDataAndUtf16LengthByIdx(dex::StringIndex(i),
                                                                      &utf16_length);
      mirror::String* string = intern_table->LookupStrong(self, utf16_length, utf8_data).Ptr();
      TryAssignBinSlot(work_stack, string, oat_index);
    }
  }

  // Get the GC roots and then visit them separately to avoid lock violations since the root
  // visitor visits roots while holding various locks.
  {
    std::vector<mirror::Object*> roots;
    GetRootsVisitor root_visitor(&roots);
    runtime->VisitRoots(&root_visitor);
    for (mirror::Object* obj : roots) {
      TryAssignBinSlot(work_stack, obj, GetDefaultOatIndex());
    }
  }
  ProcessWorkStack(&work_stack);

  // For app images, there may be objects that are only held live by the boot image. One
  // example is finalizer references. Forward these objects so that EnsureBinSlotAssignedCallback
  // does not fail any checks. TODO: We should probably avoid copying these objects.
  if (compile_app_image_) {
    for (gc::space::ImageSpace* space : heap->GetBootImageSpaces()) {
      DCHECK(space->IsImageSpace());
      gc::accounting::ContinuousSpaceBitmap* live_bitmap = space->GetLiveBitmap();
      live_bitmap->VisitMarkedRange(reinterpret_cast<uintptr_t>(space->Begin()),
                                    reinterpret_cast<uintptr_t>(space->Limit()),
                                    [this, &work_stack](mirror::Object* obj)
          REQUIRES_SHARED(Locks::mutator_lock_) {
        VisitReferencesVisitor visitor(this, &work_stack, GetDefaultOatIndex());
        // Visit all references and try to assign bin slots for them (calls TryAssignBinSlot).
        obj->VisitReferences</*kVisitNativeRoots*/true, kVerifyNone, kWithoutReadBarrier>(
            visitor,
            visitor);
      });
    }
    // Process the work stack in case anything was added by TryAssignBinSlot.
    ProcessWorkStack(&work_stack);

    // Store the class loader in the class roots.
    CHECK_EQ(class_loaders_.size(), 1u);
    CHECK_EQ(image_roots.size(), 1u);
    CHECK(*class_loaders_.begin() != nullptr);
    image_roots[0]->Set<false>(ImageHeader::kClassLoader, *class_loaders_.begin());
  }

  // Verify that all objects have assigned image bin slots.
  {
    auto ensure_bin_slots_assigned = [&](mirror::Object* obj)
        REQUIRES_SHARED(Locks::mutator_lock_) {
      if (!Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(obj)) {
        CHECK(IsImageBinSlotAssigned(obj)) << mirror::Object::PrettyTypeOf(obj) << " " << obj;
      }
    };
    heap->VisitObjects(ensure_bin_slots_assigned);
  }

  // Calculate size of the dex cache arrays slot and prepare offsets.
  PrepareDexCacheArraySlots();

  // Calculate the sizes of the intern tables, class tables, and fixup tables.
  for (ImageInfo& image_info : image_infos_) {
    // Calculate how big the intern table will be after being serialized.
    InternTable* const intern_table = image_info.intern_table_.get();
    CHECK_EQ(intern_table->WeakSize(), 0u) << " should have strong interned all the strings";
    if (intern_table->StrongSize() != 0u) {
      image_info.intern_table_bytes_ = intern_table->WriteToMemory(nullptr);
    }

    // Calculate the size of the class table.
    ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
    DCHECK_EQ(image_info.class_table_->NumReferencedZygoteClasses(), 0u);
    if (image_info.class_table_->NumReferencedNonZygoteClasses() != 0u) {
      image_info.class_table_bytes_ += image_info.class_table_->WriteToMemory(nullptr);
    }
  }

  // Calculate bin slot offsets.
  for (ImageInfo& image_info : image_infos_) {
    size_t bin_offset = image_objects_offset_begin_;
    for (size_t i = 0; i != kBinSize; ++i) {
      switch (i) {
        case kBinArtMethodClean:
        case kBinArtMethodDirty: {
          bin_offset = RoundUp(bin_offset, method_alignment);
          break;
        }
        case kBinDexCacheArray:
          bin_offset = RoundUp(bin_offset, DexCacheArraysLayout::Alignment(target_ptr_size_));
          break;
        case kBinImTable:
        case kBinIMTConflictTable: {
          bin_offset = RoundUp(bin_offset, static_cast<size_t>(target_ptr_size_));
          break;
        }
        default: {
          // Normal alignment.
        }
      }
      image_info.bin_slot_offsets_[i] = bin_offset;
      bin_offset += image_info.bin_slot_sizes_[i];
    }
    // NOTE: There may be additional padding between the bin slots and the intern table.
    DCHECK_EQ(image_info.image_end_,
              GetBinSizeSum(image_info, kBinMirrorCount) + image_objects_offset_begin_);
  }

  // Calculate image offsets.
  size_t image_offset = 0;
  for (ImageInfo& image_info : image_infos_) {
    image_info.image_begin_ = global_image_begin_ + image_offset;
    image_info.image_offset_ = image_offset;
    ImageSection unused_sections[ImageHeader::kSectionCount];
    image_info.image_size_ = RoundUp(image_info.CreateImageSections(unused_sections), kPageSize);
    // There should be no gaps until the next image.
    image_offset += image_info.image_size_;
  }

  // Transform each object's bin slot into an offset which will be used to do the final copy.
  {
    auto unbin_objects_into_offset = [&](mirror::Object* obj)
        REQUIRES_SHARED(Locks::mutator_lock_) {
      if (!IsInBootImage(obj)) {
        UnbinObjectsIntoOffset(obj);
      }
    };
    heap->VisitObjects(unbin_objects_into_offset);
  }

  size_t i = 0;
  for (ImageInfo& image_info : image_infos_) {
    image_info.image_roots_address_ = PointerToLowMemUInt32(GetImageAddress(image_roots[i].Get()));
    i++;
  }

  // Update the native relocations by adding their bin sums.
  for (auto& pair : native_object_relocations_) {
    NativeObjectRelocation& relocation = pair.second;
    Bin bin_type = BinTypeForNativeRelocationType(relocation.type);
    ImageInfo& image_info = GetImageInfo(relocation.oat_index);
    relocation.offset += image_info.bin_slot_offsets_[bin_type];
  }
}

size_t ImageWriter::ImageInfo::CreateImageSections(ImageSection* out_sections) const {
  DCHECK(out_sections != nullptr);

  // Do not round up any sections here that are represented by the bins since it will break
  // offsets.

  // Objects section.
  ImageSection* objects_section = &out_sections[ImageHeader::kSectionObjects];
  *objects_section = ImageSection(0u, image_end_);

  // Add field section.
  ImageSection* field_section = &out_sections[ImageHeader::kSectionArtFields];
  *field_section = ImageSection(bin_slot_offsets_[kBinArtField], bin_slot_sizes_[kBinArtField]);
  CHECK_EQ(bin_slot_offsets_[kBinArtField], field_section->Offset());

  // Add method section.
  ImageSection* methods_section = &out_sections[ImageHeader::kSectionArtMethods];
  *methods_section = ImageSection(
      bin_slot_offsets_[kBinArtMethodClean],
      bin_slot_sizes_[kBinArtMethodClean] + bin_slot_sizes_[kBinArtMethodDirty]);

  // IMT section.
  ImageSection* imt_section = &out_sections[ImageHeader::kSectionImTables];
  *imt_section = ImageSection(bin_slot_offsets_[kBinImTable], bin_slot_sizes_[kBinImTable]);

  // Conflict tables section.
  ImageSection* imt_conflict_tables_section = &out_sections[ImageHeader::kSectionIMTConflictTables];
  *imt_conflict_tables_section = ImageSection(bin_slot_offsets_[kBinIMTConflictTable],
                                              bin_slot_sizes_[kBinIMTConflictTable]);

  // Runtime methods section.
  ImageSection* runtime_methods_section = &out_sections[ImageHeader::kSectionRuntimeMethods];
  *runtime_methods_section = ImageSection(bin_slot_offsets_[kBinRuntimeMethod],
                                          bin_slot_sizes_[kBinRuntimeMethod]);

  // Add dex cache arrays section.
  ImageSection* dex_cache_arrays_section = &out_sections[ImageHeader::kSectionDexCacheArrays];
  *dex_cache_arrays_section = ImageSection(bin_slot_offsets_[kBinDexCacheArray],
                                           bin_slot_sizes_[kBinDexCacheArray]);
  // Round up to the alignment the string table expects. See HashSet::WriteToMemory.
  size_t cur_pos = RoundUp(dex_cache_arrays_section->End(), sizeof(uint64_t));
  // Calculate the size of the interned strings.
  ImageSection* interned_strings_section = &out_sections[ImageHeader::kSectionInternedStrings];
  *interned_strings_section = ImageSection(cur_pos, intern_table_bytes_);
  cur_pos = interned_strings_section->End();
  // Round up to the alignment the class table expects. See HashSet::WriteToMemory.
  cur_pos = RoundUp(cur_pos, sizeof(uint64_t));
  // Calculate the size of the class table section.
  ImageSection* class_table_section = &out_sections[ImageHeader::kSectionClassTable];
  *class_table_section = ImageSection(cur_pos, class_table_bytes_);
  cur_pos = class_table_section->End();
  // Image end goes right before the start of the image bitmap.
  return cur_pos;
}
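// Illustrative sketch, not part of the original file: the sections that follow
// the bins are placed by rounding a running position up to each section's
// required alignment, which is the cur_pos/RoundUp pattern in
// CreateImageSections above. Hypothetical standalone version (RoundUp is the
// same helper used throughout this file; DemoSection is not an ART type):
namespace section_layout_example {
struct DemoSection {
  size_t offset;
  size_t size;
};
inline size_t PlaceSection(size_t cur_pos, size_t alignment, size_t size, DemoSection* out) {
  cur_pos = RoundUp(cur_pos, alignment);  // Skip padding up to the alignment.
  out->offset = cur_pos;
  out->size = size;
  return cur_pos + size;  // The new running position, i.e. the section's end.
}
}  // namespace section_layout_example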
void ImageWriter::CreateHeader(size_t oat_index) {
  ImageInfo& image_info = GetImageInfo(oat_index);
  const uint8_t* oat_file_begin = image_info.oat_file_begin_;
  const uint8_t* oat_file_end = oat_file_begin + image_info.oat_loaded_size_;
  const uint8_t* oat_data_end = image_info.oat_data_begin_ + image_info.oat_size_;

  // Create the image sections.
  ImageSection sections[ImageHeader::kSectionCount];
  const size_t image_end = image_info.CreateImageSections(sections);

  // Finally bitmap section.
  const size_t bitmap_bytes = image_info.image_bitmap_->Size();
  auto* bitmap_section = &sections[ImageHeader::kSectionImageBitmap];
  *bitmap_section = ImageSection(RoundUp(image_end, kPageSize), RoundUp(bitmap_bytes, kPageSize));
  if (VLOG_IS_ON(compiler)) {
    LOG(INFO) << "Creating header for " << oat_filenames_[oat_index];
    size_t idx = 0;
    for (const ImageSection& section : sections) {
      LOG(INFO) << static_cast<ImageHeader::ImageSections>(idx) << " " << section;
      ++idx;
    }
    LOG(INFO) << "Methods: clean=" << clean_methods_ << " dirty=" << dirty_methods_;
    LOG(INFO) << "Image roots address=" << std::hex << image_info.image_roots_address_ << std::dec;
    LOG(INFO) << "Image begin=" << std::hex << reinterpret_cast<uintptr_t>(global_image_begin_)
              << " Image offset=" << image_info.image_offset_ << std::dec;
    LOG(INFO) << "Oat file begin=" << std::hex << reinterpret_cast<uintptr_t>(oat_file_begin)
              << " Oat data begin=" << reinterpret_cast<uintptr_t>(image_info.oat_data_begin_)
              << " Oat data end=" << reinterpret_cast<uintptr_t>(oat_data_end)
              << " Oat file end=" << reinterpret_cast<uintptr_t>(oat_file_end);
  }
  // Store boot image info for app image so that we can relocate.
  uint32_t boot_image_begin = 0;
  uint32_t boot_image_end = 0;
  uint32_t boot_oat_begin = 0;
  uint32_t boot_oat_end = 0;
  gc::Heap* const heap = Runtime::Current()->GetHeap();
  heap->GetBootImagesSize(&boot_image_begin, &boot_image_end, &boot_oat_begin, &boot_oat_end);

  // Create the header, leave 0 for data size since we will fill this in as we are writing the
  // image.
  new (image_info.image_->Begin()) ImageHeader(PointerToLowMemUInt32(image_info.image_begin_),
                                               image_end,
                                               sections,
                                               image_info.image_roots_address_,
                                               image_info.oat_checksum_,
                                               PointerToLowMemUInt32(oat_file_begin),
                                               PointerToLowMemUInt32(image_info.oat_data_begin_),
                                               PointerToLowMemUInt32(oat_data_end),
                                               PointerToLowMemUInt32(oat_file_end),
                                               boot_image_begin,
                                               boot_image_end - boot_image_begin,
                                               boot_oat_begin,
                                               boot_oat_end - boot_oat_begin,
                                               static_cast<uint32_t>(target_ptr_size_),
                                               compile_pic_,
                                               /*is_pic*/compile_app_image_,
                                               image_storage_mode_,
                                               /*data_size*/0u);
}

ArtMethod* ImageWriter::GetImageMethodAddress(ArtMethod* method) {
  auto it = native_object_relocations_.find(method);
  CHECK(it != native_object_relocations_.end()) << ArtMethod::PrettyMethod(method) << " @ "
                                                << method;
  size_t oat_index = GetOatIndex(method->GetDexCache());
  ImageInfo& image_info = GetImageInfo(oat_index);
  CHECK_GE(it->second.offset, image_info.image_end_) << "ArtMethods should be after Objects";
  return reinterpret_cast<ArtMethod*>(image_info.image_begin_ + it->second.offset);
}

class ImageWriter::FixupRootVisitor : public RootVisitor {
 public:
  explicit FixupRootVisitor(ImageWriter* image_writer) : image_writer_(image_writer) {
  }

  void VisitRoots(mirror::Object*** roots ATTRIBUTE_UNUSED,
                  size_t count ATTRIBUTE_UNUSED,
                  const RootInfo& info ATTRIBUTE_UNUSED)
      OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
    LOG(FATAL) << "Unsupported";
  }

  void VisitRoots(mirror::CompressedReference<mirror::Object>** roots, size_t count,
                  const RootInfo& info ATTRIBUTE_UNUSED)
      OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
    for (size_t i = 0; i < count; ++i) {
      image_writer_->CopyReference(roots[i], roots[i]->AsMirrorPtr());
    }
  }

 private:
  ImageWriter* const image_writer_;
};

void ImageWriter::CopyAndFixupImTable(ImTable* orig, ImTable* copy) {
  for (size_t i = 0; i < ImTable::kSize; ++i) {
    ArtMethod* method = orig->Get(i, target_ptr_size_);
    void** address = reinterpret_cast<void**>(copy->AddressOfElement(i, target_ptr_size_));
    CopyAndFixupPointer(address, method);
    DCHECK_EQ(copy->Get(i, target_ptr_size_), NativeLocationInImage(method));
  }
}

void ImageWriter::CopyAndFixupImtConflictTable(ImtConflictTable* orig, ImtConflictTable* copy) {
  const size_t count = orig->NumEntries(target_ptr_size_);
  for (size_t i = 0; i < count; ++i) {
    ArtMethod* interface_method = orig->GetInterfaceMethod(i, target_ptr_size_);
    ArtMethod* implementation_method = orig->GetImplementationMethod(i, target_ptr_size_);
    CopyAndFixupPointer(copy->AddressOfInterfaceMethod(i, target_ptr_size_), interface_method);
    CopyAndFixupPointer(copy->AddressOfImplementationMethod(i, target_ptr_size_),
                        implementation_method);
    DCHECK_EQ(copy->GetInterfaceMethod(i, target_ptr_size_),
              NativeLocationInImage(interface_method));
    DCHECK_EQ(copy->GetImplementationMethod(i, target_ptr_size_),
              NativeLocationInImage(implementation_method));
  }
}
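// Illustrative sketch, not part of the original file: CreateHeader above writes
// the ImageHeader directly into the first bytes of the image buffer with
// placement new, so no separate copy step is needed. The same technique with a
// hypothetical trivially-copyable header (assumes the placement form of
// operator new is reachable via the headers included above):
namespace header_in_place_example {
struct DemoHeader {
  uint32_t magic;
  uint32_t image_size;
};
inline void WriteHeader(uint8_t* buffer, uint32_t magic, uint32_t image_size) {
  // Constructs the header in place at the start of the buffer.
  new (buffer) DemoHeader{magic, image_size};
}
}  // namespace header_in_place_example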
void ImageWriter::CopyAndFixupNativeData(size_t oat_index) {
  const ImageInfo& image_info = GetImageInfo(oat_index);
  // Copy ArtFields and methods to their locations and update the array for convenience.
  for (auto& pair : native_object_relocations_) {
    NativeObjectRelocation& relocation = pair.second;
    // Only work with fields and methods that are in the current oat file.
    if (relocation.oat_index != oat_index) {
      continue;
    }
    auto* dest = image_info.image_->Begin() + relocation.offset;
    DCHECK_GE(dest, image_info.image_->Begin() + image_info.image_end_);
    DCHECK(!IsInBootImage(pair.first));
    switch (relocation.type) {
      case kNativeObjectRelocationTypeArtField: {
        memcpy(dest, pair.first, sizeof(ArtField));
        CopyReference(
            reinterpret_cast<ArtField*>(dest)->GetDeclaringClassAddressWithoutBarrier(),
            reinterpret_cast<ArtField*>(pair.first)->GetDeclaringClass().Ptr());
        break;
      }
      case kNativeObjectRelocationTypeRuntimeMethod:
      case kNativeObjectRelocationTypeArtMethodClean:
      case kNativeObjectRelocationTypeArtMethodDirty: {
        CopyAndFixupMethod(reinterpret_cast<ArtMethod*>(pair.first),
                           reinterpret_cast<ArtMethod*>(dest),
                           image_info);
        break;
      }
      // For arrays, copy just the header since the elements will get copied by their
      // corresponding relocations.
      case kNativeObjectRelocationTypeArtFieldArray: {
        memcpy(dest, pair.first, LengthPrefixedArray<ArtField>::ComputeSize(0));
        break;
      }
      case kNativeObjectRelocationTypeArtMethodArrayClean:
      case kNativeObjectRelocationTypeArtMethodArrayDirty: {
        size_t size = ArtMethod::Size(target_ptr_size_);
        size_t alignment = ArtMethod::Alignment(target_ptr_size_);
        memcpy(dest, pair.first, LengthPrefixedArray<ArtMethod>::ComputeSize(0, size, alignment));
        // Clear padding to avoid non-deterministic data in the image (and placate valgrind).
        reinterpret_cast<LengthPrefixedArray<ArtMethod>*>(dest)->ClearPadding(size, alignment);
        break;
      }
      case kNativeObjectRelocationTypeDexCacheArray:
        // Nothing to copy here, everything is done in FixupDexCache().
        break;
      case kNativeObjectRelocationTypeIMTable: {
        ImTable* orig_imt = reinterpret_cast<ImTable*>(pair.first);
        ImTable* dest_imt = reinterpret_cast<ImTable*>(dest);
        CopyAndFixupImTable(orig_imt, dest_imt);
        break;
      }
      case kNativeObjectRelocationTypeIMTConflictTable: {
        auto* orig_table = reinterpret_cast<ImtConflictTable*>(pair.first);
        CopyAndFixupImtConflictTable(
            orig_table,
            new(dest)ImtConflictTable(orig_table->NumEntries(target_ptr_size_), target_ptr_size_));
        break;
      }
    }
  }
  // Fixup the image method roots.
  auto* image_header = reinterpret_cast<ImageHeader*>(image_info.image_->Begin());
  for (size_t i = 0; i < ImageHeader::kImageMethodsCount; ++i) {
    ArtMethod* method = image_methods_[i];
    CHECK(method != nullptr);
    if (!IsInBootImage(method)) {
      method = NativeLocationInImage(method);
    }
    image_header->SetImageMethod(static_cast<ImageHeader::ImageMethod>(i), method);
  }
  FixupRootVisitor root_visitor(this);

  // Write the intern table into the image.
  if (image_info.intern_table_bytes_ > 0) {
    const ImageSection& intern_table_section = image_header->GetImageSection(
        ImageHeader::kSectionInternedStrings);
    InternTable* const intern_table = image_info.intern_table_.get();
    uint8_t* const intern_table_memory_ptr =
        image_info.image_->Begin() + intern_table_section.Offset();
    const size_t intern_table_bytes = intern_table->WriteToMemory(intern_table_memory_ptr);
    CHECK_EQ(intern_table_bytes, image_info.intern_table_bytes_);
    // Fixup the pointers in the newly written intern table to contain image addresses.
    InternTable temp_intern_table;
    // Note that we require that ReadFromMemory does not make an internal copy of the elements,
    // so that the VisitRoots() call updates the memory directly rather than the copies.
    // This also relies on visit roots not doing any verification which could fail after we update
    // the roots to be the image addresses.
    temp_intern_table.AddTableFromMemory(intern_table_memory_ptr);
    CHECK_EQ(temp_intern_table.Size(), intern_table->Size());
    temp_intern_table.VisitRoots(&root_visitor, kVisitRootFlagAllRoots);
  }
  // Write the class table(s) into the image. class_table_bytes_ may be 0 if there are multiple
  // class loaders. Writing multiple class tables into the image is currently unsupported.
  if (image_info.class_table_bytes_ > 0u) {
    const ImageSection& class_table_section = image_header->GetImageSection(
        ImageHeader::kSectionClassTable);
    uint8_t* const class_table_memory_ptr =
        image_info.image_->Begin() + class_table_section.Offset();
    ReaderMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);

    ClassTable* table = image_info.class_table_.get();
    CHECK(table != nullptr);
    const size_t class_table_bytes = table->WriteToMemory(class_table_memory_ptr);
    CHECK_EQ(class_table_bytes, image_info.class_table_bytes_);
    // Fixup the pointers in the newly written class table to contain image addresses. See
    // above comment for intern tables.
    ClassTable temp_class_table;
    temp_class_table.ReadFromMemory(class_table_memory_ptr);
    CHECK_EQ(temp_class_table.NumReferencedZygoteClasses(),
             table->NumReferencedNonZygoteClasses() + table->NumReferencedZygoteClasses());
    UnbufferedRootVisitor visitor(&root_visitor, RootInfo(kRootUnknown));
    temp_class_table.VisitRoots(visitor);
  }
}

void ImageWriter::CopyAndFixupObjects() {
  auto visitor = [&](Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(obj != nullptr);
    CopyAndFixupObject(obj);
  };
  Runtime::Current()->GetHeap()->VisitObjects(visitor);
  // Fix up the objects that previously had hash codes.
  for (const auto& hash_pair : saved_hashcode_map_) {
    Object* obj = hash_pair.first;
    DCHECK_EQ(obj->GetLockWord<kVerifyNone>(false).ReadBarrierState(), 0U);
    obj->SetLockWord<kVerifyNone>(LockWord::FromHashCode(hash_pair.second, 0U), false);
  }
  saved_hashcode_map_.clear();
}
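// Illustrative sketch, not part of the original file: the intern table and
// class table above are serialized into the image buffer and then re-read
// *from that buffer*, so the following root visit rewrites the pointers inside
// the serialized copy rather than the runtime's live table. The same
// serialize-then-fix-up-in-place idea on a flat array of pointers, with a
// hypothetical relocation delta:
namespace serialize_then_fixup_example {
inline void WriteAndRelocate(const std::vector<void*>& table,
                             uint8_t* dest,
                             ptrdiff_t delta) {
  // Serialize: a plain copy of the pointer array into the destination buffer.
  memcpy(dest, table.data(), table.size() * sizeof(void*));
  // Fix up in place: only the serialized copy is adjusted, never `table`.
  void** copied = reinterpret_cast<void**>(dest);
  for (size_t i = 0; i != table.size(); ++i) {
    copied[i] = reinterpret_cast<void*>(reinterpret_cast<uintptr_t>(copied[i]) + delta);
  }
}
}  // namespace serialize_then_fixup_example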
void ImageWriter::FixupPointerArray(mirror::Object* dst,
                                    mirror::PointerArray* arr,
                                    mirror::Class* klass,
                                    Bin array_type) {
  CHECK(klass->IsArrayClass());
  CHECK(arr->IsIntArray() || arr->IsLongArray()) << klass->PrettyClass() << " " << arr;
  // Fixup int and long pointers for the ArtMethod or ArtField arrays.
  const size_t num_elements = arr->GetLength();
  dst->SetClass(GetImageAddress(arr->GetClass()));
  auto* dest_array = down_cast<mirror::PointerArray*>(dst);
  for (size_t i = 0, count = num_elements; i < count; ++i) {
    void* elem = arr->GetElementPtrSize<void*>(i, target_ptr_size_);
    if (kIsDebugBuild && elem != nullptr && !IsInBootImage(elem)) {
      auto it = native_object_relocations_.find(elem);
      if (UNLIKELY(it == native_object_relocations_.end())) {
        // `it` is not dereferenceable here, so use the array's bin type to decide whether the
        // missing element is an ArtMethod or an ArtField before reporting the failure.
        if (array_type == kBinArtMethodClean || array_type == kBinArtMethodDirty) {
          auto* method = reinterpret_cast<ArtMethod*>(elem);
          LOG(FATAL) << "No relocation entry for ArtMethod " << method->PrettyMethod() << " @ "
                     << method << " idx=" << i << "/" << num_elements << " with declaring class "
                     << Class::PrettyClass(method->GetDeclaringClass());
        } else {
          CHECK_EQ(array_type, kBinArtField);
          auto* field = reinterpret_cast<ArtField*>(elem);
          LOG(FATAL) << "No relocation entry for ArtField " << field->PrettyField() << " @ "
                     << field << " idx=" << i << "/" << num_elements << " with declaring class "
                     << Class::PrettyClass(field->GetDeclaringClass());
        }
        UNREACHABLE();
      }
    }
    CopyAndFixupPointer(dest_array->ElementAddress(i, target_ptr_size_), elem);
  }
}

void ImageWriter::CopyAndFixupObject(Object* obj) {
  if (IsInBootImage(obj)) {
    return;
  }
  size_t offset = GetImageOffset(obj);
  size_t oat_index = GetOatIndex(obj);
  ImageInfo& image_info = GetImageInfo(oat_index);
  auto* dst = reinterpret_cast<Object*>(image_info.image_->Begin() + offset);
  DCHECK_LT(offset, image_info.image_end_);
  const auto* src = reinterpret_cast<const uint8_t*>(obj);

  image_info.image_bitmap_->Set(dst);  // Mark the obj as live.

  const size_t n = obj->SizeOf();
  DCHECK_LE(offset + n, image_info.image_->Size());
  memcpy(dst, src, n);

  // Write in a hash code for objects which have inflated monitors or a hash code in their
  // monitor word.
  const auto it = saved_hashcode_map_.find(obj);
  dst->SetLockWord(it != saved_hashcode_map_.end() ?
      LockWord::FromHashCode(it->second, 0u) : LockWord::Default(), false);
  if (kUseBakerReadBarrier && gc::collector::ConcurrentCopying::kGrayDirtyImmuneObjects) {
    // Treat all of the objects in the image as marked to avoid unnecessary dirty pages. This is
    // safe since we mark all of the objects that may reference non immune objects as gray.
    CHECK(dst->AtomicSetMarkBit(0, 1));
  }
  FixupObject(obj, dst);
}
// Rewrite all the references in the copied object to point to their image address equivalent.
class ImageWriter::FixupVisitor {
 public:
  FixupVisitor(ImageWriter* image_writer, Object* copy) : image_writer_(image_writer), copy_(copy) {
  }

  // Ignore class roots since we don't have a way to map them to the destination. These are
  // handled with other logic.
  void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED)
      const {}
  void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const {}

  void operator()(ObjPtr<Object> obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
    ObjPtr<Object> ref = obj->GetFieldObject<Object, kVerifyNone>(offset);
    // Copy the reference and record the fixup if necessary.
    image_writer_->CopyReference(
        copy_->GetFieldObjectReferenceAddr<kVerifyNone>(offset),
        ref.Ptr());
  }

  // java.lang.ref.Reference visitor.
  void operator()(ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
                  ObjPtr<mirror::Reference> ref) const
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
    operator()(ref, mirror::Reference::ReferentOffset(), /* is_static */ false);
  }

 protected:
  ImageWriter* const image_writer_;
  mirror::Object* const copy_;
};

class ImageWriter::FixupClassVisitor FINAL : public FixupVisitor {
 public:
  FixupClassVisitor(ImageWriter* image_writer, Object* copy) : FixupVisitor(image_writer, copy) {
  }

  void operator()(ObjPtr<Object> obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
      REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
    DCHECK(obj->IsClass());
    FixupVisitor::operator()(obj, offset, /*is_static*/false);
  }

  void operator()(ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
                  ObjPtr<mirror::Reference> ref ATTRIBUTE_UNUSED) const
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
    LOG(FATAL) << "Reference not expected here.";
  }
};

uintptr_t ImageWriter::NativeOffsetInImage(void* obj) {
  DCHECK(obj != nullptr);
  DCHECK(!IsInBootImage(obj));
  auto it = native_object_relocations_.find(obj);
  CHECK(it != native_object_relocations_.end()) << obj << " spaces "
                                                << Runtime::Current()->GetHeap()->DumpSpaces();
  const NativeObjectRelocation& relocation = it->second;
  return relocation.offset;
}

template <typename T>
std::string PrettyPrint(T* ptr) REQUIRES_SHARED(Locks::mutator_lock_) {
  std::ostringstream oss;
  oss << ptr;
  return oss.str();
}

template <>
std::string PrettyPrint(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_) {
  return ArtMethod::PrettyMethod(method);
}

template <typename T>
T* ImageWriter::NativeLocationInImage(T* obj) {
  if (obj == nullptr || IsInBootImage(obj)) {
    return obj;
  } else {
    auto it = native_object_relocations_.find(obj);
    CHECK(it != native_object_relocations_.end()) << obj << " " << PrettyPrint(obj)
        << " spaces " << Runtime::Current()->GetHeap()->DumpSpaces();
    const NativeObjectRelocation& relocation = it->second;
    ImageInfo& image_info = GetImageInfo(relocation.oat_index);
    return reinterpret_cast<T*>(image_info.image_begin_ + relocation.offset);
  }
}

template <typename T>
T* ImageWriter::NativeCopyLocation(T* obj, mirror::DexCache* dex_cache) {
  if (obj == nullptr || IsInBootImage(obj)) {
    return obj;
  } else {
    size_t oat_index = GetOatIndexForDexCache(dex_cache);
    ImageInfo& image_info = GetImageInfo(oat_index);
    return reinterpret_cast<T*>(image_info.image_->Begin() + NativeOffsetInImage(obj));
  }
}

class ImageWriter::NativeLocationVisitor {
 public:
  explicit NativeLocationVisitor(ImageWriter* image_writer) : image_writer_(image_writer) {}

  template <typename T>
  T* operator()(T* ptr, void** dest_addr = nullptr) const REQUIRES_SHARED(Locks::mutator_lock_) {
    if (dest_addr != nullptr) {
      image_writer_->CopyAndFixupPointer(dest_addr, ptr);
    }
    return image_writer_->NativeLocationInImage(ptr);
  }

 private:
  ImageWriter* const image_writer_;
};
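// Illustrative sketch, not part of the original file: NativeLocationInImage and
// NativeCopyLocation above distinguish the two addresses every relocated object
// has while the image is being written: where its copy lives in the compiler
// process (for the memcpy) and where it will live once the image is mapped at
// runtime (for embedded pointers). Hypothetical standalone version:
namespace relocation_addresses_example {
struct DemoImageBuffer {
  uint8_t* writer_base;   // Start of the in-process buffer being filled.
  uint8_t* runtime_base;  // Address the image will be mapped at on-device.
};
inline uint8_t* CopyLocation(const DemoImageBuffer& image, size_t offset) {
  return image.writer_base + offset;   // Where to write the bytes now.
}
inline uint8_t* LocationInImage(const DemoImageBuffer& image, size_t offset) {
  return image.runtime_base + offset;  // What to store into pointer fields.
}
}  // namespace relocation_addresses_example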
void ImageWriter::FixupClass(mirror::Class* orig, mirror::Class* copy) {
  orig->FixupNativePointers(copy, target_ptr_size_, NativeLocationVisitor(this));
  FixupClassVisitor visitor(this, copy);
  ObjPtr<mirror::Object>(orig)->VisitReferences(visitor, visitor);

  // Remove the clinitThreadId. This is required for image determinism.
  copy->SetClinitThreadId(static_cast<pid_t>(0));
}

void ImageWriter::FixupObject(Object* orig, Object* copy) {
  DCHECK(orig != nullptr);
  DCHECK(copy != nullptr);
  if (kUseBakerReadBarrier) {
    orig->AssertReadBarrierState();
  }
  auto* klass = orig->GetClass();
  if (klass->IsIntArrayClass() || klass->IsLongArrayClass()) {
    // Is this a native pointer array?
    auto it = pointer_arrays_.find(down_cast<mirror::PointerArray*>(orig));
    if (it != pointer_arrays_.end()) {
      // Should only need to fixup every pointer array exactly once.
      FixupPointerArray(copy, down_cast<mirror::PointerArray*>(orig), klass, it->second);
      pointer_arrays_.erase(it);
      return;
    }
  }
  if (orig->IsClass()) {
    FixupClass(orig->AsClass<kVerifyNone>(), down_cast<mirror::Class*>(copy));
  } else {
    if (klass == mirror::Method::StaticClass() || klass == mirror::Constructor::StaticClass()) {
      // Need to go update the ArtMethod.
      auto* dest = down_cast<mirror::Executable*>(copy);
      auto* src = down_cast<mirror::Executable*>(orig);
      ArtMethod* src_method = src->GetArtMethod();
      dest->SetArtMethod(GetImageMethodAddress(src_method));
    } else if (!klass->IsArrayClass()) {
      ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
      if (klass == class_linker->GetClassRoot(ClassLinker::kJavaLangDexCache)) {
        FixupDexCache(down_cast<mirror::DexCache*>(orig), down_cast<mirror::DexCache*>(copy));
      } else if (klass->IsClassLoaderClass()) {
        mirror::ClassLoader* copy_loader = down_cast<mirror::ClassLoader*>(copy);
        // If src is a ClassLoader, set the class table to null so that it gets recreated by the
        // ClassLoader.
        copy_loader->SetClassTable(nullptr);
        // Also set allocator to null to be safe. The allocator is created when we create the
        // class table. We also never expect to unload things in the image since they are held
        // live as roots.
        copy_loader->SetAllocator(nullptr);
      }
    }
    FixupVisitor visitor(this, copy);
    orig->VisitReferences(visitor, visitor);
  }
}

class ImageWriter::ImageAddressVisitorForDexCacheArray {
 public:
  explicit ImageAddressVisitorForDexCacheArray(ImageWriter* image_writer)
      : image_writer_(image_writer) {}

  template <typename T>
  T* operator()(T* ptr) const REQUIRES_SHARED(Locks::mutator_lock_) {
    return image_writer_->GetImageAddress(ptr);
  }

 private:
  ImageWriter* const image_writer_;
};

void ImageWriter::FixupDexCache(mirror::DexCache* orig_dex_cache,
                                mirror::DexCache* copy_dex_cache) {
  ImageAddressVisitorForDexCacheArray fixup_visitor(this);
  // Though the DexCache array fields are usually treated as native pointers, we set the full
  // 64-bit values here, clearing the top 32 bits for 32-bit targets. The zero-extension is
  // done by casting to the unsigned type uintptr_t before casting to int64_t, i.e.
  // static_cast<int64_t>(reinterpret_cast<uintptr_t>(image_begin_ + offset)).
  mirror::StringDexCacheType* orig_strings = orig_dex_cache->GetStrings();
  if (orig_strings != nullptr) {
    copy_dex_cache->SetFieldPtrWithSize<false>(mirror::DexCache::StringsOffset(),
                                               NativeLocationInImage(orig_strings),
                                               PointerSize::k64);
    orig_dex_cache->FixupStrings(NativeCopyLocation(orig_strings, orig_dex_cache), fixup_visitor);
  }
  mirror::TypeDexCacheType* orig_types = orig_dex_cache->GetResolvedTypes();
  if (orig_types != nullptr) {
    copy_dex_cache->SetFieldPtrWithSize<false>(mirror::DexCache::ResolvedTypesOffset(),
                                               NativeLocationInImage(orig_types),
                                               PointerSize::k64);
    orig_dex_cache->FixupResolvedTypes(NativeCopyLocation(orig_types, orig_dex_cache),
                                       fixup_visitor);
  }
  mirror::MethodDexCacheType* orig_methods = orig_dex_cache->GetResolvedMethods();
  if (orig_methods != nullptr) {
    copy_dex_cache->SetFieldPtrWithSize<false>(mirror::DexCache::ResolvedMethodsOffset(),
                                               NativeLocationInImage(orig_methods),
                                               PointerSize::k64);
    mirror::MethodDexCacheType* copy_methods = NativeCopyLocation(orig_methods, orig_dex_cache);
    for (size_t i = 0, num = orig_dex_cache->NumResolvedMethods(); i != num; ++i) {
      mirror::MethodDexCachePair orig_pair =
          mirror::DexCache::GetNativePairPtrSize(orig_methods, i, target_ptr_size_);
      // NativeLocationInImage also handles runtime methods since these have relocation info.
      mirror::MethodDexCachePair copy_pair(NativeLocationInImage(orig_pair.object),
                                           orig_pair.index);
      mirror::DexCache::SetNativePairPtrSize(copy_methods, i, copy_pair, target_ptr_size_);
    }
  }
  mirror::FieldDexCacheType* orig_fields = orig_dex_cache->GetResolvedFields();
  if (orig_fields != nullptr) {
    copy_dex_cache->SetFieldPtrWithSize<false>(mirror::DexCache::ResolvedFieldsOffset(),
                                               NativeLocationInImage(orig_fields),
                                               PointerSize::k64);
    mirror::FieldDexCacheType* copy_fields = NativeCopyLocation(orig_fields, orig_dex_cache);
    for (size_t i = 0, num = orig_dex_cache->NumResolvedFields(); i != num; ++i) {
      mirror::FieldDexCachePair orig =
          mirror::DexCache::GetNativePairPtrSize(orig_fields, i, target_ptr_size_);
      mirror::FieldDexCachePair copy = orig;
      copy.object = NativeLocationInImage(orig.object);
      mirror::DexCache::SetNativePairPtrSize(copy_fields, i, copy, target_ptr_size_);
    }
  }
  mirror::MethodTypeDexCacheType* orig_method_types = orig_dex_cache->GetResolvedMethodTypes();
  if (orig_method_types != nullptr) {
    copy_dex_cache->SetFieldPtrWithSize<false>(mirror::DexCache::ResolvedMethodTypesOffset(),
                                               NativeLocationInImage(orig_method_types),
                                               PointerSize::k64);
    orig_dex_cache->FixupResolvedMethodTypes(NativeCopyLocation(orig_method_types, orig_dex_cache),
                                             fixup_visitor);
  }
  GcRoot<mirror::CallSite>* orig_call_sites = orig_dex_cache->GetResolvedCallSites();
  if (orig_call_sites != nullptr) {
    copy_dex_cache->SetFieldPtrWithSize<false>(mirror::DexCache::ResolvedCallSitesOffset(),
                                               NativeLocationInImage(orig_call_sites),
                                               PointerSize::k64);
    orig_dex_cache->FixupResolvedCallSites(NativeCopyLocation(orig_call_sites, orig_dex_cache),
                                           fixup_visitor);
  }

  // Remove the DexFile pointers. They will be fixed up when the runtime loads the oat file.
  // Leaving compiler pointers in here will make the output non-deterministic.
  copy_dex_cache->SetDexFile(nullptr);
}
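// Illustrative sketch, not part of the original file: as the comment at the top
// of FixupDexCache explains, native pointer fields are written as full 64-bit
// values, and the zero-extension on 32-bit targets comes from going through
// the unsigned uintptr_t first:
namespace pointer_widening_example {
inline int64_t WidenPointer(const void* ptr) {
  // uintptr_t is unsigned, so the conversion to int64_t zero-extends rather
  // than sign-extends on 32-bit targets.
  return static_cast<int64_t>(reinterpret_cast<uintptr_t>(ptr));
}
}  // namespace pointer_widening_example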
const uint8_t* ImageWriter::GetOatAddress(OatAddress type) const {
  DCHECK_LT(type, kOatAddressCount);
  // If we are compiling an app image, we need to use the stubs of the boot image.
  if (compile_app_image_) {
    // Use the current image pointers.
    const std::vector<gc::space::ImageSpace*>& image_spaces =
        Runtime::Current()->GetHeap()->GetBootImageSpaces();
    DCHECK(!image_spaces.empty());
    const OatFile* oat_file = image_spaces[0]->GetOatFile();
    CHECK(oat_file != nullptr);
    const OatHeader& header = oat_file->GetOatHeader();
    switch (type) {
      // TODO: We could maybe clean this up if we stored them in an array in the oat header.
      case kOatAddressQuickGenericJNITrampoline:
        return static_cast<const uint8_t*>(header.GetQuickGenericJniTrampoline());
      case kOatAddressInterpreterToInterpreterBridge:
        return static_cast<const uint8_t*>(header.GetInterpreterToInterpreterBridge());
      case kOatAddressInterpreterToCompiledCodeBridge:
        return static_cast<const uint8_t*>(header.GetInterpreterToCompiledCodeBridge());
      case kOatAddressJNIDlsymLookup:
        return static_cast<const uint8_t*>(header.GetJniDlsymLookup());
      case kOatAddressQuickIMTConflictTrampoline:
        return static_cast<const uint8_t*>(header.GetQuickImtConflictTrampoline());
      case kOatAddressQuickResolutionTrampoline:
        return static_cast<const uint8_t*>(header.GetQuickResolutionTrampoline());
      case kOatAddressQuickToInterpreterBridge:
        return static_cast<const uint8_t*>(header.GetQuickToInterpreterBridge());
      default:
        UNREACHABLE();
    }
  }
  const ImageInfo& primary_image_info = GetImageInfo(0);
  return GetOatAddressForOffset(primary_image_info.oat_address_offsets_[type], primary_image_info);
}

const uint8_t* ImageWriter::GetQuickCode(ArtMethod* method,
                                         const ImageInfo& image_info,
                                         bool* quick_is_interpreted) {
  DCHECK(!method->IsResolutionMethod()) << method->PrettyMethod();
  DCHECK_NE(method, Runtime::Current()->GetImtConflictMethod()) << method->PrettyMethod();
  DCHECK(!method->IsImtUnimplementedMethod()) << method->PrettyMethod();
  DCHECK(method->IsInvokable()) << method->PrettyMethod();
  DCHECK(!IsInBootImage(method)) << method->PrettyMethod();

  // Use original code if it exists. Otherwise, set the code pointer to the resolution
  // trampoline.

  // Quick entrypoint:
  const void* quick_oat_entry_point =
      method->GetEntryPointFromQuickCompiledCodePtrSize(target_ptr_size_);
  const uint8_t* quick_code;

  if (UNLIKELY(IsInBootImage(method->GetDeclaringClass()))) {
    DCHECK(method->IsCopied());
    // If the code is not in the oat file corresponding to this image (e.g. default methods),
    // use the entry point stored in the method as-is.
    quick_code = reinterpret_cast<const uint8_t*>(quick_oat_entry_point);
  } else {
    uint32_t quick_oat_code_offset = PointerToLowMemUInt32(quick_oat_entry_point);
    quick_code = GetOatAddressForOffset(quick_oat_code_offset, image_info);
  }

  *quick_is_interpreted = false;
  if (quick_code != nullptr && (!method->IsStatic() || method->IsConstructor() ||
      method->GetDeclaringClass()->IsInitialized())) {
    // We have code for a non-static or initialized method, just use the code.
  } else if (quick_code == nullptr && method->IsNative() &&
      (!method->IsStatic() || method->GetDeclaringClass()->IsInitialized())) {
    // Non-static or initialized native method missing compiled code, use generic JNI version.
    quick_code = GetOatAddress(kOatAddressQuickGenericJNITrampoline);
  } else if (quick_code == nullptr && !method->IsNative()) {
    // We don't have code at all for a non-native method, use the interpreter.
    quick_code = GetOatAddress(kOatAddressQuickToInterpreterBridge);
    *quick_is_interpreted = true;
  } else {
    CHECK(!method->GetDeclaringClass()->IsInitialized());
    // We have code for a static method, but need to go through the resolution stub for class
    // initialization.
    quick_code = GetOatAddress(kOatAddressQuickResolutionTrampoline);
  }
  if (!IsInBootOatFile(quick_code)) {
    // DCHECK_GE(quick_code, oat_data_begin_);
  }
  return quick_code;
}
void ImageWriter::CopyAndFixupMethod(ArtMethod* orig,
                                     ArtMethod* copy,
                                     const ImageInfo& image_info) {
  if (orig->IsAbstract()) {
    // Ignore the single-implementation info for abstract methods.
    // Do this on orig instead of copy, otherwise there is a crash because methods
    // are copied before classes.
    // TODO: handle fixup of single-implementation method for abstract method.
    orig->SetHasSingleImplementation(false);
    orig->SetSingleImplementation(
        nullptr, Runtime::Current()->GetClassLinker()->GetImagePointerSize());
  }

  memcpy(copy, orig, ArtMethod::Size(target_ptr_size_));

  CopyReference(copy->GetDeclaringClassAddressWithoutBarrier(), orig->GetDeclaringClassUnchecked());

  mirror::MethodDexCacheType* orig_resolved_methods =
      orig->GetDexCacheResolvedMethods(target_ptr_size_);
  copy->SetDexCacheResolvedMethods(NativeLocationInImage(orig_resolved_methods), target_ptr_size_);

  // OatWriter replaces the code_ with an offset value. Here we re-adjust to a pointer relative to
  // oat_begin_.

  // The resolution method has a special trampoline to call.
  Runtime* runtime = Runtime::Current();
  if (orig->IsRuntimeMethod()) {
    ImtConflictTable* orig_table = orig->GetImtConflictTable(target_ptr_size_);
    if (orig_table != nullptr) {
      // Special IMT conflict method, normal IMT conflict method or unimplemented IMT method.
      copy->SetEntryPointFromQuickCompiledCodePtrSize(
          GetOatAddress(kOatAddressQuickIMTConflictTrampoline), target_ptr_size_);
      copy->SetImtConflictTable(NativeLocationInImage(orig_table), target_ptr_size_);
    } else if (UNLIKELY(orig == runtime->GetResolutionMethod())) {
      copy->SetEntryPointFromQuickCompiledCodePtrSize(
          GetOatAddress(kOatAddressQuickResolutionTrampoline), target_ptr_size_);
    } else {
      bool found_one = false;
      for (size_t i = 0; i < static_cast<size_t>(CalleeSaveType::kLastCalleeSaveType); ++i) {
        auto idx = static_cast<CalleeSaveType>(i);
        if (runtime->HasCalleeSaveMethod(idx) && runtime->GetCalleeSaveMethod(idx) == orig) {
          found_one = true;
          break;
        }
      }
      CHECK(found_one) << "Expected to find callee save method but got " << orig->PrettyMethod();
      CHECK(copy->IsRuntimeMethod());
    }
  } else {
    // We assume all methods have code. If they don't currently then we set them to use the
    // resolution trampoline. Abstract methods never have code and so we need to make sure
    // their use results in an AbstractMethodError. We use the interpreter to achieve this.
    if (UNLIKELY(!orig->IsInvokable())) {
      copy->SetEntryPointFromQuickCompiledCodePtrSize(
          GetOatAddress(kOatAddressQuickToInterpreterBridge), target_ptr_size_);
    } else {
      bool quick_is_interpreted;
      const uint8_t* quick_code = GetQuickCode(orig, image_info, &quick_is_interpreted);
      copy->SetEntryPointFromQuickCompiledCodePtrSize(quick_code, target_ptr_size_);

      // JNI entrypoint:
      if (orig->IsNative()) {
        // The native method's pointer is set to a stub to lookup via dlsym.
        // Note this is not the code_ pointer, that is handled above.
        copy->SetEntryPointFromJniPtrSize(
            GetOatAddress(kOatAddressJNIDlsymLookup), target_ptr_size_);
      }
    }
  }
}

size_t ImageWriter::GetBinSizeSum(ImageWriter::ImageInfo& image_info, ImageWriter::Bin up_to) const {
  DCHECK_LE(up_to, kBinSize);
  return std::accumulate(&image_info.bin_slot_sizes_[0],
                         &image_info.bin_slot_sizes_[up_to],
                         /*init*/0);
}

ImageWriter::BinSlot::BinSlot(uint32_t lockword) : lockword_(lockword) {
  // These values may need to get updated if more bins are added to the enum Bin.
  static_assert(kBinBits == 3, "wrong number of bin bits");
  static_assert(kBinShift == 27, "wrong number of shift");
  static_assert(sizeof(BinSlot) == sizeof(LockWord), "BinSlot/LockWord must have equal sizes");

  DCHECK_LT(GetBin(), kBinSize);
  DCHECK_ALIGNED(GetIndex(), kObjectAlignment);
}

ImageWriter::BinSlot::BinSlot(Bin bin, uint32_t index)
    : BinSlot(index | (static_cast<uint32_t>(bin) << kBinShift)) {
  DCHECK_EQ(index, GetIndex());
}

ImageWriter::Bin ImageWriter::BinSlot::GetBin() const {
  return static_cast<Bin>((lockword_ & kBinMask) >> kBinShift);
}

uint32_t ImageWriter::BinSlot::GetIndex() const {
  return lockword_ & ~kBinMask;
}

ImageWriter::Bin ImageWriter::BinTypeForNativeRelocationType(NativeObjectRelocationType type) {
  switch (type) {
    case kNativeObjectRelocationTypeArtField:
    case kNativeObjectRelocationTypeArtFieldArray:
      return kBinArtField;
    case kNativeObjectRelocationTypeArtMethodClean:
    case kNativeObjectRelocationTypeArtMethodArrayClean:
      return kBinArtMethodClean;
    case kNativeObjectRelocationTypeArtMethodDirty:
    case kNativeObjectRelocationTypeArtMethodArrayDirty:
      return kBinArtMethodDirty;
    case kNativeObjectRelocationTypeDexCacheArray:
      return kBinDexCacheArray;
    case kNativeObjectRelocationTypeRuntimeMethod:
      return kBinRuntimeMethod;
    case kNativeObjectRelocationTypeIMTable:
      return kBinImTable;
    case kNativeObjectRelocationTypeIMTConflictTable:
      return kBinIMTConflictTable;
  }
  UNREACHABLE();
}

size_t ImageWriter::GetOatIndex(mirror::Object* obj) const {
  if (!IsMultiImage()) {
    return GetDefaultOatIndex();
  }
  auto it = oat_index_map_.find(obj);
  DCHECK(it != oat_index_map_.end()) << obj;
  return it->second;
}

size_t ImageWriter::GetOatIndexForDexFile(const DexFile* dex_file) const {
  if (!IsMultiImage()) {
    return GetDefaultOatIndex();
  }
  auto it = dex_file_oat_index_map_.find(dex_file);
  DCHECK(it != dex_file_oat_index_map_.end()) << dex_file->GetLocation();
  return it->second;
}

size_t ImageWriter::GetOatIndexForDexCache(ObjPtr<mirror::DexCache> dex_cache) const {
  return (dex_cache == nullptr)
      ? GetDefaultOatIndex()
      : GetOatIndexForDexFile(dex_cache->GetDexFile());
}
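// Illustrative sketch, not part of the original file: BinSlot above packs a bin
// number and an object-aligned index into a single 32-bit lock word, with
// kBinBits (3) bits of bin at kBinShift (27) and the index in the low bits.
// The same packing with local constants mirroring those values:
namespace bin_slot_packing_example {
constexpr uint32_t kDemoBinShift = 27;
constexpr uint32_t kDemoBinMask = ((1u << 3) - 1u) << kDemoBinShift;
constexpr uint32_t Pack(uint32_t bin, uint32_t index) {
  return (bin << kDemoBinShift) | index;
}
constexpr uint32_t UnpackBin(uint32_t word) {
  return (word & kDemoBinMask) >> kDemoBinShift;
}
constexpr uint32_t UnpackIndex(uint32_t word) {
  return word & ~kDemoBinMask;
}
static_assert(UnpackBin(Pack(5u, 0x18u)) == 5u, "bin round-trips");
static_assert(UnpackIndex(Pack(5u, 0x18u)) == 0x18u, "index round-trips");
}  // namespace bin_slot_packing_example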

void ImageWriter::UpdateOatFileLayout(size_t oat_index,
                                      size_t oat_loaded_size,
                                      size_t oat_data_offset,
                                      size_t oat_data_size) {
  const uint8_t* images_end = image_infos_.back().image_begin_ + image_infos_.back().image_size_;
  for (const ImageInfo& info : image_infos_) {
    DCHECK_LE(info.image_begin_ + info.image_size_, images_end);
  }
  DCHECK(images_end != nullptr);  // Image space must be ready.

  ImageInfo& cur_image_info = GetImageInfo(oat_index);
  cur_image_info.oat_file_begin_ = images_end + cur_image_info.oat_offset_;
  cur_image_info.oat_loaded_size_ = oat_loaded_size;
  cur_image_info.oat_data_begin_ = cur_image_info.oat_file_begin_ + oat_data_offset;
  cur_image_info.oat_size_ = oat_data_size;

  if (compile_app_image_) {
    CHECK_EQ(oat_filenames_.size(), 1u) << "App image should have no next image.";
    return;
  }

  // Update the oat_offset of the next image info.
  if (oat_index + 1u != oat_filenames_.size()) {
    // There is a following one.
    ImageInfo& next_image_info = GetImageInfo(oat_index + 1u);
    next_image_info.oat_offset_ = cur_image_info.oat_offset_ + oat_loaded_size;
  }
}

void ImageWriter::UpdateOatFileHeader(size_t oat_index, const OatHeader& oat_header) {
  ImageInfo& cur_image_info = GetImageInfo(oat_index);
  cur_image_info.oat_checksum_ = oat_header.GetChecksum();

  if (oat_index == GetDefaultOatIndex()) {
    // Primary oat file, read the trampolines.
    cur_image_info.oat_address_offsets_[kOatAddressInterpreterToInterpreterBridge] =
        oat_header.GetInterpreterToInterpreterBridgeOffset();
    cur_image_info.oat_address_offsets_[kOatAddressInterpreterToCompiledCodeBridge] =
        oat_header.GetInterpreterToCompiledCodeBridgeOffset();
    cur_image_info.oat_address_offsets_[kOatAddressJNIDlsymLookup] =
        oat_header.GetJniDlsymLookupOffset();
    cur_image_info.oat_address_offsets_[kOatAddressQuickGenericJNITrampoline] =
        oat_header.GetQuickGenericJniTrampolineOffset();
    cur_image_info.oat_address_offsets_[kOatAddressQuickIMTConflictTrampoline] =
        oat_header.GetQuickImtConflictTrampolineOffset();
    cur_image_info.oat_address_offsets_[kOatAddressQuickResolutionTrampoline] =
        oat_header.GetQuickResolutionTrampolineOffset();
    cur_image_info.oat_address_offsets_[kOatAddressQuickToInterpreterBridge] =
        oat_header.GetQuickToInterpreterBridgeOffset();
  }
}

ImageWriter::ImageWriter(
    const CompilerDriver& compiler_driver,
    uintptr_t image_begin,
    bool compile_pic,
    bool compile_app_image,
    ImageHeader::StorageMode image_storage_mode,
    const std::vector<const char*>& oat_filenames,
    const std::unordered_map<const DexFile*, size_t>& dex_file_oat_index_map,
    const std::unordered_set<std::string>* dirty_image_objects)
    : compiler_driver_(compiler_driver),
      global_image_begin_(reinterpret_cast<uint8_t*>(image_begin)),
      image_objects_offset_begin_(0),
      compile_pic_(compile_pic),
      compile_app_image_(compile_app_image),
      target_ptr_size_(InstructionSetPointerSize(compiler_driver_.GetInstructionSet())),
      image_infos_(oat_filenames.size()),
      dirty_methods_(0u),
      clean_methods_(0u),
      image_storage_mode_(image_storage_mode),
      oat_filenames_(oat_filenames),
      dex_file_oat_index_map_(dex_file_oat_index_map),
      dirty_image_objects_(dirty_image_objects) {
  CHECK_NE(image_begin, 0U);
  std::fill_n(image_methods_, arraysize(image_methods_), nullptr);
  CHECK_EQ(compile_app_image, !Runtime::Current()->GetHeap()->GetBootImageSpaces().empty())
      << "Compiling a boot image should occur iff there are no boot image spaces loaded";
}
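
// Editor's sketch (added commentary, not part of the original source):
// UpdateOatFileLayout above chains the oat files end to end after the last
// image space, so oat[i + 1] begins exactly oat_loaded_size bytes after
// oat[i]. The addresses and sizes below are hypothetical, chosen only to
// illustrate the arithmetic.
namespace {
constexpr uintptr_t kSketchImagesEnd = 0x71000000u;  // End of the image spaces.
constexpr size_t kSketchOatLoadedSize0 = 0x40000u;   // oat[0]'s loaded size.
constexpr uintptr_t kSketchOatBegin0 = kSketchImagesEnd + 0u;  // oat_offset_ == 0.
constexpr uintptr_t kSketchOatBegin1 = kSketchImagesEnd + kSketchOatLoadedSize0;
static_assert(kSketchOatBegin1 == kSketchOatBegin0 + kSketchOatLoadedSize0,
              "the next oat file starts where the previous one's loaded size ends");
}  // namespace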

ImageWriter::ImageInfo::ImageInfo()
    : intern_table_(new InternTable),
      class_table_(new ClassTable) {}

// Copy a heap reference into the image, translating the source object to its
// address in the image being written.
void ImageWriter::CopyReference(mirror::HeapReference<mirror::Object>* dest,
                                ObjPtr<mirror::Object> src) {
  dest->Assign(GetImageAddress(src.Ptr()));
}

void ImageWriter::CopyReference(mirror::CompressedReference<mirror::Object>* dest,
                                ObjPtr<mirror::Object> src) {
  dest->Assign(GetImageAddress(src.Ptr()));
}

// Relocate a native pointer (e.g. to an ArtMethod or ArtField) into the image
// and store it at the pointer width of the target instruction set.
void ImageWriter::CopyAndFixupPointer(void** target, void* value) {
  void* new_value = value;
  if (value != nullptr && !IsInBootImage(value)) {
    // Pointers into the boot image stay as-is; everything else must have a
    // recorded relocation into one of the images being written.
    auto it = native_object_relocations_.find(value);
    CHECK(it != native_object_relocations_.end()) << value;
    const NativeObjectRelocation& relocation = it->second;
    ImageInfo& image_info = GetImageInfo(relocation.oat_index);
    new_value = reinterpret_cast<void*>(image_info.image_begin_ + relocation.offset);
  }
  if (target_ptr_size_ == PointerSize::k32) {
    *reinterpret_cast<uint32_t*>(target) = PointerToLowMemUInt32(new_value);
  } else {
    *reinterpret_cast<uint64_t*>(target) = reinterpret_cast<uintptr_t>(new_value);
  }
}

}  // namespace art
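
// Editor's sketch (added commentary, not part of the original source):
// CopyAndFixupPointer above writes the relocated address at the pointer width
// of the *target* instruction set rather than the host's, which is why it
// dispatches on target_ptr_size_ instead of doing a plain pointer store. The
// helper below is a hypothetical, standalone restatement of that dispatch; it
// assumes <cstdint> and is not used by the writer itself.
namespace {
// Writes |value| into the slot at |target| as either a 32-bit or a 64-bit
// pointer, mirroring the PointerSize::k32 branch above.
inline void StoreTargetPointer(void** target, uintptr_t value, bool target_is_32_bit) {
  if (target_is_32_bit) {
    // For a 32-bit target on a 64-bit host, the value must fit in 32 bits
    // (the real code enforces this via PointerToLowMemUInt32).
    *reinterpret_cast<uint32_t*>(target) = static_cast<uint32_t>(value);
  } else {
    *reinterpret_cast<uint64_t*>(target) = static_cast<uint64_t>(value);
  }
}
}  // namespace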