      1 /*
      2  * Copyright (C) 2011 The Android Open Source Project
      3  *
      4  * Licensed under the Apache License, Version 2.0 (the "License");
      5  * you may not use this file except in compliance with the License.
      6  * You may obtain a copy of the License at
      7  *
      8  *      http://www.apache.org/licenses/LICENSE-2.0
      9  *
     10  * Unless required by applicable law or agreed to in writing, software
     11  * distributed under the License is distributed on an "AS IS" BASIS,
     12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     13  * See the License for the specific language governing permissions and
     14  * limitations under the License.
     15  */
     16 
     17 #include "image_writer.h"
     18 
     19 #include <lz4.h>
     20 #include <lz4hc.h>
     21 #include <sys/stat.h>
     22 #include <zlib.h>
     23 
     24 #include <memory>
     25 #include <numeric>
     26 #include <unordered_set>
     27 #include <vector>
     28 
     29 #include "art_field-inl.h"
     30 #include "art_method-inl.h"
     31 #include "base/callee_save_type.h"
     32 #include "base/enums.h"
     33 #include "base/globals.h"
     34 #include "base/logging.h"  // For VLOG.
     35 #include "base/stl_util.h"
     36 #include "base/unix_file/fd_file.h"
     37 #include "class_linker-inl.h"
     38 #include "class_root.h"
     39 #include "compiled_method.h"
     40 #include "dex/dex_file-inl.h"
     41 #include "dex/dex_file_types.h"
     42 #include "driver/compiler_options.h"
     43 #include "elf/elf_utils.h"
     44 #include "elf_file.h"
     45 #include "gc/accounting/card_table-inl.h"
     46 #include "gc/accounting/heap_bitmap.h"
     47 #include "gc/accounting/space_bitmap-inl.h"
     48 #include "gc/collector/concurrent_copying.h"
     49 #include "gc/heap-visit-objects-inl.h"
     50 #include "gc/heap.h"
     51 #include "gc/space/large_object_space.h"
     52 #include "gc/space/region_space.h"
     53 #include "gc/space/space-inl.h"
     54 #include "gc/verification.h"
     55 #include "handle_scope-inl.h"
     56 #include "image.h"
     57 #include "imt_conflict_table.h"
     58 #include "intern_table-inl.h"
     59 #include "jni/jni_internal.h"
     60 #include "linear_alloc.h"
     61 #include "lock_word.h"
     62 #include "mirror/array-inl.h"
     63 #include "mirror/class-inl.h"
     64 #include "mirror/class_ext-inl.h"
     65 #include "mirror/class_loader.h"
     66 #include "mirror/dex_cache-inl.h"
     67 #include "mirror/dex_cache.h"
     68 #include "mirror/executable.h"
     69 #include "mirror/method.h"
     70 #include "mirror/object-inl.h"
     71 #include "mirror/object-refvisitor-inl.h"
     72 #include "mirror/object_array-alloc-inl.h"
     73 #include "mirror/object_array-inl.h"
     74 #include "mirror/string-inl.h"
     75 #include "oat.h"
     76 #include "oat_file.h"
     77 #include "oat_file_manager.h"
     78 #include "optimizing/intrinsic_objects.h"
     79 #include "runtime.h"
     80 #include "scoped_thread_state_change-inl.h"
     81 #include "subtype_check.h"
     82 #include "utils/dex_cache_arrays_layout-inl.h"
     83 #include "well_known_classes.h"
     84 
     85 using ::art::mirror::Class;
     86 using ::art::mirror::DexCache;
     87 using ::art::mirror::Object;
     88 using ::art::mirror::ObjectArray;
     89 using ::art::mirror::String;
     90 
     91 namespace art {
     92 namespace linker {
     93 
     94 static ArrayRef<const uint8_t> MaybeCompressData(ArrayRef<const uint8_t> source,
     95                                                  ImageHeader::StorageMode image_storage_mode,
     96                                                  /*out*/ std::vector<uint8_t>* storage) {
     97   const uint64_t compress_start_time = NanoTime();
     98 
     99   switch (image_storage_mode) {
    100     case ImageHeader::kStorageModeLZ4: {
    101       storage->resize(LZ4_compressBound(source.size()));
    102       size_t data_size = LZ4_compress_default(
    103           reinterpret_cast<char*>(const_cast<uint8_t*>(source.data())),
    104           reinterpret_cast<char*>(storage->data()),
    105           source.size(),
    106           storage->size());
    107       storage->resize(data_size);
    108       break;
    109     }
    110     case ImageHeader::kStorageModeLZ4HC: {
     111       // The worst-case compressed size bound is the same as for non-HC LZ4.
    112       storage->resize(LZ4_compressBound(source.size()));
    113       size_t data_size = LZ4_compress_HC(
    114           reinterpret_cast<const char*>(const_cast<uint8_t*>(source.data())),
    115           reinterpret_cast<char*>(storage->data()),
    116           source.size(),
    117           storage->size(),
    118           LZ4HC_CLEVEL_MAX);
    119       storage->resize(data_size);
    120       break;
    121     }
    122     case ImageHeader::kStorageModeUncompressed: {
    123       return source;
    124     }
    125     default: {
     126       LOG(FATAL) << "Unsupported image storage mode";
    127       UNREACHABLE();
    128     }
    129   }
    130 
    131   DCHECK(image_storage_mode == ImageHeader::kStorageModeLZ4 ||
    132          image_storage_mode == ImageHeader::kStorageModeLZ4HC);
    133   VLOG(compiler) << "Compressed from " << source.size() << " to " << storage->size() << " in "
    134                  << PrettyDuration(NanoTime() - compress_start_time);
    135   if (kIsDebugBuild) {
    136     std::vector<uint8_t> decompressed(source.size());
    137     const size_t decompressed_size = LZ4_decompress_safe(
    138         reinterpret_cast<char*>(storage->data()),
    139         reinterpret_cast<char*>(decompressed.data()),
    140         storage->size(),
    141         decompressed.size());
    142     CHECK_EQ(decompressed_size, decompressed.size());
    143     CHECK_EQ(memcmp(source.data(), decompressed.data(), source.size()), 0) << image_storage_mode;
    144   }
    145   return ArrayRef<const uint8_t>(*storage);
    146 }
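
         // Usage sketch (mirroring the call in Write() below): the caller owns the backing storage
         // and must keep it alive for as long as the returned ArrayRef is in use, e.g.
         //
         //   std::vector<uint8_t> compressed_data;
         //   ArrayRef<const uint8_t> image_data =
         //       MaybeCompressData(raw_image_data, image_storage_mode_, &compressed_data);
         //   // image_data aliases either raw_image_data or compressed_data.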
    147 
    148 // Separate objects into multiple bins to optimize dirty memory use.
    149 static constexpr bool kBinObjects = true;
    150 
    151 ObjPtr<mirror::ClassLoader> ImageWriter::GetAppClassLoader() const
    152     REQUIRES_SHARED(Locks::mutator_lock_) {
    153   return compiler_options_.IsAppImage()
    154       ? ObjPtr<mirror::ClassLoader>::DownCast(Thread::Current()->DecodeJObject(app_class_loader_))
    155       : nullptr;
    156 }
    157 
    158 bool ImageWriter::IsImageObject(ObjPtr<mirror::Object> obj) const {
    159   // For boot image, we keep all objects remaining after the GC in PrepareImageAddressSpace().
    160   if (compiler_options_.IsBootImage()) {
    161     return true;
    162   }
    163   // Objects already in the boot image do not belong to the image being written.
    164   if (IsInBootImage(obj.Ptr())) {
    165     return false;
    166   }
     167   // DexCaches for boot class path components that are not part of the boot image
     168   // cannot be garbage collected in PrepareImageAddressSpace(), but we do not want
     169   // them in the app image either, so make sure we include only the app DexCaches.
    170   if (obj->IsDexCache() &&
    171       !ContainsElement(compiler_options_.GetDexFilesForOatFile(),
    172                        obj->AsDexCache()->GetDexFile())) {
    173     return false;
    174   }
    175   return true;
    176 }
    177 
    178 // Return true if an object is already in an image space.
    179 bool ImageWriter::IsInBootImage(const void* obj) const {
    180   gc::Heap* const heap = Runtime::Current()->GetHeap();
    181   if (compiler_options_.IsBootImage()) {
    182     DCHECK(heap->GetBootImageSpaces().empty());
    183     return false;
    184   }
    185   for (gc::space::ImageSpace* boot_image_space : heap->GetBootImageSpaces()) {
    186     const uint8_t* image_begin = boot_image_space->Begin();
     187     // Real image end, including the ArtMethod and ArtField sections.
    188     const uint8_t* image_end = image_begin + boot_image_space->GetImageHeader().GetImageSize();
    189     if (image_begin <= obj && obj < image_end) {
    190       return true;
    191     }
    192   }
    193   return false;
    194 }
    195 
    196 bool ImageWriter::IsInBootOatFile(const void* ptr) const {
    197   gc::Heap* const heap = Runtime::Current()->GetHeap();
    198   if (compiler_options_.IsBootImage()) {
    199     DCHECK(heap->GetBootImageSpaces().empty());
    200     return false;
    201   }
    202   for (gc::space::ImageSpace* boot_image_space : heap->GetBootImageSpaces()) {
    203     const ImageHeader& image_header = boot_image_space->GetImageHeader();
    204     if (image_header.GetOatFileBegin() <= ptr && ptr < image_header.GetOatFileEnd()) {
    205       return true;
    206     }
    207   }
    208   return false;
    209 }
    210 
    211 static void ClearDexFileCookies() REQUIRES_SHARED(Locks::mutator_lock_) {
    212   auto visitor = [](Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
    213     DCHECK(obj != nullptr);
    214     Class* klass = obj->GetClass();
    215     if (klass == WellKnownClasses::ToClass(WellKnownClasses::dalvik_system_DexFile)) {
    216       ArtField* field = jni::DecodeArtField(WellKnownClasses::dalvik_system_DexFile_cookie);
    217       // Null out the cookie to enable determinism. b/34090128
    218       field->SetObject</*kTransactionActive*/false>(obj, nullptr);
    219     }
    220   };
    221   Runtime::Current()->GetHeap()->VisitObjects(visitor);
    222 }
    223 
    224 bool ImageWriter::PrepareImageAddressSpace(TimingLogger* timings) {
    225   target_ptr_size_ = InstructionSetPointerSize(compiler_options_.GetInstructionSet());
    226 
    227   Thread* const self = Thread::Current();
    228 
    229   gc::Heap* const heap = Runtime::Current()->GetHeap();
    230   {
    231     ScopedObjectAccess soa(self);
    232     {
    233       TimingLogger::ScopedTiming t("PruneNonImageClasses", timings);
    234       PruneNonImageClasses();  // Remove junk
    235     }
    236 
    237     if (compiler_options_.IsAppImage()) {
    238       TimingLogger::ScopedTiming t("ClearDexFileCookies", timings);
    239       // Clear dex file cookies for app images to enable app image determinism. This is required
    240       // since the cookie field contains long pointers to DexFiles which are not deterministic.
    241       // b/34090128
    242       ClearDexFileCookies();
    243     }
    244   }
    245 
    246   {
    247     TimingLogger::ScopedTiming t("CollectGarbage", timings);
    248     heap->CollectGarbage(/* clear_soft_references */ false);  // Remove garbage.
    249   }
    250 
    251   if (kIsDebugBuild) {
    252     ScopedObjectAccess soa(self);
    253     CheckNonImageClassesRemoved();
    254   }
    255 
    256   {
    257     // Preload deterministic contents to the dex cache arrays we're going to write.
    258     ScopedObjectAccess soa(self);
    259     ObjPtr<mirror::ClassLoader> class_loader = GetAppClassLoader();
    260     std::vector<ObjPtr<mirror::DexCache>> dex_caches = FindDexCaches(self);
    261     for (ObjPtr<mirror::DexCache> dex_cache : dex_caches) {
    262       if (!IsImageObject(dex_cache)) {
    263         continue;  // Boot image DexCache is not written to the app image.
    264       }
    265       PreloadDexCache(dex_cache, class_loader);
    266     }
    267   }
    268 
    269   // Used to store information that will later be used to calculate image
    270   // offsets to string references in the AppImage.
    271   std::vector<HeapReferencePointerInfo> string_ref_info;
    272   if (ClassLinker::kAppImageMayContainStrings && compiler_options_.IsAppImage()) {
    273     // Count the number of string fields so we can allocate the appropriate
    274     // amount of space in the image section.
    275     TimingLogger::ScopedTiming t("AppImage:CollectStringReferenceInfo", timings);
    276     ScopedObjectAccess soa(self);
    277 
    278     if (kIsDebugBuild) {
    279       VerifyNativeGCRootInvariants();
    280       CHECK_EQ(image_infos_.size(), 1u);
    281     }
    282 
    283     string_ref_info = CollectStringReferenceInfo();
    284     image_infos_.back().num_string_references_ = string_ref_info.size();
    285   }
    286 
    287   {
    288     TimingLogger::ScopedTiming t("CalculateNewObjectOffsets", timings);
    289     ScopedObjectAccess soa(self);
    290     CalculateNewObjectOffsets();
    291   }
    292 
    293   // Obtain class count for debugging purposes
    294   if (VLOG_IS_ON(compiler) && compiler_options_.IsAppImage()) {
    295     ScopedObjectAccess soa(self);
    296 
    297     size_t app_image_class_count  = 0;
    298 
    299     for (ImageInfo& info : image_infos_) {
    300       info.class_table_->Visit([&](ObjPtr<mirror::Class> klass)
    301                                    REQUIRES_SHARED(Locks::mutator_lock_) {
    302         if (!IsInBootImage(klass.Ptr())) {
    303           ++app_image_class_count;
    304         }
    305 
    306         // Indicate that we would like to continue visiting classes.
    307         return true;
    308       });
    309     }
    310 
    311     VLOG(compiler) << "Dex2Oat:AppImage:classCount = " << app_image_class_count;
    312   }
    313 
    314   if (ClassLinker::kAppImageMayContainStrings && compiler_options_.IsAppImage()) {
    315     // Use the string reference information obtained earlier to calculate image
    316     // offsets.  These will later be written to the image by Write/CopyMetadata.
    317     TimingLogger::ScopedTiming t("AppImage:CalculateImageOffsets", timings);
    318     ScopedObjectAccess soa(self);
    319 
    320     size_t managed_string_refs = 0;
    321     size_t native_string_refs = 0;
    322 
    323     /*
    324      * Iterate over the string reference info and calculate image offsets.
    325      * The first element of the pair is either the object the reference belongs
    326      * to or the beginning of the native reference array it is located in.  In
    327      * the first case the second element is the offset of the field relative to
    328      * the object's base address.  In the second case, it is the index of the
    329      * StringDexCacheType object in the array.
    330      */
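             //
             // For example (sketch): a managed entry (object, field_offset) becomes
             // (GetImageOffset(object), field_offset), while a tagged native entry
             // (tag(dex_cache), string_index) becomes (tag(GetImageOffset(dex_cache)), string_index),
             // where tag() stands for the corresponding native-ref tag setter used below.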
    331     for (const HeapReferencePointerInfo& ref_info : string_ref_info) {
    332       uint32_t base_offset;
    333 
    334       if (HasDexCacheStringNativeRefTag(ref_info.first)) {
    335         ++native_string_refs;
    336         auto* obj_ptr = reinterpret_cast<mirror::Object*>(ClearDexCacheNativeRefTags(
    337             ref_info.first));
    338         base_offset = SetDexCacheStringNativeRefTag(GetImageOffset(obj_ptr));
    339       } else if (HasDexCachePreResolvedStringNativeRefTag(ref_info.first)) {
    340         ++native_string_refs;
    341         auto* obj_ptr = reinterpret_cast<mirror::Object*>(ClearDexCacheNativeRefTags(
    342             ref_info.first));
    343         base_offset = SetDexCachePreResolvedStringNativeRefTag(GetImageOffset(obj_ptr));
    344       } else {
    345         ++managed_string_refs;
    346         base_offset = GetImageOffset(reinterpret_cast<mirror::Object*>(ref_info.first));
    347       }
    348 
    349       string_reference_offsets_.emplace_back(base_offset, ref_info.second);
    350     }
    351 
    352     CHECK_EQ(image_infos_.back().num_string_references_,
    353              string_reference_offsets_.size());
    354 
    355     VLOG(compiler) << "Dex2Oat:AppImage:stringReferences = " << string_reference_offsets_.size();
    356     VLOG(compiler) << "Dex2Oat:AppImage:managedStringReferences = " << managed_string_refs;
    357     VLOG(compiler) << "Dex2Oat:AppImage:nativeStringReferences = " << native_string_refs;
    358   }
    359 
    360   // This needs to happen after CalculateNewObjectOffsets since it relies on intern_table_bytes_ and
    361   // bin size sums being calculated.
    362   TimingLogger::ScopedTiming t("AllocMemory", timings);
    363   return AllocMemory();
    364 }
    365 
    366 class ImageWriter::CollectStringReferenceVisitor {
    367  public:
    368   explicit CollectStringReferenceVisitor(const ImageWriter& image_writer)
    369       : image_writer_(image_writer),
    370         curr_obj_(nullptr),
    371         string_ref_info_(0),
    372         dex_cache_string_ref_counter_(0) {}
    373 
    374   // Used to prevent repeated null checks in the code that calls the visitor.
    375   ALWAYS_INLINE
    376   void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root) const
    377       REQUIRES_SHARED(Locks::mutator_lock_) {
    378     if (!root->IsNull()) {
    379       VisitRoot(root);
    380     }
    381   }
    382 
    383   /*
    384    * Counts the number of native references to strings reachable through
    385    * DexCache objects for verification later.
    386    */
    387   ALWAYS_INLINE
    388   void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
    389       REQUIRES_SHARED(Locks::mutator_lock_)  {
    390     ObjPtr<mirror::Object> referred_obj = root->AsMirrorPtr();
    391 
    392     if (curr_obj_->IsDexCache() &&
    393         image_writer_.IsValidAppImageStringReference(referred_obj)) {
    394       ++dex_cache_string_ref_counter_;
    395     }
    396   }
    397 
    398   // Collects info for managed fields that reference managed Strings.
    399   ALWAYS_INLINE
    400   void operator() (ObjPtr<mirror::Object> obj,
    401                    MemberOffset member_offset,
    402                    bool is_static ATTRIBUTE_UNUSED) const
    403       REQUIRES_SHARED(Locks::mutator_lock_) {
    404     ObjPtr<mirror::Object> referred_obj =
    405         obj->GetFieldObject<mirror::Object, kVerifyNone, kWithoutReadBarrier>(
    406             member_offset);
    407 
    408     if (image_writer_.IsValidAppImageStringReference(referred_obj)) {
    409       string_ref_info_.emplace_back(reinterpret_cast<uintptr_t>(obj.Ptr()),
    410                                     member_offset.Uint32Value());
    411     }
    412   }
    413 
    414   ALWAYS_INLINE
    415   void operator() (ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
    416                    ObjPtr<mirror::Reference> ref) const
    417       REQUIRES_SHARED(Locks::mutator_lock_) {
    418     operator()(ref, mirror::Reference::ReferentOffset(), /* is_static */ false);
    419   }
    420 
    421   void AddStringRefInfo(uint32_t first, uint32_t second) {
    422     string_ref_info_.emplace_back(first, second);
    423   }
    424 
    425   std::vector<HeapReferencePointerInfo>&& MoveRefInfo() {
    426     return std::move(string_ref_info_);
    427   }
    428 
    429   // Used by the wrapper function to obtain a native reference count.
    430   size_t GetDexCacheStringRefCount() const {
    431     return dex_cache_string_ref_counter_;
    432   }
    433 
    434   void SetObject(ObjPtr<mirror::Object> obj) {
    435     curr_obj_ = obj;
    436     dex_cache_string_ref_counter_ = 0;
    437   }
    438 
    439  private:
    440   const ImageWriter& image_writer_;
    441   ObjPtr<mirror::Object> curr_obj_;
    442   mutable std::vector<HeapReferencePointerInfo> string_ref_info_;
    443   mutable size_t dex_cache_string_ref_counter_;
    444 };
    445 
    446 std::vector<ImageWriter::HeapReferencePointerInfo> ImageWriter::CollectStringReferenceInfo() const
    447     REQUIRES_SHARED(Locks::mutator_lock_) {
    448   gc::Heap* const heap = Runtime::Current()->GetHeap();
    449   CollectStringReferenceVisitor visitor(*this);
    450 
    451   /*
    452    * References to managed strings can occur either in the managed heap or in
    453    * native memory regions.  Information about managed references is collected
    454    * by the CollectStringReferenceVisitor and directly added to the internal
    455    * info vector.
    456    *
    457    * Native references to managed strings can only occur through DexCache
     458    * objects.  This is verified by VerifyNativeGCRootInvariants().  Because
     459    * these native references are encapsulated in std::atomic objects,
     460    * VisitReferences() cannot pass the address of the reference itself to the
     461    * visitor.  Instead, it loads the reference into a temporary variable and
     462    * passes the address of that temporary to the visitor.  As a consequence,
     463    * the visitor cannot uniquely identify the location of the string
     464    * reference.
    465    *
    466    * Due to these limitations, the visitor will only count the number of
    467    * managed strings reachable through the native references of a DexCache
    468    * object.  If there are any such strings, this function will then iterate
    469    * over the native references, test the string for membership in the
    470    * AppImage, and add the tagged DexCache pointer and string array offset to
    471    * the info vector if necessary.
    472    */
    473   heap->VisitObjects([this, &visitor](ObjPtr<mirror::Object> object)
    474       REQUIRES_SHARED(Locks::mutator_lock_) {
    475     if (IsImageObject(object)) {
    476       visitor.SetObject(object);
    477 
    478       if (object->IsDexCache()) {
    479         object->VisitReferences</* kVisitNativeRoots= */ true,
    480                                 kVerifyNone,
    481                                 kWithoutReadBarrier>(visitor, visitor);
    482 
    483         if (visitor.GetDexCacheStringRefCount() > 0) {
    484           size_t string_info_collected = 0;
    485 
    486           ObjPtr<mirror::DexCache> dex_cache = object->AsDexCache();
    487           // Both of the dex cache string arrays are visited, so add up the total in the check.
    488           DCHECK_LE(visitor.GetDexCacheStringRefCount(),
    489                     dex_cache->NumPreResolvedStrings() + dex_cache->NumStrings());
    490 
    491           for (uint32_t index = 0; index < dex_cache->NumStrings(); ++index) {
    492             // GetResolvedString() can't be used here due to the circular
    493             // nature of the cache and the collision detection this requires.
    494             ObjPtr<mirror::String> referred_string =
    495                 dex_cache->GetStrings()[index].load().object.Read();
    496 
    497             if (IsValidAppImageStringReference(referred_string)) {
    498               ++string_info_collected;
    499               visitor.AddStringRefInfo(
    500                   SetDexCacheStringNativeRefTag(reinterpret_cast<uintptr_t>(object.Ptr())), index);
    501             }
    502           }
    503 
     504           // Visit all of the pre-resolved strings.
    505           GcRoot<mirror::String>* preresolved_strings = dex_cache->GetPreResolvedStrings();
    506           for (size_t index = 0; index < dex_cache->NumPreResolvedStrings(); ++index) {
    507             ObjPtr<mirror::String> referred_string = preresolved_strings[index].Read();
    508             if (IsValidAppImageStringReference(referred_string)) {
    509               ++string_info_collected;
    510               visitor.AddStringRefInfo(SetDexCachePreResolvedStringNativeRefTag(
    511                 reinterpret_cast<uintptr_t>(object.Ptr())),
    512                 index);
    513             }
    514           }
    515 
    516           DCHECK_EQ(string_info_collected, visitor.GetDexCacheStringRefCount());
    517         }
    518       } else {
    519         object->VisitReferences</* kVisitNativeRoots= */ false,
    520                                 kVerifyNone,
    521                                 kWithoutReadBarrier>(visitor, visitor);
    522       }
    523     }
    524   });
    525 
    526   return visitor.MoveRefInfo();
    527 }
    528 
    529 class ImageWriter::NativeGCRootInvariantVisitor {
    530  public:
    531   explicit NativeGCRootInvariantVisitor(const ImageWriter& image_writer) :
    532     curr_obj_(nullptr), class_violation_(false), class_loader_violation_(false),
    533     image_writer_(image_writer) {}
    534 
    535   ALWAYS_INLINE
    536   void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root) const
    537       REQUIRES_SHARED(Locks::mutator_lock_) {
    538     if (!root->IsNull()) {
    539       VisitRoot(root);
    540     }
    541   }
    542 
    543   ALWAYS_INLINE
    544   void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
    545       REQUIRES_SHARED(Locks::mutator_lock_)  {
    546     ObjPtr<mirror::Object> referred_obj = root->AsMirrorPtr();
    547 
    548     if (curr_obj_->IsClass()) {
    549       class_violation_ = class_violation_ ||
    550                          image_writer_.IsValidAppImageStringReference(referred_obj);
    551 
    552     } else if (curr_obj_->IsClassLoader()) {
    553       class_loader_violation_ = class_loader_violation_ ||
    554                                 image_writer_.IsValidAppImageStringReference(referred_obj);
    555 
    556     } else if (!curr_obj_->IsDexCache()) {
    557       LOG(FATAL) << "Dex2Oat:AppImage | " <<
    558                     "Native reference to String found in unexpected object type.";
    559     }
    560   }
    561 
    562   ALWAYS_INLINE
    563   void operator() (ObjPtr<mirror::Object> obj ATTRIBUTE_UNUSED,
    564                    MemberOffset member_offset ATTRIBUTE_UNUSED,
    565                    bool is_static ATTRIBUTE_UNUSED) const
    566       REQUIRES_SHARED(Locks::mutator_lock_) {}
    567 
    568   ALWAYS_INLINE
    569   void operator() (ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
    570                    ObjPtr<mirror::Reference> ref ATTRIBUTE_UNUSED) const
    571       REQUIRES_SHARED(Locks::mutator_lock_) {}
    572 
    573   // Returns true iff the only reachable native string references are through DexCache objects.
    574   bool InvariantsHold() const {
    575     return !(class_violation_ || class_loader_violation_);
    576   }
    577 
    578   ObjPtr<mirror::Object> curr_obj_;
    579   mutable bool class_violation_;
    580   mutable bool class_loader_violation_;
    581 
    582  private:
    583   const ImageWriter& image_writer_;
    584 };
    585 
    586 void ImageWriter::VerifyNativeGCRootInvariants() const REQUIRES_SHARED(Locks::mutator_lock_) {
    587   gc::Heap* const heap = Runtime::Current()->GetHeap();
    588 
    589   NativeGCRootInvariantVisitor visitor(*this);
    590 
    591   heap->VisitObjects([this, &visitor](ObjPtr<mirror::Object> object)
    592       REQUIRES_SHARED(Locks::mutator_lock_) {
    593     visitor.curr_obj_ = object;
    594 
    595     if (!IsInBootImage(object.Ptr())) {
    596       object->VisitReferences</* kVisitNativeReferences= */ true,
    597                               kVerifyNone,
    598                               kWithoutReadBarrier>(visitor, visitor);
    599     }
    600   });
    601 
    602   bool error = false;
    603   std::ostringstream error_str;
    604 
    605   /*
    606    * Build the error string
    607    */
    608 
    609   if (UNLIKELY(visitor.class_violation_)) {
    610     error_str << "Class";
    611     error = true;
    612   }
    613 
    614   if (UNLIKELY(visitor.class_loader_violation_)) {
    615     if (error) {
    616       error_str << ", ";
    617     }
    618 
    619     error_str << "ClassLoader";
    620   }
    621 
    622   CHECK(visitor.InvariantsHold()) <<
    623     "Native GC root invariant failure. String ref invariants don't hold for the following " <<
    624     "object types: " << error_str.str();
    625 }
    626 
    627 void ImageWriter::CopyMetadata() {
    628   DCHECK(compiler_options_.IsAppImage());
    629   CHECK_EQ(image_infos_.size(), 1u);
    630 
    631   const ImageInfo& image_info = image_infos_.back();
    632   std::vector<ImageSection> image_sections = image_info.CreateImageSections().second;
    633 
    634   auto* sfo_section_base = reinterpret_cast<AppImageReferenceOffsetInfo*>(
    635       image_info.image_.Begin() +
    636       image_sections[ImageHeader::kSectionStringReferenceOffsets].Offset());
    637 
    638   std::copy(string_reference_offsets_.begin(),
    639             string_reference_offsets_.end(),
    640             sfo_section_base);
    641 }
    642 
    643 bool ImageWriter::IsValidAppImageStringReference(ObjPtr<mirror::Object> referred_obj) const {
    644   return referred_obj != nullptr &&
    645          !IsInBootImage(referred_obj.Ptr()) &&
    646          referred_obj->IsString();
    647 }
    648 
    649 // Helper class that erases the image file if it isn't properly flushed and closed.
    650 class ImageWriter::ImageFileGuard {
    651  public:
    652   ImageFileGuard() noexcept = default;
    653   ImageFileGuard(ImageFileGuard&& other) noexcept = default;
    654   ImageFileGuard& operator=(ImageFileGuard&& other) noexcept = default;
    655 
    656   ~ImageFileGuard() {
    657     if (image_file_ != nullptr) {
    658       // Failure, erase the image file.
    659       image_file_->Erase();
    660     }
    661   }
    662 
    663   void reset(File* image_file) {
    664     image_file_.reset(image_file);
    665   }
    666 
    667   bool operator==(std::nullptr_t) {
    668     return image_file_ == nullptr;
    669   }
    670 
    671   bool operator!=(std::nullptr_t) {
    672     return image_file_ != nullptr;
    673   }
    674 
    675   File* operator->() const {
    676     return image_file_.get();
    677   }
    678 
    679   bool WriteHeaderAndClose(const std::string& image_filename, const ImageHeader* image_header) {
     680     // The header is written uncompressed since it records whether the image data is compressed.
    681     if (!image_file_->PwriteFully(image_header, sizeof(ImageHeader), 0)) {
    682       PLOG(ERROR) << "Failed to write image file header " << image_filename;
    683       return false;
    684     }
    685 
    686     // FlushCloseOrErase() takes care of erasing, so the destructor does not need
    687     // to do that whether the FlushCloseOrErase() succeeds or fails.
    688     std::unique_ptr<File> image_file = std::move(image_file_);
    689     if (image_file->FlushCloseOrErase() != 0) {
    690       PLOG(ERROR) << "Failed to flush and close image file " << image_filename;
    691       return false;
    692     }
    693 
    694     return true;
    695   }
    696 
    697  private:
    698   std::unique_ptr<File> image_file_;
    699 };
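
         // Usage sketch (as in Write() below): the guard erases the image file unless it is
         // successfully handed off via WriteHeaderAndClose(), e.g.
         //
         //   ImageFileGuard image_file;
         //   image_file.reset(OS::CreateEmptyFile(image_filename.c_str()));
         //   ...  // write image data; any early return lets ~ImageFileGuard() erase the file.
         //   if (!image_file.WriteHeaderAndClose(image_filename, image_header)) {
         //     return false;
         //   }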
    700 
    701 bool ImageWriter::Write(int image_fd,
    702                         const std::vector<std::string>& image_filenames,
    703                         const std::vector<std::string>& oat_filenames) {
    704   // If image_fd or oat_fd are not kInvalidFd then we may have empty strings in image_filenames or
    705   // oat_filenames.
    706   CHECK(!image_filenames.empty());
    707   if (image_fd != kInvalidFd) {
    708     CHECK_EQ(image_filenames.size(), 1u);
    709   }
    710   CHECK(!oat_filenames.empty());
    711   CHECK_EQ(image_filenames.size(), oat_filenames.size());
    712 
    713   Thread* const self = Thread::Current();
    714   {
    715     ScopedObjectAccess soa(self);
    716     for (size_t i = 0; i < oat_filenames.size(); ++i) {
    717       CreateHeader(i);
    718       CopyAndFixupNativeData(i);
    719     }
    720   }
    721 
    722   {
    723     // TODO: heap validation can't handle these fix up passes.
    724     ScopedObjectAccess soa(self);
    725     Runtime::Current()->GetHeap()->DisableObjectValidation();
    726     CopyAndFixupObjects();
    727   }
    728 
    729   if (compiler_options_.IsAppImage()) {
    730     CopyMetadata();
    731   }
    732 
    733   // Primary image header shall be written last for two reasons. First, this ensures
    734   // that we shall not end up with a valid primary image and invalid secondary image.
    735   // Second, its checksum shall include the checksums of the secondary images (XORed).
    736   // This way only the primary image checksum needs to be checked to determine whether
    737   // any of the images or oat files are out of date. (Oat file checksums are included
    738   // in the image checksum calculation.)
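           //
           // Checksum sketch (based on the adler32 and SetImageChecksum calls below): each image's
           // checksum covers its header, its (possibly compressed) block data and its bitmap, and
           // the primary image's checksum is additionally XORed with every secondary image checksum.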
    739   ImageHeader* primary_header = reinterpret_cast<ImageHeader*>(image_infos_[0].image_.Begin());
    740   ImageFileGuard primary_image_file;
    741   for (size_t i = 0; i < image_filenames.size(); ++i) {
    742     const std::string& image_filename = image_filenames[i];
    743     ImageInfo& image_info = GetImageInfo(i);
    744     ImageFileGuard image_file;
    745     if (image_fd != kInvalidFd) {
    746       if (image_filename.empty()) {
    747         image_file.reset(new File(image_fd, unix_file::kCheckSafeUsage));
    748         // Empty the file in case it already exists.
    749         if (image_file != nullptr) {
    750           TEMP_FAILURE_RETRY(image_file->SetLength(0));
    751           TEMP_FAILURE_RETRY(image_file->Flush());
    752         }
    753       } else {
    754         LOG(ERROR) << "image fd " << image_fd << " name " << image_filename;
    755       }
    756     } else {
    757       image_file.reset(OS::CreateEmptyFile(image_filename.c_str()));
    758     }
    759 
    760     if (image_file == nullptr) {
    761       LOG(ERROR) << "Failed to open image file " << image_filename;
    762       return false;
    763     }
    764 
    765     if (!compiler_options_.IsAppImage() && fchmod(image_file->Fd(), 0644) != 0) {
    766       PLOG(ERROR) << "Failed to make image file world readable: " << image_filename;
     767       return false;
    768     }
    769 
    770     // Image data size excludes the bitmap and the header.
    771     ImageHeader* const image_header = reinterpret_cast<ImageHeader*>(image_info.image_.Begin());
    772 
    773     // Block sources (from the image).
    774     const bool is_compressed = image_storage_mode_ != ImageHeader::kStorageModeUncompressed;
    775     std::vector<std::pair<uint32_t, uint32_t>> block_sources;
    776     std::vector<ImageHeader::Block> blocks;
    777 
    778     // Add a set of solid blocks such that no block is larger than the maximum size. A solid block
    779     // is a block that must be decompressed all at once.
    780     auto add_blocks = [&](uint32_t offset, uint32_t size) {
    781       while (size != 0u) {
    782         const uint32_t cur_size = std::min(size, compiler_options_.MaxImageBlockSize());
    783         block_sources.emplace_back(offset, cur_size);
    784         offset += cur_size;
    785         size -= cur_size;
    786       }
    787     };
    788 
    789     add_blocks(sizeof(ImageHeader), image_header->GetImageSize() - sizeof(ImageHeader));
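             // For example (sketch): with a MaxImageBlockSize() of 4 MiB and a 10 MiB image body,
             // this yields three solid blocks of 4 MiB, 4 MiB and 2 MiB, each of which can be
             // decompressed independently at load time.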
    790 
    791     // Checksum of compressed image data and header.
    792     uint32_t image_checksum = adler32(0L, Z_NULL, 0);
    793     image_checksum = adler32(image_checksum,
    794                              reinterpret_cast<const uint8_t*>(image_header),
    795                              sizeof(ImageHeader));
    796     // Copy and compress blocks.
    797     size_t out_offset = sizeof(ImageHeader);
    798     for (const std::pair<uint32_t, uint32_t> block : block_sources) {
    799       ArrayRef<const uint8_t> raw_image_data(image_info.image_.Begin() + block.first,
    800                                              block.second);
    801       std::vector<uint8_t> compressed_data;
    802       ArrayRef<const uint8_t> image_data =
    803           MaybeCompressData(raw_image_data, image_storage_mode_, &compressed_data);
    804 
    805       if (!is_compressed) {
    806         // For uncompressed, preserve alignment since the image will be directly mapped.
    807         out_offset = block.first;
    808       }
    809 
    810       // Fill in the compressed location of the block.
    811       blocks.emplace_back(ImageHeader::Block(
    812           image_storage_mode_,
    813           /*data_offset=*/ out_offset,
    814           /*data_size=*/ image_data.size(),
    815           /*image_offset=*/ block.first,
    816           /*image_size=*/ block.second));
    817 
    818       // Write out the image + fields + methods.
    819       if (!image_file->PwriteFully(image_data.data(), image_data.size(), out_offset)) {
    820         PLOG(ERROR) << "Failed to write image file data " << image_filename;
    821         image_file->Erase();
    822         return false;
    823       }
    824       out_offset += image_data.size();
    825       image_checksum = adler32(image_checksum, image_data.data(), image_data.size());
    826     }
    827 
    828     // Write the block metadata directly after the image sections.
     829     // Note: This is not part of the mapped image and is not preserved after decompression; it
     830     // is only used for image loading. For this reason, only write it out for compressed images.
    831     if (is_compressed) {
    832       // Align up since the compressed data is not necessarily aligned.
    833       out_offset = RoundUp(out_offset, alignof(ImageHeader::Block));
    834       CHECK(!blocks.empty());
    835       const size_t blocks_bytes = blocks.size() * sizeof(blocks[0]);
    836       if (!image_file->PwriteFully(&blocks[0], blocks_bytes, out_offset)) {
    837         PLOG(ERROR) << "Failed to write image blocks " << image_filename;
    838         image_file->Erase();
    839         return false;
    840       }
    841       image_header->blocks_offset_ = out_offset;
    842       image_header->blocks_count_ = blocks.size();
    843       out_offset += blocks_bytes;
    844     }
    845 
    846     // Data size includes everything except the bitmap.
    847     image_header->data_size_ = out_offset - sizeof(ImageHeader);
    848 
    849     // Update and write the bitmap section. Note that the bitmap section is relative to the
    850     // possibly compressed image.
    851     ImageSection& bitmap_section = image_header->GetImageSection(ImageHeader::kSectionImageBitmap);
    852     // Align up since data size may be unaligned if the image is compressed.
    853     out_offset = RoundUp(out_offset, kPageSize);
    854     bitmap_section = ImageSection(out_offset, bitmap_section.Size());
    855 
    856     if (!image_file->PwriteFully(image_info.image_bitmap_->Begin(),
    857                                  bitmap_section.Size(),
    858                                  bitmap_section.Offset())) {
    859       PLOG(ERROR) << "Failed to write image file bitmap " << image_filename;
    860       return false;
    861     }
    862 
    863     int err = image_file->Flush();
    864     if (err < 0) {
    865       PLOG(ERROR) << "Failed to flush image file " << image_filename << " with result " << err;
    866       return false;
    867     }
    868 
    869     // Calculate the image checksum of the remaining data.
    870     image_checksum = adler32(image_checksum,
    871                              reinterpret_cast<const uint8_t*>(image_info.image_bitmap_->Begin()),
    872                              bitmap_section.Size());
    873     image_header->SetImageChecksum(image_checksum);
    874 
    875     if (VLOG_IS_ON(compiler)) {
    876       const size_t separately_written_section_size = bitmap_section.Size();
    877       const size_t total_uncompressed_size = image_info.image_size_ +
    878           separately_written_section_size;
    879       const size_t total_compressed_size = out_offset + separately_written_section_size;
    880 
    881       VLOG(compiler) << "Dex2Oat:uncompressedImageSize = " << total_uncompressed_size;
    882       if (total_uncompressed_size != total_compressed_size) {
    883         VLOG(compiler) << "Dex2Oat:compressedImageSize = " << total_compressed_size;
    884       }
    885     }
    886 
    887     CHECK_EQ(bitmap_section.End(), static_cast<size_t>(image_file->GetLength()))
    888         << "Bitmap should be at the end of the file";
    889 
    890     // Write header last in case the compiler gets killed in the middle of image writing.
    891     // We do not want to have a corrupted image with a valid header.
    892     // Delay the writing of the primary image header until after writing secondary images.
    893     if (i == 0u) {
    894       primary_image_file = std::move(image_file);
    895     } else {
    896       if (!image_file.WriteHeaderAndClose(image_filename, image_header)) {
    897         return false;
    898       }
    899       // Update the primary image checksum with the secondary image checksum.
    900       primary_header->SetImageChecksum(primary_header->GetImageChecksum() ^ image_checksum);
    901     }
    902   }
    903   DCHECK(primary_image_file != nullptr);
    904   if (!primary_image_file.WriteHeaderAndClose(image_filenames[0], primary_header)) {
    905     return false;
    906   }
    907 
    908   return true;
    909 }
    910 
    911 void ImageWriter::SetImageOffset(mirror::Object* object, size_t offset) {
    912   DCHECK(object != nullptr);
    913   DCHECK_NE(offset, 0U);
    914 
    915   // The object is already deflated from when we set the bin slot. Just overwrite the lock word.
    916   object->SetLockWord(LockWord::FromForwardingAddress(offset), false);
    917   DCHECK_EQ(object->GetLockWord(false).ReadBarrierState(), 0u);
    918   DCHECK(IsImageOffsetAssigned(object));
    919 }
    920 
    921 void ImageWriter::UpdateImageOffset(mirror::Object* obj, uintptr_t offset) {
    922   DCHECK(IsImageOffsetAssigned(obj)) << obj << " " << offset;
    923   obj->SetLockWord(LockWord::FromForwardingAddress(offset), false);
    924   DCHECK_EQ(obj->GetLockWord(false).ReadBarrierState(), 0u);
    925 }
    926 
    927 void ImageWriter::AssignImageOffset(mirror::Object* object, ImageWriter::BinSlot bin_slot) {
    928   DCHECK(object != nullptr);
    929   DCHECK_NE(image_objects_offset_begin_, 0u);
    930 
    931   size_t oat_index = GetOatIndex(object);
    932   ImageInfo& image_info = GetImageInfo(oat_index);
    933   size_t bin_slot_offset = image_info.GetBinSlotOffset(bin_slot.GetBin());
    934   size_t new_offset = bin_slot_offset + bin_slot.GetIndex();
    935   DCHECK_ALIGNED(new_offset, kObjectAlignment);
    936 
    937   SetImageOffset(object, new_offset);
    938   DCHECK_LT(new_offset, image_info.image_end_);
    939 }
    940 
    941 bool ImageWriter::IsImageOffsetAssigned(mirror::Object* object) const {
    942   // Will also return true if the bin slot was assigned since we are reusing the lock word.
    943   DCHECK(object != nullptr);
    944   return object->GetLockWord(false).GetState() == LockWord::kForwardingAddress;
    945 }
    946 
    947 size_t ImageWriter::GetImageOffset(mirror::Object* object) const {
    948   DCHECK(object != nullptr);
    949   DCHECK(IsImageOffsetAssigned(object));
    950   LockWord lock_word = object->GetLockWord(false);
    951   size_t offset = lock_word.ForwardingAddress();
    952   size_t oat_index = GetOatIndex(object);
    953   const ImageInfo& image_info = GetImageInfo(oat_index);
    954   DCHECK_LT(offset, image_info.image_end_);
    955   return offset;
    956 }
    957 
    958 void ImageWriter::SetImageBinSlot(mirror::Object* object, BinSlot bin_slot) {
    959   DCHECK(object != nullptr);
    960   DCHECK(!IsImageOffsetAssigned(object));
    961   DCHECK(!IsImageBinSlotAssigned(object));
    962 
    963   // Before we stomp over the lock word, save the hash code for later.
    964   LockWord lw(object->GetLockWord(false));
    965   switch (lw.GetState()) {
    966     case LockWord::kFatLocked:
    967       FALLTHROUGH_INTENDED;
    968     case LockWord::kThinLocked: {
    969       std::ostringstream oss;
    970       bool thin = (lw.GetState() == LockWord::kThinLocked);
    971       oss << (thin ? "Thin" : "Fat")
    972           << " locked object " << object << "(" << object->PrettyTypeOf()
    973           << ") found during object copy";
    974       if (thin) {
    975         oss << ". Lock owner:" << lw.ThinLockOwner();
    976       }
    977       LOG(FATAL) << oss.str();
    978       UNREACHABLE();
    979     }
    980     case LockWord::kUnlocked:
    981       // No hash, don't need to save it.
    982       break;
    983     case LockWord::kHashCode:
    984       DCHECK(saved_hashcode_map_.find(object) == saved_hashcode_map_.end());
    985       saved_hashcode_map_.emplace(object, lw.GetHashCode());
    986       break;
    987     default:
    988       LOG(FATAL) << "Unreachable.";
    989       UNREACHABLE();
    990   }
    991   object->SetLockWord(LockWord::FromForwardingAddress(bin_slot.Uint32Value()), false);
    992   DCHECK_EQ(object->GetLockWord(false).ReadBarrierState(), 0u);
    993   DCHECK(IsImageBinSlotAssigned(object));
    994 }
    995 
    996 void ImageWriter::PrepareDexCacheArraySlots() {
    997   // Prepare dex cache array starts based on the ordering specified in the CompilerOptions.
    998   // Set the slot size early to avoid DCHECK() failures in IsImageBinSlotAssigned()
     999   // when AssignImageBinSlot() assigns their indexes out of order.
   1000   for (const DexFile* dex_file : compiler_options_.GetDexFilesForOatFile()) {
   1001     auto it = dex_file_oat_index_map_.find(dex_file);
   1002     DCHECK(it != dex_file_oat_index_map_.end()) << dex_file->GetLocation();
   1003     ImageInfo& image_info = GetImageInfo(it->second);
   1004     image_info.dex_cache_array_starts_.Put(
   1005         dex_file, image_info.GetBinSlotSize(Bin::kDexCacheArray));
   1006     DexCacheArraysLayout layout(target_ptr_size_, dex_file);
   1007     image_info.IncrementBinSlotSize(Bin::kDexCacheArray, layout.Size());
   1008   }
   1009 
   1010   ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
   1011   Thread* const self = Thread::Current();
   1012   ReaderMutexLock mu(self, *Locks::dex_lock_);
   1013   for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
   1014     ObjPtr<mirror::DexCache> dex_cache =
   1015         ObjPtr<mirror::DexCache>::DownCast(self->DecodeJObject(data.weak_root));
   1016     if (dex_cache == nullptr || !IsImageObject(dex_cache)) {
   1017       continue;
   1018     }
   1019     const DexFile* dex_file = dex_cache->GetDexFile();
   1020     CHECK(dex_file_oat_index_map_.find(dex_file) != dex_file_oat_index_map_.end())
   1021         << "Dex cache should have been pruned " << dex_file->GetLocation()
   1022         << "; possibly in class path";
   1023     DexCacheArraysLayout layout(target_ptr_size_, dex_file);
   1024     DCHECK(layout.Valid());
   1025     size_t oat_index = GetOatIndexForDexCache(dex_cache);
   1026     ImageInfo& image_info = GetImageInfo(oat_index);
   1027     uint32_t start = image_info.dex_cache_array_starts_.Get(dex_file);
   1028     DCHECK_EQ(dex_file->NumTypeIds() != 0u, dex_cache->GetResolvedTypes() != nullptr);
   1029     AddDexCacheArrayRelocation(dex_cache->GetResolvedTypes(),
   1030                                start + layout.TypesOffset(),
   1031                                oat_index);
   1032     DCHECK_EQ(dex_file->NumMethodIds() != 0u, dex_cache->GetResolvedMethods() != nullptr);
   1033     AddDexCacheArrayRelocation(dex_cache->GetResolvedMethods(),
   1034                                start + layout.MethodsOffset(),
   1035                                oat_index);
   1036     DCHECK_EQ(dex_file->NumFieldIds() != 0u, dex_cache->GetResolvedFields() != nullptr);
   1037     AddDexCacheArrayRelocation(dex_cache->GetResolvedFields(),
   1038                                start + layout.FieldsOffset(),
   1039                                oat_index);
   1040     DCHECK_EQ(dex_file->NumStringIds() != 0u, dex_cache->GetStrings() != nullptr);
   1041     AddDexCacheArrayRelocation(dex_cache->GetStrings(), start + layout.StringsOffset(), oat_index);
   1042 
   1043     AddDexCacheArrayRelocation(dex_cache->GetResolvedMethodTypes(),
   1044                                start + layout.MethodTypesOffset(),
   1045                                oat_index);
   1046     AddDexCacheArrayRelocation(dex_cache->GetResolvedCallSites(),
   1047                                 start + layout.CallSitesOffset(),
   1048                                 oat_index);
   1049 
   1050     // Preresolved strings aren't part of the special layout.
   1051     GcRoot<mirror::String>* preresolved_strings = dex_cache->GetPreResolvedStrings();
   1052     if (preresolved_strings != nullptr) {
   1053       DCHECK(!IsInBootImage(preresolved_strings));
   1054       // Add the array to the metadata section.
   1055       const size_t count = dex_cache->NumPreResolvedStrings();
   1056       auto bin = BinTypeForNativeRelocationType(NativeObjectRelocationType::kGcRootPointer);
   1057       for (size_t i = 0; i < count; ++i) {
   1058         native_object_relocations_.emplace(&preresolved_strings[i],
   1059             NativeObjectRelocation { oat_index,
   1060                                      image_info.GetBinSlotSize(bin),
   1061                                      NativeObjectRelocationType::kGcRootPointer });
   1062         image_info.IncrementBinSlotSize(bin, sizeof(GcRoot<mirror::Object>));
   1063       }
   1064     }
   1065   }
   1066 }
   1067 
   1068 void ImageWriter::AddDexCacheArrayRelocation(void* array,
   1069                                              size_t offset,
   1070                                              size_t oat_index) {
   1071   if (array != nullptr) {
   1072     DCHECK(!IsInBootImage(array));
   1073     native_object_relocations_.emplace(array,
   1074         NativeObjectRelocation { oat_index, offset, NativeObjectRelocationType::kDexCacheArray });
   1075   }
   1076 }
   1077 
   1078 void ImageWriter::AddMethodPointerArray(ObjPtr<mirror::PointerArray> arr) {
   1079   DCHECK(arr != nullptr);
   1080   if (kIsDebugBuild) {
   1081     for (size_t i = 0, len = arr->GetLength(); i < len; i++) {
   1082       ArtMethod* method = arr->GetElementPtrSize<ArtMethod*>(i, target_ptr_size_);
   1083       if (method != nullptr && !method->IsRuntimeMethod()) {
   1084         ObjPtr<mirror::Class> klass = method->GetDeclaringClass();
   1085         CHECK(klass == nullptr || KeepClass(klass))
   1086             << Class::PrettyClass(klass) << " should be a kept class";
   1087       }
   1088     }
   1089   }
    1090   // Bin::kArtMethodClean is picked arbitrarily; it is only required to differentiate between
    1091   // ArtFields and ArtMethods.
   1092   pointer_arrays_.emplace(arr.Ptr(), Bin::kArtMethodClean);
   1093 }
   1094 
   1095 void ImageWriter::AssignImageBinSlot(mirror::Object* object, size_t oat_index) {
   1096   DCHECK(object != nullptr);
   1097   size_t object_size = object->SizeOf();
   1098 
   1099   // The magic happens here. We segregate objects into different bins based
   1100   // on how likely they are to get dirty at runtime.
   1101   //
   1102   // Likely-to-dirty objects get packed together into the same bin so that
   1103   // at runtime their page dirtiness ratio (how many dirty objects a page has) is
   1104   // maximized.
   1105   //
   1106   // This means more pages will stay either clean or shared dirty (with zygote) and
   1107   // the app will use less of its own (private) memory.
   1108   Bin bin = Bin::kRegular;
   1109 
   1110   if (kBinObjects) {
   1111     //
   1112     // Changing the bin of an object is purely a memory-use tuning.
    1113     // It has no effect on runtime correctness.
   1114     //
   1115     // Memory analysis has determined that the following types of objects get dirtied
   1116     // the most:
   1117     //
   1118     // * Dex cache arrays are stored in a special bin. The arrays for each dex cache have
   1119     //   a fixed layout which helps improve generated code (using PC-relative addressing),
   1120     //   so we pre-calculate their offsets separately in PrepareDexCacheArraySlots().
   1121     //   Since these arrays are huge, most pages do not overlap other objects and it's not
   1122     //   really important where they are for the clean/dirty separation. Due to their
   1123     //   special PC-relative addressing, we arbitrarily keep them at the end.
    1124     // * Classes that are verified [their clinit runs only at runtime]
   1125     //   - classes in general [because their static fields get overwritten]
   1126     //   - initialized classes with all-final statics are unlikely to be ever dirty,
   1127     //     so bin them separately
   1128     // * Art Methods that are:
   1129     //   - native [their native entry point is not looked up until runtime]
   1130     //   - have declaring classes that aren't initialized
   1131     //            [their interpreter/quick entry points are trampolines until the class
   1132     //             becomes initialized]
   1133     //
   1134     // We also assume the following objects get dirtied either never or extremely rarely:
   1135     //  * Strings (they are immutable)
   1136     //  * Art methods that aren't native and have initialized declared classes
   1137     //
   1138     // We assume that "regular" bin objects are highly unlikely to become dirtied,
   1139     // so packing them together will not result in a noticeably tighter dirty-to-clean ratio.
   1140     //
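             // A few illustrative outcomes of the classification below (sketch):
             //   java.lang.String instance                      -> Bin::kString
             //   initialized class with all-final statics       -> Bin::kClassInitializedFinalStatics
             //   plain java.lang.Object (likely a lock object)  -> Bin::kMiscDirty
             //   DexCache instance                               -> Bin::kMiscDirty
             //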
   1141     if (object->IsClass()) {
   1142       bin = Bin::kClassVerified;
   1143       ObjPtr<mirror::Class> klass = object->AsClass();
   1144 
   1145       // Add non-embedded vtable to the pointer array table if there is one.
   1146       ObjPtr<mirror::PointerArray> vtable = klass->GetVTable();
   1147       if (vtable != nullptr) {
   1148         AddMethodPointerArray(vtable);
   1149       }
   1150       ObjPtr<mirror::IfTable> iftable = klass->GetIfTable();
   1151       if (iftable != nullptr) {
   1152         for (int32_t i = 0; i < klass->GetIfTableCount(); ++i) {
   1153           if (iftable->GetMethodArrayCount(i) > 0) {
   1154             AddMethodPointerArray(iftable->GetMethodArray(i));
   1155           }
   1156         }
   1157       }
   1158 
   1159       // Move known dirty objects into their own sections. This includes:
   1160       //   - classes with dirty static fields.
   1161       if (dirty_image_objects_ != nullptr &&
   1162           dirty_image_objects_->find(klass->PrettyDescriptor()) != dirty_image_objects_->end()) {
   1163         bin = Bin::kKnownDirty;
   1164       } else if (klass->GetStatus() == ClassStatus::kInitialized) {
   1165         bin = Bin::kClassInitialized;
   1166 
   1167         // If the class's static fields are all final, put it into a separate bin
   1168         // since it's very likely it will stay clean.
   1169         uint32_t num_static_fields = klass->NumStaticFields();
   1170         if (num_static_fields == 0) {
   1171           bin = Bin::kClassInitializedFinalStatics;
   1172         } else {
   1173           // Maybe all the statics are final?
   1174           bool all_final = true;
   1175           for (uint32_t i = 0; i < num_static_fields; ++i) {
   1176             ArtField* field = klass->GetStaticField(i);
   1177             if (!field->IsFinal()) {
   1178               all_final = false;
   1179               break;
   1180             }
   1181           }
   1182 
   1183           if (all_final) {
   1184             bin = Bin::kClassInitializedFinalStatics;
   1185           }
   1186         }
   1187       }
   1188     } else if (object->GetClass<kVerifyNone>()->IsStringClass()) {
   1189       bin = Bin::kString;  // Strings are almost always immutable (except for object header).
   1190     } else if (object->GetClass<kVerifyNone>() == GetClassRoot<mirror::Object>()) {
   1191       // Instance of java lang object, probably a lock object. This means it will be dirty when we
   1192       // synchronize on it.
   1193       bin = Bin::kMiscDirty;
   1194     } else if (object->IsDexCache()) {
   1195       // Dex file field becomes dirty when the image is loaded.
   1196       bin = Bin::kMiscDirty;
   1197     }
    1198     // else bin remains Bin::kRegular
   1199   }
   1200 
   1201   // Assign the oat index too.
   1202   DCHECK(oat_index_map_.find(object) == oat_index_map_.end());
   1203   oat_index_map_.emplace(object, oat_index);
   1204 
   1205   ImageInfo& image_info = GetImageInfo(oat_index);
   1206 
   1207   size_t offset_delta = RoundUp(object_size, kObjectAlignment);  // 64-bit alignment
    1208   // Current size of the bin (aligned); this becomes the new object's offset within the bin.
   1209   size_t current_offset = image_info.GetBinSlotSize(bin);
   1210   // Move the current bin size up to accommodate the object we just assigned a bin slot.
   1211   image_info.IncrementBinSlotSize(bin, offset_delta);
   1212 
   1213   BinSlot new_bin_slot(bin, current_offset);
   1214   SetImageBinSlot(object, new_bin_slot);
   1215 
   1216   image_info.IncrementBinSlotCount(bin, 1u);
   1217 
    1218   // Grow the image end by the size of the object we just assigned.
   1219   image_info.image_end_ += offset_delta;
   1220 }
   1221 
   1222 bool ImageWriter::WillMethodBeDirty(ArtMethod* m) const {
   1223   if (m->IsNative()) {
   1224     return true;
   1225   }
   1226   ObjPtr<mirror::Class> declaring_class = m->GetDeclaringClass();
    1227   // Initialized classes are highly unlikely to be dirtied since there are no entry points to mutate.
   1228   return declaring_class == nullptr || declaring_class->GetStatus() != ClassStatus::kInitialized;
   1229 }
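         // Note: the result of WillMethodBeDirty() is consumed in TryAssignBinSlot() below, where it
         // selects between the kArtMethodDirty and kArtMethodClean relocation types so that methods
         // expected to be written at runtime are packed away from clean ones.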
   1230 
   1231 bool ImageWriter::IsImageBinSlotAssigned(mirror::Object* object) const {
   1232   DCHECK(object != nullptr);
   1233 
   1234   // We always stash the bin slot into a lockword, in the 'forwarding address' state.
   1235   // If it's in some other state, then we haven't yet assigned an image bin slot.
   1236   if (object->GetLockWord(false).GetState() != LockWord::kForwardingAddress) {
   1237     return false;
   1238   } else if (kIsDebugBuild) {
   1239     LockWord lock_word = object->GetLockWord(false);
   1240     size_t offset = lock_word.ForwardingAddress();
   1241     BinSlot bin_slot(offset);
   1242     size_t oat_index = GetOatIndex(object);
   1243     const ImageInfo& image_info = GetImageInfo(oat_index);
   1244     DCHECK_LT(bin_slot.GetIndex(), image_info.GetBinSlotSize(bin_slot.GetBin()))
   1245         << "bin slot offset should not exceed the size of that bin";
   1246   }
   1247   return true;
   1248 }
   1249 
   1250 ImageWriter::BinSlot ImageWriter::GetImageBinSlot(mirror::Object* object) const {
   1251   DCHECK(object != nullptr);
   1252   DCHECK(IsImageBinSlotAssigned(object));
   1253 
   1254   LockWord lock_word = object->GetLockWord(false);
   1255   size_t offset = lock_word.ForwardingAddress();  // TODO: ForwardingAddress should be uint32_t
   1256   DCHECK_LE(offset, std::numeric_limits<uint32_t>::max());
   1257 
   1258   BinSlot bin_slot(static_cast<uint32_t>(offset));
   1259   size_t oat_index = GetOatIndex(object);
   1260   const ImageInfo& image_info = GetImageInfo(oat_index);
   1261   DCHECK_LT(bin_slot.GetIndex(), image_info.GetBinSlotSize(bin_slot.GetBin()));
   1262 
   1263   return bin_slot;
   1264 }
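         // A minimal sketch of the lock word round trip used by the two helpers above, assuming
         // SetImageBinSlot() packs the BinSlot bits into a forwarding-address lock word (helper names
         // in the sketch are illustrative, not guaranteed by this file):
         //
         //   BinSlot slot(bin, offset_in_bin);                                  // pack bin + offset
         //   obj->SetLockWord(LockWord::FromForwardingAddress(slot.Uint32Value()), false);
         //   ...
         //   BinSlot restored(obj->GetLockWord(false).ForwardingAddress());     // unpack later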
   1265 
   1266 bool ImageWriter::AllocMemory() {
   1267   for (ImageInfo& image_info : image_infos_) {
   1268     const size_t length = RoundUp(image_info.CreateImageSections().first, kPageSize);
   1269 
   1270     std::string error_msg;
   1271     image_info.image_ = MemMap::MapAnonymous("image writer image",
   1272                                              length,
   1273                                              PROT_READ | PROT_WRITE,
   1274                                              /*low_4gb=*/ false,
   1275                                              &error_msg);
   1276     if (UNLIKELY(!image_info.image_.IsValid())) {
   1277       LOG(ERROR) << "Failed to allocate memory for image file generation: " << error_msg;
   1278       return false;
   1279     }
   1280 
    1281     // Create the image bitmap; it only needs to cover the mirror object section, up to image_end_.
   1282     CHECK_LE(image_info.image_end_, length);
   1283     image_info.image_bitmap_.reset(gc::accounting::ContinuousSpaceBitmap::Create(
   1284         "image bitmap", image_info.image_.Begin(), RoundUp(image_info.image_end_, kPageSize)));
   1285     if (image_info.image_bitmap_.get() == nullptr) {
   1286       LOG(ERROR) << "Failed to allocate memory for image bitmap";
   1287       return false;
   1288     }
   1289   }
   1290   return true;
   1291 }
   1292 
   1293 static bool IsBootClassLoaderClass(ObjPtr<mirror::Class> klass)
   1294     REQUIRES_SHARED(Locks::mutator_lock_) {
   1295   return klass->GetClassLoader() == nullptr;
   1296 }
   1297 
   1298 bool ImageWriter::IsBootClassLoaderNonImageClass(mirror::Class* klass) {
   1299   return IsBootClassLoaderClass(klass) && !IsInBootImage(klass);
   1300 }
   1301 
    1302 // This visitor follows the references of an instance recursively and prunes this class
    1303 // if the type of any reachable field is pruned.
   1304 class ImageWriter::PruneObjectReferenceVisitor {
   1305  public:
    1306   PruneObjectReferenceVisitor(ImageWriter* image_writer,
    1307                               bool* early_exit,
    1308                               std::unordered_set<mirror::Object*>* visited,
    1309                               bool* result)
   1310       : image_writer_(image_writer), early_exit_(early_exit), visited_(visited), result_(result) {}
   1311 
   1312   ALWAYS_INLINE void VisitRootIfNonNull(
   1313       mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const
   1314       REQUIRES_SHARED(Locks::mutator_lock_) { }
   1315 
   1316   ALWAYS_INLINE void VisitRoot(
   1317       mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const
   1318       REQUIRES_SHARED(Locks::mutator_lock_) { }
   1319 
   1320   ALWAYS_INLINE void operator() (ObjPtr<mirror::Object> obj,
   1321                                  MemberOffset offset,
   1322                                  bool is_static ATTRIBUTE_UNUSED) const
   1323       REQUIRES_SHARED(Locks::mutator_lock_) {
   1324     mirror::Object* ref =
   1325         obj->GetFieldObject<mirror::Object, kVerifyNone, kWithoutReadBarrier>(offset);
   1326     if (ref == nullptr || visited_->find(ref) != visited_->end()) {
   1327       return;
   1328     }
   1329 
   1330     ObjPtr<mirror::ObjectArray<mirror::Class>> class_roots =
   1331         Runtime::Current()->GetClassLinker()->GetClassRoots();
   1332     ObjPtr<mirror::Class> klass = ref->IsClass() ? ref->AsClass() : ref->GetClass();
   1333     if (klass == GetClassRoot<mirror::Method>(class_roots) ||
   1334         klass == GetClassRoot<mirror::Constructor>(class_roots)) {
    1335       // Prune all classes using reflection because the content they hold will not be fixed up.
   1336       *result_ = true;
   1337     }
   1338 
   1339     if (ref->IsClass()) {
   1340       *result_ = *result_ ||
   1341           image_writer_->PruneAppImageClassInternal(ref->AsClass(), early_exit_, visited_);
   1342     } else {
   1343       // Record the object visited in case of circular reference.
   1344       visited_->emplace(ref);
   1345       *result_ = *result_ ||
   1346           image_writer_->PruneAppImageClassInternal(klass, early_exit_, visited_);
   1347       ref->VisitReferences(*this, *this);
    1348       // Clean up before exiting so that the next call of this function starts fresh.
   1349       visited_->erase(ref);
   1350     }
   1351   }
   1352 
   1353   ALWAYS_INLINE void operator() (ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
   1354                                  ObjPtr<mirror::Reference> ref) const
   1355       REQUIRES_SHARED(Locks::mutator_lock_) {
   1356     operator()(ref, mirror::Reference::ReferentOffset(), /* is_static */ false);
   1357   }
   1358 
    1359   ALWAYS_INLINE bool GetResult() const {
    1360     return *result_;  // result_ points at the caller's bool; dereference to return its value.
    1361   }
   1362 
   1363  private:
   1364   ImageWriter* image_writer_;
   1365   bool* early_exit_;
   1366   std::unordered_set<mirror::Object*>* visited_;
   1367   bool* const result_;
   1368 };
   1369 
   1370 
   1371 bool ImageWriter::PruneAppImageClass(ObjPtr<mirror::Class> klass) {
   1372   bool early_exit = false;
   1373   std::unordered_set<mirror::Object*> visited;
   1374   return PruneAppImageClassInternal(klass, &early_exit, &visited);
   1375 }
   1376 
   1377 bool ImageWriter::PruneAppImageClassInternal(
   1378     ObjPtr<mirror::Class> klass,
   1379     bool* early_exit,
   1380     std::unordered_set<mirror::Object*>* visited) {
   1381   DCHECK(early_exit != nullptr);
   1382   DCHECK(visited != nullptr);
   1383   DCHECK(compiler_options_.IsAppImage());
   1384   if (klass == nullptr || IsInBootImage(klass.Ptr())) {
   1385     return false;
   1386   }
   1387   auto found = prune_class_memo_.find(klass.Ptr());
   1388   if (found != prune_class_memo_.end()) {
   1389     // Already computed, return the found value.
   1390     return found->second;
   1391   }
    1392   // Circular dependency: return false but do not store the result in the memoization table.
   1393   if (visited->find(klass.Ptr()) != visited->end()) {
   1394     *early_exit = true;
   1395     return false;
   1396   }
   1397   visited->emplace(klass.Ptr());
   1398   bool result = IsBootClassLoaderClass(klass);
   1399   std::string temp;
    1400   // Prune if this is not an image class; this handles any broken sets of image classes, such as
    1401   // having a class in the set but not its superclass.
   1402   result = result || !compiler_options_.IsImageClass(klass->GetDescriptor(&temp));
   1403   bool my_early_exit = false;  // Only for ourselves, ignore caller.
   1404   // Remove classes that failed to verify since we don't want to have java.lang.VerifyError in the
   1405   // app image.
   1406   if (klass->IsErroneous()) {
   1407     result = true;
   1408   } else {
   1409     ObjPtr<mirror::ClassExt> ext(klass->GetExtData());
   1410     CHECK(ext.IsNull() || ext->GetVerifyError() == nullptr) << klass->PrettyClass();
   1411   }
   1412   if (!result) {
    1413     // Check interfaces since these won't be visited through VisitReferences.
   1414     ObjPtr<mirror::IfTable> if_table = klass->GetIfTable();
   1415     for (size_t i = 0, num_interfaces = klass->GetIfTableCount(); i < num_interfaces; ++i) {
   1416       result = result || PruneAppImageClassInternal(if_table->GetInterface(i),
   1417                                                     &my_early_exit,
   1418                                                     visited);
   1419     }
   1420   }
   1421   if (klass->IsObjectArrayClass()) {
   1422     result = result || PruneAppImageClassInternal(klass->GetComponentType(),
   1423                                                   &my_early_exit,
   1424                                                   visited);
   1425   }
   1426   // Check static fields and their classes.
   1427   if (klass->IsResolved() && klass->NumReferenceStaticFields() != 0) {
   1428     size_t num_static_fields = klass->NumReferenceStaticFields();
    1429     // Presumably GC can happen when we are cross compiling; doing the pointer size logic here
    1430     // should not cause performance problems.
   1431     MemberOffset field_offset = klass->GetFirstReferenceStaticFieldOffset(
   1432         Runtime::Current()->GetClassLinker()->GetImagePointerSize());
   1433     for (size_t i = 0u; i < num_static_fields; ++i) {
   1434       mirror::Object* ref = klass->GetFieldObject<mirror::Object>(field_offset);
   1435       if (ref != nullptr) {
   1436         if (ref->IsClass()) {
   1437           result = result || PruneAppImageClassInternal(ref->AsClass(),
   1438                                                         &my_early_exit,
   1439                                                         visited);
   1440         } else {
   1441           mirror::Class* type = ref->GetClass();
   1442           result = result || PruneAppImageClassInternal(type,
   1443                                                         &my_early_exit,
   1444                                                         visited);
   1445           if (!result) {
    1446             // For the non-class case, also go through all the types mentioned by its fields'
    1447             // references recursively to decide whether to keep this class.
   1448             bool tmp = false;
   1449             PruneObjectReferenceVisitor visitor(this, &my_early_exit, visited, &tmp);
   1450             ref->VisitReferences(visitor, visitor);
   1451             result = result || tmp;
   1452           }
   1453         }
   1454       }
   1455       field_offset = MemberOffset(field_offset.Uint32Value() +
   1456                                   sizeof(mirror::HeapReference<mirror::Object>));
   1457     }
   1458   }
   1459   result = result || PruneAppImageClassInternal(klass->GetSuperClass(),
   1460                                                 &my_early_exit,
   1461                                                 visited);
   1462   // Remove the class if the dex file is not in the set of dex files. This happens for classes that
   1463   // are from uses-library if there is no profile. b/30688277
   1464   ObjPtr<mirror::DexCache> dex_cache = klass->GetDexCache();
   1465   if (dex_cache != nullptr) {
   1466     result = result ||
   1467         dex_file_oat_index_map_.find(dex_cache->GetDexFile()) == dex_file_oat_index_map_.end();
   1468   }
   1469   // Erase the element we stored earlier since we are exiting the function.
   1470   auto it = visited->find(klass.Ptr());
   1471   DCHECK(it != visited->end());
   1472   visited->erase(it);
    1473   // Only store the result if it is true or none of the calls early-exited due to circular
    1474   // dependencies. If visited is empty then we are the root caller; in that case the cycle was in
    1475   // a child call and we can remember the result.
    1476   if (result || !my_early_exit || visited->empty()) {
   1477     prune_class_memo_[klass.Ptr()] = result;
   1478   }
   1479   *early_exit |= my_early_exit;
   1480   return result;
   1481 }
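         // Summary of the recursion above: a class is pruned from an app image if it (or its
         // superclass, any implemented interface, its component type, or any type reachable from its
         // static fields) is a boot class loader class outside the boot image, is not an image class,
         // is erroneous, or comes from a dex file that is not being compiled. Results are memoized in
         // prune_class_memo_ unless a circular dependency forced an early exit.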
   1482 
   1483 bool ImageWriter::KeepClass(ObjPtr<mirror::Class> klass) {
   1484   if (klass == nullptr) {
   1485     return false;
   1486   }
   1487   if (!compiler_options_.IsBootImage() &&
   1488       Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(klass)) {
   1489     // Already in boot image, return true.
   1490     return true;
   1491   }
   1492   std::string temp;
   1493   if (!compiler_options_.IsImageClass(klass->GetDescriptor(&temp))) {
   1494     return false;
   1495   }
   1496   if (compiler_options_.IsAppImage()) {
   1497     // For app images, we need to prune boot loader classes that are not in the boot image since
   1498     // these may have already been loaded when the app image is loaded.
   1499     // Keep classes in the boot image space since we don't want to re-resolve these.
   1500     return !PruneAppImageClass(klass);
   1501   }
   1502   return true;
   1503 }
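         // Example: when compiling an app image, a class defined by the boot class loader but absent
         // from the boot image is not kept, since an equivalent class may already be loaded by the
         // time the app image is loaded; such classes are resolved again at runtime rather than being
         // baked into the app image.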
   1504 
   1505 class ImageWriter::PruneClassesVisitor : public ClassVisitor {
   1506  public:
   1507   PruneClassesVisitor(ImageWriter* image_writer, ObjPtr<mirror::ClassLoader> class_loader)
   1508       : image_writer_(image_writer),
   1509         class_loader_(class_loader),
   1510         classes_to_prune_(),
   1511         defined_class_count_(0u) { }
   1512 
   1513   bool operator()(ObjPtr<mirror::Class> klass) override REQUIRES_SHARED(Locks::mutator_lock_) {
   1514     if (!image_writer_->KeepClass(klass.Ptr())) {
   1515       classes_to_prune_.insert(klass.Ptr());
   1516       if (klass->GetClassLoader() == class_loader_) {
   1517         ++defined_class_count_;
   1518       }
   1519     }
   1520     return true;
   1521   }
   1522 
   1523   size_t Prune() REQUIRES_SHARED(Locks::mutator_lock_) {
   1524     ClassTable* class_table =
   1525         Runtime::Current()->GetClassLinker()->ClassTableForClassLoader(class_loader_);
   1526     for (mirror::Class* klass : classes_to_prune_) {
   1527       std::string storage;
   1528       const char* descriptor = klass->GetDescriptor(&storage);
   1529       bool result = class_table->Remove(descriptor);
   1530       DCHECK(result);
   1531       DCHECK(!class_table->Remove(descriptor)) << descriptor;
   1532     }
   1533     return defined_class_count_;
   1534   }
   1535 
   1536  private:
   1537   ImageWriter* const image_writer_;
   1538   const ObjPtr<mirror::ClassLoader> class_loader_;
   1539   std::unordered_set<mirror::Class*> classes_to_prune_;
   1540   size_t defined_class_count_;
   1541 };
   1542 
   1543 class ImageWriter::PruneClassLoaderClassesVisitor : public ClassLoaderVisitor {
   1544  public:
   1545   explicit PruneClassLoaderClassesVisitor(ImageWriter* image_writer)
   1546       : image_writer_(image_writer), removed_class_count_(0) {}
   1547 
   1548   void Visit(ObjPtr<mirror::ClassLoader> class_loader) override
   1549       REQUIRES_SHARED(Locks::mutator_lock_) {
   1550     PruneClassesVisitor classes_visitor(image_writer_, class_loader);
   1551     ClassTable* class_table =
   1552         Runtime::Current()->GetClassLinker()->ClassTableForClassLoader(class_loader);
   1553     class_table->Visit(classes_visitor);
   1554     removed_class_count_ += classes_visitor.Prune();
   1555   }
   1556 
   1557   size_t GetRemovedClassCount() const {
   1558     return removed_class_count_;
   1559   }
   1560 
   1561  private:
   1562   ImageWriter* const image_writer_;
   1563   size_t removed_class_count_;
   1564 };
   1565 
   1566 void ImageWriter::VisitClassLoaders(ClassLoaderVisitor* visitor) {
   1567   WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
   1568   visitor->Visit(nullptr);  // Visit boot class loader.
   1569   Runtime::Current()->GetClassLinker()->VisitClassLoaders(visitor);
   1570 }
   1571 
   1572 void ImageWriter::PruneDexCache(ObjPtr<mirror::DexCache> dex_cache,
   1573                                 ObjPtr<mirror::ClassLoader> class_loader) {
   1574   Runtime* runtime = Runtime::Current();
   1575   ClassLinker* class_linker = runtime->GetClassLinker();
   1576   const DexFile& dex_file = *dex_cache->GetDexFile();
   1577   // Prune methods.
   1578   dex::TypeIndex last_class_idx;  // Initialized to invalid index.
   1579   ObjPtr<mirror::Class> last_class = nullptr;
   1580   mirror::MethodDexCacheType* resolved_methods = dex_cache->GetResolvedMethods();
   1581   for (size_t slot_idx = 0, num = dex_cache->NumResolvedMethods(); slot_idx != num; ++slot_idx) {
   1582     auto pair =
   1583         mirror::DexCache::GetNativePairPtrSize(resolved_methods, slot_idx, target_ptr_size_);
   1584     uint32_t stored_index = pair.index;
   1585     ArtMethod* method = pair.object;
   1586     if (method == nullptr) {
   1587       continue;  // Empty entry.
   1588     }
   1589     // Check if the referenced class is in the image. Note that we want to check the referenced
   1590     // class rather than the declaring class to preserve the semantics, i.e. using a MethodId
   1591     // results in resolving the referenced class and that can for example throw OOME.
   1592     const dex::MethodId& method_id = dex_file.GetMethodId(stored_index);
   1593     if (method_id.class_idx_ != last_class_idx) {
   1594       last_class_idx = method_id.class_idx_;
   1595       last_class = class_linker->LookupResolvedType(last_class_idx, dex_cache, class_loader);
   1596       if (last_class != nullptr && !KeepClass(last_class)) {
   1597         last_class = nullptr;
   1598       }
   1599     }
   1600     if (last_class == nullptr) {
   1601       dex_cache->ClearResolvedMethod(stored_index, target_ptr_size_);
   1602     }
   1603   }
   1604   // Prune fields.
   1605   mirror::FieldDexCacheType* resolved_fields = dex_cache->GetResolvedFields();
   1606   last_class_idx = dex::TypeIndex();  // Initialized to invalid index.
   1607   last_class = nullptr;
   1608   for (size_t slot_idx = 0, num = dex_cache->NumResolvedFields(); slot_idx != num; ++slot_idx) {
   1609     auto pair = mirror::DexCache::GetNativePairPtrSize(resolved_fields, slot_idx, target_ptr_size_);
   1610     uint32_t stored_index = pair.index;
   1611     ArtField* field = pair.object;
   1612     if (field == nullptr) {
   1613       continue;  // Empty entry.
   1614     }
   1615     // Check if the referenced class is in the image. Note that we want to check the referenced
   1616     // class rather than the declaring class to preserve the semantics, i.e. using a FieldId
   1617     // results in resolving the referenced class and that can for example throw OOME.
   1618     const dex::FieldId& field_id = dex_file.GetFieldId(stored_index);
   1619     if (field_id.class_idx_ != last_class_idx) {
   1620       last_class_idx = field_id.class_idx_;
   1621       last_class = class_linker->LookupResolvedType(last_class_idx, dex_cache, class_loader);
   1622       if (last_class != nullptr && !KeepClass(last_class)) {
   1623         last_class = nullptr;
   1624       }
   1625     }
   1626     if (last_class == nullptr) {
   1627       dex_cache->ClearResolvedField(stored_index, target_ptr_size_);
   1628     }
   1629   }
   1630   // Prune types.
   1631   for (size_t slot_idx = 0, num = dex_cache->NumResolvedTypes(); slot_idx != num; ++slot_idx) {
   1632     mirror::TypeDexCachePair pair =
   1633         dex_cache->GetResolvedTypes()[slot_idx].load(std::memory_order_relaxed);
   1634     uint32_t stored_index = pair.index;
   1635     ObjPtr<mirror::Class> klass = pair.object.Read();
   1636     if (klass != nullptr && !KeepClass(klass)) {
   1637       dex_cache->ClearResolvedType(dex::TypeIndex(stored_index));
   1638     }
   1639   }
   1640   // Strings do not need pruning.
   1641 }
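         // PruneDexCache() only nulls out entries; PreloadDexCache() below then repopulates the
         // hash-based dex cache arrays deterministically so that identical inputs yield identical
         // image contents.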
   1642 
   1643 void ImageWriter::PreloadDexCache(ObjPtr<mirror::DexCache> dex_cache,
   1644                                   ObjPtr<mirror::ClassLoader> class_loader) {
   1645   // To ensure deterministic contents of the hash-based arrays, each slot shall contain
   1646   // the candidate with the lowest index. As we're processing entries in increasing index
   1647   // order, this means trying to look up the entry for the current index if the slot is
   1648   // empty or if it contains a higher index.
   1649 
   1650   Runtime* runtime = Runtime::Current();
   1651   ClassLinker* class_linker = runtime->GetClassLinker();
   1652   const DexFile& dex_file = *dex_cache->GetDexFile();
   1653   // Preload the methods array and make the contents deterministic.
   1654   mirror::MethodDexCacheType* resolved_methods = dex_cache->GetResolvedMethods();
   1655   dex::TypeIndex last_class_idx;  // Initialized to invalid index.
   1656   ObjPtr<mirror::Class> last_class = nullptr;
   1657   for (size_t i = 0, num = dex_cache->GetDexFile()->NumMethodIds(); i != num; ++i) {
   1658     uint32_t slot_idx = dex_cache->MethodSlotIndex(i);
   1659     auto pair =
   1660         mirror::DexCache::GetNativePairPtrSize(resolved_methods, slot_idx, target_ptr_size_);
   1661     uint32_t stored_index = pair.index;
   1662     ArtMethod* method = pair.object;
   1663     if (method != nullptr && i > stored_index) {
   1664       continue;  // Already checked.
   1665     }
   1666     // Check if the referenced class is in the image. Note that we want to check the referenced
   1667     // class rather than the declaring class to preserve the semantics, i.e. using a MethodId
   1668     // results in resolving the referenced class and that can for example throw OOME.
   1669     const dex::MethodId& method_id = dex_file.GetMethodId(i);
   1670     if (method_id.class_idx_ != last_class_idx) {
   1671       last_class_idx = method_id.class_idx_;
   1672       last_class = class_linker->LookupResolvedType(last_class_idx, dex_cache, class_loader);
   1673     }
   1674     if (method == nullptr || i < stored_index) {
   1675       if (last_class != nullptr) {
   1676         // Try to resolve the method with the class linker, which will insert
   1677         // it into the dex cache if successful.
   1678         method = class_linker->FindResolvedMethod(last_class, dex_cache, class_loader, i);
   1679         DCHECK(method == nullptr || dex_cache->GetResolvedMethod(i, target_ptr_size_) == method);
   1680       }
   1681     } else {
   1682       DCHECK_EQ(i, stored_index);
   1683       DCHECK(last_class != nullptr);
   1684     }
   1685   }
   1686   // Preload the fields array and make the contents deterministic.
   1687   mirror::FieldDexCacheType* resolved_fields = dex_cache->GetResolvedFields();
   1688   last_class_idx = dex::TypeIndex();  // Initialized to invalid index.
   1689   last_class = nullptr;
   1690   for (size_t i = 0, end = dex_file.NumFieldIds(); i < end; ++i) {
   1691     uint32_t slot_idx = dex_cache->FieldSlotIndex(i);
   1692     auto pair = mirror::DexCache::GetNativePairPtrSize(resolved_fields, slot_idx, target_ptr_size_);
   1693     uint32_t stored_index = pair.index;
   1694     ArtField* field = pair.object;
   1695     if (field != nullptr && i > stored_index) {
   1696       continue;  // Already checked.
   1697     }
   1698     // Check if the referenced class is in the image. Note that we want to check the referenced
   1699     // class rather than the declaring class to preserve the semantics, i.e. using a FieldId
   1700     // results in resolving the referenced class and that can for example throw OOME.
   1701     const dex::FieldId& field_id = dex_file.GetFieldId(i);
   1702     if (field_id.class_idx_ != last_class_idx) {
   1703       last_class_idx = field_id.class_idx_;
   1704       last_class = class_linker->LookupResolvedType(last_class_idx, dex_cache, class_loader);
   1705       if (last_class != nullptr && !KeepClass(last_class)) {
   1706         last_class = nullptr;
   1707       }
   1708     }
   1709     if (field == nullptr || i < stored_index) {
   1710       if (last_class != nullptr) {
   1711         // Try to resolve the field with the class linker, which will insert
   1712         // it into the dex cache if successful.
   1713         field = class_linker->FindResolvedFieldJLS(last_class, dex_cache, class_loader, i);
   1714         DCHECK(field == nullptr || dex_cache->GetResolvedField(i, target_ptr_size_) == field);
   1715       }
   1716     } else {
   1717       DCHECK_EQ(i, stored_index);
   1718       DCHECK(last_class != nullptr);
   1719     }
   1720   }
   1721   // Preload the types array and make the contents deterministic.
   1722   // This is done after fields and methods as their lookup can touch the types array.
   1723   for (size_t i = 0, end = dex_cache->GetDexFile()->NumTypeIds(); i < end; ++i) {
   1724     dex::TypeIndex type_idx(i);
   1725     uint32_t slot_idx = dex_cache->TypeSlotIndex(type_idx);
   1726     mirror::TypeDexCachePair pair =
   1727         dex_cache->GetResolvedTypes()[slot_idx].load(std::memory_order_relaxed);
   1728     uint32_t stored_index = pair.index;
   1729     ObjPtr<mirror::Class> klass = pair.object.Read();
   1730     if (klass == nullptr || i < stored_index) {
   1731       klass = class_linker->LookupResolvedType(type_idx, dex_cache, class_loader);
   1732       DCHECK(klass == nullptr || dex_cache->GetResolvedType(type_idx) == klass);
   1733     }
   1734   }
   1735   // Preload the strings array and make the contents deterministic.
   1736   for (size_t i = 0, end = dex_cache->GetDexFile()->NumStringIds(); i < end; ++i) {
   1737     dex::StringIndex string_idx(i);
   1738     uint32_t slot_idx = dex_cache->StringSlotIndex(string_idx);
   1739     mirror::StringDexCachePair pair =
   1740         dex_cache->GetStrings()[slot_idx].load(std::memory_order_relaxed);
   1741     uint32_t stored_index = pair.index;
   1742     ObjPtr<mirror::String> string = pair.object.Read();
   1743     if (string == nullptr || i < stored_index) {
   1744       string = class_linker->LookupString(string_idx, dex_cache);
   1745       DCHECK(string == nullptr || dex_cache->GetResolvedString(string_idx) == string);
   1746     }
   1747   }
   1748 }
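         // Worked example of the "lowest index wins" rule above (hypothetical indices): if method ids
         // 5 and 13 both resolve and hash to the same slot, the slot ends up holding id 5. Visiting
         // id 13 while id 5 is stored hits the `i > stored_index` check and is skipped; visiting id 5
         // while id 13 is stored hits `i < stored_index` and triggers a fresh lookup that refills the
         // slot (when the entry resolves). The outcome is independent of the order in which entries
         // were originally resolved.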
   1749 
   1750 void ImageWriter::PruneNonImageClasses() {
   1751   Runtime* runtime = Runtime::Current();
   1752   ClassLinker* class_linker = runtime->GetClassLinker();
   1753   Thread* self = Thread::Current();
   1754   ScopedAssertNoThreadSuspension sa(__FUNCTION__);
   1755 
   1756   // Prune uses-library dex caches. Only prune the uses-library dex caches since we want to make
   1757   // sure the other ones don't get unloaded before the OatWriter runs.
   1758   class_linker->VisitClassTables(
   1759       [&](ClassTable* table) REQUIRES_SHARED(Locks::mutator_lock_) {
   1760     table->RemoveStrongRoots(
   1761         [&](GcRoot<mirror::Object> root) REQUIRES_SHARED(Locks::mutator_lock_) {
   1762       ObjPtr<mirror::Object> obj = root.Read();
   1763       if (obj->IsDexCache()) {
   1764         // Return true if the dex file is not one of the ones in the map.
   1765         return dex_file_oat_index_map_.find(obj->AsDexCache()->GetDexFile()) ==
   1766             dex_file_oat_index_map_.end();
   1767       }
   1768       // Return false to avoid removing.
   1769       return false;
   1770     });
   1771   });
   1772 
   1773   // Remove the undesired classes from the class roots.
   1774   {
   1775     PruneClassLoaderClassesVisitor class_loader_visitor(this);
   1776     VisitClassLoaders(&class_loader_visitor);
   1777     VLOG(compiler) << "Pruned " << class_loader_visitor.GetRemovedClassCount() << " classes";
   1778   }
   1779 
   1780   // Clear references to removed classes from the DexCaches.
   1781   std::vector<ObjPtr<mirror::DexCache>> dex_caches = FindDexCaches(self);
   1782   for (ObjPtr<mirror::DexCache> dex_cache : dex_caches) {
   1783     // Pass the class loader associated with the DexCache. This can either be
    1784     // the app's `class_loader` or `nullptr` for the boot class loader.
   1785     bool is_app_image_dex_cache = compiler_options_.IsAppImage() && IsImageObject(dex_cache);
   1786     PruneDexCache(dex_cache, is_app_image_dex_cache ? GetAppClassLoader() : nullptr);
   1787   }
   1788 
   1789   // Drop the array class cache in the ClassLinker, as these are roots holding those classes live.
   1790   class_linker->DropFindArrayClassCache();
   1791 
   1792   // Clear to save RAM.
   1793   prune_class_memo_.clear();
   1794 }
   1795 
   1796 std::vector<ObjPtr<mirror::DexCache>> ImageWriter::FindDexCaches(Thread* self) {
   1797   std::vector<ObjPtr<mirror::DexCache>> dex_caches;
   1798   ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
   1799   ReaderMutexLock mu2(self, *Locks::dex_lock_);
   1800   dex_caches.reserve(class_linker->GetDexCachesData().size());
   1801   for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
   1802     if (self->IsJWeakCleared(data.weak_root)) {
   1803       continue;
   1804     }
   1805     dex_caches.push_back(self->DecodeJObject(data.weak_root)->AsDexCache());
   1806   }
   1807   return dex_caches;
   1808 }
   1809 
   1810 void ImageWriter::CheckNonImageClassesRemoved() {
   1811   auto visitor = [&](Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
   1812     if (obj->IsClass() && !IsInBootImage(obj)) {
   1813       ObjPtr<Class> klass = obj->AsClass();
   1814       if (!KeepClass(klass)) {
   1815         DumpImageClasses();
   1816         CHECK(KeepClass(klass))
   1817             << Runtime::Current()->GetHeap()->GetVerification()->FirstPathFromRootSet(klass);
   1818       }
   1819     }
   1820   };
   1821   gc::Heap* heap = Runtime::Current()->GetHeap();
   1822   heap->VisitObjects(visitor);
   1823 }
   1824 
   1825 void ImageWriter::DumpImageClasses() {
   1826   for (const std::string& image_class : compiler_options_.GetImageClasses()) {
   1827     LOG(INFO) << " " << image_class;
   1828   }
   1829 }
   1830 
   1831 mirror::String* ImageWriter::FindInternedString(mirror::String* string) {
   1832   Thread* const self = Thread::Current();
   1833   for (const ImageInfo& image_info : image_infos_) {
   1834     const ObjPtr<mirror::String> found = image_info.intern_table_->LookupStrong(self, string);
   1835     DCHECK(image_info.intern_table_->LookupWeak(self, string) == nullptr)
   1836         << string->ToModifiedUtf8();
   1837     if (found != nullptr) {
   1838       return found.Ptr();
   1839     }
   1840   }
   1841   if (!compiler_options_.IsBootImage()) {
   1842     Runtime* const runtime = Runtime::Current();
   1843     ObjPtr<mirror::String> found = runtime->GetInternTable()->LookupStrong(self, string);
    1844     // If we found it in the runtime intern table it could either be in the boot image or interned
    1845     // during app image compilation. If it was in the boot image, return that; otherwise return null
    1846     // since it belongs to another image space.
   1847     if (found != nullptr && runtime->GetHeap()->ObjectIsInBootImageSpace(found.Ptr())) {
   1848       return found.Ptr();
   1849     }
   1850     DCHECK(runtime->GetInternTable()->LookupWeak(self, string) == nullptr)
   1851         << string->ToModifiedUtf8();
   1852   }
   1853   return nullptr;
   1854 }
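         // FindInternedString() is used during bin slot assignment (see TryAssignBinSlot() below) so
         // that each string is interned into at most one image's intern table and other references are
         // redirected to that canonical copy.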
   1855 
   1856 ObjPtr<mirror::ObjectArray<mirror::Object>> ImageWriter::CollectDexCaches(Thread* self,
   1857                                                                           size_t oat_index) const {
   1858   std::unordered_set<const DexFile*> image_dex_files;
   1859   for (auto& pair : dex_file_oat_index_map_) {
   1860     const DexFile* image_dex_file = pair.first;
   1861     size_t image_oat_index = pair.second;
   1862     if (oat_index == image_oat_index) {
   1863       image_dex_files.insert(image_dex_file);
   1864     }
   1865   }
   1866 
    1867   // Build an Object[] of all the DexCaches used in the source_space_.
    1868   // Since we can't hold the dex lock when allocating the dex_caches
    1869   // ObjectArray, we lock the dex lock twice: first to get the number
    1870   // of dex caches and then again to copy the dex caches. We check
    1871   // that the number of dex caches does not change between the two passes.
   1872   ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
   1873   size_t dex_cache_count = 0;
   1874   {
   1875     ReaderMutexLock mu(self, *Locks::dex_lock_);
   1876     // Count number of dex caches not in the boot image.
   1877     for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
   1878       ObjPtr<mirror::DexCache> dex_cache =
   1879           ObjPtr<mirror::DexCache>::DownCast(self->DecodeJObject(data.weak_root));
   1880       if (dex_cache == nullptr) {
   1881         continue;
   1882       }
   1883       const DexFile* dex_file = dex_cache->GetDexFile();
   1884       if (IsImageObject(dex_cache)) {
   1885         dex_cache_count += image_dex_files.find(dex_file) != image_dex_files.end() ? 1u : 0u;
   1886       }
   1887     }
   1888   }
   1889   ObjPtr<ObjectArray<Object>> dex_caches = ObjectArray<Object>::Alloc(
   1890       self, GetClassRoot<ObjectArray<Object>>(class_linker), dex_cache_count);
   1891   CHECK(dex_caches != nullptr) << "Failed to allocate a dex cache array.";
   1892   {
   1893     ReaderMutexLock mu(self, *Locks::dex_lock_);
   1894     size_t non_image_dex_caches = 0;
    1895     // Re-count the number of non-image dex caches.
   1896     for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
   1897       ObjPtr<mirror::DexCache> dex_cache =
   1898           ObjPtr<mirror::DexCache>::DownCast(self->DecodeJObject(data.weak_root));
   1899       if (dex_cache == nullptr) {
   1900         continue;
   1901       }
   1902       const DexFile* dex_file = dex_cache->GetDexFile();
   1903       if (IsImageObject(dex_cache)) {
   1904         non_image_dex_caches += image_dex_files.find(dex_file) != image_dex_files.end() ? 1u : 0u;
   1905       }
   1906     }
   1907     CHECK_EQ(dex_cache_count, non_image_dex_caches)
   1908         << "The number of non-image dex caches changed.";
   1909     size_t i = 0;
   1910     for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
   1911       ObjPtr<mirror::DexCache> dex_cache =
   1912           ObjPtr<mirror::DexCache>::DownCast(self->DecodeJObject(data.weak_root));
   1913       if (dex_cache == nullptr) {
   1914         continue;
   1915       }
   1916       const DexFile* dex_file = dex_cache->GetDexFile();
   1917       if (IsImageObject(dex_cache) &&
   1918           image_dex_files.find(dex_file) != image_dex_files.end()) {
   1919         dex_caches->Set<false>(i, dex_cache.Ptr());
   1920         ++i;
   1921       }
   1922     }
   1923   }
   1924   return dex_caches;
   1925 }
   1926 
   1927 ObjPtr<ObjectArray<Object>> ImageWriter::CreateImageRoots(
   1928     size_t oat_index,
   1929     Handle<mirror::ObjectArray<mirror::Object>> boot_image_live_objects) const {
   1930   Runtime* runtime = Runtime::Current();
   1931   ClassLinker* class_linker = runtime->GetClassLinker();
   1932   Thread* self = Thread::Current();
   1933   StackHandleScope<2> hs(self);
   1934 
   1935   Handle<ObjectArray<Object>> dex_caches(hs.NewHandle(CollectDexCaches(self, oat_index)));
   1936 
    1937   // Build an Object[] of the roots needed to restore the runtime.
   1938   int32_t image_roots_size = ImageHeader::NumberOfImageRoots(compiler_options_.IsAppImage());
   1939   Handle<ObjectArray<Object>> image_roots(hs.NewHandle(ObjectArray<Object>::Alloc(
   1940       self, GetClassRoot<ObjectArray<Object>>(class_linker), image_roots_size)));
   1941   image_roots->Set<false>(ImageHeader::kDexCaches, dex_caches.Get());
   1942   image_roots->Set<false>(ImageHeader::kClassRoots, class_linker->GetClassRoots());
   1943   image_roots->Set<false>(ImageHeader::kOomeWhenThrowingException,
   1944                           runtime->GetPreAllocatedOutOfMemoryErrorWhenThrowingException());
   1945   image_roots->Set<false>(ImageHeader::kOomeWhenThrowingOome,
   1946                           runtime->GetPreAllocatedOutOfMemoryErrorWhenThrowingOOME());
   1947   image_roots->Set<false>(ImageHeader::kOomeWhenHandlingStackOverflow,
   1948                           runtime->GetPreAllocatedOutOfMemoryErrorWhenHandlingStackOverflow());
   1949   image_roots->Set<false>(ImageHeader::kNoClassDefFoundError,
   1950                           runtime->GetPreAllocatedNoClassDefFoundError());
   1951   if (!compiler_options_.IsAppImage()) {
   1952     DCHECK(boot_image_live_objects != nullptr);
   1953     image_roots->Set<false>(ImageHeader::kBootImageLiveObjects, boot_image_live_objects.Get());
   1954   } else {
   1955     DCHECK(boot_image_live_objects == nullptr);
   1956   }
   1957   for (int32_t i = 0; i != image_roots_size; ++i) {
   1958     if (compiler_options_.IsAppImage() && i == ImageHeader::kAppImageClassLoader) {
   1959       // image_roots[ImageHeader::kAppImageClassLoader] will be set later for app image.
   1960       continue;
   1961     }
   1962     CHECK(image_roots->Get(i) != nullptr);
   1963   }
   1964   return image_roots.Get();
   1965 }
   1966 
   1967 mirror::Object* ImageWriter::TryAssignBinSlot(WorkStack& work_stack,
   1968                                               mirror::Object* obj,
   1969                                               size_t oat_index) {
   1970   if (obj == nullptr || !IsImageObject(obj)) {
   1971     // Object is null or already in the image, there is no work to do.
   1972     return obj;
   1973   }
   1974   if (!IsImageBinSlotAssigned(obj)) {
    1975     // We want to intern all strings, but we also assign an offset to the source string. Since the
    1976     // pruning phase has already happened, if we intern a string to one already in the image we
    1977     // still end up copying the now-unreachable source string.
   1978     if (obj->IsString()) {
    1979       // Need to check if the string is already interned in another image info so that the intern
    1980       // tables of two different images don't contain the same string.
   1981       mirror::String* interned = FindInternedString(obj->AsString().Ptr());
   1982       if (interned == nullptr) {
   1983         // Not in another image space, insert to our table.
   1984         interned =
   1985             GetImageInfo(oat_index).intern_table_->InternStrongImageString(obj->AsString()).Ptr();
   1986         DCHECK_EQ(interned, obj);
   1987       }
   1988     } else if (obj->IsDexCache()) {
   1989       oat_index = GetOatIndexForDexCache(obj->AsDexCache());
   1990     } else if (obj->IsClass()) {
   1991       // Visit and assign offsets for fields and field arrays.
   1992       ObjPtr<mirror::Class> as_klass = obj->AsClass();
   1993       ObjPtr<mirror::DexCache> dex_cache = as_klass->GetDexCache();
   1994       DCHECK(!as_klass->IsErroneous()) << as_klass->GetStatus();
   1995       if (compiler_options_.IsAppImage()) {
    1996         // Extra sanity check: no boot class loader classes should be left!
   1997         CHECK(!IsBootClassLoaderClass(as_klass)) << as_klass->PrettyClass();
   1998       }
   1999       LengthPrefixedArray<ArtField>* fields[] = {
   2000           as_klass->GetSFieldsPtr(), as_klass->GetIFieldsPtr(),
   2001       };
    2002       // Overwrite the oat index value since the class's dex cache is a more accurate indication of
    2003       // where it belongs.
   2004       oat_index = GetOatIndexForDexCache(dex_cache);
   2005       ImageInfo& image_info = GetImageInfo(oat_index);
   2006       if (!compiler_options_.IsAppImage()) {
   2007         // Note: Avoid locking to prevent lock order violations from root visiting;
   2008         // image_info.class_table_ is only accessed from the image writer.
   2009         image_info.class_table_->InsertWithoutLocks(as_klass);
   2010       }
   2011       for (LengthPrefixedArray<ArtField>* cur_fields : fields) {
   2012         // Total array length including header.
   2013         if (cur_fields != nullptr) {
   2014           const size_t header_size = LengthPrefixedArray<ArtField>::ComputeSize(0);
   2015           // Forward the entire array at once.
   2016           auto it = native_object_relocations_.find(cur_fields);
   2017           CHECK(it == native_object_relocations_.end()) << "Field array " << cur_fields
   2018                                                   << " already forwarded";
   2019           size_t offset = image_info.GetBinSlotSize(Bin::kArtField);
   2020           DCHECK(!IsInBootImage(cur_fields));
   2021           native_object_relocations_.emplace(
   2022               cur_fields,
   2023               NativeObjectRelocation {
   2024                   oat_index, offset, NativeObjectRelocationType::kArtFieldArray
   2025               });
   2026           offset += header_size;
   2027           // Forward individual fields so that we can quickly find where they belong.
   2028           for (size_t i = 0, count = cur_fields->size(); i < count; ++i) {
    2029             // Need to forward arrays separately from fields.
   2030             ArtField* field = &cur_fields->At(i);
   2031             auto it2 = native_object_relocations_.find(field);
   2032             CHECK(it2 == native_object_relocations_.end()) << "Field at index=" << i
   2033                 << " already assigned " << field->PrettyField() << " static=" << field->IsStatic();
   2034             DCHECK(!IsInBootImage(field));
   2035             native_object_relocations_.emplace(
   2036                 field,
   2037                 NativeObjectRelocation { oat_index,
   2038                                          offset,
   2039                                          NativeObjectRelocationType::kArtField });
   2040             offset += sizeof(ArtField);
   2041           }
   2042           image_info.IncrementBinSlotSize(
   2043               Bin::kArtField, header_size + cur_fields->size() * sizeof(ArtField));
   2044           DCHECK_EQ(offset, image_info.GetBinSlotSize(Bin::kArtField));
   2045         }
   2046       }
   2047       // Visit and assign offsets for methods.
   2048       size_t num_methods = as_klass->NumMethods();
   2049       if (num_methods != 0) {
   2050         bool any_dirty = false;
   2051         for (auto& m : as_klass->GetMethods(target_ptr_size_)) {
   2052           if (WillMethodBeDirty(&m)) {
   2053             any_dirty = true;
   2054             break;
   2055           }
   2056         }
   2057         NativeObjectRelocationType type = any_dirty
   2058             ? NativeObjectRelocationType::kArtMethodDirty
   2059             : NativeObjectRelocationType::kArtMethodClean;
   2060         Bin bin_type = BinTypeForNativeRelocationType(type);
   2061         // Forward the entire array at once, but header first.
   2062         const size_t method_alignment = ArtMethod::Alignment(target_ptr_size_);
   2063         const size_t method_size = ArtMethod::Size(target_ptr_size_);
   2064         const size_t header_size = LengthPrefixedArray<ArtMethod>::ComputeSize(0,
   2065                                                                                method_size,
   2066                                                                                method_alignment);
   2067         LengthPrefixedArray<ArtMethod>* array = as_klass->GetMethodsPtr();
   2068         auto it = native_object_relocations_.find(array);
   2069         CHECK(it == native_object_relocations_.end())
   2070             << "Method array " << array << " already forwarded";
   2071         size_t offset = image_info.GetBinSlotSize(bin_type);
   2072         DCHECK(!IsInBootImage(array));
   2073         native_object_relocations_.emplace(array,
   2074             NativeObjectRelocation {
   2075                 oat_index,
   2076                 offset,
   2077                 any_dirty ? NativeObjectRelocationType::kArtMethodArrayDirty
   2078                           : NativeObjectRelocationType::kArtMethodArrayClean });
   2079         image_info.IncrementBinSlotSize(bin_type, header_size);
   2080         for (auto& m : as_klass->GetMethods(target_ptr_size_)) {
   2081           AssignMethodOffset(&m, type, oat_index);
   2082         }
   2083         (any_dirty ? dirty_methods_ : clean_methods_) += num_methods;
   2084       }
   2085       // Assign offsets for all runtime methods in the IMT since these may hold conflict tables
   2086       // live.
   2087       if (as_klass->ShouldHaveImt()) {
   2088         ImTable* imt = as_klass->GetImt(target_ptr_size_);
   2089         if (TryAssignImTableOffset(imt, oat_index)) {
    2090           // Since IMTs can be shared, only do this the first time to avoid double counting IMT
    2091           // method fixups.
   2092           for (size_t i = 0; i < ImTable::kSize; ++i) {
   2093             ArtMethod* imt_method = imt->Get(i, target_ptr_size_);
   2094             DCHECK(imt_method != nullptr);
   2095             if (imt_method->IsRuntimeMethod() &&
   2096                 !IsInBootImage(imt_method) &&
   2097                 !NativeRelocationAssigned(imt_method)) {
   2098               AssignMethodOffset(imt_method, NativeObjectRelocationType::kRuntimeMethod, oat_index);
   2099             }
   2100           }
   2101         }
   2102       }
   2103     } else if (obj->IsClassLoader()) {
   2104       // Register the class loader if it has a class table.
   2105       // The fake boot class loader should not get registered.
   2106       ObjPtr<mirror::ClassLoader> class_loader = obj->AsClassLoader();
   2107       if (class_loader->GetClassTable() != nullptr) {
   2108         DCHECK(compiler_options_.IsAppImage());
   2109         if (class_loader == GetAppClassLoader()) {
   2110           ImageInfo& image_info = GetImageInfo(oat_index);
   2111           // Note: Avoid locking to prevent lock order violations from root visiting;
    2112           // image_info.class_table_ is only accessed from the image writer
   2113           // and class_loader->GetClassTable() is iterated but not modified.
   2114           image_info.class_table_->CopyWithoutLocks(*class_loader->GetClassTable());
   2115         }
   2116       }
   2117     }
   2118     AssignImageBinSlot(obj, oat_index);
   2119     work_stack.emplace(obj, oat_index);
   2120   }
   2121   if (obj->IsString()) {
    2122     // Always return the interned string if one exists.
   2123     mirror::String* interned = FindInternedString(obj->AsString().Ptr());
   2124     if (interned != nullptr) {
   2125       return interned;
   2126     }
   2127   }
   2128   return obj;
   2129 }
   2130 
   2131 bool ImageWriter::NativeRelocationAssigned(void* ptr) const {
   2132   return native_object_relocations_.find(ptr) != native_object_relocations_.end();
   2133 }
   2134 
   2135 bool ImageWriter::TryAssignImTableOffset(ImTable* imt, size_t oat_index) {
   2136   // No offset, or already assigned.
   2137   if (imt == nullptr || IsInBootImage(imt) || NativeRelocationAssigned(imt)) {
   2138     return false;
   2139   }
    2140   // Assign a native relocation slot for the whole ImTable in the kImTable bin.
   2141   ImageInfo& image_info = GetImageInfo(oat_index);
   2142   const size_t size = ImTable::SizeInBytes(target_ptr_size_);
   2143   native_object_relocations_.emplace(
   2144       imt,
   2145       NativeObjectRelocation {
   2146           oat_index,
   2147           image_info.GetBinSlotSize(Bin::kImTable),
   2148           NativeObjectRelocationType::kIMTable});
   2149   image_info.IncrementBinSlotSize(Bin::kImTable, size);
   2150   return true;
   2151 }
   2152 
   2153 void ImageWriter::TryAssignConflictTableOffset(ImtConflictTable* table, size_t oat_index) {
   2154   // No offset, or already assigned.
   2155   if (table == nullptr || NativeRelocationAssigned(table)) {
   2156     return;
   2157   }
   2158   CHECK(!IsInBootImage(table));
   2159   // If the method is a conflict method we also want to assign the conflict table offset.
   2160   ImageInfo& image_info = GetImageInfo(oat_index);
   2161   const size_t size = table->ComputeSize(target_ptr_size_);
   2162   native_object_relocations_.emplace(
   2163       table,
   2164       NativeObjectRelocation {
   2165           oat_index,
   2166           image_info.GetBinSlotSize(Bin::kIMTConflictTable),
   2167           NativeObjectRelocationType::kIMTConflictTable});
   2168   image_info.IncrementBinSlotSize(Bin::kIMTConflictTable, size);
   2169 }
   2170 
   2171 void ImageWriter::AssignMethodOffset(ArtMethod* method,
   2172                                      NativeObjectRelocationType type,
   2173                                      size_t oat_index) {
   2174   DCHECK(!IsInBootImage(method));
   2175   CHECK(!NativeRelocationAssigned(method)) << "Method " << method << " already assigned "
   2176       << ArtMethod::PrettyMethod(method);
   2177   if (method->IsRuntimeMethod()) {
   2178     TryAssignConflictTableOffset(method->GetImtConflictTable(target_ptr_size_), oat_index);
   2179   }
   2180   ImageInfo& image_info = GetImageInfo(oat_index);
   2181   Bin bin_type = BinTypeForNativeRelocationType(type);
   2182   size_t offset = image_info.GetBinSlotSize(bin_type);
   2183   native_object_relocations_.emplace(method, NativeObjectRelocation { oat_index, offset, type });
   2184   image_info.IncrementBinSlotSize(bin_type, ArtMethod::Size(target_ptr_size_));
   2185 }
   2186 
   2187 void ImageWriter::UnbinObjectsIntoOffset(mirror::Object* obj) {
   2188   DCHECK(!IsInBootImage(obj));
   2189   CHECK(obj != nullptr);
   2190 
   2191   // We know the bin slot, and the total bin sizes for all objects by now,
   2192   // so calculate the object's final image offset.
   2193 
   2194   DCHECK(IsImageBinSlotAssigned(obj));
   2195   BinSlot bin_slot = GetImageBinSlot(obj);
   2196   // Change the lockword from a bin slot into an offset
   2197   AssignImageOffset(obj, bin_slot);
   2198 }
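         // Rough sketch of what AssignImageOffset() is expected to compute from the bin slot (the
         // helper name start_of() is illustrative, not defined in this file):
         //
         //   offset = image_objects_offset_begin_      // space reserved for the ImageHeader
         //          + start_of(bin_slot.GetBin())      // total size of all preceding bins
         //          + bin_slot.GetIndex();             // offset of this object within its bin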
   2199 
   2200 class ImageWriter::VisitReferencesVisitor {
   2201  public:
   2202   VisitReferencesVisitor(ImageWriter* image_writer, WorkStack* work_stack, size_t oat_index)
   2203       : image_writer_(image_writer), work_stack_(work_stack), oat_index_(oat_index) {}
   2204 
    2205   // Roots are visited separately from instance fields; null roots are skipped.
   2206   ALWAYS_INLINE void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root) const
   2207       REQUIRES_SHARED(Locks::mutator_lock_) {
   2208     if (!root->IsNull()) {
   2209       VisitRoot(root);
   2210     }
   2211   }
   2212 
   2213   ALWAYS_INLINE void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
   2214       REQUIRES_SHARED(Locks::mutator_lock_) {
   2215     root->Assign(VisitReference(root->AsMirrorPtr()));
   2216   }
   2217 
   2218   ALWAYS_INLINE void operator() (ObjPtr<mirror::Object> obj,
   2219                                  MemberOffset offset,
   2220                                  bool is_static ATTRIBUTE_UNUSED) const
   2221       REQUIRES_SHARED(Locks::mutator_lock_) {
   2222     mirror::Object* ref =
   2223         obj->GetFieldObject<mirror::Object, kVerifyNone, kWithoutReadBarrier>(offset);
   2224     obj->SetFieldObject</*kTransactionActive*/false>(offset, VisitReference(ref));
   2225   }
   2226 
   2227   ALWAYS_INLINE void operator() (ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
   2228                                  ObjPtr<mirror::Reference> ref) const
   2229       REQUIRES_SHARED(Locks::mutator_lock_) {
   2230     operator()(ref, mirror::Reference::ReferentOffset(), /* is_static */ false);
   2231   }
   2232 
   2233  private:
   2234   mirror::Object* VisitReference(mirror::Object* ref) const REQUIRES_SHARED(Locks::mutator_lock_) {
   2235     return image_writer_->TryAssignBinSlot(*work_stack_, ref, oat_index_);
   2236   }
   2237 
   2238   ImageWriter* const image_writer_;
   2239   WorkStack* const work_stack_;
   2240   const size_t oat_index_;
   2241 };
   2242 
   2243 class ImageWriter::GetRootsVisitor : public RootVisitor  {
   2244  public:
   2245   explicit GetRootsVisitor(std::vector<mirror::Object*>* roots) : roots_(roots) {}
   2246 
   2247   void VisitRoots(mirror::Object*** roots,
   2248                   size_t count,
   2249                   const RootInfo& info ATTRIBUTE_UNUSED) override
   2250       REQUIRES_SHARED(Locks::mutator_lock_) {
   2251     for (size_t i = 0; i < count; ++i) {
   2252       roots_->push_back(*roots[i]);
   2253     }
   2254   }
   2255 
   2256   void VisitRoots(mirror::CompressedReference<mirror::Object>** roots,
   2257                   size_t count,
   2258                   const RootInfo& info ATTRIBUTE_UNUSED) override
   2259       REQUIRES_SHARED(Locks::mutator_lock_) {
   2260     for (size_t i = 0; i < count; ++i) {
   2261       roots_->push_back(roots[i]->AsMirrorPtr());
   2262     }
   2263   }
   2264 
   2265  private:
   2266   std::vector<mirror::Object*>* const roots_;
   2267 };
   2268 
   2269 void ImageWriter::ProcessWorkStack(WorkStack* work_stack) {
   2270   while (!work_stack->empty()) {
   2271     std::pair<mirror::Object*, size_t> pair(work_stack->top());
   2272     work_stack->pop();
   2273     VisitReferencesVisitor visitor(this, work_stack, /*oat_index*/ pair.second);
   2274     // Walk references and assign bin slots for them.
   2275     pair.first->VisitReferences</*kVisitNativeRoots*/true, kVerifyNone, kWithoutReadBarrier>(
   2276         visitor,
   2277         visitor);
   2278   }
   2279 }
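         // Together, TryAssignBinSlot() and ProcessWorkStack() perform an explicit depth-first
         // traversal: assigning a bin slot to an object also pushes it onto the work stack, and
         // popping it visits its references with VisitReferencesVisitor, which assigns bin slots to
         // (and pushes) any referenced objects not seen before.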
   2280 
   2281 void ImageWriter::CalculateNewObjectOffsets() {
   2282   Thread* const self = Thread::Current();
   2283   Runtime* const runtime = Runtime::Current();
   2284   VariableSizedHandleScope handles(self);
   2285   MutableHandle<ObjectArray<Object>> boot_image_live_objects = handles.NewHandle(
   2286       compiler_options_.IsAppImage()
   2287           ? nullptr
   2288           : IntrinsicObjects::AllocateBootImageLiveObjects(self, runtime->GetClassLinker()));
   2289   std::vector<Handle<ObjectArray<Object>>> image_roots;
   2290   for (size_t i = 0, size = oat_filenames_.size(); i != size; ++i) {
   2291     image_roots.push_back(handles.NewHandle(CreateImageRoots(i, boot_image_live_objects)));
   2292   }
   2293 
   2294   gc::Heap* const heap = runtime->GetHeap();
   2295 
    2296   // Leave space for the header, but do not write it yet; we need to
    2297   // know where image_roots is going to end up.
    2298   image_objects_offset_begin_ = RoundUp(sizeof(ImageHeader), kObjectAlignment);  // 64-bit alignment
   2299 
   2300   const size_t method_alignment = ArtMethod::Alignment(target_ptr_size_);
   2301   // Write the image runtime methods.
   2302   image_methods_[ImageHeader::kResolutionMethod] = runtime->GetResolutionMethod();
   2303   image_methods_[ImageHeader::kImtConflictMethod] = runtime->GetImtConflictMethod();
   2304   image_methods_[ImageHeader::kImtUnimplementedMethod] = runtime->GetImtUnimplementedMethod();
   2305   image_methods_[ImageHeader::kSaveAllCalleeSavesMethod] =
   2306       runtime->GetCalleeSaveMethod(CalleeSaveType::kSaveAllCalleeSaves);
   2307   image_methods_[ImageHeader::kSaveRefsOnlyMethod] =
   2308       runtime->GetCalleeSaveMethod(CalleeSaveType::kSaveRefsOnly);
   2309   image_methods_[ImageHeader::kSaveRefsAndArgsMethod] =
   2310       runtime->GetCalleeSaveMethod(CalleeSaveType::kSaveRefsAndArgs);
   2311   image_methods_[ImageHeader::kSaveEverythingMethod] =
   2312       runtime->GetCalleeSaveMethod(CalleeSaveType::kSaveEverything);
   2313   image_methods_[ImageHeader::kSaveEverythingMethodForClinit] =
   2314       runtime->GetCalleeSaveMethod(CalleeSaveType::kSaveEverythingForClinit);
   2315   image_methods_[ImageHeader::kSaveEverythingMethodForSuspendCheck] =
   2316       runtime->GetCalleeSaveMethod(CalleeSaveType::kSaveEverythingForSuspendCheck);
   2317   // Visit image methods first to have the main runtime methods in the first image.
   2318   for (auto* m : image_methods_) {
   2319     CHECK(m != nullptr);
   2320     CHECK(m->IsRuntimeMethod());
   2321     DCHECK_EQ(!compiler_options_.IsBootImage(), IsInBootImage(m))
   2322         << "Trampolines should be in boot image";
   2323     if (!IsInBootImage(m)) {
   2324       AssignMethodOffset(m, NativeObjectRelocationType::kRuntimeMethod, GetDefaultOatIndex());
   2325     }
   2326   }
   2327 
   2328   // Deflate monitors before we visit roots since deflating acquires the monitor lock. Acquiring
   2329   // this lock while holding other locks may cause lock order violations.
   2330   {
   2331     auto deflate_monitor = [](mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
   2332       Monitor::Deflate(Thread::Current(), obj);
   2333     };
   2334     heap->VisitObjects(deflate_monitor);
   2335   }
   2336 
   2337   // From this point on, there shall be no GC anymore and no objects shall be allocated.
    2338   // We can now assign a BinSlot to each object and store it in its lock word.
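  // (The bin slot is temporarily packed into the object's 32-bit lock word until the final image
  // offset is known. A simplified stand-alone analogue of that packing; the field widths below
  // are illustrative and are not the actual split used by ImageWriter::BinSlot:)
  //
  //   #include <cstdint>
  //   constexpr uint32_t kSketchBinBits = 4;                        // Hypothetical width.
  //   constexpr uint32_t kSketchIndexBits = 32u - kSketchBinBits;
  //   constexpr uint32_t kSketchIndexMask = (1u << kSketchIndexBits) - 1u;
  //   constexpr uint32_t PackSlot(uint32_t bin, uint32_t index) {
  //     return (bin << kSketchIndexBits) | (index & kSketchIndexMask);
  //   }
  //   constexpr uint32_t SlotBin(uint32_t packed) { return packed >> kSketchIndexBits; }
  //   constexpr uint32_t SlotIndex(uint32_t packed) { return packed & kSketchIndexMask; }
  //   static_assert(SlotBin(PackSlot(3u, 0x123u)) == 3u, "bin round-trips");
  //   static_assert(SlotIndex(PackSlot(3u, 0x123u)) == 0x123u, "index round-trips");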
   2339 
   2340   // Work list of <object, oat_index> for objects. Everything on the stack must already be
   2341   // assigned a bin slot.
   2342   WorkStack work_stack;
   2343 
   2344   // Special case interned strings to put them in the image they are likely to be resolved from.
   2345   for (const DexFile* dex_file : compiler_options_.GetDexFilesForOatFile()) {
   2346     auto it = dex_file_oat_index_map_.find(dex_file);
   2347     DCHECK(it != dex_file_oat_index_map_.end()) << dex_file->GetLocation();
   2348     const size_t oat_index = it->second;
   2349     InternTable* const intern_table = runtime->GetInternTable();
   2350     for (size_t i = 0, count = dex_file->NumStringIds(); i < count; ++i) {
   2351       uint32_t utf16_length;
   2352       const char* utf8_data = dex_file->StringDataAndUtf16LengthByIdx(dex::StringIndex(i),
   2353                                                                       &utf16_length);
   2354       mirror::String* string = intern_table->LookupStrong(self, utf16_length, utf8_data).Ptr();
   2355       TryAssignBinSlot(work_stack, string, oat_index);
   2356     }
   2357   }
   2358 
   2359   // Get the GC roots and then visit them separately to avoid lock violations since the root visitor
   2360   // visits roots while holding various locks.
   2361   {
   2362     std::vector<mirror::Object*> roots;
   2363     GetRootsVisitor root_visitor(&roots);
   2364     runtime->VisitRoots(&root_visitor);
   2365     for (mirror::Object* obj : roots) {
   2366       TryAssignBinSlot(work_stack, obj, GetDefaultOatIndex());
   2367     }
   2368   }
   2369   ProcessWorkStack(&work_stack);
   2370 
    2371   // For app images, there may be objects that are only held live by the boot image. One
    2372   // example is finalizer references. Forward these objects so that the bin slot assignment
    2373   // verification below does not fail its checks.
   2374   if (compiler_options_.IsAppImage()) {
   2375     for (gc::space::ImageSpace* space : heap->GetBootImageSpaces()) {
   2376       DCHECK(space->IsImageSpace());
   2377       gc::accounting::ContinuousSpaceBitmap* live_bitmap = space->GetLiveBitmap();
   2378       live_bitmap->VisitMarkedRange(reinterpret_cast<uintptr_t>(space->Begin()),
   2379                                     reinterpret_cast<uintptr_t>(space->Limit()),
   2380                                     [this, &work_stack](mirror::Object* obj)
   2381           REQUIRES_SHARED(Locks::mutator_lock_) {
   2382         VisitReferencesVisitor visitor(this, &work_stack, GetDefaultOatIndex());
   2383         // Visit all references and try to assign bin slots for them (calls TryAssignBinSlot).
   2384         obj->VisitReferences</*kVisitNativeRoots*/true, kVerifyNone, kWithoutReadBarrier>(
   2385             visitor,
   2386             visitor);
   2387       });
   2388     }
   2389     // Process the work stack in case anything was added by TryAssignBinSlot.
   2390     ProcessWorkStack(&work_stack);
   2391 
   2392     // Store the class loader in the class roots.
   2393     CHECK_EQ(image_roots.size(), 1u);
   2394     image_roots[0]->Set<false>(ImageHeader::kAppImageClassLoader, GetAppClassLoader());
   2395   }
   2396 
   2397   // Verify that all objects have assigned image bin slots.
   2398   {
   2399     auto ensure_bin_slots_assigned = [&](mirror::Object* obj)
   2400         REQUIRES_SHARED(Locks::mutator_lock_) {
   2401       if (IsImageObject(obj)) {
   2402         CHECK(IsImageBinSlotAssigned(obj)) << mirror::Object::PrettyTypeOf(obj) << " " << obj;
   2403       }
   2404     };
   2405     heap->VisitObjects(ensure_bin_slots_assigned);
   2406   }
   2407 
   2408   // Calculate size of the dex cache arrays slot and prepare offsets.
   2409   PrepareDexCacheArraySlots();
   2410 
   2411   // Calculate the sizes of the intern tables, class tables, and fixup tables.
   2412   for (ImageInfo& image_info : image_infos_) {
   2413     // Calculate how big the intern table will be after being serialized.
   2414     InternTable* const intern_table = image_info.intern_table_.get();
   2415     CHECK_EQ(intern_table->WeakSize(), 0u) << " should have strong interned all the strings";
   2416     if (intern_table->StrongSize() != 0u) {
   2417       image_info.intern_table_bytes_ = intern_table->WriteToMemory(nullptr);
   2418     }
   2419 
   2420     // Calculate the size of the class table.
   2421     ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
   2422     DCHECK_EQ(image_info.class_table_->NumReferencedZygoteClasses(), 0u);
   2423     if (image_info.class_table_->NumReferencedNonZygoteClasses() != 0u) {
   2424       image_info.class_table_bytes_ += image_info.class_table_->WriteToMemory(nullptr);
   2425     }
   2426   }
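  // Both tables follow the same measure-then-serialize convention: WriteToMemory(nullptr) only
  // reports how many bytes a later call would write, so the section sizes can be fixed before any
  // buffer exists. A small self-contained sketch of that convention (Serialize below is a
  // hypothetical stand-in, not the real InternTable/ClassTable API):
  //
  //   #include <cstddef>
  //   #include <cstdint>
  //   #include <cstring>
  //   #include <vector>
  //
  //   // Returns the number of bytes written; with buffer == nullptr it only measures.
  //   size_t Serialize(const std::vector<uint32_t>& data, uint8_t* buffer) {
  //     const size_t bytes = data.size() * sizeof(uint32_t);
  //     if (buffer != nullptr) {
  //       std::memcpy(buffer, data.data(), bytes);
  //     }
  //     return bytes;
  //   }
  //
  //   // Usage: size_t needed = Serialize(data, nullptr);  // First pass: reserve `needed` bytes.
  //   //        Serialize(data, out);                       // Second pass: write for real.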
   2427 
   2428   // Calculate bin slot offsets.
   2429   for (size_t oat_index = 0; oat_index < image_infos_.size(); ++oat_index) {
   2430     ImageInfo& image_info = image_infos_[oat_index];
   2431     size_t bin_offset = image_objects_offset_begin_;
   2432     // Need to visit the objects in bin order since alignment requirements might change the
   2433     // section sizes.
    2434     // Avoid using ObjPtr since VisitObjects invalidates ObjPtrs. This is safe since concurrent
    2435     // GC cannot occur during image writing.
   2436     using BinPair = std::pair<BinSlot, mirror::Object*>;
   2437     std::vector<BinPair> objects;
   2438     heap->VisitObjects([&](mirror::Object* obj)
   2439         REQUIRES_SHARED(Locks::mutator_lock_) {
   2440       // Only visit the oat index for the current image.
   2441       if (IsImageObject(obj) && GetOatIndex(obj) == oat_index) {
   2442         objects.emplace_back(GetImageBinSlot(obj), obj);
   2443       }
   2444     });
   2445     std::sort(objects.begin(), objects.end(), [](const BinPair& a, const BinPair& b) -> bool {
   2446       if (a.first.GetBin() != b.first.GetBin()) {
   2447         return a.first.GetBin() < b.first.GetBin();
   2448       }
   2449       // Note that the index is really the relative offset in this case.
   2450       return a.first.GetIndex() < b.first.GetIndex();
   2451     });
   2452     auto it = objects.begin();
   2453     for (size_t i = 0; i != kNumberOfBins; ++i) {
   2454       Bin bin = enum_cast<Bin>(i);
   2455       switch (bin) {
   2456         case Bin::kArtMethodClean:
   2457         case Bin::kArtMethodDirty: {
   2458           bin_offset = RoundUp(bin_offset, method_alignment);
   2459           break;
   2460         }
   2461         case Bin::kDexCacheArray:
   2462           bin_offset = RoundUp(bin_offset, DexCacheArraysLayout::Alignment(target_ptr_size_));
   2463           break;
   2464         case Bin::kImTable:
   2465         case Bin::kIMTConflictTable: {
   2466           bin_offset = RoundUp(bin_offset, static_cast<size_t>(target_ptr_size_));
   2467           break;
   2468         }
   2469         default: {
   2470           // Normal alignment.
   2471         }
   2472       }
   2473       image_info.bin_slot_offsets_[i] = bin_offset;
   2474 
   2475       // If the bin is for mirror objects, assign the offsets since we may need to change sizes
   2476       // from alignment requirements.
   2477       if (i < static_cast<size_t>(Bin::kMirrorCount)) {
   2478         const size_t start_offset = bin_offset;
   2479         // Visit and assign offsets for all objects of the bin type.
   2480         while (it != objects.end() && it->first.GetBin() == bin) {
   2481           ObjPtr<mirror::Object> obj(it->second);
   2482           const size_t object_size = RoundUp(obj->SizeOf(), kObjectAlignment);
    2483           // If the object spans region boundaries, add padding objects in between.
   2484           // TODO: Instead of adding padding, we should consider reordering the bins to reduce
   2485           // wasted space.
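          // Worked example (all numbers hypothetical): with region_size_ = 4096,
          // offset_after_header = 4000, and object_size = 200, next_region is
          // RoundUp(4000, 4096) = 4096; since 4000 + 200 > 4096, padding objects of
          // kObjectAlignment bytes each (twelve of them if kObjectAlignment is 8) are emitted
          // until the offset reaches 4096, and the object then starts exactly on the region
          // boundary.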
   2486           if (region_size_ != 0u) {
   2487             const size_t offset_after_header = bin_offset - sizeof(ImageHeader);
   2488             const size_t next_region = RoundUp(offset_after_header, region_size_);
   2489             if (offset_after_header != next_region &&
   2490                 offset_after_header + object_size > next_region) {
   2491               // Add padding objects until aligned.
   2492               while (bin_offset - sizeof(ImageHeader) < next_region) {
   2493                 image_info.padding_object_offsets_.push_back(bin_offset);
   2494                 bin_offset += kObjectAlignment;
   2495                 region_alignment_wasted_ += kObjectAlignment;
   2496                 image_info.image_end_ += kObjectAlignment;
   2497               }
   2498               CHECK_EQ(bin_offset - sizeof(ImageHeader), next_region);
   2499             }
   2500           }
   2501           SetImageOffset(obj.Ptr(), bin_offset);
   2502           bin_offset = bin_offset + object_size;
   2503           ++it;
   2504         }
   2505         image_info.bin_slot_sizes_[i] = bin_offset - start_offset;
   2506       } else {
   2507         bin_offset += image_info.bin_slot_sizes_[i];
   2508       }
   2509     }
   2510     // NOTE: There may be additional padding between the bin slots and the intern table.
   2511     DCHECK_EQ(image_info.image_end_,
   2512               image_info.GetBinSizeSum(Bin::kMirrorCount) + image_objects_offset_begin_);
   2513   }
   2514 
   2515   VLOG(image) << "Space wasted for region alignment " << region_alignment_wasted_;
   2516 
   2517   // Calculate image offsets.
   2518   size_t image_offset = 0;
   2519   for (ImageInfo& image_info : image_infos_) {
   2520     image_info.image_begin_ = global_image_begin_ + image_offset;
   2521     image_info.image_offset_ = image_offset;
   2522     image_info.image_size_ = RoundUp(image_info.CreateImageSections().first, kPageSize);
   2523     // There should be no gaps until the next image.
   2524     image_offset += image_info.image_size_;
   2525   }
   2526 
   2527   size_t i = 0;
   2528   for (ImageInfo& image_info : image_infos_) {
   2529     image_info.image_roots_address_ = PointerToLowMemUInt32(GetImageAddress(image_roots[i].Get()));
   2530     i++;
   2531   }
   2532 
   2533   // Update the native relocations by adding their bin sums.
   2534   for (auto& pair : native_object_relocations_) {
   2535     NativeObjectRelocation& relocation = pair.second;
   2536     Bin bin_type = BinTypeForNativeRelocationType(relocation.type);
   2537     ImageInfo& image_info = GetImageInfo(relocation.oat_index);
   2538     relocation.offset += image_info.GetBinSlotOffset(bin_type);
   2539   }
   2540 
   2541   // Remember the boot image live objects as raw pointer. No GC can happen anymore.
   2542   boot_image_live_objects_ = boot_image_live_objects.Get();
   2543 }
   2544 
   2545 std::pair<size_t, std::vector<ImageSection>> ImageWriter::ImageInfo::CreateImageSections() const {
   2546   std::vector<ImageSection> sections(ImageHeader::kSectionCount);
   2547 
   2548   // Do not round up any sections here that are represented by the bins since it
   2549   // will break offsets.
   2550 
   2551   /*
   2552    * Objects section
   2553    */
   2554   sections[ImageHeader::kSectionObjects] =
   2555       ImageSection(0u, image_end_);
   2556 
   2557   /*
   2558    * Field section
   2559    */
   2560   sections[ImageHeader::kSectionArtFields] =
   2561       ImageSection(GetBinSlotOffset(Bin::kArtField), GetBinSlotSize(Bin::kArtField));
   2562 
   2563   /*
   2564    * Method section
   2565    */
   2566   sections[ImageHeader::kSectionArtMethods] =
   2567       ImageSection(GetBinSlotOffset(Bin::kArtMethodClean),
   2568                    GetBinSlotSize(Bin::kArtMethodClean) +
   2569                    GetBinSlotSize(Bin::kArtMethodDirty));
   2570 
   2571   /*
   2572    * IMT section
   2573    */
   2574   sections[ImageHeader::kSectionImTables] =
   2575       ImageSection(GetBinSlotOffset(Bin::kImTable), GetBinSlotSize(Bin::kImTable));
   2576 
   2577   /*
   2578    * Conflict Tables section
   2579    */
   2580   sections[ImageHeader::kSectionIMTConflictTables] =
   2581       ImageSection(GetBinSlotOffset(Bin::kIMTConflictTable), GetBinSlotSize(Bin::kIMTConflictTable));
   2582 
   2583   /*
   2584    * Runtime Methods section
   2585    */
   2586   sections[ImageHeader::kSectionRuntimeMethods] =
   2587       ImageSection(GetBinSlotOffset(Bin::kRuntimeMethod), GetBinSlotSize(Bin::kRuntimeMethod));
   2588 
   2589   /*
   2590    * DexCache Arrays section.
   2591    */
   2592   const ImageSection& dex_cache_arrays_section =
   2593       sections[ImageHeader::kSectionDexCacheArrays] =
   2594           ImageSection(GetBinSlotOffset(Bin::kDexCacheArray),
   2595                        GetBinSlotSize(Bin::kDexCacheArray));
   2596 
   2597   /*
   2598    * Interned Strings section
   2599    */
   2600 
   2601   // Round up to the alignment the string table expects. See HashSet::WriteToMemory.
   2602   size_t cur_pos = RoundUp(dex_cache_arrays_section.End(), sizeof(uint64_t));
   2603 
   2604   const ImageSection& interned_strings_section =
   2605       sections[ImageHeader::kSectionInternedStrings] =
   2606           ImageSection(cur_pos, intern_table_bytes_);
   2607 
   2608   /*
   2609    * Class Table section
   2610    */
   2611 
   2612   // Obtain the new position and round it up to the appropriate alignment.
   2613   cur_pos = RoundUp(interned_strings_section.End(), sizeof(uint64_t));
   2614 
   2615   const ImageSection& class_table_section =
   2616       sections[ImageHeader::kSectionClassTable] =
   2617           ImageSection(cur_pos, class_table_bytes_);
   2618 
   2619   /*
   2620    * String Field Offsets section
   2621    */
   2622 
   2623   // Round up to the alignment of the offsets we are going to store.
   2624   cur_pos = RoundUp(class_table_section.End(), sizeof(uint32_t));
   2625 
   2626   // The size of string_reference_offsets_ can't be used here because it hasn't
   2627   // been filled with AppImageReferenceOffsetInfo objects yet.  The
   2628   // num_string_references_ value is calculated separately, before we can
   2629   // compute the actual offsets.
   2630   const ImageSection& string_reference_offsets =
   2631       sections[ImageHeader::kSectionStringReferenceOffsets] =
   2632           ImageSection(cur_pos,
   2633                        sizeof(typename decltype(string_reference_offsets_)::value_type) *
   2634                            num_string_references_);
   2635 
   2636   /*
   2637    * Metadata section.
   2638    */
   2639 
   2640   // Round up to the alignment of the offsets we are going to store.
   2641   cur_pos = RoundUp(string_reference_offsets.End(),
   2642                     mirror::DexCache::PreResolvedStringsAlignment());
   2643 
   2644   const ImageSection& metadata_section =
   2645       sections[ImageHeader::kSectionMetadata] =
   2646           ImageSection(cur_pos, GetBinSlotSize(Bin::kMetadata));
   2647 
   2648   // Return the number of bytes described by these sections, and the sections
   2649   // themselves.
   2650   return make_pair(metadata_section.End(), std::move(sections));
   2651 }
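// The sections above are laid out back to back: the bin-backed sections reuse the bin offsets
// computed in CalculateNewObjectOffsets(), and each table section that follows starts at the
// previous section's End() rounded up to its required alignment. For instance (hypothetical
// number), an interned strings section ending at 0x1234 would put the class table at
// RoundUp(0x1234, sizeof(uint64_t)) = 0x1238. The returned size excludes the bitmap section,
// which CreateHeader() appends separately on a page boundary.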
   2652 
   2653 void ImageWriter::CreateHeader(size_t oat_index) {
   2654   ImageInfo& image_info = GetImageInfo(oat_index);
   2655   const uint8_t* oat_file_begin = image_info.oat_file_begin_;
   2656   const uint8_t* oat_file_end = oat_file_begin + image_info.oat_loaded_size_;
   2657   const uint8_t* oat_data_end = image_info.oat_data_begin_ + image_info.oat_size_;
   2658 
   2659   uint32_t image_reservation_size = image_info.image_size_;
   2660   DCHECK_ALIGNED(image_reservation_size, kPageSize);
   2661   uint32_t component_count = 1u;
   2662   if (!compiler_options_.IsAppImage()) {
   2663     if (oat_index == 0u) {
   2664       const ImageInfo& last_info = image_infos_.back();
   2665       const uint8_t* end = last_info.oat_file_begin_ + last_info.oat_loaded_size_;
   2666       DCHECK_ALIGNED(image_info.image_begin_, kPageSize);
   2667       image_reservation_size =
   2668           dchecked_integral_cast<uint32_t>(RoundUp(end - image_info.image_begin_, kPageSize));
   2669       component_count = image_infos_.size();
   2670     } else {
   2671       image_reservation_size = 0u;
   2672       component_count = 0u;
   2673     }
   2674   }
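  // Worked example (addresses hypothetical): for a boot image split across three oat files, only
  // the first header describes the full reservation. If image 0 begins at 0x70000000 and the last
  // oat file ends at 0x703f2000, the first header gets
  // image_reservation_size = RoundUp(0x703f2000 - 0x70000000, kPageSize) = 0x3f2000 and
  // component_count = 3, while the remaining boot image headers carry 0 for both. An app image
  // keeps its own page-aligned image_size_ and component_count = 1.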
   2675 
   2676   // Create the image sections.
   2677   auto section_info_pair = image_info.CreateImageSections();
   2678   const size_t image_end = section_info_pair.first;
   2679   std::vector<ImageSection>& sections = section_info_pair.second;
   2680 
   2681   // Finally bitmap section.
   2682   const size_t bitmap_bytes = image_info.image_bitmap_->Size();
   2683   auto* bitmap_section = &sections[ImageHeader::kSectionImageBitmap];
   2684   *bitmap_section = ImageSection(RoundUp(image_end, kPageSize), RoundUp(bitmap_bytes, kPageSize));
   2685   if (VLOG_IS_ON(compiler)) {
   2686     LOG(INFO) << "Creating header for " << oat_filenames_[oat_index];
   2687     size_t idx = 0;
   2688     for (const ImageSection& section : sections) {
   2689       LOG(INFO) << static_cast<ImageHeader::ImageSections>(idx) << " " << section;
   2690       ++idx;
   2691     }
   2692     LOG(INFO) << "Methods: clean=" << clean_methods_ << " dirty=" << dirty_methods_;
   2693     LOG(INFO) << "Image roots address=" << std::hex << image_info.image_roots_address_ << std::dec;
   2694     LOG(INFO) << "Image begin=" << std::hex << reinterpret_cast<uintptr_t>(global_image_begin_)
   2695               << " Image offset=" << image_info.image_offset_ << std::dec;
   2696     LOG(INFO) << "Oat file begin=" << std::hex << reinterpret_cast<uintptr_t>(oat_file_begin)
   2697               << " Oat data begin=" << reinterpret_cast<uintptr_t>(image_info.oat_data_begin_)
   2698               << " Oat data end=" << reinterpret_cast<uintptr_t>(oat_data_end)
   2699               << " Oat file end=" << reinterpret_cast<uintptr_t>(oat_file_end);
   2700   }
   2701   // Store boot image info for app image so that we can relocate.
   2702   uint32_t boot_image_begin = 0;
   2703   uint32_t boot_image_end = 0;
   2704   uint32_t boot_oat_begin = 0;
   2705   uint32_t boot_oat_end = 0;
   2706   gc::Heap* const heap = Runtime::Current()->GetHeap();
   2707   heap->GetBootImagesSize(&boot_image_begin, &boot_image_end, &boot_oat_begin, &boot_oat_end);
   2708 
   2709   // Create the header, leave 0 for data size since we will fill this in as we are writing the
   2710   // image.
   2711   new (image_info.image_.Begin()) ImageHeader(
   2712       image_reservation_size,
   2713       component_count,
   2714       PointerToLowMemUInt32(image_info.image_begin_),
   2715       image_end,
   2716       sections.data(),
   2717       image_info.image_roots_address_,
   2718       image_info.oat_checksum_,
   2719       PointerToLowMemUInt32(oat_file_begin),
   2720       PointerToLowMemUInt32(image_info.oat_data_begin_),
   2721       PointerToLowMemUInt32(oat_data_end),
   2722       PointerToLowMemUInt32(oat_file_end),
   2723       boot_image_begin,
   2724       boot_oat_end - boot_image_begin,
   2725       static_cast<uint32_t>(target_ptr_size_));
   2726 }
   2727 
   2728 ArtMethod* ImageWriter::GetImageMethodAddress(ArtMethod* method) {
   2729   NativeObjectRelocation relocation = GetNativeRelocation(method);
   2730   const ImageInfo& image_info = GetImageInfo(relocation.oat_index);
   2731   CHECK_GE(relocation.offset, image_info.image_end_) << "ArtMethods should be after Objects";
   2732   return reinterpret_cast<ArtMethod*>(image_info.image_begin_ + relocation.offset);
   2733 }
   2734 
   2735 const void* ImageWriter::GetIntrinsicReferenceAddress(uint32_t intrinsic_data) {
   2736   DCHECK(compiler_options_.IsBootImage());
   2737   switch (IntrinsicObjects::DecodePatchType(intrinsic_data)) {
   2738     case IntrinsicObjects::PatchType::kIntegerValueOfArray: {
   2739       const uint8_t* base_address =
   2740           reinterpret_cast<const uint8_t*>(GetImageAddress(boot_image_live_objects_));
   2741       MemberOffset data_offset =
   2742           IntrinsicObjects::GetIntegerValueOfArrayDataOffset(boot_image_live_objects_);
   2743       return base_address + data_offset.Uint32Value();
   2744     }
   2745     case IntrinsicObjects::PatchType::kIntegerValueOfObject: {
   2746       uint32_t index = IntrinsicObjects::DecodePatchIndex(intrinsic_data);
   2747       ObjPtr<mirror::Object> value =
   2748           IntrinsicObjects::GetIntegerValueOfObject(boot_image_live_objects_, index);
   2749       return GetImageAddress(value.Ptr());
   2750     }
   2751   }
   2752   LOG(FATAL) << "UNREACHABLE";
   2753   UNREACHABLE();
   2754 }
   2755 
   2756 
   2757 class ImageWriter::FixupRootVisitor : public RootVisitor {
   2758  public:
   2759   explicit FixupRootVisitor(ImageWriter* image_writer) : image_writer_(image_writer) {
   2760   }
   2761 
   2762   void VisitRoots(mirror::Object*** roots ATTRIBUTE_UNUSED,
   2763                   size_t count ATTRIBUTE_UNUSED,
   2764                   const RootInfo& info ATTRIBUTE_UNUSED)
   2765       override REQUIRES_SHARED(Locks::mutator_lock_) {
   2766     LOG(FATAL) << "Unsupported";
   2767   }
   2768 
   2769   void VisitRoots(mirror::CompressedReference<mirror::Object>** roots,
   2770                   size_t count,
   2771                   const RootInfo& info ATTRIBUTE_UNUSED)
   2772       override REQUIRES_SHARED(Locks::mutator_lock_) {
   2773     for (size_t i = 0; i < count; ++i) {
   2774       // Copy the reference. Since we do not have the address for recording the relocation,
   2775       // it needs to be recorded explicitly by the user of FixupRootVisitor.
   2776       ObjPtr<mirror::Object> old_ptr = roots[i]->AsMirrorPtr();
   2777       roots[i]->Assign(image_writer_->GetImageAddress(old_ptr.Ptr()));
   2778     }
   2779   }
   2780 
   2781  private:
   2782   ImageWriter* const image_writer_;
   2783 };
   2784 
   2785 void ImageWriter::CopyAndFixupImTable(ImTable* orig, ImTable* copy) {
   2786   for (size_t i = 0; i < ImTable::kSize; ++i) {
   2787     ArtMethod* method = orig->Get(i, target_ptr_size_);
   2788     void** address = reinterpret_cast<void**>(copy->AddressOfElement(i, target_ptr_size_));
   2789     CopyAndFixupPointer(address, method);
   2790     DCHECK_EQ(copy->Get(i, target_ptr_size_), NativeLocationInImage(method));
   2791   }
   2792 }
   2793 
   2794 void ImageWriter::CopyAndFixupImtConflictTable(ImtConflictTable* orig, ImtConflictTable* copy) {
   2795   const size_t count = orig->NumEntries(target_ptr_size_);
   2796   for (size_t i = 0; i < count; ++i) {
   2797     ArtMethod* interface_method = orig->GetInterfaceMethod(i, target_ptr_size_);
   2798     ArtMethod* implementation_method = orig->GetImplementationMethod(i, target_ptr_size_);
   2799     CopyAndFixupPointer(copy->AddressOfInterfaceMethod(i, target_ptr_size_), interface_method);
   2800     CopyAndFixupPointer(
   2801         copy->AddressOfImplementationMethod(i, target_ptr_size_), implementation_method);
   2802     DCHECK_EQ(copy->GetInterfaceMethod(i, target_ptr_size_),
   2803               NativeLocationInImage(interface_method));
   2804     DCHECK_EQ(copy->GetImplementationMethod(i, target_ptr_size_),
   2805               NativeLocationInImage(implementation_method));
   2806   }
   2807 }
   2808 
   2809 void ImageWriter::CopyAndFixupNativeData(size_t oat_index) {
   2810   const ImageInfo& image_info = GetImageInfo(oat_index);
   2811   // Copy ArtFields and methods to their locations and update the array for convenience.
   2812   for (auto& pair : native_object_relocations_) {
   2813     NativeObjectRelocation& relocation = pair.second;
   2814     // Only work with fields and methods that are in the current oat file.
   2815     if (relocation.oat_index != oat_index) {
   2816       continue;
   2817     }
   2818     auto* dest = image_info.image_.Begin() + relocation.offset;
   2819     DCHECK_GE(dest, image_info.image_.Begin() + image_info.image_end_);
   2820     DCHECK(!IsInBootImage(pair.first));
   2821     switch (relocation.type) {
   2822       case NativeObjectRelocationType::kArtField: {
   2823         memcpy(dest, pair.first, sizeof(ArtField));
   2824         CopyAndFixupReference(
   2825             reinterpret_cast<ArtField*>(dest)->GetDeclaringClassAddressWithoutBarrier(),
   2826             reinterpret_cast<ArtField*>(pair.first)->GetDeclaringClass());
   2827         break;
   2828       }
   2829       case NativeObjectRelocationType::kRuntimeMethod:
   2830       case NativeObjectRelocationType::kArtMethodClean:
   2831       case NativeObjectRelocationType::kArtMethodDirty: {
   2832         CopyAndFixupMethod(reinterpret_cast<ArtMethod*>(pair.first),
   2833                            reinterpret_cast<ArtMethod*>(dest),
   2834                            oat_index);
   2835         break;
   2836       }
   2837       // For arrays, copy just the header since the elements will get copied by their corresponding
   2838       // relocations.
   2839       case NativeObjectRelocationType::kArtFieldArray: {
   2840         memcpy(dest, pair.first, LengthPrefixedArray<ArtField>::ComputeSize(0));
   2841         break;
   2842       }
   2843       case NativeObjectRelocationType::kArtMethodArrayClean:
   2844       case NativeObjectRelocationType::kArtMethodArrayDirty: {
   2845         size_t size = ArtMethod::Size(target_ptr_size_);
   2846         size_t alignment = ArtMethod::Alignment(target_ptr_size_);
   2847         memcpy(dest, pair.first, LengthPrefixedArray<ArtMethod>::ComputeSize(0, size, alignment));
   2848         // Clear padding to avoid non-deterministic data in the image.
   2849         // Historical note: We also did that to placate Valgrind.
   2850         reinterpret_cast<LengthPrefixedArray<ArtMethod>*>(dest)->ClearPadding(size, alignment);
   2851         break;
   2852       }
   2853       case NativeObjectRelocationType::kDexCacheArray:
   2854         // Nothing to copy here, everything is done in FixupDexCache().
   2855         break;
   2856       case NativeObjectRelocationType::kIMTable: {
   2857         ImTable* orig_imt = reinterpret_cast<ImTable*>(pair.first);
   2858         ImTable* dest_imt = reinterpret_cast<ImTable*>(dest);
   2859         CopyAndFixupImTable(orig_imt, dest_imt);
   2860         break;
   2861       }
   2862       case NativeObjectRelocationType::kIMTConflictTable: {
   2863         auto* orig_table = reinterpret_cast<ImtConflictTable*>(pair.first);
   2864         CopyAndFixupImtConflictTable(
   2865             orig_table,
   2866             new(dest)ImtConflictTable(orig_table->NumEntries(target_ptr_size_), target_ptr_size_));
   2867         break;
   2868       }
   2869       case NativeObjectRelocationType::kGcRootPointer: {
   2870         auto* orig_pointer = reinterpret_cast<GcRoot<mirror::Object>*>(pair.first);
   2871         auto* dest_pointer = reinterpret_cast<GcRoot<mirror::Object>*>(dest);
   2872         CopyAndFixupReference(dest_pointer->AddressWithoutBarrier(), orig_pointer->Read());
   2873         break;
   2874       }
   2875     }
   2876   }
   2877   // Fixup the image method roots.
   2878   auto* image_header = reinterpret_cast<ImageHeader*>(image_info.image_.Begin());
   2879   for (size_t i = 0; i < ImageHeader::kImageMethodsCount; ++i) {
   2880     ArtMethod* method = image_methods_[i];
   2881     CHECK(method != nullptr);
   2882     CopyAndFixupPointer(
    2883         reinterpret_cast<void**>(&image_header->image_methods_[i]), method, PointerSize::k64);
   2884   }
   2885   FixupRootVisitor root_visitor(this);
   2886 
   2887   // Write the intern table into the image.
   2888   if (image_info.intern_table_bytes_ > 0) {
   2889     const ImageSection& intern_table_section = image_header->GetInternedStringsSection();
   2890     InternTable* const intern_table = image_info.intern_table_.get();
   2891     uint8_t* const intern_table_memory_ptr =
   2892         image_info.image_.Begin() + intern_table_section.Offset();
   2893     const size_t intern_table_bytes = intern_table->WriteToMemory(intern_table_memory_ptr);
   2894     CHECK_EQ(intern_table_bytes, image_info.intern_table_bytes_);
   2895     // Fixup the pointers in the newly written intern table to contain image addresses.
   2896     InternTable temp_intern_table;
   2897     // Note that we require that ReadFromMemory does not make an internal copy of the elements so
   2898     // that the VisitRoots() will update the memory directly rather than the copies.
   2899     // This also relies on visit roots not doing any verification which could fail after we update
   2900     // the roots to be the image addresses.
   2901     temp_intern_table.AddTableFromMemory(intern_table_memory_ptr,
   2902                                          VoidFunctor(),
   2903                                          /*is_boot_image=*/ false);
   2904     CHECK_EQ(temp_intern_table.Size(), intern_table->Size());
   2905     temp_intern_table.VisitRoots(&root_visitor, kVisitRootFlagAllRoots);
   2906     // Record relocations. (The root visitor does not get to see the slot addresses.)
   2907     MutexLock lock(Thread::Current(), *Locks::intern_table_lock_);
   2908     DCHECK(!temp_intern_table.strong_interns_.tables_.empty());
   2909     DCHECK(!temp_intern_table.strong_interns_.tables_[0].Empty());  // Inserted at the beginning.
   2910   }
   2911   // Write the class table(s) into the image. class_table_bytes_ may be 0 if there are multiple
   2912   // class loaders. Writing multiple class tables into the image is currently unsupported.
   2913   if (image_info.class_table_bytes_ > 0u) {
   2914     const ImageSection& class_table_section = image_header->GetClassTableSection();
   2915     uint8_t* const class_table_memory_ptr =
   2916         image_info.image_.Begin() + class_table_section.Offset();
   2917     Thread* self = Thread::Current();
   2918     ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
   2919 
   2920     ClassTable* table = image_info.class_table_.get();
   2921     CHECK(table != nullptr);
   2922     const size_t class_table_bytes = table->WriteToMemory(class_table_memory_ptr);
   2923     CHECK_EQ(class_table_bytes, image_info.class_table_bytes_);
   2924     // Fixup the pointers in the newly written class table to contain image addresses. See
   2925     // above comment for intern tables.
   2926     ClassTable temp_class_table;
   2927     temp_class_table.ReadFromMemory(class_table_memory_ptr);
   2928     CHECK_EQ(temp_class_table.NumReferencedZygoteClasses(),
   2929              table->NumReferencedNonZygoteClasses() + table->NumReferencedZygoteClasses());
   2930     UnbufferedRootVisitor visitor(&root_visitor, RootInfo(kRootUnknown));
   2931     temp_class_table.VisitRoots(visitor);
   2932     // Record relocations. (The root visitor does not get to see the slot addresses.)
   2933     // Note that the low bits in the slots contain bits of the descriptors' hash codes
   2934     // but the relocation works fine for these "adjusted" references.
   2935     ReaderMutexLock lock(self, temp_class_table.lock_);
   2936     DCHECK(!temp_class_table.classes_.empty());
   2937     DCHECK(!temp_class_table.classes_[0].empty());  // The ClassSet was inserted at the beginning.
   2938   }
   2939 }
   2940 
   2941 void ImageWriter::FixupPointerArray(mirror::Object* dst,
   2942                                     mirror::PointerArray* arr,
   2943                                     Bin array_type) {
   2944   CHECK(arr->IsIntArray() || arr->IsLongArray()) << arr->GetClass()->PrettyClass() << " " << arr;
   2945   // Fixup int and long pointers for the ArtMethod or ArtField arrays.
   2946   const size_t num_elements = arr->GetLength();
   2947   CopyAndFixupReference(
   2948       dst->GetFieldObjectReferenceAddr<kVerifyNone>(Class::ClassOffset()), arr->GetClass());
   2949   auto* dest_array = down_cast<mirror::PointerArray*>(dst);
   2950   for (size_t i = 0, count = num_elements; i < count; ++i) {
   2951     void* elem = arr->GetElementPtrSize<void*>(i, target_ptr_size_);
   2952     if (kIsDebugBuild && elem != nullptr && !IsInBootImage(elem)) {
   2953       auto it = native_object_relocations_.find(elem);
   2954       if (UNLIKELY(it == native_object_relocations_.end())) {
    2955         if (array_type != Bin::kArtField) {  // Note: `it` is end() here, so key off the array type.
   2956           auto* method = reinterpret_cast<ArtMethod*>(elem);
   2957           LOG(FATAL) << "No relocation entry for ArtMethod " << method->PrettyMethod() << " @ "
   2958                      << method << " idx=" << i << "/" << num_elements << " with declaring class "
   2959                      << Class::PrettyClass(method->GetDeclaringClass());
   2960         } else {
   2961           CHECK_EQ(array_type, Bin::kArtField);
   2962           auto* field = reinterpret_cast<ArtField*>(elem);
   2963           LOG(FATAL) << "No relocation entry for ArtField " << field->PrettyField() << " @ "
   2964               << field << " idx=" << i << "/" << num_elements << " with declaring class "
   2965               << Class::PrettyClass(field->GetDeclaringClass());
   2966         }
   2967         UNREACHABLE();
   2968       }
   2969     }
   2970     CopyAndFixupPointer(dest_array->ElementAddress(i, target_ptr_size_), elem);
   2971   }
   2972 }
   2973 
   2974 void ImageWriter::CopyAndFixupObject(Object* obj) {
   2975   if (!IsImageObject(obj)) {
   2976     return;
   2977   }
   2978   size_t offset = GetImageOffset(obj);
   2979   size_t oat_index = GetOatIndex(obj);
   2980   ImageInfo& image_info = GetImageInfo(oat_index);
   2981   auto* dst = reinterpret_cast<Object*>(image_info.image_.Begin() + offset);
   2982   DCHECK_LT(offset, image_info.image_end_);
   2983   const auto* src = reinterpret_cast<const uint8_t*>(obj);
   2984 
   2985   image_info.image_bitmap_->Set(dst);  // Mark the obj as live.
   2986 
   2987   const size_t n = obj->SizeOf();
   2988 
   2989   if (kIsDebugBuild && region_size_ != 0u) {
   2990     const size_t offset_after_header = offset - sizeof(ImageHeader);
   2991     const size_t next_region = RoundUp(offset_after_header, region_size_);
   2992     if (offset_after_header != next_region) {
    2993       // If the object does not start on a region boundary, it must not cross into the next region.
   2994       CHECK_LT(offset_after_header, next_region)
   2995           << "offset_after_header=" << offset_after_header << " size=" << n;
   2996       CHECK_LE(offset_after_header + n, next_region)
   2997           << "offset_after_header=" << offset_after_header << " size=" << n;
   2998     }
   2999   }
   3000   DCHECK_LE(offset + n, image_info.image_.Size());
   3001   memcpy(dst, src, n);
   3002 
    3003   // Write back the hash code for objects that had an inflated monitor or a hash code stored in
    3004   // their lock word.
   3005   const auto it = saved_hashcode_map_.find(obj);
   3006   dst->SetLockWord(it != saved_hashcode_map_.end() ?
   3007       LockWord::FromHashCode(it->second, 0u) : LockWord::Default(), false);
   3008   if (kUseBakerReadBarrier && gc::collector::ConcurrentCopying::kGrayDirtyImmuneObjects) {
   3009     // Treat all of the objects in the image as marked to avoid unnecessary dirty pages. This is
    3010     // safe since we mark all of the objects that may reference non-immune objects as gray.
   3011     CHECK(dst->AtomicSetMarkBit(0, 1));
   3012   }
   3013   FixupObject(obj, dst);
   3014 }
   3015 
   3016 // Rewrite all the references in the copied object to point to their image address equivalent
   3017 class ImageWriter::FixupVisitor {
   3018  public:
   3019   FixupVisitor(ImageWriter* image_writer, Object* copy)
   3020       : image_writer_(image_writer), copy_(copy) {
   3021   }
   3022 
   3023   // Ignore class roots since we don't have a way to map them to the destination. These are handled
   3024   // with other logic.
   3025   void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED)
   3026       const {}
   3027   void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const {}
   3028 
   3029   void operator()(ObjPtr<Object> obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
   3030       REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
   3031     ObjPtr<Object> ref = obj->GetFieldObject<Object, kVerifyNone>(offset);
   3032     // Copy the reference and record the fixup if necessary.
   3033     image_writer_->CopyAndFixupReference(
   3034         copy_->GetFieldObjectReferenceAddr<kVerifyNone>(offset), ref);
   3035   }
   3036 
   3037   // java.lang.ref.Reference visitor.
   3038   void operator()(ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
   3039                   ObjPtr<mirror::Reference> ref) const
   3040       REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
   3041     operator()(ref, mirror::Reference::ReferentOffset(), /* is_static */ false);
   3042   }
   3043 
   3044  protected:
   3045   ImageWriter* const image_writer_;
   3046   mirror::Object* const copy_;
   3047 };
   3048 
   3049 void ImageWriter::CopyAndFixupObjects() {
   3050   auto visitor = [&](Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
   3051     DCHECK(obj != nullptr);
   3052     CopyAndFixupObject(obj);
   3053   };
   3054   Runtime::Current()->GetHeap()->VisitObjects(visitor);
    3055   // Copy the padding objects since they are required for in-order traversal of the image space.
   3056   for (const ImageInfo& image_info : image_infos_) {
   3057     for (const size_t offset : image_info.padding_object_offsets_) {
   3058       auto* dst = reinterpret_cast<Object*>(image_info.image_.Begin() + offset);
   3059       dst->SetClass<kVerifyNone>(GetImageAddress(GetClassRoot<mirror::Object>().Ptr()));
   3060       dst->SetLockWord<kVerifyNone>(LockWord::Default(), /*as_volatile=*/ false);
   3061       image_info.image_bitmap_->Set(dst);  // Mark the obj as live.
   3062     }
   3063   }
   3064   // We no longer need the hashcode map, values have already been copied to target objects.
   3065   saved_hashcode_map_.clear();
   3066 }
   3067 
   3068 class ImageWriter::FixupClassVisitor final : public FixupVisitor {
   3069  public:
   3070   FixupClassVisitor(ImageWriter* image_writer, Object* copy)
   3071       : FixupVisitor(image_writer, copy) {}
   3072 
   3073   void operator()(ObjPtr<Object> obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
   3074       REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
   3075     DCHECK(obj->IsClass());
   3076     FixupVisitor::operator()(obj, offset, /*is_static*/false);
   3077   }
   3078 
   3079   void operator()(ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
   3080                   ObjPtr<mirror::Reference> ref ATTRIBUTE_UNUSED) const
   3081       REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
   3082     LOG(FATAL) << "Reference not expected here.";
   3083   }
   3084 };
   3085 
   3086 ImageWriter::NativeObjectRelocation ImageWriter::GetNativeRelocation(void* obj) {
   3087   DCHECK(obj != nullptr);
   3088   DCHECK(!IsInBootImage(obj));
   3089   auto it = native_object_relocations_.find(obj);
   3090   CHECK(it != native_object_relocations_.end()) << obj << " spaces "
   3091       << Runtime::Current()->GetHeap()->DumpSpaces();
   3092   return it->second;
   3093 }
   3094 
   3095 template <typename T>
   3096 std::string PrettyPrint(T* ptr) REQUIRES_SHARED(Locks::mutator_lock_) {
   3097   std::ostringstream oss;
   3098   oss << ptr;
   3099   return oss.str();
   3100 }
   3101 
   3102 template <>
   3103 std::string PrettyPrint(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_) {
   3104   return ArtMethod::PrettyMethod(method);
   3105 }
   3106 
   3107 template <typename T>
   3108 T* ImageWriter::NativeLocationInImage(T* obj) {
   3109   if (obj == nullptr || IsInBootImage(obj)) {
   3110     return obj;
   3111   } else {
   3112     NativeObjectRelocation relocation = GetNativeRelocation(obj);
   3113     const ImageInfo& image_info = GetImageInfo(relocation.oat_index);
   3114     return reinterpret_cast<T*>(image_info.image_begin_ + relocation.offset);
   3115   }
   3116 }
   3117 
   3118 template <typename T>
   3119 T* ImageWriter::NativeCopyLocation(T* obj) {
   3120   const NativeObjectRelocation relocation = GetNativeRelocation(obj);
   3121   const ImageInfo& image_info = GetImageInfo(relocation.oat_index);
   3122   return reinterpret_cast<T*>(image_info.image_.Begin() + relocation.offset);
   3123 }
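// The two helpers above answer different questions about the same relocation:
// NativeLocationInImage() returns the address a native object will have once the image is mapped
// at runtime (image_begin_ + offset), while NativeCopyLocation() returns where its bytes currently
// live in the writer's local buffer (image_.Begin() + offset). Hypothetical illustration (numbers
// invented):
//
//   relocation.offset == 0x1000, image_begin_ == 0x70000000
//     NativeLocationInImage(obj)  ->  0x70001000               // The value written into the image.
//     NativeCopyLocation(obj)     ->  image_.Begin() + 0x1000  // Where the local copy is patched.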
   3124 
   3125 class ImageWriter::NativeLocationVisitor {
   3126  public:
   3127   explicit NativeLocationVisitor(ImageWriter* image_writer)
   3128       : image_writer_(image_writer) {}
   3129 
   3130   template <typename T>
   3131   T* operator()(T* ptr, void** dest_addr) const REQUIRES_SHARED(Locks::mutator_lock_) {
   3132     if (ptr != nullptr) {
   3133       image_writer_->CopyAndFixupPointer(dest_addr, ptr);
   3134     }
   3135     // TODO: The caller shall overwrite the value stored by CopyAndFixupPointer()
   3136     // with the value we return here. We should try to avoid the duplicate work.
   3137     return image_writer_->NativeLocationInImage(ptr);
   3138   }
   3139 
   3140  private:
   3141   ImageWriter* const image_writer_;
   3142 };
   3143 
   3144 void ImageWriter::FixupClass(mirror::Class* orig, mirror::Class* copy) {
   3145   orig->FixupNativePointers(copy, target_ptr_size_, NativeLocationVisitor(this));
   3146   FixupClassVisitor visitor(this, copy);
   3147   ObjPtr<mirror::Object>(orig)->VisitReferences(visitor, visitor);
   3148 
   3149   if (kBitstringSubtypeCheckEnabled && compiler_options_.IsAppImage()) {
   3150     // When we call SubtypeCheck::EnsureInitialize, it Assigns new bitstring
   3151     // values to the parent of that class.
   3152     //
   3153     // Every time this happens, the parent class has to mutate to increment
   3154     // the "Next" value.
   3155     //
   3156     // If any of these parents are in the boot image, the changes [in the parents]
   3157     // would be lost when the app image is reloaded.
   3158     //
   3159     // To prevent newly loaded classes (not in the app image) from being reassigned
   3160     // the same bitstring value as an existing app image class, uninitialize
   3161     // all the classes in the app image.
   3162     //
   3163     // On startup, the class linker will then re-initialize all the app
   3164     // image bitstrings. See also ClassLinker::AddImageSpace.
   3165     MutexLock subtype_check_lock(Thread::Current(), *Locks::subtype_check_lock_);
   3166     // Lock every time to prevent a dcheck failure when we suspend with the lock held.
   3167     SubtypeCheck<mirror::Class*>::ForceUninitialize(copy);
   3168   }
   3169 
   3170   // Remove the clinitThreadId. This is required for image determinism.
   3171   copy->SetClinitThreadId(static_cast<pid_t>(0));
   3172 }
   3173 
   3174 void ImageWriter::FixupObject(Object* orig, Object* copy) {
   3175   DCHECK(orig != nullptr);
   3176   DCHECK(copy != nullptr);
   3177   if (kUseBakerReadBarrier) {
   3178     orig->AssertReadBarrierState();
   3179   }
   3180   if (orig->IsIntArray() || orig->IsLongArray()) {
   3181     // Is this a native pointer array?
   3182     auto it = pointer_arrays_.find(down_cast<mirror::PointerArray*>(orig));
   3183     if (it != pointer_arrays_.end()) {
   3184       // Should only need to fixup every pointer array exactly once.
   3185       FixupPointerArray(copy, down_cast<mirror::PointerArray*>(orig), it->second);
   3186       pointer_arrays_.erase(it);
   3187       return;
   3188     }
   3189   }
   3190   if (orig->IsClass()) {
   3191     FixupClass(orig->AsClass<kVerifyNone>().Ptr(), down_cast<mirror::Class*>(copy));
   3192   } else {
   3193     ObjPtr<mirror::ObjectArray<mirror::Class>> class_roots =
   3194         Runtime::Current()->GetClassLinker()->GetClassRoots();
   3195     ObjPtr<mirror::Class> klass = orig->GetClass();
   3196     if (klass == GetClassRoot<mirror::Method>(class_roots) ||
   3197         klass == GetClassRoot<mirror::Constructor>(class_roots)) {
   3198       // Need to go update the ArtMethod.
   3199       auto* dest = down_cast<mirror::Executable*>(copy);
   3200       auto* src = down_cast<mirror::Executable*>(orig);
   3201       ArtMethod* src_method = src->GetArtMethod();
   3202       CopyAndFixupPointer(dest, mirror::Executable::ArtMethodOffset(), src_method);
   3203     } else if (klass == GetClassRoot<mirror::DexCache>(class_roots)) {
   3204       FixupDexCache(down_cast<mirror::DexCache*>(orig), down_cast<mirror::DexCache*>(copy));
   3205     } else if (klass->IsClassLoaderClass()) {
   3206       mirror::ClassLoader* copy_loader = down_cast<mirror::ClassLoader*>(copy);
   3207       // If src is a ClassLoader, set the class table to null so that it gets recreated by the
   3208       // ClassLoader.
   3209       copy_loader->SetClassTable(nullptr);
   3210       // Also set allocator to null to be safe. The allocator is created when we create the class
   3211       // table. We also never expect to unload things in the image since they are held live as
   3212       // roots.
   3213       copy_loader->SetAllocator(nullptr);
   3214     }
   3215     FixupVisitor visitor(this, copy);
   3216     orig->VisitReferences(visitor, visitor);
   3217   }
   3218 }
   3219 
   3220 template <typename T>
   3221 void ImageWriter::FixupDexCacheArrayEntry(std::atomic<mirror::DexCachePair<T>>* orig_array,
   3222                                           std::atomic<mirror::DexCachePair<T>>* new_array,
   3223                                           uint32_t array_index) {
   3224   static_assert(sizeof(std::atomic<mirror::DexCachePair<T>>) == sizeof(mirror::DexCachePair<T>),
   3225                 "Size check for removing std::atomic<>.");
   3226   mirror::DexCachePair<T>* orig_pair =
   3227       reinterpret_cast<mirror::DexCachePair<T>*>(&orig_array[array_index]);
   3228   mirror::DexCachePair<T>* new_pair =
   3229       reinterpret_cast<mirror::DexCachePair<T>*>(&new_array[array_index]);
   3230   CopyAndFixupReference(
   3231       new_pair->object.AddressWithoutBarrier(), orig_pair->object.Read());
   3232   new_pair->index = orig_pair->index;
   3233 }
   3234 
   3235 template <typename T>
   3236 void ImageWriter::FixupDexCacheArrayEntry(std::atomic<mirror::NativeDexCachePair<T>>* orig_array,
   3237                                           std::atomic<mirror::NativeDexCachePair<T>>* new_array,
   3238                                           uint32_t array_index) {
   3239   static_assert(
   3240       sizeof(std::atomic<mirror::NativeDexCachePair<T>>) == sizeof(mirror::NativeDexCachePair<T>),
   3241       "Size check for removing std::atomic<>.");
   3242   if (target_ptr_size_ == PointerSize::k64) {
   3243     DexCache::ConversionPair64* orig_pair =
   3244         reinterpret_cast<DexCache::ConversionPair64*>(orig_array) + array_index;
   3245     DexCache::ConversionPair64* new_pair =
   3246         reinterpret_cast<DexCache::ConversionPair64*>(new_array) + array_index;
   3247     *new_pair = *orig_pair;  // Copy original value and index.
   3248     if (orig_pair->first != 0u) {
   3249       CopyAndFixupPointer(
   3250           reinterpret_cast<void**>(&new_pair->first), reinterpret_cast64<void*>(orig_pair->first));
   3251     }
   3252   } else {
   3253     DexCache::ConversionPair32* orig_pair =
   3254         reinterpret_cast<DexCache::ConversionPair32*>(orig_array) + array_index;
   3255     DexCache::ConversionPair32* new_pair =
   3256         reinterpret_cast<DexCache::ConversionPair32*>(new_array) + array_index;
   3257     *new_pair = *orig_pair;  // Copy original value and index.
   3258     if (orig_pair->first != 0u) {
   3259       CopyAndFixupPointer(
   3260           reinterpret_cast<void**>(&new_pair->first), reinterpret_cast32<void*>(orig_pair->first));
   3261     }
   3262   }
   3263 }
   3264 
   3265 void ImageWriter::FixupDexCacheArrayEntry(GcRoot<mirror::CallSite>* orig_array,
   3266                                           GcRoot<mirror::CallSite>* new_array,
   3267                                           uint32_t array_index) {
   3268   CopyAndFixupReference(
   3269       new_array[array_index].AddressWithoutBarrier(), orig_array[array_index].Read());
   3270 }
   3271 
   3272 template <typename EntryType>
   3273 void ImageWriter::FixupDexCacheArray(DexCache* orig_dex_cache,
   3274                                      DexCache* copy_dex_cache,
   3275                                      MemberOffset array_offset,
   3276                                      uint32_t size) {
   3277   EntryType* orig_array = orig_dex_cache->GetFieldPtr64<EntryType*>(array_offset);
   3278   DCHECK_EQ(orig_array != nullptr, size != 0u);
   3279   if (orig_array != nullptr) {
    3280     // Though the DexCache array fields are usually treated as native pointers, we write the
    3281     // full 64-bit slot here, which also clears the top 32 bits for 32-bit targets.
   3282     CopyAndFixupPointer(copy_dex_cache, array_offset, orig_array, PointerSize::k64);
   3283     EntryType* new_array = NativeCopyLocation(orig_array);
   3284     for (uint32_t i = 0; i != size; ++i) {
   3285       FixupDexCacheArrayEntry(orig_array, new_array, i);
   3286     }
   3287   }
   3288 }
   3289 
   3290 void ImageWriter::FixupDexCache(DexCache* orig_dex_cache, DexCache* copy_dex_cache) {
   3291   FixupDexCacheArray<mirror::StringDexCacheType>(orig_dex_cache,
   3292                                                  copy_dex_cache,
   3293                                                  DexCache::StringsOffset(),
   3294                                                  orig_dex_cache->NumStrings());
   3295   FixupDexCacheArray<mirror::TypeDexCacheType>(orig_dex_cache,
   3296                                                copy_dex_cache,
   3297                                                DexCache::ResolvedTypesOffset(),
   3298                                                orig_dex_cache->NumResolvedTypes());
   3299   FixupDexCacheArray<mirror::MethodDexCacheType>(orig_dex_cache,
   3300                                                  copy_dex_cache,
   3301                                                  DexCache::ResolvedMethodsOffset(),
   3302                                                  orig_dex_cache->NumResolvedMethods());
   3303   FixupDexCacheArray<mirror::FieldDexCacheType>(orig_dex_cache,
   3304                                                 copy_dex_cache,
   3305                                                 DexCache::ResolvedFieldsOffset(),
   3306                                                 orig_dex_cache->NumResolvedFields());
   3307   FixupDexCacheArray<mirror::MethodTypeDexCacheType>(orig_dex_cache,
   3308                                                      copy_dex_cache,
   3309                                                      DexCache::ResolvedMethodTypesOffset(),
   3310                                                      orig_dex_cache->NumResolvedMethodTypes());
   3311   FixupDexCacheArray<GcRoot<mirror::CallSite>>(orig_dex_cache,
   3312                                                copy_dex_cache,
   3313                                                DexCache::ResolvedCallSitesOffset(),
   3314                                                orig_dex_cache->NumResolvedCallSites());
   3315   if (orig_dex_cache->GetPreResolvedStrings() != nullptr) {
   3316     CopyAndFixupPointer(copy_dex_cache,
   3317                         DexCache::PreResolvedStringsOffset(),
   3318                         orig_dex_cache->GetPreResolvedStrings(),
   3319                         PointerSize::k64);
   3320   }
   3321 
   3322   // Remove the DexFile pointers. They will be fixed up when the runtime loads the oat file. Leaving
   3323   // compiler pointers in here will make the output non-deterministic.
   3324   copy_dex_cache->SetDexFile(nullptr);
   3325 }
   3326 
   3327 const uint8_t* ImageWriter::GetOatAddress(StubType type) const {
   3328   DCHECK_LE(type, StubType::kLast);
   3329   // If we are compiling an app image, we need to use the stubs of the boot image.
   3330   if (!compiler_options_.IsBootImage()) {
   3331     // Use the current image pointers.
   3332     const std::vector<gc::space::ImageSpace*>& image_spaces =
   3333         Runtime::Current()->GetHeap()->GetBootImageSpaces();
   3334     DCHECK(!image_spaces.empty());
   3335     const OatFile* oat_file = image_spaces[0]->GetOatFile();
   3336     CHECK(oat_file != nullptr);
   3337     const OatHeader& header = oat_file->GetOatHeader();
   3338     switch (type) {
   3339       // TODO: We could maybe clean this up if we stored them in an array in the oat header.
   3340       case StubType::kQuickGenericJNITrampoline:
   3341         return static_cast<const uint8_t*>(header.GetQuickGenericJniTrampoline());
   3342       case StubType::kJNIDlsymLookup:
   3343         return static_cast<const uint8_t*>(header.GetJniDlsymLookup());
   3344       case StubType::kQuickIMTConflictTrampoline:
   3345         return static_cast<const uint8_t*>(header.GetQuickImtConflictTrampoline());
   3346       case StubType::kQuickResolutionTrampoline:
   3347         return static_cast<const uint8_t*>(header.GetQuickResolutionTrampoline());
   3348       case StubType::kQuickToInterpreterBridge:
   3349         return static_cast<const uint8_t*>(header.GetQuickToInterpreterBridge());
   3350       default:
   3351         UNREACHABLE();
   3352     }
   3353   }
   3354   const ImageInfo& primary_image_info = GetImageInfo(0);
   3355   return GetOatAddressForOffset(primary_image_info.GetStubOffset(type), primary_image_info);
   3356 }
   3357 
   3358 const uint8_t* ImageWriter::GetQuickCode(ArtMethod* method,
   3359                                          const ImageInfo& image_info,
   3360                                          bool* quick_is_interpreted) {
   3361   DCHECK(!method->IsResolutionMethod()) << method->PrettyMethod();
   3362   DCHECK_NE(method, Runtime::Current()->GetImtConflictMethod()) << method->PrettyMethod();
   3363   DCHECK(!method->IsImtUnimplementedMethod()) << method->PrettyMethod();
   3364   DCHECK(method->IsInvokable()) << method->PrettyMethod();
   3365   DCHECK(!IsInBootImage(method)) << method->PrettyMethod();
   3366 
   3367   // Use original code if it exists. Otherwise, set the code pointer to the resolution
   3368   // trampoline.
   3369 
   3370   // Quick entrypoint:
   3371   const void* quick_oat_entry_point =
   3372       method->GetEntryPointFromQuickCompiledCodePtrSize(target_ptr_size_);
   3373   const uint8_t* quick_code;
   3374 
   3375   if (UNLIKELY(IsInBootImage(method->GetDeclaringClass().Ptr()))) {
   3376     DCHECK(method->IsCopied());
   3377     // If the code is not in the oat file corresponding to this image (e.g. default methods)
   3378     quick_code = reinterpret_cast<const uint8_t*>(quick_oat_entry_point);
   3379   } else {
   3380     uint32_t quick_oat_code_offset = PointerToLowMemUInt32(quick_oat_entry_point);
   3381     quick_code = GetOatAddressForOffset(quick_oat_code_offset, image_info);
   3382   }
   3383 
   3384   *quick_is_interpreted = false;
   3385   if (quick_code != nullptr && (!method->IsStatic() || method->IsConstructor() ||
   3386       method->GetDeclaringClass()->IsInitialized())) {
   3387     // We have code for a non-static or initialized method, just use the code.
   3388   } else if (quick_code == nullptr && method->IsNative() &&
   3389       (!method->IsStatic() || method->GetDeclaringClass()->IsInitialized())) {
   3390     // Non-static or initialized native method missing compiled code, use generic JNI version.
   3391     quick_code = GetOatAddress(StubType::kQuickGenericJNITrampoline);
   3392   } else if (quick_code == nullptr && !method->IsNative()) {
   3393     // We don't have code at all for a non-native method, use the interpreter.
   3394     quick_code = GetOatAddress(StubType::kQuickToInterpreterBridge);
   3395     *quick_is_interpreted = true;
   3396   } else {
   3397     CHECK(!method->GetDeclaringClass()->IsInitialized());
   3398     // We have code for a static method, but need to go through the resolution stub for class
   3399     // initialization.
   3400     quick_code = GetOatAddress(StubType::kQuickResolutionTrampoline);
   3401   }
   3402   if (!IsInBootOatFile(quick_code)) {
   3403     // DCHECK_GE(quick_code, oat_data_begin_);
   3404   }
   3405   return quick_code;
   3406 }
   3407 
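         // Copies the raw ArtMethod and then fixes up its pointers for the image: the declaring
         // class reference, the data pointer (IMT conflict table or JNI entrypoint), and the quick
         // code entrypoint (compiled code or a trampoline).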
   3408 void ImageWriter::CopyAndFixupMethod(ArtMethod* orig,
   3409                                      ArtMethod* copy,
   3410                                      size_t oat_index) {
   3411   if (orig->IsAbstract()) {
    3412     // Ignore the single-implementation info for abstract methods.
    3413     // Do this on orig instead of copy; otherwise there is a crash because methods
    3414     // are copied before classes.
    3415     // TODO: handle fixup of single-implementation methods for abstract methods.
   3416     orig->SetHasSingleImplementation(false);
   3417     orig->SetSingleImplementation(
   3418         nullptr, Runtime::Current()->GetClassLinker()->GetImagePointerSize());
   3419   }
   3420 
   3421   memcpy(copy, orig, ArtMethod::Size(target_ptr_size_));
   3422 
   3423   CopyAndFixupReference(
   3424       copy->GetDeclaringClassAddressWithoutBarrier(), orig->GetDeclaringClassUnchecked());
   3425 
    3426   // OatWriter replaces the code_ with an offset value. Here we readjust it to a pointer
    3427   // relative to oat_begin_.
   3428 
   3429   // The resolution method has a special trampoline to call.
   3430   Runtime* runtime = Runtime::Current();
   3431   const void* quick_code;
   3432   if (orig->IsRuntimeMethod()) {
   3433     ImtConflictTable* orig_table = orig->GetImtConflictTable(target_ptr_size_);
   3434     if (orig_table != nullptr) {
   3435       // Special IMT conflict method, normal IMT conflict method or unimplemented IMT method.
   3436       quick_code = GetOatAddress(StubType::kQuickIMTConflictTrampoline);
   3437       CopyAndFixupPointer(copy, ArtMethod::DataOffset(target_ptr_size_), orig_table);
   3438     } else if (UNLIKELY(orig == runtime->GetResolutionMethod())) {
   3439       quick_code = GetOatAddress(StubType::kQuickResolutionTrampoline);
   3440     } else {
   3441       bool found_one = false;
   3442       for (size_t i = 0; i < static_cast<size_t>(CalleeSaveType::kLastCalleeSaveType); ++i) {
   3443         auto idx = static_cast<CalleeSaveType>(i);
   3444         if (runtime->HasCalleeSaveMethod(idx) && runtime->GetCalleeSaveMethod(idx) == orig) {
   3445           found_one = true;
   3446           break;
   3447         }
   3448       }
   3449       CHECK(found_one) << "Expected to find callee save method but got " << orig->PrettyMethod();
   3450       CHECK(copy->IsRuntimeMethod());
   3451       CHECK(copy->GetEntryPointFromQuickCompiledCode() == nullptr);
   3452       quick_code = nullptr;
   3453     }
   3454   } else {
    3455     // We assume all methods have code. If they don't currently then we set them to use the
   3456     // resolution trampoline. Abstract methods never have code and so we need to make sure their
   3457     // use results in an AbstractMethodError. We use the interpreter to achieve this.
   3458     if (UNLIKELY(!orig->IsInvokable())) {
   3459       quick_code = GetOatAddress(StubType::kQuickToInterpreterBridge);
   3460     } else {
   3461       bool quick_is_interpreted;
   3462       const ImageInfo& image_info = image_infos_[oat_index];
   3463       quick_code = GetQuickCode(orig, image_info, &quick_is_interpreted);
   3464 
   3465       // JNI entrypoint:
   3466       if (orig->IsNative()) {
    3467         // The native method's pointer is set to a stub that does the lookup via dlsym.
   3468         // Note this is not the code_ pointer, that is handled above.
   3469         copy->SetEntryPointFromJniPtrSize(
   3470             GetOatAddress(StubType::kJNIDlsymLookup), target_ptr_size_);
   3471       } else {
   3472         CHECK(copy->GetDataPtrSize(target_ptr_size_) == nullptr);
   3473       }
   3474     }
   3475   }
   3476   if (quick_code != nullptr) {
   3477     copy->SetEntryPointFromQuickCompiledCodePtrSize(quick_code, target_ptr_size_);
   3478   }
   3479 }
   3480 
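         // Returns the combined size in bytes of all bins preceding `up_to`. Bins are laid out
         // back to back in enum order, so this is also the start of bin `up_to` relative to the
         // first bin.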
   3481 size_t ImageWriter::ImageInfo::GetBinSizeSum(Bin up_to) const {
   3482   DCHECK_LE(static_cast<size_t>(up_to), kNumberOfBins);
   3483   return std::accumulate(&bin_slot_sizes_[0],
   3484                          &bin_slot_sizes_[0] + static_cast<size_t>(up_to),
   3485                          /*init*/ static_cast<size_t>(0));
   3486 }
   3487 
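         // A BinSlot temporarily encodes an object's destination in its lock word during layout:
         // the bin occupies kBinBits bits starting at bit kBinShift, and the remaining low bits
         // hold the object-aligned byte offset of the object within that bin.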
   3488 ImageWriter::BinSlot::BinSlot(uint32_t lockword) : lockword_(lockword) {
   3489   // These values may need to get updated if more bins are added to the enum Bin
   3490   static_assert(kBinBits == 3, "wrong number of bin bits");
    3491   static_assert(kBinShift == 27, "wrong bin shift");
   3492   static_assert(sizeof(BinSlot) == sizeof(LockWord), "BinSlot/LockWord must have equal sizes");
   3493 
   3494   DCHECK_LT(GetBin(), Bin::kMirrorCount);
   3495   DCHECK_ALIGNED(GetIndex(), kObjectAlignment);
   3496 }
   3497 
   3498 ImageWriter::BinSlot::BinSlot(Bin bin, uint32_t index)
   3499     : BinSlot(index | (static_cast<uint32_t>(bin) << kBinShift)) {
   3500   DCHECK_EQ(index, GetIndex());
   3501 }
   3502 
   3503 ImageWriter::Bin ImageWriter::BinSlot::GetBin() const {
   3504   return static_cast<Bin>((lockword_ & kBinMask) >> kBinShift);
   3505 }
   3506 
   3507 uint32_t ImageWriter::BinSlot::GetIndex() const {
   3508   return lockword_ & ~kBinMask;
   3509 }
   3510 
   3511 ImageWriter::Bin ImageWriter::BinTypeForNativeRelocationType(NativeObjectRelocationType type) {
   3512   switch (type) {
   3513     case NativeObjectRelocationType::kArtField:
   3514     case NativeObjectRelocationType::kArtFieldArray:
   3515       return Bin::kArtField;
   3516     case NativeObjectRelocationType::kArtMethodClean:
   3517     case NativeObjectRelocationType::kArtMethodArrayClean:
   3518       return Bin::kArtMethodClean;
   3519     case NativeObjectRelocationType::kArtMethodDirty:
   3520     case NativeObjectRelocationType::kArtMethodArrayDirty:
   3521       return Bin::kArtMethodDirty;
   3522     case NativeObjectRelocationType::kDexCacheArray:
   3523       return Bin::kDexCacheArray;
   3524     case NativeObjectRelocationType::kRuntimeMethod:
   3525       return Bin::kRuntimeMethod;
   3526     case NativeObjectRelocationType::kIMTable:
   3527       return Bin::kImTable;
   3528     case NativeObjectRelocationType::kIMTConflictTable:
   3529       return Bin::kIMTConflictTable;
   3530     case NativeObjectRelocationType::kGcRootPointer:
   3531       return Bin::kMetadata;
   3532   }
   3533   UNREACHABLE();
   3534 }
   3535 
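         // For multi-image compilations, every object and dex file is attributed to one of the
         // output oat files; the lookups below fall back to the default index for single-image
         // compilations.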
   3536 size_t ImageWriter::GetOatIndex(mirror::Object* obj) const {
   3537   if (!IsMultiImage()) {
   3538     return GetDefaultOatIndex();
   3539   }
   3540   auto it = oat_index_map_.find(obj);
   3541   DCHECK(it != oat_index_map_.end()) << obj;
   3542   return it->second;
   3543 }
   3544 
   3545 size_t ImageWriter::GetOatIndexForDexFile(const DexFile* dex_file) const {
   3546   if (!IsMultiImage()) {
   3547     return GetDefaultOatIndex();
   3548   }
   3549   auto it = dex_file_oat_index_map_.find(dex_file);
   3550   DCHECK(it != dex_file_oat_index_map_.end()) << dex_file->GetLocation();
   3551   return it->second;
   3552 }
   3553 
   3554 size_t ImageWriter::GetOatIndexForDexCache(ObjPtr<mirror::DexCache> dex_cache) const {
   3555   return (dex_cache == nullptr)
   3556       ? GetDefaultOatIndex()
   3557       : GetOatIndexForDexFile(dex_cache->GetDexFile());
   3558 }
   3559 
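         // Records where the oat file for `oat_index` will be mapped. Oat files follow the images
         // in memory, e.g. for two images:
         //   [image 0][image 1][oat 0][oat 1]
         // so oat_file_begin_ is images_end + oat_offset_, and the next oat file's offset is
         // advanced by this file's loaded size.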
   3560 void ImageWriter::UpdateOatFileLayout(size_t oat_index,
   3561                                       size_t oat_loaded_size,
   3562                                       size_t oat_data_offset,
   3563                                       size_t oat_data_size) {
   3564   DCHECK_GE(oat_loaded_size, oat_data_offset);
   3565   DCHECK_GE(oat_loaded_size - oat_data_offset, oat_data_size);
   3566 
   3567   const uint8_t* images_end = image_infos_.back().image_begin_ + image_infos_.back().image_size_;
   3568   DCHECK(images_end != nullptr);  // Image space must be ready.
   3569   for (const ImageInfo& info : image_infos_) {
   3570     DCHECK_LE(info.image_begin_ + info.image_size_, images_end);
   3571   }
   3572 
   3573   ImageInfo& cur_image_info = GetImageInfo(oat_index);
   3574   cur_image_info.oat_file_begin_ = images_end + cur_image_info.oat_offset_;
   3575   cur_image_info.oat_loaded_size_ = oat_loaded_size;
   3576   cur_image_info.oat_data_begin_ = cur_image_info.oat_file_begin_ + oat_data_offset;
   3577   cur_image_info.oat_size_ = oat_data_size;
   3578 
   3579   if (compiler_options_.IsAppImage()) {
   3580     CHECK_EQ(oat_filenames_.size(), 1u) << "App image should have no next image.";
   3581     return;
   3582   }
   3583 
   3584   // Update the oat_offset of the next image info.
   3585   if (oat_index + 1u != oat_filenames_.size()) {
   3586     // There is a following one.
   3587     ImageInfo& next_image_info = GetImageInfo(oat_index + 1u);
   3588     next_image_info.oat_offset_ = cur_image_info.oat_offset_ + oat_loaded_size;
   3589   }
   3590 }
   3591 
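         // Records the oat checksum and, for the primary oat file, the trampoline offsets that
         // GetOatAddress(StubType) later resolves to absolute addresses.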
   3592 void ImageWriter::UpdateOatFileHeader(size_t oat_index, const OatHeader& oat_header) {
   3593   ImageInfo& cur_image_info = GetImageInfo(oat_index);
   3594   cur_image_info.oat_checksum_ = oat_header.GetChecksum();
   3595 
   3596   if (oat_index == GetDefaultOatIndex()) {
   3597     // Primary oat file, read the trampolines.
   3598     cur_image_info.SetStubOffset(StubType::kJNIDlsymLookup,
   3599                                  oat_header.GetJniDlsymLookupOffset());
   3600     cur_image_info.SetStubOffset(StubType::kQuickGenericJNITrampoline,
   3601                                  oat_header.GetQuickGenericJniTrampolineOffset());
   3602     cur_image_info.SetStubOffset(StubType::kQuickIMTConflictTrampoline,
   3603                                  oat_header.GetQuickImtConflictTrampolineOffset());
   3604     cur_image_info.SetStubOffset(StubType::kQuickResolutionTrampoline,
   3605                                  oat_header.GetQuickResolutionTrampolineOffset());
   3606     cur_image_info.SetStubOffset(StubType::kQuickToInterpreterBridge,
   3607                                  oat_header.GetQuickToInterpreterBridgeOffset());
   3608   }
   3609 }
   3610 
   3611 ImageWriter::ImageWriter(
   3612     const CompilerOptions& compiler_options,
   3613     uintptr_t image_begin,
   3614     ImageHeader::StorageMode image_storage_mode,
   3615     const std::vector<std::string>& oat_filenames,
   3616     const std::unordered_map<const DexFile*, size_t>& dex_file_oat_index_map,
   3617     jobject class_loader,
   3618     const HashSet<std::string>* dirty_image_objects)
   3619     : compiler_options_(compiler_options),
   3620       global_image_begin_(reinterpret_cast<uint8_t*>(image_begin)),
   3621       image_objects_offset_begin_(0),
   3622       target_ptr_size_(InstructionSetPointerSize(compiler_options.GetInstructionSet())),
   3623       image_infos_(oat_filenames.size()),
   3624       dirty_methods_(0u),
   3625       clean_methods_(0u),
   3626       app_class_loader_(class_loader),
   3627       boot_image_live_objects_(nullptr),
   3628       image_storage_mode_(image_storage_mode),
   3629       oat_filenames_(oat_filenames),
   3630       dex_file_oat_index_map_(dex_file_oat_index_map),
   3631       dirty_image_objects_(dirty_image_objects) {
   3632   DCHECK(compiler_options.IsBootImage() || compiler_options.IsAppImage());
   3633   CHECK_NE(image_begin, 0U);
   3634   std::fill_n(image_methods_, arraysize(image_methods_), nullptr);
   3635   CHECK_EQ(compiler_options.IsBootImage(),
   3636            Runtime::Current()->GetHeap()->GetBootImageSpaces().empty())
   3637       << "Compiling a boot image should occur iff there are no boot image spaces loaded";
   3638   if (compiler_options_.IsAppImage()) {
    3639     // Make sure objects do not cross region boundaries for app images.
   3640     region_size_ = gc::space::RegionSpace::kRegionSize;
   3641   }
   3642 }
   3643 
   3644 ImageWriter::ImageInfo::ImageInfo()
   3645     : intern_table_(new InternTable),
   3646       class_table_(new ClassTable) {}
   3647 
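         // Writes the image address of `src` into a heap reference field of the copied object.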
   3648 template <typename DestType>
   3649 void ImageWriter::CopyAndFixupReference(DestType* dest, ObjPtr<mirror::Object> src) {
   3650   static_assert(std::is_same<DestType, mirror::CompressedReference<mirror::Object>>::value ||
   3651                     std::is_same<DestType, mirror::HeapReference<mirror::Object>>::value,
   3652                 "DestType must be a Compressed-/HeapReference<Object>.");
   3653   dest->Assign(GetImageAddress(src.Ptr()));
   3654 }
   3655 
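         // The CopyAndFixupPointer() overloads translate a native pointer to its location in the
         // image (NativeLocationInImage()) and store it with the given or target pointer size.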
   3656 void ImageWriter::CopyAndFixupPointer(void** target, void* value, PointerSize pointer_size) {
   3657   void* new_value = NativeLocationInImage(value);
   3658   if (pointer_size == PointerSize::k32) {
   3659     *reinterpret_cast<uint32_t*>(target) = reinterpret_cast32<uint32_t>(new_value);
   3660   } else {
   3661     *reinterpret_cast<uint64_t*>(target) = reinterpret_cast64<uint64_t>(new_value);
   3662   }
   3663   DCHECK(value != nullptr);
   3664 }
   3665 
   3666 void ImageWriter::CopyAndFixupPointer(void** target, void* value)
   3667     REQUIRES_SHARED(Locks::mutator_lock_) {
   3668   CopyAndFixupPointer(target, value, target_ptr_size_);
   3669 }
   3670 
   3671 void ImageWriter::CopyAndFixupPointer(
   3672     void* object, MemberOffset offset, void* value, PointerSize pointer_size) {
   3673   void** target =
   3674       reinterpret_cast<void**>(reinterpret_cast<uint8_t*>(object) + offset.Uint32Value());
   3675   return CopyAndFixupPointer(target, value, pointer_size);
   3676 }
   3677 
   3678 void ImageWriter::CopyAndFixupPointer(void* object, MemberOffset offset, void* value) {
   3679   return CopyAndFixupPointer(object, offset, value, target_ptr_size_);
   3680 }
   3681 
   3682 }  // namespace linker
   3683 }  // namespace art
   3684