/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "mark_compact.h"

#include "base/logging.h"
#include "base/mutex-inl.h"
#include "base/timing_logger.h"
#include "gc/accounting/heap_bitmap-inl.h"
#include "gc/accounting/mod_union_table.h"
#include "gc/accounting/remembered_set.h"
#include "gc/accounting/space_bitmap-inl.h"
#include "gc/heap.h"
#include "gc/reference_processor.h"
#include "gc/space/bump_pointer_space.h"
#include "gc/space/bump_pointer_space-inl.h"
#include "gc/space/image_space.h"
#include "gc/space/large_object_space.h"
#include "gc/space/space-inl.h"
#include "indirect_reference_table.h"
#include "intern_table.h"
#include "jni_internal.h"
#include "mark_sweep-inl.h"
#include "monitor.h"
#include "mirror/art_field.h"
#include "mirror/art_field-inl.h"
#include "mirror/class-inl.h"
#include "mirror/class_loader.h"
#include "mirror/dex_cache.h"
#include "mirror/reference-inl.h"
#include "mirror/object-inl.h"
#include "mirror/object_array.h"
#include "mirror/object_array-inl.h"
#include "runtime.h"
#include "stack.h"
#include "thread-inl.h"
#include "thread_list.h"

using ::art::mirror::Object;

namespace art {
namespace gc {
namespace collector {

void MarkCompact::BindBitmaps() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  WriterMutexLock mu(Thread::Current(), *Locks::heap_bitmap_lock_);
  // Mark all of the spaces we never collect as immune.
  for (const auto& space : GetHeap()->GetContinuousSpaces()) {
    if (space->GetGcRetentionPolicy() == space::kGcRetentionPolicyNeverCollect ||
        space->GetGcRetentionPolicy() == space::kGcRetentionPolicyFullCollect) {
      CHECK(immune_region_.AddContinuousSpace(space)) << "Failed to add space " << *space;
    }
  }
}

MarkCompact::MarkCompact(Heap* heap, const std::string& name_prefix)
    : GarbageCollector(heap, name_prefix + (name_prefix.empty() ? "" : " ") + "mark compact"),
      space_(nullptr), collector_name_(name_) {
}

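// Entry point for a collection cycle. Marking, reference processing, sweeping and compaction all
// run inside a single pause (ScopedPause below); only initialization, post-GC verification and
// FinishPhase() happen outside of it.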
void MarkCompact::RunPhases() {
  Thread* self = Thread::Current();
  InitializePhase();
  CHECK(!Locks::mutator_lock_->IsExclusiveHeld(self));
  {
    ScopedPause pause(this);
    GetHeap()->PreGcVerificationPaused(this);
    GetHeap()->PrePauseRosAllocVerification(this);
    MarkingPhase();
    ReclaimPhase();
  }
  GetHeap()->PostGcVerification(this);
  FinishPhase();
}

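// Assign the next forwarding address to |obj| by stashing it in the object's lock word. A
// non-trivial lock word (e.g. a thin lock or a hash code) is saved in lock_words_to_restore_ so
// that MoveObject() can restore it after the copy; the objects_with_lockword_ bitmap records
// which objects need that restore.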
void MarkCompact::ForwardObject(mirror::Object* obj) {
  const size_t alloc_size = RoundUp(obj->SizeOf(), space::BumpPointerSpace::kAlignment);
  LockWord lock_word = obj->GetLockWord(false);
  // If we have a non-empty lock word, store it and restore it later.
  if (lock_word.GetValue() != LockWord().GetValue()) {
    // Set the bit in the bitmap so that we know to restore it later.
    objects_with_lockword_->Set(obj);
    lock_words_to_restore_.push_back(lock_word);
  }
  obj->SetLockWord(LockWord::FromForwardingAddress(reinterpret_cast<size_t>(bump_pointer_)),
                   false);
  bump_pointer_ += alloc_size;
  ++live_objects_in_space_;
}

class CalculateObjectForwardingAddressVisitor {
 public:
  explicit CalculateObjectForwardingAddressVisitor(MarkCompact* collector)
      : collector_(collector) {}
  void operator()(mirror::Object* obj) const EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_,
                                                                      Locks::heap_bitmap_lock_) {
    DCHECK_ALIGNED(obj, space::BumpPointerSpace::kAlignment);
    DCHECK(collector_->IsMarked(obj));
    collector_->ForwardObject(obj);
  }

 private:
  MarkCompact* const collector_;
};

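// First step of compaction: walk the marked objects in address order and assign each one a
// forwarding address, packing the live objects towards the start of the bump pointer space.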
void MarkCompact::CalculateObjectForwardingAddresses() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  // The bump pointer in the space where the next forwarding address will be.
  bump_pointer_ = reinterpret_cast<byte*>(space_->Begin());
  // Visit all the marked objects in the bitmap.
  CalculateObjectForwardingAddressVisitor visitor(this);
  objects_before_forwarding_->VisitMarkedRange(reinterpret_cast<uintptr_t>(space_->Begin()),
                                               reinterpret_cast<uintptr_t>(space_->End()),
                                               visitor);
}

void MarkCompact::InitializePhase() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  mark_stack_ = heap_->GetMarkStack();
  DCHECK(mark_stack_ != nullptr);
  immune_region_.Reset();
  CHECK(space_->CanMoveObjects()) << "Attempting to compact non-movable space from " << *space_;
  // TODO: I don't think we should need the heap bitmap lock to get the mark bitmap.
  ReaderMutexLock mu(Thread::Current(), *Locks::heap_bitmap_lock_);
  mark_bitmap_ = heap_->GetMarkBitmap();
  live_objects_in_space_ = 0;
}

void MarkCompact::ProcessReferences(Thread* self) {
  WriterMutexLock mu(self, *Locks::heap_bitmap_lock_);
  heap_->GetReferenceProcessor()->ProcessReferences(
      false, GetTimings(), GetCurrentIteration()->GetClearSoftReferences(),
      &HeapReferenceMarkedCallback, &MarkObjectCallback, &ProcessMarkStackCallback, this);
}

class BitmapSetSlowPathVisitor {
 public:
  void operator()(const mirror::Object* obj) const {
    // Marking a large object, make sure it's aligned as a sanity check.
    if (!IsAligned<kPageSize>(obj)) {
      Runtime::Current()->GetHeap()->DumpSpaces(LOG(ERROR));
      LOG(FATAL) << obj;
    }
  }
};

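// Mark a single object. Objects inside the bump pointer space are tracked in the
// objects_before_forwarding_ bitmap; everything else (non-moving spaces, large objects) goes
// through the regular heap mark bitmap, with the slow path above sanity-checking alignment for
// large objects. Newly marked objects are pushed on the mark stack for recursive scanning.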
inline void MarkCompact::MarkObject(mirror::Object* obj) {
  if (obj == nullptr) {
    return;
  }
  if (kUseBakerOrBrooksReadBarrier) {
    // Verify all the objects have the correct forward pointer installed.
    obj->AssertReadBarrierPointer();
  }
  if (immune_region_.ContainsObject(obj)) {
    return;
  }
  if (objects_before_forwarding_->HasAddress(obj)) {
    if (!objects_before_forwarding_->Set(obj)) {
      MarkStackPush(obj);  // This object was not previously marked.
    }
  } else {
    DCHECK(!space_->HasAddress(obj));
    BitmapSetSlowPathVisitor visitor;
    if (!mark_bitmap_->Set(obj, visitor)) {
      // This object was not previously marked.
      MarkStackPush(obj);
    }
  }
}

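// Marking phase, run with the mutator lock exclusively held. Marks the roots and everything
// transitively reachable from them, then processes references and sweeps system weaks, so that by
// the end of the phase objects_before_forwarding_ describes exactly the objects that will survive
// compaction.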
void MarkCompact::MarkingPhase() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  Thread* self = Thread::Current();
  // Bitmap which describes which objects we have to move.
  objects_before_forwarding_.reset(accounting::ContinuousSpaceBitmap::Create(
      "objects before forwarding", space_->Begin(), space_->Size()));
  // Bitmap which describes which lock words we need to restore.
  objects_with_lockword_.reset(accounting::ContinuousSpaceBitmap::Create(
      "objects with lock words", space_->Begin(), space_->Size()));
  CHECK(Locks::mutator_lock_->IsExclusiveHeld(self));
  // Assume the cleared space is already empty.
  BindBitmaps();
  t.NewTiming("ProcessCards");
  // Process dirty cards and add dirty cards to mod-union tables.
  heap_->ProcessCards(GetTimings(), false);
  // Clear the whole card table since we cannot get any additional dirty cards during the
  // paused GC. This saves memory but only works for pause-the-world collectors.
  t.NewTiming("ClearCardTable");
  heap_->GetCardTable()->ClearCardTable();
  // Need to do this before the checkpoint since we don't want any threads to add references to
  // the live stack during the recursive mark.
  if (kUseThreadLocalAllocationStack) {
    t.NewTiming("RevokeAllThreadLocalAllocationStacks");
    heap_->RevokeAllThreadLocalAllocationStacks(self);
  }
  t.NewTiming("SwapStacks");
  heap_->SwapStacks(self);
  {
    WriterMutexLock mu(self, *Locks::heap_bitmap_lock_);
    MarkRoots();
    // Mark roots of immune spaces.
    UpdateAndMarkModUnion();
    // Recursively mark remaining objects.
    MarkReachableObjects();
  }
  ProcessReferences(self);
  {
    ReaderMutexLock mu(self, *Locks::heap_bitmap_lock_);
    SweepSystemWeaks();
  }
  // Revoke buffers before measuring how many objects were moved, since the TLABs need to be
  // revoked before their objects are properly counted.
  RevokeAllThreadLocalBuffers();
  // Disabled due to an issue where we have objects in the bump pointer space which reference dead
  // objects.
  // heap_->PreSweepingGcVerification(this);
}

void MarkCompact::UpdateAndMarkModUnion() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  for (auto& space : heap_->GetContinuousSpaces()) {
    // If the space is immune then we need to mark the references to other spaces.
    if (immune_region_.ContainsSpace(space)) {
      accounting::ModUnionTable* table = heap_->FindModUnionTableFromSpace(space);
      if (table != nullptr) {
        // TODO: Improve naming.
        TimingLogger::ScopedTiming t(
            space->IsZygoteSpace() ? "UpdateAndMarkZygoteModUnionTable" :
                                     "UpdateAndMarkImageModUnionTable", GetTimings());
        table->UpdateAndMarkReferences(MarkHeapReferenceCallback, this);
      }
    }
  }
}

void MarkCompact::MarkReachableObjects() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  accounting::ObjectStack* live_stack = heap_->GetLiveStack();
  {
    TimingLogger::ScopedTiming t2("MarkAllocStackAsLive", GetTimings());
    heap_->MarkAllocStackAsLive(live_stack);
  }
  live_stack->Reset();
  // Recursively process the mark stack.
  ProcessMarkStack();
}

void MarkCompact::ReclaimPhase() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  WriterMutexLock mu(Thread::Current(), *Locks::heap_bitmap_lock_);
  // Reclaim unmarked objects.
  Sweep(false);
  // Swap the live and mark bitmaps for each space which we modified. This is an optimization that
  // enables us to not clear live bits inside of the sweep. Only swaps unbound bitmaps.
  SwapBitmaps();
  GetHeap()->UnBindBitmaps();  // Unbind the live and mark bitmaps.
  Compact();
}

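// The mark stack has a fixed capacity, so growing it means allocating a larger backing store and
// copying the pending entries across. MarkStackPush() below doubles the capacity whenever the
// stack is about to overflow.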
void MarkCompact::ResizeMarkStack(size_t new_size) {
  std::vector<Object*> temp(mark_stack_->Begin(), mark_stack_->End());
  CHECK_LE(mark_stack_->Size(), new_size);
  mark_stack_->Resize(new_size);
  for (const auto& obj : temp) {
    mark_stack_->PushBack(obj);
  }
}

inline void MarkCompact::MarkStackPush(Object* obj) {
  if (UNLIKELY(mark_stack_->Size() >= mark_stack_->Capacity())) {
    ResizeMarkStack(mark_stack_->Capacity() * 2);
  }
  // The object must be pushed onto the mark stack.
  mark_stack_->PushBack(obj);
}

void MarkCompact::ProcessMarkStackCallback(void* arg) {
  reinterpret_cast<MarkCompact*>(arg)->ProcessMarkStack();
}

mirror::Object* MarkCompact::MarkObjectCallback(mirror::Object* root, void* arg) {
  reinterpret_cast<MarkCompact*>(arg)->MarkObject(root);
  return root;
}

void MarkCompact::MarkHeapReferenceCallback(mirror::HeapReference<mirror::Object>* obj_ptr,
                                            void* arg) {
  reinterpret_cast<MarkCompact*>(arg)->MarkObject(obj_ptr->AsMirrorPtr());
}

void MarkCompact::DelayReferenceReferentCallback(mirror::Class* klass, mirror::Reference* ref,
                                                 void* arg) {
  reinterpret_cast<MarkCompact*>(arg)->DelayReferenceReferent(klass, ref);
}

void MarkCompact::MarkRootCallback(Object** root, void* arg, const RootInfo& /*root_info*/) {
  reinterpret_cast<MarkCompact*>(arg)->MarkObject(*root);
}

void MarkCompact::UpdateRootCallback(Object** root, void* arg, const RootInfo& /*root_info*/) {
  mirror::Object* obj = *root;
  mirror::Object* new_obj = reinterpret_cast<MarkCompact*>(arg)->GetMarkedForwardAddress(obj);
  if (obj != new_obj) {
    *root = new_obj;
    DCHECK(new_obj != nullptr);
  }
}

class UpdateObjectReferencesVisitor {
 public:
  explicit UpdateObjectReferencesVisitor(MarkCompact* collector) : collector_(collector) {
  }
  void operator()(mirror::Object* obj) const SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
          EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_) ALWAYS_INLINE {
    collector_->UpdateObjectReferences(obj);
  }

 private:
  MarkCompact* const collector_;
};

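// Second step of compaction: rewrite every reference in the heap to point at the forwarding
// address of its target. This covers runtime roots, spaces with mod union tables, other
// continuous spaces via their live bitmaps, system weaks, the bump pointer space itself and the
// reference processor's cleared list. It has to run after all forwarding addresses have been
// assigned and before any object is actually moved.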
void MarkCompact::UpdateReferences() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  Runtime* runtime = Runtime::Current();
  // Update roots.
  runtime->VisitRoots(UpdateRootCallback, this);
  // Update object references in mod union tables and spaces.
  for (const auto& space : heap_->GetContinuousSpaces()) {
    // If the space has a mod union table (e.g. an immune space), update its recorded references
    // through the table.
    accounting::ModUnionTable* table = heap_->FindModUnionTableFromSpace(space);
    if (table != nullptr) {
      // TODO: Improve naming.
      TimingLogger::ScopedTiming t(
          space->IsZygoteSpace() ? "UpdateZygoteModUnionTableReferences" :
                                   "UpdateImageModUnionTableReferences",
                                   GetTimings());
      table->UpdateAndMarkReferences(&UpdateHeapReferenceCallback, this);
    } else {
      // No mod union table, so we need to scan the space using a bitmap visit.
      accounting::ContinuousSpaceBitmap* bitmap = space->GetLiveBitmap();
      if (bitmap != nullptr) {
        UpdateObjectReferencesVisitor visitor(this);
        bitmap->VisitMarkedRange(reinterpret_cast<uintptr_t>(space->Begin()),
                                 reinterpret_cast<uintptr_t>(space->End()),
                                 visitor);
      }
    }
  }
  CHECK(!kMovingClasses)
      << "Didn't update large object classes since they are assumed to not move.";
  // Update the system weaks; these should already have been swept.
  runtime->SweepSystemWeaks(&MarkedForwardingAddressCallback, this);
  // Update the objects in the bump pointer space last; these objects don't have a live bitmap.
  UpdateObjectReferencesVisitor visitor(this);
  objects_before_forwarding_->VisitMarkedRange(reinterpret_cast<uintptr_t>(space_->Begin()),
                                               reinterpret_cast<uintptr_t>(space_->End()),
                                               visitor);
  // Update the reference processor cleared list.
  heap_->GetReferenceProcessor()->UpdateRoots(&MarkedForwardingAddressCallback, this);
}

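// Compaction proper: compute forwarding addresses, rewrite references, slide the live objects
// down, then shrink the space to the new end and zero the reclaimed tail. The freed object and
// byte counts are derived from what was allocated versus what survived.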
void MarkCompact::Compact() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  CalculateObjectForwardingAddresses();
  UpdateReferences();
  MoveObjects();
  // Record how much of the space was freed.
  int64_t objects_freed = space_->GetObjectsAllocated() - live_objects_in_space_;
  int64_t bytes_freed = reinterpret_cast<int64_t>(space_->End()) -
      reinterpret_cast<int64_t>(bump_pointer_);
  t.NewTiming("RecordFree");
  space_->RecordFree(objects_freed, bytes_freed);
  RecordFree(ObjectBytePair(objects_freed, bytes_freed));
  space_->SetEnd(bump_pointer_);
  // Need to zero out the memory we freed. TODO: Use madvise for pages.
  memset(bump_pointer_, 0, bytes_freed);
}

// Marks all objects in the root set.
void MarkCompact::MarkRoots() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  Runtime::Current()->VisitRoots(MarkRootCallback, this);
}

mirror::Object* MarkCompact::MarkedForwardingAddressCallback(mirror::Object* obj, void* arg) {
  return reinterpret_cast<MarkCompact*>(arg)->GetMarkedForwardAddress(obj);
}

inline void MarkCompact::UpdateHeapReference(mirror::HeapReference<mirror::Object>* reference) {
  mirror::Object* obj = reference->AsMirrorPtr();
  if (obj != nullptr) {
    mirror::Object* new_obj = GetMarkedForwardAddress(obj);
    if (obj != new_obj) {
      DCHECK(new_obj != nullptr);
      reference->Assign(new_obj);
    }
  }
}

void MarkCompact::UpdateHeapReferenceCallback(mirror::HeapReference<mirror::Object>* reference,
                                              void* arg) {
  reinterpret_cast<MarkCompact*>(arg)->UpdateHeapReference(reference);
}

class UpdateReferenceVisitor {
 public:
  explicit UpdateReferenceVisitor(MarkCompact* collector) : collector_(collector) {
  }

  void operator()(Object* obj, MemberOffset offset, bool /*is_static*/) const
      ALWAYS_INLINE EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
    collector_->UpdateHeapReference(obj->GetFieldObjectReferenceAddr<kVerifyNone>(offset));
  }

  void operator()(mirror::Class* /*klass*/, mirror::Reference* ref) const
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
    collector_->UpdateHeapReference(
        ref->GetFieldObjectReferenceAddr<kVerifyNone>(mirror::Reference::ReferentOffset()));
  }

 private:
  MarkCompact* const collector_;
};

void MarkCompact::UpdateObjectReferences(mirror::Object* obj) {
  UpdateReferenceVisitor visitor(this);
  obj->VisitReferences<kMovingClasses>(visitor, visitor);
}

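// Translate an object's address to its post-compaction address. Objects inside the bump pointer
// space return the forwarding address stored in their lock word; objects outside it do not move,
// so they map to themselves.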
inline mirror::Object* MarkCompact::GetMarkedForwardAddress(mirror::Object* obj) const {
  DCHECK(obj != nullptr);
  if (objects_before_forwarding_->HasAddress(obj)) {
    DCHECK(objects_before_forwarding_->Test(obj));
    mirror::Object* ret =
        reinterpret_cast<mirror::Object*>(obj->GetLockWord(false).ForwardingAddress());
    DCHECK(ret != nullptr);
    return ret;
  }
  DCHECK(!space_->HasAddress(obj));
  DCHECK(IsMarked(obj));
  return obj;
}

inline bool MarkCompact::IsMarked(const Object* object) const {
  if (immune_region_.ContainsObject(object)) {
    return true;
  }
  if (objects_before_forwarding_->HasAddress(object)) {
    return objects_before_forwarding_->Test(object);
  }
  return mark_bitmap_->Test(object);
}

mirror::Object* MarkCompact::IsMarkedCallback(mirror::Object* object, void* arg) {
  return reinterpret_cast<MarkCompact*>(arg)->IsMarked(object) ? object : nullptr;
}

bool MarkCompact::HeapReferenceMarkedCallback(mirror::HeapReference<mirror::Object>* ref_ptr,
                                              void* arg) {
  // Side effect free since we call this before ever moving objects.
  return reinterpret_cast<MarkCompact*>(arg)->IsMarked(ref_ptr->AsMirrorPtr());
}

void MarkCompact::SweepSystemWeaks() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  Runtime::Current()->SweepSystemWeaks(IsMarkedCallback, this);
}

bool MarkCompact::ShouldSweepSpace(space::ContinuousSpace* space) const {
  return space != space_ && !immune_region_.ContainsSpace(space);
}

class MoveObjectVisitor {
 public:
  explicit MoveObjectVisitor(MarkCompact* collector) : collector_(collector) {
  }
  void operator()(mirror::Object* obj) const SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
          EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_) ALWAYS_INLINE {
    collector_->MoveObject(obj, obj->SizeOf());
  }

 private:
  MarkCompact* const collector_;
};

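// Third step of compaction: copy a single object to its forwarding address. memmove is used
// because the source and destination ranges can overlap when an object only slides down a little,
// and the saved lock word (if any) is restored on the copy at the destination.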
void MarkCompact::MoveObject(mirror::Object* obj, size_t len) {
  // Look at the forwarding address stored in the lock word to know where to copy.
  DCHECK(space_->HasAddress(obj)) << obj;
  uintptr_t dest_addr = obj->GetLockWord(false).ForwardingAddress();
  mirror::Object* dest_obj = reinterpret_cast<mirror::Object*>(dest_addr);
  DCHECK(space_->HasAddress(dest_obj)) << dest_obj;
  // Use memmove since there may be overlap.
  memmove(reinterpret_cast<void*>(dest_addr), reinterpret_cast<const void*>(obj), len);
  // Restore the saved lock word if needed.
  LockWord lock_word;
  if (UNLIKELY(objects_with_lockword_->Test(obj))) {
    lock_word = lock_words_to_restore_.front();
    lock_words_to_restore_.pop_front();
  }
  dest_obj->SetLockWord(lock_word, false);
}

void MarkCompact::MoveObjects() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  // Move the objects in the before forwarding bitmap.
  MoveObjectVisitor visitor(this);
  objects_before_forwarding_->VisitMarkedRange(reinterpret_cast<uintptr_t>(space_->Begin()),
                                               reinterpret_cast<uintptr_t>(space_->End()),
                                               visitor);
  CHECK(lock_words_to_restore_.empty());
}

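// Sweep dead objects in the spaces we do not compact. The bump pointer space itself and immune
// spaces are skipped (see ShouldSweepSpace); large objects are handled separately below.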
void MarkCompact::Sweep(bool swap_bitmaps) {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  DCHECK(mark_stack_->IsEmpty());
  for (const auto& space : GetHeap()->GetContinuousSpaces()) {
    if (space->IsContinuousMemMapAllocSpace()) {
      space::ContinuousMemMapAllocSpace* alloc_space = space->AsContinuousMemMapAllocSpace();
      if (!ShouldSweepSpace(alloc_space)) {
        continue;
      }
      TimingLogger::ScopedTiming t(
          alloc_space->IsZygoteSpace() ? "SweepZygoteSpace" : "SweepAllocSpace", GetTimings());
      RecordFree(alloc_space->Sweep(swap_bitmaps));
    }
  }
  SweepLargeObjects(swap_bitmaps);
}

void MarkCompact::SweepLargeObjects(bool swap_bitmaps) {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  RecordFreeLOS(heap_->GetLargeObjectsSpace()->Sweep(swap_bitmaps));
}

// Process the "referent" field in a java.lang.ref.Reference.  If the referent has not yet been
// marked, put it on the appropriate list in the heap for later processing.
void MarkCompact::DelayReferenceReferent(mirror::Class* klass, mirror::Reference* reference) {
  heap_->GetReferenceProcessor()->DelayReferenceReferent(klass, reference,
                                                         &HeapReferenceMarkedCallback, this);
}

class MarkCompactMarkObjectVisitor {
 public:
  explicit MarkCompactMarkObjectVisitor(MarkCompact* collector) : collector_(collector) {
  }

  void operator()(Object* obj, MemberOffset offset, bool /*is_static*/) const ALWAYS_INLINE
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
    // Object was already verified when we scanned it.
    collector_->MarkObject(obj->GetFieldObject<mirror::Object, kVerifyNone>(offset));
  }

  void operator()(mirror::Class* klass, mirror::Reference* ref) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_) {
    collector_->DelayReferenceReferent(klass, ref);
  }

 private:
  MarkCompact* const collector_;
};

// Visit all of the references of an object and mark them.
void MarkCompact::ScanObject(Object* obj) {
  MarkCompactMarkObjectVisitor visitor(this);
  obj->VisitReferences<kMovingClasses>(visitor, visitor);
}

// Scan anything that's on the mark stack.
void MarkCompact::ProcessMarkStack() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  while (!mark_stack_->IsEmpty()) {
    Object* obj = mark_stack_->PopBack();
    DCHECK(obj != nullptr);
    ScanObject(obj);
  }
}

void MarkCompact::SetSpace(space::BumpPointerSpace* space) {
  DCHECK(space != nullptr);
  space_ = space;
}

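// Per-collection cleanup: drop the reference to the space, reset the mark stack, clear the mark
// bitmaps of all spaces and release the temporary forwarding and lock word bitmaps.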
void MarkCompact::FinishPhase() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  space_ = nullptr;
  CHECK(mark_stack_->IsEmpty());
  mark_stack_->Reset();
  // Clear all of the spaces' mark bitmaps.
  WriterMutexLock mu(Thread::Current(), *Locks::heap_bitmap_lock_);
  heap_->ClearMarkedObjects();
  // Release our bitmaps.
  objects_before_forwarding_.reset(nullptr);
  objects_with_lockword_.reset(nullptr);
}

void MarkCompact::RevokeAllThreadLocalBuffers() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  GetHeap()->RevokeAllThreadLocalBuffers();
}

}  // namespace collector
}  // namespace gc
}  // namespace art