/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "mark_compact.h"

#include <android-base/logging.h>

#include "base/macros.h"
#include "base/mutex-inl.h"
#include "base/timing_logger.h"
#include "gc/accounting/heap_bitmap-inl.h"
#include "gc/accounting/mod_union_table.h"
#include "gc/accounting/space_bitmap-inl.h"
#include "gc/heap.h"
#include "gc/reference_processor.h"
#include "gc/space/bump_pointer_space-inl.h"
#include "gc/space/large_object_space.h"
#include "gc/space/space-inl.h"
#include "mirror/class-inl.h"
#include "mirror/object-inl.h"
#include "mirror/object-refvisitor-inl.h"
#include "runtime.h"
#include "stack.h"
#include "thread-current-inl.h"
#include "thread_list.h"

namespace art {
namespace gc {
namespace collector {

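// MarkCompact is a stop-the-world collector that compacts the bump pointer space in place.
// RunPhases() marks live objects during a pause, sweeps the other spaces, and then Compact()
// slides the surviving objects toward the beginning of the space: forwarding addresses are
// computed and stashed in the objects' lock words, every reference is rewritten to point at
// the forwarding address, and finally the objects are moved.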
void MarkCompact::BindBitmaps() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  WriterMutexLock mu(Thread::Current(), *Locks::heap_bitmap_lock_);
  // Mark all of the spaces we never collect as immune.
  for (const auto& space : GetHeap()->GetContinuousSpaces()) {
    if (space->GetGcRetentionPolicy() == space::kGcRetentionPolicyNeverCollect ||
        space->GetGcRetentionPolicy() == space::kGcRetentionPolicyFullCollect) {
      immune_spaces_.AddSpace(space);
    }
  }
}

MarkCompact::MarkCompact(Heap* heap, const std::string& name_prefix)
    : GarbageCollector(heap, name_prefix + (name_prefix.empty() ? "" : " ") + "mark compact"),
      mark_stack_(nullptr),
      space_(nullptr),
      mark_bitmap_(nullptr),
      collector_name_(name_),
      bump_pointer_(nullptr),
      live_objects_in_space_(0),
      updating_references_(false) {}

void MarkCompact::RunPhases() {
  Thread* self = Thread::Current();
  InitializePhase();
  CHECK(!Locks::mutator_lock_->IsExclusiveHeld(self));
  {
    ScopedPause pause(this);
    GetHeap()->PreGcVerificationPaused(this);
    GetHeap()->PrePauseRosAllocVerification(this);
    MarkingPhase();
    ReclaimPhase();
  }
  GetHeap()->PostGcVerification(this);
  FinishPhase();
}

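// Assign obj its forwarding address by stashing it in the object's lock word. A non-default
// lock word (e.g. a hash code or lock state) is saved beforehand so that MoveObject() can
// restore it after the copy.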
void MarkCompact::ForwardObject(mirror::Object* obj) {
  const size_t alloc_size = RoundUp(obj->SizeOf(), space::BumpPointerSpace::kAlignment);
  LockWord lock_word = obj->GetLockWord(false);
  // If we have a non-empty lock word, store it and restore it later.
  if (!LockWord::IsDefault(lock_word)) {
    // Set the bit in the bitmap so that we know to restore it later.
    objects_with_lockword_->Set(obj);
    lock_words_to_restore_.push_back(lock_word);
  }
  obj->SetLockWord(LockWord::FromForwardingAddress(reinterpret_cast<size_t>(bump_pointer_)),
                   false);
  bump_pointer_ += alloc_size;
  ++live_objects_in_space_;
}

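// Walk the marked objects in address order and assign each one a forwarding address. Because
// objects are visited from the start of the space and packed contiguously, every destination
// address is less than or equal to its source address.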
void MarkCompact::CalculateObjectForwardingAddresses() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  // The bump pointer tracks where the next forwarding address will be assigned.
  bump_pointer_ = reinterpret_cast<uint8_t*>(space_->Begin());
  // Visit all the marked objects in the bitmap.
  objects_before_forwarding_->VisitMarkedRange(reinterpret_cast<uintptr_t>(space_->Begin()),
                                               reinterpret_cast<uintptr_t>(space_->End()),
                                               [this](mirror::Object* obj)
      REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
    DCHECK_ALIGNED(obj, space::BumpPointerSpace::kAlignment);
    DCHECK(IsMarked(obj) != nullptr);
    ForwardObject(obj);
  });
}

void MarkCompact::InitializePhase() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  mark_stack_ = heap_->GetMarkStack();
  DCHECK(mark_stack_ != nullptr);
  immune_spaces_.Reset();
  CHECK(space_->CanMoveObjects()) << "Attempting to compact a non-movable space: " << *space_;
  // TODO: I don't think we should need the heap bitmap lock to get the mark bitmap.
  ReaderMutexLock mu(Thread::Current(), *Locks::heap_bitmap_lock_);
  mark_bitmap_ = heap_->GetMarkBitmap();
  live_objects_in_space_ = 0;
}

void MarkCompact::ProcessReferences(Thread* self) {
  WriterMutexLock mu(self, *Locks::heap_bitmap_lock_);
  heap_->GetReferenceProcessor()->ProcessReferences(
      false, GetTimings(), GetCurrentIteration()->GetClearSoftReferences(), this);
}

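// Mark obj if it has not been marked yet. Objects in the moving space are tracked in
// objects_before_forwarding_; other non-immune objects (e.g. large objects) use the heap's
// mark bitmap. Newly marked objects are pushed onto the mark stack so that their references
// are scanned later.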
inline mirror::Object* MarkCompact::MarkObject(mirror::Object* obj) {
  if (obj == nullptr) {
    return nullptr;
  }
  if (kUseBakerReadBarrier) {
    // Verify all the objects have the correct forward state installed.
    obj->AssertReadBarrierState();
  }
  if (!immune_spaces_.IsInImmuneRegion(obj)) {
    if (objects_before_forwarding_->HasAddress(obj)) {
      if (!objects_before_forwarding_->Set(obj)) {
        MarkStackPush(obj);  // This object was not previously marked.
      }
    } else {
      DCHECK(!space_->HasAddress(obj));
      auto slow_path = [](const mirror::Object* ref)
          REQUIRES_SHARED(Locks::mutator_lock_) {
        // Marking a large object, make sure it's aligned as a sanity check.
        if (!IsAligned<kPageSize>(ref)) {
          Runtime::Current()->GetHeap()->DumpSpaces(LOG_STREAM(ERROR));
          LOG(FATAL) << ref;
        }
      };
      if (!mark_bitmap_->Set(obj, slow_path)) {
        // This object was not previously marked.
        MarkStackPush(obj);
      }
    }
  }
  return obj;
}

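// Marking phase, run with the mutator lock held exclusively: create the forwarding and lock
// word bitmaps, bind the immune space bitmaps, process and clear the card table, swap the
// allocation stacks, then mark roots, immune-space mod-union tables, and everything reachable
// from them. Ends by processing references and sweeping system weaks.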
void MarkCompact::MarkingPhase() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  Thread* self = Thread::Current();
  // Bitmap which describes which objects we have to move.
  objects_before_forwarding_.reset(accounting::ContinuousSpaceBitmap::Create(
      "objects before forwarding", space_->Begin(), space_->Size()));
  // Bitmap which describes which lock words we need to restore.
  objects_with_lockword_.reset(accounting::ContinuousSpaceBitmap::Create(
      "objects with lock words", space_->Begin(), space_->Size()));
  CHECK(Locks::mutator_lock_->IsExclusiveHeld(self));
  // Assume the cleared space is already empty.
  BindBitmaps();
  t.NewTiming("ProcessCards");
  // Process dirty cards and add dirty cards to mod-union tables.
  heap_->ProcessCards(GetTimings(), false, false, true);
  // Clear the whole card table since we cannot get any additional dirty cards during the
  // paused GC. This saves memory but only works for pause-the-world collectors.
  t.NewTiming("ClearCardTable");
  heap_->GetCardTable()->ClearCardTable();
  // Need to do this before the checkpoint since we don't want any threads to add references to
  // the live stack during the recursive mark.
  if (kUseThreadLocalAllocationStack) {
    t.NewTiming("RevokeAllThreadLocalAllocationStacks");
    heap_->RevokeAllThreadLocalAllocationStacks(self);
  }
  t.NewTiming("SwapStacks");
  heap_->SwapStacks();
  {
    WriterMutexLock mu(self, *Locks::heap_bitmap_lock_);
    MarkRoots();
    // Mark roots of immune spaces.
    UpdateAndMarkModUnion();
    // Recursively mark remaining objects.
    MarkReachableObjects();
  }
  ProcessReferences(self);
  {
    ReaderMutexLock mu(self, *Locks::heap_bitmap_lock_);
    SweepSystemWeaks();
  }
  Runtime::Current()->GetClassLinker()->CleanupClassLoaders();
  // Revoke the TLABs before counting how many objects were moved, since objects allocated in
  // TLABs are not properly counted until the buffers have been revoked.
  RevokeAllThreadLocalBuffers();
  // Disabled due to an issue where we have objects in the bump pointer space which reference dead
  // objects.
  // heap_->PreSweepingGcVerification(this);
}

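// For every immune space with a mod-union table, update the table and mark the references it
// records, so that objects in collected spaces that are only reachable from immune spaces are
// kept alive.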
void MarkCompact::UpdateAndMarkModUnion() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  for (auto& space : heap_->GetContinuousSpaces()) {
    // If the space is immune then we need to mark the references to other spaces.
    if (immune_spaces_.ContainsSpace(space)) {
      accounting::ModUnionTable* table = heap_->FindModUnionTableFromSpace(space);
      if (table != nullptr) {
        // TODO: Improve naming.
        TimingLogger::ScopedTiming t2(
            space->IsZygoteSpace() ? "UpdateAndMarkZygoteModUnionTable" :
                                     "UpdateAndMarkImageModUnionTable", GetTimings());
        table->UpdateAndMarkReferences(this);
      }
    }
  }
}

void MarkCompact::MarkReachableObjects() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  accounting::ObjectStack* live_stack = heap_->GetLiveStack();
  {
    TimingLogger::ScopedTiming t2("MarkAllocStackAsLive", GetTimings());
    heap_->MarkAllocStackAsLive(live_stack);
  }
  live_stack->Reset();
  // Recursively process the mark stack.
  ProcessMarkStack();
}

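// Reclaim phase: sweep unmarked objects in the other alloc spaces and the large object space,
// swap the live and mark bitmaps, then compact the bump pointer space.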
void MarkCompact::ReclaimPhase() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  WriterMutexLock mu(Thread::Current(), *Locks::heap_bitmap_lock_);
  // Reclaim unmarked objects.
  Sweep(false);
  // Swap the live and mark bitmaps for each space which we modified. This is an optimization
  // that enables us to not clear live bits inside of the sweep. Only swaps unbound bitmaps.
  SwapBitmaps();
  GetHeap()->UnBindBitmaps();  // Unbind the live and mark bitmaps.
  Compact();
}

void MarkCompact::ResizeMarkStack(size_t new_size) {
  std::vector<StackReference<mirror::Object>> temp(mark_stack_->Begin(), mark_stack_->End());
  CHECK_LE(mark_stack_->Size(), new_size);
  mark_stack_->Resize(new_size);
  for (auto& obj : temp) {
    mark_stack_->PushBack(obj.AsMirrorPtr());
  }
}

inline void MarkCompact::MarkStackPush(mirror::Object* obj) {
  if (UNLIKELY(mark_stack_->Size() >= mark_stack_->Capacity())) {
    ResizeMarkStack(mark_stack_->Capacity() * 2);
  }
  // The object must be pushed onto the mark stack.
  mark_stack_->PushBack(obj);
}

void MarkCompact::MarkHeapReference(mirror::HeapReference<mirror::Object>* obj_ptr,
                                    bool do_atomic_update ATTRIBUTE_UNUSED) {
  if (updating_references_) {
    UpdateHeapReference(obj_ptr);
  } else {
    MarkObject(obj_ptr->AsMirrorPtr());
  }
}

void MarkCompact::VisitRoots(
    mirror::Object*** roots, size_t count, const RootInfo& info ATTRIBUTE_UNUSED) {
  for (size_t i = 0; i < count; ++i) {
    MarkObject(*roots[i]);
  }
}

void MarkCompact::VisitRoots(
    mirror::CompressedReference<mirror::Object>** roots, size_t count,
    const RootInfo& info ATTRIBUTE_UNUSED) {
  for (size_t i = 0; i < count; ++i) {
    MarkObject(roots[i]->AsMirrorPtr());
  }
}

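// Root visitor that rewrites each root to the forwarding address of the object it refers to,
// leaving roots that point outside the moving space unchanged.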
class MarkCompact::UpdateRootVisitor : public RootVisitor {
 public:
  explicit UpdateRootVisitor(MarkCompact* collector) : collector_(collector) {}

  void VisitRoots(mirror::Object*** roots, size_t count, const RootInfo& info ATTRIBUTE_UNUSED)
      OVERRIDE REQUIRES(Locks::mutator_lock_)
      REQUIRES_SHARED(Locks::heap_bitmap_lock_) {
    for (size_t i = 0; i < count; ++i) {
      mirror::Object* obj = *roots[i];
      mirror::Object* new_obj = collector_->GetMarkedForwardAddress(obj);
      if (obj != new_obj) {
        *roots[i] = new_obj;
        DCHECK(new_obj != nullptr);
      }
    }
  }

  void VisitRoots(mirror::CompressedReference<mirror::Object>** roots, size_t count,
                  const RootInfo& info ATTRIBUTE_UNUSED)
      OVERRIDE REQUIRES(Locks::mutator_lock_)
      REQUIRES_SHARED(Locks::heap_bitmap_lock_) {
    for (size_t i = 0; i < count; ++i) {
      mirror::Object* obj = roots[i]->AsMirrorPtr();
      mirror::Object* new_obj = collector_->GetMarkedForwardAddress(obj);
      if (obj != new_obj) {
        roots[i]->Assign(new_obj);
        DCHECK(new_obj != nullptr);
      }
    }
  }

 private:
  MarkCompact* const collector_;
};

class MarkCompact::UpdateObjectReferencesVisitor {
 public:
  explicit UpdateObjectReferencesVisitor(MarkCompact* collector) : collector_(collector) {}

  void operator()(mirror::Object* obj) const REQUIRES_SHARED(Locks::heap_bitmap_lock_)
          REQUIRES(Locks::mutator_lock_) ALWAYS_INLINE {
    collector_->UpdateObjectReferences(obj);
  }

 private:
  MarkCompact* const collector_;
};

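// Rewrite every reference in the heap to point at the objects' forwarding addresses: runtime
// roots, references recorded in mod-union tables (or found by scanning live bitmaps), system
// weaks, objects in the bump pointer space, and the reference processor's cleared list.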
void MarkCompact::UpdateReferences() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  updating_references_ = true;
  Runtime* runtime = Runtime::Current();
  // Update roots.
  UpdateRootVisitor update_root_visitor(this);
  runtime->VisitRoots(&update_root_visitor);
  // Update object references in mod union tables and spaces.
  for (const auto& space : heap_->GetContinuousSpaces()) {
    // If the space has a mod-union table, use it to find the references that need updating.
    accounting::ModUnionTable* table = heap_->FindModUnionTableFromSpace(space);
    if (table != nullptr) {
      // TODO: Improve naming.
      TimingLogger::ScopedTiming t2(
          space->IsZygoteSpace() ? "UpdateZygoteModUnionTableReferences" :
                                   "UpdateImageModUnionTableReferences",
                                   GetTimings());
      table->UpdateAndMarkReferences(this);
    } else {
      // No mod-union table, so scan the space using a bitmap visit.
      accounting::ContinuousSpaceBitmap* bitmap = space->GetLiveBitmap();
      if (bitmap != nullptr) {
        UpdateObjectReferencesVisitor visitor(this);
        bitmap->VisitMarkedRange(reinterpret_cast<uintptr_t>(space->Begin()),
                                 reinterpret_cast<uintptr_t>(space->End()),
                                 visitor);
      }
    }
  }
  CHECK(!kMovingClasses)
      << "Didn't update large object classes since they are assumed to not move.";
  // Update the system weaks, these should already have been swept.
  runtime->SweepSystemWeaks(this);
  // Update the objects in the bump pointer space last, these objects don't have a bitmap.
  UpdateObjectReferencesVisitor visitor(this);
  objects_before_forwarding_->VisitMarkedRange(reinterpret_cast<uintptr_t>(space_->Begin()),
                                               reinterpret_cast<uintptr_t>(space_->End()),
                                               visitor);
  // Update the reference processor cleared list.
  heap_->GetReferenceProcessor()->UpdateRoots(this);
  updating_references_ = false;
}

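// Compact the bump pointer space in three steps: compute forwarding addresses, update all
// references, then move the objects. Afterwards, record the freed objects and bytes and
// shrink the space's end down to the final bump pointer.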
void MarkCompact::Compact() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  CalculateObjectForwardingAddresses();
  UpdateReferences();
  MoveObjects();
  // Account for the objects and bytes freed in the moving space.
  int64_t objects_freed = space_->GetObjectsAllocated() - live_objects_in_space_;
  int64_t bytes_freed = reinterpret_cast<int64_t>(space_->End()) -
      reinterpret_cast<int64_t>(bump_pointer_);
  t.NewTiming("RecordFree");
  space_->RecordFree(objects_freed, bytes_freed);
  RecordFree(ObjectBytePair(objects_freed, bytes_freed));
  space_->SetEnd(bump_pointer_);
  // Need to zero out the memory we freed. TODO: Use madvise for pages.
  memset(bump_pointer_, 0, bytes_freed);
}

// Marks all objects in the root set.
void MarkCompact::MarkRoots() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  Runtime::Current()->VisitRoots(this);
}

inline void MarkCompact::UpdateHeapReference(mirror::HeapReference<mirror::Object>* reference) {
  mirror::Object* obj = reference->AsMirrorPtr();
  if (obj != nullptr) {
    mirror::Object* new_obj = GetMarkedForwardAddress(obj);
    if (obj != new_obj) {
      DCHECK(new_obj != nullptr);
      reference->Assign(new_obj);
    }
  }
}

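// Field visitor used while updating references: rewrites each instance field, Reference
// referent, and native root to the forwarding address of its target.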
class MarkCompact::UpdateReferenceVisitor {
 public:
  explicit UpdateReferenceVisitor(MarkCompact* collector) : collector_(collector) {}

  void operator()(mirror::Object* obj, MemberOffset offset, bool /*is_static*/) const
      ALWAYS_INLINE REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
    collector_->UpdateHeapReference(obj->GetFieldObjectReferenceAddr<kVerifyNone>(offset));
  }

  void operator()(ObjPtr<mirror::Class> /*klass*/, mirror::Reference* ref) const
      REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
    collector_->UpdateHeapReference(
        ref->GetFieldObjectReferenceAddr<kVerifyNone>(mirror::Reference::ReferentOffset()));
  }

  // TODO: Remove NO_THREAD_SAFETY_ANALYSIS when clang better understands visitors.
  void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root) const
      NO_THREAD_SAFETY_ANALYSIS {
    if (!root->IsNull()) {
      VisitRoot(root);
    }
  }

  void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
      NO_THREAD_SAFETY_ANALYSIS {
    root->Assign(collector_->GetMarkedForwardAddress(root->AsMirrorPtr()));
  }

 private:
  MarkCompact* const collector_;
};

void MarkCompact::UpdateObjectReferences(mirror::Object* obj) {
  UpdateReferenceVisitor visitor(this);
  obj->VisitReferences(visitor, visitor);
}

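// Return the post-compaction address of obj. For objects in the moving space this is the
// forwarding address stored in the lock word; objects outside the space do not move and are
// returned unchanged.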
inline mirror::Object* MarkCompact::GetMarkedForwardAddress(mirror::Object* obj) {
  DCHECK(obj != nullptr);
  if (objects_before_forwarding_->HasAddress(obj)) {
    DCHECK(objects_before_forwarding_->Test(obj));
    mirror::Object* ret =
        reinterpret_cast<mirror::Object*>(obj->GetLockWord(false).ForwardingAddress());
    DCHECK(ret != nullptr);
    return ret;
  }
  DCHECK(!space_->HasAddress(obj));
  return obj;
}

mirror::Object* MarkCompact::IsMarked(mirror::Object* object) {
  if (immune_spaces_.IsInImmuneRegion(object)) {
    return object;
  }
  if (updating_references_) {
    return GetMarkedForwardAddress(object);
  }
  if (objects_before_forwarding_->HasAddress(object)) {
    return objects_before_forwarding_->Test(object) ? object : nullptr;
  }
  return mark_bitmap_->Test(object) ? object : nullptr;
}

bool MarkCompact::IsNullOrMarkedHeapReference(mirror::HeapReference<mirror::Object>* ref_ptr,
                                              // MarkCompact does the GC in a pause. No CAS needed.
                                              bool do_atomic_update ATTRIBUTE_UNUSED) {
  // Side effect free since we call this before ever moving objects.
  mirror::Object* obj = ref_ptr->AsMirrorPtr();
  if (obj == nullptr) {
    return true;
  }
  return IsMarked(obj) != nullptr;
}

void MarkCompact::SweepSystemWeaks() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  Runtime::Current()->SweepSystemWeaks(this);
}

bool MarkCompact::ShouldSweepSpace(space::ContinuousSpace* space) const {
  return space != space_ && !immune_spaces_.ContainsSpace(space);
}

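// Copy obj to its forwarding address. Since forwarding addresses were assigned in ascending
// address order, the destination never overlaps an object that has not yet been moved; it may
// overlap the source itself, which is why memmove is used below.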
void MarkCompact::MoveObject(mirror::Object* obj, size_t len) {
  // Look at the forwarding address stored in the lock word to know where to copy.
  DCHECK(space_->HasAddress(obj)) << obj;
  uintptr_t dest_addr = obj->GetLockWord(false).ForwardingAddress();
  mirror::Object* dest_obj = reinterpret_cast<mirror::Object*>(dest_addr);
  DCHECK(space_->HasAddress(dest_obj)) << dest_obj;
  // Use memmove since there may be overlap.
  memmove(reinterpret_cast<void*>(dest_addr), reinterpret_cast<const void*>(obj), len);
  // Restore the saved lock word if needed.
  LockWord lock_word = LockWord::Default();
  if (UNLIKELY(objects_with_lockword_->Test(obj))) {
    lock_word = lock_words_to_restore_.front();
    lock_words_to_restore_.pop_front();
  }
  dest_obj->SetLockWord(lock_word, false);
}

void MarkCompact::MoveObjects() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  // Move the objects in the before forwarding bitmap.
  objects_before_forwarding_->VisitMarkedRange(reinterpret_cast<uintptr_t>(space_->Begin()),
                                               reinterpret_cast<uintptr_t>(space_->End()),
                                               [this](mirror::Object* obj)
      REQUIRES_SHARED(Locks::heap_bitmap_lock_)
      REQUIRES(Locks::mutator_lock_) ALWAYS_INLINE {
    MoveObject(obj, obj->SizeOf());
  });
  CHECK(lock_words_to_restore_.empty());
}

void MarkCompact::Sweep(bool swap_bitmaps) {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  DCHECK(mark_stack_->IsEmpty());
  for (const auto& space : GetHeap()->GetContinuousSpaces()) {
    if (space->IsContinuousMemMapAllocSpace()) {
      space::ContinuousMemMapAllocSpace* alloc_space = space->AsContinuousMemMapAllocSpace();
      if (!ShouldSweepSpace(alloc_space)) {
        continue;
      }
      TimingLogger::ScopedTiming t2(
          alloc_space->IsZygoteSpace() ? "SweepZygoteSpace" : "SweepAllocSpace", GetTimings());
      RecordFree(alloc_space->Sweep(swap_bitmaps));
    }
  }
  SweepLargeObjects(swap_bitmaps);
}

void MarkCompact::SweepLargeObjects(bool swap_bitmaps) {
  space::LargeObjectSpace* los = heap_->GetLargeObjectsSpace();
  if (los != nullptr) {
    TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
    RecordFreeLOS(los->Sweep(swap_bitmaps));
  }
}

// Process the "referent" field in a java.lang.ref.Reference.  If the referent has not yet been
// marked, put it on the appropriate list in the heap for later processing.
void MarkCompact::DelayReferenceReferent(ObjPtr<mirror::Class> klass,
                                         ObjPtr<mirror::Reference> reference) {
  heap_->GetReferenceProcessor()->DelayReferenceReferent(klass, reference, this);
}

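// Field visitor used while scanning a marked object: marks every object reachable from its
// fields and delays java.lang.ref.Reference referents for later processing.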
class MarkCompact::MarkObjectVisitor {
 public:
  explicit MarkObjectVisitor(MarkCompact* collector) : collector_(collector) {}

  void operator()(ObjPtr<mirror::Object> obj,
                  MemberOffset offset,
                  bool /*is_static*/) const ALWAYS_INLINE
      REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
    // Object was already verified when we scanned it.
    collector_->MarkObject(obj->GetFieldObject<mirror::Object, kVerifyNone>(offset));
  }

  void operator()(ObjPtr<mirror::Class> klass,
                  ObjPtr<mirror::Reference> ref) const
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(Locks::heap_bitmap_lock_) {
    collector_->DelayReferenceReferent(klass, ref);
  }

  // TODO: Remove NO_THREAD_SAFETY_ANALYSIS when clang better understands visitors.
  void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root) const
      NO_THREAD_SAFETY_ANALYSIS {
    if (!root->IsNull()) {
      VisitRoot(root);
    }
  }

  void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
      NO_THREAD_SAFETY_ANALYSIS {
    collector_->MarkObject(root->AsMirrorPtr());
  }

 private:
  MarkCompact* const collector_;
};

// Visit all of the references of an object and mark them.
void MarkCompact::ScanObject(mirror::Object* obj) {
  MarkObjectVisitor visitor(this);
  obj->VisitReferences(visitor, visitor);
}

// Scan anything that's on the mark stack.
void MarkCompact::ProcessMarkStack() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  while (!mark_stack_->IsEmpty()) {
    mirror::Object* obj = mark_stack_->PopBack();
    DCHECK(obj != nullptr);
    ScanObject(obj);
  }
}

void MarkCompact::SetSpace(space::BumpPointerSpace* space) {
  DCHECK(space != nullptr);
  space_ = space;
}

void MarkCompact::FinishPhase() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  space_ = nullptr;
  CHECK(mark_stack_->IsEmpty());
  mark_stack_->Reset();
  // Clear all of the spaces' mark bitmaps.
  WriterMutexLock mu(Thread::Current(), *Locks::heap_bitmap_lock_);
  heap_->ClearMarkedObjects();
  // Release our bitmaps.
  objects_before_forwarding_.reset(nullptr);
  objects_with_lockword_.reset(nullptr);
}

void MarkCompact::RevokeAllThreadLocalBuffers() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  GetHeap()->RevokeAllThreadLocalBuffers();
}

}  // namespace collector
}  // namespace gc
}  // namespace art