/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "mark_compact.h"

#include "base/logging.h"
#include "base/mutex-inl.h"
#include "base/timing_logger.h"
#include "gc/accounting/heap_bitmap-inl.h"
#include "gc/accounting/mod_union_table.h"
#include "gc/accounting/space_bitmap-inl.h"
#include "gc/heap.h"
#include "gc/reference_processor.h"
#include "gc/space/bump_pointer_space-inl.h"
#include "gc/space/large_object_space.h"
#include "gc/space/space-inl.h"
#include "mirror/class-inl.h"
#include "mirror/object-inl.h"
#include "runtime.h"
#include "stack.h"
#include "thread-inl.h"
#include "thread_list.h"

namespace art {
namespace gc {
namespace collector {

void MarkCompact::BindBitmaps() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  WriterMutexLock mu(Thread::Current(), *Locks::heap_bitmap_lock_);
  // Mark all of the spaces we never collect as immune.
  for (const auto& space : GetHeap()->GetContinuousSpaces()) {
    if (space->GetGcRetentionPolicy() == space::kGcRetentionPolicyNeverCollect ||
        space->GetGcRetentionPolicy() == space::kGcRetentionPolicyFullCollect) {
      immune_spaces_.AddSpace(space);
    }
  }
}

MarkCompact::MarkCompact(Heap* heap, const std::string& name_prefix)
    : GarbageCollector(heap, name_prefix + (name_prefix.empty() ? "" : " ") + "mark compact"),
      space_(nullptr),
      collector_name_(name_),
      updating_references_(false) {}

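// Runs the full collection. Marking, reclamation and compaction all happen inside a single
// pause, so mutators never observe a partially compacted heap.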
void MarkCompact::RunPhases() {
  Thread* self = Thread::Current();
  InitializePhase();
  CHECK(!Locks::mutator_lock_->IsExclusiveHeld(self));
  {
    ScopedPause pause(this);
    GetHeap()->PreGcVerificationPaused(this);
    GetHeap()->PrePauseRosAllocVerification(this);
    MarkingPhase();
    ReclaimPhase();
  }
  GetHeap()->PostGcVerification(this);
  FinishPhase();
}

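// Computes the post-compaction address of obj by bump pointer allocation in the same space and
// stashes it in the object's lock word. Non-default lock words are saved first so that
// MoveObject() can restore them once the object has been copied.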
void MarkCompact::ForwardObject(mirror::Object* obj) {
  const size_t alloc_size = RoundUp(obj->SizeOf(), space::BumpPointerSpace::kAlignment);
  LockWord lock_word = obj->GetLockWord(false);
  // If we have a non-empty lock word, store it and restore it later.
  if (!LockWord::IsDefault(lock_word)) {
    // Set the bit in the bitmap so that we know to restore it later.
    objects_with_lockword_->Set(obj);
    lock_words_to_restore_.push_back(lock_word);
  }
  obj->SetLockWord(LockWord::FromForwardingAddress(reinterpret_cast<size_t>(bump_pointer_)),
                   false);
  bump_pointer_ += alloc_size;
  ++live_objects_in_space_;
}

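// Assigns forwarding addresses to all marked objects in address order, sliding the live objects
// towards the beginning of the space.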
void MarkCompact::CalculateObjectForwardingAddresses() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  // The bump pointer in the space where the next forwarding address will be.
  bump_pointer_ = reinterpret_cast<uint8_t*>(space_->Begin());
  // Visit all the marked objects in the bitmap.
  objects_before_forwarding_->VisitMarkedRange(reinterpret_cast<uintptr_t>(space_->Begin()),
                                               reinterpret_cast<uintptr_t>(space_->End()),
                                               [this](mirror::Object* obj)
      REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
    DCHECK_ALIGNED(obj, space::BumpPointerSpace::kAlignment);
    DCHECK(IsMarked(obj) != nullptr);
    ForwardObject(obj);
  });
}

void MarkCompact::InitializePhase() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  mark_stack_ = heap_->GetMarkStack();
  DCHECK(mark_stack_ != nullptr);
  immune_spaces_.Reset();
  CHECK(space_->CanMoveObjects()) << "Attempting to compact non-movable space " << *space_;
  // TODO: I don't think we should need the heap bitmap lock to get the mark bitmap.
  ReaderMutexLock mu(Thread::Current(), *Locks::heap_bitmap_lock_);
  mark_bitmap_ = heap_->GetMarkBitmap();
  live_objects_in_space_ = 0;
}

void MarkCompact::ProcessReferences(Thread* self) {
  WriterMutexLock mu(self, *Locks::heap_bitmap_lock_);
  heap_->GetReferenceProcessor()->ProcessReferences(
      false, GetTimings(), GetCurrentIteration()->GetClearSoftReferences(), this);
}

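// Marks obj and pushes it on the mark stack if it was not already marked. Objects in the space
// being compacted are tracked in the objects_before_forwarding_ bitmap; all other objects go
// through the heap mark bitmap.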
inline mirror::Object* MarkCompact::MarkObject(mirror::Object* obj) {
  if (obj == nullptr) {
    return nullptr;
  }
  if (kUseBakerOrBrooksReadBarrier) {
    // Verify that the object has the correct forward pointer installed.
    obj->AssertReadBarrierPointer();
  }
  if (!immune_spaces_.IsInImmuneRegion(obj)) {
    if (objects_before_forwarding_->HasAddress(obj)) {
      if (!objects_before_forwarding_->Set(obj)) {
        MarkStackPush(obj);  // This object was not previously marked.
      }
    } else {
      DCHECK(!space_->HasAddress(obj));
      auto slow_path = [this](const mirror::Object* ref)
          SHARED_REQUIRES(Locks::mutator_lock_) {
        // Marking a large object, make sure it's aligned as a sanity check.
        if (!IsAligned<kPageSize>(ref)) {
          Runtime::Current()->GetHeap()->DumpSpaces(LOG(ERROR));
          LOG(FATAL) << ref;
        }
      };
      if (!mark_bitmap_->Set(obj, slow_path)) {
        // This object was not previously marked.
        MarkStackPush(obj);
      }
    }
  }
  return obj;
}

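// Paused marking: set up the forwarding and lock word bitmaps, process the card tables, then
// mark everything reachable from the roots and the immune space mod-union tables, and finally
// process references and sweep system weaks.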
void MarkCompact::MarkingPhase() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  Thread* self = Thread::Current();
  // Bitmap which describes which objects we have to move.
  objects_before_forwarding_.reset(accounting::ContinuousSpaceBitmap::Create(
      "objects before forwarding", space_->Begin(), space_->Size()));
  // Bitmap which describes which lock words we need to restore.
  objects_with_lockword_.reset(accounting::ContinuousSpaceBitmap::Create(
      "objects with lock words", space_->Begin(), space_->Size()));
  CHECK(Locks::mutator_lock_->IsExclusiveHeld(self));
  // Assume the cleared space is already empty.
  BindBitmaps();
  t.NewTiming("ProcessCards");
  // Process dirty cards and add dirty cards to mod-union tables.
  heap_->ProcessCards(GetTimings(), false, false, true);
  // Clear the whole card table since we cannot get any additional dirty cards during the
  // paused GC. This saves memory but only works for pause-the-world collectors.
  t.NewTiming("ClearCardTable");
  heap_->GetCardTable()->ClearCardTable();
  // Need to do this before the checkpoint since we don't want any threads to add references to
  // the live stack during the recursive mark.
  if (kUseThreadLocalAllocationStack) {
    t.NewTiming("RevokeAllThreadLocalAllocationStacks");
    heap_->RevokeAllThreadLocalAllocationStacks(self);
  }
  t.NewTiming("SwapStacks");
  heap_->SwapStacks();
  {
    WriterMutexLock mu(self, *Locks::heap_bitmap_lock_);
    MarkRoots();
    // Mark roots of immune spaces.
    UpdateAndMarkModUnion();
    // Recursively mark remaining objects.
    MarkReachableObjects();
  }
  ProcessReferences(self);
  {
    ReaderMutexLock mu(self, *Locks::heap_bitmap_lock_);
    SweepSystemWeaks();
  }
  Runtime::Current()->GetClassLinker()->CleanupClassLoaders();
  // Revoke buffers before measuring how many objects were moved since the TLABs need to be revoked
  // before they are properly counted.
  RevokeAllThreadLocalBuffers();
  // Disabled due to an issue where we have objects in the bump pointer space which reference dead
  // objects.
  // heap_->PreSweepingGcVerification(this);
}

void MarkCompact::UpdateAndMarkModUnion() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  for (auto& space : heap_->GetContinuousSpaces()) {
    // If the space is immune then we need to mark the references to other spaces.
    if (immune_spaces_.ContainsSpace(space)) {
      accounting::ModUnionTable* table = heap_->FindModUnionTableFromSpace(space);
      if (table != nullptr) {
        // TODO: Improve naming.
        TimingLogger::ScopedTiming t2(
            space->IsZygoteSpace() ? "UpdateAndMarkZygoteModUnionTable" :
                                     "UpdateAndMarkImageModUnionTable", GetTimings());
        table->UpdateAndMarkReferences(this);
      }
    }
  }
}

void MarkCompact::MarkReachableObjects() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  accounting::ObjectStack* live_stack = heap_->GetLiveStack();
  {
    TimingLogger::ScopedTiming t2("MarkAllocStackAsLive", GetTimings());
    heap_->MarkAllocStackAsLive(live_stack);
  }
  live_stack->Reset();
  // Recursively process the mark stack.
  ProcessMarkStack();
}

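// Sweeps dead objects in the spaces we do not compact, then compacts the bump pointer space.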
void MarkCompact::ReclaimPhase() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  WriterMutexLock mu(Thread::Current(), *Locks::heap_bitmap_lock_);
  // Reclaim unmarked objects.
  Sweep(false);
  // Swap the live and mark bitmaps for each space which we modified. This is an optimization
  // that enables us to not clear live bits inside of the sweep. Only swaps unbound bitmaps.
  SwapBitmaps();
  GetHeap()->UnBindBitmaps();  // Unbind the live and mark bitmaps.
  Compact();
}

void MarkCompact::ResizeMarkStack(size_t new_size) {
  std::vector<StackReference<mirror::Object>> temp(mark_stack_->Begin(), mark_stack_->End());
  CHECK_LE(mark_stack_->Size(), new_size);
  mark_stack_->Resize(new_size);
  for (auto& obj : temp) {
    mark_stack_->PushBack(obj.AsMirrorPtr());
  }
}

inline void MarkCompact::MarkStackPush(mirror::Object* obj) {
  if (UNLIKELY(mark_stack_->Size() >= mark_stack_->Capacity())) {
    ResizeMarkStack(mark_stack_->Capacity() * 2);
  }
  // The object must be pushed on to the mark stack.
  mark_stack_->PushBack(obj);
}

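// Called both while marking and while updating references: marks the referent in the first case
// and rewrites the reference to its forwarding address in the second.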
void MarkCompact::MarkHeapReference(mirror::HeapReference<mirror::Object>* obj_ptr) {
  if (updating_references_) {
    UpdateHeapReference(obj_ptr);
  } else {
    MarkObject(obj_ptr->AsMirrorPtr());
  }
}

void MarkCompact::VisitRoots(
    mirror::Object*** roots, size_t count, const RootInfo& info ATTRIBUTE_UNUSED) {
  for (size_t i = 0; i < count; ++i) {
    MarkObject(*roots[i]);
  }
}

void MarkCompact::VisitRoots(
    mirror::CompressedReference<mirror::Object>** roots, size_t count,
    const RootInfo& info ATTRIBUTE_UNUSED) {
  for (size_t i = 0; i < count; ++i) {
    MarkObject(roots[i]->AsMirrorPtr());
  }
}

class MarkCompact::UpdateRootVisitor : public RootVisitor {
 public:
  explicit UpdateRootVisitor(MarkCompact* collector) : collector_(collector) {}

  void VisitRoots(mirror::Object*** roots, size_t count, const RootInfo& info ATTRIBUTE_UNUSED)
      OVERRIDE REQUIRES(Locks::mutator_lock_)
      SHARED_REQUIRES(Locks::heap_bitmap_lock_) {
    for (size_t i = 0; i < count; ++i) {
      mirror::Object* obj = *roots[i];
      mirror::Object* new_obj = collector_->GetMarkedForwardAddress(obj);
      if (obj != new_obj) {
        *roots[i] = new_obj;
        DCHECK(new_obj != nullptr);
      }
    }
  }

  void VisitRoots(mirror::CompressedReference<mirror::Object>** roots, size_t count,
                  const RootInfo& info ATTRIBUTE_UNUSED)
      OVERRIDE REQUIRES(Locks::mutator_lock_)
      SHARED_REQUIRES(Locks::heap_bitmap_lock_) {
    for (size_t i = 0; i < count; ++i) {
      mirror::Object* obj = roots[i]->AsMirrorPtr();
      mirror::Object* new_obj = collector_->GetMarkedForwardAddress(obj);
      if (obj != new_obj) {
        roots[i]->Assign(new_obj);
        DCHECK(new_obj != nullptr);
      }
    }
  }

 private:
  MarkCompact* const collector_;
};

class MarkCompact::UpdateObjectReferencesVisitor {
 public:
  explicit UpdateObjectReferencesVisitor(MarkCompact* collector) : collector_(collector) {}

  void operator()(mirror::Object* obj) const SHARED_REQUIRES(Locks::heap_bitmap_lock_)
          REQUIRES(Locks::mutator_lock_) ALWAYS_INLINE {
    collector_->UpdateObjectReferences(obj);
  }

 private:
  MarkCompact* const collector_;
};

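// Rewrites every reference in the heap (roots, mod-union tables, objects in all spaces and
// system weaks) so that it points at the forwarding address of its target.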
void MarkCompact::UpdateReferences() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  updating_references_ = true;
  Runtime* runtime = Runtime::Current();
  // Update roots.
  UpdateRootVisitor update_root_visitor(this);
  runtime->VisitRoots(&update_root_visitor);
  // Update object references in mod union tables and spaces.
  for (const auto& space : heap_->GetContinuousSpaces()) {
    // If the space has a mod union table, update the references through it.
    accounting::ModUnionTable* table = heap_->FindModUnionTableFromSpace(space);
    if (table != nullptr) {
      // TODO: Improve naming.
      TimingLogger::ScopedTiming t2(
          space->IsZygoteSpace() ? "UpdateZygoteModUnionTableReferences" :
                                   "UpdateImageModUnionTableReferences",
                                   GetTimings());
      table->UpdateAndMarkReferences(this);
    } else {
      // No mod union table, so we need to scan the space using a bitmap visit.
      accounting::ContinuousSpaceBitmap* bitmap = space->GetLiveBitmap();
      if (bitmap != nullptr) {
        UpdateObjectReferencesVisitor visitor(this);
        bitmap->VisitMarkedRange(reinterpret_cast<uintptr_t>(space->Begin()),
                                 reinterpret_cast<uintptr_t>(space->End()),
                                 visitor);
      }
    }
  }
  CHECK(!kMovingClasses)
      << "Didn't update large object classes since they are assumed to not move.";
  // Update the system weaks; these should already have been swept.
  runtime->SweepSystemWeaks(this);
  // Update the objects in the bump pointer space last; these objects don't have a live bitmap.
  UpdateObjectReferencesVisitor visitor(this);
  objects_before_forwarding_->VisitMarkedRange(reinterpret_cast<uintptr_t>(space_->Begin()),
                                               reinterpret_cast<uintptr_t>(space_->End()),
                                               visitor);
  // Update the reference processor cleared list.
  heap_->GetReferenceProcessor()->UpdateRoots(this);
  updating_references_ = false;
}

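// Performs the compaction itself: compute forwarding addresses, rewrite all references, slide
// the objects down, then shrink the space and zero out the reclaimed tail.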
void MarkCompact::Compact() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  CalculateObjectForwardingAddresses();
  UpdateReferences();
  MoveObjects();
  // Record the space freed by the compaction.
  int64_t objects_freed = space_->GetObjectsAllocated() - live_objects_in_space_;
  int64_t bytes_freed = reinterpret_cast<int64_t>(space_->End()) -
      reinterpret_cast<int64_t>(bump_pointer_);
  t.NewTiming("RecordFree");
  space_->RecordFree(objects_freed, bytes_freed);
  RecordFree(ObjectBytePair(objects_freed, bytes_freed));
  space_->SetEnd(bump_pointer_);
  // Need to zero out the memory we freed. TODO: Use madvise for pages.
  memset(bump_pointer_, 0, bytes_freed);
}

// Marks all objects in the root set.
void MarkCompact::MarkRoots() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  Runtime::Current()->VisitRoots(this);
}

inline void MarkCompact::UpdateHeapReference(mirror::HeapReference<mirror::Object>* reference) {
  mirror::Object* obj = reference->AsMirrorPtr();
  if (obj != nullptr) {
    mirror::Object* new_obj = GetMarkedForwardAddress(obj);
    if (obj != new_obj) {
      DCHECK(new_obj != nullptr);
      reference->Assign(new_obj);
    }
  }
}

class MarkCompact::UpdateReferenceVisitor {
 public:
  explicit UpdateReferenceVisitor(MarkCompact* collector) : collector_(collector) {}

  void operator()(mirror::Object* obj, MemberOffset offset, bool /*is_static*/) const
      ALWAYS_INLINE REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
    collector_->UpdateHeapReference(obj->GetFieldObjectReferenceAddr<kVerifyNone>(offset));
  }

  void operator()(mirror::Class* /*klass*/, mirror::Reference* ref) const
      REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
    collector_->UpdateHeapReference(
        ref->GetFieldObjectReferenceAddr<kVerifyNone>(mirror::Reference::ReferentOffset()));
  }

  // TODO: Remove NO_THREAD_SAFETY_ANALYSIS when clang better understands visitors.
  void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root) const
      NO_THREAD_SAFETY_ANALYSIS {
    if (!root->IsNull()) {
      VisitRoot(root);
    }
  }

  void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
      NO_THREAD_SAFETY_ANALYSIS {
    root->Assign(collector_->GetMarkedForwardAddress(root->AsMirrorPtr()));
  }

 private:
  MarkCompact* const collector_;
};

void MarkCompact::UpdateObjectReferences(mirror::Object* obj) {
  UpdateReferenceVisitor visitor(this);
  obj->VisitReferences(visitor, visitor);
}

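// Returns the post-compaction address of obj. Objects in the space being compacted carry their
// forwarding address in the lock word; objects outside of it do not move.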
inline mirror::Object* MarkCompact::GetMarkedForwardAddress(mirror::Object* obj) {
  DCHECK(obj != nullptr);
  if (objects_before_forwarding_->HasAddress(obj)) {
    DCHECK(objects_before_forwarding_->Test(obj));
    mirror::Object* ret =
        reinterpret_cast<mirror::Object*>(obj->GetLockWord(false).ForwardingAddress());
    DCHECK(ret != nullptr);
    return ret;
  }
  DCHECK(!space_->HasAddress(obj));
  return obj;
}

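// Returns the object if it is marked and null otherwise. While references are being updated,
// returns the forwarding address of a marked object instead.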
mirror::Object* MarkCompact::IsMarked(mirror::Object* object) {
  if (immune_spaces_.IsInImmuneRegion(object)) {
    return object;
  }
  if (updating_references_) {
    return GetMarkedForwardAddress(object);
  }
  if (objects_before_forwarding_->HasAddress(object)) {
    return objects_before_forwarding_->Test(object) ? object : nullptr;
  }
  return mark_bitmap_->Test(object) ? object : nullptr;
}

bool MarkCompact::IsMarkedHeapReference(mirror::HeapReference<mirror::Object>* ref_ptr) {
  // Side effect free since we call this before ever moving objects.
  return IsMarked(ref_ptr->AsMirrorPtr()) != nullptr;
}

void MarkCompact::SweepSystemWeaks() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  Runtime::Current()->SweepSystemWeaks(this);
}

bool MarkCompact::ShouldSweepSpace(space::ContinuousSpace* space) const {
  return space != space_ && !immune_spaces_.ContainsSpace(space);
}

void MarkCompact::MoveObject(mirror::Object* obj, size_t len) {
  // Look at the forwarding address stored in the lock word to know where to copy.
  DCHECK(space_->HasAddress(obj)) << obj;
  uintptr_t dest_addr = obj->GetLockWord(false).ForwardingAddress();
  mirror::Object* dest_obj = reinterpret_cast<mirror::Object*>(dest_addr);
  DCHECK(space_->HasAddress(dest_obj)) << dest_obj;
  // Use memmove since there may be overlap.
  memmove(reinterpret_cast<void*>(dest_addr), reinterpret_cast<const void*>(obj), len);
  // Restore the saved lock word if needed.
  LockWord lock_word = LockWord::Default();
  if (UNLIKELY(objects_with_lockword_->Test(obj))) {
    lock_word = lock_words_to_restore_.front();
    lock_words_to_restore_.pop_front();
  }
  dest_obj->SetLockWord(lock_word, false);
}

void MarkCompact::MoveObjects() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  // Move the objects in the before forwarding bitmap.
  objects_before_forwarding_->VisitMarkedRange(reinterpret_cast<uintptr_t>(space_->Begin()),
                                               reinterpret_cast<uintptr_t>(space_->End()),
                                               [this](mirror::Object* obj)
      SHARED_REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(Locks::mutator_lock_) ALWAYS_INLINE {
    MoveObject(obj, obj->SizeOf());
  });
  CHECK(lock_words_to_restore_.empty());
}

void MarkCompact::Sweep(bool swap_bitmaps) {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  DCHECK(mark_stack_->IsEmpty());
  for (const auto& space : GetHeap()->GetContinuousSpaces()) {
    if (space->IsContinuousMemMapAllocSpace()) {
      space::ContinuousMemMapAllocSpace* alloc_space = space->AsContinuousMemMapAllocSpace();
      if (!ShouldSweepSpace(alloc_space)) {
        continue;
      }
      TimingLogger::ScopedTiming t2(
          alloc_space->IsZygoteSpace() ? "SweepZygoteSpace" : "SweepAllocSpace", GetTimings());
      RecordFree(alloc_space->Sweep(swap_bitmaps));
    }
  }
  SweepLargeObjects(swap_bitmaps);
}

void MarkCompact::SweepLargeObjects(bool swap_bitmaps) {
  space::LargeObjectSpace* los = heap_->GetLargeObjectsSpace();
  if (los != nullptr) {
    TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
    RecordFreeLOS(los->Sweep(swap_bitmaps));
  }
}

// Process the "referent" field in a java.lang.ref.Reference.  If the referent has not yet been
// marked, put it on the appropriate list in the heap for later processing.
void MarkCompact::DelayReferenceReferent(mirror::Class* klass, mirror::Reference* reference) {
  heap_->GetReferenceProcessor()->DelayReferenceReferent(klass, reference, this);
}

class MarkCompact::MarkObjectVisitor {
 public:
  explicit MarkObjectVisitor(MarkCompact* collector) : collector_(collector) {}

  void operator()(mirror::Object* obj, MemberOffset offset, bool /*is_static*/) const ALWAYS_INLINE
      REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
    // Object was already verified when we scanned it.
    collector_->MarkObject(obj->GetFieldObject<mirror::Object, kVerifyNone>(offset));
  }

  void operator()(mirror::Class* klass, mirror::Reference* ref) const
      SHARED_REQUIRES(Locks::mutator_lock_)
      REQUIRES(Locks::heap_bitmap_lock_) {
    collector_->DelayReferenceReferent(klass, ref);
  }

  // TODO: Remove NO_THREAD_SAFETY_ANALYSIS when clang better understands visitors.
  void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root) const
      NO_THREAD_SAFETY_ANALYSIS {
    if (!root->IsNull()) {
      VisitRoot(root);
    }
  }

  void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
      NO_THREAD_SAFETY_ANALYSIS {
    collector_->MarkObject(root->AsMirrorPtr());
  }

 private:
  MarkCompact* const collector_;
};

// Visit all of the references of an object and mark them.
void MarkCompact::ScanObject(mirror::Object* obj) {
  MarkObjectVisitor visitor(this);
  obj->VisitReferences(visitor, visitor);
}

// Scan anything that's on the mark stack.
void MarkCompact::ProcessMarkStack() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  while (!mark_stack_->IsEmpty()) {
    mirror::Object* obj = mark_stack_->PopBack();
    DCHECK(obj != nullptr);
    ScanObject(obj);
  }
}

void MarkCompact::SetSpace(space::BumpPointerSpace* space) {
  DCHECK(space != nullptr);
  space_ = space;
}

void MarkCompact::FinishPhase() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  space_ = nullptr;
  CHECK(mark_stack_->IsEmpty());
  mark_stack_->Reset();
  // Clear all of the spaces' mark bitmaps.
  WriterMutexLock mu(Thread::Current(), *Locks::heap_bitmap_lock_);
  heap_->ClearMarkedObjects();
  // Release our bitmaps.
  objects_before_forwarding_.reset(nullptr);
  objects_with_lockword_.reset(nullptr);
}

void MarkCompact::RevokeAllThreadLocalBuffers() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  GetHeap()->RevokeAllThreadLocalBuffers();
}

}  // namespace collector
}  // namespace gc
}  // namespace art