/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "mark_compact.h"

#include "base/logging.h"
#include "base/mutex-inl.h"
#include "base/timing_logger.h"
#include "gc/accounting/heap_bitmap-inl.h"
#include "gc/accounting/mod_union_table.h"
#include "gc/accounting/space_bitmap-inl.h"
#include "gc/heap.h"
#include "gc/reference_processor.h"
#include "gc/space/bump_pointer_space-inl.h"
#include "gc/space/large_object_space.h"
#include "gc/space/space-inl.h"
#include "mirror/class-inl.h"
#include "mirror/object-inl.h"
#include "mirror/object-refvisitor-inl.h"
#include "runtime.h"
#include "stack.h"
#include "thread-inl.h"
#include "thread_list.h"

namespace art {
namespace gc {
namespace collector {

void MarkCompact::BindBitmaps() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  WriterMutexLock mu(Thread::Current(), *Locks::heap_bitmap_lock_);
  // Mark all of the spaces we never compact (never-collect and full-collect spaces) as immune.
  for (const auto& space : GetHeap()->GetContinuousSpaces()) {
    if (space->GetGcRetentionPolicy() == space::kGcRetentionPolicyNeverCollect ||
        space->GetGcRetentionPolicy() == space::kGcRetentionPolicyFullCollect) {
      immune_spaces_.AddSpace(space);
    }
  }
}

MarkCompact::MarkCompact(Heap* heap, const std::string& name_prefix)
    : GarbageCollector(heap, name_prefix + (name_prefix.empty() ? "" : " ") + "mark compact"),
      mark_stack_(nullptr),
      space_(nullptr),
      mark_bitmap_(nullptr),
      collector_name_(name_),
      bump_pointer_(nullptr),
      live_objects_in_space_(0),
      updating_references_(false) {}

void MarkCompact::RunPhases() {
  Thread* self = Thread::Current();
  InitializePhase();
  CHECK(!Locks::mutator_lock_->IsExclusiveHeld(self));
  {
    ScopedPause pause(this);
    GetHeap()->PreGcVerificationPaused(this);
    GetHeap()->PrePauseRosAllocVerification(this);
    MarkingPhase();
    ReclaimPhase();
  }
  GetHeap()->PostGcVerification(this);
  FinishPhase();
}

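// Forwarding addresses are installed directly in each object's lock word. Any non-default
// lock word (for example, a thin lock or an installed hash code) would be clobbered by this,
// so it is saved in lock_words_to_restore_ and the object is flagged in objects_with_lockword_;
// MoveObject() restores the saved word later. Because both the forwarding and moving passes
// visit marked objects in ascending address order, a simple FIFO deque keeps the saved words
// matched to their objects.
// Illustrative example (sizes are hypothetical): if the first two marked objects are 16 and
// 24 bytes, they forward to Begin() + 0 and Begin() + 16, leaving bump_pointer_ at Begin() + 40.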
void MarkCompact::ForwardObject(mirror::Object* obj) {
  const size_t alloc_size = RoundUp(obj->SizeOf(), space::BumpPointerSpace::kAlignment);
  LockWord lock_word = obj->GetLockWord(false);
  // If we have a non-empty lock word, store it so we can restore it later.
  if (!LockWord::IsDefault(lock_word)) {
    // Set the bit in the bitmap so that we know to restore it later.
    objects_with_lockword_->Set(obj);
    lock_words_to_restore_.push_back(lock_word);
  }
  obj->SetLockWord(LockWord::FromForwardingAddress(reinterpret_cast<size_t>(bump_pointer_)),
                   false);
  bump_pointer_ += alloc_size;
  ++live_objects_in_space_;
}

void MarkCompact::CalculateObjectForwardingAddresses() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  // The bump pointer in the space where the next forwarding address will be.
  bump_pointer_ = reinterpret_cast<uint8_t*>(space_->Begin());
  // Visit all the marked objects in the bitmap.
  objects_before_forwarding_->VisitMarkedRange(reinterpret_cast<uintptr_t>(space_->Begin()),
                                               reinterpret_cast<uintptr_t>(space_->End()),
                                               [this](mirror::Object* obj)
      REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
    DCHECK_ALIGNED(obj, space::BumpPointerSpace::kAlignment);
    DCHECK(IsMarked(obj) != nullptr);
    ForwardObject(obj);
  });
}

void MarkCompact::InitializePhase() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  mark_stack_ = heap_->GetMarkStack();
  DCHECK(mark_stack_ != nullptr);
  immune_spaces_.Reset();
  CHECK(space_->CanMoveObjects()) << "Attempting to compact non-movable space from " << *space_;
  // TODO: I don't think we should need the heap bitmap lock to get the mark bitmap.
  ReaderMutexLock mu(Thread::Current(), *Locks::heap_bitmap_lock_);
  mark_bitmap_ = heap_->GetMarkBitmap();
  live_objects_in_space_ = 0;
}

void MarkCompact::ProcessReferences(Thread* self) {
  WriterMutexLock mu(self, *Locks::heap_bitmap_lock_);
  heap_->GetReferenceProcessor()->ProcessReferences(
      false, GetTimings(), GetCurrentIteration()->GetClearSoftReferences(), this);
}

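// Mark a single object. There are three cases: objects in immune spaces are skipped entirely;
// objects in the space being compacted are marked in objects_before_forwarding_; everything
// else (non-moving and large object spaces) is marked in the heap mark bitmap, falling back to
// a slow path for large objects that sanity-checks page alignment and aborts on failure.
// Newly marked objects are pushed on the mark stack for recursive scanning.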
inline mirror::Object* MarkCompact::MarkObject(mirror::Object* obj) {
  if (obj == nullptr) {
    return nullptr;
  }
  if (kUseBakerReadBarrier) {
    // Verify that the object has the correct read barrier state installed.
    obj->AssertReadBarrierState();
  }
  if (!immune_spaces_.IsInImmuneRegion(obj)) {
    if (objects_before_forwarding_->HasAddress(obj)) {
      if (!objects_before_forwarding_->Set(obj)) {
        MarkStackPush(obj);  // This object was not previously marked.
      }
    } else {
      DCHECK(!space_->HasAddress(obj));
      auto slow_path = [this](const mirror::Object* ref)
          REQUIRES_SHARED(Locks::mutator_lock_) {
        // Marking a large object: sanity-check that it is page aligned.
        if (!IsAligned<kPageSize>(ref)) {
          Runtime::Current()->GetHeap()->DumpSpaces(LOG_STREAM(ERROR));
          LOG(FATAL) << ref;
        }
      };
      if (!mark_bitmap_->Set(obj, slow_path)) {
        // This object was not previously marked.
        MarkStackPush(obj);
      }
    }
  }
  return obj;
}

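// Marking runs entirely inside the pause with the mutator lock held exclusively. The sequence
// is: bind the immune spaces, process and then clear cards, swap the allocation stacks, mark
// roots and mod-union references, recursively mark, then process references and sweep system
// weaks.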
void MarkCompact::MarkingPhase() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  Thread* self = Thread::Current();
  // Bitmap which describes which objects we have to move.
  objects_before_forwarding_.reset(accounting::ContinuousSpaceBitmap::Create(
      "objects before forwarding", space_->Begin(), space_->Size()));
  // Bitmap which describes which lock words we need to restore.
  objects_with_lockword_.reset(accounting::ContinuousSpaceBitmap::Create(
      "objects with lock words", space_->Begin(), space_->Size()));
  CHECK(Locks::mutator_lock_->IsExclusiveHeld(self));
  // Assume the cleared space is already empty.
  BindBitmaps();
  t.NewTiming("ProcessCards");
  // Process dirty cards and add dirty cards to mod-union tables.
  heap_->ProcessCards(GetTimings(), false, false, true);
  // Clear the whole card table since we cannot get any additional dirty cards during the
  // paused GC. This saves memory but only works for pause-the-world collectors.
  t.NewTiming("ClearCardTable");
  heap_->GetCardTable()->ClearCardTable();
  // Need to do this before the checkpoint since we don't want any threads to add references to
  // the live stack during the recursive mark.
  if (kUseThreadLocalAllocationStack) {
    t.NewTiming("RevokeAllThreadLocalAllocationStacks");
    heap_->RevokeAllThreadLocalAllocationStacks(self);
  }
  t.NewTiming("SwapStacks");
  heap_->SwapStacks();
  {
    WriterMutexLock mu(self, *Locks::heap_bitmap_lock_);
    MarkRoots();
    // Mark roots of immune spaces.
    UpdateAndMarkModUnion();
    // Recursively mark remaining objects.
    MarkReachableObjects();
  }
  ProcessReferences(self);
  {
    ReaderMutexLock mu(self, *Locks::heap_bitmap_lock_);
    SweepSystemWeaks();
  }
  Runtime::Current()->GetClassLinker()->CleanupClassLoaders();
  // Revoke TLABs before measuring how many objects were moved, since objects still sitting in
  // unrevoked TLABs would not be counted correctly.
  RevokeAllThreadLocalBuffers();
  // Disabled due to an issue where we have objects in the bump pointer space which reference dead
  // objects.
  // heap_->PreSweepingGcVerification(this);
}

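// Immune spaces (for example, image and zygote spaces) are not scanned object-by-object.
// Instead, their mod-union tables record which of their references may point outside the
// space, and UpdateAndMarkReferences() marks just those targets.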
void MarkCompact::UpdateAndMarkModUnion() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  for (auto& space : heap_->GetContinuousSpaces()) {
    // If the space is immune then we need to mark the references to other spaces.
    if (immune_spaces_.ContainsSpace(space)) {
      accounting::ModUnionTable* table = heap_->FindModUnionTableFromSpace(space);
      if (table != nullptr) {
        // TODO: Improve naming.
        TimingLogger::ScopedTiming t2(
            space->IsZygoteSpace() ? "UpdateAndMarkZygoteModUnionTable" :
                                     "UpdateAndMarkImageModUnionTable", GetTimings());
        table->UpdateAndMarkReferences(this);
      }
    }
  }
}

void MarkCompact::MarkReachableObjects() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  accounting::ObjectStack* live_stack = heap_->GetLiveStack();
  {
    TimingLogger::ScopedTiming t2("MarkAllocStackAsLive", GetTimings());
    heap_->MarkAllocStackAsLive(live_stack);
  }
  live_stack->Reset();
  // Recursively process the mark stack.
  ProcessMarkStack();
}

void MarkCompact::ReclaimPhase() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  WriterMutexLock mu(Thread::Current(), *Locks::heap_bitmap_lock_);
  // Reclaim unmarked objects.
  Sweep(false);
  // Swap the live and mark bitmaps for each space we modified. This is an optimization that
  // enables us to not clear live bits inside of the sweep. Only swaps unbound bitmaps.
  SwapBitmaps();
  GetHeap()->UnBindBitmaps();  // Unbind the live and mark bitmaps.
  Compact();
}

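// The mark stack cannot grow in place, so resizing stages the current contents in a temporary
// vector, resizes the stack, and then pushes everything back.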
void MarkCompact::ResizeMarkStack(size_t new_size) {
  std::vector<StackReference<mirror::Object>> temp(mark_stack_->Begin(), mark_stack_->End());
  CHECK_LE(mark_stack_->Size(), new_size);
  mark_stack_->Resize(new_size);
  for (auto& obj : temp) {
    mark_stack_->PushBack(obj.AsMirrorPtr());
  }
}

inline void MarkCompact::MarkStackPush(mirror::Object* obj) {
  if (UNLIKELY(mark_stack_->Size() >= mark_stack_->Capacity())) {
    ResizeMarkStack(mark_stack_->Capacity() * 2);
  }
  // The object must be pushed onto the mark stack.
  mark_stack_->PushBack(obj);
}

void MarkCompact::MarkHeapReference(mirror::HeapReference<mirror::Object>* obj_ptr,
                                    bool do_atomic_update ATTRIBUTE_UNUSED) {
  if (updating_references_) {
    UpdateHeapReference(obj_ptr);
  } else {
    MarkObject(obj_ptr->AsMirrorPtr());
  }
}

void MarkCompact::VisitRoots(
    mirror::Object*** roots, size_t count, const RootInfo& info ATTRIBUTE_UNUSED) {
  for (size_t i = 0; i < count; ++i) {
    MarkObject(*roots[i]);
  }
}

void MarkCompact::VisitRoots(
    mirror::CompressedReference<mirror::Object>** roots, size_t count,
    const RootInfo& info ATTRIBUTE_UNUSED) {
  for (size_t i = 0; i < count; ++i) {
    MarkObject(roots[i]->AsMirrorPtr());
  }
}

class MarkCompact::UpdateRootVisitor : public RootVisitor {
 public:
  explicit UpdateRootVisitor(MarkCompact* collector) : collector_(collector) {}

  void VisitRoots(mirror::Object*** roots, size_t count, const RootInfo& info ATTRIBUTE_UNUSED)
      OVERRIDE REQUIRES(Locks::mutator_lock_)
      REQUIRES_SHARED(Locks::heap_bitmap_lock_) {
    for (size_t i = 0; i < count; ++i) {
      mirror::Object* obj = *roots[i];
      mirror::Object* new_obj = collector_->GetMarkedForwardAddress(obj);
      if (obj != new_obj) {
        DCHECK(new_obj != nullptr);
        *roots[i] = new_obj;
      }
    }
  }

  void VisitRoots(mirror::CompressedReference<mirror::Object>** roots, size_t count,
                  const RootInfo& info ATTRIBUTE_UNUSED)
      OVERRIDE REQUIRES(Locks::mutator_lock_)
      REQUIRES_SHARED(Locks::heap_bitmap_lock_) {
    for (size_t i = 0; i < count; ++i) {
      mirror::Object* obj = roots[i]->AsMirrorPtr();
      mirror::Object* new_obj = collector_->GetMarkedForwardAddress(obj);
      if (obj != new_obj) {
        DCHECK(new_obj != nullptr);
        roots[i]->Assign(new_obj);
      }
    }
  }

 private:
  MarkCompact* const collector_;
};

class MarkCompact::UpdateObjectReferencesVisitor {
 public:
  explicit UpdateObjectReferencesVisitor(MarkCompact* collector) : collector_(collector) {}

  void operator()(mirror::Object* obj) const REQUIRES_SHARED(Locks::heap_bitmap_lock_)
          REQUIRES(Locks::mutator_lock_) ALWAYS_INLINE {
    collector_->UpdateObjectReferences(obj);
  }

 private:
  MarkCompact* const collector_;
};

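// Rewrite every reference in the heap to point at its forwarding address. While
// updating_references_ is set, IsMarked() and MarkHeapReference() return forwarding addresses
// instead of marking, so the same visitor machinery used for marking can be reused here.
// Roots, mod-union tables (or live bitmaps, for spaces without a table), system weaks, the
// compacted space itself, and the reference processor's cleared list are each visited once.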
void MarkCompact::UpdateReferences() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  updating_references_ = true;
  Runtime* runtime = Runtime::Current();
  // Update roots.
  UpdateRootVisitor update_root_visitor(this);
  runtime->VisitRoots(&update_root_visitor);
  // Update object references in mod union tables and spaces.
  for (const auto& space : heap_->GetContinuousSpaces()) {
    // Prefer the mod-union table where one exists (immune spaces); it limits the scan to
    // references that may point outside the space.
    accounting::ModUnionTable* table = heap_->FindModUnionTableFromSpace(space);
    if (table != nullptr) {
      // TODO: Improve naming.
      TimingLogger::ScopedTiming t2(
          space->IsZygoteSpace() ? "UpdateZygoteModUnionTableReferences" :
                                   "UpdateImageModUnionTableReferences",
                                   GetTimings());
      table->UpdateAndMarkReferences(this);
    } else {
      // No mod-union table, so scan the whole space using the live bitmap.
      accounting::ContinuousSpaceBitmap* bitmap = space->GetLiveBitmap();
      if (bitmap != nullptr) {
        UpdateObjectReferencesVisitor visitor(this);
        bitmap->VisitMarkedRange(reinterpret_cast<uintptr_t>(space->Begin()),
                                 reinterpret_cast<uintptr_t>(space->End()),
                                 visitor);
      }
    }
  }
  CHECK(!kMovingClasses)
      << "Didn't update large object classes since they are assumed to not move.";
  // Update the system weaks; these should already have been swept.
  runtime->SweepSystemWeaks(this);
  // Update the objects in the bump pointer space last. The space has no live bitmap, so use the
  // objects_before_forwarding_ bitmap built during marking.
  UpdateObjectReferencesVisitor visitor(this);
  objects_before_forwarding_->VisitMarkedRange(reinterpret_cast<uintptr_t>(space_->Begin()),
                                               reinterpret_cast<uintptr_t>(space_->End()),
                                               visitor);
  // Update the reference processor cleared list.
  heap_->GetReferenceProcessor()->UpdateRoots(this);
  updating_references_ = false;
}

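// Compaction proper: three linear passes over the space. First compute every object's
// destination (CalculateObjectForwardingAddresses), then rewrite all references heap-wide to
// those destinations (UpdateReferences), and finally slide the objects down (MoveObjects).
// Afterwards the space is shrunk to the final bump pointer and the freed tail is zeroed.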
void MarkCompact::Compact() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  CalculateObjectForwardingAddresses();
  UpdateReferences();
  MoveObjects();
  // Account for the objects and bytes reclaimed from the compacted space.
  int64_t objects_freed = space_->GetObjectsAllocated() - live_objects_in_space_;
  int64_t bytes_freed = reinterpret_cast<int64_t>(space_->End()) -
      reinterpret_cast<int64_t>(bump_pointer_);
  t.NewTiming("RecordFree");
  space_->RecordFree(objects_freed, bytes_freed);
  RecordFree(ObjectBytePair(objects_freed, bytes_freed));
  space_->SetEnd(bump_pointer_);
  // Need to zero out the memory we freed. TODO: Use madvise for pages.
  memset(bump_pointer_, 0, bytes_freed);
}

// Marks all objects in the root set.
void MarkCompact::MarkRoots() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  Runtime::Current()->VisitRoots(this);
}

inline void MarkCompact::UpdateHeapReference(mirror::HeapReference<mirror::Object>* reference) {
  mirror::Object* obj = reference->AsMirrorPtr();
  if (obj != nullptr) {
    mirror::Object* new_obj = GetMarkedForwardAddress(obj);
    if (obj != new_obj) {
      DCHECK(new_obj != nullptr);
      reference->Assign(new_obj);
    }
  }
}

class MarkCompact::UpdateReferenceVisitor {
 public:
  explicit UpdateReferenceVisitor(MarkCompact* collector) : collector_(collector) {}

  void operator()(mirror::Object* obj, MemberOffset offset, bool /*is_static*/) const
      ALWAYS_INLINE REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
    collector_->UpdateHeapReference(obj->GetFieldObjectReferenceAddr<kVerifyNone>(offset));
  }

  void operator()(ObjPtr<mirror::Class> /*klass*/, mirror::Reference* ref) const
      REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
    collector_->UpdateHeapReference(
        ref->GetFieldObjectReferenceAddr<kVerifyNone>(mirror::Reference::ReferentOffset()));
  }

  // TODO: Remove NO_THREAD_SAFETY_ANALYSIS when clang better understands visitors.
  void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root) const
      NO_THREAD_SAFETY_ANALYSIS {
    if (!root->IsNull()) {
      VisitRoot(root);
    }
  }

  void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
      NO_THREAD_SAFETY_ANALYSIS {
    root->Assign(collector_->GetMarkedForwardAddress(root->AsMirrorPtr()));
  }

 private:
  MarkCompact* const collector_;
};

void MarkCompact::UpdateObjectReferences(mirror::Object* obj) {
  UpdateReferenceVisitor visitor(this);
  obj->VisitReferences(visitor, visitor);
}

inline mirror::Object* MarkCompact::GetMarkedForwardAddress(mirror::Object* obj) {
  DCHECK(obj != nullptr);
  if (objects_before_forwarding_->HasAddress(obj)) {
    DCHECK(objects_before_forwarding_->Test(obj));
    mirror::Object* ret =
        reinterpret_cast<mirror::Object*>(obj->GetLockWord(false).ForwardingAddress());
    DCHECK(ret != nullptr);
    return ret;
  }
  DCHECK(!space_->HasAddress(obj));
  return obj;
}

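// IsMarked() doubles as the forwarding query. During the reference-update pass it returns the
// object's post-compaction address; during marking it returns the object itself if marked, or
// nullptr if not.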
mirror::Object* MarkCompact::IsMarked(mirror::Object* object) {
  if (immune_spaces_.IsInImmuneRegion(object)) {
    return object;
  }
  if (updating_references_) {
    return GetMarkedForwardAddress(object);
  }
  if (objects_before_forwarding_->HasAddress(object)) {
    return objects_before_forwarding_->Test(object) ? object : nullptr;
  }
  return mark_bitmap_->Test(object) ? object : nullptr;
}

bool MarkCompact::IsNullOrMarkedHeapReference(mirror::HeapReference<mirror::Object>* ref_ptr,
                                              // MarkCompact does the GC in a pause. No CAS needed.
                                              bool do_atomic_update ATTRIBUTE_UNUSED) {
  // Side effect free since we call this before ever moving objects.
  mirror::Object* obj = ref_ptr->AsMirrorPtr();
  if (obj == nullptr) {
    return true;
  }
  return IsMarked(obj) != nullptr;
}

void MarkCompact::SweepSystemWeaks() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  Runtime::Current()->SweepSystemWeaks(this);
}

bool MarkCompact::ShouldSweepSpace(space::ContinuousSpace* space) const {
  return space != space_ && !immune_spaces_.ContainsSpace(space);
}

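// Copy one object to its forwarding address and restore its saved lock word, if any. Saved
// words are consumed from the front of lock_words_to_restore_, which works because objects are
// moved in the same ascending address order in which the words were saved.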
void MarkCompact::MoveObject(mirror::Object* obj, size_t len) {
  // Look at the forwarding address stored in the lock word to know where to copy.
  DCHECK(space_->HasAddress(obj)) << obj;
  uintptr_t dest_addr = obj->GetLockWord(false).ForwardingAddress();
  mirror::Object* dest_obj = reinterpret_cast<mirror::Object*>(dest_addr);
  DCHECK(space_->HasAddress(dest_obj)) << dest_obj;
  // Use memmove since there may be overlap.
  memmove(reinterpret_cast<void*>(dest_addr), reinterpret_cast<const void*>(obj), len);
  // Restore the saved lock word if needed.
  LockWord lock_word = LockWord::Default();
  if (UNLIKELY(objects_with_lockword_->Test(obj))) {
    lock_word = lock_words_to_restore_.front();
    lock_words_to_restore_.pop_front();
  }
  dest_obj->SetLockWord(lock_word, false);
}

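// Sliding the objects is safe without a scratch buffer: every destination address is less than
// or equal to its source (objects only move toward the start of the space), and the visit
// proceeds in ascending address order, so a copy can only overwrite memory belonging to objects
// that have already been moved.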
void MarkCompact::MoveObjects() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  // Move the objects in the before forwarding bitmap.
  objects_before_forwarding_->VisitMarkedRange(reinterpret_cast<uintptr_t>(space_->Begin()),
                                               reinterpret_cast<uintptr_t>(space_->End()),
                                               [this](mirror::Object* obj)
      REQUIRES_SHARED(Locks::heap_bitmap_lock_)
      REQUIRES(Locks::mutator_lock_) ALWAYS_INLINE {
    MoveObject(obj, obj->SizeOf());
  });
  CHECK(lock_words_to_restore_.empty());
}

void MarkCompact::Sweep(bool swap_bitmaps) {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  DCHECK(mark_stack_->IsEmpty());
  for (const auto& space : GetHeap()->GetContinuousSpaces()) {
    if (space->IsContinuousMemMapAllocSpace()) {
      space::ContinuousMemMapAllocSpace* alloc_space = space->AsContinuousMemMapAllocSpace();
      if (!ShouldSweepSpace(alloc_space)) {
        continue;
      }
      TimingLogger::ScopedTiming t2(
          alloc_space->IsZygoteSpace() ? "SweepZygoteSpace" : "SweepAllocSpace", GetTimings());
      RecordFree(alloc_space->Sweep(swap_bitmaps));
    }
  }
  SweepLargeObjects(swap_bitmaps);
}

void MarkCompact::SweepLargeObjects(bool swap_bitmaps) {
  space::LargeObjectSpace* los = heap_->GetLargeObjectsSpace();
  if (los != nullptr) {
    TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
    RecordFreeLOS(los->Sweep(swap_bitmaps));
  }
}

// Process the "referent" field in a java.lang.ref.Reference.  If the referent has not yet been
// marked, put it on the appropriate list in the heap for later processing.
void MarkCompact::DelayReferenceReferent(ObjPtr<mirror::Class> klass,
                                         ObjPtr<mirror::Reference> reference) {
  heap_->GetReferenceProcessor()->DelayReferenceReferent(klass, reference, this);
}

class MarkCompact::MarkObjectVisitor {
 public:
  explicit MarkObjectVisitor(MarkCompact* collector) : collector_(collector) {}

  void operator()(ObjPtr<mirror::Object> obj,
                  MemberOffset offset,
                  bool /*is_static*/) const ALWAYS_INLINE
      REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
    // Object was already verified when we scanned it.
    collector_->MarkObject(obj->GetFieldObject<mirror::Object, kVerifyNone>(offset));
  }

  void operator()(ObjPtr<mirror::Class> klass,
                  ObjPtr<mirror::Reference> ref) const
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(Locks::heap_bitmap_lock_) {
    collector_->DelayReferenceReferent(klass, ref);
  }

  // TODO: Remove NO_THREAD_SAFETY_ANALYSIS when clang better understands visitors.
  void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root) const
      NO_THREAD_SAFETY_ANALYSIS {
    if (!root->IsNull()) {
      VisitRoot(root);
    }
  }

  void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
      NO_THREAD_SAFETY_ANALYSIS {
    collector_->MarkObject(root->AsMirrorPtr());
  }

 private:
  MarkCompact* const collector_;
};

// Visit all of the references of an object, marking each one that has not yet been marked.
void MarkCompact::ScanObject(mirror::Object* obj) {
  MarkObjectVisitor visitor(this);
  obj->VisitReferences(visitor, visitor);
}

// Scan anything that's on the mark stack.
void MarkCompact::ProcessMarkStack() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  while (!mark_stack_->IsEmpty()) {
    mirror::Object* obj = mark_stack_->PopBack();
    DCHECK(obj != nullptr);
    ScanObject(obj);
  }
}

void MarkCompact::SetSpace(space::BumpPointerSpace* space) {
  DCHECK(space != nullptr);
  space_ = space;
}

void MarkCompact::FinishPhase() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  space_ = nullptr;
  CHECK(mark_stack_->IsEmpty());
  mark_stack_->Reset();
  // Clear all of the spaces' mark bitmaps.
  WriterMutexLock mu(Thread::Current(), *Locks::heap_bitmap_lock_);
  heap_->ClearMarkedObjects();
  // Release our bitmaps.
  objects_before_forwarding_.reset(nullptr);
  objects_with_lockword_.reset(nullptr);
}

void MarkCompact::RevokeAllThreadLocalBuffers() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  GetHeap()->RevokeAllThreadLocalBuffers();
}

}  // namespace collector
}  // namespace gc
}  // namespace art