/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_COLLECTOR_MARK_SWEEP_H_
#define ART_RUNTIME_GC_COLLECTOR_MARK_SWEEP_H_

#include <memory>

#include "atomic.h"
#include "barrier.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "garbage_collector.h"
#include "gc_root.h"
#include "gc/accounting/heap_bitmap.h"
#include "immune_region.h"
#include "object_callbacks.h"
#include "offsets.h"

namespace art {

namespace mirror {
  class Class;
  class Object;
  class Reference;
}  // namespace mirror

class Thread;
enum VisitRootFlags : uint8_t;

namespace gc {

class Heap;

namespace accounting {
  template<typename T> class AtomicStack;
  typedef AtomicStack<mirror::Object*> ObjectStack;
}  // namespace accounting

namespace collector {

class MarkSweep : public GarbageCollector {
 public:
  explicit MarkSweep(Heap* heap, bool is_concurrent, const std::string& name_prefix = "");

  ~MarkSweep() {}

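  // A rough sketch of the flow driven by RunPhases() (the exact locking and thread-suspension
  // handling lives in mark_sweep.cc): InitializePhase(), then MarkingPhase() (concurrently with
  // the mutators for CMS, inside the pause for non-concurrent MS), PausePhase() during the
  // stop-the-world pause, ReclaimPhase() to sweep unreachable objects, and finally FinishPhase().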
  virtual void RunPhases() OVERRIDE NO_THREAD_SAFETY_ANALYSIS;
  void InitializePhase();
  void MarkingPhase() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  void PausePhase() EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);
  void ReclaimPhase() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  void FinishPhase() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  virtual void MarkReachableObjects()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  bool IsConcurrent() const {
    return is_concurrent_;
  }

  virtual GcType GetGcType() const OVERRIDE {
    return kGcTypeFull;
  }

  virtual CollectorType GetCollectorType() const OVERRIDE {
    return is_concurrent_ ? kCollectorTypeCMS : kCollectorTypeMS;
  }

  // Initializes internal structures.
  void Init();

  // Find the default mark bitmap.
  void FindDefaultSpaceBitmap();

  // Marks all objects in the root set at the start of a garbage collection.
  void MarkRoots(Thread* self)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void MarkNonThreadRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void MarkConcurrentRoots(VisitRootFlags flags)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void MarkRootsCheckpoint(Thread* self, bool revoke_ros_alloc_thread_local_buffers_at_checkpoint)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Builds a mark stack and recursively marks until it empties.
  void RecursiveMark()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Bind the live bits to the mark bits of bitmaps for spaces that are never collected, i.e.
  // the image. Mark that portion of the heap as immune.
  virtual void BindBitmaps() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Builds a mark stack with objects on dirty cards and recursively marks until it empties.
  void RecursiveMarkDirtyObjects(bool paused, byte minimum_age)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Remarks the root set after completing the concurrent mark.
  void ReMarkRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void ProcessReferences(Thread* self)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Update and mark references from immune spaces.
  void UpdateAndMarkModUnion()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Pre-clean cards to reduce how much work is needed in the pause.
  void PreCleanCards()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Sweeps unmarked objects to complete the garbage collection. Virtual because by default it
  // sweeps all allocation spaces; partial and sticky GCs only sweep a subset of the heap.
  virtual void Sweep(bool swap_bitmaps) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Sweeps unmarked large objects to complete the garbage collection.
  void SweepLargeObjects(bool swap_bitmaps) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Sweep only pointers within an array. WARNING: Trashes objects.
  void SweepArray(accounting::ObjectStack* allocation_stack_, bool swap_bitmaps)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Blackens an object.
  void ScanObject(mirror::Object* obj)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // No thread safety analysis due to lambdas.
  template<typename MarkVisitor, typename ReferenceVisitor>
  void ScanObjectVisit(mirror::Object* obj, const MarkVisitor& visitor,
                       const ReferenceVisitor& ref_visitor)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void SweepSystemWeaks(Thread* self)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) LOCKS_EXCLUDED(Locks::heap_bitmap_lock_);

  static mirror::Object* VerifySystemWeakIsLiveCallback(mirror::Object* obj, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void VerifySystemWeaks()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  // Verify that an object is live, either in a live bitmap or in the allocation stack.
  void VerifyIsLive(const mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static mirror::Object* MarkObjectCallback(mirror::Object* obj, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static void MarkHeapReferenceCallback(mirror::HeapReference<mirror::Object>* ref, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static bool HeapReferenceMarkedCallback(mirror::HeapReference<mirror::Object>* ref, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static void MarkRootCallback(mirror::Object** root, void* arg, const RootInfo& root_info)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static void VerifyRootMarked(mirror::Object** root, void* arg, const RootInfo& root_info)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static void ProcessMarkStackCallback(void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static void MarkRootParallelCallback(mirror::Object** root, void* arg, const RootInfo& root_info)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Marks an object.
  void MarkObject(mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  Barrier& GetBarrier() {
    return *gc_barrier_;
  }

  // Schedules an unmarked object for reference processing.
  void DelayReferenceReferent(mirror::Class* klass, mirror::Reference* reference)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

 protected:
  // Returns true if the object has its bit set in the mark bitmap.
  bool IsMarked(const mirror::Object* object) const
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static mirror::Object* IsMarkedCallback(mirror::Object* object, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static void VerifyImageRootVisitor(mirror::Object* root, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  void MarkObjectNonNull(mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Marks an object atomically; safe to use from multiple threads.
  void MarkObjectNonNullParallel(mirror::Object* obj);

  // Returns true if we need to add obj to a mark stack.
  bool MarkObjectParallel(const mirror::Object* obj) NO_THREAD_SAFETY_ANALYSIS;

  // Verify the roots of the heap and print out information related to any invalid roots.
  // Called in MarkObject, so we may not hold the mutator lock.
  void VerifyRoots()
      NO_THREAD_SAFETY_ANALYSIS;

  // Expand the mark stack to 2x its current size.
  void ExpandMarkStack() EXCLUSIVE_LOCKS_REQUIRED(mark_stack_lock_);
  void ResizeMarkStack(size_t new_size) EXCLUSIVE_LOCKS_REQUIRED(mark_stack_lock_);

  // Returns how many threads we should use for the current GC phase, based on whether we are
  // paused and whether or not we care about pauses.
  size_t GetThreadCount(bool paused) const;

  static void VerifyRootCallback(mirror::Object** root, void* arg, const RootInfo& root_info);

  void VerifyRoot(const mirror::Object* root, const RootInfo& root_info) NO_THREAD_SAFETY_ANALYSIS;

  // Push a single reference onto the mark stack.
  void PushOnMarkStack(mirror::Object* obj);

  // Blackens objects grayed during a garbage collection.
  void ScanGrayObjects(bool paused, byte minimum_age)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Recursively blackens objects on the mark stack.
  void ProcessMarkStack(bool paused)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void ProcessMarkStackParallel(size_t thread_count)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Used to get around thread safety annotations. The call is from MarkingPhase and is guarded by
  // IsExclusiveHeld.
  void RevokeAllThreadLocalAllocationStacks(Thread* self) NO_THREAD_SAFETY_ANALYSIS;

  // Revoke all the thread-local buffers.
  void RevokeAllThreadLocalBuffers();

  // Whether or not we count how many of each type of object were scanned.
  static const bool kCountScannedTypes = false;

  // Current space; we check this space first to avoid searching for the appropriate space for an
  // object.
  accounting::ContinuousSpaceBitmap* current_space_bitmap_;
  // Cache the heap's mark bitmap to avoid having to do two loads during slow-path marking.
  accounting::HeapBitmap* mark_bitmap_;

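  // Mark stack: objects that have been marked but whose fields have not yet been scanned.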
  accounting::ObjectStack* mark_stack_;

  // Immune region; every object inside the immune range is assumed to be marked.
  ImmuneRegion immune_region_;

  // Parallel finger.
  AtomicInteger atomic_finger_;
  // Number of classes scanned, if kCountScannedTypes.
  AtomicInteger class_count_;
  // Number of arrays scanned, if kCountScannedTypes.
  AtomicInteger array_count_;
  // Number of non-class/non-array objects scanned, if kCountScannedTypes.
  AtomicInteger other_count_;
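  // The counters below are profiling and debugging aids; most are only updated when the
  // corresponding counting flags in the implementation are enabled.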
  AtomicInteger large_object_test_;
  AtomicInteger large_object_mark_;
  AtomicInteger overhead_time_;
  AtomicInteger work_chunks_created_;
  AtomicInteger work_chunks_deleted_;
  AtomicInteger reference_count_;
  AtomicInteger mark_null_count_;
  AtomicInteger mark_immune_count_;
  AtomicInteger mark_fastpath_count_;
  AtomicInteger mark_slowpath_count_;

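  // Barrier used to wait for the thread checkpoints to complete (see MarkRootsCheckpoint()).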
  std::unique_ptr<Barrier> gc_barrier_;
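  // Protects the mark stack, e.g. while it is resized or pushed to from multiple threads.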
  Mutex mark_stack_lock_ ACQUIRED_AFTER(Locks::classlinker_classes_lock_);

  const bool is_concurrent_;

  // Verification.
  size_t live_stack_freeze_size_;

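  // Scratch memory used by SweepArray() to batch up the objects it is going to free.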
  std::unique_ptr<MemMap> sweep_array_free_buffer_mem_map_;

 private:
  friend class AddIfReachesAllocSpaceVisitor;  // Used by mod-union table.
  friend class CardScanTask;
  friend class CheckBitmapVisitor;
  friend class CheckReferenceVisitor;
  friend class art::gc::Heap;
  friend class MarkObjectVisitor;
  friend class ModUnionCheckReferences;
  friend class ModUnionClearCardVisitor;
  friend class ModUnionReferenceVisitor;
  friend class ModUnionVisitor;
  friend class ModUnionTableBitmap;
  friend class ModUnionTableReferenceCache;
  friend class ModUnionScanImageRootVisitor;
  template<bool kUseFinger> friend class MarkStackTask;
  friend class FifoMarkStackChunk;
  friend class MarkSweepMarkObjectSlowPath;

  DISALLOW_COPY_AND_ASSIGN(MarkSweep);
};

}  // namespace collector
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_COLLECTOR_MARK_SWEEP_H_