/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_COLLECTOR_MARK_COMPACT_H_
#define ART_RUNTIME_GC_COLLECTOR_MARK_COMPACT_H_

#include <deque>
#include <memory>  // For unique_ptr.

#include "base/atomic.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "garbage_collector.h"
#include "gc/accounting/heap_bitmap.h"
#include "gc_root.h"
#include "immune_spaces.h"
#include "lock_word.h"
#include "offsets.h"

namespace art {

class Thread;

namespace mirror {
class Class;
class Object;
}  // namespace mirror

namespace gc {

class Heap;

namespace accounting {
template <typename T> class AtomicStack;
typedef AtomicStack<mirror::Object> ObjectStack;
}  // namespace accounting

namespace space {
class BumpPointerSpace;
class ContinuousMemMapAllocSpace;
class ContinuousSpace;
}  // namespace space

namespace collector {

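// Mark-compact collector: compacts a single bump pointer space in place by marking the live
// objects, computing a forwarding address for each, updating all references, and then sliding
// the objects down to their forwarding addresses.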
class MarkCompact : public GarbageCollector {
 public:
  explicit MarkCompact(Heap* heap, const std::string& name_prefix = "");
  ~MarkCompact() {}

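  // Run the full collection: InitializePhase(), MarkingPhase(), ReclaimPhase(), then
  // FinishPhase().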
  virtual void RunPhases() OVERRIDE NO_THREAD_SAFETY_ANALYSIS;
  void InitializePhase();
  void MarkingPhase() REQUIRES(Locks::mutator_lock_)
      REQUIRES(!Locks::heap_bitmap_lock_);
  void ReclaimPhase() REQUIRES(Locks::mutator_lock_)
      REQUIRES(!Locks::heap_bitmap_lock_);
  void FinishPhase() REQUIRES(Locks::mutator_lock_);
  void MarkReachableObjects()
      REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_);
  virtual GcType GetGcType() const OVERRIDE {
    return kGcTypePartial;
  }
  virtual CollectorType GetCollectorType() const OVERRIDE {
    return kCollectorTypeMC;
  }

  // Sets which space we will be moving objects in.
  void SetSpace(space::BumpPointerSpace* space);

  // Initializes internal structures.
  void Init();

  // Find the default mark bitmap.
  void FindDefaultMarkBitmap();

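  // Scan the reference fields of a single object, marking the objects it refers to.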
  void ScanObject(mirror::Object* obj)
      REQUIRES(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Marks the root set at the start of a garbage collection.
  void MarkRoots()
      REQUIRES(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Bind the live bits to the mark bits of bitmaps for spaces that are never collected, i.e.
  // the image. Mark that portion of the heap as immune.
  void BindBitmaps() REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Locks::heap_bitmap_lock_);

  void UnBindBitmaps()
      REQUIRES(Locks::heap_bitmap_lock_);

  void ProcessReferences(Thread* self)
      REQUIRES(Locks::mutator_lock_);

  // Sweeps unmarked objects to complete the garbage collection.
  void Sweep(bool swap_bitmaps) REQUIRES(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Sweeps unmarked objects in the large object space to complete the garbage collection.
  void SweepLargeObjects(bool swap_bitmaps) REQUIRES(Locks::heap_bitmap_lock_);

  void SweepSystemWeaks()
      REQUIRES_SHARED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  virtual void VisitRoots(mirror::Object*** roots, size_t count, const RootInfo& info)
      OVERRIDE REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  virtual void VisitRoots(mirror::CompressedReference<mirror::Object>** roots, size_t count,
                          const RootInfo& info)
      OVERRIDE REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  // Schedules an unmarked object for reference processing.
  void DelayReferenceReferent(ObjPtr<mirror::Class> klass, ObjPtr<mirror::Reference> reference)
      REQUIRES_SHARED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

 protected:
  // Returns null if the object is not marked, otherwise returns the forwarding address (same as
  // the object itself for non-movable things).
  mirror::Object* GetMarkedForwardAddress(mirror::Object* object)
      REQUIRES(Locks::mutator_lock_)
      REQUIRES_SHARED(Locks::heap_bitmap_lock_);

  // Marks a large object, returning true if the object was not previously marked.
  bool MarkLargeObject(const mirror::Object* obj)
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Expand the mark stack to the given size (typically 2x its current size when full).
  void ResizeMarkStack(size_t new_size) REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns true if we should sweep the space.
  bool ShouldSweepSpace(space::ContinuousSpace* space) const;

  // Push an object onto the mark stack.
  void MarkStackPush(mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_);

  void UpdateAndMarkModUnion()
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Recursively blackens objects on the mark stack.
  void ProcessMarkStack()
      REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  // Compact the heap using a 3-pass approach: calculate forwarding addresses, update references,
  // then move the objects.
  void Compact() REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_);
  // Calculate the forwarding address of objects marked as "live" in the objects_before_forwarding
  // bitmap.
  void CalculateObjectForwardingAddresses()
      REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_);
  // Update the references of objects by using the forwarding addresses.
  void UpdateReferences() REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_);
  // Move objects and restore lock words.
  void MoveObjects() REQUIRES(Locks::mutator_lock_);
  // Move a single object to its forwarding address.
  void MoveObject(mirror::Object* obj, size_t len) REQUIRES(Locks::mutator_lock_);
  // Mark a single object.
  virtual mirror::Object* MarkObject(mirror::Object* obj) OVERRIDE
      REQUIRES(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
  virtual void MarkHeapReference(mirror::HeapReference<mirror::Object>* obj_ptr,
                                 bool do_atomic_update) OVERRIDE
      REQUIRES(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
  virtual mirror::Object* IsMarked(mirror::Object* obj) OVERRIDE
      REQUIRES_SHARED(Locks::heap_bitmap_lock_)
      REQUIRES(Locks::mutator_lock_);
  virtual bool IsNullOrMarkedHeapReference(mirror::HeapReference<mirror::Object>* obj,
                                           bool do_atomic_update) OVERRIDE
      REQUIRES_SHARED(Locks::heap_bitmap_lock_)
      REQUIRES(Locks::mutator_lock_);
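  // Compute the forwarding address for a single object and store it in the object's lock word,
  // first saving the original lock word if it will need to be restored.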
  void ForwardObject(mirror::Object* obj) REQUIRES(Locks::heap_bitmap_lock_,
                                                   Locks::mutator_lock_);
  // Update a single heap reference.
  void UpdateHeapReference(mirror::HeapReference<mirror::Object>* reference)
      REQUIRES_SHARED(Locks::heap_bitmap_lock_)
      REQUIRES(Locks::mutator_lock_);
  // Update all of the references of a single object.
  void UpdateObjectReferences(mirror::Object* obj)
      REQUIRES_SHARED(Locks::heap_bitmap_lock_)
      REQUIRES(Locks::mutator_lock_);

  // Revoke all the thread-local buffers.
  void RevokeAllThreadLocalBuffers();

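  // Stack of objects which have been marked but whose references have not yet been scanned.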
  accounting::ObjectStack* mark_stack_;

  // Every object inside the immune spaces is assumed to be marked.
  ImmuneSpaces immune_spaces_;

  // Bump pointer space which we are collecting.
  space::BumpPointerSpace* space_;
  // Cached mark bitmap as an optimization.
  accounting::HeapBitmap* mark_bitmap_;

  // The name of the collector.
  std::string collector_name_;

  // The bump pointer in the space where the next forwarding address will be.
  uint8_t* bump_pointer_;
  // How many live objects we have in the space.
  size_t live_objects_in_space_;

  // Bitmap which describes which objects we have to move; the granularity needs to be divided
  // by 2 so that we can handle objects which are only 8 bytes.
  std::unique_ptr<accounting::ContinuousSpaceBitmap> objects_before_forwarding_;
  // Bitmap which describes which objects have lock words that we need to restore; the lock word
  // is temporarily replaced by the forwarding address during compaction.
  std::unique_ptr<accounting::ContinuousSpaceBitmap> objects_with_lockword_;
  // The lock words which we need to restore as we move the objects.
  std::deque<LockWord> lock_words_to_restore_;

  // Whether or not we are currently updating references.
  bool updating_references_;

 private:
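  // Visitors used to mark objects and to update references and roots with forwarding addresses.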
  class MarkObjectVisitor;
  class UpdateObjectReferencesVisitor;
  class UpdateReferenceVisitor;
  class UpdateRootVisitor;

  DISALLOW_IMPLICIT_CONSTRUCTORS(MarkCompact);
};

}  // namespace collector
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_COLLECTOR_MARK_COMPACT_H_