// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_SCAVENGER_H_
#define V8_HEAP_SCAVENGER_H_

#include <utility>  // For std::pair (ObjectAndSize).

#include "src/base/platform/condition-variable.h"
#include "src/heap/local-allocator.h"
#include "src/heap/objects-visiting.h"
#include "src/heap/slot-set.h"
#include "src/heap/worklist.h"

namespace v8 {
namespace internal {

class OneshotBarrier;

class Scavenger {
 public:
  static const int kCopiedListSegmentSize = 256;
  static const int kPromotionListSegmentSize = 256;

  using ObjectAndSize = std::pair<HeapObject*, int>;
  using CopiedList = Worklist<ObjectAndSize, kCopiedListSegmentSize>;
  using PromotionList = Worklist<ObjectAndSize, kPromotionListSegmentSize>;
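  // Both worklists are shared by all scavenger tasks; each task drains and
  // refills them through a per-task Worklist::View (promotion_list_ and
  // copied_list_ below). Entries pair an object with its size so that the
  // fields of copied and promoted objects can be visited later without
  // recomputing the size.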

  Scavenger(Heap* heap, bool is_logging, CopiedList* copied_list,
            PromotionList* promotion_list, int task_id);

  // Entry point for scavenging an old generation page. For scavenging single
  // objects see RootScavengeVisitor and ScavengeVisitor below.
  void ScavengePage(MemoryChunk* page);

  // Processes remaining work (i.e. objects) after single objects have been
  // manually scavenged using ScavengeObject or CheckAndScavengeObject.
  void Process(OneshotBarrier* barrier = nullptr);

  // Finalizes the Scavenger. Needs to be called from the main thread.
  void Finalize();

  size_t bytes_copied() const { return copied_size_; }
  size_t bytes_promoted() const { return promoted_size_; }

 private:
  // Number of objects to process before interrupting to potentially wake up
  // other tasks.
  static const int kInterruptThreshold = 128;
  // Initial capacity of the task-local pretenuring feedback map.
  static const int kInitialLocalPretenuringFeedbackCapacity = 256;

  inline Heap* heap() { return heap_; }

  inline void PageMemoryFence(MaybeObject* object);

  void AddPageToSweeperIfNecessary(MemoryChunk* page);

  // Potentially scavenges an object referenced from |slot_address| if it is
  // indeed a HeapObject and resides in from space.
  inline SlotCallbackResult CheckAndScavengeObject(Heap* heap,
                                                   Address slot_address);

  // Scavenges an object |object| referenced from slot |p|. |object| is
  // required to be in from space.
  inline void ScavengeObject(HeapObjectReference** p, HeapObject* object);

  // Copies |source| to |target| and sets the forwarding pointer in |source|.
  V8_INLINE bool MigrateObject(Map* map, HeapObject* source, HeapObject* target,
                               int size);

  V8_INLINE bool SemiSpaceCopyObject(Map* map, HeapObjectReference** slot,
                                     HeapObject* object, int object_size);

  V8_INLINE bool PromoteObject(Map* map, HeapObjectReference** slot,
                               HeapObject* object, int object_size);

  V8_INLINE void EvacuateObject(HeapObjectReference** slot, Map* map,
                                HeapObject* source);

  // Different cases for object evacuation.

  V8_INLINE void EvacuateObjectDefault(Map* map, HeapObjectReference** slot,
                                       HeapObject* object, int object_size);

  V8_INLINE void EvacuateJSFunction(Map* map, HeapObject** slot,
                                    JSFunction* object, int object_size);

  inline void EvacuateThinString(Map* map, HeapObject** slot,
                                 ThinString* object, int object_size);

  inline void EvacuateShortcutCandidate(Map* map, HeapObject** slot,
                                        ConsString* object, int object_size);

  void IterateAndScavengePromotedObject(HeapObject* target, int size);

  static inline bool ContainsOnlyData(VisitorId visitor_id);

  Heap* const heap_;
  PromotionList::View promotion_list_;
  CopiedList::View copied_list_;
  Heap::PretenuringFeedbackMap local_pretenuring_feedback_;
  size_t copied_size_;
  size_t promoted_size_;
  LocalAllocator allocator_;
  const bool is_logging_;
  const bool is_incremental_marking_;
  const bool is_compacting_;

  friend class IterateAndScavengePromotedObjectsVisitor;
  friend class RootScavengeVisitor;
  friend class ScavengeVisitor;
};
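
// A rough usage sketch (illustrative only, not part of the interface): a
// single scavenger task could drive the class roughly as follows. The shared
// worklists are owned by the caller; |heap|, |pages| and |kTaskId| are
// placeholder names.
//
//   Scavenger::CopiedList copied_list;
//   Scavenger::PromotionList promotion_list;
//   Scavenger scavenger(heap, false /* is_logging */, &copied_list,
//                       &promotion_list, kTaskId);
//   for (MemoryChunk* page : pages) scavenger.ScavengePage(page);
//   scavenger.Process();
//   scavenger.Finalize();  // Main thread only.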

// Helper class for turning the scavenger into a root visitor that filters out
// non-HeapObjects and objects which do not reside in new space.
class RootScavengeVisitor final : public RootVisitor {
 public:
  explicit RootScavengeVisitor(Scavenger* scavenger);

  void VisitRootPointer(Root root, const char* description, Object** p) final;
  void VisitRootPointers(Root root, const char* description, Object** start,
                         Object** end) final;

 private:
  void ScavengePointer(Object** p);

  Scavenger* const scavenger_;
};
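
// A rough usage sketch (illustrative only): the visitor is handed to the
// heap's root iteration so that every root slot pointing into from space is
// scavenged; the exact root-iteration entry point below is an assumption
// about this V8 version.
//
//   RootScavengeVisitor root_visitor(&scavenger);
//   heap->IterateRoots(&root_visitor, VISIT_ALL_IN_SCAVENGE);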

// Helper class for turning the scavenger into a heap object visitor that
// scavenges the pointer fields of an object, skipping slots that do not refer
// into new space.
class ScavengeVisitor final : public NewSpaceVisitor<ScavengeVisitor> {
 public:
  explicit ScavengeVisitor(Scavenger* scavenger);

  V8_INLINE void VisitPointers(HeapObject* host, Object** start,
                               Object** end) final;
  V8_INLINE void VisitPointers(HeapObject* host, MaybeObject** start,
                               MaybeObject** end) final;

 private:
  Scavenger* const scavenger_;
};
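
// A rough usage sketch (illustrative only): after an object has been copied
// or promoted, its fields can be processed by dispatching through the
// visitor. Visit() comes from the NewSpaceVisitor/HeapVisitor base, so the
// exact call shape is an assumption; |target| is a placeholder for the new
// copy of the object.
//
//   ScavengeVisitor visitor(&scavenger);
//   visitor.Visit(target->map(), target);  // Scavenges target's pointer fields.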

}  // namespace internal
}  // namespace v8

#endif  // V8_HEAP_SCAVENGER_H_