// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_INCREMENTAL_MARKING_H_
#define V8_INCREMENTAL_MARKING_H_


#include "execution.h"
#include "mark-compact.h"
#include "objects.h"

namespace v8 {
namespace internal {


class IncrementalMarking {
 public:
  enum State {
    STOPPED,
    SWEEPING,
    MARKING,
    COMPLETE
  };

  enum CompletionAction {
    GC_VIA_STACK_GUARD,
    NO_GC_VIA_STACK_GUARD
  };

  explicit IncrementalMarking(Heap* heap);

  void TearDown();

  State state() {
    ASSERT(state_ == STOPPED || FLAG_incremental_marking);
    return state_;
  }

  bool should_hurry() { return should_hurry_; }
  void set_should_hurry(bool val) { should_hurry_ = val; }

  inline bool IsStopped() { return state() == STOPPED; }

  INLINE(bool IsMarking()) { return state() >= MARKING; }

  inline bool IsMarkingIncomplete() { return state() == MARKING; }

  inline bool IsComplete() { return state() == COMPLETE; }

  bool WorthActivating();

  void Start();

  void Stop();

  void PrepareForScavenge();

  void UpdateMarkingDequeAfterScavenge();

  void Hurry();

  void Finalize();

  void Abort();

  void MarkingComplete(CompletionAction action);

  // It's hard to know how much work the incremental marker should do to make
  // progress in the face of the mutator creating new work for it.  We start
  // off at a moderate rate of work and gradually increase the speed of the
  // incremental marker until it completes.
  // Do some marking every time this much memory has been allocated.
  static const intptr_t kAllocatedThreshold = 65536;
  // Start off by marking this many times more memory than has been allocated.
  static const intptr_t kInitialAllocationMarkingFactor = 1;
  // But if we are promoting a lot of data we need to mark faster to keep up
  // with the data that is entering the old space through promotion.
  static const intptr_t kFastMarking = 3;
  // After this many steps we increase the marking/allocating factor.
  static const intptr_t kAllocationMarkingFactorSpeedupInterval = 1024;
  // This is how much we increase the marking/allocating factor by.
  static const intptr_t kAllocationMarkingFactorSpeedup = 2;
  static const intptr_t kMaxAllocationMarkingFactor = 1000;
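  //
  // Editorial sketch of the intended ramp-up, assuming the factor is bumped
  // multiplicatively; the real update lives in incremental-marking.cc and may
  // differ in detail:
  //
  //   intptr_t factor = kInitialAllocationMarkingFactor;       // start at 1
  //   // ...every kAllocationMarkingFactorSpeedupInterval (1024) steps:
  //   factor = Min(factor * kAllocationMarkingFactorSpeedup,   // 1, 2, 4, ...
  //                kMaxAllocationMarkingFactor);               // capped at 1000
  //
  // So a long marking cycle ends up doing far more marking work per
  // kAllocatedThreshold (64 KB) of allocation than it did when it started.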

  void OldSpaceStep(intptr_t allocated) {
    Step(allocated * kFastMarking / kInitialAllocationMarkingFactor,
         GC_VIA_STACK_GUARD);
  }
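  // For illustration (editorial note): with the default constants above this
  // is Step(allocated * 3 / 1, ...), i.e. an old-space allocation of 1 MB
  // credits the marker with roughly 3 MB of marking work, so marking keeps up
  // with data entering old space through promotion.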

  void Step(intptr_t allocated, CompletionAction action);

  inline void RestartIfNotMarking() {
    if (state_ == COMPLETE) {
      state_ = MARKING;
      if (FLAG_trace_incremental_marking) {
        PrintF("[IncrementalMarking] Restarting (new grey objects)\n");
      }
    }
  }

  static void RecordWriteFromCode(HeapObject* obj,
                                  Object* value,
                                  Isolate* isolate);

  static void RecordWriteForEvacuationFromCode(HeapObject* obj,
                                               Object** slot,
                                               Isolate* isolate);

  INLINE(bool BaseRecordWrite(HeapObject* obj, Object** slot, Object* value));
  INLINE(void RecordWrite(HeapObject* obj, Object** slot, Object* value));
  INLINE(void RecordWriteIntoCode(HeapObject* obj,
                                  RelocInfo* rinfo,
                                  Object* value));
  INLINE(void RecordWriteOfCodeEntry(JSFunction* host,
                                     Object** slot,
                                     Code* value));


  void RecordWriteSlow(HeapObject* obj, Object** slot, Object* value);
  void RecordWriteIntoCodeSlow(HeapObject* obj,
                               RelocInfo* rinfo,
                               Object* value);
  void RecordWriteOfCodeEntrySlow(JSFunction* host, Object** slot, Code* value);
  void RecordCodeTargetPatch(Code* host, Address pc, HeapObject* value);
  void RecordCodeTargetPatch(Address pc, HeapObject* value);
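  //
  // Usage sketch (editorial addition; the call site shown is illustrative,
  // not quoted from V8): after the runtime stores |value| into |slot| of
  // |obj| while incremental marking is active, it runs the write barrier,
  // e.g.
  //
  //   heap->incremental_marking()->RecordWrite(obj, slot, value);
  //
  // so that a black |obj| newly pointing at a white |value| is re-greyed or
  // the value is pushed onto the marking deque, preserving the tri-color
  // invariant while the mutator runs. The *FromCode variants above appear to
  // be the entry points reached from generated code and stubs.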

  inline void RecordWrites(HeapObject* obj);

  inline void BlackToGreyAndUnshift(HeapObject* obj, MarkBit mark_bit);

  inline void WhiteToGreyAndPush(HeapObject* obj, MarkBit mark_bit);

  inline void WhiteToGrey(HeapObject* obj, MarkBit mark_bit);

  // Transitions white->black, or keeps an existing grey or black color.
  // Returns true if the object was converted from white to black.
  inline bool MarkBlackOrKeepGrey(MarkBit mark_bit) {
    ASSERT(!Marking::IsImpossible(mark_bit));
    if (mark_bit.Get()) {
      // Grey or black: Keep the color.
      return false;
    }
    mark_bit.Set();
    ASSERT(Marking::IsBlack(mark_bit));
    return true;
  }
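  //
  // Note (editorial addition): this relies on the two-bit mark encoding from
  // mark-compact.h, where white objects have the primary mark bit cleared and
  // grey/black objects have it set (the second bit distinguishes grey from
  // black). Setting the primary bit of a white object therefore produces
  // black directly, which the ASSERT above verifies.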

  inline int steps_count() {
    return steps_count_;
  }

  inline double steps_took() {
    return steps_took_;
  }

  inline double longest_step() {
    return longest_step_;
  }

  inline int steps_count_since_last_gc() {
    return steps_count_since_last_gc_;
  }

  inline double steps_took_since_last_gc() {
    return steps_took_since_last_gc_;
  }

  inline void SetOldSpacePageFlags(MemoryChunk* chunk) {
    SetOldSpacePageFlags(chunk, IsMarking(), IsCompacting());
  }

  inline void SetNewSpacePageFlags(NewSpacePage* chunk) {
    SetNewSpacePageFlags(chunk, IsMarking());
  }

  MarkingDeque* marking_deque() { return &marking_deque_; }

  bool IsCompacting() { return IsMarking() && is_compacting_; }

  void ActivateGeneratedStub(Code* stub);

  void NotifyOfHighPromotionRate() {
    if (IsMarking()) {
      if (allocation_marking_factor_ < kFastMarking) {
        if (FLAG_trace_gc) {
          PrintF("Increasing marking speed to %d due to high promotion rate\n",
                 static_cast<int>(kFastMarking));
        }
        allocation_marking_factor_ = kFastMarking;
      }
    }
  }

  void EnterNoMarkingScope() {
    no_marking_scope_depth_++;
  }

  void LeaveNoMarkingScope() {
    no_marking_scope_depth_--;
  }
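  //
  // Sketch (editorial addition; ScopedNoMarking is a hypothetical helper, not
  // part of this header): Enter/Leave calls must be balanced, which is
  // easiest to guarantee with a small RAII guard:
  //
  //   class ScopedNoMarking {
  //    public:
  //     explicit ScopedNoMarking(IncrementalMarking* marking)
  //         : marking_(marking) { marking_->EnterNoMarkingScope(); }
  //     ~ScopedNoMarking() { marking_->LeaveNoMarkingScope(); }
  //    private:
  //     IncrementalMarking* marking_;
  //   };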

  void UncommitMarkingDeque();

 private:
  int64_t SpaceLeftInOldSpace();

  void ResetStepCounters();

  enum CompactionFlag { ALLOW_COMPACTION, PREVENT_COMPACTION };

  void StartMarking(CompactionFlag flag);

  void ActivateIncrementalWriteBarrier(PagedSpace* space);
  static void ActivateIncrementalWriteBarrier(NewSpace* space);
  void ActivateIncrementalWriteBarrier();

  static void DeactivateIncrementalWriteBarrierForSpace(PagedSpace* space);
  static void DeactivateIncrementalWriteBarrierForSpace(NewSpace* space);
  void DeactivateIncrementalWriteBarrier();

  static void SetOldSpacePageFlags(MemoryChunk* chunk,
                                   bool is_marking,
                                   bool is_compacting);

  static void SetNewSpacePageFlags(NewSpacePage* chunk, bool is_marking);

  void EnsureMarkingDequeIsCommitted();

  void VisitGlobalContext(Context* ctx, ObjectVisitor* v);

  Heap* heap_;

  State state_;
  bool is_compacting_;

  VirtualMemory* marking_deque_memory_;
  bool marking_deque_memory_committed_;
  MarkingDeque marking_deque_;

  int steps_count_;
  double steps_took_;
  double longest_step_;
  int64_t old_generation_space_available_at_start_of_incremental_;
  int64_t old_generation_space_used_at_start_of_incremental_;
  int steps_count_since_last_gc_;
  double steps_took_since_last_gc_;
  int64_t bytes_rescanned_;
  bool should_hurry_;
  int allocation_marking_factor_;
  intptr_t bytes_scanned_;
  intptr_t allocated_;

  int no_marking_scope_depth_;

  DISALLOW_IMPLICIT_CONSTRUCTORS(IncrementalMarking);
};

} }  // namespace v8::internal

#endif  // V8_INCREMENTAL_MARKING_H_