// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_REMEMBERED_SET_H_
#define V8_HEAP_REMEMBERED_SET_H_

#include "src/heap/heap.h"
#include "src/heap/slot-set.h"
#include "src/heap/spaces.h"
#include "src/reloc-info.h"
#include "src/v8memory.h"

namespace v8 {
namespace internal {

// Controls whether per-chunk iteration takes the chunk's mutex (SYNCHRONIZED)
// or runs without locking (NON_SYNCHRONIZED).
enum RememberedSetIterationMode { SYNCHRONIZED, NON_SYNCHRONIZED };

// TODO(ulan): Investigate performance of de-templatizing this class.
//
// Static-only helper for recording, querying, and iterating the slots of a
// remembered set of the given |type| (e.g. old-to-new, old-to-old). Untyped
// slots live in per-chunk SlotSet arrays; typed slots (slots inside code
// objects) live in TypedSlotSets.
template <RememberedSetType type>
class RememberedSet : public AllStatic {
 public:
  // Given a page and a slot in that page, this function adds the slot to the
  // remembered set.
  template <AccessMode access_mode = AccessMode::ATOMIC>
  static void Insert(MemoryChunk* chunk, Address slot_addr) {
    DCHECK(chunk->Contains(slot_addr));
    SlotSet* slot_set = chunk->slot_set<type, access_mode>();
    if (slot_set == nullptr) {
      // Slot sets are allocated lazily on first insertion.
      slot_set = chunk->AllocateSlotSet<type>();
    }
    uintptr_t offset = slot_addr - chunk->address();
    // A chunk larger than a page is backed by an array of slot sets, one per
    // page-sized region: index by page, then by offset within that page.
    slot_set[offset / Page::kPageSize].Insert<access_mode>(offset %
                                                           Page::kPageSize);
  }

  // Given a page and a slot in that page, this function returns true if
  // the remembered set contains the slot.
  static bool Contains(MemoryChunk* chunk, Address slot_addr) {
    DCHECK(chunk->Contains(slot_addr));
    SlotSet* slot_set = chunk->slot_set<type>();
    if (slot_set == nullptr) {
      // No slot set allocated means no slot was ever recorded.
      return false;
    }
    uintptr_t offset = slot_addr - chunk->address();
    return slot_set[offset / Page::kPageSize].Contains(offset %
                                                       Page::kPageSize);
  }

  // Given a page and a slot in that page, this function removes the slot from
  // the remembered set.
  // If the slot was never added, then the function does nothing.
53 static void Remove(MemoryChunk* chunk, Address slot_addr) { 54 DCHECK(chunk->Contains(slot_addr)); 55 SlotSet* slot_set = chunk->slot_set<type>(); 56 if (slot_set != nullptr) { 57 uintptr_t offset = slot_addr - chunk->address(); 58 slot_set[offset / Page::kPageSize].Remove(offset % Page::kPageSize); 59 } 60 } 61 62 // Given a page and a range of slots in that page, this function removes the 63 // slots from the remembered set. 64 static void RemoveRange(MemoryChunk* chunk, Address start, Address end, 65 SlotSet::EmptyBucketMode mode) { 66 SlotSet* slot_set = chunk->slot_set<type>(); 67 if (slot_set != nullptr) { 68 uintptr_t start_offset = start - chunk->address(); 69 uintptr_t end_offset = end - chunk->address(); 70 DCHECK_LT(start_offset, end_offset); 71 if (end_offset < static_cast<uintptr_t>(Page::kPageSize)) { 72 slot_set->RemoveRange(static_cast<int>(start_offset), 73 static_cast<int>(end_offset), mode); 74 } else { 75 // The large page has multiple slot sets. 76 // Compute slot set indicies for the range [start_offset, end_offset). 77 int start_chunk = static_cast<int>(start_offset / Page::kPageSize); 78 int end_chunk = static_cast<int>((end_offset - 1) / Page::kPageSize); 79 int offset_in_start_chunk = 80 static_cast<int>(start_offset % Page::kPageSize); 81 // Note that using end_offset % Page::kPageSize would be incorrect 82 // because end_offset is one beyond the last slot to clear. 83 int offset_in_end_chunk = static_cast<int>( 84 end_offset - static_cast<uintptr_t>(end_chunk) * Page::kPageSize); 85 if (start_chunk == end_chunk) { 86 slot_set[start_chunk].RemoveRange(offset_in_start_chunk, 87 offset_in_end_chunk, mode); 88 } else { 89 // Clear all slots from start_offset to the end of first chunk. 90 slot_set[start_chunk].RemoveRange(offset_in_start_chunk, 91 Page::kPageSize, mode); 92 // Clear all slots in intermediate chunks. 
93 for (int i = start_chunk + 1; i < end_chunk; i++) { 94 slot_set[i].RemoveRange(0, Page::kPageSize, mode); 95 } 96 // Clear slots from the beginning of the last page to end_offset. 97 slot_set[end_chunk].RemoveRange(0, offset_in_end_chunk, mode); 98 } 99 } 100 } 101 } 102 103 // Iterates and filters the remembered set with the given callback. 104 // The callback should take (Address slot) and return SlotCallbackResult. 105 template <typename Callback> 106 static void Iterate(Heap* heap, RememberedSetIterationMode mode, 107 Callback callback) { 108 IterateMemoryChunks(heap, [mode, callback](MemoryChunk* chunk) { 109 if (mode == SYNCHRONIZED) chunk->mutex()->Lock(); 110 Iterate(chunk, callback); 111 if (mode == SYNCHRONIZED) chunk->mutex()->Unlock(); 112 }); 113 } 114 115 // Iterates over all memory chunks that contains non-empty slot sets. 116 // The callback should take (MemoryChunk* chunk) and return void. 117 template <typename Callback> 118 static void IterateMemoryChunks(Heap* heap, Callback callback) { 119 MemoryChunkIterator it(heap); 120 MemoryChunk* chunk; 121 while ((chunk = it.next()) != nullptr) { 122 SlotSet* slots = chunk->slot_set<type>(); 123 TypedSlotSet* typed_slots = chunk->typed_slot_set<type>(); 124 if (slots != nullptr || typed_slots != nullptr || 125 chunk->invalidated_slots() != nullptr) { 126 callback(chunk); 127 } 128 } 129 } 130 131 // Iterates and filters the remembered set in the given memory chunk with 132 // the given callback. The callback should take (Address slot) and return 133 // SlotCallbackResult. 134 // 135 // Notice that |mode| can only be of FREE* or PREFREE* if there are no other 136 // threads concurrently inserting slots. 
137 template <typename Callback> 138 static void Iterate(MemoryChunk* chunk, Callback callback, 139 SlotSet::EmptyBucketMode mode) { 140 SlotSet* slots = chunk->slot_set<type>(); 141 if (slots != nullptr) { 142 size_t pages = (chunk->size() + Page::kPageSize - 1) / Page::kPageSize; 143 int new_count = 0; 144 for (size_t page = 0; page < pages; page++) { 145 new_count += slots[page].Iterate(callback, mode); 146 } 147 // Only old-to-old slot sets are released eagerly. Old-new-slot sets are 148 // released by the sweeper threads. 149 if (type == OLD_TO_OLD && new_count == 0) { 150 chunk->ReleaseSlotSet<OLD_TO_OLD>(); 151 } 152 } 153 } 154 155 static int NumberOfPreFreedEmptyBuckets(MemoryChunk* chunk) { 156 DCHECK(type == OLD_TO_NEW); 157 int result = 0; 158 SlotSet* slots = chunk->slot_set<type>(); 159 if (slots != nullptr) { 160 size_t pages = (chunk->size() + Page::kPageSize - 1) / Page::kPageSize; 161 for (size_t page = 0; page < pages; page++) { 162 result += slots[page].NumberOfPreFreedEmptyBuckets(); 163 } 164 } 165 return result; 166 } 167 168 static void PreFreeEmptyBuckets(MemoryChunk* chunk) { 169 DCHECK(type == OLD_TO_NEW); 170 SlotSet* slots = chunk->slot_set<type>(); 171 if (slots != nullptr) { 172 size_t pages = (chunk->size() + Page::kPageSize - 1) / Page::kPageSize; 173 for (size_t page = 0; page < pages; page++) { 174 slots[page].PreFreeEmptyBuckets(); 175 } 176 } 177 } 178 179 static void FreeEmptyBuckets(MemoryChunk* chunk) { 180 DCHECK(type == OLD_TO_NEW); 181 SlotSet* slots = chunk->slot_set<type>(); 182 if (slots != nullptr) { 183 size_t pages = (chunk->size() + Page::kPageSize - 1) / Page::kPageSize; 184 for (size_t page = 0; page < pages; page++) { 185 slots[page].FreeEmptyBuckets(); 186 slots[page].FreeToBeFreedBuckets(); 187 } 188 } 189 } 190 191 // Given a page and a typed slot in that page, this function adds the slot 192 // to the remembered set. 
193 static void InsertTyped(Page* page, Address host_addr, SlotType slot_type, 194 Address slot_addr) { 195 TypedSlotSet* slot_set = page->typed_slot_set<type>(); 196 if (slot_set == nullptr) { 197 slot_set = page->AllocateTypedSlotSet<type>(); 198 } 199 if (host_addr == kNullAddress) { 200 host_addr = page->address(); 201 } 202 uintptr_t offset = slot_addr - page->address(); 203 uintptr_t host_offset = host_addr - page->address(); 204 DCHECK_LT(offset, static_cast<uintptr_t>(TypedSlotSet::kMaxOffset)); 205 DCHECK_LT(host_offset, static_cast<uintptr_t>(TypedSlotSet::kMaxOffset)); 206 slot_set->Insert(slot_type, static_cast<uint32_t>(host_offset), 207 static_cast<uint32_t>(offset)); 208 } 209 210 // Given a page and a range of typed slots in that page, this function removes 211 // the slots from the remembered set. 212 static void RemoveRangeTyped(MemoryChunk* page, Address start, Address end) { 213 TypedSlotSet* slots = page->typed_slot_set<type>(); 214 if (slots != nullptr) { 215 slots->Iterate( 216 [start, end](SlotType slot_type, Address host_addr, 217 Address slot_addr) { 218 return start <= slot_addr && slot_addr < end ? REMOVE_SLOT 219 : KEEP_SLOT; 220 }, 221 TypedSlotSet::PREFREE_EMPTY_CHUNKS); 222 } 223 } 224 225 // Iterates and filters the remembered set with the given callback. 226 // The callback should take (SlotType slot_type, SlotAddress slot) and return 227 // SlotCallbackResult. 228 template <typename Callback> 229 static void IterateTyped(Heap* heap, RememberedSetIterationMode mode, 230 Callback callback) { 231 IterateMemoryChunks(heap, [mode, callback](MemoryChunk* chunk) { 232 if (mode == SYNCHRONIZED) chunk->mutex()->Lock(); 233 IterateTyped(chunk, callback); 234 if (mode == SYNCHRONIZED) chunk->mutex()->Unlock(); 235 }); 236 } 237 238 // Iterates and filters typed old to old pointers in the given memory chunk 239 // with the given callback. The callback should take (SlotType slot_type, 240 // Address slot_addr) and return SlotCallbackResult. 
241 template <typename Callback> 242 static void IterateTyped(MemoryChunk* chunk, Callback callback) { 243 TypedSlotSet* slots = chunk->typed_slot_set<type>(); 244 if (slots != nullptr) { 245 int new_count = slots->Iterate(callback, TypedSlotSet::KEEP_EMPTY_CHUNKS); 246 if (new_count == 0) { 247 chunk->ReleaseTypedSlotSet<type>(); 248 } 249 } 250 } 251 252 // Clear all old to old slots from the remembered set. 253 static void ClearAll(Heap* heap) { 254 STATIC_ASSERT(type == OLD_TO_OLD); 255 MemoryChunkIterator it(heap); 256 MemoryChunk* chunk; 257 while ((chunk = it.next()) != nullptr) { 258 chunk->ReleaseSlotSet<OLD_TO_OLD>(); 259 chunk->ReleaseTypedSlotSet<OLD_TO_OLD>(); 260 chunk->ReleaseInvalidatedSlots(); 261 } 262 } 263 264 // Eliminates all stale slots from the remembered set, i.e. 265 // slots that are not part of live objects anymore. This method must be 266 // called after marking, when the whole transitive closure is known and 267 // must be called before sweeping when mark bits are still intact. 268 static void ClearInvalidTypedSlots(Heap* heap, MemoryChunk* chunk); 269 270 private: 271 static bool IsValidSlot(Heap* heap, MemoryChunk* chunk, Object** slot); 272 }; 273 274 class UpdateTypedSlotHelper { 275 public: 276 // Updates a code entry slot using an untyped slot callback. 277 // The callback accepts Object** and returns SlotCallbackResult. 278 template <typename Callback> 279 static SlotCallbackResult UpdateCodeEntry(Address entry_address, 280 Callback callback) { 281 Object* code = Code::GetObjectFromEntryAddress(entry_address); 282 Object* old_code = code; 283 SlotCallbackResult result = 284 callback(reinterpret_cast<MaybeObject**>(&code)); 285 DCHECK(!HasWeakHeapObjectTag(code)); 286 if (code != old_code) { 287 Memory<Address>(entry_address) = reinterpret_cast<Code*>(code)->entry(); 288 } 289 return result; 290 } 291 292 // Updates a code target slot using an untyped slot callback. 
293 // The callback accepts Object** and returns SlotCallbackResult. 294 template <typename Callback> 295 static SlotCallbackResult UpdateCodeTarget(RelocInfo* rinfo, 296 Callback callback) { 297 DCHECK(RelocInfo::IsCodeTargetMode(rinfo->rmode())); 298 Code* old_target = Code::GetCodeFromTargetAddress(rinfo->target_address()); 299 Object* new_target = old_target; 300 SlotCallbackResult result = 301 callback(reinterpret_cast<MaybeObject**>(&new_target)); 302 DCHECK(!HasWeakHeapObjectTag(new_target)); 303 if (new_target != old_target) { 304 rinfo->set_target_address( 305 Code::cast(new_target)->raw_instruction_start()); 306 } 307 return result; 308 } 309 310 // Updates an embedded pointer slot using an untyped slot callback. 311 // The callback accepts Object** and returns SlotCallbackResult. 312 template <typename Callback> 313 static SlotCallbackResult UpdateEmbeddedPointer(Heap* heap, RelocInfo* rinfo, 314 Callback callback) { 315 DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT); 316 HeapObject* old_target = rinfo->target_object(); 317 Object* new_target = old_target; 318 SlotCallbackResult result = 319 callback(reinterpret_cast<MaybeObject**>(&new_target)); 320 DCHECK(!HasWeakHeapObjectTag(new_target)); 321 if (new_target != old_target) { 322 rinfo->set_target_object(heap, HeapObject::cast(new_target)); 323 } 324 return result; 325 } 326 327 // Updates a typed slot using an untyped slot callback. 328 // The callback accepts MaybeObject** and returns SlotCallbackResult. 
329 template <typename Callback> 330 static SlotCallbackResult UpdateTypedSlot(Heap* heap, SlotType slot_type, 331 Address addr, Callback callback) { 332 switch (slot_type) { 333 case CODE_TARGET_SLOT: { 334 RelocInfo rinfo(addr, RelocInfo::CODE_TARGET, 0, nullptr); 335 return UpdateCodeTarget(&rinfo, callback); 336 } 337 case CODE_ENTRY_SLOT: { 338 return UpdateCodeEntry(addr, callback); 339 } 340 case EMBEDDED_OBJECT_SLOT: { 341 RelocInfo rinfo(addr, RelocInfo::EMBEDDED_OBJECT, 0, nullptr); 342 return UpdateEmbeddedPointer(heap, &rinfo, callback); 343 } 344 case OBJECT_SLOT: { 345 return callback(reinterpret_cast<MaybeObject**>(addr)); 346 } 347 case CLEARED_SLOT: 348 break; 349 } 350 UNREACHABLE(); 351 } 352 }; 353 354 inline SlotType SlotTypeForRelocInfoMode(RelocInfo::Mode rmode) { 355 if (RelocInfo::IsCodeTargetMode(rmode)) { 356 return CODE_TARGET_SLOT; 357 } else if (RelocInfo::IsEmbeddedObject(rmode)) { 358 return EMBEDDED_OBJECT_SLOT; 359 } 360 UNREACHABLE(); 361 } 362 363 } // namespace internal 364 } // namespace v8 365 366 #endif // V8_HEAP_REMEMBERED_SET_H_ 367