// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_VISITING_H_
#define V8_OBJECTS_VISITING_H_

#include "src/allocation.h"
#include "src/heap/heap.h"
#include "src/heap/spaces.h"
#include "src/layout-descriptor.h"
#include "src/objects-body-descriptors.h"

// This file provides base classes and auxiliary methods for defining
// static object visitors used during GC.
// Visiting a HeapObject body with a normal ObjectVisitor requires performing
// two switches on the object's instance type to determine object size and
// layout, and one or more virtual method calls on the visitor itself.
// A static visitor is different: it provides a dispatch table which contains
// pointers to specialized visit functions. Each map has the visitor_id
// field which contains an index of the specialized visitor to use.

namespace v8 {
namespace internal {


// Base class for all static visitors.
class StaticVisitorBase : public AllStatic {
 public:
  // X-macro enumerating every visitor specialization. Applying V to each name
  // yields the kVisit* enum values below, in declaration order. The
  // DataObject2..DataObjectGeneric, JSObject2..JSObjectGeneric and
  // Struct2..StructGeneric runs must stay contiguous (see the comment on
  // VisitorId below).
#define VISITOR_ID_LIST(V) \
  V(SeqOneByteString)      \
  V(SeqTwoByteString)      \
  V(ShortcutCandidate)     \
  V(ByteArray)             \
  V(BytecodeArray)         \
  V(FreeSpace)             \
  V(FixedArray)            \
  V(FixedDoubleArray)      \
  V(FixedTypedArray)       \
  V(FixedFloat64Array)     \
  V(NativeContext)         \
  V(AllocationSite)        \
  V(DataObject2)           \
  V(DataObject3)           \
  V(DataObject4)           \
  V(DataObject5)           \
  V(DataObject6)           \
  V(DataObject7)           \
  V(DataObject8)           \
  V(DataObject9)           \
  V(DataObjectGeneric)     \
  V(JSObject2)             \
  V(JSObject3)             \
  V(JSObject4)             \
  V(JSObject5)             \
  V(JSObject6)             \
  V(JSObject7)             \
  V(JSObject8)             \
  V(JSObject9)             \
  V(JSObjectGeneric)       \
  V(Struct2)               \
  V(Struct3)               \
  V(Struct4)               \
  V(Struct5)               \
  V(Struct6)               \
  V(Struct7)               \
  V(Struct8)               \
  V(Struct9)               \
  V(StructGeneric)         \
  V(ConsString)            \
  V(SlicedString)          \
  V(Symbol)                \
  V(Oddball)               \
  V(Code)                  \
  V(Map)                   \
  V(Cell)                  \
  V(PropertyCell)          \
  V(WeakCell)              \
  V(TransitionArray)       \
  V(SharedFunctionInfo)    \
  V(JSFunction)            \
  V(JSWeakCollection)      \
  V(JSArrayBuffer)         \
  V(JSRegExp)

  // For data objects, JS objects and structs along with generic visitor which
  // can visit object of any size we provide visitors specialized by
  // object size in words.
  // Ids of specialized visitors are declared in a linear order (without
  // holes) starting from the id of visitor specialized for 2 words objects
  // (base visitor id) and ending with the id of generic visitor.
  // Method GetVisitorIdForSize depends on this ordering to calculate visitor
  // id of specialized visitor from given instance size, base visitor id and
  // generic visitor's id.
  enum VisitorId {
#define VISITOR_ID_ENUM_DECL(id) kVisit##id,
    VISITOR_ID_LIST(VISITOR_ID_ENUM_DECL)
#undef VISITOR_ID_ENUM_DECL
    kVisitorIdCount,
    // Aliases: the base id of each size-specialized family is its 2-word
    // variant.
    kVisitDataObject = kVisitDataObject2,
    kVisitJSObject = kVisitJSObject2,
    kVisitStruct = kVisitStruct2,
  };

  // Visitor ID should fit in one byte.
  STATIC_ASSERT(kVisitorIdCount <= 256);

  // Determine which specialized visitor should be used for given instance
  // type and instance size.
  static VisitorId GetVisitorId(int instance_type, int instance_size,
                                bool has_unboxed_fields);

  // Determine which specialized visitor should be used for given map.
  static VisitorId GetVisitorId(Map* map);

  // For visitors that allow specialization by size calculate VisitorId based
  // on size, base visitor id and generic visitor id.
  static VisitorId GetVisitorIdForSize(VisitorId base, VisitorId generic,
                                       int object_size,
                                       bool has_unboxed_fields) {
    DCHECK((base == kVisitDataObject) || (base == kVisitStruct) ||
           (base == kVisitJSObject));
    DCHECK(IsAligned(object_size, kPointerSize));
    DCHECK(Heap::kMinObjectSizeInWords * kPointerSize <= object_size);
    DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
    DCHECK(!has_unboxed_fields || (base == kVisitJSObject));

    // Objects with unboxed (double) fields always take the generic visitor;
    // only JSObjects may have them (see DCHECK above).
    if (has_unboxed_fields) return generic;

    // Specialized ids are contiguous starting at `base` (the 2-word variant),
    // so the id is base + size_in_words - min_size_in_words, capped at the
    // generic visitor's id for larger objects.
    int visitor_id = Min(
        base + (object_size >> kPointerSizeLog2) - Heap::kMinObjectSizeInWords,
        static_cast<int>(generic));

    return static_cast<VisitorId>(visitor_id);
  }
};


// Table mapping VisitorId -> visit callback. Entries are stored as atomic
// words so the table can be swapped/updated while readers dispatch.
template <typename Callback>
class VisitorDispatchTable {
 public:
  // Copies all entries from `other`, element by element.
  void CopyFrom(VisitorDispatchTable* other) {
    // We are not using memcpy to guarantee that during update
    // every element of callbacks_ array will remain correct
    // pointer (memcpy might be implemented as a byte copying loop).
    for (int i = 0; i < StaticVisitorBase::kVisitorIdCount; i++) {
      base::NoBarrier_Store(&callbacks_[i], other->callbacks_[i]);
    }
  }

  // Looks up the callback registered for the map's visitor id.
  inline Callback GetVisitor(Map* map);

  inline Callback GetVisitorById(StaticVisitorBase::VisitorId id) {
    return reinterpret_cast<Callback>(callbacks_[id]);
  }

  void Register(StaticVisitorBase::VisitorId id, Callback callback) {
    DCHECK(id < StaticVisitorBase::kVisitorIdCount);  // id is unsigned.
    callbacks_[id] = reinterpret_cast<base::AtomicWord>(callback);
  }

  // Registers Visitor::VisitSpecialized<size> under the id computed for
  // object_size_in_words within the [base, generic] family.
  template <typename Visitor, StaticVisitorBase::VisitorId base,
            StaticVisitorBase::VisitorId generic, int object_size_in_words>
  void RegisterSpecialization() {
    static const int size = object_size_in_words * kPointerSize;
    Register(StaticVisitorBase::GetVisitorIdForSize(base, generic, size, false),
             &Visitor::template VisitSpecialized<size>);
  }


  // Registers size-specialized visitors for 2..9 word objects plus the
  // generic fallback. The STATIC_ASSERT pins the expected width of the
  // [base, generic] id range.
  template <typename Visitor, StaticVisitorBase::VisitorId base,
            StaticVisitorBase::VisitorId generic>
  void RegisterSpecializations() {
    STATIC_ASSERT((generic - base + Heap::kMinObjectSizeInWords) == 10);
    RegisterSpecialization<Visitor, base, generic, 2>();
    RegisterSpecialization<Visitor, base, generic, 3>();
    RegisterSpecialization<Visitor, base, generic, 4>();
    RegisterSpecialization<Visitor, base, generic, 5>();
    RegisterSpecialization<Visitor, base, generic, 6>();
    RegisterSpecialization<Visitor, base, generic, 7>();
    RegisterSpecialization<Visitor, base, generic, 8>();
    RegisterSpecialization<Visitor, base, generic, 9>();
    Register(generic, &Visitor::Visit);
  }

 private:
  // One entry per visitor id; holds Callback pointers stored as atomic words.
  base::AtomicWord callbacks_[StaticVisitorBase::kVisitorIdCount];
};


// Visitor for objects whose size is computed at runtime from the map/body
// descriptor (BodyDescriptor::SizeOf). Visit returns the object size cast to
// ReturnType.
template <typename StaticVisitor, typename BodyDescriptor, typename ReturnType>
class FlexibleBodyVisitor : public AllStatic {
 public:
  INLINE(static ReturnType Visit(Map* map, HeapObject* object)) {
    int object_size = BodyDescriptor::SizeOf(map, object);
    BodyDescriptor::template IterateBody<StaticVisitor>(object, object_size);
    return static_cast<ReturnType>(object_size);
  }

  // This specialization is only suitable for objects containing pointer
  // fields: it visits every word from kStartOffset up to object_size as a
  // pointer slot.
  template <int object_size>
  static inline ReturnType VisitSpecialized(Map* map, HeapObject* object) {
    DCHECK(BodyDescriptor::SizeOf(map, object) == object_size);
    DCHECK(!FLAG_unbox_double_fields || map->HasFastPointerLayout());
    StaticVisitor::VisitPointers(
        object->GetHeap(), object,
        HeapObject::RawField(object, BodyDescriptor::kStartOffset),
        HeapObject::RawField(object, object_size));
    return static_cast<ReturnType>(object_size);
  }
};


// Visitor for objects whose size is a compile-time constant
// (BodyDescriptor::kSize).
template <typename StaticVisitor, typename BodyDescriptor, typename ReturnType>
class FixedBodyVisitor : public AllStatic {
 public:
  INLINE(static ReturnType Visit(Map* map, HeapObject* object)) {
    BodyDescriptor::template IterateBody<StaticVisitor>(object);
    return static_cast<ReturnType>(BodyDescriptor::kSize);
  }
};


// Base class for visitors used for a linear new space iteration.
// IterateBody returns size of visited object.
// Certain types of objects (i.e. Code objects) are not handled
// by dispatch table of this visitor because they cannot appear
// in the new space.
//
// This class is intended to be used in the following way:
//
//   class SomeVisitor : public StaticNewSpaceVisitor<SomeVisitor> {
//     ...
//   }
//
// This is an example of Curiously recurring template pattern
// (see http://en.wikipedia.org/wiki/Curiously_recurring_template_pattern).
// We use CRTP to guarantee aggressive compile time optimizations (i.e.
// inlining and specialization of StaticVisitor::VisitPointers methods).
template <typename StaticVisitor>
class StaticNewSpaceVisitor : public StaticVisitorBase {
 public:
  // Registers the visit functions in table_ for new-space object types.
  static void Initialize();

  // Dispatches through table_ on the map's visitor id and returns the
  // visited object's size (see class comment above).
  INLINE(static int IterateBody(Map* map, HeapObject* obj)) {
    return table_.GetVisitor(map)(map, obj);
  }

  // Visits each slot in [start, end) by delegating to the derived visitor's
  // VisitPointer (CRTP: StaticVisitor is the concrete subclass).
  INLINE(static void VisitPointers(Heap* heap, HeapObject* object,
                                   Object** start, Object** end)) {
    for (Object** p = start; p < end; p++) {
      StaticVisitor::VisitPointer(heap, object, p);
    }
  }

  // Although we are using the JSFunction body descriptor which does not
  // visit the code entry, compiler wants it to be accessible.
  // See JSFunction::BodyDescriptorImpl.
  INLINE(static void VisitCodeEntry(Heap* heap, HeapObject* object,
                                    Address entry_address)) {
    UNREACHABLE();
  }

 private:
  // Byte arrays carry no pointer slots; only the size is reported.
  INLINE(static int VisitByteArray(Map* map, HeapObject* object)) {
    return reinterpret_cast<ByteArray*>(object)->ByteArraySize();
  }

  // Double arrays carry no pointer slots; size derives from the length.
  INLINE(static int VisitFixedDoubleArray(Map* map, HeapObject* object)) {
    int length = reinterpret_cast<FixedDoubleArray*>(object)->length();
    return FixedDoubleArray::SizeFor(length);
  }

  INLINE(static int VisitJSObject(Map* map, HeapObject* object)) {
    return JSObjectVisitor::Visit(map, object);
  }

  INLINE(static int VisitSeqOneByteString(Map* map, HeapObject* object)) {
    return SeqOneByteString::cast(object)
        ->SeqOneByteStringSize(map->instance_type());
  }

  INLINE(static int VisitSeqTwoByteString(Map* map, HeapObject* object)) {
    return SeqTwoByteString::cast(object)
        ->SeqTwoByteStringSize(map->instance_type());
  }

  INLINE(static int VisitFreeSpace(Map* map, HeapObject* object)) {
    return FreeSpace::cast(object)->size();
  }

  // Declarations only; definitions live out of line.
  INLINE(static int VisitJSArrayBuffer(Map* map, HeapObject* object));
  INLINE(static int VisitBytecodeArray(Map* map, HeapObject* object));

  // Visitor for objects with no pointer fields to follow: visiting just
  // reports the (possibly statically known) instance size.
  class DataObjectVisitor {
   public:
    template <int object_size>
    static inline int VisitSpecialized(Map* map, HeapObject* object) {
      return object_size;
    }

    INLINE(static int Visit(Map* map, HeapObject* object)) {
      return map->instance_size();
    }
  };

  typedef FlexibleBodyVisitor<StaticVisitor, StructBodyDescriptor, int>
      StructVisitor;

  typedef FlexibleBodyVisitor<StaticVisitor, JSObject::BodyDescriptor, int>
      JSObjectVisitor;

  // Visit callbacks return the visited object's size in bytes.
  typedef int (*Callback)(Map* map, HeapObject* object);

  // Per-instantiation dispatch table, populated by Initialize().
  static VisitorDispatchTable<Callback> table_;
};


template <typename StaticVisitor>
VisitorDispatchTable<typename StaticNewSpaceVisitor<StaticVisitor>::Callback>
    StaticNewSpaceVisitor<StaticVisitor>::table_;


// Base class for visitors used to transitively mark the entire heap.
// IterateBody returns nothing.
// Certain types of objects might not be handled by this base class and
// no visitor function is registered by the generic initialization. A
// specialized visitor function needs to be provided by the inheriting
// class itself for those cases.
//
// This class is intended to be used in the following way:
//
//   class SomeVisitor : public StaticMarkingVisitor<SomeVisitor> {
//     ...
//   }
//
// This is an example of Curiously recurring template pattern.
template <typename StaticVisitor>
class StaticMarkingVisitor : public StaticVisitorBase {
 public:
  // Registers the visit functions in table_ (see class comment above).
  static void Initialize();

  // Dispatches through table_ on the map's visitor id; marking visitors
  // return nothing.
  INLINE(static void IterateBody(Map* map, HeapObject* obj)) {
    table_.GetVisitor(map)(map, obj);
  }

  INLINE(static void VisitPropertyCell(Map* map, HeapObject* object));
  INLINE(static void VisitWeakCell(Map* map, HeapObject* object));
  INLINE(static void VisitTransitionArray(Map* map, HeapObject* object));
  INLINE(static void VisitCodeEntry(Heap* heap, HeapObject* object,
                                    Address entry_address));
  INLINE(static void VisitEmbeddedPointer(Heap* heap, RelocInfo* rinfo));
  INLINE(static void VisitCell(Heap* heap, RelocInfo* rinfo));
  INLINE(static void VisitDebugTarget(Heap* heap, RelocInfo* rinfo));
  INLINE(static void VisitCodeTarget(Heap* heap, RelocInfo* rinfo));
  INLINE(static void VisitCodeAgeSequence(Heap* heap, RelocInfo* rinfo));
  // Deliberate no-ops for this visitor.
  INLINE(static void VisitExternalReference(RelocInfo* rinfo)) {}
  INLINE(static void VisitInternalReference(RelocInfo* rinfo)) {}
  INLINE(static void VisitRuntimeEntry(RelocInfo* rinfo)) {}
  // Skip the weak next code link in a code object.
  INLINE(static void VisitNextCodeLink(Heap* heap, Object** slot)) {}

 protected:
  INLINE(static void VisitMap(Map* map, HeapObject* object));
  INLINE(static void VisitCode(Map* map, HeapObject* object));
  INLINE(static void VisitSharedFunctionInfo(Map* map, HeapObject* object));
  INLINE(static void VisitAllocationSite(Map* map, HeapObject* object));
  INLINE(static void VisitWeakCollection(Map* map, HeapObject* object));
  INLINE(static void VisitJSFunction(Map* map, HeapObject* object));
  INLINE(static void VisitJSRegExp(Map* map, HeapObject* object));
  INLINE(static void VisitJSArrayBuffer(Map* map, HeapObject* object));
  INLINE(static void VisitNativeContext(Map* map, HeapObject* object));
  INLINE(static void VisitBytecodeArray(Map* map, HeapObject* object));

  // Mark pointers in a Map treating some elements of the descriptor array
  // weak.
  static void MarkMapContents(Heap* heap, Map* map);

  // Code flushing support.
  INLINE(static bool IsFlushable(Heap* heap, JSFunction* function));
  INLINE(static bool IsFlushable(Heap* heap, SharedFunctionInfo* shared_info));

  // Helpers used by code flushing support that visit pointer fields and treat
  // references to code objects either strongly or weakly.
  // NOTE(review): the SharedFunctionInfo helpers take (Heap*, HeapObject*)
  // while the JSFunction helpers take (Map*, HeapObject*) — verify against
  // their definitions before unifying.
  static void VisitSharedFunctionInfoStrongCode(Heap* heap, HeapObject* object);
  static void VisitSharedFunctionInfoWeakCode(Heap* heap, HeapObject* object);
  static void VisitJSFunctionStrongCode(Map* map, HeapObject* object);
  static void VisitJSFunctionWeakCode(Map* map, HeapObject* object);

  // Visitor for objects with no pointer fields to follow: marking them is a
  // no-op.
  class DataObjectVisitor {
   public:
    template <int size>
    static inline void VisitSpecialized(Map* map, HeapObject* object) {}

    INLINE(static void Visit(Map* map, HeapObject* object)) {}
  };

  typedef FlexibleBodyVisitor<StaticVisitor, FixedArray::BodyDescriptor, void>
      FixedArrayVisitor;

  typedef FlexibleBodyVisitor<StaticVisitor, JSObject::BodyDescriptor, void>
      JSObjectVisitor;

  typedef FlexibleBodyVisitor<StaticVisitor, StructBodyDescriptor, void>
      StructObjectVisitor;

  // Marking callbacks return nothing.
  typedef void (*Callback)(Map* map, HeapObject* object);

  // Per-instantiation dispatch table, populated by Initialize().
  static VisitorDispatchTable<Callback> table_;
};


template <typename StaticVisitor>
VisitorDispatchTable<typename StaticMarkingVisitor<StaticVisitor>::Callback>
    StaticMarkingVisitor<StaticVisitor>::table_;


class WeakObjectRetainer;


// A weak list is single linked list where each element has a weak pointer to
// the next element. Given the head of the list, this function removes dead
// elements from the list and if requested records slots for next-element
// pointers. The template parameter T is a WeakListVisitor that defines how to
// access the next-element pointers.
template <class T>
Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer);
}  // namespace internal
}  // namespace v8

#endif  // V8_OBJECTS_VISITING_H_