// Copyright 2006-2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_OBJECTS_VISITING_H_
#define V8_OBJECTS_VISITING_H_

// This file provides base classes and auxiliary methods for defining
// static object visitors used during GC.
// Visiting a HeapObject body with a normal ObjectVisitor requires performing
// two switches on the object's instance type to determine object size and
// layout, plus one or more virtual method calls on the visitor itself.
// A static visitor is different: it provides a dispatch table containing
// pointers to specialized visit functions. Each map has a visitor_id field
// which holds the index of the specialized visitor to use.

namespace v8 {
namespace internal {


// Base class for all static visitors.
class StaticVisitorBase : public AllStatic {
 public:
  enum VisitorId {
    kVisitSeqAsciiString = 0,
    kVisitSeqTwoByteString,
    kVisitShortcutCandidate,
    kVisitByteArray,
    kVisitFixedArray,
    kVisitGlobalContext,

    // For data objects, JS objects and structs, along with the generic
    // visitor which can visit an object of any size, we provide visitors
    // specialized by object size in words.
    // Ids of specialized visitors are declared in a linear order (without
    // holes), starting with the id of the visitor specialized for 2-word
    // objects (the base visitor id) and ending with the id of the generic
    // visitor.
    // The method GetVisitorIdForSize depends on this ordering to calculate
    // the id of the specialized visitor from a given instance size, the
    // base visitor id and the generic visitor's id.
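    // For example (a sketch assuming 4-byte pointers): a 12-byte (3-word)
    // data object maps to kVisitDataObject + 3 - kMinObjectSizeInWords,
    // i.e. kVisitDataObject3, while any object of 10 or more words is
    // clamped to kVisitDataObjectGeneric.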

    kVisitDataObject,
    kVisitDataObject2 = kVisitDataObject,
    kVisitDataObject3,
    kVisitDataObject4,
    kVisitDataObject5,
    kVisitDataObject6,
    kVisitDataObject7,
    kVisitDataObject8,
    kVisitDataObject9,
    kVisitDataObjectGeneric,

    kVisitJSObject,
    kVisitJSObject2 = kVisitJSObject,
    kVisitJSObject3,
    kVisitJSObject4,
    kVisitJSObject5,
    kVisitJSObject6,
    kVisitJSObject7,
    kVisitJSObject8,
    kVisitJSObject9,
    kVisitJSObjectGeneric,

    kVisitStruct,
    kVisitStruct2 = kVisitStruct,
    kVisitStruct3,
    kVisitStruct4,
    kVisitStruct5,
    kVisitStruct6,
    kVisitStruct7,
    kVisitStruct8,
    kVisitStruct9,
    kVisitStructGeneric,

    kVisitConsString,
    kVisitOddball,
    kVisitCode,
    kVisitMap,
    kVisitPropertyCell,
    kVisitSharedFunctionInfo,
    kVisitJSFunction,

    kVisitorIdCount,
    kMinObjectSizeInWords = 2
  };

  // Visitor ID should fit in one byte.
  STATIC_ASSERT(kVisitorIdCount <= 256);

  // Determine which specialized visitor should be used for the given
  // instance type and instance size.
  static VisitorId GetVisitorId(int instance_type, int instance_size);

  static VisitorId GetVisitorId(Map* map) {
    return GetVisitorId(map->instance_type(), map->instance_size());
  }

  // For visitors that allow specialization by size, calculate the VisitorId
  // based on the object size, the base visitor id and the generic visitor id.
  static VisitorId GetVisitorIdForSize(VisitorId base,
                                       VisitorId generic,
                                       int object_size) {
    ASSERT((base == kVisitDataObject) ||
           (base == kVisitStruct) ||
           (base == kVisitJSObject));
    ASSERT(IsAligned(object_size, kPointerSize));
    ASSERT(kMinObjectSizeInWords * kPointerSize <= object_size);
    ASSERT(object_size < Page::kMaxHeapObjectSize);

    const VisitorId specialization = static_cast<VisitorId>(
        base + (object_size >> kPointerSizeLog2) - kMinObjectSizeInWords);

    return Min(specialization, generic);
  }
};


template<typename Callback>
class VisitorDispatchTable {
 public:
  void CopyFrom(VisitorDispatchTable* other) {
    // We do not use memcpy here so that, during the update, every element of
    // the callbacks_ array remains a valid pointer (memcpy might be
    // implemented as a byte-copying loop).
    for (int i = 0; i < StaticVisitorBase::kVisitorIdCount; i++) {
      NoBarrier_Store(&callbacks_[i], other->callbacks_[i]);
    }
  }

  inline Callback GetVisitor(Map* map) {
    return reinterpret_cast<Callback>(callbacks_[map->visitor_id()]);
  }

  void Register(StaticVisitorBase::VisitorId id, Callback callback) {
    ASSERT(id < StaticVisitorBase::kVisitorIdCount);  // id is unsigned.
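    // Store as an AtomicWord so that CopyFrom() above can overwrite the
    // entry with NoBarrier_Store while readers going through GetVisitor()
    // still observe a valid callback pointer.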
    callbacks_[id] = reinterpret_cast<AtomicWord>(callback);
  }

  template<typename Visitor,
           StaticVisitorBase::VisitorId base,
           StaticVisitorBase::VisitorId generic,
           int object_size_in_words>
  void RegisterSpecialization() {
    static const int size = object_size_in_words * kPointerSize;
    Register(StaticVisitorBase::GetVisitorIdForSize(base, generic, size),
             &Visitor::template VisitSpecialized<size>);
  }

  template<typename Visitor,
           StaticVisitorBase::VisitorId base,
           StaticVisitorBase::VisitorId generic>
  void RegisterSpecializations() {
    STATIC_ASSERT(
        (generic - base + StaticVisitorBase::kMinObjectSizeInWords) == 10);
    RegisterSpecialization<Visitor, base, generic, 2>();
    RegisterSpecialization<Visitor, base, generic, 3>();
    RegisterSpecialization<Visitor, base, generic, 4>();
    RegisterSpecialization<Visitor, base, generic, 5>();
    RegisterSpecialization<Visitor, base, generic, 6>();
    RegisterSpecialization<Visitor, base, generic, 7>();
    RegisterSpecialization<Visitor, base, generic, 8>();
    RegisterSpecialization<Visitor, base, generic, 9>();
    Register(generic, &Visitor::Visit);
  }

 private:
  AtomicWord callbacks_[StaticVisitorBase::kVisitorIdCount];
};


template<typename StaticVisitor>
class BodyVisitorBase : public AllStatic {
 public:
  INLINE(static void IteratePointers(Heap* heap,
                                     HeapObject* object,
                                     int start_offset,
                                     int end_offset)) {
    Object** start_slot = reinterpret_cast<Object**>(object->address() +
                                                     start_offset);
    Object** end_slot = reinterpret_cast<Object**>(object->address() +
                                                   end_offset);
    StaticVisitor::VisitPointers(heap, start_slot, end_slot);
  }
};


template<typename StaticVisitor, typename BodyDescriptor, typename ReturnType>
class FlexibleBodyVisitor : public BodyVisitorBase<StaticVisitor> {
 public:
  static inline ReturnType Visit(Map* map, HeapObject* object) {
    int object_size = BodyDescriptor::SizeOf(map, object);
    BodyVisitorBase<StaticVisitor>::IteratePointers(
        map->heap(),
        object,
        BodyDescriptor::kStartOffset,
        object_size);
    return static_cast<ReturnType>(object_size);
  }

  template<int object_size>
  static inline ReturnType VisitSpecialized(Map* map, HeapObject* object) {
    ASSERT(BodyDescriptor::SizeOf(map, object) == object_size);
    BodyVisitorBase<StaticVisitor>::IteratePointers(
        map->heap(),
        object,
        BodyDescriptor::kStartOffset,
        object_size);
    return static_cast<ReturnType>(object_size);
  }
};


template<typename StaticVisitor, typename BodyDescriptor, typename ReturnType>
class FixedBodyVisitor : public BodyVisitorBase<StaticVisitor> {
 public:
  static inline ReturnType Visit(Map* map, HeapObject* object) {
    BodyVisitorBase<StaticVisitor>::IteratePointers(
        map->heap(),
        object,
        BodyDescriptor::kStartOffset,
        BodyDescriptor::kEndOffset);
    return static_cast<ReturnType>(BodyDescriptor::kSize);
  }
};


// Base class for visitors used for a linear new space iteration.
// IterateBody returns the size of the visited object.
// Certain types of objects (e.g. Code objects) are not handled by the
// dispatch table of this visitor because they cannot appear in the new
// space.
//
// This class is intended to be used in the following way:
//
//   class SomeVisitor : public StaticNewSpaceVisitor<SomeVisitor> {
//     ...
//   };
//
// This is an example of the curiously recurring template pattern (see
// http://en.wikipedia.org/wiki/Curiously_recurring_template_pattern). We use
// CRTP to enable aggressive compile-time optimizations (e.g. inlining and
// specialization of the StaticVisitor::VisitPointers methods).
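//
// A minimal sketch of such a subclass (the class name and the hook body are
// illustrative only): the derived class supplies the static VisitPointer
// method that StaticNewSpaceVisitor::VisitPointers below forwards every slot
// to, and its Initialize() must be called once to populate the dispatch
// table before IterateBody is used.
//
//   class ExampleVisitor : public StaticNewSpaceVisitor<ExampleVisitor> {
//    public:
//     static inline void VisitPointer(Heap* heap, Object** p) {
//       // Inspect or update the slot *p here.
//     }
//   };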
template<typename StaticVisitor>
class StaticNewSpaceVisitor : public StaticVisitorBase {
 public:
  static void Initialize() {
    table_.Register(kVisitShortcutCandidate,
                    &FixedBodyVisitor<StaticVisitor,
                                      ConsString::BodyDescriptor,
                                      int>::Visit);

    table_.Register(kVisitConsString,
                    &FixedBodyVisitor<StaticVisitor,
                                      ConsString::BodyDescriptor,
                                      int>::Visit);

    table_.Register(kVisitFixedArray,
                    &FlexibleBodyVisitor<StaticVisitor,
                                         FixedArray::BodyDescriptor,
                                         int>::Visit);

    table_.Register(kVisitGlobalContext,
                    &FixedBodyVisitor<StaticVisitor,
                                      Context::ScavengeBodyDescriptor,
                                      int>::Visit);

    table_.Register(kVisitByteArray, &VisitByteArray);

    table_.Register(kVisitSharedFunctionInfo,
                    &FixedBodyVisitor<StaticVisitor,
                                      SharedFunctionInfo::BodyDescriptor,
                                      int>::Visit);

    table_.Register(kVisitSeqAsciiString, &VisitSeqAsciiString);

    table_.Register(kVisitSeqTwoByteString, &VisitSeqTwoByteString);

    table_.Register(kVisitJSFunction,
                    &JSObjectVisitor::
                        template VisitSpecialized<JSFunction::kSize>);

    table_.RegisterSpecializations<DataObjectVisitor,
                                   kVisitDataObject,
                                   kVisitDataObjectGeneric>();
    table_.RegisterSpecializations<JSObjectVisitor,
                                   kVisitJSObject,
                                   kVisitJSObjectGeneric>();
    table_.RegisterSpecializations<StructVisitor,
                                   kVisitStruct,
                                   kVisitStructGeneric>();
  }

  static inline int IterateBody(Map* map, HeapObject* obj) {
    return table_.GetVisitor(map)(map, obj);
  }

  static inline void VisitPointers(Heap* heap, Object** start, Object** end) {
    for (Object** p = start; p < end; p++) StaticVisitor::VisitPointer(heap, p);
  }

 private:
  static inline int VisitByteArray(Map* map, HeapObject* object) {
    return reinterpret_cast<ByteArray*>(object)->ByteArraySize();
  }

  static inline int VisitSeqAsciiString(Map* map, HeapObject* object) {
    return SeqAsciiString::cast(object)->
        SeqAsciiStringSize(map->instance_type());
  }

  static inline int VisitSeqTwoByteString(Map* map, HeapObject* object) {
    return SeqTwoByteString::cast(object)->
        SeqTwoByteStringSize(map->instance_type());
  }

  class DataObjectVisitor {
   public:
    template<int object_size>
    static inline int VisitSpecialized(Map* map, HeapObject* object) {
      return object_size;
    }

    static inline int Visit(Map* map, HeapObject* object) {
      return map->instance_size();
    }
  };

  typedef FlexibleBodyVisitor<StaticVisitor,
                              StructBodyDescriptor,
                              int> StructVisitor;

  typedef FlexibleBodyVisitor<StaticVisitor,
                              JSObject::BodyDescriptor,
                              int> JSObjectVisitor;

  typedef int (*Callback)(Map* map, HeapObject* object);

  static VisitorDispatchTable<Callback> table_;
};


template<typename StaticVisitor>
VisitorDispatchTable<typename StaticNewSpaceVisitor<StaticVisitor>::Callback>
    StaticNewSpaceVisitor<StaticVisitor>::table_;
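

// The two CodeIterateBody variants below visit the body of a Code object:
// they visit the pointer fields holding the relocation info and the
// deoptimization data, and then every relocation entry selected by
// mode_mask.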
void Code::CodeIterateBody(ObjectVisitor* v) {
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::GLOBAL_PROPERTY_CELL) |
                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
                  RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);

  // Use the relocation info pointer before it is visited by
  // the heap compaction in the next statement.
  RelocIterator it(this, mode_mask);

  IteratePointer(v, kRelocationInfoOffset);
  IteratePointer(v, kDeoptimizationDataOffset);

  for (; !it.done(); it.next()) {
    it.rinfo()->Visit(v);
  }
}


template<typename StaticVisitor>
void Code::CodeIterateBody(Heap* heap) {
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::GLOBAL_PROPERTY_CELL) |
                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
                  RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);

  // Use the relocation info pointer before it is visited by
  // the heap compaction in the next statement.
  RelocIterator it(this, mode_mask);

  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kRelocationInfoOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kDeoptimizationDataOffset));

  for (; !it.done(); it.next()) {
    it.rinfo()->template Visit<StaticVisitor>(heap);
  }
}


} }  // namespace v8::internal

#endif  // V8_OBJECTS_VISITING_H_