//=== RecordLayoutBuilder.cpp - Helper class for building record layouts ---==//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//

#include "clang/AST/Attr.h"
#include "clang/AST/CXXInheritance.h"
#include "clang/AST/Decl.h"
#include "clang/AST/DeclCXX.h"
#include "clang/AST/DeclObjC.h"
#include "clang/AST/Expr.h"
#include "clang/AST/RecordLayout.h"
#include "clang/Basic/TargetInfo.h"
#include "clang/Sema/SemaDiagnostic.h"
#include "llvm/Support/Format.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/CrashRecoveryContext.h"

using namespace clang;

namespace {

/// BaseSubobjectInfo - Represents a single base subobject in a complete class.
/// For a class hierarchy like
///
/// class A { };
/// class B : A { };
/// class C : A, B { };
///
/// The BaseSubobjectInfo graph for C will have three BaseSubobjectInfo
/// instances, one for B and two for A.
///
/// If a base is virtual, it will only have one BaseSubobjectInfo allocated.
struct BaseSubobjectInfo {
  /// Class - The class for this base info.
  const CXXRecordDecl *Class;

  /// IsVirtual - Whether the BaseInfo represents a virtual base or not.
  bool IsVirtual;

  /// Bases - Information about the base subobjects.
  llvm::SmallVector<BaseSubobjectInfo*, 4> Bases;

  /// PrimaryVirtualBaseInfo - Holds the base info for the primary virtual base
  /// of this base info (if one exists).
  BaseSubobjectInfo *PrimaryVirtualBaseInfo;

  // FIXME: Document.
  const BaseSubobjectInfo *Derived;
};

/// EmptySubobjectMap - Keeps track of which empty subobjects exist at different
/// offsets while laying out a C++ class.
class EmptySubobjectMap {
  const ASTContext &Context;
  uint64_t CharWidth;

  /// Class - The class whose empty entries we're keeping track of.
  const CXXRecordDecl *Class;

  /// EmptyClassOffsets - A map from offsets to empty record decls.
  typedef llvm::SmallVector<const CXXRecordDecl *, 1> ClassVectorTy;
  typedef llvm::DenseMap<CharUnits, ClassVectorTy> EmptyClassOffsetsMapTy;
  EmptyClassOffsetsMapTy EmptyClassOffsets;

  /// MaxEmptyClassOffset - The highest offset known to contain an empty
  /// base subobject.
  CharUnits MaxEmptyClassOffset;

  /// ComputeEmptySubobjectSizes - Compute the size of the largest base or
  /// member subobject that is empty.
  void ComputeEmptySubobjectSizes();

  void AddSubobjectAtOffset(const CXXRecordDecl *RD, CharUnits Offset);

  void UpdateEmptyBaseSubobjects(const BaseSubobjectInfo *Info,
                                 CharUnits Offset, bool PlacingEmptyBase);

  void UpdateEmptyFieldSubobjects(const CXXRecordDecl *RD,
                                  const CXXRecordDecl *Class,
                                  CharUnits Offset);
  void UpdateEmptyFieldSubobjects(const FieldDecl *FD, CharUnits Offset);

  /// AnyEmptySubobjectsBeyondOffset - Returns whether there are any empty
  /// subobjects beyond the given offset.
  bool AnyEmptySubobjectsBeyondOffset(CharUnits Offset) const {
    return Offset <= MaxEmptyClassOffset;
  }

  CharUnits
  getFieldOffset(const ASTRecordLayout &Layout, unsigned FieldNo) const {
    uint64_t FieldOffset = Layout.getFieldOffset(FieldNo);
    assert(FieldOffset % CharWidth == 0 &&
           "Field offset not at char boundary!");

    return Context.toCharUnitsFromBits(FieldOffset);
  }

protected:
  bool CanPlaceSubobjectAtOffset(const CXXRecordDecl *RD,
                                 CharUnits Offset) const;

  bool CanPlaceBaseSubobjectAtOffset(const BaseSubobjectInfo *Info,
                                     CharUnits Offset);

  bool CanPlaceFieldSubobjectAtOffset(const CXXRecordDecl *RD,
                                      const CXXRecordDecl *Class,
                                      CharUnits Offset) const;
  bool CanPlaceFieldSubobjectAtOffset(const FieldDecl *FD,
                                      CharUnits Offset) const;

public:
  /// This holds the size of the largest empty subobject (either a base
  /// or a member). Will be zero if the record being built doesn't contain
  /// any empty classes.
  CharUnits SizeOfLargestEmptySubobject;

  EmptySubobjectMap(const ASTContext &Context, const CXXRecordDecl *Class)
    : Context(Context), CharWidth(Context.getCharWidth()), Class(Class) {
    ComputeEmptySubobjectSizes();
  }

  /// CanPlaceBaseAtOffset - Return whether the given base class can be placed
  /// at the given offset.
  /// Returns false if placing the record will result in two components
  /// (direct or indirect) of the same type having the same offset.
  bool CanPlaceBaseAtOffset(const BaseSubobjectInfo *Info,
                            CharUnits Offset);

  /// CanPlaceFieldAtOffset - Return whether a field can be placed at the given
  /// offset.
  bool CanPlaceFieldAtOffset(const FieldDecl *FD, CharUnits Offset);
};

void EmptySubobjectMap::ComputeEmptySubobjectSizes() {
  // Check the bases.
  for (CXXRecordDecl::base_class_const_iterator I = Class->bases_begin(),
       E = Class->bases_end(); I != E; ++I) {
    const CXXRecordDecl *BaseDecl =
      cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());

    CharUnits EmptySize;
    const ASTRecordLayout &Layout = Context.getASTRecordLayout(BaseDecl);
    if (BaseDecl->isEmpty()) {
      // If the class decl is empty, get its size.
      EmptySize = Layout.getSize();
    } else {
      // Otherwise, we get the largest empty subobject for the decl.
      EmptySize = Layout.getSizeOfLargestEmptySubobject();
    }

    if (EmptySize > SizeOfLargestEmptySubobject)
      SizeOfLargestEmptySubobject = EmptySize;
  }

  // Check the fields.
  for (CXXRecordDecl::field_iterator I = Class->field_begin(),
       E = Class->field_end(); I != E; ++I) {
    const FieldDecl *FD = *I;

    const RecordType *RT =
      Context.getBaseElementType(FD->getType())->getAs<RecordType>();

    // We only care about record types.
    if (!RT)
      continue;

    CharUnits EmptySize;
    const CXXRecordDecl *MemberDecl = cast<CXXRecordDecl>(RT->getDecl());
    const ASTRecordLayout &Layout = Context.getASTRecordLayout(MemberDecl);
    if (MemberDecl->isEmpty()) {
      // If the class decl is empty, get its size.
      EmptySize = Layout.getSize();
    } else {
      // Otherwise, we get the largest empty subobject for the decl.
      EmptySize = Layout.getSizeOfLargestEmptySubobject();
    }

    if (EmptySize > SizeOfLargestEmptySubobject)
      SizeOfLargestEmptySubobject = EmptySize;
  }
}

bool
EmptySubobjectMap::CanPlaceSubobjectAtOffset(const CXXRecordDecl *RD,
                                             CharUnits Offset) const {
  // We only need to check empty bases.
  if (!RD->isEmpty())
    return true;

  EmptyClassOffsetsMapTy::const_iterator I = EmptyClassOffsets.find(Offset);
  if (I == EmptyClassOffsets.end())
    return true;

  const ClassVectorTy& Classes = I->second;
  if (std::find(Classes.begin(), Classes.end(), RD) == Classes.end())
    return true;

  // There is already an empty class of the same type at this offset.
  return false;
}

void EmptySubobjectMap::AddSubobjectAtOffset(const CXXRecordDecl *RD,
                                             CharUnits Offset) {
  // We only care about empty bases.
  if (!RD->isEmpty())
    return;

  // If we have empty structures inside a union, we can assign both
  // the same offset. Just avoid pushing them twice in the list.
  ClassVectorTy& Classes = EmptyClassOffsets[Offset];
  if (std::find(Classes.begin(), Classes.end(), RD) != Classes.end())
    return;

  Classes.push_back(RD);

  // Update the empty class offset.
  if (Offset > MaxEmptyClassOffset)
    MaxEmptyClassOffset = Offset;
}

bool
EmptySubobjectMap::CanPlaceBaseSubobjectAtOffset(const BaseSubobjectInfo *Info,
                                                 CharUnits Offset) {
  // We don't have to keep looking past the maximum offset that's known to
  // contain an empty class.
  if (!AnyEmptySubobjectsBeyondOffset(Offset))
    return true;

  if (!CanPlaceSubobjectAtOffset(Info->Class, Offset))
    return false;

  // Traverse all non-virtual bases.
  const ASTRecordLayout &Layout = Context.getASTRecordLayout(Info->Class);
  for (unsigned I = 0, E = Info->Bases.size(); I != E; ++I) {
    BaseSubobjectInfo* Base = Info->Bases[I];
    if (Base->IsVirtual)
      continue;

    CharUnits BaseOffset = Offset + Layout.getBaseClassOffset(Base->Class);

    if (!CanPlaceBaseSubobjectAtOffset(Base, BaseOffset))
      return false;
  }

  if (Info->PrimaryVirtualBaseInfo) {
    BaseSubobjectInfo *PrimaryVirtualBaseInfo = Info->PrimaryVirtualBaseInfo;

    if (Info == PrimaryVirtualBaseInfo->Derived) {
      if (!CanPlaceBaseSubobjectAtOffset(PrimaryVirtualBaseInfo, Offset))
        return false;
    }
  }

  // Traverse all member variables.
  unsigned FieldNo = 0;
  for (CXXRecordDecl::field_iterator I = Info->Class->field_begin(),
       E = Info->Class->field_end(); I != E; ++I, ++FieldNo) {
    const FieldDecl *FD = *I;
    if (FD->isBitField())
      continue;

    CharUnits FieldOffset = Offset + getFieldOffset(Layout, FieldNo);
    if (!CanPlaceFieldSubobjectAtOffset(FD, FieldOffset))
      return false;
  }

  return true;
}

void EmptySubobjectMap::UpdateEmptyBaseSubobjects(const BaseSubobjectInfo *Info,
                                                  CharUnits Offset,
                                                  bool PlacingEmptyBase) {
  if (!PlacingEmptyBase && Offset >= SizeOfLargestEmptySubobject) {
    // We know that the only empty subobjects that can conflict with empty
    // subobjects of non-empty bases are empty bases that can be placed at
    // offset zero. Because of this, we only need to keep track of empty base
    // subobjects with offsets less than the size of the largest empty
    // subobject for our class.
    return;
  }

  AddSubobjectAtOffset(Info->Class, Offset);

  // Traverse all non-virtual bases.
  const ASTRecordLayout &Layout = Context.getASTRecordLayout(Info->Class);
  for (unsigned I = 0, E = Info->Bases.size(); I != E; ++I) {
    BaseSubobjectInfo* Base = Info->Bases[I];
    if (Base->IsVirtual)
      continue;

    CharUnits BaseOffset = Offset + Layout.getBaseClassOffset(Base->Class);
    UpdateEmptyBaseSubobjects(Base, BaseOffset, PlacingEmptyBase);
  }

  if (Info->PrimaryVirtualBaseInfo) {
    BaseSubobjectInfo *PrimaryVirtualBaseInfo = Info->PrimaryVirtualBaseInfo;

    if (Info == PrimaryVirtualBaseInfo->Derived)
      UpdateEmptyBaseSubobjects(PrimaryVirtualBaseInfo, Offset,
                                PlacingEmptyBase);
  }

  // Traverse all member variables.
  unsigned FieldNo = 0;
  for (CXXRecordDecl::field_iterator I = Info->Class->field_begin(),
       E = Info->Class->field_end(); I != E; ++I, ++FieldNo) {
    const FieldDecl *FD = *I;
    if (FD->isBitField())
      continue;

    CharUnits FieldOffset = Offset + getFieldOffset(Layout, FieldNo);
    UpdateEmptyFieldSubobjects(FD, FieldOffset);
  }
}

bool EmptySubobjectMap::CanPlaceBaseAtOffset(const BaseSubobjectInfo *Info,
                                             CharUnits Offset) {
  // If we know this class doesn't have any empty subobjects we don't need to
  // bother checking.
  if (SizeOfLargestEmptySubobject.isZero())
    return true;

  if (!CanPlaceBaseSubobjectAtOffset(Info, Offset))
    return false;

  // We are able to place the base at this offset. Make sure to update the
  // empty base subobject map.
  UpdateEmptyBaseSubobjects(Info, Offset, Info->Class->isEmpty());
  return true;
}

bool
EmptySubobjectMap::CanPlaceFieldSubobjectAtOffset(const CXXRecordDecl *RD,
                                                  const CXXRecordDecl *Class,
                                                  CharUnits Offset) const {
  // We don't have to keep looking past the maximum offset that's known to
  // contain an empty class.
  if (!AnyEmptySubobjectsBeyondOffset(Offset))
    return true;

  if (!CanPlaceSubobjectAtOffset(RD, Offset))
    return false;

  const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);

  // Traverse all non-virtual bases.
  for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
       E = RD->bases_end(); I != E; ++I) {
    if (I->isVirtual())
      continue;

    const CXXRecordDecl *BaseDecl =
      cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());

    CharUnits BaseOffset = Offset + Layout.getBaseClassOffset(BaseDecl);
    if (!CanPlaceFieldSubobjectAtOffset(BaseDecl, Class, BaseOffset))
      return false;
  }

  if (RD == Class) {
    // This is the most derived class, traverse virtual bases as well.
    for (CXXRecordDecl::base_class_const_iterator I = RD->vbases_begin(),
         E = RD->vbases_end(); I != E; ++I) {
      const CXXRecordDecl *VBaseDecl =
        cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());

      CharUnits VBaseOffset = Offset + Layout.getVBaseClassOffset(VBaseDecl);
      if (!CanPlaceFieldSubobjectAtOffset(VBaseDecl, Class, VBaseOffset))
        return false;
    }
  }

  // Traverse all member variables.
  unsigned FieldNo = 0;
  for (CXXRecordDecl::field_iterator I = RD->field_begin(), E = RD->field_end();
       I != E; ++I, ++FieldNo) {
    const FieldDecl *FD = *I;
    if (FD->isBitField())
      continue;

    CharUnits FieldOffset = Offset + getFieldOffset(Layout, FieldNo);

    if (!CanPlaceFieldSubobjectAtOffset(FD, FieldOffset))
      return false;
  }

  return true;
}

bool
EmptySubobjectMap::CanPlaceFieldSubobjectAtOffset(const FieldDecl *FD,
                                                  CharUnits Offset) const {
  // We don't have to keep looking past the maximum offset that's known to
  // contain an empty class.
  if (!AnyEmptySubobjectsBeyondOffset(Offset))
    return true;

  QualType T = FD->getType();
  if (const RecordType *RT = T->getAs<RecordType>()) {
    const CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());
    return CanPlaceFieldSubobjectAtOffset(RD, RD, Offset);
  }

  // If we have an array type we need to look at every element.
  if (const ConstantArrayType *AT = Context.getAsConstantArrayType(T)) {
    QualType ElemTy = Context.getBaseElementType(AT);
    const RecordType *RT = ElemTy->getAs<RecordType>();
    if (!RT)
      return true;

    const CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());
    const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);

    uint64_t NumElements = Context.getConstantArrayElementCount(AT);
    CharUnits ElementOffset = Offset;
    for (uint64_t I = 0; I != NumElements; ++I) {
      // We don't have to keep looking past the maximum offset that's known to
      // contain an empty class.
      if (!AnyEmptySubobjectsBeyondOffset(ElementOffset))
        return true;

      if (!CanPlaceFieldSubobjectAtOffset(RD, RD, ElementOffset))
        return false;

      ElementOffset += Layout.getSize();
    }
  }

  return true;
}

bool
EmptySubobjectMap::CanPlaceFieldAtOffset(const FieldDecl *FD,
                                         CharUnits Offset) {
  if (!CanPlaceFieldSubobjectAtOffset(FD, Offset))
    return false;

  // We are able to place the member variable at this offset.
  // Make sure to update the empty base subobject map.
  UpdateEmptyFieldSubobjects(FD, Offset);
  return true;
}

void EmptySubobjectMap::UpdateEmptyFieldSubobjects(const CXXRecordDecl *RD,
                                                   const CXXRecordDecl *Class,
                                                   CharUnits Offset) {
  // We know that the only empty subobjects that can conflict with empty
  // field subobjects are subobjects of empty bases that can be placed at
  // offset zero. Because of this, we only need to keep track of empty field
  // subobjects with offsets less than the size of the largest empty
  // subobject for our class.
  if (Offset >= SizeOfLargestEmptySubobject)
    return;

  AddSubobjectAtOffset(RD, Offset);

  const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);

  // Traverse all non-virtual bases.
  for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
       E = RD->bases_end(); I != E; ++I) {
    if (I->isVirtual())
      continue;

    const CXXRecordDecl *BaseDecl =
      cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());

    CharUnits BaseOffset = Offset + Layout.getBaseClassOffset(BaseDecl);
    UpdateEmptyFieldSubobjects(BaseDecl, Class, BaseOffset);
  }

  if (RD == Class) {
    // This is the most derived class, traverse virtual bases as well.
    for (CXXRecordDecl::base_class_const_iterator I = RD->vbases_begin(),
         E = RD->vbases_end(); I != E; ++I) {
      const CXXRecordDecl *VBaseDecl =
        cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());

      CharUnits VBaseOffset = Offset + Layout.getVBaseClassOffset(VBaseDecl);
      UpdateEmptyFieldSubobjects(VBaseDecl, Class, VBaseOffset);
    }
  }

  // Traverse all member variables.
  unsigned FieldNo = 0;
  for (CXXRecordDecl::field_iterator I = RD->field_begin(), E = RD->field_end();
       I != E; ++I, ++FieldNo) {
    const FieldDecl *FD = *I;
    if (FD->isBitField())
      continue;

    CharUnits FieldOffset = Offset + getFieldOffset(Layout, FieldNo);

    UpdateEmptyFieldSubobjects(FD, FieldOffset);
  }
}

void EmptySubobjectMap::UpdateEmptyFieldSubobjects(const FieldDecl *FD,
                                                   CharUnits Offset) {
  QualType T = FD->getType();
  if (const RecordType *RT = T->getAs<RecordType>()) {
    const CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());
    UpdateEmptyFieldSubobjects(RD, RD, Offset);
    return;
  }

  // If we have an array type we need to update every element.
  if (const ConstantArrayType *AT = Context.getAsConstantArrayType(T)) {
    QualType ElemTy = Context.getBaseElementType(AT);
    const RecordType *RT = ElemTy->getAs<RecordType>();
    if (!RT)
      return;

    const CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());
    const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);

    uint64_t NumElements = Context.getConstantArrayElementCount(AT);
    CharUnits ElementOffset = Offset;

    for (uint64_t I = 0; I != NumElements; ++I) {
      // We know that the only empty subobjects that can conflict with empty
      // field subobjects are subobjects of empty bases that can be placed at
      // offset zero. Because of this, we only need to keep track of empty
      // field subobjects with offsets less than the size of the largest empty
      // subobject for our class.
      if (ElementOffset >= SizeOfLargestEmptySubobject)
        return;

      UpdateEmptyFieldSubobjects(RD, RD, ElementOffset);
      ElementOffset += Layout.getSize();
    }
  }
}

class RecordLayoutBuilder {
protected:
  // FIXME: Remove this and make the appropriate fields public.
  friend class clang::ASTContext;

  const ASTContext &Context;

  EmptySubobjectMap *EmptySubobjects;

  /// Size - The current size of the record layout.
  uint64_t Size;

  /// Alignment - The current alignment of the record layout.
  CharUnits Alignment;

  /// \brief The alignment if attribute packed is not used.
  CharUnits UnpackedAlignment;

  llvm::SmallVector<uint64_t, 16> FieldOffsets;

  /// Packed - Whether the record is packed or not.
  unsigned Packed : 1;

  unsigned IsUnion : 1;

  unsigned IsMac68kAlign : 1;

  unsigned IsMsStruct : 1;

  /// UnfilledBitsInLastByte - If the last field laid out was a bitfield,
  /// this contains the number of bits in the last byte that can be used for
  /// an adjacent bitfield if necessary.
  unsigned char UnfilledBitsInLastByte;

  /// MaxFieldAlignment - The maximum allowed field alignment. This is set by
  /// #pragma pack.
  CharUnits MaxFieldAlignment;

  /// DataSize - The data size of the record being laid out.
  uint64_t DataSize;

  CharUnits NonVirtualSize;
  CharUnits NonVirtualAlignment;

  FieldDecl *ZeroLengthBitfield;

  /// PrimaryBase - the primary base class (if one exists) of the class
  /// we're laying out.
  const CXXRecordDecl *PrimaryBase;

  /// PrimaryBaseIsVirtual - Whether the primary base of the class we're laying
  /// out is virtual.
  bool PrimaryBaseIsVirtual;

  typedef llvm::DenseMap<const CXXRecordDecl *, CharUnits> BaseOffsetsMapTy;

  /// Bases - base classes and their offsets in the record.
  BaseOffsetsMapTy Bases;

  // VBases - virtual base classes and their offsets in the record.
  BaseOffsetsMapTy VBases;

  /// IndirectPrimaryBases - Virtual base classes, direct or indirect, that are
  /// primary base classes for some other direct or indirect base class.
  CXXIndirectPrimaryBaseSet IndirectPrimaryBases;

  /// FirstNearlyEmptyVBase - The first nearly empty virtual base class in
  /// inheritance graph order. Used for determining the primary base class.
  const CXXRecordDecl *FirstNearlyEmptyVBase;

  /// VisitedVirtualBases - A set of all the visited virtual bases, used to
  /// avoid visiting virtual bases more than once.
  llvm::SmallPtrSet<const CXXRecordDecl *, 4> VisitedVirtualBases;

  RecordLayoutBuilder(const ASTContext &Context,
                      EmptySubobjectMap *EmptySubobjects)
    : Context(Context), EmptySubobjects(EmptySubobjects), Size(0),
      Alignment(CharUnits::One()), UnpackedAlignment(Alignment),
      Packed(false), IsUnion(false),
      IsMac68kAlign(false), IsMsStruct(false),
      UnfilledBitsInLastByte(0), MaxFieldAlignment(CharUnits::Zero()),
      DataSize(0), NonVirtualSize(CharUnits::Zero()),
      NonVirtualAlignment(CharUnits::One()),
      ZeroLengthBitfield(0), PrimaryBase(0),
      PrimaryBaseIsVirtual(false), FirstNearlyEmptyVBase(0) { }

  void Layout(const RecordDecl *D);
  void Layout(const CXXRecordDecl *D);
  void Layout(const ObjCInterfaceDecl *D);

  void LayoutFields(const RecordDecl *D);
  void LayoutField(const FieldDecl *D);
  void LayoutWideBitField(uint64_t FieldSize, uint64_t TypeSize,
                          bool FieldPacked, const FieldDecl *D);
  void LayoutBitField(const FieldDecl *D);

  /// BaseSubobjectInfoAllocator - Allocator for BaseSubobjectInfo objects.
  llvm::SpecificBumpPtrAllocator<BaseSubobjectInfo> BaseSubobjectInfoAllocator;

  typedef llvm::DenseMap<const CXXRecordDecl *, BaseSubobjectInfo *>
    BaseSubobjectInfoMapTy;

  /// VirtualBaseInfo - Map from all the (direct or indirect) virtual bases
  /// of the class we're laying out to their base subobject info.
  BaseSubobjectInfoMapTy VirtualBaseInfo;

  /// NonVirtualBaseInfo - Map from all the direct non-virtual bases of the
  /// class we're laying out to their base subobject info.
  BaseSubobjectInfoMapTy NonVirtualBaseInfo;

  /// ComputeBaseSubobjectInfo - Compute the base subobject information for the
  /// bases of the given class.
  void ComputeBaseSubobjectInfo(const CXXRecordDecl *RD);

  /// ComputeBaseSubobjectInfo - Compute the base subobject information for a
  /// single class and all of its base classes.
  BaseSubobjectInfo *ComputeBaseSubobjectInfo(const CXXRecordDecl *RD,
                                              bool IsVirtual,
                                              BaseSubobjectInfo *Derived);

  /// DeterminePrimaryBase - Determine the primary base of the given class.
  void DeterminePrimaryBase(const CXXRecordDecl *RD);

  void SelectPrimaryVBase(const CXXRecordDecl *RD);

  virtual CharUnits GetVirtualPointersSize(const CXXRecordDecl *RD) const;

  /// LayoutNonVirtualBases - Determines the primary base class (if any) and
  /// lays it out. Will then proceed to lay out all non-virtual base classes.
  void LayoutNonVirtualBases(const CXXRecordDecl *RD);

  /// LayoutNonVirtualBase - Lays out a single non-virtual base.
  void LayoutNonVirtualBase(const BaseSubobjectInfo *Base);

  void AddPrimaryVirtualBaseOffsets(const BaseSubobjectInfo *Info,
                                    CharUnits Offset);

  /// LayoutVirtualBases - Lays out all the virtual bases.
  void LayoutVirtualBases(const CXXRecordDecl *RD,
                          const CXXRecordDecl *MostDerivedClass);

  /// LayoutVirtualBase - Lays out a single virtual base.
  void LayoutVirtualBase(const BaseSubobjectInfo *Base);

  /// LayoutBase - Will lay out a base and return the offset where it was
  /// placed, in chars.
  CharUnits LayoutBase(const BaseSubobjectInfo *Base);

  /// InitializeLayout - Initialize record layout for the given record decl.
  void InitializeLayout(const Decl *D);

  /// FinishLayout - Finalize record layout. Adjust record size based on the
  /// alignment.
  void FinishLayout(const NamedDecl *D);

  void UpdateAlignment(CharUnits NewAlignment, CharUnits UnpackedNewAlignment);
  void UpdateAlignment(CharUnits NewAlignment) {
    UpdateAlignment(NewAlignment, NewAlignment);
  }

  void CheckFieldPadding(uint64_t Offset, uint64_t UnpaddedOffset,
                         uint64_t UnpackedOffset, unsigned UnpackedAlign,
                         bool isPacked, const FieldDecl *D);

  DiagnosticBuilder Diag(SourceLocation Loc, unsigned DiagID);

  CharUnits getSize() const {
    assert(Size % Context.getCharWidth() == 0);
    return Context.toCharUnitsFromBits(Size);
  }
  uint64_t getSizeInBits() const { return Size; }

  void setSize(CharUnits NewSize) { Size = Context.toBits(NewSize); }
  void setSize(uint64_t NewSize) { Size = NewSize; }

  CharUnits getDataSize() const {
    assert(DataSize % Context.getCharWidth() == 0);
    return Context.toCharUnitsFromBits(DataSize);
  }
  uint64_t getDataSizeInBits() const { return DataSize; }

  void setDataSize(CharUnits NewSize) { DataSize = Context.toBits(NewSize); }
  void setDataSize(uint64_t NewSize) { DataSize = NewSize; }

  RecordLayoutBuilder(const RecordLayoutBuilder&);   // DO NOT IMPLEMENT
  void operator=(const RecordLayoutBuilder&); // DO NOT IMPLEMENT
public:
  static const CXXMethodDecl *ComputeKeyFunction(const CXXRecordDecl *RD);

  virtual ~RecordLayoutBuilder() { }
};
} // end anonymous namespace

void
RecordLayoutBuilder::SelectPrimaryVBase(const CXXRecordDecl *RD) {
  for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
         E = RD->bases_end(); I != E; ++I) {
    assert(!I->getType()->isDependentType() &&
           "Cannot layout class with dependent bases.");

    const CXXRecordDecl *Base =
      cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());

    // Check if this is a nearly empty virtual base.
    if (I->isVirtual() && Context.isNearlyEmpty(Base)) {
      // If it's not an indirect primary base, then we've found our primary
      // base.
      if (!IndirectPrimaryBases.count(Base)) {
        PrimaryBase = Base;
        PrimaryBaseIsVirtual = true;
        return;
      }

      // Is this the first nearly empty virtual base?
      if (!FirstNearlyEmptyVBase)
        FirstNearlyEmptyVBase = Base;
    }

    SelectPrimaryVBase(Base);
    if (PrimaryBase)
      return;
  }
}

CharUnits
RecordLayoutBuilder::GetVirtualPointersSize(const CXXRecordDecl *RD) const {
  return Context.toCharUnitsFromBits(Context.Target.getPointerWidth(0));
}

/// DeterminePrimaryBase - Determine the primary base of the given class.
void RecordLayoutBuilder::DeterminePrimaryBase(const CXXRecordDecl *RD) {
  // If the class isn't dynamic, it won't have a primary base.
  if (!RD->isDynamicClass())
    return;

  // Compute all the primary virtual bases for all of our direct and
  // indirect bases, and record all their primary virtual base classes.
  RD->getIndirectPrimaryBases(IndirectPrimaryBases);

  // If the record has a dynamic base class, attempt to choose a primary base
  // class. It is the first (in direct base class order) non-virtual dynamic
  // base class, if one exists.
  for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
         e = RD->bases_end(); i != e; ++i) {
    // Ignore virtual bases.
    if (i->isVirtual())
      continue;

    const CXXRecordDecl *Base =
      cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());

    if (Base->isDynamicClass()) {
      // We found it.
      PrimaryBase = Base;
      PrimaryBaseIsVirtual = false;
      return;
    }
  }

  // Otherwise, it is the first nearly empty virtual base that is not an
  // indirect primary virtual base class, if one exists.
  if (RD->getNumVBases() != 0) {
    SelectPrimaryVBase(RD);
    if (PrimaryBase)
      return;
  }

  // Otherwise, it is the first nearly empty virtual base, if one exists.
  if (FirstNearlyEmptyVBase) {
    PrimaryBase = FirstNearlyEmptyVBase;
    PrimaryBaseIsVirtual = true;
    return;
  }

  // Otherwise there is no primary base class.
  assert(!PrimaryBase && "Should not get here with a primary base!");

  // Allocate the virtual table pointer at offset zero.
  assert(DataSize == 0 && "Vtable pointer must be at offset zero!");

  // Update the size.
  setSize(getSize() + GetVirtualPointersSize(RD));
  setDataSize(getSize());

  CharUnits UnpackedBaseAlign =
    Context.toCharUnitsFromBits(Context.Target.getPointerAlign(0));
  CharUnits BaseAlign = (Packed) ? CharUnits::One() : UnpackedBaseAlign;

  // The maximum field alignment overrides base align.
  if (!MaxFieldAlignment.isZero()) {
    BaseAlign = std::min(BaseAlign, MaxFieldAlignment);
    UnpackedBaseAlign = std::min(UnpackedBaseAlign, MaxFieldAlignment);
  }

  // Update the alignment.
  UpdateAlignment(BaseAlign, UnpackedBaseAlign);
}

BaseSubobjectInfo *
RecordLayoutBuilder::ComputeBaseSubobjectInfo(const CXXRecordDecl *RD,
                                              bool IsVirtual,
                                              BaseSubobjectInfo *Derived) {
  BaseSubobjectInfo *Info;

  if (IsVirtual) {
    // Check if we already have info about this virtual base.
    BaseSubobjectInfo *&InfoSlot = VirtualBaseInfo[RD];
    if (InfoSlot) {
      assert(InfoSlot->Class == RD && "Wrong class for virtual base info!");
      return InfoSlot;
    }

    // We don't; create it.
    InfoSlot = new (BaseSubobjectInfoAllocator.Allocate()) BaseSubobjectInfo;
    Info = InfoSlot;
  } else {
    Info = new (BaseSubobjectInfoAllocator.Allocate()) BaseSubobjectInfo;
  }

  Info->Class = RD;
  Info->IsVirtual = IsVirtual;
  Info->Derived = 0;
  Info->PrimaryVirtualBaseInfo = 0;

  const CXXRecordDecl *PrimaryVirtualBase = 0;
  BaseSubobjectInfo *PrimaryVirtualBaseInfo = 0;

  // Check if this base has a primary virtual base.
  if (RD->getNumVBases()) {
    const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
    if (Layout.isPrimaryBaseVirtual()) {
      // This base does have a primary virtual base.
      PrimaryVirtualBase = Layout.getPrimaryBase();
      assert(PrimaryVirtualBase && "Didn't have a primary virtual base!");

      // Now check if we have base subobject info about this primary base.
      PrimaryVirtualBaseInfo = VirtualBaseInfo.lookup(PrimaryVirtualBase);

      if (PrimaryVirtualBaseInfo) {
        if (PrimaryVirtualBaseInfo->Derived) {
          // We did have info about this primary base, and it turns out that it
          // has already been claimed as a primary virtual base for another
          // base.
          PrimaryVirtualBase = 0;
        } else {
          // We can claim this base as our primary base.
          Info->PrimaryVirtualBaseInfo = PrimaryVirtualBaseInfo;
          PrimaryVirtualBaseInfo->Derived = Info;
        }
      }
    }
  }

  // Now go through all direct bases.
  for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
       E = RD->bases_end(); I != E; ++I) {
    bool IsVirtual = I->isVirtual();

    const CXXRecordDecl *BaseDecl =
      cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());

    Info->Bases.push_back(ComputeBaseSubobjectInfo(BaseDecl, IsVirtual, Info));
  }

  if (PrimaryVirtualBase && !PrimaryVirtualBaseInfo) {
    // Traversing the bases must have created the base info for our primary
    // virtual base.
    PrimaryVirtualBaseInfo = VirtualBaseInfo.lookup(PrimaryVirtualBase);
    assert(PrimaryVirtualBaseInfo &&
           "Did not create a primary virtual base!");

    // Claim the primary virtual base as our primary virtual base.
    Info->PrimaryVirtualBaseInfo = PrimaryVirtualBaseInfo;
    PrimaryVirtualBaseInfo->Derived = Info;
  }

  return Info;
}

void RecordLayoutBuilder::ComputeBaseSubobjectInfo(const CXXRecordDecl *RD) {
  for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
       E = RD->bases_end(); I != E; ++I) {
    bool IsVirtual = I->isVirtual();

    const CXXRecordDecl *BaseDecl =
      cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());

    // Compute the base subobject info for this base.
    BaseSubobjectInfo *Info = ComputeBaseSubobjectInfo(BaseDecl, IsVirtual, 0);

    if (IsVirtual) {
      // ComputeBaseInfo has already added this base for us.
      assert(VirtualBaseInfo.count(BaseDecl) &&
             "Did not add virtual base!");
    } else {
      // Add the base info to the map of non-virtual bases.
      assert(!NonVirtualBaseInfo.count(BaseDecl) &&
             "Non-virtual base already exists!");
      NonVirtualBaseInfo.insert(std::make_pair(BaseDecl, Info));
    }
  }
}

void
RecordLayoutBuilder::LayoutNonVirtualBases(const CXXRecordDecl *RD) {
  // First, determine the primary base class.
  DeterminePrimaryBase(RD);

  // Compute base subobject info.
  ComputeBaseSubobjectInfo(RD);

  // If we have a primary base class, lay it out.
  if (PrimaryBase) {
    if (PrimaryBaseIsVirtual) {
      // If the primary virtual base was a primary virtual base of some other
      // base class we'll have to steal it.
      BaseSubobjectInfo *PrimaryBaseInfo = VirtualBaseInfo.lookup(PrimaryBase);
      PrimaryBaseInfo->Derived = 0;

      // We have a virtual primary base, insert it as an indirect primary base.
      IndirectPrimaryBases.insert(PrimaryBase);

      assert(!VisitedVirtualBases.count(PrimaryBase) &&
             "vbase already visited!");
      VisitedVirtualBases.insert(PrimaryBase);

      LayoutVirtualBase(PrimaryBaseInfo);
    } else {
      BaseSubobjectInfo *PrimaryBaseInfo =
        NonVirtualBaseInfo.lookup(PrimaryBase);
      assert(PrimaryBaseInfo &&
             "Did not find base info for non-virtual primary base!");

      LayoutNonVirtualBase(PrimaryBaseInfo);
    }
  }

  // Now lay out the non-virtual bases.
  for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
         E = RD->bases_end(); I != E; ++I) {

    // Ignore virtual bases.
    if (I->isVirtual())
      continue;

    const CXXRecordDecl *BaseDecl =
      cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());

    // Skip the primary base.
    if (BaseDecl == PrimaryBase && !PrimaryBaseIsVirtual)
      continue;

    // Lay out the base.
    BaseSubobjectInfo *BaseInfo = NonVirtualBaseInfo.lookup(BaseDecl);
    assert(BaseInfo && "Did not find base info for non-virtual base!");

    LayoutNonVirtualBase(BaseInfo);
  }
}

void RecordLayoutBuilder::LayoutNonVirtualBase(const BaseSubobjectInfo *Base) {
  // Lay out the base.
  CharUnits Offset = LayoutBase(Base);

  // Add its base class offset.
  assert(!Bases.count(Base->Class) && "base offset already exists!");
  Bases.insert(std::make_pair(Base->Class, Offset));

  AddPrimaryVirtualBaseOffsets(Base, Offset);
}

void
RecordLayoutBuilder::AddPrimaryVirtualBaseOffsets(const BaseSubobjectInfo *Info,
                                                  CharUnits Offset) {
  // This base isn't interesting; it has no virtual bases.
  if (!Info->Class->getNumVBases())
    return;

  // First, check if we have a virtual primary base to add offsets for.
  if (Info->PrimaryVirtualBaseInfo) {
    assert(Info->PrimaryVirtualBaseInfo->IsVirtual &&
           "Primary virtual base is not virtual!");
    if (Info->PrimaryVirtualBaseInfo->Derived == Info) {
      // Add the offset.
      assert(!VBases.count(Info->PrimaryVirtualBaseInfo->Class) &&
             "primary vbase offset already exists!");
      VBases.insert(std::make_pair(Info->PrimaryVirtualBaseInfo->Class,
                                   Offset));

      // Traverse the primary virtual base.
      AddPrimaryVirtualBaseOffsets(Info->PrimaryVirtualBaseInfo, Offset);
    }
  }

  // Now go through all direct non-virtual bases.
  const ASTRecordLayout &Layout = Context.getASTRecordLayout(Info->Class);
  for (unsigned I = 0, E = Info->Bases.size(); I != E; ++I) {
    const BaseSubobjectInfo *Base = Info->Bases[I];
    if (Base->IsVirtual)
      continue;

    CharUnits BaseOffset = Offset + Layout.getBaseClassOffset(Base->Class);
    AddPrimaryVirtualBaseOffsets(Base, BaseOffset);
  }
}

void
RecordLayoutBuilder::LayoutVirtualBases(const CXXRecordDecl *RD,
                                        const CXXRecordDecl *MostDerivedClass) {
  const CXXRecordDecl *PrimaryBase;
  bool PrimaryBaseIsVirtual;

  if (MostDerivedClass == RD) {
    PrimaryBase = this->PrimaryBase;
    PrimaryBaseIsVirtual = this->PrimaryBaseIsVirtual;
  } else {
    const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
    PrimaryBase = Layout.getPrimaryBase();
    PrimaryBaseIsVirtual = Layout.isPrimaryBaseVirtual();
  }

  for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
         E = RD->bases_end(); I != E; ++I) {
    assert(!I->getType()->isDependentType() &&
           "Cannot layout class with dependent bases.");

    const CXXRecordDecl *BaseDecl =
      cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());

    if (I->isVirtual()) {
      if (PrimaryBase != BaseDecl || !PrimaryBaseIsVirtual) {
        bool IndirectPrimaryBase = IndirectPrimaryBases.count(BaseDecl);

        // Only lay out the virtual base if it's not an indirect primary base.
        if (!IndirectPrimaryBase) {
          // Only visit virtual bases once.
          if (!VisitedVirtualBases.insert(BaseDecl))
            continue;

          const BaseSubobjectInfo *BaseInfo = VirtualBaseInfo.lookup(BaseDecl);
          assert(BaseInfo && "Did not find virtual base info!");
          LayoutVirtualBase(BaseInfo);
        }
      }
    }

    if (!BaseDecl->getNumVBases()) {
      // This base isn't interesting since it doesn't have any virtual bases.
      continue;
    }

    LayoutVirtualBases(BaseDecl, MostDerivedClass);
  }
}

void RecordLayoutBuilder::LayoutVirtualBase(const BaseSubobjectInfo *Base) {
  assert(!Base->Derived && "Trying to lay out a primary virtual base!");

  // Lay out the base.
  CharUnits Offset = LayoutBase(Base);

  // Add its base class offset.
  assert(!VBases.count(Base->Class) && "vbase offset already exists!");
  VBases.insert(std::make_pair(Base->Class, Offset));

  AddPrimaryVirtualBaseOffsets(Base, Offset);
}

CharUnits RecordLayoutBuilder::LayoutBase(const BaseSubobjectInfo *Base) {
  const ASTRecordLayout &Layout = Context.getASTRecordLayout(Base->Class);

  // If we have an empty base class, try to place it at offset 0.
  if (Base->Class->isEmpty() &&
      EmptySubobjects->CanPlaceBaseAtOffset(Base, CharUnits::Zero())) {
    setSize(std::max(getSize(), Layout.getSize()));

    return CharUnits::Zero();
  }

  CharUnits UnpackedBaseAlign = Layout.getNonVirtualAlign();
  CharUnits BaseAlign = (Packed) ? CharUnits::One() : UnpackedBaseAlign;

  // The maximum field alignment overrides base align.
  if (!MaxFieldAlignment.isZero()) {
    BaseAlign = std::min(BaseAlign, MaxFieldAlignment);
    UnpackedBaseAlign = std::min(UnpackedBaseAlign, MaxFieldAlignment);
  }

  // Round up the current record size to the base's alignment boundary.
  CharUnits Offset = getDataSize().RoundUpToAlignment(BaseAlign);

  // Try to place the base.
  while (!EmptySubobjects->CanPlaceBaseAtOffset(Base, Offset))
    Offset += BaseAlign;

  if (!Base->Class->isEmpty()) {
    // Update the data size.
    setDataSize(Offset + Layout.getNonVirtualSize());

    setSize(std::max(getSize(), getDataSize()));
  } else
    setSize(std::max(getSize(), Offset + Layout.getSize()));

  // Remember max struct/class alignment.
  UpdateAlignment(BaseAlign, UnpackedBaseAlign);

  return Offset;
}

void RecordLayoutBuilder::InitializeLayout(const Decl *D) {
  if (const RecordDecl *RD = dyn_cast<RecordDecl>(D))
    IsUnion = RD->isUnion();

  Packed = D->hasAttr<PackedAttr>();

  IsMsStruct = D->hasAttr<MsStructAttr>();

  // mac68k alignment supersedes maximum field alignment and attribute aligned,
  // and forces all structures to have 2-byte alignment. The IBM docs on it
  // allude to additional (more complicated) semantics, especially with regard
  // to bit-fields, but gcc appears not to follow that.
  if (D->hasAttr<AlignMac68kAttr>()) {
    IsMac68kAlign = true;
    MaxFieldAlignment = CharUnits::fromQuantity(2);
    Alignment = CharUnits::fromQuantity(2);
  } else {
    if (const MaxFieldAlignmentAttr *MFAA = D->getAttr<MaxFieldAlignmentAttr>())
      MaxFieldAlignment = Context.toCharUnitsFromBits(MFAA->getAlignment());

    if (unsigned MaxAlign = D->getMaxAlignment())
      UpdateAlignment(Context.toCharUnitsFromBits(MaxAlign));
  }
}

void RecordLayoutBuilder::Layout(const RecordDecl *D) {
  InitializeLayout(D);
  LayoutFields(D);

  // Finally, round the size of the total struct up to the alignment of the
  // struct itself.
  FinishLayout(D);
}

void RecordLayoutBuilder::Layout(const CXXRecordDecl *RD) {
  InitializeLayout(RD);

  // Lay out the vtable and the non-virtual bases.
  LayoutNonVirtualBases(RD);

  LayoutFields(RD);

  NonVirtualSize = Context.toCharUnitsFromBits(
      llvm::RoundUpToAlignment(getSizeInBits(),
                               Context.Target.getCharAlign()));
  NonVirtualAlignment = Alignment;

  // Lay out the virtual bases and add the primary virtual base offsets.
  LayoutVirtualBases(RD, RD);

  VisitedVirtualBases.clear();

  // Finally, round the size of the total struct up to the alignment of the
  // struct itself.
  FinishLayout(RD);

#ifndef NDEBUG
  // Check that we have base offsets for all bases.
  for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
       E = RD->bases_end(); I != E; ++I) {
    if (I->isVirtual())
      continue;

    const CXXRecordDecl *BaseDecl =
      cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());

    assert(Bases.count(BaseDecl) && "Did not find base offset!");
  }

  // And all virtual bases.
  for (CXXRecordDecl::base_class_const_iterator I = RD->vbases_begin(),
       E = RD->vbases_end(); I != E; ++I) {
    const CXXRecordDecl *BaseDecl =
      cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());

    assert(VBases.count(BaseDecl) && "Did not find base offset!");
  }
#endif
}

void RecordLayoutBuilder::Layout(const ObjCInterfaceDecl *D) {
  if (ObjCInterfaceDecl *SD = D->getSuperClass()) {
    const ASTRecordLayout &SL = Context.getASTObjCInterfaceLayout(SD);

    UpdateAlignment(SL.getAlignment());

    // We start laying out ivars not at the end of the superclass
    // structure, but at the next byte following the last field.
    setSize(SL.getDataSize());
    setDataSize(getSize());
  }

  InitializeLayout(D);
  ObjCInterfaceDecl *OI = const_cast<ObjCInterfaceDecl*>(D);
  // Lay out each ivar sequentially.
  for (ObjCIvarDecl *IVD = OI->all_declared_ivar_begin();
       IVD; IVD = IVD->getNextIvar())
    LayoutField(IVD);

  // Finally, round the size of the total struct up to the alignment of the
  // struct itself.
  FinishLayout(D);
}

void RecordLayoutBuilder::LayoutFields(const RecordDecl *D) {
  // Lay out each field, for now just sequentially, respecting alignment. In
  // the future, this will need to be tweakable by targets.
  const FieldDecl *LastFD = 0;
  ZeroLengthBitfield = 0;
  unsigned RemainingInAlignment = 0;
  for (RecordDecl::field_iterator Field = D->field_begin(),
       FieldEnd = D->field_end(); Field != FieldEnd; ++Field) {
    if (IsMsStruct) {
      FieldDecl *FD = (*Field);
      if (Context.ZeroBitfieldFollowsBitfield(FD, LastFD))
        ZeroLengthBitfield = FD;
      // Zero-length bitfields following non-bitfield members are
      // ignored:
      else if (Context.ZeroBitfieldFollowsNonBitfield(FD, LastFD))
        continue;
      // FIXME. streamline these conditions into a simple one.
      else if (Context.BitfieldFollowsBitfield(FD, LastFD) ||
               Context.BitfieldFollowsNoneBitfield(FD, LastFD) ||
               Context.NoneBitfieldFollowsBitfield(FD, LastFD)) {
        // 1) Adjacent bit fields are packed into the same 1-, 2-, or
        // 4-byte allocation unit if the integral types are the same
        // size and if the next bit field fits into the current
        // allocation unit without crossing the boundary imposed by the
        // common alignment requirements of the bit fields.
        // 2) Establish a new alignment for a bitfield following
        // a non-bitfield if the sizes of their types differ.
        // 3) Establish a new alignment for a non-bitfield following
        // a bitfield if the sizes of their types differ.
        std::pair<uint64_t, unsigned> FieldInfo =
          Context.getTypeInfo(FD->getType());
        uint64_t TypeSize = FieldInfo.first;
        unsigned FieldAlign = FieldInfo.second;
        // This check is needed for 'long long' in -m32 mode.
        if (TypeSize > FieldAlign)
          FieldAlign = TypeSize;
        FieldInfo = Context.getTypeInfo(LastFD->getType());
        uint64_t TypeSizeLastFD = FieldInfo.first;
        unsigned FieldAlignLastFD = FieldInfo.second;
        // This check is needed for 'long long' in -m32 mode.
        if (TypeSizeLastFD > FieldAlignLastFD)
          FieldAlignLastFD = TypeSizeLastFD;

        if (TypeSizeLastFD != TypeSize) {
          if (RemainingInAlignment &&
              LastFD && LastFD->isBitField() &&
              LastFD->getBitWidth()->EvaluateAsInt(Context).getZExtValue()) {
            // If the previous field was a bitfield with some remaining
            // unfilled bits, pad the field so the current field starts on its
            // type boundary.
            uint64_t FieldOffset =
              getDataSizeInBits() - UnfilledBitsInLastByte;
            uint64_t NewSizeInBits = RemainingInAlignment + FieldOffset;
            setDataSize(llvm::RoundUpToAlignment(NewSizeInBits,
                                                 Context.Target.getCharAlign()));
            setSize(std::max(getSizeInBits(), getDataSizeInBits()));
            RemainingInAlignment = 0;
          }

          uint64_t UnpaddedFieldOffset =
            getDataSizeInBits() - UnfilledBitsInLastByte;
          FieldAlign = std::max(FieldAlign, FieldAlignLastFD);

          // The maximum field alignment overrides the aligned attribute.
          if (!MaxFieldAlignment.isZero()) {
            unsigned MaxFieldAlignmentInBits =
              Context.toBits(MaxFieldAlignment);
            FieldAlign = std::min(FieldAlign, MaxFieldAlignmentInBits);
          }

          uint64_t NewSizeInBits =
            llvm::RoundUpToAlignment(UnpaddedFieldOffset, FieldAlign);
          setDataSize(llvm::RoundUpToAlignment(NewSizeInBits,
                                               Context.Target.getCharAlign()));
          UnfilledBitsInLastByte = getDataSizeInBits() - NewSizeInBits;
          setSize(std::max(getSizeInBits(), getDataSizeInBits()));
        }
        if (FD->isBitField()) {
          uint64_t FieldSize =
            FD->getBitWidth()->EvaluateAsInt(Context).getZExtValue();
          assert(FieldSize > 0 && "LayoutFields - ms_struct layout");
          if (RemainingInAlignment < FieldSize)
            RemainingInAlignment = TypeSize - FieldSize;
          else
            RemainingInAlignment -= FieldSize;
        }
      }
      else if (FD->isBitField()) {
        uint64_t FieldSize =
          FD->getBitWidth()->EvaluateAsInt(Context).getZExtValue();
        std::pair<uint64_t, unsigned> FieldInfo =
          Context.getTypeInfo(FD->getType());
        uint64_t TypeSize = FieldInfo.first;
        RemainingInAlignment = TypeSize - FieldSize;
      }
      LastFD = FD;
    }
    LayoutField(*Field);
  }
  if (IsMsStruct && RemainingInAlignment &&
      LastFD && LastFD->isBitField() &&
      LastFD->getBitWidth()->EvaluateAsInt(Context).getZExtValue()) {
    // If we ended a bitfield before the full length of the type then
    // pad the struct out to the full length of the last type.
    uint64_t FieldOffset =
      getDataSizeInBits() - UnfilledBitsInLastByte;
    uint64_t NewSizeInBits = RemainingInAlignment + FieldOffset;
    setDataSize(llvm::RoundUpToAlignment(NewSizeInBits,
                                         Context.Target.getCharAlign()));
    setSize(std::max(getSizeInBits(), getDataSizeInBits()));
  }
}

void RecordLayoutBuilder::LayoutWideBitField(uint64_t FieldSize,
                                             uint64_t TypeSize,
                                             bool FieldPacked,
                                             const FieldDecl *D) {
  assert(Context.getLangOptions().CPlusPlus &&
         "Can only have wide bit-fields in C++!");

  // Itanium C++ ABI 2.4:
  //   If sizeof(T)*8 < n, let T' be the largest integral POD type with
  //   sizeof(T')*8 <= n.

  QualType IntegralPODTypes[] = {
    Context.UnsignedCharTy, Context.UnsignedShortTy, Context.UnsignedIntTy,
    Context.UnsignedLongTy, Context.UnsignedLongLongTy
  };

  QualType Type;
  for (unsigned I = 0, E = llvm::array_lengthof(IntegralPODTypes);
       I != E; ++I) {
    uint64_t Size = Context.getTypeSize(IntegralPODTypes[I]);

    if (Size > FieldSize)
      break;

    Type = IntegralPODTypes[I];
  }
  assert(!Type.isNull() && "Did not find a type!");

  CharUnits TypeAlign = Context.getTypeAlignInChars(Type);

  // We're not going to use any of the unfilled bits in the last byte.
  UnfilledBitsInLastByte = 0;

  uint64_t FieldOffset;
  uint64_t UnpaddedFieldOffset = getDataSizeInBits() - UnfilledBitsInLastByte;

  if (IsUnion) {
    setDataSize(std::max(getDataSizeInBits(), FieldSize));
    FieldOffset = 0;
  } else {
    // The bitfield is allocated starting at the next offset aligned
    // appropriately for T', with length n bits.
    FieldOffset = llvm::RoundUpToAlignment(getDataSizeInBits(),
                                           Context.toBits(TypeAlign));

    uint64_t NewSizeInBits = FieldOffset + FieldSize;

    setDataSize(llvm::RoundUpToAlignment(NewSizeInBits,
                                         Context.Target.getCharAlign()));
    UnfilledBitsInLastByte = getDataSizeInBits() - NewSizeInBits;
  }

  // Place this field at the current location.
  FieldOffsets.push_back(FieldOffset);

  CheckFieldPadding(FieldOffset, UnpaddedFieldOffset, FieldOffset,
                    Context.toBits(TypeAlign), FieldPacked, D);

  // Update the size.
  setSize(std::max(getSizeInBits(), getDataSizeInBits()));

  // Remember max struct/class alignment.
  UpdateAlignment(TypeAlign);
}

void RecordLayoutBuilder::LayoutBitField(const FieldDecl *D) {
  bool FieldPacked = Packed || D->hasAttr<PackedAttr>();
  uint64_t UnpaddedFieldOffset = getDataSizeInBits() - UnfilledBitsInLastByte;
  uint64_t FieldOffset = IsUnion ? 0 : UnpaddedFieldOffset;
  uint64_t FieldSize = D->getBitWidth()->EvaluateAsInt(Context).getZExtValue();

  std::pair<uint64_t, unsigned> FieldInfo = Context.getTypeInfo(D->getType());
  uint64_t TypeSize = FieldInfo.first;
  unsigned FieldAlign = FieldInfo.second;

  // This check is needed for 'long long' in -m32 mode.
  if (IsMsStruct && (TypeSize > FieldAlign))
    FieldAlign = TypeSize;

  if (ZeroLengthBitfield) {
    // If a zero-length bitfield is inserted after a bitfield, and the
    // alignment of the zero-length bitfield is greater than that of the
    // member `bar' that follows it, `bar' will be aligned as the type of
    // the zero-length bitfield.
    if (ZeroLengthBitfield != D) {
      std::pair<uint64_t, unsigned> FieldInfo =
        Context.getTypeInfo(ZeroLengthBitfield->getType());
      unsigned ZeroLengthBitfieldAlignment = FieldInfo.second;
      // Ignore alignment of subsequent zero-length bitfields.
      if ((ZeroLengthBitfieldAlignment > FieldAlign) || (FieldSize == 0))
        FieldAlign = ZeroLengthBitfieldAlignment;
      if (FieldSize)
        ZeroLengthBitfield = 0;
    }
  }

  if (FieldSize > TypeSize) {
    LayoutWideBitField(FieldSize, TypeSize, FieldPacked, D);
    return;
  }

  // The alignment of the field if it is not packed. This is used to check if
  // the packed attribute was unnecessary (-Wpacked).
  unsigned UnpackedFieldAlign = FieldAlign;
  uint64_t UnpackedFieldOffset = FieldOffset;
  if (!Context.Target.useBitFieldTypeAlignment())
    UnpackedFieldAlign = 1;

  if (FieldPacked || !Context.Target.useBitFieldTypeAlignment())
    FieldAlign = 1;
  FieldAlign = std::max(FieldAlign, D->getMaxAlignment());
  UnpackedFieldAlign = std::max(UnpackedFieldAlign, D->getMaxAlignment());

  // The maximum field alignment overrides the aligned attribute.
  if (!MaxFieldAlignment.isZero()) {
    unsigned MaxFieldAlignmentInBits = Context.toBits(MaxFieldAlignment);
    FieldAlign = std::min(FieldAlign, MaxFieldAlignmentInBits);
    UnpackedFieldAlign = std::min(UnpackedFieldAlign, MaxFieldAlignmentInBits);
  }

  // Check if we need to add padding to give the field the correct alignment.
  if (FieldSize == 0 || (FieldOffset & (FieldAlign-1)) + FieldSize > TypeSize)
    FieldOffset = llvm::RoundUpToAlignment(FieldOffset, FieldAlign);

  if (FieldSize == 0 ||
      (UnpackedFieldOffset & (UnpackedFieldAlign-1)) + FieldSize > TypeSize)
    UnpackedFieldOffset = llvm::RoundUpToAlignment(UnpackedFieldOffset,
                                                   UnpackedFieldAlign);

  // Padding members don't affect overall alignment.
  if (!D->getIdentifier())
    FieldAlign = UnpackedFieldAlign = 1;

  // Place this field at the current location.
  FieldOffsets.push_back(FieldOffset);

  CheckFieldPadding(FieldOffset, UnpaddedFieldOffset, UnpackedFieldOffset,
                    UnpackedFieldAlign, FieldPacked, D);

  // Update DataSize to include the last byte containing (part of) the bitfield.
  if (IsUnion) {
    // FIXME: I think FieldSize should be TypeSize here.
    setDataSize(std::max(getDataSizeInBits(), FieldSize));
  } else {
    uint64_t NewSizeInBits = FieldOffset + FieldSize;

    setDataSize(llvm::RoundUpToAlignment(NewSizeInBits,
                                         Context.Target.getCharAlign()));
    UnfilledBitsInLastByte = getDataSizeInBits() - NewSizeInBits;
  }

  // Update the size.
  setSize(std::max(getSizeInBits(), getDataSizeInBits()));

  // Remember max struct/class alignment.
  UpdateAlignment(Context.toCharUnitsFromBits(FieldAlign),
                  Context.toCharUnitsFromBits(UnpackedFieldAlign));
}

void RecordLayoutBuilder::LayoutField(const FieldDecl *D) {
  if (D->isBitField()) {
    LayoutBitField(D);
    return;
  }

  uint64_t UnpaddedFieldOffset = getDataSizeInBits() - UnfilledBitsInLastByte;

  // Reset the unfilled bits.
  UnfilledBitsInLastByte = 0;

  bool FieldPacked = Packed || D->hasAttr<PackedAttr>();
  CharUnits FieldOffset =
    IsUnion ? CharUnits::Zero() : getDataSize();
  CharUnits FieldSize;
  CharUnits FieldAlign;

  if (D->getType()->isIncompleteArrayType()) {
    // This is a flexible array member; we can't directly
    // query getTypeInfo about these, so we figure it out here.
    // Flexible array members don't have any size, but they
    // have to be aligned appropriately for their element type.
    FieldSize = CharUnits::Zero();
    const ArrayType* ATy = Context.getAsArrayType(D->getType());
    FieldAlign = Context.getTypeAlignInChars(ATy->getElementType());
  } else if (const ReferenceType *RT = D->getType()->getAs<ReferenceType>()) {
    unsigned AS = RT->getPointeeType().getAddressSpace();
    FieldSize =
      Context.toCharUnitsFromBits(Context.Target.getPointerWidth(AS));
    FieldAlign =
      Context.toCharUnitsFromBits(Context.Target.getPointerAlign(AS));
  } else {
    std::pair<CharUnits, CharUnits> FieldInfo =
      Context.getTypeInfoInChars(D->getType());
    FieldSize = FieldInfo.first;
    FieldAlign = FieldInfo.second;

    if (ZeroLengthBitfield) {
      // If a zero-length bitfield is inserted after a bitfield, and the
      // alignment of the zero-length bitfield is greater than that of the
      // member `bar' that follows it, `bar' will be aligned as the type of
      // the zero-length bitfield.
      std::pair<CharUnits, CharUnits> FieldInfo =
        Context.getTypeInfoInChars(ZeroLengthBitfield->getType());
      CharUnits ZeroLengthBitfieldAlignment = FieldInfo.second;
      if (ZeroLengthBitfieldAlignment > FieldAlign)
        FieldAlign = ZeroLengthBitfieldAlignment;
      ZeroLengthBitfield = 0;
    }

    if (Context.getLangOptions().MSBitfields || IsMsStruct) {
      // If MS bitfield layout is required, figure out what type is being
      // laid out and align the field to the width of that type.

      // Resolve all typedefs down to their base type and round up the field
      // alignment if necessary.
      QualType T = Context.getBaseElementType(D->getType());
      if (const BuiltinType *BTy = T->getAs<BuiltinType>()) {
        CharUnits TypeSize = Context.getTypeSizeInChars(BTy);
        if (TypeSize > FieldAlign)
          FieldAlign = TypeSize;
      }
    }
  }

  // The alignment of the field if it is not packed. This is used to check if
  // the packed attribute was unnecessary (-Wpacked).
  CharUnits UnpackedFieldAlign = FieldAlign;
  CharUnits UnpackedFieldOffset = FieldOffset;

  if (FieldPacked)
    FieldAlign = CharUnits::One();
  CharUnits MaxAlignmentInChars =
    Context.toCharUnitsFromBits(D->getMaxAlignment());
  FieldAlign = std::max(FieldAlign, MaxAlignmentInChars);
  UnpackedFieldAlign = std::max(UnpackedFieldAlign, MaxAlignmentInChars);

  // The maximum field alignment overrides the aligned attribute.
  if (!MaxFieldAlignment.isZero()) {
    FieldAlign = std::min(FieldAlign, MaxFieldAlignment);
    UnpackedFieldAlign = std::min(UnpackedFieldAlign, MaxFieldAlignment);
  }

  // Round up the current record size to the field's alignment boundary.
  FieldOffset = FieldOffset.RoundUpToAlignment(FieldAlign);
  UnpackedFieldOffset =
    UnpackedFieldOffset.RoundUpToAlignment(UnpackedFieldAlign);

  if (!IsUnion && EmptySubobjects) {
    // Check if we can place the field at this offset.
    while (!EmptySubobjects->CanPlaceFieldAtOffset(D, FieldOffset)) {
      // We couldn't place the field at the offset. Try again at a new offset.
      FieldOffset += FieldAlign;
    }
  }

  // Place this field at the current location.
  FieldOffsets.push_back(Context.toBits(FieldOffset));

  CheckFieldPadding(Context.toBits(FieldOffset), UnpaddedFieldOffset,
                    Context.toBits(UnpackedFieldOffset),
                    Context.toBits(UnpackedFieldAlign), FieldPacked, D);

  // Reserve space for this field.
1631 uint64_t FieldSizeInBits = Context.toBits(FieldSize);
1632 if (IsUnion)
1633 setSize(std::max(getSizeInBits(), FieldSizeInBits));
1634 else
1635 setSize(FieldOffset + FieldSize);
1636
1637 // Update the data size.
1638 setDataSize(getSizeInBits());
1639
1640 // Remember max struct/class alignment.
1641 UpdateAlignment(FieldAlign, UnpackedFieldAlign);
1642 }
1643
1644 void RecordLayoutBuilder::FinishLayout(const NamedDecl *D) {
1645 // In C++, records cannot be of size 0.
1646 if (Context.getLangOptions().CPlusPlus && getSizeInBits() == 0) {
1647 if (const CXXRecordDecl *RD = dyn_cast<CXXRecordDecl>(D)) {
1648 // For gcc compatibility, a class (POD or non-POD) that is not empty
1649 // but still has size 0 (for example because all of its fields are
1650 // zero-length arrays) keeps its size of 0.
1651 if (RD->isEmpty())
1652 setSize(CharUnits::One());
1653 }
1654 else
1655 setSize(CharUnits::One());
1656 }
1657 // Finally, round the size of the record up to the alignment of the
1658 // record itself.
1659 uint64_t UnpaddedSize = getSizeInBits() - UnfilledBitsInLastByte;
1660 uint64_t UnpackedSizeInBits =
1661 llvm::RoundUpToAlignment(getSizeInBits(),
1662 Context.toBits(UnpackedAlignment));
1663 CharUnits UnpackedSize = Context.toCharUnitsFromBits(UnpackedSizeInBits);
1664 setSize(llvm::RoundUpToAlignment(getSizeInBits(), Context.toBits(Alignment)));
1665
1666 unsigned CharBitNum = Context.Target.getCharWidth();
1667 if (const RecordDecl *RD = dyn_cast<RecordDecl>(D)) {
1668 // Warn if padding was introduced to the struct/class/union.
1669 if (getSizeInBits() > UnpaddedSize) {
1670 unsigned PadSize = getSizeInBits() - UnpaddedSize;
1671 bool InBits = true;
1672 if (PadSize % CharBitNum == 0) {
1673 PadSize = PadSize / CharBitNum;
1674 InBits = false;
1675 }
1676 Diag(RD->getLocation(), diag::warn_padded_struct_size)
1677 << Context.getTypeDeclType(RD)
1678 << PadSize
1679 << (InBits ? 1 : 0) /*(byte|bit)*/ << (PadSize > 1); // plural or not
1680 }
1681
1682 // Warn if we packed it unnecessarily. If the alignment is 1 byte don't
1683 // bother since there won't be alignment issues.
1684 if (Packed && UnpackedAlignment > CharUnits::One() &&
1685 getSize() == UnpackedSize)
1686 Diag(D->getLocation(), diag::warn_unnecessary_packed)
1687 << Context.getTypeDeclType(RD);
1688 }
1689 }
1690
1691 void RecordLayoutBuilder::UpdateAlignment(CharUnits NewAlignment,
1692 CharUnits UnpackedNewAlignment) {
1693 // The alignment is not modified when using 'mac68k' alignment.
1694 if (IsMac68kAlign)
1695 return;
1696
1697 if (NewAlignment > Alignment) {
1698 assert(llvm::isPowerOf2_32(NewAlignment.getQuantity()) &&
1699 "Alignment not a power of 2");
1700 Alignment = NewAlignment;
1701 }
1702
1703 if (UnpackedNewAlignment > UnpackedAlignment) {
1704 assert(llvm::isPowerOf2_32(UnpackedNewAlignment.getQuantity()) &&
1705 "Alignment not a power of 2");
1706 UnpackedAlignment = UnpackedNewAlignment;
1707 }
1708 }
1709
1710 void RecordLayoutBuilder::CheckFieldPadding(uint64_t Offset,
1711 uint64_t UnpaddedOffset,
1712 uint64_t UnpackedOffset,
1713 unsigned UnpackedAlign,
1714 bool isPacked,
1715 const FieldDecl *D) {
1716 // Don't warn about ObjC ivars; ObjC interfaces generally are not used
1717 // for padding tricks.
1718 if (isa<ObjCIvarDecl>(D))
1719 return;
1720
1721 unsigned CharBitNum = Context.Target.getCharWidth();
1722
1723 // Warn if padding was introduced to the struct/class.
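  // For example (illustrative, assuming an 8-byte-aligned double), in
  //
  //   struct S { char c; double d; };
  //
  // seven bytes of padding are inserted after 'c' so that 'd' starts on an
  // 8-byte boundary, and the -Wpadded diagnostic below reports that padding.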
1724 if (!IsUnion && Offset > UnpaddedOffset) {
1725 unsigned PadSize = Offset - UnpaddedOffset;
1726 bool InBits = true;
1727 if (PadSize % CharBitNum == 0) {
1728 PadSize = PadSize / CharBitNum;
1729 InBits = false;
1730 }
1731 if (D->getIdentifier())
1732 Diag(D->getLocation(), diag::warn_padded_struct_field)
1733 << (D->getParent()->isStruct() ? 0 : 1) // struct|class
1734 << Context.getTypeDeclType(D->getParent())
1735 << PadSize
1736 << (InBits ? 1 : 0) /*(byte|bit)*/ << (PadSize > 1) // plural or not
1737 << D->getIdentifier();
1738 else
1739 Diag(D->getLocation(), diag::warn_padded_struct_anon_field)
1740 << (D->getParent()->isStruct() ? 0 : 1) // struct|class
1741 << Context.getTypeDeclType(D->getParent())
1742 << PadSize
1743 << (InBits ? 1 : 0) /*(byte|bit)*/ << (PadSize > 1); // plural or not
1744 }
1745
1746 // Warn if we packed it unnecessarily. If the alignment is 1 byte don't
1747 // bother since there won't be alignment issues.
1748 if (isPacked && UnpackedAlign > CharBitNum && Offset == UnpackedOffset)
1749 Diag(D->getLocation(), diag::warn_unnecessary_packed)
1750 << D->getIdentifier();
1751 }
1752
1753 const CXXMethodDecl *
1754 RecordLayoutBuilder::ComputeKeyFunction(const CXXRecordDecl *RD) {
1755 // If a class isn't polymorphic it doesn't have a key function.
1756 if (!RD->isPolymorphic())
1757 return 0;
1758
1759 // A class that is not externally visible doesn't have a key function. (Or
1760 // at least, there's no point in assigning a key function to such a class;
1761 // this doesn't affect the ABI.)
1762 if (RD->getLinkage() != ExternalLinkage)
1763 return 0;
1764
1765 // Template instantiations don't have key functions; see Itanium C++ ABI 5.2.6.
1766 // Same behavior as GCC.
1767 TemplateSpecializationKind TSK = RD->getTemplateSpecializationKind();
1768 if (TSK == TSK_ImplicitInstantiation ||
1769 TSK == TSK_ExplicitInstantiationDefinition)
1770 return 0;
1771
1772 for (CXXRecordDecl::method_iterator I = RD->method_begin(),
1773 E = RD->method_end(); I != E; ++I) {
1774 const CXXMethodDecl *MD = *I;
1775
1776 if (!MD->isVirtual())
1777 continue;
1778
1779 if (MD->isPure())
1780 continue;
1781
1782 // Ignore implicit member functions; they are always marked as inline, but
1783 // they don't have a body until they're defined.
1784 if (MD->isImplicit())
1785 continue;
1786
1787 if (MD->isInlineSpecified())
1788 continue;
1789
1790 if (MD->hasInlineBody())
1791 continue;
1792
1793 // We found it.
1794 return MD;
1795 }
1796
1797 return 0;
1798 }
1799
1800 DiagnosticBuilder
1801 RecordLayoutBuilder::Diag(SourceLocation Loc, unsigned DiagID) {
1802 return Context.getDiagnostics().Report(Loc, DiagID);
1803 }
1804
1805 namespace {
1806 // This class implements layout specific to the Microsoft ABI.
1807 class MSRecordLayoutBuilder : public RecordLayoutBuilder {
1808 public:
1809 MSRecordLayoutBuilder(const ASTContext& Ctx,
1810 EmptySubobjectMap *EmptySubobjects) :
1811 RecordLayoutBuilder(Ctx, EmptySubobjects) {}
1812
1813 virtual CharUnits GetVirtualPointersSize(const CXXRecordDecl *RD) const;
1814 };
1815 }
1816
1817 CharUnits
1818 MSRecordLayoutBuilder::GetVirtualPointersSize(const CXXRecordDecl *RD) const {
1819 // We should reserve space for two pointers if the class has both
1820 // virtual functions and virtual bases.
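  // For example (illustrative):
  //
  //   struct A { virtual void f(); };
  //   struct B : virtual A { virtual void g(); };
  //
  // A is polymorphic but has no virtual bases, so a single pointer (the
  // vfptr) is reserved for it; B is polymorphic and also has a virtual base,
  // so two pointers (a vfptr and a vbptr) are reserved.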
1821 CharUnits PointerWidth = 1822 Context.toCharUnitsFromBits(Context.Target.getPointerWidth(0)); 1823 if (RD->isPolymorphic() && RD->getNumVBases() > 0) 1824 return 2 * PointerWidth; 1825 return PointerWidth; 1826 } 1827 1828 /// getASTRecordLayout - Get or compute information about the layout of the 1829 /// specified record (struct/union/class), which indicates its size and field 1830 /// position information. 1831 const ASTRecordLayout & 1832 ASTContext::getASTRecordLayout(const RecordDecl *D) const { 1833 D = D->getDefinition(); 1834 assert(D && "Cannot get layout of forward declarations!"); 1835 1836 // Look up this layout, if already laid out, return what we have. 1837 // Note that we can't save a reference to the entry because this function 1838 // is recursive. 1839 const ASTRecordLayout *Entry = ASTRecordLayouts[D]; 1840 if (Entry) return *Entry; 1841 1842 const ASTRecordLayout *NewEntry; 1843 1844 if (const CXXRecordDecl *RD = dyn_cast<CXXRecordDecl>(D)) { 1845 EmptySubobjectMap EmptySubobjects(*this, RD); 1846 1847 // When compiling for Microsoft, use the special MS builder. 1848 llvm::OwningPtr<RecordLayoutBuilder> Builder; 1849 switch (Target.getCXXABI()) { 1850 default: 1851 Builder.reset(new RecordLayoutBuilder(*this, &EmptySubobjects)); 1852 break; 1853 case CXXABI_Microsoft: 1854 Builder.reset(new MSRecordLayoutBuilder(*this, &EmptySubobjects)); 1855 } 1856 // Recover resources if we crash before exiting this method. 1857 llvm::CrashRecoveryContextCleanupRegistrar<RecordLayoutBuilder> 1858 RecordBuilderCleanup(Builder.get()); 1859 1860 Builder->Layout(RD); 1861 1862 // FIXME: This is not always correct. See the part about bitfields at 1863 // http://www.codesourcery.com/public/cxx-abi/abi.html#POD for more info. 1864 // FIXME: IsPODForThePurposeOfLayout should be stored in the record layout. 1865 bool IsPODForThePurposeOfLayout = cast<CXXRecordDecl>(D)->isPOD(); 1866 1867 // FIXME: This should be done in FinalizeLayout. 1868 CharUnits DataSize = 1869 IsPODForThePurposeOfLayout ? Builder->getSize() : Builder->getDataSize(); 1870 CharUnits NonVirtualSize = 1871 IsPODForThePurposeOfLayout ? 
DataSize : Builder->NonVirtualSize;
1872
1873 NewEntry =
1874 new (*this) ASTRecordLayout(*this, Builder->getSize(),
1875 Builder->Alignment,
1876 DataSize,
1877 Builder->FieldOffsets.data(),
1878 Builder->FieldOffsets.size(),
1879 NonVirtualSize,
1880 Builder->NonVirtualAlignment,
1881 EmptySubobjects.SizeOfLargestEmptySubobject,
1882 Builder->PrimaryBase,
1883 Builder->PrimaryBaseIsVirtual,
1884 Builder->Bases, Builder->VBases);
1885 } else {
1886 RecordLayoutBuilder Builder(*this, /*EmptySubobjects=*/0);
1887 Builder.Layout(D);
1888
1889 NewEntry =
1890 new (*this) ASTRecordLayout(*this, Builder.getSize(),
1891 Builder.Alignment,
1892 Builder.getSize(),
1893 Builder.FieldOffsets.data(),
1894 Builder.FieldOffsets.size());
1895 }
1896
1897 ASTRecordLayouts[D] = NewEntry;
1898
1899 if (getLangOptions().DumpRecordLayouts) {
1900 llvm::errs() << "\n*** Dumping AST Record Layout\n";
1901 DumpRecordLayout(D, llvm::errs());
1902 }
1903
1904 return *NewEntry;
1905 }
1906
1907 const CXXMethodDecl *ASTContext::getKeyFunction(const CXXRecordDecl *RD) {
1908 RD = cast<CXXRecordDecl>(RD->getDefinition());
1909 assert(RD && "Cannot get key function for forward declarations!");
1910
1911 const CXXMethodDecl *&Entry = KeyFunctions[RD];
1912 if (!Entry)
1913 Entry = RecordLayoutBuilder::ComputeKeyFunction(RD);
1914
1915 return Entry;
1916 }
1917
1918 /// getObjCLayout - Get or compute information about the layout of the
1919 /// given interface.
1920 ///
1921 /// \param Impl - If given, also include the layout of the interface's
1922 /// implementation. This may differ by including synthesized ivars.
1923 const ASTRecordLayout &
1924 ASTContext::getObjCLayout(const ObjCInterfaceDecl *D,
1925 const ObjCImplementationDecl *Impl) const {
1926 assert(!D->isForwardDecl() && "Invalid interface decl!");
1927
1928 // Look up this layout, if already laid out, return what we have.
1929 ObjCContainerDecl *Key =
1930 Impl ? (ObjCContainerDecl*) Impl : (ObjCContainerDecl*) D;
1931 if (const ASTRecordLayout *Entry = ObjCLayouts[Key])
1932 return *Entry;
1933
1934 // Add in synthesized ivar count if laying out an implementation.
1935 if (Impl) {
1936 unsigned SynthCount = CountNonClassIvars(D);
1937 // If there aren't any synthesized ivars then reuse the interface
1938 // entry. Note we can't cache this because we simply free all
1939 // entries later; however we shouldn't look up implementations
1940 // frequently.
1941 if (SynthCount == 0) 1942 return getObjCLayout(D, 0); 1943 } 1944 1945 RecordLayoutBuilder Builder(*this, /*EmptySubobjects=*/0); 1946 Builder.Layout(D); 1947 1948 const ASTRecordLayout *NewEntry = 1949 new (*this) ASTRecordLayout(*this, Builder.getSize(), 1950 Builder.Alignment, 1951 Builder.getDataSize(), 1952 Builder.FieldOffsets.data(), 1953 Builder.FieldOffsets.size()); 1954 1955 ObjCLayouts[Key] = NewEntry; 1956 1957 return *NewEntry; 1958 } 1959 1960 static void PrintOffset(llvm::raw_ostream &OS, 1961 CharUnits Offset, unsigned IndentLevel) { 1962 OS << llvm::format("%4d | ", Offset.getQuantity()); 1963 OS.indent(IndentLevel * 2); 1964 } 1965 1966 static void DumpCXXRecordLayout(llvm::raw_ostream &OS, 1967 const CXXRecordDecl *RD, const ASTContext &C, 1968 CharUnits Offset, 1969 unsigned IndentLevel, 1970 const char* Description, 1971 bool IncludeVirtualBases) { 1972 const ASTRecordLayout &Layout = C.getASTRecordLayout(RD); 1973 1974 PrintOffset(OS, Offset, IndentLevel); 1975 OS << C.getTypeDeclType(const_cast<CXXRecordDecl *>(RD)).getAsString(); 1976 if (Description) 1977 OS << ' ' << Description; 1978 if (RD->isEmpty()) 1979 OS << " (empty)"; 1980 OS << '\n'; 1981 1982 IndentLevel++; 1983 1984 const CXXRecordDecl *PrimaryBase = Layout.getPrimaryBase(); 1985 1986 // Vtable pointer. 1987 if (RD->isDynamicClass() && !PrimaryBase) { 1988 PrintOffset(OS, Offset, IndentLevel); 1989 OS << '(' << RD << " vtable pointer)\n"; 1990 } 1991 // Dump (non-virtual) bases 1992 for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(), 1993 E = RD->bases_end(); I != E; ++I) { 1994 assert(!I->getType()->isDependentType() && 1995 "Cannot layout class with dependent bases."); 1996 if (I->isVirtual()) 1997 continue; 1998 1999 const CXXRecordDecl *Base = 2000 cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl()); 2001 2002 CharUnits BaseOffset = Offset + Layout.getBaseClassOffset(Base); 2003 2004 DumpCXXRecordLayout(OS, Base, C, BaseOffset, IndentLevel, 2005 Base == PrimaryBase ? "(primary base)" : "(base)", 2006 /*IncludeVirtualBases=*/false); 2007 } 2008 2009 // Dump fields. 2010 uint64_t FieldNo = 0; 2011 for (CXXRecordDecl::field_iterator I = RD->field_begin(), 2012 E = RD->field_end(); I != E; ++I, ++FieldNo) { 2013 const FieldDecl *Field = *I; 2014 CharUnits FieldOffset = Offset + 2015 C.toCharUnitsFromBits(Layout.getFieldOffset(FieldNo)); 2016 2017 if (const RecordType *RT = Field->getType()->getAs<RecordType>()) { 2018 if (const CXXRecordDecl *D = dyn_cast<CXXRecordDecl>(RT->getDecl())) { 2019 DumpCXXRecordLayout(OS, D, C, FieldOffset, IndentLevel, 2020 Field->getName().data(), 2021 /*IncludeVirtualBases=*/true); 2022 continue; 2023 } 2024 } 2025 2026 PrintOffset(OS, FieldOffset, IndentLevel); 2027 OS << Field->getType().getAsString() << ' ' << Field << '\n'; 2028 } 2029 2030 if (!IncludeVirtualBases) 2031 return; 2032 2033 // Dump virtual bases. 2034 for (CXXRecordDecl::base_class_const_iterator I = RD->vbases_begin(), 2035 E = RD->vbases_end(); I != E; ++I) { 2036 assert(I->isVirtual() && "Found non-virtual class!"); 2037 const CXXRecordDecl *VBase = 2038 cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl()); 2039 2040 CharUnits VBaseOffset = Offset + Layout.getVBaseClassOffset(VBase); 2041 DumpCXXRecordLayout(OS, VBase, C, VBaseOffset, IndentLevel, 2042 VBase == PrimaryBase ? 
2043 "(primary virtual base)" : "(virtual base)", 2044 /*IncludeVirtualBases=*/false); 2045 } 2046 2047 OS << " sizeof=" << Layout.getSize().getQuantity(); 2048 OS << ", dsize=" << Layout.getDataSize().getQuantity(); 2049 OS << ", align=" << Layout.getAlignment().getQuantity() << '\n'; 2050 OS << " nvsize=" << Layout.getNonVirtualSize().getQuantity(); 2051 OS << ", nvalign=" << Layout.getNonVirtualAlign().getQuantity() << '\n'; 2052 OS << '\n'; 2053 } 2054 2055 void ASTContext::DumpRecordLayout(const RecordDecl *RD, 2056 llvm::raw_ostream &OS) const { 2057 const ASTRecordLayout &Info = getASTRecordLayout(RD); 2058 2059 if (const CXXRecordDecl *CXXRD = dyn_cast<CXXRecordDecl>(RD)) 2060 return DumpCXXRecordLayout(OS, CXXRD, *this, CharUnits(), 0, 0, 2061 /*IncludeVirtualBases=*/true); 2062 2063 OS << "Type: " << getTypeDeclType(RD).getAsString() << "\n"; 2064 OS << "Record: "; 2065 RD->dump(); 2066 OS << "\nLayout: "; 2067 OS << "<ASTRecordLayout\n"; 2068 OS << " Size:" << toBits(Info.getSize()) << "\n"; 2069 OS << " DataSize:" << toBits(Info.getDataSize()) << "\n"; 2070 OS << " Alignment:" << toBits(Info.getAlignment()) << "\n"; 2071 OS << " FieldOffsets: ["; 2072 for (unsigned i = 0, e = Info.getFieldCount(); i != e; ++i) { 2073 if (i) OS << ", "; 2074 OS << Info.getFieldOffset(i); 2075 } 2076 OS << "]>\n"; 2077 } 2078