//===--- MicrosoftVBTables.cpp - Virtual Base Table Emission --------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This class generates data about MSVC virtual base tables.
//
//===----------------------------------------------------------------------===//

#include "MicrosoftVBTables.h"
#include "CodeGenModule.h"
#include "CGCXXABI.h"

namespace clang {
namespace CodeGen {

/// Holds intermediate data about a path to a vbptr inside a base subobject.
struct VBTablePath {
  VBTablePath(const VBTableInfo &VBInfo)
    : VBInfo(VBInfo), NextBase(VBInfo.VBPtrSubobject.getBase()) { }

  /// All the data needed to build a vbtable, minus the GlobalVariable whose
  /// name we haven't computed yet.
  VBTableInfo VBInfo;

  /// Next base to use for disambiguation. Can be null if we've already
  /// disambiguated this path once.
  const CXXRecordDecl *NextBase;

  /// Path is not really a full path like a CXXBasePath. It holds the subset
  /// of records that need to be mangled into the vbtable symbol name in order
  /// to get a unique name.
  llvm::SmallVector<const CXXRecordDecl *, 1> Path;
};

VBTableBuilder::VBTableBuilder(CodeGenModule &CGM,
                               const CXXRecordDecl *MostDerived)
    : CGM(CGM), MostDerived(MostDerived),
      DerivedLayout(CGM.getContext().getASTRecordLayout(MostDerived)) {}

void VBTableBuilder::enumerateVBTables(VBTableVector &VBTables) {
  VBTablePathVector Paths;
  findUnambiguousPaths(MostDerived, BaseSubobject(MostDerived,
                                                  CharUnits::Zero()), Paths);
  for (VBTablePathVector::iterator I = Paths.begin(), E = Paths.end();
       I != E; ++I) {
    VBTablePath *P = *I;
    P->VBInfo.GV = getAddrOfVBTable(P->VBInfo.ReusingBase, P->Path);
    VBTables.push_back(P->VBInfo);
  }
}

bool VBTableBuilder::hasVBPtr(const CXXRecordDecl *RD) {
  const ASTRecordLayout &Layout = CGM.getContext().getASTRecordLayout(RD);
  return Layout.getVBPtrOffset().getQuantity() != -1;
}

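// As a minimal sketch of why path disambiguation is needed (the structs A, B,
// C, and D here are purely illustrative), consider a diamond hierarchy under
// the MSVC ABI:
//
//   struct A { int a; };
//   struct B : virtual A { int b; };  // B gets its own vbptr and vbtable.
//   struct C : virtual A { int c; };  // C gets its own vbptr and vbtable.
//   struct D : B, C { int d; };       // D shares B's vbptr and also contains
//                                     // C's vbptr, so it emits two vbtables.
//
// Both of D's vbtables would otherwise be named after D alone, so the direct
// bases B and C are mangled into the symbol names to keep them distinct.
// findUnambiguousPaths() discovers each vbptr below MostDerived and records
// just enough of the path to it to produce a unique name.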
void VBTableBuilder::findUnambiguousPaths(const CXXRecordDecl *ReusingBase,
                                          BaseSubobject CurSubobject,
                                          VBTablePathVector &Paths) {
  size_t PathsStart = Paths.size();
  bool ReuseVBPtrFromBase = true;
  const CXXRecordDecl *CurBase = CurSubobject.getBase();

  // If this base has a vbptr, then we've found a path. These are not full
  // paths, so we don't use CXXBasePath.
  if (hasVBPtr(CurBase)) {
    ReuseVBPtrFromBase = false;
    VBTablePath *Info = new VBTablePath(
        VBTableInfo(ReusingBase, CurSubobject, /*GV=*/0));
    Paths.push_back(Info);
  }

  // Recurse onto any bases which themselves have virtual bases.
  const ASTRecordLayout &Layout = CGM.getContext().getASTRecordLayout(CurBase);
  for (CXXRecordDecl::base_class_const_iterator I = CurBase->bases_begin(),
       E = CurBase->bases_end(); I != E; ++I) {
    const CXXRecordDecl *Base = I->getType()->getAsCXXRecordDecl();
    if (!Base->getNumVBases())
      continue;  // Bases without virtual bases have no vbptrs.
    CharUnits NextOffset;
    const CXXRecordDecl *NextReusingBase = Base;
    if (I->isVirtual()) {
      if (!VBasesSeen.insert(Base))
        continue;  // Don't visit virtual bases twice.
      NextOffset = DerivedLayout.getVBaseClassOffset(Base);
    } else {
      NextOffset = (CurSubobject.getBaseOffset() +
                    Layout.getBaseClassOffset(Base));

      // If CurBase doesn't have its own vbptr, then ReusingBase reuses the
      // vbptr of its first non-virtual base that has virtual bases.
      if (ReuseVBPtrFromBase) {
        NextReusingBase = ReusingBase;
        ReuseVBPtrFromBase = false;
      }
    }

    size_t NumPaths = Paths.size();
    findUnambiguousPaths(NextReusingBase, BaseSubobject(Base, NextOffset),
                         Paths);

    // Tag paths through this base with the base itself. We might use it to
    // disambiguate.
    for (size_t I = NumPaths, E = Paths.size(); I != E; ++I)
      Paths[I]->NextBase = Base;
  }

  bool AmbiguousPaths = rebucketPaths(Paths, PathsStart);
  if (AmbiguousPaths)
    rebucketPaths(Paths, PathsStart, /*SecondPass=*/true);

#ifndef NDEBUG
  // Check that the paths are in fact unique.
  for (size_t I = PathsStart + 1, E = Paths.size(); I != E; ++I) {
    assert(Paths[I]->Path != Paths[I - 1]->Path &&
           "vbtable paths are not unique");
  }
#endif
}

static bool pathCompare(VBTablePath *LHS, VBTablePath *RHS) {
  return LHS->Path < RHS->Path;
}

void VBTableBuilder::extendPath(VBTablePath *P, bool SecondPass) {
  assert(P->NextBase || SecondPass);
  if (P->NextBase) {
    P->Path.push_back(P->NextBase);
    P->NextBase = 0;  // Prevent the path from being extended twice.
  }
}

bool VBTableBuilder::rebucketPaths(VBTablePathVector &Paths, size_t PathsStart,
                                   bool SecondPass) {
  // What we're essentially doing here is bucketing together ambiguous paths.
  // Any bucket with more than one path in it gets extended by NextBase, which
  // is usually the direct base through which the vbptr was inherited. This
  // code uses a sorted vector to implement a multiset to form the buckets.
  // Note that the ordering is based on pointers, but it doesn't change our
  // output order. The current algorithm is designed to match MSVC 2012's
  // names.
  // TODO: Implement MSVC 2010 or earlier names to avoid extra vbtable cruft.
  VBTablePathVector PathsSorted(&Paths[PathsStart], &Paths.back() + 1);
  std::sort(PathsSorted.begin(), PathsSorted.end(), pathCompare);
  bool AmbiguousPaths = false;
  for (size_t I = 0, E = PathsSorted.size(); I != E;) {
    // Scan forward to find the end of the bucket.
    size_t BucketStart = I;
    do {
      ++I;
    } while (I != E && PathsSorted[BucketStart]->Path == PathsSorted[I]->Path);

    // If this bucket has multiple paths, extend them all.
    if (I - BucketStart > 1) {
      AmbiguousPaths = true;
      for (size_t II = BucketStart; II != I; ++II)
        extendPath(PathsSorted[II], SecondPass);
    }
  }
  return AmbiguousPaths;
}

llvm::GlobalVariable *
VBTableBuilder::getAddrOfVBTable(const CXXRecordDecl *ReusingBase,
                                 ArrayRef<const CXXRecordDecl *> BasePath) {
  // Caching at this layer is redundant with the caching in
  // enumerateVBTables().

  SmallString<256> OutName;
  llvm::raw_svector_ostream Out(OutName);
  MangleContext &Mangler = CGM.getCXXABI().getMangleContext();
  Mangler.mangleCXXVBTable(MostDerived, BasePath, Out);
  Out.flush();
  StringRef Name = OutName.str();

  llvm::ArrayType *VBTableType =
    llvm::ArrayType::get(CGM.IntTy, 1 + ReusingBase->getNumVBases());

  assert(!CGM.getModule().getNamedGlobal(Name) &&
         "vbtable with this name already exists: mangling bug?");
  llvm::GlobalVariable *VBTable =
    CGM.CreateOrReplaceCXXRuntimeVariable(Name, VBTableType,
                                          llvm::GlobalValue::ExternalLinkage);
  VBTable->setUnnamedAddr(true);
  return VBTable;
}

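// A vbtable is an array of CGM.IntTy entries: the first entry is the offset
// from the vbptr back to the start of the subobject it belongs to (zero or a
// small negative number), and each remaining entry is the offset from that
// vbptr to one of ReusingBase's virtual bases, in vbase order. As a minimal
// sketch (assuming 32-bit MSVC layout; A and B are illustrative):
//
//   struct A { int a; };
//   struct B : virtual A { int b; };
//
//   // B is laid out as: vbptr at offset 0, 'b' at offset 4, A subobject at
//   // offset 8, so B's vbtable is { 0, 8 }.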
void VBTableInfo::EmitVBTableDefinition(
    CodeGenModule &CGM, const CXXRecordDecl *RD,
    llvm::GlobalVariable::LinkageTypes Linkage) const {
  assert(RD->getNumVBases() && ReusingBase->getNumVBases() &&
         "should only emit vbtables for classes with vbtables");

  const ASTRecordLayout &BaseLayout =
    CGM.getContext().getASTRecordLayout(VBPtrSubobject.getBase());
  const ASTRecordLayout &DerivedLayout =
    CGM.getContext().getASTRecordLayout(RD);

  SmallVector<llvm::Constant *, 4> Offsets;

  // The offset from ReusingBase's vbptr to itself always leads.
  CharUnits VBPtrOffset = BaseLayout.getVBPtrOffset();
  Offsets.push_back(
      llvm::ConstantInt::get(CGM.IntTy, -VBPtrOffset.getQuantity()));

  // These are laid out in the same order as in Itanium, which is the same as
  // the order of the vbase iterator.
  for (CXXRecordDecl::base_class_const_iterator I = ReusingBase->vbases_begin(),
       E = ReusingBase->vbases_end(); I != E; ++I) {
    const CXXRecordDecl *VBase = I->getType()->getAsCXXRecordDecl();
    CharUnits Offset = DerivedLayout.getVBaseClassOffset(VBase);
    assert(!Offset.isNegative());
    // Make it relative to the subobject vbptr.
    Offset -= VBPtrSubobject.getBaseOffset() + VBPtrOffset;
    Offsets.push_back(llvm::ConstantInt::get(CGM.IntTy, Offset.getQuantity()));
  }

  assert(Offsets.size() ==
         cast<llvm::ArrayType>(cast<llvm::PointerType>(GV->getType())
                               ->getElementType())->getNumElements());
  llvm::ArrayType *VBTableType =
    llvm::ArrayType::get(CGM.IntTy, Offsets.size());
  llvm::Constant *Init = llvm::ConstantArray::get(VBTableType, Offsets);
  GV->setInitializer(Init);

  // Set the correct linkage.
  GV->setLinkage(Linkage);

  // Set the right visibility.
  CGM.setTypeVisibility(GV, RD, CodeGenModule::TVK_ForVTable);
}

} // namespace CodeGen
} // namespace clang