//===-- GCRootLowering.cpp - Garbage collection infrastructure -----------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the lowering for the gc.root mechanism.
//
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/GCMetadata.h"
#include "llvm/CodeGen/GCStrategy.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineModuleInfo.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Module.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetFrameLowering.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Target/TargetRegisterInfo.h"
#include "llvm/Target/TargetSubtargetInfo.h"

using namespace llvm;

namespace {

/// LowerIntrinsics - This pass rewrites calls to the llvm.gcread or
/// llvm.gcwrite intrinsics, replacing them with simple loads and stores as
/// directed by the GCStrategy. It also performs automatic root initialization
/// and custom intrinsic lowering.
class LowerIntrinsics : public FunctionPass {
  bool PerformDefaultLowering(Function &F, GCStrategy &Coll);

public:
  static char ID;

  LowerIntrinsics();
  const char *getPassName() const override;
  void getAnalysisUsage(AnalysisUsage &AU) const override;

  bool doInitialization(Module &M) override;
  bool runOnFunction(Function &F) override;
};

/// GCMachineCodeAnalysis - This is a target-independent pass over the machine
/// function representation to identify safe points for the garbage collector
/// in the machine code. It inserts labels at safe points and populates a
/// GCMetadata record for each function.
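/// Safe points are emitted as GC_LABEL pseudo-instructions bracketing call
/// sites (before and/or after the call, as requested by the GCStrategy), and
/// the frame offsets of the stack slots flagged by llvm.gcroot are recorded
/// once frame indices have been assigned.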
class GCMachineCodeAnalysis : public MachineFunctionPass {
  GCFunctionInfo *FI;
  MachineModuleInfo *MMI;
  const TargetInstrInfo *TII;

  void FindSafePoints(MachineFunction &MF);
  void VisitCallPoint(MachineBasicBlock::iterator MI);
  MCSymbol *InsertLabel(MachineBasicBlock &MBB, MachineBasicBlock::iterator MI,
                        const DebugLoc &DL) const;

  void FindStackOffsets(MachineFunction &MF);

public:
  static char ID;

  GCMachineCodeAnalysis();
  void getAnalysisUsage(AnalysisUsage &AU) const override;

  bool runOnMachineFunction(MachineFunction &MF) override;
};
}

// -----------------------------------------------------------------------------

INITIALIZE_PASS_BEGIN(LowerIntrinsics, "gc-lowering", "GC Lowering", false,
                      false)
INITIALIZE_PASS_DEPENDENCY(GCModuleInfo)
INITIALIZE_PASS_END(LowerIntrinsics, "gc-lowering", "GC Lowering", false, false)

FunctionPass *llvm::createGCLoweringPass() { return new LowerIntrinsics(); }

char LowerIntrinsics::ID = 0;

LowerIntrinsics::LowerIntrinsics() : FunctionPass(ID) {
  initializeLowerIntrinsicsPass(*PassRegistry::getPassRegistry());
}

const char *LowerIntrinsics::getPassName() const {
  return "Lower Garbage Collection Instructions";
}

void LowerIntrinsics::getAnalysisUsage(AnalysisUsage &AU) const {
  FunctionPass::getAnalysisUsage(AU);
  AU.addRequired<GCModuleInfo>();
  AU.addPreserved<DominatorTreeWrapperPass>();
}

static bool NeedsDefaultLoweringPass(const GCStrategy &C) {
  // Default lowering is necessary only if read or write barriers have a
  // default action. The default for roots is no action.
  return !C.customWriteBarrier() || !C.customReadBarrier() ||
         C.initializeRoots();
}

/// doInitialization - If this module uses the GC intrinsics, find them now.
bool LowerIntrinsics::doInitialization(Module &M) {
  GCModuleInfo *MI = getAnalysisIfAvailable<GCModuleInfo>();
  assert(MI && "LowerIntrinsics didn't require GCModuleInfo!?");
  for (Module::iterator I = M.begin(), E = M.end(); I != E; ++I)
    if (!I->isDeclaration() && I->hasGC())
      MI->getFunctionInfo(*I); // Instantiate the GC strategy.

  return false;
}

/// CouldBecomeSafePoint - Predicate to conservatively determine whether the
/// instruction could introduce a safe point.
static bool CouldBecomeSafePoint(Instruction *I) {
  // The natural definition of instructions which could introduce safe points
  // is:
  //
  //   - call, invoke (AfterCall, BeforeCall)
  //   - phis (Loops)
  //   - invoke, ret, unwind (Exit)
  //
  // However, instructions as seemingly innocuous as arithmetic can become
  // libcalls upon lowering (e.g., div i64 on a 32-bit platform), so instead
  // it is necessary to take a conservative approach.

  if (isa<AllocaInst>(I) || isa<GetElementPtrInst>(I) || isa<StoreInst>(I) ||
      isa<LoadInst>(I))
    return false;

  // llvm.gcroot is safe because it doesn't do anything at runtime.
  if (CallInst *CI = dyn_cast<CallInst>(I))
    if (Function *F = CI->getCalledFunction())
      if (Intrinsic::ID IID = F->getIntrinsicID())
        if (IID == Intrinsic::gcroot)
          return false;

  return true;
}

static bool InsertRootInitializers(Function &F, AllocaInst **Roots,
                                   unsigned Count) {
  // Scroll past alloca instructions.
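  // Roots are allocas in the entry block, so the search for initializing
  // stores below starts at the first non-alloca instruction.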
  BasicBlock::iterator IP = F.getEntryBlock().begin();
  while (isa<AllocaInst>(IP))
    ++IP;

  // Search for initializers in the initial BB.
  SmallPtrSet<AllocaInst *, 16> InitedRoots;
  for (; !CouldBecomeSafePoint(&*IP); ++IP)
    if (StoreInst *SI = dyn_cast<StoreInst>(IP))
      if (AllocaInst *AI =
              dyn_cast<AllocaInst>(SI->getOperand(1)->stripPointerCasts()))
        InitedRoots.insert(AI);

  // Add root initializers.
  bool MadeChange = false;

  for (AllocaInst **I = Roots, **E = Roots + Count; I != E; ++I)
    if (!InitedRoots.count(*I)) {
      StoreInst *SI = new StoreInst(
          ConstantPointerNull::get(cast<PointerType>((*I)->getAllocatedType())),
          *I);
      SI->insertAfter(*I);
      MadeChange = true;
    }

  return MadeChange;
}

/// runOnFunction - Replace gcread/gcwrite intrinsics with loads and stores.
/// Leave gcroot intrinsics; the code generator needs to see those.
bool LowerIntrinsics::runOnFunction(Function &F) {
  // Quick exit for functions that do not use GC.
  if (!F.hasGC())
    return false;

  GCFunctionInfo &FI = getAnalysis<GCModuleInfo>().getFunctionInfo(F);
  GCStrategy &S = FI.getStrategy();

  bool MadeChange = false;

  if (NeedsDefaultLoweringPass(S))
    MadeChange |= PerformDefaultLowering(F, S);

  return MadeChange;
}

bool LowerIntrinsics::PerformDefaultLowering(Function &F, GCStrategy &S) {
  bool LowerWr = !S.customWriteBarrier();
  bool LowerRd = !S.customReadBarrier();
  bool InitRoots = S.initializeRoots();

  SmallVector<AllocaInst *, 32> Roots;

  bool MadeChange = false;
  for (Function::iterator BB = F.begin(), E = F.end(); BB != E; ++BB) {
    for (BasicBlock::iterator II = BB->begin(), E = BB->end(); II != E;) {
      if (IntrinsicInst *CI = dyn_cast<IntrinsicInst>(II++)) {
        Function *F = CI->getCalledFunction();
        switch (F->getIntrinsicID()) {
        case Intrinsic::gcwrite:
          if (LowerWr) {
            // Replace a write barrier with a simple store.
            Value *St =
                new StoreInst(CI->getArgOperand(0), CI->getArgOperand(2), CI);
            CI->replaceAllUsesWith(St);
            CI->eraseFromParent();
          }
          break;
        case Intrinsic::gcread:
          if (LowerRd) {
            // Replace a read barrier with a simple load.
            Value *Ld = new LoadInst(CI->getArgOperand(1), "", CI);
            Ld->takeName(CI);
            CI->replaceAllUsesWith(Ld);
            CI->eraseFromParent();
          }
          break;
        case Intrinsic::gcroot:
          if (InitRoots) {
            // Initialize the GC root, but do not delete the intrinsic. The
            // backend needs the intrinsic to flag the stack slot.
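            // Collect the root's alloca here; InsertRootInitializers will
            // store a null pointer into any root that is not already
            // initialized before the first potential safe point.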
            Roots.push_back(
                cast<AllocaInst>(CI->getArgOperand(0)->stripPointerCasts()));
          }
          break;
        default:
          continue;
        }

        MadeChange = true;
      }
    }
  }

  if (Roots.size())
    MadeChange |= InsertRootInitializers(F, Roots.begin(), Roots.size());

  return MadeChange;
}

// -----------------------------------------------------------------------------

char GCMachineCodeAnalysis::ID = 0;
char &llvm::GCMachineCodeAnalysisID = GCMachineCodeAnalysis::ID;

INITIALIZE_PASS(GCMachineCodeAnalysis, "gc-analysis",
                "Analyze Machine Code For Garbage Collection", false, false)

GCMachineCodeAnalysis::GCMachineCodeAnalysis() : MachineFunctionPass(ID) {}

void GCMachineCodeAnalysis::getAnalysisUsage(AnalysisUsage &AU) const {
  MachineFunctionPass::getAnalysisUsage(AU);
  AU.setPreservesAll();
  AU.addRequired<MachineModuleInfo>();
  AU.addRequired<GCModuleInfo>();
}

MCSymbol *GCMachineCodeAnalysis::InsertLabel(MachineBasicBlock &MBB,
                                             MachineBasicBlock::iterator MI,
                                             const DebugLoc &DL) const {
  MCSymbol *Label = MBB.getParent()->getContext().createTempSymbol();
  BuildMI(MBB, MI, DL, TII->get(TargetOpcode::GC_LABEL)).addSym(Label);
  return Label;
}

void GCMachineCodeAnalysis::VisitCallPoint(MachineBasicBlock::iterator CI) {
  // Find the return address (next instruction), too, so as to bracket the
  // call instruction.
  MachineBasicBlock::iterator RAI = CI;
  ++RAI;

  if (FI->getStrategy().needsSafePoint(GC::PreCall)) {
    MCSymbol *Label = InsertLabel(*CI->getParent(), CI, CI->getDebugLoc());
    FI->addSafePoint(GC::PreCall, Label, CI->getDebugLoc());
  }

  if (FI->getStrategy().needsSafePoint(GC::PostCall)) {
    MCSymbol *Label = InsertLabel(*CI->getParent(), RAI, CI->getDebugLoc());
    FI->addSafePoint(GC::PostCall, Label, CI->getDebugLoc());
  }
}

void GCMachineCodeAnalysis::FindSafePoints(MachineFunction &MF) {
  for (MachineFunction::iterator BBI = MF.begin(), BBE = MF.end(); BBI != BBE;
       ++BBI)
    for (MachineBasicBlock::iterator MI = BBI->begin(), ME = BBI->end();
         MI != ME; ++MI)
      if (MI->isCall()) {
        // Do not treat tail or sibling call sites as safe points. This is
        // legal since any arguments passed to the callee which live in the
        // remnants of the caller's frame will be owned and updated by the
        // callee if required.
        if (MI->isTerminator())
          continue;
        VisitCallPoint(MI);
      }
}

void GCMachineCodeAnalysis::FindStackOffsets(MachineFunction &MF) {
  const TargetFrameLowering *TFI = MF.getSubtarget().getFrameLowering();
  assert(TFI && "TargetFrameLowering not available!");

  for (GCFunctionInfo::roots_iterator RI = FI->roots_begin();
       RI != FI->roots_end();) {
    // If the root references a dead object, no need to keep it.
    if (MF.getFrameInfo()->isDeadObjectIndex(RI->Num)) {
      RI = FI->removeStackRoot(RI);
    } else {
      unsigned FrameReg; // FIXME: surely GCRoot ought to store the
                         // register that the offset is from?
      RI->StackOffset = TFI->getFrameIndexReference(MF, RI->Num, FrameReg);
      ++RI;
    }
  }
}

bool GCMachineCodeAnalysis::runOnMachineFunction(MachineFunction &MF) {
  // Quick exit for functions that do not use GC.
  if (!MF.getFunction()->hasGC())
    return false;

  FI = &getAnalysis<GCModuleInfo>().getFunctionInfo(*MF.getFunction());
  MMI = &getAnalysis<MachineModuleInfo>();
  TII = MF.getSubtarget().getInstrInfo();

  // Find the size of the stack frame. There may be no correct static frame
  // size; if so, we use UINT64_MAX to represent it.
  const MachineFrameInfo *MFI = MF.getFrameInfo();
  const TargetRegisterInfo *RegInfo = MF.getSubtarget().getRegisterInfo();
  const bool DynamicFrameSize =
      MFI->hasVarSizedObjects() || RegInfo->needsStackRealignment(MF);
  FI->setFrameSize(DynamicFrameSize ? UINT64_MAX : MFI->getStackSize());

  // Find all safe points.
  if (FI->getStrategy().needsSafePoints())
    FindSafePoints(MF);

  // Find the concrete stack offsets for all roots (stack slots).
  FindStackOffsets(MF);

  return false;
}