//===- ObjCARCContract.cpp - ObjC ARC Optimization ------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
/// \file
/// This file defines late ObjC ARC optimizations. ARC stands for Automatic
/// Reference Counting and is a system for managing reference counts for
/// objects in Objective-C.
///
/// This specific file mainly deals with ``contracting'' multiple lower level
/// operations into singular higher level operations through pattern matching.
///
/// WARNING: This file knows about certain library functions. It recognizes
/// them by name, and hardwires knowledge of their semantics.
///
/// WARNING: This file knows about how certain Objective-C library functions
/// are used. Naive LLVM IR transformations which would otherwise be
/// behavior-preserving may break these assumptions.
///
//===----------------------------------------------------------------------===//

// TODO: ObjCARCContract could insert PHI nodes when uses aren't
// dominated by single calls.

#include "ObjCARC.h"
#include "ARCRuntimeEntryPoints.h"
#include "DependencyAnalysis.h"
#include "ProvenanceAnalysis.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/InlineAsm.h"
#include "llvm/IR/Operator.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"

using namespace llvm;
using namespace llvm::objcarc;

#define DEBUG_TYPE "objc-arc-contract"

STATISTIC(NumPeeps, "Number of calls peephole-optimized");
STATISTIC(NumStoreStrongs, "Number of objc_storeStrong calls formed");

//===----------------------------------------------------------------------===//
//                                Declarations
//===----------------------------------------------------------------------===//

namespace {
  /// \brief Late ARC optimizations
  ///
  /// These change the IR in a way that makes it difficult to be analyzed by
  /// ObjCARCOpt, so it's run late.
  class ObjCARCContract : public FunctionPass {
    bool Changed;
    AliasAnalysis *AA;
    DominatorTree *DT;
    ProvenanceAnalysis PA;
    ARCRuntimeEntryPoints EP;

    /// A flag indicating whether this optimization pass should run.
    bool Run;

    /// The inline asm string to insert between calls and RetainRV calls to
    /// make the optimization work on targets which need it.
    const MDString *RetainRVMarker;

    /// The set of inserted objc_storeStrong calls. If at the end of walking
    /// the function we have found no alloca instructions, these calls can be
    /// marked "tail".
    SmallPtrSet<CallInst *, 8> StoreStrongCalls;

    /// Returns true if we eliminated Inst.
    bool tryToPeepholeInstruction(Function &F, Instruction *Inst,
                                  inst_iterator &Iter,
                                  SmallPtrSetImpl<Instruction *> &DepInsts,
                                  SmallPtrSetImpl<const BasicBlock *> &Visited,
                                  bool &TailOkForStoreStrongs);

    bool optimizeRetainCall(Function &F, Instruction *Retain);

    bool
    contractAutorelease(Function &F, Instruction *Autorelease,
                        ARCInstKind Class,
                        SmallPtrSetImpl<Instruction *> &DependingInstructions,
                        SmallPtrSetImpl<const BasicBlock *> &Visited);

    void tryToContractReleaseIntoStoreStrong(Instruction *Release,
                                             inst_iterator &Iter);

    void getAnalysisUsage(AnalysisUsage &AU) const override;
    bool doInitialization(Module &M) override;
    bool runOnFunction(Function &F) override;

  public:
    static char ID;
    ObjCARCContract() : FunctionPass(ID) {
      initializeObjCARCContractPass(*PassRegistry::getPassRegistry());
    }
  };
}

//===----------------------------------------------------------------------===//
//                               Implementation
//===----------------------------------------------------------------------===//

/// Turn objc_retain into objc_retainAutoreleasedReturnValue if the operand is
/// a return value. We do this late so we do not disrupt the dataflow analysis
/// in ObjCARCOpt.
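///
/// An illustrative sketch of the rewrite (the value names here are
/// hypothetical, not taken from the original comments):
///
///   %call = tail call i8* @foo()
///   %0 = tail call i8* @objc_retain(i8* %call)
///
/// becomes, when the retain immediately follows the call it retains,
///
///   %call = tail call i8* @foo()
///   %0 = tail call i8* @objc_retainAutoreleasedReturnValue(i8* %call)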
bool ObjCARCContract::optimizeRetainCall(Function &F, Instruction *Retain) {
  ImmutableCallSite CS(GetArgRCIdentityRoot(Retain));
  const Instruction *Call = CS.getInstruction();
  if (!Call)
    return false;
  if (Call->getParent() != Retain->getParent())
    return false;

  // Check that the call is next to the retain.
  BasicBlock::const_iterator I = Call;
  ++I;
  while (IsNoopInstruction(I)) ++I;
  if (&*I != Retain)
    return false;

  // Turn it into an objc_retainAutoreleasedReturnValue.
  Changed = true;
  ++NumPeeps;

  DEBUG(dbgs() << "Transforming objc_retain => "
                  "objc_retainAutoreleasedReturnValue since the operand is a "
                  "return value.\nOld: " << *Retain << "\n");

  // We do not have to worry about tail calls/does not throw since
  // retain/retainRV have the same properties.
  Constant *Decl = EP.get(ARCRuntimeEntryPointKind::RetainRV);
  cast<CallInst>(Retain)->setCalledFunction(Decl);

  DEBUG(dbgs() << "New: " << *Retain << "\n");
  return true;
}

/// Merge an autorelease with a retain into a fused call.
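///
/// An illustrative sketch (hypothetical value names; the RV variant,
/// objc_retainAutoreleaseReturnValue, is used instead when the autorelease is
/// an autoreleaseRV):
///
///   %0 = tail call i8* @objc_retain(i8* %x)
///   ; no instruction in between may change the reference count
///   tail call i8* @objc_autorelease(i8* %x)
///
/// becomes
///
///   %0 = tail call i8* @objc_retainAutorelease(i8* %x)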
bool ObjCARCContract::contractAutorelease(
    Function &F, Instruction *Autorelease, ARCInstKind Class,
    SmallPtrSetImpl<Instruction *> &DependingInstructions,
    SmallPtrSetImpl<const BasicBlock *> &Visited) {
  const Value *Arg = GetArgRCIdentityRoot(Autorelease);

  // Check that there are no instructions between the retain and the
  // autorelease (such as an autorelease_pop) which may change the count.
  CallInst *Retain = nullptr;
  if (Class == ARCInstKind::AutoreleaseRV)
    FindDependencies(RetainAutoreleaseRVDep, Arg,
                     Autorelease->getParent(), Autorelease,
                     DependingInstructions, Visited, PA);
  else
    FindDependencies(RetainAutoreleaseDep, Arg,
                     Autorelease->getParent(), Autorelease,
                     DependingInstructions, Visited, PA);

  Visited.clear();
  if (DependingInstructions.size() != 1) {
    DependingInstructions.clear();
    return false;
  }

  Retain = dyn_cast_or_null<CallInst>(*DependingInstructions.begin());
  DependingInstructions.clear();

  if (!Retain || GetBasicARCInstKind(Retain) != ARCInstKind::Retain ||
      GetArgRCIdentityRoot(Retain) != Arg)
    return false;

  Changed = true;
  ++NumPeeps;

  DEBUG(dbgs() << "    Fusing retain/autorelease!\n"
                  "        Autorelease: " << *Autorelease << "\n"
                  "        Retain:      " << *Retain << "\n");

  Constant *Decl = EP.get(Class == ARCInstKind::AutoreleaseRV
                              ? ARCRuntimeEntryPointKind::RetainAutoreleaseRV
                              : ARCRuntimeEntryPointKind::RetainAutorelease);
  Retain->setCalledFunction(Decl);

  DEBUG(dbgs() << "        New RetainAutorelease: " << *Retain << "\n");

  EraseInstruction(Autorelease);
  return true;
}

static StoreInst *findSafeStoreForStoreStrongContraction(LoadInst *Load,
                                                         Instruction *Release,
                                                         ProvenanceAnalysis &PA,
                                                         AliasAnalysis *AA) {
  StoreInst *Store = nullptr;
  bool SawRelease = false;

  // Get the location associated with Load.
  AliasAnalysis::Location Loc = AA->getLocation(Load);

  // Walk down to find the store and the release, which may be in either order.
  for (auto I = std::next(BasicBlock::iterator(Load)),
            E = Load->getParent()->end();
       I != E; ++I) {
    // If we found the store we were looking for and saw the release,
    // break. There is no more work to be done.
    if (Store && SawRelease)
      break;

    // Now we know that we have not seen either the store or the release. If I
    // is the release, mark that we saw the release and continue.
    Instruction *Inst = &*I;
    if (Inst == Release) {
      SawRelease = true;
      continue;
    }

    // Otherwise, we check if Inst is a "good" store. Grab the instruction
    // class of Inst.
    ARCInstKind Class = GetBasicARCInstKind(Inst);

    // If Inst is an unrelated retain, we don't care about it.
    //
    // TODO: This is one area where the optimization could be made more
    // aggressive.
    if (IsRetain(Class))
      continue;

    // If we have seen the store, but not the release...
    if (Store) {
      // We need to make sure that it is safe to move the release from its
      // current position to the store. This implies proving that any
      // instruction in between Store and the Release conservatively cannot
      // use the RCIdentityRoot of Release. If we can prove that Inst cannot
      // use it, ignore Inst and continue...
      if (!CanUse(Inst, Load, PA, Class)) {
        continue;
      }

      // Otherwise, be conservative and return nullptr.
      return nullptr;
    }

    // Ok, now we know we have not seen a store yet. See if Inst can write to
    // our load location; if it cannot, just ignore the instruction.
    if (!(AA->getModRefInfo(Inst, Loc) & AliasAnalysis::Mod))
      continue;

    Store = dyn_cast<StoreInst>(Inst);

    // If Inst can write to the location, check whether Inst is a simple
    // store. If Inst is not a store, or is a store that is not simple, then
    // something we do not understand is writing to this memory, implying we
    // cannot move the load over the write to any subsequent store that we may
    // find.
    if (!Store || !Store->isSimple())
      return nullptr;

    // Then make sure that the pointer we are storing to is Ptr. If so, we
    // found our Store!
    if (Store->getPointerOperand() == Loc.Ptr)
      continue;

    // Otherwise, we have an unknown store to some other ptr that clobbers
    // Loc.Ptr. Bail!
    return nullptr;
  }

  // If we did not find the store or did not see the release, fail.
  if (!Store || !SawRelease)
    return nullptr;

  // We succeeded!
  return Store;
}

static Instruction *
findRetainForStoreStrongContraction(Value *New, StoreInst *Store,
                                    Instruction *Release,
                                    ProvenanceAnalysis &PA) {
  // Walk up from the Store to find the retain.
  BasicBlock::iterator I = Store;
  BasicBlock::iterator Begin = Store->getParent()->begin();
  while (I != Begin && GetBasicARCInstKind(I) != ARCInstKind::Retain) {
    Instruction *Inst = &*I;

    // It is only safe to move the retain to the store if we can prove
    // conservatively that nothing besides the release can decrement reference
    // counts in between the retain and the store.
    if (CanDecrementRefCount(Inst, New, PA) && Inst != Release)
      return nullptr;
    --I;
  }
  Instruction *Retain = I;
  if (GetBasicARCInstKind(Retain) != ARCInstKind::Retain)
    return nullptr;
  if (GetArgRCIdentityRoot(Retain) != New)
    return nullptr;
  return Retain;
}

/// Attempt to merge an objc_release with a store, load, and objc_retain to
/// form an objc_storeStrong. An objc_storeStrong:
///
///   objc_storeStrong(i8** %old_ptr, i8* %new_value)
///
/// is equivalent to the following IR sequence:
///
///   ; Load old value.
///   %old_value = load i8** %old_ptr               (1)
///
///   ; Increment the new value and then release the old value. This must occur
///   ; in order in case old_value releases new_value in its destructor causing
///   ; us to potentially have a dangling ptr.
///   tail call i8* @objc_retain(i8* %new_value)    (2)
///   tail call void @objc_release(i8* %old_value)  (3)
///
///   ; Store the new_value into old_ptr
///   store i8* %new_value, i8** %old_ptr           (4)
///
/// The safety of this optimization is based around the following
/// considerations:
///
///   1. We are forming the store strong at the store. Thus to perform this
///      optimization it must be safe to move the retain, load, and release to
///      (4).
///   2. We need to make sure that any re-orderings of (1), (2), (3), (4) are
///      safe.
void ObjCARCContract::tryToContractReleaseIntoStoreStrong(Instruction *Release,
                                                          inst_iterator &Iter) {
  // See if we are releasing something that we just loaded.
  auto *Load = dyn_cast<LoadInst>(GetArgRCIdentityRoot(Release));
  if (!Load || !Load->isSimple())
    return;

  // For now, require everything to be in one basic block.
  BasicBlock *BB = Release->getParent();
  if (Load->getParent() != BB)
    return;

  // First scan down the BB from Load, looking for a simple store to the
  // pointer we loaded from, together with the release of the loaded value.
  StoreInst *Store =
      findSafeStoreForStoreStrongContraction(Load, Release, PA, AA);
  // If we fail, bail.
  if (!Store)
    return;

  // Then find what new_value's RCIdentityRoot is.
  Value *New = GetRCIdentityRoot(Store->getValueOperand());

  // Then walk up the BB and look for a retain on New without any intervening
  // instructions which conservatively might decrement ref counts.
  Instruction *Retain =
      findRetainForStoreStrongContraction(New, Store, Release, PA);

  // If we fail, bail.
  if (!Retain)
    return;

  Changed = true;
  ++NumStoreStrongs;

  DEBUG(
      llvm::dbgs() << "    Contracting retain, release into objc_storeStrong.\n"
                   << "        Old:\n"
                   << "            Store:   " << *Store << "\n"
                   << "            Release: " << *Release << "\n"
                   << "            Retain:  " << *Retain << "\n"
                   << "            Load:    " << *Load << "\n");

  LLVMContext &C = Release->getContext();
  Type *I8X = PointerType::getUnqual(Type::getInt8Ty(C));
  Type *I8XX = PointerType::getUnqual(I8X);

  Value *Args[] = { Load->getPointerOperand(), New };
  if (Args[0]->getType() != I8XX)
    Args[0] = new BitCastInst(Args[0], I8XX, "", Store);
  if (Args[1]->getType() != I8X)
    Args[1] = new BitCastInst(Args[1], I8X, "", Store);
  Constant *Decl = EP.get(ARCRuntimeEntryPointKind::StoreStrong);
  CallInst *StoreStrong = CallInst::Create(Decl, Args, "", Store);
  StoreStrong->setDoesNotThrow();
  StoreStrong->setDebugLoc(Store->getDebugLoc());

  // We can't set the tail flag yet, because we haven't yet determined
  // whether there are any escaping allocas. Remember this call, so that
  // we can set the tail flag once we know it's safe.
  StoreStrongCalls.insert(StoreStrong);

  DEBUG(llvm::dbgs() << "        New Store Strong: " << *StoreStrong << "\n");

  if (&*Iter == Store) ++Iter;
  Store->eraseFromParent();
  Release->eraseFromParent();
  EraseInstruction(Retain);
  if (Load->use_empty())
    Load->eraseFromParent();
}

bool ObjCARCContract::tryToPeepholeInstruction(
    Function &F, Instruction *Inst, inst_iterator &Iter,
    SmallPtrSetImpl<Instruction *> &DependingInsts,
    SmallPtrSetImpl<const BasicBlock *> &Visited,
    bool &TailOkForStoreStrongs) {
  // Only these library routines return their argument. In particular,
  // objc_retainBlock does not necessarily return its argument.
  ARCInstKind Class = GetBasicARCInstKind(Inst);
  switch (Class) {
  case ARCInstKind::FusedRetainAutorelease:
  case ARCInstKind::FusedRetainAutoreleaseRV:
    return false;
  case ARCInstKind::Autorelease:
  case ARCInstKind::AutoreleaseRV:
    return contractAutorelease(F, Inst, Class, DependingInsts, Visited);
  case ARCInstKind::Retain:
    // Attempt to convert retains to retainrvs if they are next to function
    // calls.
    if (!optimizeRetainCall(F, Inst))
      return false;
    // If we succeed in our optimization, fall through.
    // FALLTHROUGH
  case ARCInstKind::RetainRV: {
    // If we're compiling for a target which needs a special inline-asm
    // marker to do the retainAutoreleasedReturnValue optimization,
    // insert it now.
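    //
    // An illustrative sketch of the intended result (hypothetical value
    // names; the actual marker string is target-specific and comes from the
    // clang.arc.retainAutoreleasedReturnValueMarker module metadata read in
    // doInitialization below):
    //
    //   %call = tail call i8* @foo()
    //   call void asm sideeffect "<target-specific marker>", ""()
    //   %1 = tail call i8* @objc_retainAutoreleasedReturnValue(i8* %call)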
    if (!RetainRVMarker)
      return false;
    BasicBlock::iterator BBI = Inst;
    BasicBlock *InstParent = Inst->getParent();

    // Step up to see if the call immediately precedes the RetainRV call.
    // If it's an invoke, we have to cross a block boundary. And we have
    // to carefully dodge no-op instructions.
    do {
      if (&*BBI == InstParent->begin()) {
        BasicBlock *Pred = InstParent->getSinglePredecessor();
        if (!Pred)
          goto decline_rv_optimization;
        BBI = Pred->getTerminator();
        break;
      }
      --BBI;
    } while (IsNoopInstruction(BBI));

    if (&*BBI == GetArgRCIdentityRoot(Inst)) {
      DEBUG(dbgs() << "Adding inline asm marker for the "
                      "retainAutoreleasedReturnValue optimization.\n");
      Changed = true;
      InlineAsm *IA =
          InlineAsm::get(FunctionType::get(Type::getVoidTy(Inst->getContext()),
                                           /*isVarArg=*/false),
                         RetainRVMarker->getString(),
                         /*Constraints=*/"", /*hasSideEffects=*/true);
      CallInst::Create(IA, "", Inst);
    }
  decline_rv_optimization:
    return false;
  }
  case ARCInstKind::InitWeak: {
    // objc_initWeak(p, null) => *p = null
    CallInst *CI = cast<CallInst>(Inst);
    if (IsNullOrUndef(CI->getArgOperand(1))) {
      Value *Null =
          ConstantPointerNull::get(cast<PointerType>(CI->getType()));
      Changed = true;
      new StoreInst(Null, CI->getArgOperand(0), CI);

      DEBUG(dbgs() << "OBJCARCContract: Old = " << *CI << "\n"
                   << "                 New = " << *Null << "\n");

      CI->replaceAllUsesWith(Null);
      CI->eraseFromParent();
    }
    return true;
  }
  case ARCInstKind::Release:
    // Try to form an objc_storeStrong from our release. If we fail, there is
    // nothing further to do below, so continue.
    tryToContractReleaseIntoStoreStrong(Inst, Iter);
    return true;
  case ARCInstKind::User:
    // Be conservative if the function has any alloca instructions.
    // Technically we only care about escaping alloca instructions,
    // but this is sufficient to handle some interesting cases.
    if (isa<AllocaInst>(Inst))
      TailOkForStoreStrongs = false;
    return true;
  case ARCInstKind::IntrinsicUser:
    // Remove calls to @clang.arc.use(...).
    Inst->eraseFromParent();
    return true;
  default:
    return true;
  }
}

//===----------------------------------------------------------------------===//
//                              Top Level Driver
//===----------------------------------------------------------------------===//

bool ObjCARCContract::runOnFunction(Function &F) {
  if (!EnableARCOpts)
    return false;

  // If nothing in the Module uses ARC, don't do anything.
  if (!Run)
    return false;

  Changed = false;
  AA = &getAnalysis<AliasAnalysis>();
  DT = &getAnalysis<DominatorTreeWrapperPass>().getDomTree();

  PA.setAA(&getAnalysis<AliasAnalysis>());

  DEBUG(llvm::dbgs() << "**** ObjCARC Contract ****\n");

  // Track whether it's ok to mark objc_storeStrong calls with the "tail"
  // keyword. Be conservative if the function has variadic arguments.
  // Functions which "return twice" are also unsafe for the "tail" keyword,
  // because they are setjmp-like and could need to return to an earlier
  // stack state.
  bool TailOkForStoreStrongs =
      !F.isVarArg() && !F.callsFunctionThatReturnsTwice();

  // For ObjC library calls which return their argument, replace uses of the
  // argument with uses of the call return value, if it dominates the use. This
  // reduces register pressure.
  SmallPtrSet<Instruction *, 4> DependingInstructions;
  SmallPtrSet<const BasicBlock *, 4> Visited;
  for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E;) {
    Instruction *Inst = &*I++;

    DEBUG(dbgs() << "Visiting: " << *Inst << "\n");

    // First try to peephole Inst. If there is nothing further we can do in
    // terms of undoing objc-arc-expand, process the next inst.
    if (tryToPeepholeInstruction(F, Inst, I, DependingInstructions, Visited,
                                 TailOkForStoreStrongs))
      continue;

    // Otherwise, try to undo objc-arc-expand.

    // Don't use GetArgRCIdentityRoot because we don't want to look through
    // bitcasts and such; to do the replacement, the argument must have
    // type i8*.
    Value *Arg = cast<CallInst>(Inst)->getArgOperand(0);

    // TODO: Change this to a do-while.
    for (;;) {
      // If we're compiling bugpointed code, don't get in trouble.
      if (!isa<Instruction>(Arg) && !isa<Argument>(Arg))
        break;
      // Look through the uses of the pointer.
      for (Value::use_iterator UI = Arg->use_begin(), UE = Arg->use_end();
           UI != UE; ) {
        // Increment UI now, because we may unlink its element.
        Use &U = *UI++;
        unsigned OperandNo = U.getOperandNo();

        // If the call's return value dominates a use of the call's argument
        // value, rewrite the use to use the return value. We check for
        // reachability here because an unreachable call is considered to
        // trivially dominate itself, which would lead us to rewriting its
        // argument in terms of its return value, which would lead to
        // infinite loops in GetArgRCIdentityRoot.
        if (DT->isReachableFromEntry(U) && DT->dominates(Inst, U)) {
          Changed = true;
          Instruction *Replacement = Inst;
          Type *UseTy = U.get()->getType();
          if (PHINode *PHI = dyn_cast<PHINode>(U.getUser())) {
            // For PHI nodes, insert the bitcast in the predecessor block.
            unsigned ValNo = PHINode::getIncomingValueNumForOperand(OperandNo);
            BasicBlock *BB = PHI->getIncomingBlock(ValNo);
            if (Replacement->getType() != UseTy)
              Replacement = new BitCastInst(Replacement, UseTy, "",
                                            &BB->back());
            // While we're here, rewrite all edges for this PHI, rather
            // than just one use at a time, to minimize the number of
            // bitcasts we emit.
            for (unsigned i = 0, e = PHI->getNumIncomingValues(); i != e; ++i)
              if (PHI->getIncomingBlock(i) == BB) {
                // Keep the UI iterator valid.
                if (UI != UE &&
                    &PHI->getOperandUse(
                        PHINode::getOperandNumForIncomingValue(i)) == &*UI)
                  ++UI;
                PHI->setIncomingValue(i, Replacement);
              }
          } else {
            if (Replacement->getType() != UseTy)
              Replacement = new BitCastInst(Replacement, UseTy, "",
                                            cast<Instruction>(U.getUser()));
            U.set(Replacement);
          }
        }
      }

      // If Arg is a no-op casted pointer, strip one level of casts and
      // iterate.
      if (const BitCastInst *BI = dyn_cast<BitCastInst>(Arg))
        Arg = BI->getOperand(0);
      else if (isa<GEPOperator>(Arg) &&
               cast<GEPOperator>(Arg)->hasAllZeroIndices())
        Arg = cast<GEPOperator>(Arg)->getPointerOperand();
      else if (isa<GlobalAlias>(Arg) &&
               !cast<GlobalAlias>(Arg)->mayBeOverridden())
        Arg = cast<GlobalAlias>(Arg)->getAliasee();
      else
        break;
    }
  }

  // If this function has no escaping allocas or suspicious vararg usage,
  // objc_storeStrong calls can be marked with the "tail" keyword.
  if (TailOkForStoreStrongs)
    for (CallInst *CI : StoreStrongCalls)
      CI->setTailCall();
  StoreStrongCalls.clear();

  return Changed;
}

//===----------------------------------------------------------------------===//
//                             Misc Pass Manager
//===----------------------------------------------------------------------===//

char ObjCARCContract::ID = 0;
INITIALIZE_PASS_BEGIN(ObjCARCContract, "objc-arc-contract",
                      "ObjC ARC contraction", false, false)
INITIALIZE_AG_DEPENDENCY(AliasAnalysis)
INITIALIZE_PASS_DEPENDENCY(DominatorTreeWrapperPass)
INITIALIZE_PASS_END(ObjCARCContract, "objc-arc-contract",
                    "ObjC ARC contraction", false, false)

void ObjCARCContract::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.addRequired<AliasAnalysis>();
  AU.addRequired<DominatorTreeWrapperPass>();
  AU.setPreservesCFG();
}

Pass *llvm::createObjCARCContractPass() { return new ObjCARCContract(); }

bool ObjCARCContract::doInitialization(Module &M) {
  // If nothing in the Module uses ARC, don't do anything.
  Run = ModuleHasARC(M);
  if (!Run)
    return false;

  EP.init(&M);

  // Initialize RetainRVMarker.
  RetainRVMarker = nullptr;
  if (NamedMDNode *NMD =
          M.getNamedMetadata("clang.arc.retainAutoreleasedReturnValueMarker"))
    if (NMD->getNumOperands() == 1) {
      const MDNode *N = NMD->getOperand(0);
      if (N->getNumOperands() == 1)
        if (const MDString *S = dyn_cast<MDString>(N->getOperand(0)))
          RetainRVMarker = S;
    }

  return false;
}