//===- ObjCARCContract.cpp - ObjC ARC Optimization ------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
/// \file
/// This file defines late ObjC ARC optimizations. ARC stands for Automatic
/// Reference Counting and is a system for managing reference counts for
/// objects in Objective-C.
///
/// This specific file mainly deals with ``contracting'' multiple lower level
/// operations into singular higher level operations through pattern matching.
///
/// WARNING: This file knows about certain library functions. It recognizes
/// them by name, and hardwires knowledge of their semantics.
///
/// WARNING: This file knows about how certain Objective-C library functions
/// are used. Naive LLVM IR transformations which would otherwise be
/// behavior-preserving may break these assumptions.
///
//===----------------------------------------------------------------------===//

// TODO: ObjCARCContract could insert PHI nodes when uses aren't
// dominated by single calls.

#include "ObjCARC.h"
#include "ARCRuntimeEntryPoints.h"
#include "DependencyAnalysis.h"
#include "ProvenanceAnalysis.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/InlineAsm.h"
#include "llvm/IR/Operator.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"

using namespace llvm;
using namespace llvm::objcarc;

#define DEBUG_TYPE "objc-arc-contract"

STATISTIC(NumPeeps, "Number of calls peephole-optimized");
STATISTIC(NumStoreStrongs, "Number of objc_storeStrong calls formed");

//===----------------------------------------------------------------------===//
// Declarations
//===----------------------------------------------------------------------===//

namespace {
/// \brief Late ARC optimizations
///
/// These change the IR in a way that makes it difficult to be analyzed by
/// ObjCARCOpt, so it's run late.
class ObjCARCContract : public FunctionPass {
  bool Changed;
  AliasAnalysis *AA;
  DominatorTree *DT;
  ProvenanceAnalysis PA;
  ARCRuntimeEntryPoints EP;

  /// A flag indicating whether this optimization pass should run.
  bool Run;

  /// The inline asm string to insert between calls and RetainRV calls to make
  /// the optimization work on targets which need it.
  const MDString *RVInstMarker;

  /// The set of inserted objc_storeStrong calls. If at the end of walking the
  /// function we have found no alloca instructions, these calls can be marked
  /// "tail".
  SmallPtrSet<CallInst *, 8> StoreStrongCalls;

  /// Returns true if we eliminated Inst.
  bool tryToPeepholeInstruction(Function &F, Instruction *Inst,
                                inst_iterator &Iter,
                                SmallPtrSetImpl<Instruction *> &DepInsts,
                                SmallPtrSetImpl<const BasicBlock *> &Visited,
                                bool &TailOkForStoreStrongs);

  bool optimizeRetainCall(Function &F, Instruction *Retain);

  bool
  contractAutorelease(Function &F, Instruction *Autorelease,
                      ARCInstKind Class,
                      SmallPtrSetImpl<Instruction *> &DependingInstructions,
                      SmallPtrSetImpl<const BasicBlock *> &Visited);

  void tryToContractReleaseIntoStoreStrong(Instruction *Release,
                                           inst_iterator &Iter);

  void getAnalysisUsage(AnalysisUsage &AU) const override;
  bool doInitialization(Module &M) override;
  bool runOnFunction(Function &F) override;

public:
  static char ID;
  ObjCARCContract() : FunctionPass(ID) {
    initializeObjCARCContractPass(*PassRegistry::getPassRegistry());
  }
};
} // end anonymous namespace

//===----------------------------------------------------------------------===//
// Implementation
//===----------------------------------------------------------------------===//

/// Turn objc_retain into objc_retainAutoreleasedReturnValue if the operand is
/// a return value. We do this late so we do not disrupt the dataflow analysis
/// in ObjCARCOpt.
bool ObjCARCContract::optimizeRetainCall(Function &F, Instruction *Retain) {
  ImmutableCallSite CS(GetArgRCIdentityRoot(Retain));
  const Instruction *Call = CS.getInstruction();
  if (!Call)
    return false;
  if (Call->getParent() != Retain->getParent())
    return false;

  // Check that the call is next to the retain.
  BasicBlock::const_iterator I = ++Call->getIterator();
  while (IsNoopInstruction(&*I))
    ++I;
  if (&*I != Retain)
    return false;

  // Turn it into an objc_retainAutoreleasedReturnValue.
  Changed = true;
  ++NumPeeps;

  DEBUG(dbgs() << "Transforming objc_retain => "
                  "objc_retainAutoreleasedReturnValue since the operand is a "
                  "return value.\nOld: " << *Retain << "\n");

  // We do not have to worry about tail calls/does not throw since
  // retain/retainRV have the same properties.
  Constant *Decl = EP.get(ARCRuntimeEntryPointKind::RetainRV);
  cast<CallInst>(Retain)->setCalledFunction(Decl);

  DEBUG(dbgs() << "New: " << *Retain << "\n");
  return true;
}
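
// For illustration, given IR of the following shape (names are hypothetical),
// the peephole above rewrites the retain's callee in place:
//
//   %call = call i8* @foo()
//   %0 = call i8* @objc_retain(i8* %call)
//
// becomes
//
//   %call = call i8* @foo()
//   %0 = call i8* @objc_retainAutoreleasedReturnValue(i8* %call)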

/// Merge an autorelease with a retain into a fused call.
bool ObjCARCContract::contractAutorelease(
    Function &F, Instruction *Autorelease, ARCInstKind Class,
    SmallPtrSetImpl<Instruction *> &DependingInstructions,
    SmallPtrSetImpl<const BasicBlock *> &Visited) {
  const Value *Arg = GetArgRCIdentityRoot(Autorelease);

  // Check that there are no instructions between the retain and the
  // autorelease (such as an autorelease_pop) which may change the count.
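  // For illustration (hypothetical IR): when a retain and an autorelease of
  // the same RC identity root are found with nothing in between that can
  // change the reference count, the pair
  //
  //   %0 = call i8* @objc_retain(i8* %x)
  //   %1 = call i8* @objc_autorelease(i8* %0)
  //
  // is fused by retargeting the retain at @objc_retainAutorelease (or
  // @objc_retainAutoreleaseReturnValue in the autoreleaseRV case) and erasing
  // the autorelease call.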
  CallInst *Retain = nullptr;
  if (Class == ARCInstKind::AutoreleaseRV)
    FindDependencies(RetainAutoreleaseRVDep, Arg,
                     Autorelease->getParent(), Autorelease,
                     DependingInstructions, Visited, PA);
  else
    FindDependencies(RetainAutoreleaseDep, Arg,
                     Autorelease->getParent(), Autorelease,
                     DependingInstructions, Visited, PA);

  Visited.clear();
  if (DependingInstructions.size() != 1) {
    DependingInstructions.clear();
    return false;
  }

  Retain = dyn_cast_or_null<CallInst>(*DependingInstructions.begin());
  DependingInstructions.clear();

  if (!Retain || GetBasicARCInstKind(Retain) != ARCInstKind::Retain ||
      GetArgRCIdentityRoot(Retain) != Arg)
    return false;

  Changed = true;
  ++NumPeeps;

  DEBUG(dbgs() << "    Fusing retain/autorelease!\n"
                  "        Autorelease:" << *Autorelease << "\n"
                  "        Retain: " << *Retain << "\n");

  Constant *Decl = EP.get(Class == ARCInstKind::AutoreleaseRV
                              ? ARCRuntimeEntryPointKind::RetainAutoreleaseRV
                              : ARCRuntimeEntryPointKind::RetainAutorelease);
  Retain->setCalledFunction(Decl);

  DEBUG(dbgs() << "        New RetainAutorelease: " << *Retain << "\n");

  EraseInstruction(Autorelease);
  return true;
}

static StoreInst *findSafeStoreForStoreStrongContraction(LoadInst *Load,
                                                         Instruction *Release,
                                                         ProvenanceAnalysis &PA,
                                                         AliasAnalysis *AA) {
  StoreInst *Store = nullptr;
  bool SawRelease = false;

  // Get the location associated with Load.
  MemoryLocation Loc = MemoryLocation::get(Load);
  auto *LocPtr = Loc.Ptr->stripPointerCasts();

  // Walk down to find the store and the release, which may be in either order.
  for (auto I = std::next(BasicBlock::iterator(Load)),
            E = Load->getParent()->end();
       I != E; ++I) {
    // If we found the store we were looking for and saw the release,
    // break. There is no more work to be done.
    if (Store && SawRelease)
      break;

    // Now we know that we have not seen either the store or the release. If I
    // is the release, mark that we saw the release and continue.
    Instruction *Inst = &*I;
    if (Inst == Release) {
      SawRelease = true;
      continue;
    }

    // Otherwise, we check if Inst is a "good" store. Grab the instruction
    // class of Inst.
    ARCInstKind Class = GetBasicARCInstKind(Inst);

    // If Inst is an unrelated retain, we don't care about it.
    //
    // TODO: This is one area where the optimization could be made more
    // aggressive.
    if (IsRetain(Class))
      continue;

    // If we have seen the store, but not the release...
    if (Store) {
      // We need to make sure that it is safe to move the release from its
      // current position to the store. This implies proving that any
      // instruction in between Store and the Release conservatively can not
      // use the RCIdentityRoot of Release. If we can prove that Inst can not
      // use it, we can ignore Inst, so continue...
      if (!CanUse(Inst, Load, PA, Class)) {
        continue;
      }

      // Otherwise, be conservative and return nullptr.
      return nullptr;
    }

    // Ok, now we know we have not seen a store yet. See if Inst can write to
    // our load location; if it can not, just ignore the instruction.
    if (!(AA->getModRefInfo(Inst, Loc) & MRI_Mod))
      continue;

    Store = dyn_cast<StoreInst>(Inst);

    // If Inst can write to our load location, check whether Inst is a simple
    // store. If Inst is not a store, or is a store that is not simple, then
    // something we do not understand is writing to this memory, which implies
    // we can not move the load over the write to any subsequent store that we
    // may find.
    if (!Store || !Store->isSimple())
      return nullptr;

    // Then make sure that the pointer we are storing to is Ptr. If so, we
    // found our Store!
    if (Store->getPointerOperand()->stripPointerCasts() == LocPtr)
      continue;

    // Otherwise, we have an unknown store to some other ptr that clobbers
    // Loc.Ptr. Bail!
    return nullptr;
  }

  // If we did not find the store or did not see the release, fail.
  if (!Store || !SawRelease)
    return nullptr;

  // We succeeded!
  return Store;
}
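
// For illustration (hypothetical IR, in the style of the objc_storeStrong
// comment below): the walk above accepts the release and the store in either
// order, e.g.
//
//   %old = load i8** %ptr
//   call void @objc_release(i8* %old)  ; sets SawRelease
//   store i8* %new, i8** %ptr          ; the store that gets returned
//
// Roughly speaking, any other intervening instruction that may write to %ptr,
// or that may use %old between the store and the release, makes the search
// bail out with nullptr.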

static Instruction *
findRetainForStoreStrongContraction(Value *New, StoreInst *Store,
                                    Instruction *Release,
                                    ProvenanceAnalysis &PA) {
  // Walk up from the Store to find the retain.
  BasicBlock::iterator I = Store->getIterator();
  BasicBlock::iterator Begin = Store->getParent()->begin();
  while (I != Begin && GetBasicARCInstKind(&*I) != ARCInstKind::Retain) {
    Instruction *Inst = &*I;

    // It is only safe to move the retain to the store if we can prove
    // conservatively that nothing besides the release can decrement reference
    // counts in between the retain and the store.
    if (CanDecrementRefCount(Inst, New, PA) && Inst != Release)
      return nullptr;
    --I;
  }
  Instruction *Retain = &*I;
  if (GetBasicARCInstKind(Retain) != ARCInstKind::Retain)
    return nullptr;
  if (GetArgRCIdentityRoot(Retain) != New)
    return nullptr;
  return Retain;
}

/// Attempt to merge an objc_release with a store, load, and objc_retain to
/// form an objc_storeStrong. An objc_storeStrong:
///
///   objc_storeStrong(i8** %old_ptr, i8* new_value)
///
/// is equivalent to the following IR sequence:
///
///   ; Load old value.
///   %old_value = load i8** %old_ptr               (1)
///
///   ; Increment the new value and then release the old value. This must occur
///   ; in order in case old_value releases new_value in its destructor causing
///   ; us to potentially have a dangling ptr.
///   tail call i8* @objc_retain(i8* %new_value)    (2)
///   tail call void @objc_release(i8* %old_value)  (3)
///
///   ; Store the new_value into old_ptr
///   store i8* %new_value, i8** %old_ptr           (4)
///
/// The safety of this optimization is based around the following
/// considerations:
///
///   1. We are forming the store strong at the store. Thus to perform this
///      optimization it must be safe to move the retain, load, and release to
///      (4).
///   2. We need to make sure that any re-orderings of (1), (2), (3), (4) are
///      safe.
void ObjCARCContract::tryToContractReleaseIntoStoreStrong(Instruction *Release,
                                                          inst_iterator &Iter) {
  // See if we are releasing something that we just loaded.
  auto *Load = dyn_cast<LoadInst>(GetArgRCIdentityRoot(Release));
  if (!Load || !Load->isSimple())
    return;

  // For now, require everything to be in one basic block.
  BasicBlock *BB = Release->getParent();
  if (Load->getParent() != BB)
    return;

  // First scan down the BB from Load, looking for a store of Load's
  // RCIdentityRoot.
  StoreInst *Store =
      findSafeStoreForStoreStrongContraction(Load, Release, PA, AA);
  // If we fail, bail.
  if (!Store)
    return;

  // Then find what new_value's RCIdentityRoot is.
  Value *New = GetRCIdentityRoot(Store->getValueOperand());

  // Then walk up the BB and look for a retain on New without any intervening
  // instructions which conservatively might decrement ref counts.
  Instruction *Retain =
      findRetainForStoreStrongContraction(New, Store, Release, PA);

  // If we fail, bail.
  if (!Retain)
    return;

  Changed = true;
  ++NumStoreStrongs;

  DEBUG(
      llvm::dbgs() << "    Contracting retain, release into objc_storeStrong.\n"
                   << "        Old:\n"
                   << "            Store:   " << *Store << "\n"
                   << "            Release: " << *Release << "\n"
                   << "            Retain:  " << *Retain << "\n"
                   << "            Load:    " << *Load << "\n");

  LLVMContext &C = Release->getContext();
  Type *I8X = PointerType::getUnqual(Type::getInt8Ty(C));
  Type *I8XX = PointerType::getUnqual(I8X);

  Value *Args[] = { Load->getPointerOperand(), New };
  if (Args[0]->getType() != I8XX)
    Args[0] = new BitCastInst(Args[0], I8XX, "", Store);
  if (Args[1]->getType() != I8X)
    Args[1] = new BitCastInst(Args[1], I8X, "", Store);
  Constant *Decl = EP.get(ARCRuntimeEntryPointKind::StoreStrong);
  CallInst *StoreStrong = CallInst::Create(Decl, Args, "", Store);
  StoreStrong->setDoesNotThrow();
  StoreStrong->setDebugLoc(Store->getDebugLoc());

  // We can't set the tail flag yet, because we haven't yet determined
  // whether there are any escaping allocas. Remember this call, so that
  // we can set the tail flag once we know it's safe.
  StoreStrongCalls.insert(StoreStrong);

  DEBUG(llvm::dbgs() << "        New Store Strong: " << *StoreStrong << "\n");

  if (&*Iter == Store)
    ++Iter;
  Store->eraseFromParent();
  Release->eraseFromParent();
  EraseInstruction(Retain);
  if (Load->use_empty())
    Load->eraseFromParent();
}
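
// For illustration: after a successful contraction, the four-instruction
// sequence (1)-(4) shown in the comment above is replaced by a single runtime
// call (with bitcasts of the operands inserted as needed):
//
//   call void @objc_storeStrong(i8** %old_ptr, i8* %new_value)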

bool ObjCARCContract::tryToPeepholeInstruction(
    Function &F, Instruction *Inst, inst_iterator &Iter,
    SmallPtrSetImpl<Instruction *> &DependingInsts,
    SmallPtrSetImpl<const BasicBlock *> &Visited,
    bool &TailOkForStoreStrongs) {
  // Only these library routines return their argument. In particular,
  // objc_retainBlock does not necessarily return its argument.
  ARCInstKind Class = GetBasicARCInstKind(Inst);
  switch (Class) {
  case ARCInstKind::FusedRetainAutorelease:
  case ARCInstKind::FusedRetainAutoreleaseRV:
    return false;
  case ARCInstKind::Autorelease:
  case ARCInstKind::AutoreleaseRV:
    return contractAutorelease(F, Inst, Class, DependingInsts, Visited);
  case ARCInstKind::Retain:
    // Attempt to convert retains to retainrvs if they are next to function
    // calls.
    if (!optimizeRetainCall(F, Inst))
      return false;
    // If we succeed in our optimization, fall through.
    // FALLTHROUGH
  case ARCInstKind::RetainRV:
  case ARCInstKind::ClaimRV: {
    // If we're compiling for a target which needs a special inline-asm
    // marker to do the return value optimization, insert it now.
    if (!RVInstMarker)
      return false;
    BasicBlock::iterator BBI = Inst->getIterator();
    BasicBlock *InstParent = Inst->getParent();

    // Step up to see if the call immediately precedes the RV call.
    // If it's an invoke, we have to cross a block boundary. And we have
    // to carefully dodge no-op instructions.
    do {
      if (BBI == InstParent->begin()) {
        BasicBlock *Pred = InstParent->getSinglePredecessor();
        if (!Pred)
          goto decline_rv_optimization;
        BBI = Pred->getTerminator()->getIterator();
        break;
      }
      --BBI;
    } while (IsNoopInstruction(&*BBI));

    if (&*BBI == GetArgRCIdentityRoot(Inst)) {
      DEBUG(dbgs() << "Adding inline asm marker for the return value "
                      "optimization.\n");
      Changed = true;
      InlineAsm *IA = InlineAsm::get(
          FunctionType::get(Type::getVoidTy(Inst->getContext()),
                            /*isVarArg=*/false),
          RVInstMarker->getString(),
          /*Constraints=*/"", /*hasSideEffects=*/true);
      CallInst::Create(IA, "", Inst);
    }
  decline_rv_optimization:
    return false;
  }
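
  // For illustration (the marker string itself is target-specific and comes
  // from module metadata; the asm text below is only an example): a marker is
  // emitted directly before the RV call, e.g.
  //
  //   %call = call i8* @foo()
  //   call void asm sideeffect "mov\09fp, fp", ""()
  //   %0 = call i8* @objc_retainAutoreleasedReturnValue(i8* %call)
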
  case ARCInstKind::InitWeak: {
    // objc_initWeak(p, null) => *p = null
    CallInst *CI = cast<CallInst>(Inst);
    if (IsNullOrUndef(CI->getArgOperand(1))) {
      Value *Null =
          ConstantPointerNull::get(cast<PointerType>(CI->getType()));
      Changed = true;
      new StoreInst(Null, CI->getArgOperand(0), CI);

      DEBUG(dbgs() << "OBJCARCContract: Old = " << *CI << "\n"
                   << "                 New = " << *Null << "\n");

      CI->replaceAllUsesWith(Null);
      CI->eraseFromParent();
    }
    return true;
  }
  case ARCInstKind::Release:
    // Try to form an objc_storeStrong from our release. If we fail, there is
    // nothing further to do below, so continue.
    tryToContractReleaseIntoStoreStrong(Inst, Iter);
    return true;
  case ARCInstKind::User:
    // Be conservative if the function has any alloca instructions.
    // Technically we only care about escaping alloca instructions,
    // but this is sufficient to handle some interesting cases.
    if (isa<AllocaInst>(Inst))
      TailOkForStoreStrongs = false;
    return true;
  case ARCInstKind::IntrinsicUser:
    // Remove calls to @clang.arc.use(...).
    Inst->eraseFromParent();
    return true;
  default:
    return true;
  }
}

//===----------------------------------------------------------------------===//
// Top Level Driver
//===----------------------------------------------------------------------===//

bool ObjCARCContract::runOnFunction(Function &F) {
  if (!EnableARCOpts)
    return false;

  // If nothing in the Module uses ARC, don't do anything.
  if (!Run)
    return false;

  Changed = false;
  AA = &getAnalysis<AAResultsWrapperPass>().getAAResults();
  DT = &getAnalysis<DominatorTreeWrapperPass>().getDomTree();

  PA.setAA(&getAnalysis<AAResultsWrapperPass>().getAAResults());

  DEBUG(llvm::dbgs() << "**** ObjCARC Contract ****\n");

  // Track whether it's ok to mark objc_storeStrong calls with the "tail"
  // keyword. Be conservative if the function has variadic arguments.
  // It seems that functions which "return twice" are also unsafe for the
  // "tail" argument, because they are setjmp, which could need to
  // return to an earlier stack state.
  bool TailOkForStoreStrongs =
      !F.isVarArg() && !F.callsFunctionThatReturnsTwice();

  // For ObjC library calls which return their argument, replace uses of the
  // argument with uses of the call return value, if it dominates the use. This
  // reduces register pressure.
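  // For illustration (hypothetical IR): since these routines return their
  // argument, a use of %x that is dominated by such a call can be rewritten
  // to use the call's result instead:
  //
  //   %y = call i8* @objc_retain(i8* %x)
  //   call void @use(i8* %x)   ; becomes: call void @use(i8* %y)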
  SmallPtrSet<Instruction *, 4> DependingInstructions;
  SmallPtrSet<const BasicBlock *, 4> Visited;
  for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E;) {
    Instruction *Inst = &*I++;

    DEBUG(dbgs() << "Visiting: " << *Inst << "\n");

    // First try to peephole Inst. If there is nothing further we can do in
    // terms of undoing objc-arc-expand, process the next inst.
    if (tryToPeepholeInstruction(F, Inst, I, DependingInstructions, Visited,
                                 TailOkForStoreStrongs))
      continue;

    // Otherwise, try to undo objc-arc-expand.

    // Don't use GetArgRCIdentityRoot because we don't want to look through
    // bitcasts and such; to do the replacement, the argument must have type
    // i8*.
    Value *Arg = cast<CallInst>(Inst)->getArgOperand(0);

    // TODO: Change this to a do-while.
    for (;;) {
      // If we're compiling bugpointed code, don't get in trouble.
      if (!isa<Instruction>(Arg) && !isa<Argument>(Arg))
        break;
      // Look through the uses of the pointer.
      for (Value::use_iterator UI = Arg->use_begin(), UE = Arg->use_end();
           UI != UE; ) {
        // Increment UI now, because we may unlink its element.
        Use &U = *UI++;
        unsigned OperandNo = U.getOperandNo();

        // If the call's return value dominates a use of the call's argument
        // value, rewrite the use to use the return value. We check for
        // reachability here because an unreachable call is considered to
        // trivially dominate itself, which would lead us to rewriting its
        // argument in terms of its return value, which would lead to
        // infinite loops in GetArgRCIdentityRoot.
        if (DT->isReachableFromEntry(U) && DT->dominates(Inst, U)) {
          Changed = true;
          Instruction *Replacement = Inst;
          Type *UseTy = U.get()->getType();
          if (PHINode *PHI = dyn_cast<PHINode>(U.getUser())) {
            // For PHI nodes, insert the bitcast in the predecessor block.
            unsigned ValNo = PHINode::getIncomingValueNumForOperand(OperandNo);
            BasicBlock *BB = PHI->getIncomingBlock(ValNo);
            if (Replacement->getType() != UseTy)
              Replacement = new BitCastInst(Replacement, UseTy, "",
                                            &BB->back());
            // While we're here, rewrite all edges for this PHI, rather
            // than just one use at a time, to minimize the number of
            // bitcasts we emit.
            for (unsigned i = 0, e = PHI->getNumIncomingValues(); i != e; ++i)
              if (PHI->getIncomingBlock(i) == BB) {
                // Keep the UI iterator valid.
                if (UI != UE &&
                    &PHI->getOperandUse(
                        PHINode::getOperandNumForIncomingValue(i)) == &*UI)
                  ++UI;
                PHI->setIncomingValue(i, Replacement);
              }
          } else {
            if (Replacement->getType() != UseTy)
              Replacement = new BitCastInst(Replacement, UseTy, "",
                                            cast<Instruction>(U.getUser()));
            U.set(Replacement);
          }
        }
      }

      // If Arg is a no-op casted pointer, strip one level of casts and
      // iterate.
      if (const BitCastInst *BI = dyn_cast<BitCastInst>(Arg))
        Arg = BI->getOperand(0);
      else if (isa<GEPOperator>(Arg) &&
               cast<GEPOperator>(Arg)->hasAllZeroIndices())
        Arg = cast<GEPOperator>(Arg)->getPointerOperand();
      else if (isa<GlobalAlias>(Arg) &&
               !cast<GlobalAlias>(Arg)->isInterposable())
        Arg = cast<GlobalAlias>(Arg)->getAliasee();
      else
        break;
    }
  }

  // If this function has no escaping allocas or suspicious vararg usage,
  // objc_storeStrong calls can be marked with the "tail" keyword.
  if (TailOkForStoreStrongs)
    for (CallInst *CI : StoreStrongCalls)
      CI->setTailCall();
  StoreStrongCalls.clear();

  return Changed;
}

//===----------------------------------------------------------------------===//
// Misc Pass Manager
//===----------------------------------------------------------------------===//

char ObjCARCContract::ID = 0;
INITIALIZE_PASS_BEGIN(ObjCARCContract, "objc-arc-contract",
                      "ObjC ARC contraction", false, false)
INITIALIZE_PASS_DEPENDENCY(AAResultsWrapperPass)
INITIALIZE_PASS_DEPENDENCY(DominatorTreeWrapperPass)
INITIALIZE_PASS_END(ObjCARCContract, "objc-arc-contract",
                    "ObjC ARC contraction", false, false)

void ObjCARCContract::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.addRequired<AAResultsWrapperPass>();
  AU.addRequired<DominatorTreeWrapperPass>();
  AU.setPreservesCFG();
}

Pass *llvm::createObjCARCContractPass() { return new ObjCARCContract(); }

bool ObjCARCContract::doInitialization(Module &M) {
  // If nothing in the Module uses ARC, don't do anything.
  Run = ModuleHasARC(M);
  if (!Run)
    return false;

  EP.init(&M);
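
  // For illustration, the named metadata consumed below typically has this
  // shape (the exact marker string is target- and frontend-dependent; the one
  // shown is only an example):
  //
  //   !clang.arc.retainAutoreleasedReturnValueMarker = !{!0}
  //   !0 = !{!"mov\09fp, fp\09\09// marker for objc_retainAutoreleaseReturnValue"}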

  // Initialize RVInstMarker.
  RVInstMarker = nullptr;
  if (NamedMDNode *NMD =
          M.getNamedMetadata("clang.arc.retainAutoreleasedReturnValueMarker"))
    if (NMD->getNumOperands() == 1) {
      const MDNode *N = NMD->getOperand(0);
      if (N->getNumOperands() == 1)
        if (const MDString *S = dyn_cast<MDString>(N->getOperand(0)))
          RVInstMarker = S;
    }

  return false;
}