//===- ObjCARC.cpp - ObjC ARC Optimization --------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines ObjC ARC optimizations. ARC stands for Automatic
// Reference Counting and is a system for managing reference counts for
// objects in Objective-C.
//
// The optimizations performed include elimination of redundant, partially
// redundant, and inconsequential reference count operations, elimination of
// redundant weak pointer operations, pattern-matching and replacement of
// low-level operations into higher-level operations, and numerous minor
// simplifications.
//
// This file also defines a simple ARC-aware AliasAnalysis.
//
// WARNING: This file knows about certain library functions. It recognizes them
// by name, and hardwires knowledge of their semantics.
//
// WARNING: This file knows about how certain Objective-C library functions are
// used. Naive LLVM IR transformations which would otherwise be
// behavior-preserving may break these assumptions.
//
//===----------------------------------------------------------------------===//
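
// As a simplified, illustrative example of the kind of rewrite performed
// here, a retain+release pair on the same pointer, with nothing in between
// that could decrement the reference count or use the pointer, can be
// removed entirely:
//
//   %0 = call i8* @objc_retain(i8* %x)
//   ; ...code that can neither release nor use %x...
//   call void @objc_release(i8* %x)
//
// becomes just the code in between. Establishing when this is safe is the
// subject of most of this file.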
#define DEBUG_TYPE "objc-arc"
#include "llvm/Function.h"
#include "llvm/Intrinsics.h"
#include "llvm/GlobalVariable.h"
#include "llvm/DerivedTypes.h"
#include "llvm/Module.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/Transforms/Utils/Local.h"
#include "llvm/Support/CallSite.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/ADT/StringSwitch.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/STLExtras.h"
using namespace llvm;

// A handy option to enable/disable all optimizations in this file.
static cl::opt<bool> EnableARCOpts("enable-objc-arc-opts", cl::init(true));

//===----------------------------------------------------------------------===//
// Misc. Utilities
//===----------------------------------------------------------------------===//

namespace {
  /// MapVector - An associative container with fast insertion-order
  /// (deterministic) iteration over its elements. Plus the special
  /// blot operation.
  template<class KeyT, class ValueT>
  class MapVector {
    /// Map - Map keys to indices in Vector.
    typedef DenseMap<KeyT, size_t> MapTy;
    MapTy Map;

    /// Vector - Keys and values.
    typedef std::vector<std::pair<KeyT, ValueT> > VectorTy;
    VectorTy Vector;

  public:
    typedef typename VectorTy::iterator iterator;
    typedef typename VectorTy::const_iterator const_iterator;
    iterator begin() { return Vector.begin(); }
    iterator end() { return Vector.end(); }
    const_iterator begin() const { return Vector.begin(); }
    const_iterator end() const { return Vector.end(); }

#ifdef XDEBUG
    ~MapVector() {
      assert(Vector.size() >= Map.size()); // May differ due to blotting.
      for (typename MapTy::const_iterator I = Map.begin(), E = Map.end();
           I != E; ++I) {
        assert(I->second < Vector.size());
        assert(Vector[I->second].first == I->first);
      }
      for (typename VectorTy::const_iterator I = Vector.begin(),
           E = Vector.end(); I != E; ++I)
        assert(!I->first ||
               (Map.count(I->first) &&
                Map[I->first] == size_t(I - Vector.begin())));
    }
#endif

    ValueT &operator[](KeyT Arg) {
      std::pair<typename MapTy::iterator, bool> Pair =
        Map.insert(std::make_pair(Arg, size_t(0)));
      if (Pair.second) {
        Pair.first->second = Vector.size();
        Vector.push_back(std::make_pair(Arg, ValueT()));
        return Vector.back().second;
      }
      return Vector[Pair.first->second].second;
    }

    std::pair<iterator, bool>
    insert(const std::pair<KeyT, ValueT> &InsertPair) {
      std::pair<typename MapTy::iterator, bool> Pair =
        Map.insert(std::make_pair(InsertPair.first, size_t(0)));
      if (Pair.second) {
        Pair.first->second = Vector.size();
        Vector.push_back(InsertPair);
        return std::make_pair(llvm::prior(Vector.end()), true);
      }
      return std::make_pair(Vector.begin() + Pair.first->second, false);
    }

    const_iterator find(KeyT Key) const {
      typename MapTy::const_iterator It = Map.find(Key);
      if (It == Map.end()) return Vector.end();
      return Vector.begin() + It->second;
    }

    /// blot - This is similar to erase, but instead of removing the element
    /// from the vector, it just zeros out the key in the vector. This leaves
    /// iterators intact, but clients must be prepared for zeroed-out keys when
    /// iterating.
    void blot(KeyT Key) {
      typename MapTy::iterator It = Map.find(Key);
      if (It == Map.end()) return;
      Vector[It->second].first = KeyT();
      Map.erase(It);
    }

    void clear() {
      Map.clear();
      Vector.clear();
    }
  };
}

//===----------------------------------------------------------------------===//
// ARC Utilities.
//===----------------------------------------------------------------------===//

namespace {
  /// InstructionClass - A simple classification for instructions.
  enum InstructionClass {
    IC_Retain,              ///< objc_retain
    IC_RetainRV,            ///< objc_retainAutoreleasedReturnValue
    IC_RetainBlock,         ///< objc_retainBlock
    IC_Release,             ///< objc_release
    IC_Autorelease,         ///< objc_autorelease
    IC_AutoreleaseRV,       ///< objc_autoreleaseReturnValue
    IC_AutoreleasepoolPush, ///< objc_autoreleasePoolPush
    IC_AutoreleasepoolPop,  ///< objc_autoreleasePoolPop
    IC_NoopCast,            ///< objc_retainedObject, etc.
    IC_FusedRetainAutorelease,   ///< objc_retainAutorelease
    IC_FusedRetainAutoreleaseRV, ///< objc_retainAutoreleaseReturnValue
    IC_LoadWeakRetained,    ///< objc_loadWeakRetained (primitive)
    IC_StoreWeak,           ///< objc_storeWeak (primitive)
    IC_InitWeak,            ///< objc_initWeak (derived)
    IC_LoadWeak,            ///< objc_loadWeak (derived)
    IC_MoveWeak,            ///< objc_moveWeak (derived)
    IC_CopyWeak,            ///< objc_copyWeak (derived)
    IC_DestroyWeak,         ///< objc_destroyWeak (derived)
    IC_CallOrUser,          ///< could call objc_release and/or "use" pointers
    IC_Call,                ///< could call objc_release
    IC_User,                ///< could "use" a pointer
    IC_None                 ///< anything else
  };
}
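
// Illustrative: a direct call such as
//   %1 = call i8* @objc_retain(i8* %0)
// is classified as IC_Retain, while a call to an unknown function that takes
// a pointer argument is conservatively IC_CallOrUser: it could release the
// object and could also "use" the pointer. The classification logic follows
// below.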
/// IsPotentialUse - Test whether the given value is possibly a
/// reference-counted pointer.
static bool IsPotentialUse(const Value *Op) {
  // Pointers to static or stack storage are not reference-counted pointers.
  if (isa<Constant>(Op) || isa<AllocaInst>(Op))
    return false;
  // Special arguments are not reference-counted.
  if (const Argument *Arg = dyn_cast<Argument>(Op))
    if (Arg->hasByValAttr() ||
        Arg->hasNestAttr() ||
        Arg->hasStructRetAttr())
      return false;
  // Only consider values with pointer types, and not function pointers.
  PointerType *Ty = dyn_cast<PointerType>(Op->getType());
  if (!Ty || isa<FunctionType>(Ty->getElementType()))
    return false;
  // Conservatively assume anything else is a potential use.
  return true;
}

/// GetCallSiteClass - Helper for GetInstructionClass. Determines what kind
/// of construct CS is.
static InstructionClass GetCallSiteClass(ImmutableCallSite CS) {
  for (ImmutableCallSite::arg_iterator I = CS.arg_begin(), E = CS.arg_end();
       I != E; ++I)
    if (IsPotentialUse(*I))
      return CS.onlyReadsMemory() ? IC_User : IC_CallOrUser;

  return CS.onlyReadsMemory() ? IC_None : IC_Call;
}

/// GetFunctionClass - Determine if F is one of the special known Functions.
/// If it isn't, return IC_CallOrUser.
static InstructionClass GetFunctionClass(const Function *F) {
  Function::const_arg_iterator AI = F->arg_begin(), AE = F->arg_end();

  // No arguments.
  if (AI == AE)
    return StringSwitch<InstructionClass>(F->getName())
      .Case("objc_autoreleasePoolPush", IC_AutoreleasepoolPush)
      .Default(IC_CallOrUser);

  // One argument.
  const Argument *A0 = AI++;
  if (AI == AE)
    // Argument is a pointer.
    if (PointerType *PTy = dyn_cast<PointerType>(A0->getType())) {
      Type *ETy = PTy->getElementType();
      // Argument is i8*.
      if (ETy->isIntegerTy(8))
        return StringSwitch<InstructionClass>(F->getName())
          .Case("objc_retain", IC_Retain)
          .Case("objc_retainAutoreleasedReturnValue", IC_RetainRV)
          .Case("objc_retainBlock", IC_RetainBlock)
          .Case("objc_release", IC_Release)
          .Case("objc_autorelease", IC_Autorelease)
          .Case("objc_autoreleaseReturnValue", IC_AutoreleaseRV)
          .Case("objc_autoreleasePoolPop", IC_AutoreleasepoolPop)
          .Case("objc_retainedObject", IC_NoopCast)
          .Case("objc_unretainedObject", IC_NoopCast)
          .Case("objc_unretainedPointer", IC_NoopCast)
          .Case("objc_retain_autorelease", IC_FusedRetainAutorelease)
          .Case("objc_retainAutorelease", IC_FusedRetainAutorelease)
          .Case("objc_retainAutoreleaseReturnValue",
                IC_FusedRetainAutoreleaseRV)
          .Default(IC_CallOrUser);

      // Argument is i8**.
      if (PointerType *Pte = dyn_cast<PointerType>(ETy))
        if (Pte->getElementType()->isIntegerTy(8))
          return StringSwitch<InstructionClass>(F->getName())
            .Case("objc_loadWeakRetained", IC_LoadWeakRetained)
            .Case("objc_loadWeak", IC_LoadWeak)
            .Case("objc_destroyWeak", IC_DestroyWeak)
            .Default(IC_CallOrUser);
    }

  // Two arguments, first is i8**.
  const Argument *A1 = AI++;
  if (AI == AE)
    if (PointerType *PTy = dyn_cast<PointerType>(A0->getType()))
      if (PointerType *Pte = dyn_cast<PointerType>(PTy->getElementType()))
        if (Pte->getElementType()->isIntegerTy(8))
          if (PointerType *PTy1 = dyn_cast<PointerType>(A1->getType())) {
            Type *ETy1 = PTy1->getElementType();
            // Second argument is i8*.
            if (ETy1->isIntegerTy(8))
              return StringSwitch<InstructionClass>(F->getName())
                .Case("objc_storeWeak", IC_StoreWeak)
                .Case("objc_initWeak", IC_InitWeak)
                .Default(IC_CallOrUser);
            // Second argument is i8**.
            if (PointerType *Pte1 = dyn_cast<PointerType>(ETy1))
              if (Pte1->getElementType()->isIntegerTy(8))
                return StringSwitch<InstructionClass>(F->getName())
                  .Case("objc_moveWeak", IC_MoveWeak)
                  .Case("objc_copyWeak", IC_CopyWeak)
                  .Default(IC_CallOrUser);
          }

  // Anything else.
  return IC_CallOrUser;
}
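
// Illustrative: under these rules, a declaration such as
//   declare i8* @objc_retain(i8*)
// classifies as IC_Retain, and
//   declare i8* @objc_storeWeak(i8**, i8*)
// classifies as IC_StoreWeak, while any unrecognized name or signature falls
// through to the conservative IC_CallOrUser.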
/// GetInstructionClass - Determine what kind of construct V is.
static InstructionClass GetInstructionClass(const Value *V) {
  if (const Instruction *I = dyn_cast<Instruction>(V)) {
    // Any instruction other than bitcast and gep with a pointer operand has a
    // use of an objc pointer. Bitcasts, GEPs, Selects, PHIs transfer a pointer
    // to a subsequent use, rather than using it themselves, in this sense.
    // As a short cut, several other opcodes are known to have no pointer
    // operands of interest. And ret is never followed by a release, so it's
    // not interesting to examine.
    switch (I->getOpcode()) {
    case Instruction::Call: {
      const CallInst *CI = cast<CallInst>(I);
      // Check for calls to special functions.
      if (const Function *F = CI->getCalledFunction()) {
        InstructionClass Class = GetFunctionClass(F);
        if (Class != IC_CallOrUser)
          return Class;

        // None of the intrinsic functions do objc_release. For intrinsics, the
        // only question is whether or not they may be users.
        switch (F->getIntrinsicID()) {
        case 0: break;
        case Intrinsic::bswap: case Intrinsic::ctpop:
        case Intrinsic::ctlz: case Intrinsic::cttz:
        case Intrinsic::returnaddress: case Intrinsic::frameaddress:
        case Intrinsic::stacksave: case Intrinsic::stackrestore:
        case Intrinsic::vastart: case Intrinsic::vacopy: case Intrinsic::vaend:
        // Don't let dbg info affect our results.
        case Intrinsic::dbg_declare: case Intrinsic::dbg_value:
          // Short cut: Some intrinsics obviously don't use ObjC pointers.
          return IC_None;
        default:
          for (Function::const_arg_iterator AI = F->arg_begin(),
               AE = F->arg_end(); AI != AE; ++AI)
            if (IsPotentialUse(AI))
              return IC_User;
          return IC_None;
        }
      }
      return GetCallSiteClass(CI);
    }
    case Instruction::Invoke:
      return GetCallSiteClass(cast<InvokeInst>(I));
    case Instruction::BitCast:
    case Instruction::GetElementPtr:
    case Instruction::Select: case Instruction::PHI:
    case Instruction::Ret: case Instruction::Br:
    case Instruction::Switch: case Instruction::IndirectBr:
    case Instruction::Alloca: case Instruction::VAArg:
    case Instruction::Add: case Instruction::FAdd:
    case Instruction::Sub: case Instruction::FSub:
    case Instruction::Mul: case Instruction::FMul:
    case Instruction::SDiv: case Instruction::UDiv: case Instruction::FDiv:
    case Instruction::SRem: case Instruction::URem: case Instruction::FRem:
    case Instruction::Shl: case Instruction::LShr: case Instruction::AShr:
    case Instruction::And: case Instruction::Or: case Instruction::Xor:
    case Instruction::SExt: case Instruction::ZExt: case Instruction::Trunc:
    case Instruction::IntToPtr: case Instruction::FCmp:
    case Instruction::FPTrunc: case Instruction::FPExt:
    case Instruction::FPToUI: case Instruction::FPToSI:
    case Instruction::UIToFP: case Instruction::SIToFP:
    case Instruction::InsertElement: case Instruction::ExtractElement:
    case Instruction::ShuffleVector:
    case Instruction::ExtractValue:
      break;
    case Instruction::ICmp:
      // Comparing a pointer with null, or any other constant, isn't an
      // interesting use, because we don't care what the pointer points to, or
      // about the values of any other dynamic reference-counted pointers.
      if (IsPotentialUse(I->getOperand(1)))
        return IC_User;
      break;
    default:
      // For anything else, check all the operands.
      // Note that this includes both operands of a Store: while the first
      // operand isn't actually being dereferenced, it is being stored to
      // memory where we can no longer track who might read it and dereference
      // it, so we have to consider it potentially used.
      for (User::const_op_iterator OI = I->op_begin(), OE = I->op_end();
           OI != OE; ++OI)
        if (IsPotentialUse(*OI))
          return IC_User;
    }
  }

  // Otherwise, it's totally inert for ARC purposes.
  return IC_None;
}

/// GetBasicInstructionClass - Determine what kind of construct V is. This is
/// similar to GetInstructionClass except that it only detects objc runtime
/// calls. This allows it to be faster.
static InstructionClass GetBasicInstructionClass(const Value *V) {
  if (const CallInst *CI = dyn_cast<CallInst>(V)) {
    if (const Function *F = CI->getCalledFunction())
      return GetFunctionClass(F);
    // Otherwise, be conservative.
    return IC_CallOrUser;
  }

  // Otherwise, be conservative.
  return IC_User;
}

/// IsRetain - Test if the given class is objc_retain or
/// equivalent.
static bool IsRetain(InstructionClass Class) {
  return Class == IC_Retain ||
         Class == IC_RetainRV;
}

/// IsAutorelease - Test if the given class is objc_autorelease or
/// equivalent.
static bool IsAutorelease(InstructionClass Class) {
  return Class == IC_Autorelease ||
         Class == IC_AutoreleaseRV;
}
/// IsForwarding - Test if the given class represents instructions which return
/// their argument verbatim.
static bool IsForwarding(InstructionClass Class) {
  // objc_retainBlock technically doesn't always return its argument
  // verbatim, but it doesn't matter for our purposes here.
  return Class == IC_Retain ||
         Class == IC_RetainRV ||
         Class == IC_Autorelease ||
         Class == IC_AutoreleaseRV ||
         Class == IC_RetainBlock ||
         Class == IC_NoopCast;
}

/// IsNoopOnNull - Test if the given class represents instructions which do
/// nothing if passed a null pointer.
static bool IsNoopOnNull(InstructionClass Class) {
  return Class == IC_Retain ||
         Class == IC_RetainRV ||
         Class == IC_Release ||
         Class == IC_Autorelease ||
         Class == IC_AutoreleaseRV ||
         Class == IC_RetainBlock;
}

/// IsAlwaysTail - Test if the given class represents instructions which are
/// always safe to mark with the "tail" keyword.
static bool IsAlwaysTail(InstructionClass Class) {
  // IC_RetainBlock may be given a stack argument.
  return Class == IC_Retain ||
         Class == IC_RetainRV ||
         Class == IC_Autorelease ||
         Class == IC_AutoreleaseRV;
}

/// IsNoThrow - Test if the given class represents instructions which are always
/// safe to mark with the nounwind attribute.
static bool IsNoThrow(InstructionClass Class) {
  // objc_retainBlock is not nounwind because it calls user copy constructors
  // which could theoretically throw.
  return Class == IC_Retain ||
         Class == IC_RetainRV ||
         Class == IC_Release ||
         Class == IC_Autorelease ||
         Class == IC_AutoreleaseRV ||
         Class == IC_AutoreleasepoolPush ||
         Class == IC_AutoreleasepoolPop;
}

/// EraseInstruction - Erase the given instruction. ObjC calls return their
/// argument verbatim, so if it's such a call and the return value has users,
/// replace them with the argument value.
static void EraseInstruction(Instruction *CI) {
  Value *OldArg = cast<CallInst>(CI)->getArgOperand(0);

  bool Unused = CI->use_empty();

  if (!Unused) {
    // Replace the return value with the argument.
    assert(IsForwarding(GetBasicInstructionClass(CI)) &&
           "Can't delete non-forwarding instruction with users!");
    CI->replaceAllUsesWith(OldArg);
  }

  CI->eraseFromParent();

  if (Unused)
    RecursivelyDeleteTriviallyDeadInstructions(OldArg);
}

/// GetUnderlyingObjCPtr - This is a wrapper around getUnderlyingObject which
/// also knows how to look through objc_retain and objc_autorelease calls, which
/// we know to return their argument verbatim.
static const Value *GetUnderlyingObjCPtr(const Value *V) {
  for (;;) {
    V = GetUnderlyingObject(V);
    if (!IsForwarding(GetBasicInstructionClass(V)))
      break;
    V = cast<CallInst>(V)->getArgOperand(0);
  }

  return V;
}
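
// Illustrative: given IR like
//   %1 = call i8* @objc_retain(i8* %0)
//   %2 = getelementptr i8* %1, i64 0
// GetUnderlyingObjCPtr(%2) looks through the GEP via GetUnderlyingObject and
// then through the forwarding objc_retain call, yielding %0.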
/// StripPointerCastsAndObjCCalls - This is a wrapper around
/// Value::stripPointerCasts which also knows how to look through objc_retain
/// and objc_autorelease calls, which we know to return their argument verbatim.
static const Value *StripPointerCastsAndObjCCalls(const Value *V) {
  for (;;) {
    V = V->stripPointerCasts();
    if (!IsForwarding(GetBasicInstructionClass(V)))
      break;
    V = cast<CallInst>(V)->getArgOperand(0);
  }
  return V;
}

/// StripPointerCastsAndObjCCalls - This is a wrapper around
/// Value::stripPointerCasts which also knows how to look through objc_retain
/// and objc_autorelease calls, which we know to return their argument verbatim.
static Value *StripPointerCastsAndObjCCalls(Value *V) {
  for (;;) {
    V = V->stripPointerCasts();
    if (!IsForwarding(GetBasicInstructionClass(V)))
      break;
    V = cast<CallInst>(V)->getArgOperand(0);
  }
  return V;
}

/// GetObjCArg - Assuming the given instruction is one of the special calls such
/// as objc_retain or objc_release, return the argument value, stripped of no-op
/// casts and forwarding calls.
static Value *GetObjCArg(Value *Inst) {
  return StripPointerCastsAndObjCCalls(cast<CallInst>(Inst)->getArgOperand(0));
}

/// IsObjCIdentifiedObject - This is similar to AliasAnalysis'
/// isObjCIdentifiedObject, except that it uses special knowledge of
/// ObjC conventions.
static bool IsObjCIdentifiedObject(const Value *V) {
  // Assume that call results and arguments have their own "provenance".
  // Constants (including GlobalVariables) and Allocas are never
  // reference-counted.
  if (isa<CallInst>(V) || isa<InvokeInst>(V) ||
      isa<Argument>(V) || isa<Constant>(V) ||
      isa<AllocaInst>(V))
    return true;

  if (const LoadInst *LI = dyn_cast<LoadInst>(V)) {
    const Value *Pointer =
      StripPointerCastsAndObjCCalls(LI->getPointerOperand());
    if (const GlobalVariable *GV = dyn_cast<GlobalVariable>(Pointer)) {
      // A constant pointer can't be pointing to an object on the heap. It may
      // be reference-counted, but it won't be deleted.
      if (GV->isConstant())
        return true;
      StringRef Name = GV->getName();
      // These special variables are known to hold values which are not
      // reference-counted pointers.
      if (Name.startswith("\01L_OBJC_SELECTOR_REFERENCES_") ||
          Name.startswith("\01L_OBJC_CLASSLIST_REFERENCES_") ||
          Name.startswith("\01L_OBJC_CLASSLIST_SUP_REFS_$_") ||
          Name.startswith("\01L_OBJC_METH_VAR_NAME_") ||
          Name.startswith("\01l_objc_msgSend_fixup_"))
        return true;
    }
  }

  return false;
}
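
// Illustrative: a call result such as
//   %x = call i8* @foo()    ; @foo is a placeholder name
// is "identified" -- the returned pointer has its own provenance -- whereas a
// pointer loaded from arbitrary memory is not, since it could refer to an
// object that is also reachable some other way.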
/// FindSingleUseIdentifiedObject - This is similar to
/// StripPointerCastsAndObjCCalls but it stops as soon as it finds a value
/// with multiple uses.
static const Value *FindSingleUseIdentifiedObject(const Value *Arg) {
  if (Arg->hasOneUse()) {
    if (const BitCastInst *BC = dyn_cast<BitCastInst>(Arg))
      return FindSingleUseIdentifiedObject(BC->getOperand(0));
    if (const GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(Arg))
      if (GEP->hasAllZeroIndices())
        return FindSingleUseIdentifiedObject(GEP->getPointerOperand());
    if (IsForwarding(GetBasicInstructionClass(Arg)))
      return FindSingleUseIdentifiedObject(
               cast<CallInst>(Arg)->getArgOperand(0));
    if (!IsObjCIdentifiedObject(Arg))
      return 0;
    return Arg;
  }

  // If we found an identifiable object with multiple uses, but all of those
  // uses are trivial, we can still consider this to be a single-use value.
  if (IsObjCIdentifiedObject(Arg)) {
    for (Value::const_use_iterator UI = Arg->use_begin(), UE = Arg->use_end();
         UI != UE; ++UI) {
      const User *U = *UI;
      if (!U->use_empty() || StripPointerCastsAndObjCCalls(U) != Arg)
        return 0;
    }

    return Arg;
  }

  return 0;
}

/// ModuleHasARC - Test if the given module looks interesting to run ARC
/// optimization on.
static bool ModuleHasARC(const Module &M) {
  return
    M.getNamedValue("objc_retain") ||
    M.getNamedValue("objc_release") ||
    M.getNamedValue("objc_autorelease") ||
    M.getNamedValue("objc_retainAutoreleasedReturnValue") ||
    M.getNamedValue("objc_retainBlock") ||
    M.getNamedValue("objc_autoreleaseReturnValue") ||
    M.getNamedValue("objc_autoreleasePoolPush") ||
    M.getNamedValue("objc_loadWeakRetained") ||
    M.getNamedValue("objc_loadWeak") ||
    M.getNamedValue("objc_destroyWeak") ||
    M.getNamedValue("objc_storeWeak") ||
    M.getNamedValue("objc_initWeak") ||
    M.getNamedValue("objc_moveWeak") ||
    M.getNamedValue("objc_copyWeak") ||
    M.getNamedValue("objc_retainedObject") ||
    M.getNamedValue("objc_unretainedObject") ||
    M.getNamedValue("objc_unretainedPointer");
}

//===----------------------------------------------------------------------===//
// ARC AliasAnalysis.
//===----------------------------------------------------------------------===//

#include "llvm/Pass.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/Passes.h"

namespace {
  /// ObjCARCAliasAnalysis - This is a simple alias analysis
  /// implementation that uses knowledge of ARC constructs to answer queries.
  ///
  /// TODO: This class could be generalized to know about other ObjC-specific
  /// tricks. Such as knowing that ivars in the non-fragile ABI are non-aliasing
  /// even though their offsets are dynamic.
  class ObjCARCAliasAnalysis : public ImmutablePass,
                               public AliasAnalysis {
  public:
    static char ID; // Class identification, replacement for typeinfo
    ObjCARCAliasAnalysis() : ImmutablePass(ID) {
      initializeObjCARCAliasAnalysisPass(*PassRegistry::getPassRegistry());
    }

  private:
    virtual void initializePass() {
      InitializeAliasAnalysis(this);
    }

    /// getAdjustedAnalysisPointer - This method is used when a pass implements
    /// an analysis interface through multiple inheritance. If needed, it
    /// should override this to adjust the this pointer as needed for the
    /// specified pass info.
    virtual void *getAdjustedAnalysisPointer(const void *PI) {
      if (PI == &AliasAnalysis::ID)
        return (AliasAnalysis*)this;
      return this;
    }

    virtual void getAnalysisUsage(AnalysisUsage &AU) const;
    virtual AliasResult alias(const Location &LocA, const Location &LocB);
    virtual bool pointsToConstantMemory(const Location &Loc, bool OrLocal);
    virtual ModRefBehavior getModRefBehavior(ImmutableCallSite CS);
    virtual ModRefBehavior getModRefBehavior(const Function *F);
    virtual ModRefResult getModRefInfo(ImmutableCallSite CS,
                                       const Location &Loc);
    virtual ModRefResult getModRefInfo(ImmutableCallSite CS1,
                                       ImmutableCallSite CS2);
  };
}  // End of anonymous namespace
// Register this pass...
char ObjCARCAliasAnalysis::ID = 0;
INITIALIZE_AG_PASS(ObjCARCAliasAnalysis, AliasAnalysis, "objc-arc-aa",
                   "ObjC-ARC-Based Alias Analysis", false, true, false)

ImmutablePass *llvm::createObjCARCAliasAnalysisPass() {
  return new ObjCARCAliasAnalysis();
}

void
ObjCARCAliasAnalysis::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesAll();
  AliasAnalysis::getAnalysisUsage(AU);
}

AliasAnalysis::AliasResult
ObjCARCAliasAnalysis::alias(const Location &LocA, const Location &LocB) {
  if (!EnableARCOpts)
    return AliasAnalysis::alias(LocA, LocB);

  // First, strip off no-ops, including ObjC-specific no-ops, and try making a
  // precise alias query.
  const Value *SA = StripPointerCastsAndObjCCalls(LocA.Ptr);
  const Value *SB = StripPointerCastsAndObjCCalls(LocB.Ptr);
  AliasResult Result =
    AliasAnalysis::alias(Location(SA, LocA.Size, LocA.TBAATag),
                         Location(SB, LocB.Size, LocB.TBAATag));
  if (Result != MayAlias)
    return Result;

  // If that failed, climb to the underlying object, including climbing through
  // ObjC-specific no-ops, and try making an imprecise alias query.
  const Value *UA = GetUnderlyingObjCPtr(SA);
  const Value *UB = GetUnderlyingObjCPtr(SB);
  if (UA != SA || UB != SB) {
    Result = AliasAnalysis::alias(Location(UA), Location(UB));
    // We can't use MustAlias or PartialAlias results here because
    // GetUnderlyingObjCPtr may return an offsetted pointer value.
    if (Result == NoAlias)
      return NoAlias;
  }

  // If that failed, fail. We don't need to chain here, since that's covered
  // by the earlier precise query.
  return MayAlias;
}

bool
ObjCARCAliasAnalysis::pointsToConstantMemory(const Location &Loc,
                                             bool OrLocal) {
  if (!EnableARCOpts)
    return AliasAnalysis::pointsToConstantMemory(Loc, OrLocal);

  // First, strip off no-ops, including ObjC-specific no-ops, and try making
  // a precise alias query.
  const Value *S = StripPointerCastsAndObjCCalls(Loc.Ptr);
  if (AliasAnalysis::pointsToConstantMemory(Location(S, Loc.Size, Loc.TBAATag),
                                            OrLocal))
    return true;

  // If that failed, climb to the underlying object, including climbing through
  // ObjC-specific no-ops, and try making an imprecise alias query.
  const Value *U = GetUnderlyingObjCPtr(S);
  if (U != S)
    return AliasAnalysis::pointsToConstantMemory(Location(U), OrLocal);

  // If that failed, fail. We don't need to chain here, since that's covered
  // by the earlier precise query.
  return false;
}
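
// Illustrative: because objc_retain returns its argument, a query like
//   alias(%x, %y)   where   %y = call i8* @objc_retain(i8* %x)
// strips %y back to %x and can answer MustAlias, where a generic alias
// analysis would have to treat the call result as an unknown pointer.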
AliasAnalysis::ModRefBehavior
ObjCARCAliasAnalysis::getModRefBehavior(ImmutableCallSite CS) {
  // We have nothing to do. Just chain to the next AliasAnalysis.
  return AliasAnalysis::getModRefBehavior(CS);
}

AliasAnalysis::ModRefBehavior
ObjCARCAliasAnalysis::getModRefBehavior(const Function *F) {
  if (!EnableARCOpts)
    return AliasAnalysis::getModRefBehavior(F);

  switch (GetFunctionClass(F)) {
  case IC_NoopCast:
    return DoesNotAccessMemory;
  default:
    break;
  }

  return AliasAnalysis::getModRefBehavior(F);
}

AliasAnalysis::ModRefResult
ObjCARCAliasAnalysis::getModRefInfo(ImmutableCallSite CS, const Location &Loc) {
  if (!EnableARCOpts)
    return AliasAnalysis::getModRefInfo(CS, Loc);

  switch (GetBasicInstructionClass(CS.getInstruction())) {
  case IC_Retain:
  case IC_RetainRV:
  case IC_Autorelease:
  case IC_AutoreleaseRV:
  case IC_NoopCast:
  case IC_AutoreleasepoolPush:
  case IC_FusedRetainAutorelease:
  case IC_FusedRetainAutoreleaseRV:
    // These functions don't access any memory visible to the compiler.
    // Note that this doesn't include objc_retainBlock, because it updates
    // pointers when it copies block data.
    return NoModRef;
  default:
    break;
  }

  return AliasAnalysis::getModRefInfo(CS, Loc);
}

AliasAnalysis::ModRefResult
ObjCARCAliasAnalysis::getModRefInfo(ImmutableCallSite CS1,
                                    ImmutableCallSite CS2) {
  // TODO: Theoretically we could check for dependencies between objc_* calls
  // and OnlyAccessesArgumentPointees calls or other well-behaved calls.
  return AliasAnalysis::getModRefInfo(CS1, CS2);
}

//===----------------------------------------------------------------------===//
// ARC expansion.
//===----------------------------------------------------------------------===//

#include "llvm/Support/InstIterator.h"
#include "llvm/Transforms/Scalar.h"

namespace {
  /// ObjCARCExpand - Early ARC transformations.
  class ObjCARCExpand : public FunctionPass {
    virtual void getAnalysisUsage(AnalysisUsage &AU) const;
    virtual bool doInitialization(Module &M);
    virtual bool runOnFunction(Function &F);

    /// Run - A flag indicating whether this optimization pass should run.
    bool Run;

  public:
    static char ID;
    ObjCARCExpand() : FunctionPass(ID) {
      initializeObjCARCExpandPass(*PassRegistry::getPassRegistry());
    }
  };
}

char ObjCARCExpand::ID = 0;
INITIALIZE_PASS(ObjCARCExpand,
                "objc-arc-expand", "ObjC ARC expansion", false, false)

Pass *llvm::createObjCARCExpandPass() {
  return new ObjCARCExpand();
}

void ObjCARCExpand::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesCFG();
}

bool ObjCARCExpand::doInitialization(Module &M) {
  Run = ModuleHasARC(M);
  return false;
}

bool ObjCARCExpand::runOnFunction(Function &F) {
  if (!EnableARCOpts)
    return false;

  // If nothing in the Module uses ARC, don't do anything.
  if (!Run)
    return false;

  bool Changed = false;

  for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E; ++I) {
    Instruction *Inst = &*I;

    switch (GetBasicInstructionClass(Inst)) {
    case IC_Retain:
    case IC_RetainRV:
    case IC_Autorelease:
    case IC_AutoreleaseRV:
    case IC_FusedRetainAutorelease:
    case IC_FusedRetainAutoreleaseRV:
      // These calls return their argument verbatim, as a low-level
      // optimization. However, this makes high-level optimizations
      // harder. Undo any uses of this optimization that the front-end
      // emitted here. We'll redo them in a later pass.
      Changed = true;
      Inst->replaceAllUsesWith(cast<CallInst>(Inst)->getArgOperand(0));
      break;
    default:
      break;
    }
  }

  return Changed;
}
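
// Illustrative: after this pass runs,
//   %1 = call i8* @objc_retain(i8* %0)
//   call void @use(i8* %1)    ; @use is a placeholder
// becomes
//   %1 = call i8* @objc_retain(i8* %0)
//   call void @use(i8* %0)
// so that later optimizations can see through the forwarding call.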
//===----------------------------------------------------------------------===//
// ARC optimization.
//===----------------------------------------------------------------------===//

// TODO: On code like this:
//
// objc_retain(%x)
// stuff_that_cannot_release()
// objc_autorelease(%x)
// stuff_that_cannot_release()
// objc_retain(%x)
// stuff_that_cannot_release()
// objc_autorelease(%x)
//
// The second retain and autorelease can be deleted.

// TODO: It should be possible to delete
// objc_autoreleasePoolPush and objc_autoreleasePoolPop
// pairs if nothing is actually autoreleased between them. Also, autorelease
// calls followed by objc_autoreleasePoolPop calls (perhaps in ObjC++ code
// after inlining) can be turned into plain release calls.

// TODO: Critical-edge splitting. If the optimal insertion point is
// a critical edge, the current algorithm has to fail, because it doesn't
// know how to split edges. It should be possible to make the optimizer
// think in terms of edges, rather than blocks, and then split critical
// edges on demand.

// TODO: OptimizeSequences could be generalized to be Interprocedural.

// TODO: Recognize that a bunch of other objc runtime calls have
// non-escaping arguments and non-releasing arguments, and may be
// non-autoreleasing.

// TODO: Sink autorelease calls as far as possible. Unfortunately we
// usually can't sink them past other calls, which would be the main
// case where it would be useful.

// TODO: The pointer returned from objc_loadWeakRetained is retained.

// TODO: Delete release+retain pairs (rare).

#include "llvm/GlobalAlias.h"
#include "llvm/Constants.h"
#include "llvm/LLVMContext.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/CFG.h"
#include "llvm/ADT/PostOrderIterator.h"
#include "llvm/ADT/Statistic.h"

STATISTIC(NumNoops, "Number of no-op objc calls eliminated");
STATISTIC(NumPartialNoops, "Number of partially no-op objc calls eliminated");
STATISTIC(NumAutoreleases, "Number of autoreleases converted to releases");
STATISTIC(NumRets, "Number of return value forwarding "
                   "retain+autoreleases eliminated");
STATISTIC(NumRRs, "Number of retain+release paths eliminated");
STATISTIC(NumPeeps, "Number of calls peephole-optimized");

namespace {
  /// ProvenanceAnalysis - This is similar to BasicAliasAnalysis, and it
  /// uses many of the same techniques, except it uses special ObjC-specific
  /// reasoning about pointer relationships.
  class ProvenanceAnalysis {
    AliasAnalysis *AA;

    typedef std::pair<const Value *, const Value *> ValuePairTy;
    typedef DenseMap<ValuePairTy, bool> CachedResultsTy;
    CachedResultsTy CachedResults;

    bool relatedCheck(const Value *A, const Value *B);
    bool relatedSelect(const SelectInst *A, const Value *B);
    bool relatedPHI(const PHINode *A, const Value *B);

    // Do not implement.
    void operator=(const ProvenanceAnalysis &);
    ProvenanceAnalysis(const ProvenanceAnalysis &);

  public:
    ProvenanceAnalysis() {}

    void setAA(AliasAnalysis *aa) { AA = aa; }

    AliasAnalysis *getAA() const { return AA; }

    bool related(const Value *A, const Value *B);

    void clear() {
      CachedResults.clear();
    }
  };
}

bool ProvenanceAnalysis::relatedSelect(const SelectInst *A, const Value *B) {
  // If the values are Selects with the same condition, we can do a more precise
  // check: just check for relations between the values on corresponding arms.
  if (const SelectInst *SB = dyn_cast<SelectInst>(B))
    if (A->getCondition() == SB->getCondition()) {
      if (related(A->getTrueValue(), SB->getTrueValue()))
        return true;
      if (related(A->getFalseValue(), SB->getFalseValue()))
        return true;
      return false;
    }

  // Check both arms of the Select node individually.
  if (related(A->getTrueValue(), B))
    return true;
  if (related(A->getFalseValue(), B))
    return true;

  // The arms both checked out.
  return false;
}

bool ProvenanceAnalysis::relatedPHI(const PHINode *A, const Value *B) {
  // If the values are PHIs in the same block, we can do a more precise as well
  // as efficient check: just check for relations between the values on
  // corresponding edges.
  if (const PHINode *PNB = dyn_cast<PHINode>(B))
    if (PNB->getParent() == A->getParent()) {
      for (unsigned i = 0, e = A->getNumIncomingValues(); i != e; ++i)
        if (related(A->getIncomingValue(i),
                    PNB->getIncomingValueForBlock(A->getIncomingBlock(i))))
          return true;
      return false;
    }

  // Check each unique source of the PHI node against B.
  SmallPtrSet<const Value *, 4> UniqueSrc;
  for (unsigned i = 0, e = A->getNumIncomingValues(); i != e; ++i) {
    const Value *PV1 = A->getIncomingValue(i);
    if (UniqueSrc.insert(PV1) && related(PV1, B))
      return true;
  }

  // All of the arms checked out.
  return false;
}

/// isStoredObjCPointer - Test if the value of P, or any value covered by its
/// provenance, is ever stored within the function (not counting callees).
static bool isStoredObjCPointer(const Value *P) {
  SmallPtrSet<const Value *, 8> Visited;
  SmallVector<const Value *, 8> Worklist;
  Worklist.push_back(P);
  Visited.insert(P);
  do {
    P = Worklist.pop_back_val();
    for (Value::const_use_iterator UI = P->use_begin(), UE = P->use_end();
         UI != UE; ++UI) {
      const User *Ur = *UI;
      if (isa<StoreInst>(Ur)) {
        if (UI.getOperandNo() == 0)
          // The pointer is stored.
          return true;
        // The pointer is stored through.
        continue;
      }
      if (isa<CallInst>(Ur))
        // The pointer is passed as an argument, ignore this.
        continue;
      if (isa<PtrToIntInst>(P))
        // Assume the worst.
        return true;
      if (Visited.insert(Ur))
        Worklist.push_back(Ur);
    }
  } while (!Worklist.empty());

  // Everything checked out.
  return false;
}
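
// Illustrative: in
//   store i8* %p, i8** %slot    ; %p is the value operand: %p is "stored"
//   store i8* %q, i8** %p       ; %p is the address operand: merely stored
//                               ; through, which doesn't count
// only the first store makes isStoredObjCPointer(%p) return true.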
bool ProvenanceAnalysis::relatedCheck(const Value *A, const Value *B) {
  // Skip past provenance pass-throughs.
  A = GetUnderlyingObjCPtr(A);
  B = GetUnderlyingObjCPtr(B);

  // Quick check.
  if (A == B)
    return true;

  // Ask regular AliasAnalysis, for a first approximation.
  switch (AA->alias(A, B)) {
  case AliasAnalysis::NoAlias:
    return false;
  case AliasAnalysis::MustAlias:
  case AliasAnalysis::PartialAlias:
    return true;
  case AliasAnalysis::MayAlias:
    break;
  }

  bool AIsIdentified = IsObjCIdentifiedObject(A);
  bool BIsIdentified = IsObjCIdentifiedObject(B);

  // An ObjC-Identified object can't alias a load if it is never locally stored.
  if (AIsIdentified) {
    if (BIsIdentified) {
      // If both pointers have provenance, they can be directly compared.
      if (A != B)
        return false;
    } else {
      if (isa<LoadInst>(B))
        return isStoredObjCPointer(A);
    }
  } else {
    if (BIsIdentified && isa<LoadInst>(A))
      return isStoredObjCPointer(B);
  }

  // Special handling for PHI and Select.
  if (const PHINode *PN = dyn_cast<PHINode>(A))
    return relatedPHI(PN, B);
  if (const PHINode *PN = dyn_cast<PHINode>(B))
    return relatedPHI(PN, A);
  if (const SelectInst *S = dyn_cast<SelectInst>(A))
    return relatedSelect(S, B);
  if (const SelectInst *S = dyn_cast<SelectInst>(B))
    return relatedSelect(S, A);

  // Conservative.
  return true;
}

bool ProvenanceAnalysis::related(const Value *A, const Value *B) {
  // Begin by inserting a conservative value into the map. If the insertion
  // fails, we have the answer already. If it succeeds, leave it there until we
  // compute the real answer to guard against recursive queries.
  if (A > B) std::swap(A, B);
  std::pair<CachedResultsTy::iterator, bool> Pair =
    CachedResults.insert(std::make_pair(ValuePairTy(A, B), true));
  if (!Pair.second)
    return Pair.first->second;

  bool Result = relatedCheck(A, B);
  CachedResults[ValuePairTy(A, B)] = Result;
  return Result;
}

namespace {
  // Sequence - A sequence of states that a pointer may go through in which an
  // objc_retain and objc_release are actually needed.
  enum Sequence {
    S_None,
    S_Retain,         ///< objc_retain(x)
    S_CanRelease,     ///< foo(x) -- x could possibly see a ref count decrement
    S_Use,            ///< any use of x
    S_Stop,           ///< like S_Release, but code motion is stopped
    S_Release,        ///< objc_release(x)
    S_MovableRelease  ///< objc_release(x), !clang.imprecise_release
  };
}

static Sequence MergeSeqs(Sequence A, Sequence B, bool TopDown) {
  // The easy cases.
  if (A == B)
    return A;
  if (A == S_None || B == S_None)
    return S_None;

  if (A > B) std::swap(A, B);
  if (TopDown) {
    // Choose the side which is further along in the sequence.
    if ((A == S_Retain || A == S_CanRelease) &&
        (B == S_CanRelease || B == S_Use))
      return B;
  } else {
    // Choose the side which is further along in the sequence.
    if ((A == S_Use || A == S_CanRelease) &&
        (B == S_Use || B == S_Release || B == S_Stop || B == S_MovableRelease))
      return A;
    // If both sides are releases, choose the more conservative one.
    if (A == S_Stop && (B == S_Release || B == S_MovableRelease))
      return A;
    if (A == S_Release && B == S_MovableRelease)
      return A;
  }

  return S_None;
}
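
// Illustrative: in straight-line code such as
//   call i8* @objc_retain(i8* %x)      ; S_Retain
//   call void @opaque()                ; S_CanRelease: could decrement %x
//   %v = load i8* %x                   ; S_Use
//   call void @objc_release(i8* %x)    ; S_Release
// the pointer %x walks the sequence top-down in the order shown; @opaque is
// a placeholder for an arbitrary call.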
namespace {
  /// RRInfo - Unidirectional information about either a
  /// retain-decrement-use-release sequence or release-use-decrement-retain
  /// reverse sequence.
  struct RRInfo {
    /// KnownSafe - After an objc_retain, the reference count of the referenced
    /// object is known to be positive. Similarly, before an objc_release, the
    /// reference count of the referenced object is known to be positive. If
    /// there are retain-release pairs in code regions where the retain count
    /// is known to be positive, they can be eliminated, regardless of any side
    /// effects between them.
    ///
    /// Also, a retain+release pair nested within another retain+release
    /// pair all on the known same pointer value can be eliminated, regardless
    /// of any intervening side effects.
    ///
    /// KnownSafe is true when either of these conditions is satisfied.
    bool KnownSafe;

    /// IsRetainBlock - True if the Calls are objc_retainBlock calls (as
    /// opposed to objc_retain calls).
    bool IsRetainBlock;

    /// IsTailCallRelease - True if the objc_release calls are all marked
    /// with the "tail" keyword.
    bool IsTailCallRelease;

    /// ReleaseMetadata - If the Calls are objc_release calls and they all have
    /// a clang.imprecise_release tag, this is the metadata tag.
    MDNode *ReleaseMetadata;

    /// Calls - For a top-down sequence, the set of objc_retains or
    /// objc_retainBlocks. For bottom-up, the set of objc_releases.
    SmallPtrSet<Instruction *, 2> Calls;

    /// ReverseInsertPts - The set of optimal insert positions for
    /// moving calls in the opposite sequence.
    SmallPtrSet<Instruction *, 2> ReverseInsertPts;

    RRInfo() :
      KnownSafe(false), IsRetainBlock(false), IsTailCallRelease(false),
      ReleaseMetadata(0) {}

    void clear();
  };
}

void RRInfo::clear() {
  KnownSafe = false;
  IsRetainBlock = false;
  IsTailCallRelease = false;
  ReleaseMetadata = 0;
  Calls.clear();
  ReverseInsertPts.clear();
}

namespace {
  /// PtrState - This class summarizes several per-pointer runtime properties
  /// which are propagated through the flow graph.
  class PtrState {
    /// RefCount - The known minimum number of reference count increments.
    unsigned RefCount;

    /// NestCount - The known minimum level of retain+release nesting.
    unsigned NestCount;

    /// Seq - The current position in the sequence.
    Sequence Seq;

  public:
    /// RRI - Unidirectional information about the current sequence.
    /// TODO: Encapsulate this better.
    RRInfo RRI;

    PtrState() : RefCount(0), NestCount(0), Seq(S_None) {}

    void SetAtLeastOneRefCount() {
      if (RefCount == 0) RefCount = 1;
    }

    void IncrementRefCount() {
      if (RefCount != UINT_MAX) ++RefCount;
    }

    void DecrementRefCount() {
      if (RefCount != 0) --RefCount;
    }

    bool IsKnownIncremented() const {
      return RefCount > 0;
    }

    void IncrementNestCount() {
      if (NestCount != UINT_MAX) ++NestCount;
    }

    void DecrementNestCount() {
      if (NestCount != 0) --NestCount;
    }

    bool IsKnownNested() const {
      return NestCount > 0;
    }

    void SetSeq(Sequence NewSeq) {
      Seq = NewSeq;
    }
    void SetSeqToRelease(MDNode *M) {
      if (Seq == S_None || Seq == S_Use) {
        Seq = M ? S_MovableRelease : S_Release;
        RRI.ReleaseMetadata = M;
      } else if (Seq != S_MovableRelease || RRI.ReleaseMetadata != M) {
        Seq = S_Release;
        RRI.ReleaseMetadata = 0;
      }
    }

    Sequence GetSeq() const {
      return Seq;
    }

    void ClearSequenceProgress() {
      Seq = S_None;
      RRI.clear();
    }

    void Merge(const PtrState &Other, bool TopDown);
  };
}

void
PtrState::Merge(const PtrState &Other, bool TopDown) {
  Seq = MergeSeqs(Seq, Other.Seq, TopDown);
  RefCount = std::min(RefCount, Other.RefCount);
  NestCount = std::min(NestCount, Other.NestCount);

  // We can't merge a plain objc_retain with an objc_retainBlock.
  if (RRI.IsRetainBlock != Other.RRI.IsRetainBlock)
    Seq = S_None;

  if (Seq == S_None) {
    RRI.clear();
  } else {
    // Conservatively merge the ReleaseMetadata information.
    if (RRI.ReleaseMetadata != Other.RRI.ReleaseMetadata)
      RRI.ReleaseMetadata = 0;

    RRI.KnownSafe = RRI.KnownSafe && Other.RRI.KnownSafe;
    RRI.IsTailCallRelease = RRI.IsTailCallRelease &&
                            Other.RRI.IsTailCallRelease;
    RRI.Calls.insert(Other.RRI.Calls.begin(), Other.RRI.Calls.end());
    RRI.ReverseInsertPts.insert(Other.RRI.ReverseInsertPts.begin(),
                                Other.RRI.ReverseInsertPts.end());
  }
}

namespace {
  /// BBState - Per-BasicBlock state.
  class BBState {
    /// TopDownPathCount - The number of unique control paths from the entry
    /// which can reach this block.
    unsigned TopDownPathCount;

    /// BottomUpPathCount - The number of unique control paths to exits
    /// from this block.
    unsigned BottomUpPathCount;

    /// MapTy - A type for PerPtrTopDown and PerPtrBottomUp.
    typedef MapVector<const Value *, PtrState> MapTy;

    /// PerPtrTopDown - The top-down traversal uses this to record information
    /// known about a pointer at the bottom of each block.
    MapTy PerPtrTopDown;

    /// PerPtrBottomUp - The bottom-up traversal uses this to record information
    /// known about a pointer at the top of each block.
    MapTy PerPtrBottomUp;

  public:
    BBState() : TopDownPathCount(0), BottomUpPathCount(0) {}

    typedef MapTy::iterator ptr_iterator;
    typedef MapTy::const_iterator ptr_const_iterator;

    ptr_iterator top_down_ptr_begin() { return PerPtrTopDown.begin(); }
    ptr_iterator top_down_ptr_end() { return PerPtrTopDown.end(); }
    ptr_const_iterator top_down_ptr_begin() const {
      return PerPtrTopDown.begin();
    }
    ptr_const_iterator top_down_ptr_end() const {
      return PerPtrTopDown.end();
    }

    ptr_iterator bottom_up_ptr_begin() { return PerPtrBottomUp.begin(); }
    ptr_iterator bottom_up_ptr_end() { return PerPtrBottomUp.end(); }
    ptr_const_iterator bottom_up_ptr_begin() const {
      return PerPtrBottomUp.begin();
    }
    ptr_const_iterator bottom_up_ptr_end() const {
      return PerPtrBottomUp.end();
    }

    /// SetAsEntry - Mark this block as being an entry block, which has one
    /// path from the entry by definition.
    void SetAsEntry() { TopDownPathCount = 1; }

    /// SetAsExit - Mark this block as being an exit block, which has one
    /// path to an exit by definition.
    void SetAsExit() { BottomUpPathCount = 1; }

    PtrState &getPtrTopDownState(const Value *Arg) {
      return PerPtrTopDown[Arg];
    }

    PtrState &getPtrBottomUpState(const Value *Arg) {
      return PerPtrBottomUp[Arg];
    }

    void clearBottomUpPointers() {
      PerPtrBottomUp.clear();
    }

    void clearTopDownPointers() {
      PerPtrTopDown.clear();
    }

    void InitFromPred(const BBState &Other);
    void InitFromSucc(const BBState &Other);
    void MergePred(const BBState &Other);
    void MergeSucc(const BBState &Other);

    /// GetAllPathCount - Return the number of possible unique paths from an
    /// entry to an exit which pass through this block. This is only valid
    /// after both the top-down and bottom-up traversals are complete.
    unsigned GetAllPathCount() const {
      return TopDownPathCount * BottomUpPathCount;
    }

    /// IsVisitedTopDown - Test whether the block for this BBState has been
    /// visited by the top-down portion of the algorithm.
    bool isVisitedTopDown() const {
      return TopDownPathCount != 0;
    }
  };
}

void BBState::InitFromPred(const BBState &Other) {
  PerPtrTopDown = Other.PerPtrTopDown;
  TopDownPathCount = Other.TopDownPathCount;
}

void BBState::InitFromSucc(const BBState &Other) {
  PerPtrBottomUp = Other.PerPtrBottomUp;
  BottomUpPathCount = Other.BottomUpPathCount;
}

/// MergePred - The top-down traversal uses this to merge information about
/// predecessors to form the initial state for a new block.
void BBState::MergePred(const BBState &Other) {
  // Other.TopDownPathCount can be 0, in which case it is either dead or a
  // loop backedge. Loop backedges are special.
  TopDownPathCount += Other.TopDownPathCount;

  // For each entry in the other set, if our set has an entry with the same key,
  // merge the entries. Otherwise, copy the entry and merge it with an empty
  // entry.
  for (ptr_const_iterator MI = Other.top_down_ptr_begin(),
       ME = Other.top_down_ptr_end(); MI != ME; ++MI) {
    std::pair<ptr_iterator, bool> Pair = PerPtrTopDown.insert(*MI);
    Pair.first->second.Merge(Pair.second ? PtrState() : MI->second,
                             /*TopDown=*/true);
  }

  // For each entry in our set, if the other set doesn't have an entry with the
  // same key, force it to merge with an empty entry.
  for (ptr_iterator MI = top_down_ptr_begin(),
       ME = top_down_ptr_end(); MI != ME; ++MI)
    if (Other.PerPtrTopDown.find(MI->first) == Other.PerPtrTopDown.end())
      MI->second.Merge(PtrState(), /*TopDown=*/true);
}
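
// Illustrative: in a diamond CFG
//   entry -> A, entry -> B, A -> exit, B -> exit
// the exit block accumulates TopDownPathCount = 2 (one path through A, one
// through B) and has BottomUpPathCount = 1, so GetAllPathCount() there is 2.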
/// MergeSucc - The bottom-up traversal uses this to merge information about
/// successors to form the initial state for a new block.
void BBState::MergeSucc(const BBState &Other) {
  // Other.BottomUpPathCount can be 0, in which case it is either dead or a
  // loop backedge. Loop backedges are special.
  BottomUpPathCount += Other.BottomUpPathCount;

  // For each entry in the other set, if our set has an entry with the
  // same key, merge the entries. Otherwise, copy the entry and merge
  // it with an empty entry.
  for (ptr_const_iterator MI = Other.bottom_up_ptr_begin(),
       ME = Other.bottom_up_ptr_end(); MI != ME; ++MI) {
    std::pair<ptr_iterator, bool> Pair = PerPtrBottomUp.insert(*MI);
    Pair.first->second.Merge(Pair.second ? PtrState() : MI->second,
                             /*TopDown=*/false);
  }

  // For each entry in our set, if the other set doesn't have an entry
  // with the same key, force it to merge with an empty entry.
  for (ptr_iterator MI = bottom_up_ptr_begin(),
       ME = bottom_up_ptr_end(); MI != ME; ++MI)
    if (Other.PerPtrBottomUp.find(MI->first) == Other.PerPtrBottomUp.end())
      MI->second.Merge(PtrState(), /*TopDown=*/false);
}

namespace {
  /// ObjCARCOpt - The main ARC optimization pass.
  class ObjCARCOpt : public FunctionPass {
    bool Changed;
    ProvenanceAnalysis PA;

    /// Run - A flag indicating whether this optimization pass should run.
    bool Run;

    /// RetainRVCallee, etc. - Declarations for ObjC runtime
    /// functions, for use in creating calls to them. These are initialized
    /// lazily to avoid cluttering up the Module with unused declarations.
    Constant *RetainRVCallee, *AutoreleaseRVCallee, *ReleaseCallee,
             *RetainCallee, *RetainBlockCallee, *AutoreleaseCallee;

    /// UsedInThisFunction - Flags which determine whether each of the
    /// interesting runtime functions is in fact used in the current function.
    unsigned UsedInThisFunction;

    /// ImpreciseReleaseMDKind - The Metadata Kind for clang.imprecise_release
    /// metadata.
    unsigned ImpreciseReleaseMDKind;

    Constant *getRetainRVCallee(Module *M);
    Constant *getAutoreleaseRVCallee(Module *M);
    Constant *getReleaseCallee(Module *M);
    Constant *getRetainCallee(Module *M);
    Constant *getRetainBlockCallee(Module *M);
    Constant *getAutoreleaseCallee(Module *M);

    void OptimizeRetainCall(Function &F, Instruction *Retain);
    bool OptimizeRetainRVCall(Function &F, Instruction *RetainRV);
    void OptimizeAutoreleaseRVCall(Function &F, Instruction *AutoreleaseRV);
    void OptimizeIndividualCalls(Function &F);

    void CheckForCFGHazards(const BasicBlock *BB,
                            DenseMap<const BasicBlock *, BBState> &BBStates,
                            BBState &MyStates) const;
    bool VisitBottomUp(BasicBlock *BB,
                       DenseMap<const BasicBlock *, BBState> &BBStates,
                       MapVector<Value *, RRInfo> &Retains);
    bool VisitTopDown(BasicBlock *BB,
                      DenseMap<const BasicBlock *, BBState> &BBStates,
                      DenseMap<Value *, RRInfo> &Releases);
    bool Visit(Function &F,
               DenseMap<const BasicBlock *, BBState> &BBStates,
               MapVector<Value *, RRInfo> &Retains,
               DenseMap<Value *, RRInfo> &Releases);

    void MoveCalls(Value *Arg, RRInfo &RetainsToMove, RRInfo &ReleasesToMove,
                   MapVector<Value *, RRInfo> &Retains,
                   DenseMap<Value *, RRInfo> &Releases,
                   SmallVectorImpl<Instruction *> &DeadInsts,
                   Module *M);

    bool PerformCodePlacement(DenseMap<const BasicBlock *, BBState> &BBStates,
                              MapVector<Value *, RRInfo> &Retains,
                              DenseMap<Value *, RRInfo> &Releases,
                              Module *M);

    void OptimizeWeakCalls(Function &F);

    bool OptimizeSequences(Function &F);

    void OptimizeReturns(Function &F);

    virtual void getAnalysisUsage(AnalysisUsage &AU) const;
    virtual bool doInitialization(Module &M);
    virtual bool runOnFunction(Function &F);
    virtual void releaseMemory();

  public:
    static char ID;
    ObjCARCOpt() : FunctionPass(ID) {
      initializeObjCARCOptPass(*PassRegistry::getPassRegistry());
    }
  };
}
char ObjCARCOpt::ID = 0;
INITIALIZE_PASS_BEGIN(ObjCARCOpt,
                      "objc-arc", "ObjC ARC optimization", false, false)
INITIALIZE_PASS_DEPENDENCY(ObjCARCAliasAnalysis)
INITIALIZE_PASS_END(ObjCARCOpt,
                    "objc-arc", "ObjC ARC optimization", false, false)

Pass *llvm::createObjCARCOptPass() {
  return new ObjCARCOpt();
}

void ObjCARCOpt::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.addRequired<ObjCARCAliasAnalysis>();
  AU.addRequired<AliasAnalysis>();
  // ARC optimization doesn't currently split critical edges.
  AU.setPreservesCFG();
}

Constant *ObjCARCOpt::getRetainRVCallee(Module *M) {
  if (!RetainRVCallee) {
    LLVMContext &C = M->getContext();
    Type *I8X = PointerType::getUnqual(Type::getInt8Ty(C));
    std::vector<Type *> Params;
    Params.push_back(I8X);
    FunctionType *FTy =
      FunctionType::get(I8X, Params, /*isVarArg=*/false);
    AttrListPtr Attributes;
    Attributes.addAttr(~0u, Attribute::NoUnwind);
    RetainRVCallee =
      M->getOrInsertFunction("objc_retainAutoreleasedReturnValue", FTy,
                             Attributes);
  }
  return RetainRVCallee;
}

Constant *ObjCARCOpt::getAutoreleaseRVCallee(Module *M) {
  if (!AutoreleaseRVCallee) {
    LLVMContext &C = M->getContext();
    Type *I8X = PointerType::getUnqual(Type::getInt8Ty(C));
    std::vector<Type *> Params;
    Params.push_back(I8X);
    FunctionType *FTy =
      FunctionType::get(I8X, Params, /*isVarArg=*/false);
    AttrListPtr Attributes;
    Attributes.addAttr(~0u, Attribute::NoUnwind);
    AutoreleaseRVCallee =
      M->getOrInsertFunction("objc_autoreleaseReturnValue", FTy,
                             Attributes);
  }
  return AutoreleaseRVCallee;
}

Constant *ObjCARCOpt::getReleaseCallee(Module *M) {
  if (!ReleaseCallee) {
    LLVMContext &C = M->getContext();
    std::vector<Type *> Params;
    Params.push_back(PointerType::getUnqual(Type::getInt8Ty(C)));
    AttrListPtr Attributes;
    Attributes.addAttr(~0u, Attribute::NoUnwind);
    ReleaseCallee =
      M->getOrInsertFunction(
        "objc_release",
        FunctionType::get(Type::getVoidTy(C), Params, /*isVarArg=*/false),
        Attributes);
  }
  return ReleaseCallee;
}

Constant *ObjCARCOpt::getRetainCallee(Module *M) {
  if (!RetainCallee) {
    LLVMContext &C = M->getContext();
    std::vector<Type *> Params;
    Params.push_back(PointerType::getUnqual(Type::getInt8Ty(C)));
    AttrListPtr Attributes;
    Attributes.addAttr(~0u, Attribute::NoUnwind);
    RetainCallee =
      M->getOrInsertFunction(
        "objc_retain",
        FunctionType::get(Params[0], Params, /*isVarArg=*/false),
        Attributes);
  }
  return RetainCallee;
}
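
// Illustrative: getRetainCallee lazily materializes a declaration equivalent
// to
//   declare i8* @objc_retain(i8*) nounwind
// in the module the first time the optimizer needs to create a retain call.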
1611 RetainBlockCallee = 1612 M->getOrInsertFunction( 1613 "objc_retainBlock", 1614 FunctionType::get(Params[0], Params, /*isVarArg=*/false), 1615 Attributes); 1616 } 1617 return RetainBlockCallee; 1618 } 1619 1620 Constant *ObjCARCOpt::getAutoreleaseCallee(Module *M) { 1621 if (!AutoreleaseCallee) { 1622 LLVMContext &C = M->getContext(); 1623 std::vector<Type *> Params; 1624 Params.push_back(PointerType::getUnqual(Type::getInt8Ty(C))); 1625 AttrListPtr Attributes; 1626 Attributes.addAttr(~0u, Attribute::NoUnwind); 1627 AutoreleaseCallee = 1628 M->getOrInsertFunction( 1629 "objc_autorelease", 1630 FunctionType::get(Params[0], Params, /*isVarArg=*/false), 1631 Attributes); 1632 } 1633 return AutoreleaseCallee; 1634 } 1635 1636 /// CanAlterRefCount - Test whether the given instruction can result in a 1637 /// reference count modification (positive or negative) for the pointer's 1638 /// object. 1639 static bool 1640 CanAlterRefCount(const Instruction *Inst, const Value *Ptr, 1641 ProvenanceAnalysis &PA, InstructionClass Class) { 1642 switch (Class) { 1643 case IC_Autorelease: 1644 case IC_AutoreleaseRV: 1645 case IC_User: 1646 // These operations never directly modify a reference count. 1647 return false; 1648 default: break; 1649 } 1650 1651 ImmutableCallSite CS = static_cast<const Value *>(Inst); 1652 assert(CS && "Only calls can alter reference counts!"); 1653 1654 // See if AliasAnalysis can help us with the call. 1655 AliasAnalysis::ModRefBehavior MRB = PA.getAA()->getModRefBehavior(CS); 1656 if (AliasAnalysis::onlyReadsMemory(MRB)) 1657 return false; 1658 if (AliasAnalysis::onlyAccessesArgPointees(MRB)) { 1659 for (ImmutableCallSite::arg_iterator I = CS.arg_begin(), E = CS.arg_end(); 1660 I != E; ++I) { 1661 const Value *Op = *I; 1662 if (IsPotentialUse(Op) && PA.related(Ptr, Op)) 1663 return true; 1664 } 1665 return false; 1666 } 1667 1668 // Assume the worst. 1669 return true; 1670 } 1671 1672 /// CanUse - Test whether the given instruction can "use" the given pointer's 1673 /// object in a way that requires the reference count to be positive. 1674 static bool 1675 CanUse(const Instruction *Inst, const Value *Ptr, ProvenanceAnalysis &PA, 1676 InstructionClass Class) { 1677 // IC_Call operations (as opposed to IC_CallOrUser) never "use" objc pointers. 1678 if (Class == IC_Call) 1679 return false; 1680 1681 // Consider various instructions which may have pointer arguments which are 1682 // not "uses". 1683 if (const ICmpInst *ICI = dyn_cast<ICmpInst>(Inst)) { 1684 // Comparing a pointer with null, or any other constant, isn't really a use, 1685 // because we don't care what the pointer points to, or about the values 1686 // of any other dynamic reference-counted pointers. 1687 if (!IsPotentialUse(ICI->getOperand(1))) 1688 return false; 1689 } else if (ImmutableCallSite CS = static_cast<const Value *>(Inst)) { 1690 // For calls, just check the arguments (and not the callee operand). 1691 for (ImmutableCallSite::arg_iterator OI = CS.arg_begin(), 1692 OE = CS.arg_end(); OI != OE; ++OI) { 1693 const Value *Op = *OI; 1694 if (IsPotentialUse(Op) && PA.related(Ptr, Op)) 1695 return true; 1696 } 1697 return false; 1698 } else if (const StoreInst *SI = dyn_cast<StoreInst>(Inst)) { 1699 // Special-case stores, because we don't care about the stored value, just 1700 // the store address. 1701 const Value *Op = GetUnderlyingObjCPtr(SI->getPointerOperand()); 1702 // If we can't tell what the underlying object was, assume there is a 1703 // dependence. 
1704 return IsPotentialUse(Op) && PA.related(Op, Ptr); 1705 } 1706 1707 // Check each operand for a match. 1708 for (User::const_op_iterator OI = Inst->op_begin(), OE = Inst->op_end(); 1709 OI != OE; ++OI) { 1710 const Value *Op = *OI; 1711 if (IsPotentialUse(Op) && PA.related(Ptr, Op)) 1712 return true; 1713 } 1714 return false; 1715 } 1716 1717 /// CanInterruptRV - Test whether the given instruction can autorelease 1718 /// any pointer or cause an autoreleasepool pop. 1719 static bool 1720 CanInterruptRV(InstructionClass Class) { 1721 switch (Class) { 1722 case IC_AutoreleasepoolPop: 1723 case IC_CallOrUser: 1724 case IC_Call: 1725 case IC_Autorelease: 1726 case IC_AutoreleaseRV: 1727 case IC_FusedRetainAutorelease: 1728 case IC_FusedRetainAutoreleaseRV: 1729 return true; 1730 default: 1731 return false; 1732 } 1733 } 1734 1735 namespace { 1736 /// DependenceKind - There are several kinds of dependence-like concepts in 1737 /// use here. 1738 enum DependenceKind { 1739 NeedsPositiveRetainCount, 1740 CanChangeRetainCount, 1741 RetainAutoreleaseDep, ///< Blocks objc_retainAutorelease. 1742 RetainAutoreleaseRVDep, ///< Blocks objc_retainAutoreleaseReturnValue. 1743 RetainRVDep ///< Blocks objc_retainAutoreleasedReturnValue. 1744 }; 1745 } 1746 1747 /// Depends - Test if there can be dependencies on Inst through Arg. This 1748 /// function only tests dependencies relevant for removing pairs of calls. 1749 static bool 1750 Depends(DependenceKind Flavor, Instruction *Inst, const Value *Arg, 1751 ProvenanceAnalysis &PA) { 1752 // If we've reached the definition of Arg, stop. 1753 if (Inst == Arg) 1754 return true; 1755 1756 switch (Flavor) { 1757 case NeedsPositiveRetainCount: { 1758 InstructionClass Class = GetInstructionClass(Inst); 1759 switch (Class) { 1760 case IC_AutoreleasepoolPop: 1761 case IC_AutoreleasepoolPush: 1762 case IC_None: 1763 return false; 1764 default: 1765 return CanUse(Inst, Arg, PA, Class); 1766 } 1767 } 1768 1769 case CanChangeRetainCount: { 1770 InstructionClass Class = GetInstructionClass(Inst); 1771 switch (Class) { 1772 case IC_AutoreleasepoolPop: 1773 // Conservatively assume this can decrement any count. 1774 return true; 1775 case IC_AutoreleasepoolPush: 1776 case IC_None: 1777 return false; 1778 default: 1779 return CanAlterRefCount(Inst, Arg, PA, Class); 1780 } 1781 } 1782 1783 case RetainAutoreleaseDep: 1784 switch (GetBasicInstructionClass(Inst)) { 1785 case IC_AutoreleasepoolPop: 1786 // Don't merge an objc_autorelease with an objc_retain inside a different 1787 // autoreleasepool scope. 1788 return true; 1789 case IC_Retain: 1790 case IC_RetainRV: 1791 // Check for a retain of the same pointer for merging. 1792 return GetObjCArg(Inst) == Arg; 1793 default: 1794 // Nothing else matters for objc_retainAutorelease formation. 1795 return false; 1796 } 1797 break; 1798 1799 case RetainAutoreleaseRVDep: { 1800 InstructionClass Class = GetBasicInstructionClass(Inst); 1801 switch (Class) { 1802 case IC_Retain: 1803 case IC_RetainRV: 1804 // Check for a retain of the same pointer for merging. 1805 return GetObjCArg(Inst) == Arg; 1806 default: 1807 // Anything that can autorelease interrupts 1808 // retainAutoreleaseReturnValue formation. 
      return CanInterruptRV(Class);
    }
    break;
  }

  case RetainRVDep:
    return CanInterruptRV(GetBasicInstructionClass(Inst));
  }

  llvm_unreachable("Invalid dependence flavor");
  return true;
}

/// FindDependencies - Walk up the CFG from StartPos (which is in StartBB) and
/// find local and non-local dependencies on Arg.
/// TODO: Cache results?
static void
FindDependencies(DependenceKind Flavor,
                 const Value *Arg,
                 BasicBlock *StartBB, Instruction *StartInst,
                 SmallPtrSet<Instruction *, 4> &DependingInstructions,
                 SmallPtrSet<const BasicBlock *, 4> &Visited,
                 ProvenanceAnalysis &PA) {
  BasicBlock::iterator StartPos = StartInst;

  SmallVector<std::pair<BasicBlock *, BasicBlock::iterator>, 4> Worklist;
  Worklist.push_back(std::make_pair(StartBB, StartPos));
  do {
    std::pair<BasicBlock *, BasicBlock::iterator> Pair =
      Worklist.pop_back_val();
    BasicBlock *LocalStartBB = Pair.first;
    BasicBlock::iterator LocalStartPos = Pair.second;
    BasicBlock::iterator StartBBBegin = LocalStartBB->begin();
    for (;;) {
      if (LocalStartPos == StartBBBegin) {
        pred_iterator PI(LocalStartBB), PE(LocalStartBB, false);
        if (PI == PE)
          // If we've reached the function entry, produce a null dependence.
          DependingInstructions.insert(0);
        else
          // Add the predecessors to the worklist.
          do {
            BasicBlock *PredBB = *PI;
            if (Visited.insert(PredBB))
              Worklist.push_back(std::make_pair(PredBB, PredBB->end()));
          } while (++PI != PE);
        break;
      }

      Instruction *Inst = --LocalStartPos;
      if (Depends(Flavor, Inst, Arg, PA)) {
        DependingInstructions.insert(Inst);
        break;
      }
    }
  } while (!Worklist.empty());

  // Determine whether the original StartBB post-dominates all of the blocks we
  // visited. If not, insert a sentinel indicating that most optimizations are
  // not safe.
  for (SmallPtrSet<const BasicBlock *, 4>::const_iterator I = Visited.begin(),
       E = Visited.end(); I != E; ++I) {
    const BasicBlock *BB = *I;
    if (BB == StartBB)
      continue;
    const TerminatorInst *TI = cast<TerminatorInst>(&BB->back());
    for (succ_const_iterator SI(TI), SE(TI, false); SI != SE; ++SI) {
      const BasicBlock *Succ = *SI;
      if (Succ != StartBB && !Visited.count(Succ)) {
        DependingInstructions.insert(reinterpret_cast<Instruction *>(-1));
        return;
      }
    }
  }
}

static bool isNullOrUndef(const Value *V) {
  return isa<ConstantPointerNull>(V) || isa<UndefValue>(V);
}

static bool isNoopInstruction(const Instruction *I) {
  return isa<BitCastInst>(I) ||
         (isa<GetElementPtrInst>(I) &&
          cast<GetElementPtrInst>(I)->hasAllZeroIndices());
}

/// OptimizeRetainCall - Turn objc_retain into
/// objc_retainAutoreleasedReturnValue if the operand is a return value.
void
ObjCARCOpt::OptimizeRetainCall(Function &F, Instruction *Retain) {
  CallSite CS(GetObjCArg(Retain));
  Instruction *Call = CS.getInstruction();
  if (!Call) return;
  if (Call->getParent() != Retain->getParent()) return;

  // Check that the call is next to the retain.
  BasicBlock::iterator I = Call;
  ++I;
  while (isNoopInstruction(I)) ++I;
  if (&*I != Retain)
    return;

  // Turn it into an objc_retainAutoreleasedReturnValue.
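  // As a rough illustration (hypothetical IR; @foo is a placeholder for any
  // call producing the retained pointer), the rewrite below transforms:
  //
  //   %call = call i8* @foo()
  //   %0 = call i8* @objc_retain(i8* %call)
  //
  // into:
  //
  //   %call = call i8* @foo()
  //   %0 = call i8* @objc_retainAutoreleasedReturnValue(i8* %call)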
  Changed = true;
  ++NumPeeps;
  cast<CallInst>(Retain)->setCalledFunction(getRetainRVCallee(F.getParent()));
}

/// OptimizeRetainRVCall - Turn objc_retainAutoreleasedReturnValue into
/// objc_retain if the operand is not a return value. Or, if it can be
/// paired with an objc_autoreleaseReturnValue, delete the pair and
/// return true.
bool
ObjCARCOpt::OptimizeRetainRVCall(Function &F, Instruction *RetainRV) {
  // Check for the argument being from an immediately preceding call.
  Value *Arg = GetObjCArg(RetainRV);
  CallSite CS(Arg);
  if (Instruction *Call = CS.getInstruction())
    if (Call->getParent() == RetainRV->getParent()) {
      BasicBlock::iterator I = Call;
      ++I;
      while (isNoopInstruction(I)) ++I;
      if (&*I == RetainRV)
        return false;
    }

  // Check for being preceded by an objc_autoreleaseReturnValue on the same
  // pointer. In this case, we can delete the pair.
  BasicBlock::iterator I = RetainRV, Begin = RetainRV->getParent()->begin();
  if (I != Begin) {
    do --I; while (I != Begin && isNoopInstruction(I));
    if (GetBasicInstructionClass(I) == IC_AutoreleaseRV &&
        GetObjCArg(I) == Arg) {
      Changed = true;
      ++NumPeeps;
      EraseInstruction(I);
      EraseInstruction(RetainRV);
      return true;
    }
  }

  // Turn it into a plain objc_retain.
  Changed = true;
  ++NumPeeps;
  cast<CallInst>(RetainRV)->setCalledFunction(getRetainCallee(F.getParent()));
  return false;
}

/// OptimizeAutoreleaseRVCall - Turn objc_autoreleaseReturnValue into
/// objc_autorelease if the result is not used as a return value.
void
ObjCARCOpt::OptimizeAutoreleaseRVCall(Function &F, Instruction *AutoreleaseRV) {
  // Check for a return of the pointer value.
  const Value *Ptr = GetObjCArg(AutoreleaseRV);
  SmallVector<const Value *, 2> Users;
  Users.push_back(Ptr);
  do {
    Ptr = Users.pop_back_val();
    for (Value::const_use_iterator UI = Ptr->use_begin(), UE = Ptr->use_end();
         UI != UE; ++UI) {
      const User *I = *UI;
      if (isa<ReturnInst>(I) || GetBasicInstructionClass(I) == IC_RetainRV)
        return;
      if (isa<BitCastInst>(I))
        Users.push_back(I);
    }
  } while (!Users.empty());

  Changed = true;
  ++NumPeeps;
  cast<CallInst>(AutoreleaseRV)->
    setCalledFunction(getAutoreleaseCallee(F.getParent()));
}

/// OptimizeIndividualCalls - Visit each call, one at a time, and make
/// simplifications without doing any additional analysis.
void ObjCARCOpt::OptimizeIndividualCalls(Function &F) {
  // Reset all the flags in preparation for recomputing them.
  UsedInThisFunction = 0;

  // Visit all objc_* calls in F.
  for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E; ) {
    Instruction *Inst = &*I++;
    InstructionClass Class = GetBasicInstructionClass(Inst);

    switch (Class) {
    default: break;

    // Delete no-op casts. These function calls have special semantics, but
    // the semantics are entirely implemented via lowering in the front-end,
    // so by the time they reach the optimizer, they are just no-op calls
    // which return their argument.
    //
    // There are gray areas here, as the ability to cast reference-counted
    // pointers to raw void* and back allows code to break ARC assumptions,
    // however these are currently considered to be unimportant.
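    //
    // For example (hypothetical IR), a cast such as:
    //
    //   %1 = call i8* @objc_retainedObject(i8* %0)
    //
    // simply returns %0 by this point, so it can be deleted and its uses
    // replaced with its argument.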
2005 case IC_NoopCast: 2006 Changed = true; 2007 ++NumNoops; 2008 EraseInstruction(Inst); 2009 continue; 2010 2011 // If the pointer-to-weak-pointer is null, it's undefined behavior. 2012 case IC_StoreWeak: 2013 case IC_LoadWeak: 2014 case IC_LoadWeakRetained: 2015 case IC_InitWeak: 2016 case IC_DestroyWeak: { 2017 CallInst *CI = cast<CallInst>(Inst); 2018 if (isNullOrUndef(CI->getArgOperand(0))) { 2019 Type *Ty = CI->getArgOperand(0)->getType(); 2020 new StoreInst(UndefValue::get(cast<PointerType>(Ty)->getElementType()), 2021 Constant::getNullValue(Ty), 2022 CI); 2023 CI->replaceAllUsesWith(UndefValue::get(CI->getType())); 2024 CI->eraseFromParent(); 2025 continue; 2026 } 2027 break; 2028 } 2029 case IC_CopyWeak: 2030 case IC_MoveWeak: { 2031 CallInst *CI = cast<CallInst>(Inst); 2032 if (isNullOrUndef(CI->getArgOperand(0)) || 2033 isNullOrUndef(CI->getArgOperand(1))) { 2034 Type *Ty = CI->getArgOperand(0)->getType(); 2035 new StoreInst(UndefValue::get(cast<PointerType>(Ty)->getElementType()), 2036 Constant::getNullValue(Ty), 2037 CI); 2038 CI->replaceAllUsesWith(UndefValue::get(CI->getType())); 2039 CI->eraseFromParent(); 2040 continue; 2041 } 2042 break; 2043 } 2044 case IC_Retain: 2045 OptimizeRetainCall(F, Inst); 2046 break; 2047 case IC_RetainRV: 2048 if (OptimizeRetainRVCall(F, Inst)) 2049 continue; 2050 break; 2051 case IC_AutoreleaseRV: 2052 OptimizeAutoreleaseRVCall(F, Inst); 2053 break; 2054 } 2055 2056 // objc_autorelease(x) -> objc_release(x) if x is otherwise unused. 2057 if (IsAutorelease(Class) && Inst->use_empty()) { 2058 CallInst *Call = cast<CallInst>(Inst); 2059 const Value *Arg = Call->getArgOperand(0); 2060 Arg = FindSingleUseIdentifiedObject(Arg); 2061 if (Arg) { 2062 Changed = true; 2063 ++NumAutoreleases; 2064 2065 // Create the declaration lazily. 2066 LLVMContext &C = Inst->getContext(); 2067 CallInst *NewCall = 2068 CallInst::Create(getReleaseCallee(F.getParent()), 2069 Call->getArgOperand(0), "", Call); 2070 NewCall->setMetadata(ImpreciseReleaseMDKind, 2071 MDNode::get(C, ArrayRef<Value *>())); 2072 EraseInstruction(Call); 2073 Inst = NewCall; 2074 Class = IC_Release; 2075 } 2076 } 2077 2078 // For functions which can never be passed stack arguments, add 2079 // a tail keyword. 2080 if (IsAlwaysTail(Class)) { 2081 Changed = true; 2082 cast<CallInst>(Inst)->setTailCall(); 2083 } 2084 2085 // Set nounwind as needed. 2086 if (IsNoThrow(Class)) { 2087 Changed = true; 2088 cast<CallInst>(Inst)->setDoesNotThrow(); 2089 } 2090 2091 if (!IsNoopOnNull(Class)) { 2092 UsedInThisFunction |= 1 << Class; 2093 continue; 2094 } 2095 2096 const Value *Arg = GetObjCArg(Inst); 2097 2098 // ARC calls with null are no-ops. Delete them. 2099 if (isNullOrUndef(Arg)) { 2100 Changed = true; 2101 ++NumNoops; 2102 EraseInstruction(Inst); 2103 continue; 2104 } 2105 2106 // Keep track of which of retain, release, autorelease, and retain_block 2107 // are actually present in this function. 2108 UsedInThisFunction |= 1 << Class; 2109 2110 // If Arg is a PHI, and one or more incoming values to the 2111 // PHI are null, and the call is control-equivalent to the PHI, and there 2112 // are no relevant side effects between the PHI and the call, the call 2113 // could be pushed up to just those paths with non-null incoming values. 2114 // For now, don't bother splitting critical edges for this. 
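    // For instance (hypothetical IR), given:
    //
    //   %phi = phi i8* [ null, %bb1 ], [ %obj, %bb2 ]
    //   call void @objc_release(i8* %phi)
    //
    // the release is a no-op along the %bb1 path, so it can be cloned into
    // just the predecessors supplying non-null values, here %bb2.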
2115 SmallVector<std::pair<Instruction *, const Value *>, 4> Worklist; 2116 Worklist.push_back(std::make_pair(Inst, Arg)); 2117 do { 2118 std::pair<Instruction *, const Value *> Pair = Worklist.pop_back_val(); 2119 Inst = Pair.first; 2120 Arg = Pair.second; 2121 2122 const PHINode *PN = dyn_cast<PHINode>(Arg); 2123 if (!PN) continue; 2124 2125 // Determine if the PHI has any null operands, or any incoming 2126 // critical edges. 2127 bool HasNull = false; 2128 bool HasCriticalEdges = false; 2129 for (unsigned i = 0, e = PN->getNumIncomingValues(); i != e; ++i) { 2130 Value *Incoming = 2131 StripPointerCastsAndObjCCalls(PN->getIncomingValue(i)); 2132 if (isNullOrUndef(Incoming)) 2133 HasNull = true; 2134 else if (cast<TerminatorInst>(PN->getIncomingBlock(i)->back()) 2135 .getNumSuccessors() != 1) { 2136 HasCriticalEdges = true; 2137 break; 2138 } 2139 } 2140 // If we have null operands and no critical edges, optimize. 2141 if (!HasCriticalEdges && HasNull) { 2142 SmallPtrSet<Instruction *, 4> DependingInstructions; 2143 SmallPtrSet<const BasicBlock *, 4> Visited; 2144 2145 // Check that there is nothing that cares about the reference 2146 // count between the call and the phi. 2147 FindDependencies(NeedsPositiveRetainCount, Arg, 2148 Inst->getParent(), Inst, 2149 DependingInstructions, Visited, PA); 2150 if (DependingInstructions.size() == 1 && 2151 *DependingInstructions.begin() == PN) { 2152 Changed = true; 2153 ++NumPartialNoops; 2154 // Clone the call into each predecessor that has a non-null value. 2155 CallInst *CInst = cast<CallInst>(Inst); 2156 Type *ParamTy = CInst->getArgOperand(0)->getType(); 2157 for (unsigned i = 0, e = PN->getNumIncomingValues(); i != e; ++i) { 2158 Value *Incoming = 2159 StripPointerCastsAndObjCCalls(PN->getIncomingValue(i)); 2160 if (!isNullOrUndef(Incoming)) { 2161 CallInst *Clone = cast<CallInst>(CInst->clone()); 2162 Value *Op = PN->getIncomingValue(i); 2163 Instruction *InsertPos = &PN->getIncomingBlock(i)->back(); 2164 if (Op->getType() != ParamTy) 2165 Op = new BitCastInst(Op, ParamTy, "", InsertPos); 2166 Clone->setArgOperand(0, Op); 2167 Clone->insertBefore(InsertPos); 2168 Worklist.push_back(std::make_pair(Clone, Incoming)); 2169 } 2170 } 2171 // Erase the original call. 2172 EraseInstruction(CInst); 2173 continue; 2174 } 2175 } 2176 } while (!Worklist.empty()); 2177 } 2178 } 2179 2180 /// CheckForCFGHazards - Check for critical edges, loop boundaries, irreducible 2181 /// control flow, or other CFG structures where moving code across the edge 2182 /// would result in it being executed more. 2183 void 2184 ObjCARCOpt::CheckForCFGHazards(const BasicBlock *BB, 2185 DenseMap<const BasicBlock *, BBState> &BBStates, 2186 BBState &MyStates) const { 2187 // If any top-down local-use or possible-dec has a succ which is earlier in 2188 // the sequence, forget it. 
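  // For example, if a pointer is in S_Use state at the bottom of this block
  // but a successor's bottom-up state has it back in S_CanRelease or S_None,
  // moving a release across that edge could over-release along some paths,
  // so the tentative sequence is abandoned unless it is known safe.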
  for (BBState::ptr_const_iterator I = MyStates.top_down_ptr_begin(),
       E = MyStates.top_down_ptr_end(); I != E; ++I)
    switch (I->second.GetSeq()) {
    default: break;
    case S_Use: {
      const Value *Arg = I->first;
      const TerminatorInst *TI = cast<TerminatorInst>(&BB->back());
      bool SomeSuccHasSame = false;
      bool AllSuccsHaveSame = true;
      PtrState &S = MyStates.getPtrTopDownState(Arg);
      for (succ_const_iterator SI(TI), SE(TI, false); SI != SE; ++SI) {
        PtrState &SuccS = BBStates[*SI].getPtrBottomUpState(Arg);
        switch (SuccS.GetSeq()) {
        case S_None:
        case S_CanRelease: {
          if (!S.RRI.KnownSafe && !SuccS.RRI.KnownSafe)
            S.ClearSequenceProgress();
          continue;
        }
        case S_Use:
          SomeSuccHasSame = true;
          break;
        case S_Stop:
        case S_Release:
        case S_MovableRelease:
          if (!S.RRI.KnownSafe && !SuccS.RRI.KnownSafe)
            AllSuccsHaveSame = false;
          break;
        case S_Retain:
          llvm_unreachable("bottom-up pointer in retain state!");
        }
      }
      // If the state at the other end of any of the successor edges
      // matches the current state, require all edges to match. This
      // guards against loops in the middle of a sequence.
      if (SomeSuccHasSame && !AllSuccsHaveSame)
        S.ClearSequenceProgress();
      break;
    }
    case S_CanRelease: {
      const Value *Arg = I->first;
      const TerminatorInst *TI = cast<TerminatorInst>(&BB->back());
      bool SomeSuccHasSame = false;
      bool AllSuccsHaveSame = true;
      PtrState &S = MyStates.getPtrTopDownState(Arg);
      for (succ_const_iterator SI(TI), SE(TI, false); SI != SE; ++SI) {
        PtrState &SuccS = BBStates[*SI].getPtrBottomUpState(Arg);
        switch (SuccS.GetSeq()) {
        case S_None: {
          if (!S.RRI.KnownSafe && !SuccS.RRI.KnownSafe)
            S.ClearSequenceProgress();
          continue;
        }
        case S_CanRelease:
          SomeSuccHasSame = true;
          break;
        case S_Stop:
        case S_Release:
        case S_MovableRelease:
        case S_Use:
          if (!S.RRI.KnownSafe && !SuccS.RRI.KnownSafe)
            AllSuccsHaveSame = false;
          break;
        case S_Retain:
          llvm_unreachable("bottom-up pointer in retain state!");
        }
      }
      // If the state at the other end of any of the successor edges
      // matches the current state, require all edges to match. This
      // guards against loops in the middle of a sequence.
      if (SomeSuccHasSame && !AllSuccsHaveSame)
        S.ClearSequenceProgress();
    }
    }
}

bool
ObjCARCOpt::VisitBottomUp(BasicBlock *BB,
                          DenseMap<const BasicBlock *, BBState> &BBStates,
                          MapVector<Value *, RRInfo> &Retains) {
  bool NestingDetected = false;
  BBState &MyStates = BBStates[BB];

  // Merge the states from each successor to compute the initial state
  // for the current block.
  const TerminatorInst *TI = cast<TerminatorInst>(&BB->back());
  succ_const_iterator SI(TI), SE(TI, false);
  if (SI == SE)
    MyStates.SetAsExit();
  else
    do {
      const BasicBlock *Succ = *SI++;
      if (Succ == BB)
        continue;
      DenseMap<const BasicBlock *, BBState>::iterator I = BBStates.find(Succ);
      // If we haven't seen this node yet, then we've found a CFG cycle.
      // Be optimistic here; it's CheckForCFGHazards' job to detect trouble.
      if (I == BBStates.end())
        continue;
      MyStates.InitFromSucc(I->second);
      while (SI != SE) {
        Succ = *SI++;
        if (Succ != BB) {
          I = BBStates.find(Succ);
          if (I != BBStates.end())
            MyStates.MergeSucc(I->second);
        }
      }
      break;
    } while (SI != SE);

  // Visit all the instructions, bottom-up.
  for (BasicBlock::iterator I = BB->end(), E = BB->begin(); I != E; --I) {
    Instruction *Inst = llvm::prior(I);
    InstructionClass Class = GetInstructionClass(Inst);
    const Value *Arg = 0;

    switch (Class) {
    case IC_Release: {
      Arg = GetObjCArg(Inst);

      PtrState &S = MyStates.getPtrBottomUpState(Arg);

      // If we see two releases in a row on the same pointer, make
      // a note, and we'll circle back to revisit it after we've
      // hopefully eliminated the second release, which may allow us to
      // eliminate the first release too.
      // Theoretically we could implement removal of nested retain+release
      // pairs by making PtrState hold a stack of states, but this is
      // simple and avoids adding overhead for the non-nested case.
      if (S.GetSeq() == S_Release || S.GetSeq() == S_MovableRelease)
        NestingDetected = true;

      S.SetSeqToRelease(Inst->getMetadata(ImpreciseReleaseMDKind));
      S.RRI.clear();
      S.RRI.KnownSafe = S.IsKnownNested() || S.IsKnownIncremented();
      S.RRI.IsTailCallRelease = cast<CallInst>(Inst)->isTailCall();
      S.RRI.Calls.insert(Inst);

      S.IncrementRefCount();
      S.IncrementNestCount();
      break;
    }
    case IC_RetainBlock:
    case IC_Retain:
    case IC_RetainRV: {
      Arg = GetObjCArg(Inst);

      PtrState &S = MyStates.getPtrBottomUpState(Arg);
      S.DecrementRefCount();
      S.SetAtLeastOneRefCount();
      S.DecrementNestCount();

      // An objc_retainBlock call with just a use still needs to be kept,
      // because it may be copying a block from the stack to the heap.
      if (Class == IC_RetainBlock && S.GetSeq() == S_Use)
        S.SetSeq(S_CanRelease);

      switch (S.GetSeq()) {
      case S_Stop:
      case S_Release:
      case S_MovableRelease:
      case S_Use:
        S.RRI.ReverseInsertPts.clear();
        // FALL THROUGH
      case S_CanRelease:
        // Don't do retain+release tracking for IC_RetainRV, because it's
        // better to let it remain as the first instruction after a call.
        if (Class != IC_RetainRV) {
          S.RRI.IsRetainBlock = Class == IC_RetainBlock;
          Retains[Inst] = S.RRI;
        }
        S.ClearSequenceProgress();
        break;
      case S_None:
        break;
      case S_Retain:
        llvm_unreachable("bottom-up pointer in retain state!");
      }
      continue;
    }
    case IC_AutoreleasepoolPop:
      // Conservatively, clear MyStates for all known pointers.
      MyStates.clearBottomUpPointers();
      continue;
    case IC_AutoreleasepoolPush:
    case IC_None:
      // These are irrelevant.
      continue;
    default:
      break;
    }

    // Consider any other possible effects of this instruction on each
    // pointer being tracked.
    for (BBState::ptr_iterator MI = MyStates.bottom_up_ptr_begin(),
         ME = MyStates.bottom_up_ptr_end(); MI != ME; ++MI) {
      const Value *Ptr = MI->first;
      if (Ptr == Arg)
        continue; // Handled above.
      PtrState &S = MI->second;
      Sequence Seq = S.GetSeq();

      // Check for possible releases.
      if (CanAlterRefCount(Inst, Ptr, PA, Class)) {
        S.DecrementRefCount();
        switch (Seq) {
        case S_Use:
          S.SetSeq(S_CanRelease);
          continue;
        case S_CanRelease:
        case S_Release:
        case S_MovableRelease:
        case S_Stop:
        case S_None:
          break;
        case S_Retain:
          llvm_unreachable("bottom-up pointer in retain state!");
        }
      }

      // Check for possible direct uses.
      switch (Seq) {
      case S_Release:
      case S_MovableRelease:
        if (CanUse(Inst, Ptr, PA, Class)) {
          assert(S.RRI.ReverseInsertPts.empty());
          S.RRI.ReverseInsertPts.insert(Inst);
          S.SetSeq(S_Use);
        } else if (Seq == S_Release &&
                   (Class == IC_User || Class == IC_CallOrUser)) {
          // Non-movable releases depend on any possible objc pointer use.
          S.SetSeq(S_Stop);
          assert(S.RRI.ReverseInsertPts.empty());
          S.RRI.ReverseInsertPts.insert(Inst);
        }
        break;
      case S_Stop:
        if (CanUse(Inst, Ptr, PA, Class))
          S.SetSeq(S_Use);
        break;
      case S_CanRelease:
      case S_Use:
      case S_None:
        break;
      case S_Retain:
        llvm_unreachable("bottom-up pointer in retain state!");
      }
    }
  }

  return NestingDetected;
}

bool
ObjCARCOpt::VisitTopDown(BasicBlock *BB,
                         DenseMap<const BasicBlock *, BBState> &BBStates,
                         DenseMap<Value *, RRInfo> &Releases) {
  bool NestingDetected = false;
  BBState &MyStates = BBStates[BB];

  // Merge the states from each predecessor to compute the initial state
  // for the current block.
  const_pred_iterator PI(BB), PE(BB, false);
  if (PI == PE)
    MyStates.SetAsEntry();
  else
    do {
      const BasicBlock *Pred = *PI++;
      if (Pred == BB)
        continue;
      DenseMap<const BasicBlock *, BBState>::iterator I = BBStates.find(Pred);
      assert(I != BBStates.end());
      // If we haven't seen this node yet, then we've found a CFG cycle.
      // Be optimistic here; it's CheckForCFGHazards' job to detect trouble.
      if (!I->second.isVisitedTopDown())
        continue;
      MyStates.InitFromPred(I->second);
      while (PI != PE) {
        Pred = *PI++;
        if (Pred != BB) {
          I = BBStates.find(Pred);
          assert(I != BBStates.end());
          if (I->second.isVisitedTopDown())
            MyStates.MergePred(I->second);
        }
      }
      break;
    } while (PI != PE);

  // Visit all the instructions, top-down.
  for (BasicBlock::iterator I = BB->begin(), E = BB->end(); I != E; ++I) {
    Instruction *Inst = I;
    InstructionClass Class = GetInstructionClass(Inst);
    const Value *Arg = 0;

    switch (Class) {
    case IC_RetainBlock:
    case IC_Retain:
    case IC_RetainRV: {
      Arg = GetObjCArg(Inst);

      PtrState &S = MyStates.getPtrTopDownState(Arg);

      // Don't do retain+release tracking for IC_RetainRV, because it's
      // better to let it remain as the first instruction after a call.
      if (Class != IC_RetainRV) {
        // If we see two retains in a row on the same pointer, make
        // a note, and we'll circle back to revisit it after we've
        // hopefully eliminated the second retain, which may allow us to
        // eliminate the first retain too.
        // Theoretically we could implement removal of nested retain+release
        // pairs by making PtrState hold a stack of states, but this is
        // simple and avoids adding overhead for the non-nested case.
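        //
        // E.g. (hypothetical IR) in a nested pattern such as:
        //
        //   call i8* @objc_retain(i8* %x)
        //   call i8* @objc_retain(i8* %x)
        //   ...
        //   call void @objc_release(i8* %x)
        //   call void @objc_release(i8* %x)
        //
        // the inner pair must be removed on one iteration before the outer
        // pair becomes visible to the next.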
2502 if (S.GetSeq() == S_Retain) 2503 NestingDetected = true; 2504 2505 S.SetSeq(S_Retain); 2506 S.RRI.clear(); 2507 S.RRI.IsRetainBlock = Class == IC_RetainBlock; 2508 // Don't check S.IsKnownIncremented() here because it's not 2509 // sufficient. 2510 S.RRI.KnownSafe = S.IsKnownNested(); 2511 S.RRI.Calls.insert(Inst); 2512 } 2513 2514 S.SetAtLeastOneRefCount(); 2515 S.IncrementRefCount(); 2516 S.IncrementNestCount(); 2517 continue; 2518 } 2519 case IC_Release: { 2520 Arg = GetObjCArg(Inst); 2521 2522 PtrState &S = MyStates.getPtrTopDownState(Arg); 2523 S.DecrementRefCount(); 2524 S.DecrementNestCount(); 2525 2526 switch (S.GetSeq()) { 2527 case S_Retain: 2528 case S_CanRelease: 2529 S.RRI.ReverseInsertPts.clear(); 2530 // FALL THROUGH 2531 case S_Use: 2532 S.RRI.ReleaseMetadata = Inst->getMetadata(ImpreciseReleaseMDKind); 2533 S.RRI.IsTailCallRelease = cast<CallInst>(Inst)->isTailCall(); 2534 Releases[Inst] = S.RRI; 2535 S.ClearSequenceProgress(); 2536 break; 2537 case S_None: 2538 break; 2539 case S_Stop: 2540 case S_Release: 2541 case S_MovableRelease: 2542 llvm_unreachable("top-down pointer in release state!"); 2543 } 2544 break; 2545 } 2546 case IC_AutoreleasepoolPop: 2547 // Conservatively, clear MyStates for all known pointers. 2548 MyStates.clearTopDownPointers(); 2549 continue; 2550 case IC_AutoreleasepoolPush: 2551 case IC_None: 2552 // These are irrelevant. 2553 continue; 2554 default: 2555 break; 2556 } 2557 2558 // Consider any other possible effects of this instruction on each 2559 // pointer being tracked. 2560 for (BBState::ptr_iterator MI = MyStates.top_down_ptr_begin(), 2561 ME = MyStates.top_down_ptr_end(); MI != ME; ++MI) { 2562 const Value *Ptr = MI->first; 2563 if (Ptr == Arg) 2564 continue; // Handled above. 2565 PtrState &S = MI->second; 2566 Sequence Seq = S.GetSeq(); 2567 2568 // Check for possible releases. 2569 if (CanAlterRefCount(Inst, Ptr, PA, Class)) { 2570 S.DecrementRefCount(); 2571 switch (Seq) { 2572 case S_Retain: 2573 S.SetSeq(S_CanRelease); 2574 assert(S.RRI.ReverseInsertPts.empty()); 2575 S.RRI.ReverseInsertPts.insert(Inst); 2576 2577 // One call can't cause a transition from S_Retain to S_CanRelease 2578 // and S_CanRelease to S_Use. If we've made the first transition, 2579 // we're done. 2580 continue; 2581 case S_Use: 2582 case S_CanRelease: 2583 case S_None: 2584 break; 2585 case S_Stop: 2586 case S_Release: 2587 case S_MovableRelease: 2588 llvm_unreachable("top-down pointer in release state!"); 2589 } 2590 } 2591 2592 // Check for possible direct uses. 2593 switch (Seq) { 2594 case S_CanRelease: 2595 if (CanUse(Inst, Ptr, PA, Class)) 2596 S.SetSeq(S_Use); 2597 break; 2598 case S_Retain: 2599 // An objc_retainBlock call may be responsible for copying the block 2600 // data from the stack to the heap. Model this by moving it straight 2601 // from S_Retain to S_Use. 2602 if (S.RRI.IsRetainBlock && 2603 CanUse(Inst, Ptr, PA, Class)) { 2604 assert(S.RRI.ReverseInsertPts.empty()); 2605 S.RRI.ReverseInsertPts.insert(Inst); 2606 S.SetSeq(S_Use); 2607 } 2608 break; 2609 case S_Use: 2610 case S_None: 2611 break; 2612 case S_Stop: 2613 case S_Release: 2614 case S_MovableRelease: 2615 llvm_unreachable("top-down pointer in release state!"); 2616 } 2617 } 2618 } 2619 2620 CheckForCFGHazards(BB, BBStates, MyStates); 2621 return NestingDetected; 2622 } 2623 2624 // Visit - Visit the function both top-down and bottom-up. 
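// The bottom-up traversal populates Retains with candidate retain calls and
// the top-down traversal populates Releases with candidate release calls;
// PerformCodePlacement then connects the two to form removable pairs.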
2625 bool 2626 ObjCARCOpt::Visit(Function &F, 2627 DenseMap<const BasicBlock *, BBState> &BBStates, 2628 MapVector<Value *, RRInfo> &Retains, 2629 DenseMap<Value *, RRInfo> &Releases) { 2630 // Use reverse-postorder on the reverse CFG for bottom-up, because we 2631 // magically know that loops will be well behaved, i.e. they won't repeatedly 2632 // call retain on a single pointer without doing a release. We can't use 2633 // ReversePostOrderTraversal here because we want to walk up from each 2634 // function exit point. 2635 SmallPtrSet<BasicBlock *, 16> Visited; 2636 SmallVector<std::pair<BasicBlock *, pred_iterator>, 16> Stack; 2637 SmallVector<BasicBlock *, 16> Order; 2638 for (Function::iterator I = F.begin(), E = F.end(); I != E; ++I) { 2639 BasicBlock *BB = I; 2640 if (BB->getTerminator()->getNumSuccessors() == 0) 2641 Stack.push_back(std::make_pair(BB, pred_begin(BB))); 2642 } 2643 while (!Stack.empty()) { 2644 pred_iterator End = pred_end(Stack.back().first); 2645 while (Stack.back().second != End) { 2646 BasicBlock *BB = *Stack.back().second++; 2647 if (Visited.insert(BB)) 2648 Stack.push_back(std::make_pair(BB, pred_begin(BB))); 2649 } 2650 Order.push_back(Stack.pop_back_val().first); 2651 } 2652 bool BottomUpNestingDetected = false; 2653 for (SmallVectorImpl<BasicBlock *>::const_reverse_iterator I = 2654 Order.rbegin(), E = Order.rend(); I != E; ++I) { 2655 BasicBlock *BB = *I; 2656 BottomUpNestingDetected |= VisitBottomUp(BB, BBStates, Retains); 2657 } 2658 2659 // Use regular reverse-postorder for top-down. 2660 bool TopDownNestingDetected = false; 2661 typedef ReversePostOrderTraversal<Function *> RPOTType; 2662 RPOTType RPOT(&F); 2663 for (RPOTType::rpo_iterator I = RPOT.begin(), E = RPOT.end(); I != E; ++I) { 2664 BasicBlock *BB = *I; 2665 TopDownNestingDetected |= VisitTopDown(BB, BBStates, Releases); 2666 } 2667 2668 return TopDownNestingDetected && BottomUpNestingDetected; 2669 } 2670 2671 /// MoveCalls - Move the calls in RetainsToMove and ReleasesToMove. 2672 void ObjCARCOpt::MoveCalls(Value *Arg, 2673 RRInfo &RetainsToMove, 2674 RRInfo &ReleasesToMove, 2675 MapVector<Value *, RRInfo> &Retains, 2676 DenseMap<Value *, RRInfo> &Releases, 2677 SmallVectorImpl<Instruction *> &DeadInsts, 2678 Module *M) { 2679 Type *ArgTy = Arg->getType(); 2680 Type *ParamTy = PointerType::getUnqual(Type::getInt8Ty(ArgTy->getContext())); 2681 2682 // Insert the new retain and release calls. 2683 for (SmallPtrSet<Instruction *, 2>::const_iterator 2684 PI = ReleasesToMove.ReverseInsertPts.begin(), 2685 PE = ReleasesToMove.ReverseInsertPts.end(); PI != PE; ++PI) { 2686 Instruction *InsertPt = *PI; 2687 Value *MyArg = ArgTy == ParamTy ? Arg : 2688 new BitCastInst(Arg, ParamTy, "", InsertPt); 2689 CallInst *Call = 2690 CallInst::Create(RetainsToMove.IsRetainBlock ? 2691 getRetainBlockCallee(M) : getRetainCallee(M), 2692 MyArg, "", InsertPt); 2693 Call->setDoesNotThrow(); 2694 if (!RetainsToMove.IsRetainBlock) 2695 Call->setTailCall(); 2696 } 2697 for (SmallPtrSet<Instruction *, 2>::const_iterator 2698 PI = RetainsToMove.ReverseInsertPts.begin(), 2699 PE = RetainsToMove.ReverseInsertPts.end(); PI != PE; ++PI) { 2700 Instruction *LastUse = *PI; 2701 Instruction *InsertPts[] = { 0, 0, 0 }; 2702 if (InvokeInst *II = dyn_cast<InvokeInst>(LastUse)) { 2703 // We can't insert code immediately after an invoke instruction, so 2704 // insert code at the beginning of both successor blocks instead. 
2705 // The invoke's return value isn't available in the unwind block, 2706 // but our releases will never depend on it, because they must be 2707 // paired with retains from before the invoke. 2708 InsertPts[0] = II->getNormalDest()->getFirstInsertionPt(); 2709 InsertPts[1] = II->getUnwindDest()->getFirstInsertionPt(); 2710 } else { 2711 // Insert code immediately after the last use. 2712 InsertPts[0] = llvm::next(BasicBlock::iterator(LastUse)); 2713 } 2714 2715 for (Instruction **I = InsertPts; *I; ++I) { 2716 Instruction *InsertPt = *I; 2717 Value *MyArg = ArgTy == ParamTy ? Arg : 2718 new BitCastInst(Arg, ParamTy, "", InsertPt); 2719 CallInst *Call = CallInst::Create(getReleaseCallee(M), MyArg, 2720 "", InsertPt); 2721 // Attach a clang.imprecise_release metadata tag, if appropriate. 2722 if (MDNode *M = ReleasesToMove.ReleaseMetadata) 2723 Call->setMetadata(ImpreciseReleaseMDKind, M); 2724 Call->setDoesNotThrow(); 2725 if (ReleasesToMove.IsTailCallRelease) 2726 Call->setTailCall(); 2727 } 2728 } 2729 2730 // Delete the original retain and release calls. 2731 for (SmallPtrSet<Instruction *, 2>::const_iterator 2732 AI = RetainsToMove.Calls.begin(), 2733 AE = RetainsToMove.Calls.end(); AI != AE; ++AI) { 2734 Instruction *OrigRetain = *AI; 2735 Retains.blot(OrigRetain); 2736 DeadInsts.push_back(OrigRetain); 2737 } 2738 for (SmallPtrSet<Instruction *, 2>::const_iterator 2739 AI = ReleasesToMove.Calls.begin(), 2740 AE = ReleasesToMove.Calls.end(); AI != AE; ++AI) { 2741 Instruction *OrigRelease = *AI; 2742 Releases.erase(OrigRelease); 2743 DeadInsts.push_back(OrigRelease); 2744 } 2745 } 2746 2747 bool 2748 ObjCARCOpt::PerformCodePlacement(DenseMap<const BasicBlock *, BBState> 2749 &BBStates, 2750 MapVector<Value *, RRInfo> &Retains, 2751 DenseMap<Value *, RRInfo> &Releases, 2752 Module *M) { 2753 bool AnyPairsCompletelyEliminated = false; 2754 RRInfo RetainsToMove; 2755 RRInfo ReleasesToMove; 2756 SmallVector<Instruction *, 4> NewRetains; 2757 SmallVector<Instruction *, 4> NewReleases; 2758 SmallVector<Instruction *, 8> DeadInsts; 2759 2760 for (MapVector<Value *, RRInfo>::const_iterator I = Retains.begin(), 2761 E = Retains.end(); I != E; ++I) { 2762 Value *V = I->first; 2763 if (!V) continue; // blotted 2764 2765 Instruction *Retain = cast<Instruction>(V); 2766 Value *Arg = GetObjCArg(Retain); 2767 2768 // If the object being released is in static storage, we know it's 2769 // not being managed by ObjC reference counting, so we can delete pairs 2770 // regardless of what possible decrements or uses lie between them. 2771 bool KnownSafe = isa<Constant>(Arg); 2772 2773 // Same for stack storage, unless this is an objc_retainBlock call, 2774 // which is responsible for copying the block data from the stack to 2775 // the heap. 2776 if (!I->second.IsRetainBlock && isa<AllocaInst>(Arg)) 2777 KnownSafe = true; 2778 2779 // A constant pointer can't be pointing to an object on the heap. It may 2780 // be reference-counted, but it won't be deleted. 2781 if (const LoadInst *LI = dyn_cast<LoadInst>(Arg)) 2782 if (const GlobalVariable *GV = 2783 dyn_cast<GlobalVariable>( 2784 StripPointerCastsAndObjCCalls(LI->getPointerOperand()))) 2785 if (GV->isConstant()) 2786 KnownSafe = true; 2787 2788 // If a pair happens in a region where it is known that the reference count 2789 // is already incremented, we can similarly ignore possible decrements. 
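    // For instance (hypothetical IR), in:
    //
    //   %g = load i8** @const_global
    //   call i8* @objc_retain(i8* %g)
    //   ...
    //   call void @objc_release(i8* %g)
    //
    // the object can't be deallocated out from under us, so the pair can be
    // removed without examining what happens in between.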
    bool KnownSafeTD = true, KnownSafeBU = true;

    // Connect the dots between the bottom-up-collected RetainsToMove and
    // the top-down-collected ReleasesToMove to form sets of related calls.
    // This is an iterative process so that we connect multiple releases
    // to multiple retains if needed.
    unsigned OldDelta = 0;
    unsigned NewDelta = 0;
    unsigned OldCount = 0;
    unsigned NewCount = 0;
    bool FirstRelease = true;
    bool FirstRetain = true;
    NewRetains.push_back(Retain);
    for (;;) {
      for (SmallVectorImpl<Instruction *>::const_iterator
           NI = NewRetains.begin(), NE = NewRetains.end(); NI != NE; ++NI) {
        Instruction *NewRetain = *NI;
        MapVector<Value *, RRInfo>::const_iterator It = Retains.find(NewRetain);
        assert(It != Retains.end());
        const RRInfo &NewRetainRRI = It->second;
        KnownSafeTD &= NewRetainRRI.KnownSafe;
        for (SmallPtrSet<Instruction *, 2>::const_iterator
             LI = NewRetainRRI.Calls.begin(),
             LE = NewRetainRRI.Calls.end(); LI != LE; ++LI) {
          Instruction *NewRetainRelease = *LI;
          DenseMap<Value *, RRInfo>::const_iterator Jt =
            Releases.find(NewRetainRelease);
          if (Jt == Releases.end())
            goto next_retain;
          const RRInfo &NewRetainReleaseRRI = Jt->second;
          assert(NewRetainReleaseRRI.Calls.count(NewRetain));
          if (ReleasesToMove.Calls.insert(NewRetainRelease)) {
            OldDelta -=
              BBStates[NewRetainRelease->getParent()].GetAllPathCount();

            // Merge the ReleaseMetadata and IsTailCallRelease values.
            if (FirstRelease) {
              ReleasesToMove.ReleaseMetadata =
                NewRetainReleaseRRI.ReleaseMetadata;
              ReleasesToMove.IsTailCallRelease =
                NewRetainReleaseRRI.IsTailCallRelease;
              FirstRelease = false;
            } else {
              if (ReleasesToMove.ReleaseMetadata !=
                  NewRetainReleaseRRI.ReleaseMetadata)
                ReleasesToMove.ReleaseMetadata = 0;
              if (ReleasesToMove.IsTailCallRelease !=
                  NewRetainReleaseRRI.IsTailCallRelease)
                ReleasesToMove.IsTailCallRelease = false;
            }

            // Collect the optimal insertion points.
            if (!KnownSafe)
              for (SmallPtrSet<Instruction *, 2>::const_iterator
                   RI = NewRetainReleaseRRI.ReverseInsertPts.begin(),
                   RE = NewRetainReleaseRRI.ReverseInsertPts.end();
                   RI != RE; ++RI) {
                Instruction *RIP = *RI;
                if (ReleasesToMove.ReverseInsertPts.insert(RIP))
                  NewDelta -= BBStates[RIP->getParent()].GetAllPathCount();
              }
            NewReleases.push_back(NewRetainRelease);
          }
        }
      }
      NewRetains.clear();
      if (NewReleases.empty()) break;

      // Back the other way.
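      // That is, for each release just added to ReleasesToMove, pull in every
      // retain recorded against it, growing both sets until a fixed point is
      // reached or the sequences prove unmergeable.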
      for (SmallVectorImpl<Instruction *>::const_iterator
           NI = NewReleases.begin(), NE = NewReleases.end(); NI != NE; ++NI) {
        Instruction *NewRelease = *NI;
        DenseMap<Value *, RRInfo>::const_iterator It =
          Releases.find(NewRelease);
        assert(It != Releases.end());
        const RRInfo &NewReleaseRRI = It->second;
        KnownSafeBU &= NewReleaseRRI.KnownSafe;
        for (SmallPtrSet<Instruction *, 2>::const_iterator
             LI = NewReleaseRRI.Calls.begin(),
             LE = NewReleaseRRI.Calls.end(); LI != LE; ++LI) {
          Instruction *NewReleaseRetain = *LI;
          MapVector<Value *, RRInfo>::const_iterator Jt =
            Retains.find(NewReleaseRetain);
          if (Jt == Retains.end())
            goto next_retain;
          const RRInfo &NewReleaseRetainRRI = Jt->second;
          assert(NewReleaseRetainRRI.Calls.count(NewRelease));
          if (RetainsToMove.Calls.insert(NewReleaseRetain)) {
            unsigned PathCount =
              BBStates[NewReleaseRetain->getParent()].GetAllPathCount();
            OldDelta += PathCount;
            OldCount += PathCount;

            // Merge the IsRetainBlock values.
            if (FirstRetain) {
              RetainsToMove.IsRetainBlock = NewReleaseRetainRRI.IsRetainBlock;
              FirstRetain = false;
            } else if (RetainsToMove.IsRetainBlock !=
                       NewReleaseRetainRRI.IsRetainBlock)
              // It's not possible to merge the sequences if one uses
              // objc_retain and the other uses objc_retainBlock.
              goto next_retain;

            // Collect the optimal insertion points.
            if (!KnownSafe)
              for (SmallPtrSet<Instruction *, 2>::const_iterator
                   RI = NewReleaseRetainRRI.ReverseInsertPts.begin(),
                   RE = NewReleaseRetainRRI.ReverseInsertPts.end();
                   RI != RE; ++RI) {
                Instruction *RIP = *RI;
                if (RetainsToMove.ReverseInsertPts.insert(RIP)) {
                  PathCount = BBStates[RIP->getParent()].GetAllPathCount();
                  NewDelta += PathCount;
                  NewCount += PathCount;
                }
              }
            NewRetains.push_back(NewReleaseRetain);
          }
        }
      }
      NewReleases.clear();
      if (NewRetains.empty()) break;
    }

    // If the pointer is known incremented or nested, we can safely delete the
    // pair regardless of what's between them.
    if (KnownSafeTD || KnownSafeBU) {
      RetainsToMove.ReverseInsertPts.clear();
      ReleasesToMove.ReverseInsertPts.clear();
      NewCount = 0;
    } else {
      // Determine whether the new insertion points we computed preserve the
      // balance of retain and release calls through the program.
      // TODO: If the fully aggressive solution isn't valid, try to find a
      // less aggressive solution which is.
      if (NewDelta != 0)
        goto next_retain;
    }

    // Determine whether the original call points are balanced in the retain
    // and release calls through the program. If not, conservatively don't
    // touch them.
    // TODO: It's theoretically possible to do code motion in this case, as
    // long as the existing imbalances are maintained.
    if (OldDelta != 0)
      goto next_retain;

    // Ok, everything checks out and we're all set. Let's move some code!
2938 Changed = true; 2939 AnyPairsCompletelyEliminated = NewCount == 0; 2940 NumRRs += OldCount - NewCount; 2941 MoveCalls(Arg, RetainsToMove, ReleasesToMove, 2942 Retains, Releases, DeadInsts, M); 2943 2944 next_retain: 2945 NewReleases.clear(); 2946 NewRetains.clear(); 2947 RetainsToMove.clear(); 2948 ReleasesToMove.clear(); 2949 } 2950 2951 // Now that we're done moving everything, we can delete the newly dead 2952 // instructions, as we no longer need them as insert points. 2953 while (!DeadInsts.empty()) 2954 EraseInstruction(DeadInsts.pop_back_val()); 2955 2956 return AnyPairsCompletelyEliminated; 2957 } 2958 2959 /// OptimizeWeakCalls - Weak pointer optimizations. 2960 void ObjCARCOpt::OptimizeWeakCalls(Function &F) { 2961 // First, do memdep-style RLE and S2L optimizations. We can't use memdep 2962 // itself because it uses AliasAnalysis and we need to do provenance 2963 // queries instead. 2964 for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E; ) { 2965 Instruction *Inst = &*I++; 2966 InstructionClass Class = GetBasicInstructionClass(Inst); 2967 if (Class != IC_LoadWeak && Class != IC_LoadWeakRetained) 2968 continue; 2969 2970 // Delete objc_loadWeak calls with no users. 2971 if (Class == IC_LoadWeak && Inst->use_empty()) { 2972 Inst->eraseFromParent(); 2973 continue; 2974 } 2975 2976 // TODO: For now, just look for an earlier available version of this value 2977 // within the same block. Theoretically, we could do memdep-style non-local 2978 // analysis too, but that would want caching. A better approach would be to 2979 // use the technique that EarlyCSE uses. 2980 inst_iterator Current = llvm::prior(I); 2981 BasicBlock *CurrentBB = Current.getBasicBlockIterator(); 2982 for (BasicBlock::iterator B = CurrentBB->begin(), 2983 J = Current.getInstructionIterator(); 2984 J != B; --J) { 2985 Instruction *EarlierInst = &*llvm::prior(J); 2986 InstructionClass EarlierClass = GetInstructionClass(EarlierInst); 2987 switch (EarlierClass) { 2988 case IC_LoadWeak: 2989 case IC_LoadWeakRetained: { 2990 // If this is loading from the same pointer, replace this load's value 2991 // with that one. 2992 CallInst *Call = cast<CallInst>(Inst); 2993 CallInst *EarlierCall = cast<CallInst>(EarlierInst); 2994 Value *Arg = Call->getArgOperand(0); 2995 Value *EarlierArg = EarlierCall->getArgOperand(0); 2996 switch (PA.getAA()->alias(Arg, EarlierArg)) { 2997 case AliasAnalysis::MustAlias: 2998 Changed = true; 2999 // If the load has a builtin retain, insert a plain retain for it. 3000 if (Class == IC_LoadWeakRetained) { 3001 CallInst *CI = 3002 CallInst::Create(getRetainCallee(F.getParent()), EarlierCall, 3003 "", Call); 3004 CI->setTailCall(); 3005 } 3006 // Zap the fully redundant load. 3007 Call->replaceAllUsesWith(EarlierCall); 3008 Call->eraseFromParent(); 3009 goto clobbered; 3010 case AliasAnalysis::MayAlias: 3011 case AliasAnalysis::PartialAlias: 3012 goto clobbered; 3013 case AliasAnalysis::NoAlias: 3014 break; 3015 } 3016 break; 3017 } 3018 case IC_StoreWeak: 3019 case IC_InitWeak: { 3020 // If this is storing to the same pointer and has the same size etc. 3021 // replace this load's value with the stored value. 
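        // E.g. (hypothetical IR), in:
        //
        //   call i8* @objc_storeWeak(i8** %p, i8* %v)
        //   %x = call i8* @objc_loadWeak(i8** %p)
        //
        // %x can be replaced with %v when the two pointers must-alias.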
        CallInst *Call = cast<CallInst>(Inst);
        CallInst *EarlierCall = cast<CallInst>(EarlierInst);
        Value *Arg = Call->getArgOperand(0);
        Value *EarlierArg = EarlierCall->getArgOperand(0);
        switch (PA.getAA()->alias(Arg, EarlierArg)) {
        case AliasAnalysis::MustAlias:
          Changed = true;
          // If the load has a builtin retain, insert a plain retain for it.
          if (Class == IC_LoadWeakRetained) {
            CallInst *CI =
              CallInst::Create(getRetainCallee(F.getParent()), EarlierCall,
                               "", Call);
            CI->setTailCall();
          }
          // Zap the fully redundant load.
          Call->replaceAllUsesWith(EarlierCall->getArgOperand(1));
          Call->eraseFromParent();
          goto clobbered;
        case AliasAnalysis::MayAlias:
        case AliasAnalysis::PartialAlias:
          goto clobbered;
        case AliasAnalysis::NoAlias:
          break;
        }
        break;
      }
      case IC_MoveWeak:
      case IC_CopyWeak:
        // TODO: Grab the copied value.
        goto clobbered;
      case IC_AutoreleasepoolPush:
      case IC_None:
      case IC_User:
        // Weak pointers are only modified through the weak entry points
        // (and arbitrary calls, which could call the weak entry points).
        break;
      default:
        // Anything else could modify the weak pointer.
        goto clobbered;
      }
    }
  clobbered:;
  }

  // Then, for each destroyWeak with an alloca operand, check to see if
  // the alloca and all its users can be zapped.
  for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E; ) {
    Instruction *Inst = &*I++;
    InstructionClass Class = GetBasicInstructionClass(Inst);
    if (Class != IC_DestroyWeak)
      continue;

    CallInst *Call = cast<CallInst>(Inst);
    Value *Arg = Call->getArgOperand(0);
    if (AllocaInst *Alloca = dyn_cast<AllocaInst>(Arg)) {
      for (Value::use_iterator UI = Alloca->use_begin(),
           UE = Alloca->use_end(); UI != UE; ++UI) {
        Instruction *UserInst = cast<Instruction>(*UI);
        switch (GetBasicInstructionClass(UserInst)) {
        case IC_InitWeak:
        case IC_StoreWeak:
        case IC_DestroyWeak:
          continue;
        default:
          goto done;
        }
      }
      Changed = true;
      for (Value::use_iterator UI = Alloca->use_begin(),
           UE = Alloca->use_end(); UI != UE; ) {
        CallInst *UserInst = cast<CallInst>(*UI++);
        if (!UserInst->use_empty())
          UserInst->replaceAllUsesWith(UserInst->getOperand(1));
        UserInst->eraseFromParent();
      }
      Alloca->eraseFromParent();
    done:;
    }
  }
}

/// OptimizeSequences - Identify program paths which execute sequences of
/// retains and releases which can be eliminated.
bool ObjCARCOpt::OptimizeSequences(Function &F) {
  /// Releases, Retains - These are used to store the results of the main flow
  /// analysis. These use Value* as the key instead of Instruction* so that the
  /// map stays valid when we get around to rewriting code and calls get
  /// replaced by arguments.
  DenseMap<Value *, RRInfo> Releases;
  MapVector<Value *, RRInfo> Retains;

  /// BBStates - This is used during the traversal of the function to track the
  /// states for each identified object at each block.
  DenseMap<const BasicBlock *, BBState> BBStates;

  // Analyze the CFG of the function, and all instructions.
  bool NestingDetected = Visit(F, BBStates, Retains, Releases);

  // Transform.
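  // Note that when this returns true (pairs were removed and nesting was
  // detected), runOnFunction reruns the whole analysis, since removing an
  // inner retain+release pair can expose an enclosing one.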
3121 return PerformCodePlacement(BBStates, Retains, Releases, F.getParent()) && 3122 NestingDetected; 3123 } 3124 3125 /// OptimizeReturns - Look for this pattern: 3126 /// 3127 /// %call = call i8* @something(...) 3128 /// %2 = call i8* @objc_retain(i8* %call) 3129 /// %3 = call i8* @objc_autorelease(i8* %2) 3130 /// ret i8* %3 3131 /// 3132 /// And delete the retain and autorelease. 3133 /// 3134 /// Otherwise if it's just this: 3135 /// 3136 /// %3 = call i8* @objc_autorelease(i8* %2) 3137 /// ret i8* %3 3138 /// 3139 /// convert the autorelease to autoreleaseRV. 3140 void ObjCARCOpt::OptimizeReturns(Function &F) { 3141 if (!F.getReturnType()->isPointerTy()) 3142 return; 3143 3144 SmallPtrSet<Instruction *, 4> DependingInstructions; 3145 SmallPtrSet<const BasicBlock *, 4> Visited; 3146 for (Function::iterator FI = F.begin(), FE = F.end(); FI != FE; ++FI) { 3147 BasicBlock *BB = FI; 3148 ReturnInst *Ret = dyn_cast<ReturnInst>(&BB->back()); 3149 if (!Ret) continue; 3150 3151 const Value *Arg = StripPointerCastsAndObjCCalls(Ret->getOperand(0)); 3152 FindDependencies(NeedsPositiveRetainCount, Arg, 3153 BB, Ret, DependingInstructions, Visited, PA); 3154 if (DependingInstructions.size() != 1) 3155 goto next_block; 3156 3157 { 3158 CallInst *Autorelease = 3159 dyn_cast_or_null<CallInst>(*DependingInstructions.begin()); 3160 if (!Autorelease) 3161 goto next_block; 3162 InstructionClass AutoreleaseClass = 3163 GetBasicInstructionClass(Autorelease); 3164 if (!IsAutorelease(AutoreleaseClass)) 3165 goto next_block; 3166 if (GetObjCArg(Autorelease) != Arg) 3167 goto next_block; 3168 3169 DependingInstructions.clear(); 3170 Visited.clear(); 3171 3172 // Check that there is nothing that can affect the reference 3173 // count between the autorelease and the retain. 3174 FindDependencies(CanChangeRetainCount, Arg, 3175 BB, Autorelease, DependingInstructions, Visited, PA); 3176 if (DependingInstructions.size() != 1) 3177 goto next_block; 3178 3179 { 3180 CallInst *Retain = 3181 dyn_cast_or_null<CallInst>(*DependingInstructions.begin()); 3182 3183 // Check that we found a retain with the same argument. 3184 if (!Retain || 3185 !IsRetain(GetBasicInstructionClass(Retain)) || 3186 GetObjCArg(Retain) != Arg) 3187 goto next_block; 3188 3189 DependingInstructions.clear(); 3190 Visited.clear(); 3191 3192 // Convert the autorelease to an autoreleaseRV, since it's 3193 // returning the value. 3194 if (AutoreleaseClass == IC_Autorelease) { 3195 Autorelease->setCalledFunction(getAutoreleaseRVCallee(F.getParent())); 3196 AutoreleaseClass = IC_AutoreleaseRV; 3197 } 3198 3199 // Check that there is nothing that can affect the reference 3200 // count between the retain and the call. 3201 // Note that Retain need not be in BB. 3202 FindDependencies(CanChangeRetainCount, Arg, Retain->getParent(), Retain, 3203 DependingInstructions, Visited, PA); 3204 if (DependingInstructions.size() != 1) 3205 goto next_block; 3206 3207 { 3208 CallInst *Call = 3209 dyn_cast_or_null<CallInst>(*DependingInstructions.begin()); 3210 3211 // Check that the pointer is the return value of the call. 3212 if (!Call || Arg != Call) 3213 goto next_block; 3214 3215 // Check that the call is a regular call. 3216 InstructionClass Class = GetBasicInstructionClass(Call); 3217 if (Class != IC_CallOrUser && Class != IC_Call) 3218 goto next_block; 3219 3220 // If so, we can zap the retain and autorelease. 
          Changed = true;
          ++NumRets;
          EraseInstruction(Retain);
          EraseInstruction(Autorelease);
        }
      }
    }

  next_block:
    DependingInstructions.clear();
    Visited.clear();
  }
}

bool ObjCARCOpt::doInitialization(Module &M) {
  if (!EnableARCOpts)
    return false;

  Run = ModuleHasARC(M);
  if (!Run)
    return false;

  // Identify the imprecise release metadata kind.
  ImpreciseReleaseMDKind =
    M.getContext().getMDKindID("clang.imprecise_release");

  // Intuitively, objc_retain and others are nocapture, however in practice
  // they are not, because they return their argument value. And objc_release
  // calls finalizers.

  // These are initialized lazily.
  RetainRVCallee = 0;
  AutoreleaseRVCallee = 0;
  ReleaseCallee = 0;
  RetainCallee = 0;
  RetainBlockCallee = 0;
  AutoreleaseCallee = 0;

  return false;
}

bool ObjCARCOpt::runOnFunction(Function &F) {
  if (!EnableARCOpts)
    return false;

  // If nothing in the Module uses ARC, don't do anything.
  if (!Run)
    return false;

  Changed = false;

  PA.setAA(&getAnalysis<AliasAnalysis>());

  // This pass performs several distinct transformations. As a compile-time aid
  // when compiling code that isn't ObjC, skip these if the relevant ObjC
  // library functions aren't declared.

  // Preliminary optimizations. This also computes UsedInThisFunction.
  OptimizeIndividualCalls(F);

  // Optimizations for weak pointers.
  if (UsedInThisFunction & ((1 << IC_LoadWeak) |
                            (1 << IC_LoadWeakRetained) |
                            (1 << IC_StoreWeak) |
                            (1 << IC_InitWeak) |
                            (1 << IC_CopyWeak) |
                            (1 << IC_MoveWeak) |
                            (1 << IC_DestroyWeak)))
    OptimizeWeakCalls(F);

  // Optimizations for retain+release pairs.
  if (UsedInThisFunction & ((1 << IC_Retain) |
                            (1 << IC_RetainRV) |
                            (1 << IC_RetainBlock)))
    if (UsedInThisFunction & (1 << IC_Release))
      // Run OptimizeSequences until it either stops making changes or
      // no retain+release pair nesting is detected.
      while (OptimizeSequences(F)) {}

  // Optimizations if objc_autorelease is used.
  if (UsedInThisFunction &
      ((1 << IC_Autorelease) | (1 << IC_AutoreleaseRV)))
    OptimizeReturns(F);

  return Changed;
}

void ObjCARCOpt::releaseMemory() {
  PA.clear();
}

//===----------------------------------------------------------------------===//
// ARC contraction.
//===----------------------------------------------------------------------===//

// TODO: ObjCARCContract could insert PHI nodes when uses aren't
// dominated by single calls.

#include "llvm/Operator.h"
#include "llvm/InlineAsm.h"
#include "llvm/Analysis/Dominators.h"

STATISTIC(NumStoreStrongs, "Number of objc_storeStrong calls formed");

namespace {
  /// ObjCARCContract - Late ARC optimizations. These change the IR in a way
  /// that makes it difficult to be analyzed by ObjCARCOpt, so it's run late.
  class ObjCARCContract : public FunctionPass {
    bool Changed;
    AliasAnalysis *AA;
    DominatorTree *DT;
    ProvenanceAnalysis PA;

    /// Run - A flag indicating whether this optimization pass should run.
    bool Run;

    /// StoreStrongCallee, etc. - Declarations for ObjC runtime
    /// functions, for use in creating calls to them.
void ObjCARCOpt::releaseMemory() {
  PA.clear();
}

//===----------------------------------------------------------------------===//
// ARC contraction.
//===----------------------------------------------------------------------===//

// TODO: ObjCARCContract could insert PHI nodes when uses aren't
// dominated by single calls.

#include "llvm/Operator.h"
#include "llvm/InlineAsm.h"
#include "llvm/Analysis/Dominators.h"

STATISTIC(NumStoreStrongs, "Number of objc_storeStrong calls formed");

namespace {
  /// ObjCARCContract - Late ARC optimizations. These change the IR in a way
  /// that makes it difficult for ObjCARCOpt to analyze, so this pass is run
  /// late.
  class ObjCARCContract : public FunctionPass {
    bool Changed;
    AliasAnalysis *AA;
    DominatorTree *DT;
    ProvenanceAnalysis PA;

    /// Run - A flag indicating whether this optimization pass should run.
    bool Run;

    /// StoreStrongCallee, etc. - Declarations for ObjC runtime
    /// functions, for use in creating calls to them. These are initialized
    /// lazily to avoid cluttering up the Module with unused declarations.
    Constant *StoreStrongCallee,
             *RetainAutoreleaseCallee, *RetainAutoreleaseRVCallee;

    /// RetainRVMarker - The inline asm string to insert between calls and
    /// RetainRV calls to make the optimization work on targets which need it.
    const MDString *RetainRVMarker;

    Constant *getStoreStrongCallee(Module *M);
    Constant *getRetainAutoreleaseCallee(Module *M);
    Constant *getRetainAutoreleaseRVCallee(Module *M);

    bool ContractAutorelease(Function &F, Instruction *Autorelease,
                             InstructionClass Class,
                             SmallPtrSet<Instruction *, 4>
                               &DependingInstructions,
                             SmallPtrSet<const BasicBlock *, 4>
                               &Visited);

    void ContractRelease(Instruction *Release,
                         inst_iterator &Iter);

    virtual void getAnalysisUsage(AnalysisUsage &AU) const;
    virtual bool doInitialization(Module &M);
    virtual bool runOnFunction(Function &F);

  public:
    static char ID;
    ObjCARCContract() : FunctionPass(ID) {
      initializeObjCARCContractPass(*PassRegistry::getPassRegistry());
    }
  };
}

char ObjCARCContract::ID = 0;
INITIALIZE_PASS_BEGIN(ObjCARCContract,
                      "objc-arc-contract", "ObjC ARC contraction", false, false)
INITIALIZE_AG_DEPENDENCY(AliasAnalysis)
INITIALIZE_PASS_DEPENDENCY(DominatorTree)
INITIALIZE_PASS_END(ObjCARCContract,
                    "objc-arc-contract", "ObjC ARC contraction", false, false)

Pass *llvm::createObjCARCContractPass() {
  return new ObjCARCContract();
}

void ObjCARCContract::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.addRequired<AliasAnalysis>();
  AU.addRequired<DominatorTree>();
  AU.setPreservesCFG();
}

Constant *ObjCARCContract::getStoreStrongCallee(Module *M) {
  if (!StoreStrongCallee) {
    LLVMContext &C = M->getContext();
    Type *I8X = PointerType::getUnqual(Type::getInt8Ty(C));
    Type *I8XX = PointerType::getUnqual(I8X);
    std::vector<Type *> Params;
    Params.push_back(I8XX);
    Params.push_back(I8X);

    AttrListPtr Attributes;
    Attributes.addAttr(~0u, Attribute::NoUnwind);
    Attributes.addAttr(1, Attribute::NoCapture);

    StoreStrongCallee =
      M->getOrInsertFunction(
        "objc_storeStrong",
        FunctionType::get(Type::getVoidTy(C), Params, /*isVarArg=*/false),
        Attributes);
  }
  return StoreStrongCallee;
}
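
// For illustration only: the lazily-created declaration above corresponds to
// IR along the lines of
//
//    declare void @objc_storeStrong(i8** nocapture, i8*) nounwind
//
// i.e. a void function taking the address of the strong slot and the new
// value, with nounwind on the function and nocapture on the first parameter.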
Constant *ObjCARCContract::getRetainAutoreleaseCallee(Module *M) {
  if (!RetainAutoreleaseCallee) {
    LLVMContext &C = M->getContext();
    Type *I8X = PointerType::getUnqual(Type::getInt8Ty(C));
    std::vector<Type *> Params;
    Params.push_back(I8X);
    FunctionType *FTy =
      FunctionType::get(I8X, Params, /*isVarArg=*/false);
    AttrListPtr Attributes;
    Attributes.addAttr(~0u, Attribute::NoUnwind);
    RetainAutoreleaseCallee =
      M->getOrInsertFunction("objc_retainAutorelease", FTy, Attributes);
  }
  return RetainAutoreleaseCallee;
}

Constant *ObjCARCContract::getRetainAutoreleaseRVCallee(Module *M) {
  if (!RetainAutoreleaseRVCallee) {
    LLVMContext &C = M->getContext();
    Type *I8X = PointerType::getUnqual(Type::getInt8Ty(C));
    std::vector<Type *> Params;
    Params.push_back(I8X);
    FunctionType *FTy =
      FunctionType::get(I8X, Params, /*isVarArg=*/false);
    AttrListPtr Attributes;
    Attributes.addAttr(~0u, Attribute::NoUnwind);
    RetainAutoreleaseRVCallee =
      M->getOrInsertFunction("objc_retainAutoreleaseReturnValue", FTy,
                             Attributes);
  }
  return RetainAutoreleaseRVCallee;
}

/// ContractAutorelease - Merge an autorelease with a retain into a fused
/// call.
bool
ObjCARCContract::ContractAutorelease(Function &F, Instruction *Autorelease,
                                     InstructionClass Class,
                                     SmallPtrSet<Instruction *, 4>
                                       &DependingInstructions,
                                     SmallPtrSet<const BasicBlock *, 4>
                                       &Visited) {
  const Value *Arg = GetObjCArg(Autorelease);

  // Check that there are no instructions between the retain and the
  // autorelease (such as an objc_autoreleasePoolPop) which may change the
  // reference count.
  CallInst *Retain = 0;
  if (Class == IC_AutoreleaseRV)
    FindDependencies(RetainAutoreleaseRVDep, Arg,
                     Autorelease->getParent(), Autorelease,
                     DependingInstructions, Visited, PA);
  else
    FindDependencies(RetainAutoreleaseDep, Arg,
                     Autorelease->getParent(), Autorelease,
                     DependingInstructions, Visited, PA);

  Visited.clear();
  if (DependingInstructions.size() != 1) {
    DependingInstructions.clear();
    return false;
  }

  Retain = dyn_cast_or_null<CallInst>(*DependingInstructions.begin());
  DependingInstructions.clear();

  if (!Retain ||
      GetBasicInstructionClass(Retain) != IC_Retain ||
      GetObjCArg(Retain) != Arg)
    return false;

  Changed = true;
  ++NumPeeps;

  if (Class == IC_AutoreleaseRV)
    Retain->setCalledFunction(getRetainAutoreleaseRVCallee(F.getParent()));
  else
    Retain->setCalledFunction(getRetainAutoreleaseCallee(F.getParent()));

  EraseInstruction(Autorelease);
  return true;
}
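
// For illustration only: ContractAutorelease rewrites, schematically,
//
//    %1 = call i8* @objc_retain(i8* %p)
//    %2 = call i8* @objc_autorelease(i8* %p)
//
// into a single fused call
//
//    %1 = call i8* @objc_retainAutorelease(i8* %p)
//
// (or @objc_retainAutoreleaseReturnValue when the autorelease was an
// autoreleaseRV), by retargeting the retain call and erasing the autorelease.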
/// ContractRelease - Attempt to merge an objc_release with a store, load, and
/// objc_retain to form an objc_storeStrong. This can be a little tricky
/// because the instructions don't always appear in order, and there may be
/// unrelated intervening instructions.
void ObjCARCContract::ContractRelease(Instruction *Release,
                                      inst_iterator &Iter) {
  LoadInst *Load = dyn_cast<LoadInst>(GetObjCArg(Release));
  if (!Load || !Load->isSimple()) return;

  // For now, require everything to be in one basic block.
  BasicBlock *BB = Release->getParent();
  if (Load->getParent() != BB) return;

  // Walk down to find the store.
  BasicBlock::iterator I = Load, End = BB->end();
  ++I;
  AliasAnalysis::Location Loc = AA->getLocation(Load);
  while (I != End &&
         (&*I == Release ||
          IsRetain(GetBasicInstructionClass(I)) ||
          !(AA->getModRefInfo(I, Loc) & AliasAnalysis::Mod)))
    ++I;
  StoreInst *Store = dyn_cast<StoreInst>(I);
  if (!Store || !Store->isSimple()) return;
  if (Store->getPointerOperand() != Loc.Ptr) return;

  Value *New = StripPointerCastsAndObjCCalls(Store->getValueOperand());

  // Walk up to find the retain.
  I = Store;
  BasicBlock::iterator Begin = BB->begin();
  while (I != Begin && GetBasicInstructionClass(I) != IC_Retain)
    --I;
  Instruction *Retain = I;
  if (GetBasicInstructionClass(Retain) != IC_Retain) return;
  if (GetObjCArg(Retain) != New) return;

  Changed = true;
  ++NumStoreStrongs;

  LLVMContext &C = Release->getContext();
  Type *I8X = PointerType::getUnqual(Type::getInt8Ty(C));
  Type *I8XX = PointerType::getUnqual(I8X);

  Value *Args[] = { Load->getPointerOperand(), New };
  if (Args[0]->getType() != I8XX)
    Args[0] = new BitCastInst(Args[0], I8XX, "", Store);
  if (Args[1]->getType() != I8X)
    Args[1] = new BitCastInst(Args[1], I8X, "", Store);
  CallInst *StoreStrong =
    CallInst::Create(getStoreStrongCallee(BB->getParent()->getParent()),
                     Args, "", Store);
  StoreStrong->setDoesNotThrow();
  StoreStrong->setDebugLoc(Store->getDebugLoc());

  if (&*Iter == Store) ++Iter;
  Store->eraseFromParent();
  Release->eraseFromParent();
  EraseInstruction(Retain);
  if (Load->use_empty())
    Load->eraseFromParent();
}
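
// For illustration only: ContractRelease turns the canonical strong-store
// sequence, schematically
//
//    %old = load i8** %p
//    ...
//    %1 = call i8* @objc_retain(i8* %new)
//    store i8* %new, i8** %p
//    call void @objc_release(i8* %old)
//
// into
//
//    call void @objc_storeStrong(i8** %p, i8* %new)
//
// erasing the load as well when it has no other uses.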
bool ObjCARCContract::doInitialization(Module &M) {
  Run = ModuleHasARC(M);
  if (!Run)
    return false;

  // These are initialized lazily.
  StoreStrongCallee = 0;
  RetainAutoreleaseCallee = 0;
  RetainAutoreleaseRVCallee = 0;

  // Initialize RetainRVMarker.
  RetainRVMarker = 0;
  if (NamedMDNode *NMD =
        M.getNamedMetadata("clang.arc.retainAutoreleasedReturnValueMarker"))
    if (NMD->getNumOperands() == 1) {
      const MDNode *N = NMD->getOperand(0);
      if (N->getNumOperands() == 1)
        if (const MDString *S = dyn_cast<MDString>(N->getOperand(0)))
          RetainRVMarker = S;
    }

  return false;
}

bool ObjCARCContract::runOnFunction(Function &F) {
  if (!EnableARCOpts)
    return false;

  // If nothing in the Module uses ARC, don't do anything.
  if (!Run)
    return false;

  Changed = false;
  AA = &getAnalysis<AliasAnalysis>();
  DT = &getAnalysis<DominatorTree>();

  PA.setAA(&getAnalysis<AliasAnalysis>());

  // For ObjC library calls which return their argument, replace uses of the
  // argument with uses of the call return value, if it dominates the use. This
  // reduces register pressure.
  SmallPtrSet<Instruction *, 4> DependingInstructions;
  SmallPtrSet<const BasicBlock *, 4> Visited;
  for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E; ) {
    Instruction *Inst = &*I++;

    // Only these library routines return their argument. In particular,
    // objc_retainBlock does not necessarily return its argument.
    InstructionClass Class = GetBasicInstructionClass(Inst);
    switch (Class) {
    case IC_Retain:
    case IC_FusedRetainAutorelease:
    case IC_FusedRetainAutoreleaseRV:
      break;
    case IC_Autorelease:
    case IC_AutoreleaseRV:
      if (ContractAutorelease(F, Inst, Class, DependingInstructions, Visited))
        continue;
      break;
    case IC_RetainRV: {
      // If we're compiling for a target which needs a special inline-asm
      // marker to do the retainAutoreleasedReturnValue optimization,
      // insert it now.
      if (!RetainRVMarker)
        break;
      BasicBlock::iterator BBI = Inst;
      --BBI;
      while (isNoopInstruction(BBI)) --BBI;
      if (&*BBI == GetObjCArg(Inst)) {
        InlineAsm *IA =
          InlineAsm::get(FunctionType::get(Type::getVoidTy(Inst->getContext()),
                                           /*isVarArg=*/false),
                         RetainRVMarker->getString(),
                         /*Constraints=*/"", /*hasSideEffects=*/true);
        CallInst::Create(IA, "", Inst);
      }
      break;
    }
    case IC_InitWeak: {
      // objc_initWeak(p, null) => *p = null
      CallInst *CI = cast<CallInst>(Inst);
      if (isNullOrUndef(CI->getArgOperand(1))) {
        Value *Null =
          ConstantPointerNull::get(cast<PointerType>(CI->getType()));
        Changed = true;
        new StoreInst(Null, CI->getArgOperand(0), CI);
        CI->replaceAllUsesWith(Null);
        CI->eraseFromParent();
      }
      continue;
    }
    case IC_Release:
      ContractRelease(Inst, I);
      continue;
    default:
      continue;
    }

    // Don't use GetObjCArg because we don't want to look through bitcasts
    // and such; to do the replacement, the argument must have type i8*.
    const Value *Arg = cast<CallInst>(Inst)->getArgOperand(0);
    for (;;) {
      // If we're compiling bugpointed code, don't get in trouble.
      if (!isa<Instruction>(Arg) && !isa<Argument>(Arg))
        break;
      // Look through the uses of the pointer.
      for (Value::const_use_iterator UI = Arg->use_begin(), UE = Arg->use_end();
           UI != UE; ) {
        Use &U = UI.getUse();
        unsigned OperandNo = UI.getOperandNo();
        ++UI; // Increment UI now, because we may unlink its element.
        if (Instruction *UserInst = dyn_cast<Instruction>(U.getUser()))
          if (Inst != UserInst && DT->dominates(Inst, UserInst)) {
            Changed = true;
            Instruction *Replacement = Inst;
            Type *UseTy = U.get()->getType();
            if (PHINode *PHI = dyn_cast<PHINode>(UserInst)) {
              // For PHI nodes, insert the bitcast in the predecessor block.
              unsigned ValNo =
                PHINode::getIncomingValueNumForOperand(OperandNo);
              BasicBlock *BB =
                PHI->getIncomingBlock(ValNo);
              if (Replacement->getType() != UseTy)
                Replacement = new BitCastInst(Replacement, UseTy, "",
                                              &BB->back());
              for (unsigned i = 0, e = PHI->getNumIncomingValues();
                   i != e; ++i)
                if (PHI->getIncomingBlock(i) == BB) {
                  // Keep the UI iterator valid.
                  if (&PHI->getOperandUse(
                        PHINode::getOperandNumForIncomingValue(i)) ==
                      &UI.getUse())
                    ++UI;
                  PHI->setIncomingValue(i, Replacement);
                }
            } else {
              if (Replacement->getType() != UseTy)
                Replacement = new BitCastInst(Replacement, UseTy, "", UserInst);
              U.set(Replacement);
            }
          }
      }

      // If Arg is a no-op casted pointer, strip one level of casts and
      // iterate.
      if (const BitCastInst *BI = dyn_cast<BitCastInst>(Arg))
        Arg = BI->getOperand(0);
      else if (isa<GEPOperator>(Arg) &&
               cast<GEPOperator>(Arg)->hasAllZeroIndices())
        Arg = cast<GEPOperator>(Arg)->getPointerOperand();
      else if (isa<GlobalAlias>(Arg) &&
               !cast<GlobalAlias>(Arg)->mayBeOverridden())
        Arg = cast<GlobalAlias>(Arg)->getAliasee();
      else
        break;
    }
  }

  return Changed;
}
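
// For illustration only, and not part of this file's interface: a pass
// pipeline would typically schedule these passes roughly as
//
//    PM.add(createObjCARCOptPass());      // mid-level, alongside other IR opts
//    PM.add(createObjCARCContractPass()); // late, shortly before codegen
//
// assuming a createObjCARCOptPass() factory declared alongside
// createObjCARCContractPass() earlier in this file.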